From 9cc6f59b59556b7bb2668317e6f8072ed97f0075 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:46:48 -0700 Subject: [PATCH 0001/1197] Moving all pubsub files into subdirectory. Done via: $ mkdir -p pubsub/google/cloud $ cp google/__init__.py pubsub/google/__init__.py $ git add pubsub/google/__init__.py $ cp google/cloud/__init__.py pubsub/google/cloud/__init__.py $ git add pubsub/google/cloud/__init__.py $ git mv google/cloud/pubsub pubsub/google/cloud/pubsub $ git mv unit_tests/pubsub pubsub/unit_tests --- .../google-cloud-pubsub/google/__init__.py | 20 + .../google/cloud/__init__.py | 20 + .../google/cloud/pubsub/__init__.py | 33 + .../google/cloud/pubsub/_gax.py | 544 +++++++++ .../google/cloud/pubsub/_helpers.py | 73 ++ .../google/cloud/pubsub/client.py | 190 +++ .../google/cloud/pubsub/connection.py | 541 +++++++++ .../google/cloud/pubsub/iam.py | 259 ++++ .../google/cloud/pubsub/message.py | 93 ++ .../google/cloud/pubsub/subscription.py | 509 ++++++++ .../google/cloud/pubsub/topic.py | 471 ++++++++ .../unit_tests/__init__.py | 13 + .../unit_tests/test__gax.py | 1045 +++++++++++++++++ .../unit_tests/test__helpers.py | 57 + .../unit_tests/test_client.py | 303 +++++ .../unit_tests/test_connection.py | 737 ++++++++++++ .../unit_tests/test_iam.py | 188 +++ .../unit_tests/test_message.py | 126 ++ .../unit_tests/test_subscription.py | 811 +++++++++++++ .../unit_tests/test_topic.py | 803 +++++++++++++ 20 files changed, 6836 insertions(+) create mode 100644 packages/google-cloud-pubsub/google/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/client.py create mode 100644 
packages/google-cloud-pubsub/google/cloud/pubsub/connection.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/iam.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/message.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/topic.py create mode 100644 packages/google-cloud-pubsub/unit_tests/__init__.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test__gax.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test__helpers.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test_client.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test_connection.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test_iam.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test_message.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test_subscription.py create mode 100644 packages/google-cloud-pubsub/unit_tests/test_topic.py diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py new file mode 100644 index 000000000000..b2b833373882 --- /dev/null +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py new file mode 100644 index 000000000000..8ac7b74af136 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2014 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py new file mode 100644 index 000000000000..d072d315ed98 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py @@ -0,0 +1,33 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Pubsub API wrapper. + +The main concepts with this API are: + +- :class:`~google.cloud.pubsub.topic.Topic` represents an endpoint to which + messages can be published using the Cloud Storage Pubsub API. + +- :class:`~google.cloud.pubsub.subscription.Subscription` represents a named + subscription (either pull or push) to a topic. +""" + + +from google.cloud.pubsub.client import Client +from google.cloud.pubsub.connection import Connection +from google.cloud.pubsub.subscription import Subscription +from google.cloud.pubsub.topic import Topic + + +SCOPE = Connection.SCOPE diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py new file mode 100644 index 000000000000..6d92c818f43c --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -0,0 +1,544 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""GAX wrapper for Pubsub API requests.""" + +from google.cloud.gapic.pubsub.v1.publisher_api import PublisherApi +from google.cloud.gapic.pubsub.v1.subscriber_api import SubscriberApi +from google.gax import CallOptions +from google.gax import INITIAL_PAGE +from google.gax.errors import GaxError +from google.gax.grpc import exc_to_code +from google.pubsub.v1.pubsub_pb2 import PubsubMessage +from google.pubsub.v1.pubsub_pb2 import PushConfig +from grpc import insecure_channel +from grpc import StatusCode + +# pylint: disable=ungrouped-imports +from google.cloud._helpers import _to_bytes +from google.cloud._helpers import _pb_timestamp_to_rfc3339 +from google.cloud.exceptions import Conflict +from google.cloud.exceptions import NotFound +# pylint: enable=ungrouped-imports + + +class _PublisherAPI(object): + """Helper mapping publisher-related APIs. + + :type gax_api: :class:`google.pubsub.v1.publisher_api.PublisherApi` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_topics(self, project, page_size=0, page_token=None): + """List topics for the project associated with this API. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list + + :type project: string + :param project: project ID + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: tuple, (list, str) + :returns: list of ``Topic`` resource dicts, plus a + "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + path = 'projects/%s' % (project,) + page_iter = self._gax_api.list_topics( + path, page_size=page_size, options=options) + topics = [{'name': topic_pb.name} for topic_pb in page_iter.next()] + token = page_iter.page_token or None + return topics, token + + def topic_create(self, topic_path): + """API call: create a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create + + :type topic_path: string + :param topic_path: fully-qualified path of the new topic, in format + ``projects//topics/``. + + :rtype: dict + :returns: ``Topic`` resource returned from the API. + :raises: :exc:`google.cloud.exceptions.Conflict` if the topic already + exists + """ + try: + topic_pb = self._gax_api.create_topic(topic_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + raise Conflict(topic_path) + raise + return {'name': topic_pb.name} + + def topic_get(self, topic_path): + """API call: retrieve a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get + + :type topic_path: string + :param topic_path: fully-qualified path of the topic, in format + ``projects//topics/``. + + :rtype: dict + :returns: ``Topic`` resource returned from the API. + :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not + exist + """ + try: + topic_pb = self._gax_api.get_topic(topic_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(topic_path) + raise + return {'name': topic_pb.name} + + def topic_delete(self, topic_path): + """API call: delete a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create + + :type topic_path: string + :param topic_path: fully-qualified path of the new topic, in format + ``projects//topics/``. 
+ """ + try: + self._gax_api.delete_topic(topic_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(topic_path) + raise + + def topic_publish(self, topic_path, messages): + """API call: publish one or more messages to a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish + + :type topic_path: string + :param topic_path: fully-qualified path of the topic, in format + ``projects//topics/``. + + :type messages: list of dict + :param messages: messages to be published. + + :rtype: list of string + :returns: list of opaque IDs for published messages. + :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not + exist + """ + options = CallOptions(is_bundling=False) + message_pbs = [_message_pb_from_mapping(message) + for message in messages] + try: + result = self._gax_api.publish(topic_path, message_pbs, + options=options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(topic_path) + raise + return result.message_ids + + def topic_list_subscriptions(self, topic_path, page_size=0, + page_token=None): + """API call: list subscriptions bound to a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list + + :type topic_path: string + :param topic_path: fully-qualified path of the topic, in format + ``projects//topics/``. + + :type page_size: int + :param page_size: maximum number of subscriptions to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of subscriptions. + If not passed, the API will return the first page + of subscriptions. + + :rtype: list of strings + :returns: fully-qualified names of subscriptions for the supplied + topic. 
+ :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not + exist + """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + try: + page_iter = self._gax_api.list_topic_subscriptions( + topic_path, page_size=page_size, options=options) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(topic_path) + raise + subs = page_iter.next() + token = page_iter.page_token or None + return subs, token + + +class _SubscriberAPI(object): + """Helper mapping subscriber-related APIs. + + :type gax_api: :class:`google.pubsub.v1.publisher_api.SubscriberApi` + :param gax_api: API object used to make GAX requests. + """ + def __init__(self, gax_api): + self._gax_api = gax_api + + def list_subscriptions(self, project, page_size=0, page_token=None): + """List subscriptions for the project associated with this API. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list + + :type project: string + :param project: project ID + + :type page_size: int + :param page_size: maximum number of subscriptions to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of subscriptions. + If not passed, the API will return the first page + of subscriptions. + + :rtype: tuple, (list, str) + :returns: list of ``Subscription`` resource dicts, plus a + "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + path = 'projects/%s' % (project,) + page_iter = self._gax_api.list_subscriptions( + path, page_size=page_size, options=options) + subscriptions = [_subscription_pb_to_mapping(sub_pb) + for sub_pb in page_iter.next()] + token = page_iter.page_token or None + return subscriptions, token + + def subscription_create(self, subscription_path, topic_path, + ack_deadline=None, push_endpoint=None): + """API call: create a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type topic_path: string + :param topic_path: the fully-qualified path of the topic being + subscribed, in format + ``projects//topics/``. + + :type ack_deadline: int, or ``NoneType`` + :param ack_deadline: the deadline (in seconds) by which messages pulled + from the back-end must be acknowledged. + + :type push_endpoint: string, or ``NoneType`` + :param push_endpoint: URL to which messages will be pushed by the + back-end. If not set, the application must pull + messages. + + :rtype: dict + :returns: ``Subscription`` resource returned from the API. 
+ """ + if push_endpoint is not None: + push_config = PushConfig(push_endpoint=push_endpoint) + else: + push_config = None + + if ack_deadline is None: + ack_deadline = 0 + + try: + sub_pb = self._gax_api.create_subscription( + subscription_path, topic_path, push_config, ack_deadline) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + raise Conflict(topic_path) + raise + return _subscription_pb_to_mapping(sub_pb) + + def subscription_get(self, subscription_path): + """API call: retrieve a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the subscription, in format + ``projects//subscriptions/``. + + :rtype: dict + :returns: ``Subscription`` resource returned from the API. + """ + try: + sub_pb = self._gax_api.get_subscription(subscription_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + return _subscription_pb_to_mapping(sub_pb) + + def subscription_delete(self, subscription_path): + """API call: delete a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the subscription, in format + ``projects//subscriptions/``. 
+ """ + try: + self._gax_api.delete_subscription(subscription_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + + def subscription_modify_push_config(self, subscription_path, + push_endpoint): + """API call: update push config of a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type push_endpoint: string, or ``NoneType`` + :param push_endpoint: URL to which messages will be pushed by the + back-end. If not set, the application must pull + messages. + """ + push_config = PushConfig(push_endpoint=push_endpoint) + try: + self._gax_api.modify_push_config(subscription_path, push_config) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + + def subscription_pull(self, subscription_path, return_immediately=False, + max_messages=1): + """API call: retrieve messages for a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type return_immediately: boolean + :param return_immediately: if True, the back-end returns even if no + messages are available; if False, the API + call blocks until one or more messages are + available. + + :type max_messages: int + :param max_messages: the maximum number of messages to return. + + :rtype: list of dict + :returns: the ``receivedMessages`` element of the response. 
+ """ + try: + response_pb = self._gax_api.pull( + subscription_path, max_messages, return_immediately) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + return [_received_message_pb_to_mapping(rmpb) + for rmpb in response_pb.received_messages] + + def subscription_acknowledge(self, subscription_path, ack_ids): + """API call: acknowledge retrieved messages + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type ack_ids: list of string + :param ack_ids: ack IDs of messages being acknowledged + """ + try: + self._gax_api.acknowledge(subscription_path, ack_ids) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + + def subscription_modify_ack_deadline(self, subscription_path, ack_ids, + ack_deadline): + """API call: update ack deadline for retrieved messages + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type ack_ids: list of string + :param ack_ids: ack IDs of messages being acknowledged + + :type ack_deadline: int + :param ack_deadline: the deadline (in seconds) by which messages pulled + from the back-end must be acknowledged. + """ + try: + self._gax_api.modify_ack_deadline( + subscription_path, ack_ids, ack_deadline) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + + +def _message_pb_from_mapping(message): + """Helper for :meth:`_PublisherAPI.topic_publish`. 
+ + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + return PubsubMessage(data=_to_bytes(message['data']), + attributes=message['attributes']) + + +def _subscription_pb_to_mapping(sub_pb): + """Helper for :meth:`list_subscriptions`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + mapping = { + 'name': sub_pb.name, + 'topic': sub_pb.topic, + 'ackDeadlineSeconds': sub_pb.ack_deadline_seconds, + } + if sub_pb.push_config.push_endpoint != '': + mapping['pushConfig'] = { + 'pushEndpoint': sub_pb.push_config.push_endpoint, + } + return mapping + + +def _message_pb_to_mapping(message_pb): + """Helper for :meth:`pull`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + return { + 'messageId': message_pb.message_id, + 'data': message_pb.data, + 'attributes': message_pb.attributes, + 'publishTime': _pb_timestamp_to_rfc3339(message_pb.publish_time), + } + + +def _received_message_pb_to_mapping(received_message_pb): + """Helper for :meth:`pull`, et aliae + + Performs "impedance matching" between the protobuf attrs and the keys + expected in the JSON API. + """ + return { + 'ackId': received_message_pb.ack_id, + 'message': _message_pb_to_mapping( + received_message_pb.message), + } + + +def make_gax_publisher_api(connection): + """Create an instance of the GAX Publisher API. + + If the ``connection`` is intended for a local emulator, then + an insecure ``channel`` is created pointing at the local + Pub / Sub server. + + :type connection: :class:`~google.cloud.pubsub.connection.Connection` + :param connection: The connection that holds configuration details. + + :rtype: :class:`~google.cloud.pubsub.v1.publisher_api.PublisherApi` + :returns: A publisher API instance with the proper connection + configuration. 
+ :rtype: :class:`~google.cloud.pubsub.v1.subscriber_api.SubscriberApi` + """ + channel = None + if connection.in_emulator: + channel = insecure_channel(connection.host) + return PublisherApi(channel=channel) + + +def make_gax_subscriber_api(connection): + """Create an instance of the GAX Subscriber API. + + If the ``connection`` is intended for a local emulator, then + an insecure ``channel`` is created pointing at the local + Pub / Sub server. + + :type connection: :class:`~google.cloud.pubsub.connection.Connection` + :param connection: The connection that holds configuration details. + + :rtype: :class:`~google.cloud.pubsub.v1.subscriber_api.SubscriberApi` + :returns: A subscriber API instance with the proper connection + configuration. + """ + channel = None + if connection.in_emulator: + channel = insecure_channel(connection.host) + return SubscriberApi(channel=channel) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py new file mode 100644 index 000000000000..d9d144488dfc --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py @@ -0,0 +1,73 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Helper functions for shared behavior.""" + +import re + +from google.cloud._helpers import _name_from_project_path + + +_TOPIC_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /topics/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +_SUBSCRIPTION_TEMPLATE = re.compile(r""" + projects/ # static prefix + (?P[^/]+) # initial letter, wordchars + hyphen + /subscriptions/ # static midfix + (?P[^/]+) # initial letter, wordchars + allowed punc +""", re.VERBOSE) + + +def topic_name_from_path(path, project): + """Validate a topic URI path and get the topic name. + + :type path: string + :param path: URI path for a topic API request. + + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + + :rtype: string + :returns: Topic name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. + """ + return _name_from_project_path(path, project, _TOPIC_TEMPLATE) + + +def subscription_name_from_path(path, project): + """Validate a subscription URI path and get the subscription name. + + :type path: string + :param path: URI path for a subscription API request. + + :type project: string + :param project: The project associated with the request. It is + included for validation purposes. + + :rtype: string + :returns: subscription name parsed from ``path``. + :raises: :class:`ValueError` if the ``path`` is ill-formed or if + the project from the ``path`` does not agree with the + ``project`` passed in. 
+ """ + return _name_from_project_path(path, project, _SUBSCRIPTION_TEMPLATE) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py new file mode 100644 index 000000000000..09fca44cfcfc --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -0,0 +1,190 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client for interacting with the Google Cloud Pub/Sub API.""" + +import os + +from google.cloud.client import JSONClient +from google.cloud.environment_vars import DISABLE_GRPC +from google.cloud.pubsub.connection import Connection +from google.cloud.pubsub.connection import _PublisherAPI as JSONPublisherAPI +from google.cloud.pubsub.connection import _SubscriberAPI as JSONSubscriberAPI +from google.cloud.pubsub.connection import _IAMPolicyAPI +from google.cloud.pubsub.subscription import Subscription +from google.cloud.pubsub.topic import Topic + +# pylint: disable=ungrouped-imports +try: + from google.cloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI + from google.cloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI + from google.cloud.pubsub._gax import make_gax_publisher_api + from google.cloud.pubsub._gax import make_gax_subscriber_api +except ImportError: # pragma: NO COVER + _HAVE_GAX = False + GAXPublisherAPI = None + GAXSubscriberAPI = None + make_gax_publisher_api = None + make_gax_subscriber_api = None 
+else: + _HAVE_GAX = True +# pylint: enable=ungrouped-imports + + +_DISABLE_GAX = os.getenv(DISABLE_GRPC, False) +_USE_GAX = _HAVE_GAX and not _DISABLE_GAX + + +class Client(JSONClient): + """Client to bundle configuration needed for API requests. + + :type project: string + :param project: the project which the client acts on behalf of. Will be + passed when creating a topic. If not passed, + falls back to the default inferred from the environment. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` or + :class:`NoneType` + :param credentials: The OAuth2 Credentials to use for the connection + owned by this client. If not passed (and if no ``http`` + object is passed), falls back to the default inferred + from the environment. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: An optional HTTP object to make requests. If not passed, an + ``http`` object is created that is bound to the + ``credentials`` for the current object. + """ + + _connection_class = Connection + _publisher_api = _subscriber_api = _iam_policy_api = None + + @property + def publisher_api(self): + """Helper for publisher-related API calls.""" + if self._publisher_api is None: + if _USE_GAX: + generated = make_gax_publisher_api(self.connection) + self._publisher_api = GAXPublisherAPI(generated) + else: + self._publisher_api = JSONPublisherAPI(self.connection) + return self._publisher_api + + @property + def subscriber_api(self): + """Helper for subscriber-related API calls.""" + if self._subscriber_api is None: + if _USE_GAX: + generated = make_gax_subscriber_api(self.connection) + self._subscriber_api = GAXSubscriberAPI(generated) + else: + self._subscriber_api = JSONSubscriberAPI(self.connection) + return self._subscriber_api + + @property + def iam_policy_api(self): + """Helper for IAM policy-related API calls.""" + if self._iam_policy_api is None: + self._iam_policy_api = _IAMPolicyAPI(self.connection) + return self._iam_policy_api + 
+ def list_topics(self, page_size=None, page_token=None): + """List topics for the project associated with this client. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START client_list_topics] + :end-before: [END client_list_topics] + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: tuple, (list, str) + :returns: list of :class:`google.cloud.pubsub.topic.Topic`, plus a + "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). + """ + api = self.publisher_api + resources, next_token = api.list_topics( + self.project, page_size, page_token) + topics = [Topic.from_api_repr(resource, self) + for resource in resources] + return topics, next_token + + def list_subscriptions(self, page_size=None, page_token=None): + """List subscriptions for the project associated with this client. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START client_list_subscriptions] + :end-before: [END client_list_subscriptions] + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. 
+ + :rtype: tuple, (list, str) + :returns: list of :class:`~.pubsub.subscription.Subscription`, + plus a "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). + """ + api = self.subscriber_api + resources, next_token = api.list_subscriptions( + self.project, page_size, page_token) + topics = {} + subscriptions = [Subscription.from_api_repr(resource, self, + topics=topics) + for resource in resources] + return subscriptions, next_token + + def topic(self, name, timestamp_messages=False): + """Creates a topic bound to the current client. + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START client_topic] + :end-before: [END client_topic] + + :type name: string + :param name: the name of the topic to be constructed. + + :type timestamp_messages: boolean + :param timestamp_messages: To be passed to ``Topic`` constructor. + + :rtype: :class:`google.cloud.pubsub.topic.Topic` + :returns: Topic created with the current client. + """ + return Topic(name, client=self, timestamp_messages=timestamp_messages) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py new file mode 100644 index 000000000000..ecff402e3990 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -0,0 +1,541 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Create / interact with Google Cloud Pub/Sub connections.""" + +import os + +from google.cloud import connection as base_connection +from google.cloud.environment_vars import PUBSUB_EMULATOR + + +PUBSUB_API_HOST = 'pubsub.googleapis.com' +"""Pub / Sub API request host.""" + + +class Connection(base_connection.JSONConnection): + """A connection to Google Cloud Pub/Sub via the JSON REST API. + + :type credentials: :class:`oauth2client.client.OAuth2Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + connection. + + :type http: :class:`httplib2.Http` or class that defines ``request()``. + :param http: (Optional) HTTP object to make requests. + """ + + API_BASE_URL = 'https://' + PUBSUB_API_HOST + """The base of the API call URL.""" + + API_VERSION = 'v1' + """The version of the API, used in building the API call's URL.""" + + API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' + """A template for the URL of a particular API call.""" + + SCOPE = ('https://www.googleapis.com/auth/pubsub', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" + + def __init__(self, credentials=None, http=None): + super(Connection, self).__init__(credentials=credentials, http=http) + emulator_host = os.getenv(PUBSUB_EMULATOR) + if emulator_host is None: + self.host = self.__class__.API_BASE_URL + self.api_base_url = self.__class__.API_BASE_URL + self.in_emulator = False + else: + self.host = emulator_host + self.api_base_url = 'http://' + emulator_host + self.in_emulator = True + + def build_api_url(self, path, query_params=None, + api_base_url=None, api_version=None): + """Construct an API url given a few components, some optional. + + Typically, you shouldn't need to use this method. + + :type path: string + :param path: The path to the resource. 
+ + :type query_params: dict or list + :param query_params: A dictionary of keys and values (or list of + key-value pairs) to insert into the query + string of the URL. + + :type api_base_url: string + :param api_base_url: The base URL for the API endpoint. + Typically you won't have to provide this. + + :type api_version: string + :param api_version: The version of the API to call. + Typically you shouldn't provide this and instead + use the default for the library. + + :rtype: string + :returns: The URL assembled from the pieces provided. + """ + if api_base_url is None: + api_base_url = self.api_base_url + return super(Connection, self.__class__).build_api_url( + path, query_params=query_params, + api_base_url=api_base_url, api_version=api_version) + + +class _PublisherAPI(object): + """Helper mapping publisher-related APIs. + + :type connection: :class:`Connection` + :param connection: the connection used to make API requests. + """ + + def __init__(self, connection): + self._connection = connection + + def list_topics(self, project, page_size=None, page_token=None): + """API call: list topics for a given project + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list + + :type project: string + :param project: project ID + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: tuple, (list, str) + :returns: list of ``Topic`` resource dicts, plus a + "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). 
+ """ + conn = self._connection + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/topics' % (project,) + resp = conn.api_request(method='GET', path=path, query_params=params) + return resp.get('topics', ()), resp.get('nextPageToken') + + def topic_create(self, topic_path): + """API call: create a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create + + :type topic_path: string + :param topic_path: the fully-qualified path of the new topic, in format + ``projects//topics/``. + + :rtype: dict + :returns: ``Topic`` resource returned from the API. + """ + conn = self._connection + return conn.api_request(method='PUT', path='/%s' % (topic_path,)) + + def topic_get(self, topic_path): + """API call: retrieve a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get + + :type topic_path: string + :param topic_path: the fully-qualified path of the topic, in format + ``projects//topics/``. + + :rtype: dict + :returns: ``Topic`` resource returned from the API. + """ + conn = self._connection + return conn.api_request(method='GET', path='/%s' % (topic_path,)) + + def topic_delete(self, topic_path): + """API call: delete a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete + + :type topic_path: string + :param topic_path: the fully-qualified path of the topic, in format + ``projects//topics/``. + """ + conn = self._connection + conn.api_request(method='DELETE', path='/%s' % (topic_path,)) + + def topic_publish(self, topic_path, messages): + """API call: publish one or more messages to a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish + + :type topic_path: string + :param topic_path: the fully-qualified path of the topic, in format + ``projects//topics/``. 
+ + :type messages: list of dict + :param messages: messages to be published. + + :rtype: list of string + :returns: list of opaque IDs for published messages. + """ + conn = self._connection + data = {'messages': messages} + response = conn.api_request( + method='POST', path='/%s:publish' % (topic_path,), data=data) + return response['messageIds'] + + def topic_list_subscriptions(self, topic_path, page_size=None, + page_token=None): + """API call: list subscriptions bound to a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list + + :type topic_path: string + :param topic_path: the fully-qualified path of the topic, in format + ``projects//topics/``. + + :type page_size: int + :param page_size: maximum number of subscriptions to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: list of strings + :returns: fully-qualified names of subscriptions for the supplied + topic. + """ + conn = self._connection + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/%s/subscriptions' % (topic_path,) + resp = conn.api_request(method='GET', path=path, query_params=params) + return resp.get('subscriptions', ()), resp.get('nextPageToken') + + +class _SubscriberAPI(object): + """Helper mapping subscriber-related APIs. + + :type connection: :class:`Connection` + :param connection: the connection used to make API requests. 
+ """ + + def __init__(self, connection): + self._connection = connection + + def list_subscriptions(self, project, page_size=None, page_token=None): + """API call: list subscriptions for a given project + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list + + :type project: string + :param project: project ID + + :type page_size: int + :param page_size: maximum number of subscriptions to return, If not + passed, defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of subscriptions. + If not passed, the API will return the first page + of subscriptions. + + :rtype: tuple, (list, str) + :returns: list of ``Subscription`` resource dicts, plus a + "next page token" string: if not None, indicates that + more subscriptions can be retrieved with another call (pass + that value as ``page_token``). + """ + conn = self._connection + params = {} + + if page_size is not None: + params['pageSize'] = page_size + + if page_token is not None: + params['pageToken'] = page_token + + path = '/projects/%s/subscriptions' % (project,) + resp = conn.api_request(method='GET', path=path, query_params=params) + return resp.get('subscriptions', ()), resp.get('nextPageToken') + + def subscription_create(self, subscription_path, topic_path, + ack_deadline=None, push_endpoint=None): + """API call: create a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type topic_path: string + :param topic_path: the fully-qualified path of the topic being + subscribed, in format + ``projects//topics/``. + + :type ack_deadline: int, or ``NoneType`` + :param ack_deadline: the deadline (in seconds) by which messages pulled + from the back-end must be acknowledged. 
+ + :type push_endpoint: string, or ``NoneType`` + :param push_endpoint: URL to which messages will be pushed by the + back-end. If not set, the application must pull + messages. + + :rtype: dict + :returns: ``Subscription`` resource returned from the API. + """ + conn = self._connection + path = '/%s' % (subscription_path,) + resource = {'topic': topic_path} + + if ack_deadline is not None: + resource['ackDeadlineSeconds'] = ack_deadline + + if push_endpoint is not None: + resource['pushConfig'] = {'pushEndpoint': push_endpoint} + + return conn.api_request(method='PUT', path=path, data=resource) + + def subscription_get(self, subscription_path): + """API call: retrieve a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the subscription, in format + ``projects//subscriptions/``. + + :rtype: dict + :returns: ``Subscription`` resource returned from the API. + """ + conn = self._connection + path = '/%s' % (subscription_path,) + return conn.api_request(method='GET', path=path) + + def subscription_delete(self, subscription_path): + """API call: delete a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the subscription, in format + ``projects//subscriptions/``. + """ + conn = self._connection + path = '/%s' % (subscription_path,) + conn.api_request(method='DELETE', path=path) + + def subscription_modify_push_config(self, subscription_path, + push_endpoint): + """API call: update push config of a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. 
+ + :type push_endpoint: string, or ``NoneType`` + :param push_endpoint: URL to which messages will be pushed by the + back-end. If not set, the application must pull + messages. + """ + conn = self._connection + path = '/%s:modifyPushConfig' % (subscription_path,) + resource = {'pushConfig': {'pushEndpoint': push_endpoint}} + conn.api_request(method='POST', path=path, data=resource) + + def subscription_pull(self, subscription_path, return_immediately=False, + max_messages=1): + """API call: retrieve messages for a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type return_immediately: boolean + :param return_immediately: if True, the back-end returns even if no + messages are available; if False, the API + call blocks until one or more messages are + available. + + :type max_messages: int + :param max_messages: the maximum number of messages to return. + + :rtype: list of dict + :returns: the ``receivedMessages`` element of the response. + """ + conn = self._connection + path = '/%s:pull' % (subscription_path,) + data = { + 'returnImmediately': return_immediately, + 'maxMessages': max_messages, + } + response = conn.api_request(method='POST', path=path, data=data) + return response.get('receivedMessages', ()) + + def subscription_acknowledge(self, subscription_path, ack_ids): + """API call: acknowledge retrieved messages + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. 
+ + :type ack_ids: list of string + :param ack_ids: ack IDs of messages being acknowledged + """ + conn = self._connection + path = '/%s:acknowledge' % (subscription_path,) + data = { + 'ackIds': ack_ids, + } + conn.api_request(method='POST', path=path, data=data) + + def subscription_modify_ack_deadline(self, subscription_path, ack_ids, + ack_deadline): + """API call: update ack deadline for retrieved messages + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline + + :type subscription_path: string + :param subscription_path: + the fully-qualified path of the new subscription, in format + ``projects//subscriptions/``. + + :type ack_ids: list of string + :param ack_ids: ack IDs of messages being acknowledged + + :type ack_deadline: int + :param ack_deadline: the deadline (in seconds) by which messages pulled + from the back-end must be acknowledged. + """ + conn = self._connection + path = '/%s:modifyAckDeadline' % (subscription_path,) + data = { + 'ackIds': ack_ids, + 'ackDeadlineSeconds': ack_deadline, + } + conn.api_request(method='POST', path=path, data=data) + + +class _IAMPolicyAPI(object): + """Helper mapping IAM policy-related APIs. + + :type connection: :class:`Connection` + :param connection: the connection used to make API requests. + """ + + def __init__(self, connection): + self._connection = connection + + def get_iam_policy(self, target_path): + """API call: fetch the IAM policy for the target + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy + + :type target_path: string + :param target_path: the path of the target object. + + :rtype: dict + :returns: the resource returned by the ``getIamPolicy`` API request. 
+ """ + conn = self._connection + path = '/%s:getIamPolicy' % (target_path,) + return conn.api_request(method='GET', path=path) + + def set_iam_policy(self, target_path, policy): + """API call: update the IAM policy for the target + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy + + :type target_path: string + :param target_path: the path of the target object. + + :type policy: dict + :param policy: the new policy resource. + + :rtype: dict + :returns: the resource returned by the ``setIamPolicy`` API request. + """ + conn = self._connection + wrapped = {'policy': policy} + path = '/%s:setIamPolicy' % (target_path,) + return conn.api_request(method='POST', path=path, data=wrapped) + + def test_iam_permissions(self, target_path, permissions): + """API call: test permissions + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions + + :type target_path: string + :param target_path: the path of the target object. + + :type permissions: list of string + :param permissions: the permissions to check + + :rtype: dict + :returns: the resource returned by the ``getIamPolicy`` API request. + """ + conn = self._connection + wrapped = {'permissions': permissions} + path = '/%s:testIamPermissions' % (target_path,) + resp = conn.api_request(method='POST', path=path, data=wrapped) + return resp.get('permissions', []) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py new file mode 100644 index 000000000000..3f07b06c3862 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -0,0 +1,259 @@ +# Copyright 2016 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""PubSub API IAM policy definitions + +For allowed roles / permissions, see: +https://cloud.google.com/pubsub/access_control#permissions +""" + +# Generic IAM roles + +OWNER_ROLE = 'roles/owner' +"""Generic role implying all rights to an object.""" + +EDITOR_ROLE = 'roles/editor' +"""Generic role implying rights to modify an object.""" + +VIEWER_ROLE = 'roles/viewer' +"""Generic role implying rights to access an object.""" + +# Pubsub-specific IAM roles + +PUBSUB_ADMIN_ROLE = 'roles/pubsub.admin' +"""Role implying all rights to an object.""" + +PUBSUB_EDITOR_ROLE = 'roles/pubsub.editor' +"""Role implying rights to modify an object.""" + +PUBSUB_VIEWER_ROLE = 'roles/pubsub.viewer' +"""Role implying rights to access an object.""" + +PUBSUB_PUBLISHER_ROLE = 'roles/pubsub.publisher' +"""Role implying rights to publish to a topic.""" + +PUBSUB_SUBSCRIBER_ROLE = 'roles/pubsub.subscriber' +"""Role implying rights to subscribe to a topic.""" + + +# Pubsub-specific permissions + +PUBSUB_TOPICS_CONSUME = 'pubsub.topics.consume' +"""Permission: consume events from a subscription.""" + +PUBSUB_TOPICS_CREATE = 'pubsub.topics.create' +"""Permission: create topics.""" + +PUBSUB_TOPICS_DELETE = 'pubsub.topics.delete' +"""Permission: delete topics.""" + +PUBSUB_TOPICS_GET = 'pubsub.topics.get' +"""Permission: retrieve topics.""" + +PUBSUB_TOPICS_GET_IAM_POLICY = 'pubsub.topics.getIamPolicy' +"""Permission: retrieve subscription IAM 
policies.""" + +PUBSUB_TOPICS_LIST = 'pubsub.topics.list' +"""Permission: list topics.""" + +PUBSUB_TOPICS_SET_IAM_POLICY = 'pubsub.topics.setIamPolicy' +"""Permission: update subscription IAM policies.""" + +PUBSUB_SUBSCRIPTIONS_CONSUME = 'pubsub.subscriptions.consume' +"""Permission: consume events from a subscription.""" + +PUBSUB_SUBSCRIPTIONS_CREATE = 'pubsub.subscriptions.create' +"""Permission: create subscriptions.""" + +PUBSUB_SUBSCRIPTIONS_DELETE = 'pubsub.subscriptions.delete' +"""Permission: delete subscriptions.""" + +PUBSUB_SUBSCRIPTIONS_GET = 'pubsub.subscriptions.get' +"""Permission: retrieve subscriptions.""" + +PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY = 'pubsub.subscriptions.getIamPolicy' +"""Permission: retrieve subscription IAM policies.""" + +PUBSUB_SUBSCRIPTIONS_LIST = 'pubsub.subscriptions.list' +"""Permission: list subscriptions.""" + +PUBSUB_SUBSCRIPTIONS_SET_IAM_POLICY = 'pubsub.subscriptions.setIamPolicy' +"""Permission: update subscription IAM policies.""" + +PUBSUB_SUBSCRIPTIONS_UPDATE = 'pubsub.subscriptions.update' +"""Permission: update subscriptions.""" + + +class Policy(object): + """Combined IAM Policy / Bindings. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy + https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding + + :type etag: string + :param etag: ETag used to identify a unique of the policy + + :type version: int + :param version: unique version of the policy + """ + def __init__(self, etag=None, version=None): + self.etag = etag + self.version = version + self.owners = set() + self.editors = set() + self.viewers = set() + self.publishers = set() + self.subscribers = set() + + @staticmethod + def user(email): + """Factory method for a user member. + + :type email: string + :param email: E-mail for this particular user. + + :rtype: string + :returns: A member string corresponding to the given user. 
+ """ + return 'user:%s' % (email,) + + @staticmethod + def service_account(email): + """Factory method for a service account member. + + :type email: string + :param email: E-mail for this particular service account. + + :rtype: string + :returns: A member string corresponding to the given service account. + """ + return 'serviceAccount:%s' % (email,) + + @staticmethod + def group(email): + """Factory method for a group member. + + :type email: string + :param email: An id or e-mail for this particular group. + + :rtype: string + :returns: A member string corresponding to the given group. + """ + return 'group:%s' % (email,) + + @staticmethod + def domain(domain): + """Factory method for a domain member. + + :type domain: string + :param domain: The domain for this member. + + :rtype: string + :returns: A member string corresponding to the given domain. + """ + return 'domain:%s' % (domain,) + + @staticmethod + def all_users(): + """Factory method for a member representing all users. + + :rtype: string + :returns: A member string representing all users. + """ + return 'allUsers' + + @staticmethod + def authenticated_users(): + """Factory method for a member representing all authenticated users. + + :rtype: string + :returns: A member string representing all authenticated users. + """ + return 'allAuthenticatedUsers' + + @classmethod + def from_api_repr(cls, resource): + """Create a policy from the resource returned from the API. + + :type resource: dict + :param resource: resource returned from the ``getIamPolicy`` API. 
+ + :rtype: :class:`Policy` + :returns: the parsed policy + """ + version = resource.get('version') + etag = resource.get('etag') + policy = cls(etag, version) + for binding in resource.get('bindings', ()): + role = binding['role'] + members = set(binding['members']) + if role in (OWNER_ROLE, PUBSUB_ADMIN_ROLE): + policy.owners |= members + elif role in (EDITOR_ROLE, PUBSUB_EDITOR_ROLE): + policy.editors |= members + elif role in (VIEWER_ROLE, PUBSUB_VIEWER_ROLE): + policy.viewers |= members + elif role == PUBSUB_PUBLISHER_ROLE: + policy.publishers |= members + elif role == PUBSUB_SUBSCRIBER_ROLE: + policy.subscribers |= members + else: + raise ValueError('Unknown role: %s' % (role,)) + return policy + + def to_api_repr(self): + """Construct a Policy resource. + + :rtype: dict + :returns: a resource to be passed to the ``setIamPolicy`` API. + """ + resource = {} + + if self.etag is not None: + resource['etag'] = self.etag + + if self.version is not None: + resource['version'] = self.version + + bindings = [] + + if self.owners: + bindings.append( + {'role': PUBSUB_ADMIN_ROLE, + 'members': sorted(self.owners)}) + + if self.editors: + bindings.append( + {'role': PUBSUB_EDITOR_ROLE, + 'members': sorted(self.editors)}) + + if self.viewers: + bindings.append( + {'role': PUBSUB_VIEWER_ROLE, + 'members': sorted(self.viewers)}) + + if self.publishers: + bindings.append( + {'role': PUBSUB_PUBLISHER_ROLE, + 'members': sorted(self.publishers)}) + + if self.subscribers: + bindings.append( + {'role': PUBSUB_SUBSCRIBER_ROLE, + 'members': sorted(self.subscribers)}) + + if bindings: + resource['bindings'] = bindings + + return resource diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py new file mode 100644 index 000000000000..2f810baa5e2e --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -0,0 +1,93 @@ +# Copyright 2015 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Topics.""" + +import base64 + +from google.cloud._helpers import _rfc3339_to_datetime + + +class Message(object): + """Messages can be published to a topic and received by subscribers. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage + + :type data: bytes + :param data: the payload of the message. + + :type message_id: string + :param message_id: An ID assigned to the message by the API. + + :type attributes: dict or None + :param attributes: Extra metadata associated by the publisher with the + message. + """ + _service_timestamp = None + + def __init__(self, data, message_id, attributes=None): + self.data = data + self.message_id = message_id + self._attributes = attributes + + @property + def attributes(self): + """Lazily-constructed attribute dictionary.""" + if self._attributes is None: + self._attributes = {} + return self._attributes + + @property + def timestamp(self): + """Return sortable timestamp from attributes, if passed. + + Allows sorting messages in publication order (assuming consistent + clocks across all publishers). + + :rtype: :class:`datetime.datetime` + :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp + :raises: ValueError if timestamp not in ``attributes``, or if it does + not match the RFC 3339 format. 
+ """ + stamp = self.attributes.get('timestamp') + if stamp is None: + raise ValueError('No timestamp') + return _rfc3339_to_datetime(stamp) + + @property + def service_timestamp(self): + """Return server-set timestamp. + + :rtype: string + :returns: timestamp (in UTC timezone) in RFC 3339 format + """ + return self._service_timestamp + + @classmethod + def from_api_repr(cls, api_repr): + """Factory: construct message from API representation. + + :type api_repr: dict or None + :param api_repr: The API representation of the message + + :rtype: :class:`Message` + :returns: The message created from the response. + """ + data = base64.b64decode(api_repr.get('data', b'')) + instance = cls( + data=data, message_id=api_repr['messageId'], + attributes=api_repr.get('attributes')) + instance._service_timestamp = api_repr.get('publishTimestamp') + return instance diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py new file mode 100644 index 000000000000..b63e77177c3f --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -0,0 +1,509 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Define API Subscriptions.""" + +from google.cloud.exceptions import NotFound +from google.cloud.pubsub._helpers import topic_name_from_path +from google.cloud.pubsub.iam import Policy +from google.cloud.pubsub.message import Message + + +class Subscription(object): + """Subscriptions receive messages published to their topics. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions + + :type name: string + :param name: the name of the subscription. + + :type topic: :class:`google.cloud.pubsub.topic.Topic` or ``NoneType`` + :param topic: the topic to which the subscription belongs; if ``None``, + the subscription's topic has been deleted. + + :type ack_deadline: int + :param ack_deadline: the deadline (in seconds) by which messages pulled + from the back-end must be acknowledged. + + :type push_endpoint: string + :param push_endpoint: URL to which messages will be pushed by the back-end. + If not set, the application must pull messages. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the topic. + """ + + _DELETED_TOPIC_PATH = '_deleted-topic_' + """Value of ``projects.subscriptions.topic`` when topic has been deleted. 
+ + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic + """ + + def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, + client=None): + + if client is None and topic is None: + raise TypeError("Pass only one of 'topic' or 'client'.") + + if client is not None and topic is not None: + raise TypeError("Pass only one of 'topic' or 'client'.") + + self.name = name + self.topic = topic + self._client = client or topic._client + self._project = self._client.project + self.ack_deadline = ack_deadline + self.push_endpoint = push_endpoint + + @classmethod + def from_api_repr(cls, resource, client, topics=None): + """Factory: construct a topic given its API representation + + :type resource: dict + :param resource: topic resource representation returned from the API. + + :type client: :class:`google.cloud.pubsub.client.Client` + :param client: Client which holds credentials and project + configuration for a topic. + + :type topics: dict or None + :param topics: A mapping of topic names -> topics. If not passed, + the subscription will have a newly-created topic. + + :rtype: :class:`google.cloud.pubsub.subscription.Subscription` + :returns: Subscription parsed from ``resource``. + """ + if topics is None: + topics = {} + topic_path = resource['topic'] + if topic_path == cls._DELETED_TOPIC_PATH: + topic = None + else: + topic = topics.get(topic_path) + if topic is None: + # NOTE: This duplicates behavior from Topic.from_api_repr to + # avoid an import cycle. 
+ topic_name = topic_name_from_path(topic_path, client.project) + topic = topics[topic_path] = client.topic(topic_name) + _, _, _, name = resource['name'].split('/') + ack_deadline = resource.get('ackDeadlineSeconds') + push_config = resource.get('pushConfig', {}) + push_endpoint = push_config.get('pushEndpoint') + if topic is None: + return cls(name, ack_deadline=ack_deadline, + push_endpoint=push_endpoint, client=client) + return cls(name, topic, ack_deadline, push_endpoint) + + @property + def project(self): + """Project bound to the subscription.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in subscription APIs""" + return 'projects/%s/subscriptions/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the subscription's APIs""" + return '/%s' % (self.full_name,) + + def auto_ack(self, return_immediately=False, max_messages=1, client=None): + """:class:`AutoAck` factory + + :type return_immediately: boolean + :param return_immediately: passed through to :meth:`Subscription.pull` + + :type max_messages: int + :param max_messages: passed through to :meth:`Subscription.pull` + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: passed through to :meth:`Subscription.pull` and + :meth:`Subscription.acknowledge`. + + :rtype: :class:`AutoAck` + :returns: the instance created for the given ``ack_id`` and ``message`` + """ + return AutoAck(self, return_immediately, max_messages, client) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the topic of the + current subscription. + + :rtype: :class:`google.cloud.pubsub.client.Client` + :returns: The client passed in or the currently bound client. 
+ """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the subscription via a PUT request + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_create] + :end-before: [END subscription_create] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + api.subscription_create( + self.full_name, self.topic.full_name, self.ack_deadline, + self.push_endpoint) + + def exists(self, client=None): + """API call: test existence of the subscription via a GET request + + See + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_exists] + :end-before: [END subscription_exists] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + + :rtype: bool + :returns: Boolean indicating existence of the subscription. + """ + client = self._require_client(client) + api = client.subscriber_api + try: + api.subscription_get(self.full_name) + except NotFound: + return False + else: + return True + + def reload(self, client=None): + """API call: sync local subscription configuration via a GET request + + See + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get + + Example: + + .. 
literalinclude:: pubsub_snippets.py + :start-after: [START subscription_reload] + :end-before: [END subscription_reload] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + data = api.subscription_get(self.full_name) + self.ack_deadline = data.get('ackDeadlineSeconds') + push_config = data.get('pushConfig', {}) + self.push_endpoint = push_config.get('pushEndpoint') + + def delete(self, client=None): + """API call: delete the subscription via a DELETE request. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_delete] + :end-before: [END subscription_delete] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + api.subscription_delete(self.full_name) + + def modify_push_configuration(self, push_endpoint, client=None): + """API call: update the push endpoint for the subscription. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_push_pull] + :end-before: [END subscription_push_pull] + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_pull_push] + :end-before: [END subscription_pull_push] + + :type push_endpoint: string + :param push_endpoint: URL to which messages will be pushed by the + back-end. If None, the application must pull + messages. 
+ + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + api.subscription_modify_push_config(self.full_name, push_endpoint) + self.push_endpoint = push_endpoint + + def pull(self, return_immediately=False, max_messages=1, client=None): + """API call: retrieve messages for the subscription. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_pull] + :end-before: [END subscription_pull] + + :type return_immediately: boolean + :param return_immediately: if True, the back-end returns even if no + messages are available; if False, the API + call blocks until one or more messages are + available. + + :type max_messages: int + :param max_messages: the maximum number of messages to return. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + + :rtype: list of (ack_id, message) tuples + :returns: sequence of tuples: ``ack_id`` is the ID to be used in a + subsequent call to :meth:`acknowledge`, and ``message`` + is an instance of + :class:`~google.cloud.pubsub.message.Message`. + """ + client = self._require_client(client) + api = client.subscriber_api + response = api.subscription_pull( + self.full_name, return_immediately, max_messages) + return [(info['ackId'], Message.from_api_repr(info['message'])) + for info in response] + + def acknowledge(self, ack_ids, client=None): + """API call: acknowledge retrieved messages for the subscription. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/acknowledge + + Example: + + .. 
literalinclude:: pubsub_snippets.py + :start-after: [START subscription_acknowledge] + :end-before: [END subscription_acknowledge] + + :type ack_ids: list of string + :param ack_ids: ack IDs of messages being acknowledged + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + api.subscription_acknowledge(self.full_name, ack_ids) + + def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): + """API call: update acknowledgement deadline for a retrieved message. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline + + :type ack_ids: list of string + :param ack_ids: ack IDs of messages being updated + + :type ack_deadline: int + :param ack_deadline: new deadline for the message, in seconds + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + api.subscription_modify_ack_deadline( + self.full_name, ack_ids, ack_deadline) + + def get_iam_policy(self, client=None): + """Fetch the IAM policy for the subscription. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_get_iam_policy] + :end-before: [END subscription_get_iam_policy] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. 
+ + :rtype: :class:`google.cloud.pubsub.iam.Policy` + :returns: policy created from the resource returned by the + ``getIamPolicy`` API request. + """ + client = self._require_client(client) + api = client.iam_policy_api + resp = api.get_iam_policy(self.full_name) + return Policy.from_api_repr(resp) + + def set_iam_policy(self, policy, client=None): + """Update the IAM policy for the subscription. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_set_iam_policy] + :end-before: [END subscription_set_iam_policy] + + :type policy: :class:`google.cloud.pubsub.iam.Policy` + :param policy: the new policy, typically fetched via + :meth:`get_iam_policy` and updated in place. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + + :rtype: :class:`google.cloud.pubsub.iam.Policy` + :returns: updated policy created from the resource returned by the + ``setIamPolicy`` API request. + """ + client = self._require_client(client) + api = client.iam_policy_api + resource = policy.to_api_repr() + resp = api.set_iam_policy(self.full_name, resource) + return Policy.from_api_repr(resp) + + def check_iam_permissions(self, permissions, client=None): + """Verify permissions allowed for the current user. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START subscription_check_iam_permissions] + :end-before: [END subscription_check_iam_permissions] + + :type permissions: list of string + :param permissions: list of permissions to be tested + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. 
If not passed, falls back to the + ``client`` stored on the current subscription's topic. + + :rtype: sequence of string + :returns: subset of ``permissions`` allowed by current IAM policy. + """ + client = self._require_client(client) + api = client.iam_policy_api + return api.test_iam_permissions( + self.full_name, list(permissions)) + + +class AutoAck(dict): + """Wrapper for :meth:`Subscription.pull` results. + + Mapping, tracks messages still-to-be-acknowledged. + + When used as a context manager, acknowledges all messages still in the + mapping on `__exit__`. When processing the pulled messages, application + code MUST delete messages from the :class:`AutoAck` mapping which are not + successfully processed, e.g.: + + .. code-block: python + + with AutoAck(subscription) as ack: # calls ``subscription.pull`` + for ack_id, message in ack.items(): + try: + do_something_with(message): + except: + del ack[ack_id] + + :type subscription: :class:`Subscription` + :param subscription: subscription to be pulled. + + :type return_immediately: boolean + :param return_immediately: passed through to :meth:`Subscription.pull` + + :type max_messages: int + :param max_messages: passed through to :meth:`Subscription.pull` + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: passed through to :meth:`Subscription.pull` and + :meth:`Subscription.acknowledge`. 
+ """ + def __init__(self, subscription, + return_immediately=False, max_messages=1, client=None): + super(AutoAck, self).__init__() + self._subscription = subscription + self._return_immediately = return_immediately + self._max_messages = max_messages + self._client = client + + def __enter__(self): + items = self._subscription.pull( + self._return_immediately, self._max_messages, self._client) + self.update(items) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self._subscription.acknowledge(list(self), self._client) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py new file mode 100644 index 000000000000..f879d8349c89 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -0,0 +1,471 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Topics.""" + +import base64 + +from google.cloud._helpers import _datetime_to_rfc3339 +from google.cloud._helpers import _NOW +from google.cloud.exceptions import NotFound +from google.cloud.pubsub._helpers import subscription_name_from_path +from google.cloud.pubsub._helpers import topic_name_from_path +from google.cloud.pubsub.iam import Policy +from google.cloud.pubsub.subscription import Subscription + + +class Topic(object): + """Topics are targets to which messages can be published. + + Subscribers then receive those messages. 
+ + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics + + :type name: string + :param name: the name of the topic + + :type client: :class:`google.cloud.pubsub.client.Client` + :param client: A client which holds credentials and project configuration + for the topic (which requires a project). + + :type timestamp_messages: boolean + :param timestamp_messages: If true, the topic will add a ``timestamp`` key + to the attributes of each published message: + the value will be an RFC 3339 timestamp. + """ + def __init__(self, name, client, timestamp_messages=False): + self.name = name + self._client = client + self.timestamp_messages = timestamp_messages + + def subscription(self, name, ack_deadline=None, push_endpoint=None): + """Creates a subscription bound to the current topic. + + Example: pull-mode subcription, default paramter values + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_subscription_defaults] + :end-before: [END topic_subscription_defaults] + + Example: pull-mode subcription, override ``ack_deadline`` default + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_subscription_ack90] + :end-before: [END topic_subscription_ack90] + + Example: push-mode subcription + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_subscription_push] + :end-before: [END topic_subscription_push] + + :type name: string + :param name: the name of the subscription + + :type ack_deadline: int + :param ack_deadline: the deadline (in seconds) by which messages pulled + from the back-end must be acknowledged. + + :type push_endpoint: string + :param push_endpoint: URL to which messages will be pushed by the + back-end. If not set, the application must pull + messages. + + :rtype: :class:`Subscription` + :returns: The subscription created with the passed in arguments. 
+ """ + return Subscription(name, self, ack_deadline=ack_deadline, + push_endpoint=push_endpoint) + + @classmethod + def from_api_repr(cls, resource, client): + """Factory: construct a topic given its API representation + + :type resource: dict + :param resource: topic resource representation returned from the API + + :type client: :class:`google.cloud.pubsub.client.Client` + :param client: Client which holds credentials and project + configuration for the topic. + + :rtype: :class:`google.cloud.pubsub.topic.Topic` + :returns: Topic parsed from ``resource``. + :raises: :class:`ValueError` if ``client`` is not ``None`` and the + project from the resource does not agree with the project + from the client. + """ + topic_name = topic_name_from_path(resource['name'], client.project) + return cls(topic_name, client=client) + + @property + def project(self): + """Project bound to the topic.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in topic / subscription APIs""" + return 'projects/%s/topics/%s' % (self.project, self.name) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`google.cloud.pubsub.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the topic via a PUT request + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_create] + :end-before: [END topic_create] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. 
If not passed, falls back to the + ``client`` stored on the current topic. + """ + client = self._require_client(client) + api = client.publisher_api + api.topic_create(topic_path=self.full_name) + + def exists(self, client=None): + """API call: test for the existence of the topic via a GET request + + See + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_exists] + :end-before: [END topic_exists] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: bool + :returns: Boolean indicating existence of the topic. + """ + client = self._require_client(client) + api = client.publisher_api + + try: + api.topic_get(topic_path=self.full_name) + except NotFound: + return False + else: + return True + + def delete(self, client=None): + """API call: delete the topic via a DELETE request + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_delete] + :end-before: [END topic_delete] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + """ + client = self._require_client(client) + api = client.publisher_api + api.topic_delete(topic_path=self.full_name) + + def _timestamp_message(self, attrs): + """Add a timestamp to ``attrs``, if the topic is so configured. + + If ``attrs`` already has the key, do nothing. + + Helper method for ``publish``/``Batch.publish``. 
+ """ + if self.timestamp_messages and 'timestamp' not in attrs: + attrs['timestamp'] = _datetime_to_rfc3339(_NOW()) + + def publish(self, message, client=None, **attrs): + """API call: publish a message to a topic via a POST request + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish + + Example without message attributes: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_publish_simple_message] + :end-before: [END topic_publish_simple_message] + + With message attributes: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_publish_message_with_attrs] + :end-before: [END topic_publish_message_with_attrs] + + :type message: bytes + :param message: the message payload + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :type attrs: dict (string -> string) + :param attrs: key-value pairs to send as message attributes + + :rtype: str + :returns: message ID assigned by the server to the published message + """ + client = self._require_client(client) + api = client.publisher_api + + self._timestamp_message(attrs) + message_b = base64.b64encode(message).decode('ascii') + message_data = {'data': message_b, 'attributes': attrs} + message_ids = api.topic_publish(self.full_name, [message_data]) + return message_ids[0] + + def batch(self, client=None): + """Return a batch to use as a context manager. + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_batch] + :end-before: [END topic_batch] + + .. note:: + + The only API request happens during the ``__exit__()`` of the topic + used as a context manager, and only if the block exits without + raising an exception. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. 
If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: :class:`Batch` + :returns: A batch to use as a context manager. + """ + client = self._require_client(client) + return Batch(self, client) + + def list_subscriptions(self, page_size=None, page_token=None, client=None): + """List subscriptions for the project associated with this client. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_list_subscriptions] + :end-before: [END topic_list_subscriptions] + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: string + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current topic. + + :rtype: tuple, (list, str) + :returns: list of :class:`~.pubsub.subscription.Subscription`, + plus a "next page token" string: if not None, indicates that + more topics can be retrieved with another call (pass that + value as ``page_token``). + """ + client = self._require_client(client) + api = client.publisher_api + sub_paths, next_token = api.topic_list_subscriptions( + self.full_name, page_size, page_token) + subscriptions = [] + for sub_path in sub_paths: + sub_name = subscription_name_from_path(sub_path, self.project) + subscriptions.append(Subscription(sub_name, self)) + return subscriptions, next_token + + def get_iam_policy(self, client=None): + """Fetch the IAM policy for the topic. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy + + Example: + + .. 
literalinclude:: pubsub_snippets.py + :start-after: [START topic_get_iam_policy] + :end-before: [END topic_get_iam_policy] + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. + + :rtype: :class:`google.cloud.pubsub.iam.Policy` + :returns: policy created from the resource returned by the + ``getIamPolicy`` API request. + """ + client = self._require_client(client) + api = client.iam_policy_api + resp = api.get_iam_policy(self.full_name) + return Policy.from_api_repr(resp) + + def set_iam_policy(self, policy, client=None): + """Update the IAM policy for the topic. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START topic_set_iam_policy] + :end-before: [END topic_set_iam_policy] + + :type policy: :class:`google.cloud.pubsub.iam.Policy` + :param policy: the new policy, typically fetched via + :meth:`get_iam_policy` and updated in place. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. + + :rtype: :class:`google.cloud.pubsub.iam.Policy` + :returns: updated policy created from the resource returned by the + ``setIamPolicy`` API request. + """ + client = self._require_client(client) + api = client.iam_policy_api + resource = policy.to_api_repr() + resp = api.set_iam_policy(self.full_name, resource) + return Policy.from_api_repr(resp) + + def check_iam_permissions(self, permissions, client=None): + """Verify permissions allowed for the current user. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions + + Example: + + .. 
literalinclude:: pubsub_snippets.py + :start-after: [START topic_check_iam_permissions] + :end-before: [END topic_check_iam_permissions] + + :type permissions: list of string + :param permissions: list of permissions to be tested + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. + + :rtype: sequence of string + :returns: subset of ``permissions`` allowed by current IAM policy. + """ + client = self._require_client(client) + api = client.iam_policy_api + return api.test_iam_permissions( + self.full_name, list(permissions)) + + +class Batch(object): + """Context manager: collect messages to publish via a single API call. + + Helper returned by :meth:Topic.batch + + :type topic: :class:`google.cloud.pubsub.topic.Topic` + :param topic: the topic being published + + :type client: :class:`google.cloud.pubsub.client.Client` + :param client: The client to use. + """ + def __init__(self, topic, client): + self.topic = topic + self.messages = [] + self.message_ids = [] + self.client = client + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + if exc_type is None: + self.commit() + + def __iter__(self): + return iter(self.message_ids) + + def publish(self, message, **attrs): + """Emulate publishing a message, but save it. + + :type message: bytes + :param message: the message payload + + :type attrs: dict (string -> string) + :param attrs: key-value pairs to send as message attributes + """ + self.topic._timestamp_message(attrs) + self.messages.append( + {'data': base64.b64encode(message).decode('ascii'), + 'attributes': attrs}) + + def commit(self, client=None): + """Send saved messages as a single API call. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current batch. 
+ """ + if not self.messages: + return + + if client is None: + client = self.client + api = client.publisher_api + message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) + self.message_ids.extend(message_ids) + del self.messages[:] diff --git a/packages/google-cloud-pubsub/unit_tests/__init__.py b/packages/google-cloud-pubsub/unit_tests/__init__.py new file mode 100644 index 000000000000..58e0d9153632 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py new file mode 100644 index 000000000000..46d05a23d583 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -0,0 +1,1045 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +try: + # pylint: disable=unused-import + import google.cloud.pubsub._gax + # pylint: enable=unused-import +except ImportError: # pragma: NO COVER + _HAVE_GAX = False +else: + _HAVE_GAX = True + +from google.cloud._testing import _GAXBaseAPI + + +class _Base(object): + PROJECT = 'PROJECT' + PROJECT_PATH = 'projects/%s' % (PROJECT,) + LIST_TOPICS_PATH = '%s/topics' % (PROJECT_PATH,) + TOPIC_NAME = 'topic_name' + TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) + SUB_NAME = 'sub_name' + SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_PublisherAPI(_Base, unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.pubsub._gax import _PublisherAPI + return _PublisherAPI + + def test_ctor(self): + gax_api = _GAXPublisherAPI() + api = self._makeOne(gax_api) + self.assertIs(api._gax_api, gax_api) + + def test_list_topics_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud._testing import _GAXPageIterator + TOKEN = 'TOKEN' + response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], TOKEN) + gax_api = _GAXPublisherAPI(_list_topics_response=response) + api = self._makeOne(gax_api) + + topics, next_token = api.list_topics(self.PROJECT) + + self.assertEqual(len(topics), 1) + topic = topics[0] + self.assertIsInstance(topic, dict) + self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertEqual(next_token, TOKEN) + + name, page_size, options = gax_api._list_topics_called_with + self.assertEqual(name, self.PROJECT_PATH) + self.assertEqual(page_size, 0) + self.assertIs(options.page_token, INITIAL_PAGE) + + def test_list_topics_with_paging(self): + from google.cloud._testing import _GAXPageIterator + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + response = _GAXPageIterator( + 
[_TopicPB(self.TOPIC_PATH)], NEW_TOKEN) + gax_api = _GAXPublisherAPI(_list_topics_response=response) + api = self._makeOne(gax_api) + + topics, next_token = api.list_topics( + self.PROJECT, page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(topics), 1) + topic = topics[0] + self.assertIsInstance(topic, dict) + self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertEqual(next_token, NEW_TOKEN) + + name, page_size, options = gax_api._list_topics_called_with + self.assertEqual(name, self.PROJECT_PATH) + self.assertEqual(page_size, SIZE) + self.assertEqual(options.page_token, TOKEN) + + def test_topic_create(self): + topic_pb = _TopicPB(self.TOPIC_PATH) + gax_api = _GAXPublisherAPI(_create_topic_response=topic_pb) + api = self._makeOne(gax_api) + + resource = api.topic_create(self.TOPIC_PATH) + + self.assertEqual(resource, {'name': self.TOPIC_PATH}) + topic_path, options = gax_api._create_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_create_already_exists(self): + from google.cloud.exceptions import Conflict + gax_api = _GAXPublisherAPI(_create_topic_conflict=True) + api = self._makeOne(gax_api) + + with self.assertRaises(Conflict): + api.topic_create(self.TOPIC_PATH) + + topic_path, options = gax_api._create_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_create_error(self): + from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.topic_create(self.TOPIC_PATH) + + topic_path, options = gax_api._create_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_get_hit(self): + topic_pb = _TopicPB(self.TOPIC_PATH) + gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) + api = self._makeOne(gax_api) + + resource = api.topic_get(self.TOPIC_PATH) + + 
self.assertEqual(resource, {'name': self.TOPIC_PATH}) + topic_path, options = gax_api._get_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_get_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXPublisherAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.topic_get(self.TOPIC_PATH) + + topic_path, options = gax_api._get_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_get_error(self): + from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.topic_get(self.TOPIC_PATH) + + topic_path, options = gax_api._get_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_delete_hit(self): + gax_api = _GAXPublisherAPI(_delete_topic_ok=True) + api = self._makeOne(gax_api) + + api.topic_delete(self.TOPIC_PATH) + + topic_path, options = gax_api._delete_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_delete_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXPublisherAPI(_delete_topic_ok=False) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.topic_delete(self.TOPIC_PATH) + + topic_path, options = gax_api._delete_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_delete_error(self): + from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.topic_delete(self.TOPIC_PATH) + + topic_path, options = gax_api._delete_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_topic_publish_hit(self): + import 
base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MSGID = 'DEADBEEF' + MESSAGE = {'data': B64, 'attributes': {}} + response = _PublishResponsePB([MSGID]) + gax_api = _GAXPublisherAPI(_publish_response=response) + api = self._makeOne(gax_api) + + resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + + self.assertEqual(resource, [MSGID]) + topic_path, message_pbs, options = gax_api._publish_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + message_pb, = message_pbs + self.assertEqual(message_pb.data.decode('ascii'), B64) + self.assertEqual(message_pb.attributes, {}) + self.assertEqual(options.is_bundling, False) + + def test_topic_publish_miss_w_attrs_w_bytes_payload(self): + import base64 + from google.cloud.exceptions import NotFound + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD) + MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} + gax_api = _GAXPublisherAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + + topic_path, message_pbs, options = gax_api._publish_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + message_pb, = message_pbs + self.assertEqual(message_pb.data, B64) + self.assertEqual(message_pb.attributes, {'foo': 'bar'}) + self.assertEqual(options.is_bundling, False) + + def test_topic_publish_error(self): + import base64 + from google.gax.errors import GaxError + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MESSAGE = {'data': B64, 'attributes': {}} + gax_api = _GAXPublisherAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + + topic_path, message_pbs, options = gax_api._publish_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + message_pb, = message_pbs + self.assertEqual(message_pb.data.decode('ascii'), B64) + 
self.assertEqual(message_pb.attributes, {}) + self.assertEqual(options.is_bundling, False) + + def test_topic_list_subscriptions_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud._testing import _GAXPageIterator + response = _GAXPageIterator([ + {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], None) + gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) + api = self._makeOne(gax_api) + + subscriptions, next_token = api.topic_list_subscriptions( + self.TOPIC_PATH) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertIsNone(next_token) + + topic_path, page_size, options = ( + gax_api._list_topic_subscriptions_called_with) + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertEqual(page_size, 0) + self.assertIs(options.page_token, INITIAL_PAGE) + + def test_topic_list_subscriptions_with_paging(self): + from google.cloud._testing import _GAXPageIterator + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + response = _GAXPageIterator([ + {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], NEW_TOKEN) + gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) + api = self._makeOne(gax_api) + + subscriptions, next_token = api.topic_list_subscriptions( + self.TOPIC_PATH, page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertEqual(next_token, NEW_TOKEN) + + name, page_size, options = ( + gax_api._list_topic_subscriptions_called_with) + self.assertEqual(name, self.TOPIC_PATH) + self.assertEqual(page_size, SIZE) + self.assertEqual(options.page_token, TOKEN) + + def 
test_topic_list_subscriptions_miss(self): + from google.gax import INITIAL_PAGE + from google.cloud.exceptions import NotFound + gax_api = _GAXPublisherAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.topic_list_subscriptions(self.TOPIC_PATH) + + topic_path, page_size, options = ( + gax_api._list_topic_subscriptions_called_with) + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertEqual(page_size, 0) + self.assertIs(options.page_token, INITIAL_PAGE) + + def test_topic_list_subscriptions_error(self): + from google.gax import INITIAL_PAGE + from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.topic_list_subscriptions(self.TOPIC_PATH) + + topic_path, page_size, options = ( + gax_api._list_topic_subscriptions_called_with) + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertEqual(page_size, 0) + self.assertIs(options.page_token, INITIAL_PAGE) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_SubscriberAPI(_Base, unittest.TestCase): + + PUSH_ENDPOINT = 'https://api.example.com/push' + + def _getTargetClass(self): + from google.cloud.pubsub._gax import _SubscriberAPI + return _SubscriberAPI + + def test_ctor(self): + gax_api = _GAXSubscriberAPI() + api = self._makeOne(gax_api) + self.assertIs(api._gax_api, gax_api) + + def test_list_subscriptions_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud._testing import _GAXPageIterator + response = _GAXPageIterator([_SubscriptionPB( + self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0)], None) + gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) + api = self._makeOne(gax_api) + + subscriptions, next_token = api.list_subscriptions(self.PROJECT) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], 
self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertEqual(subscription['pushConfig'], + {'pushEndpoint': self.PUSH_ENDPOINT}) + self.assertEqual(subscription['ackDeadlineSeconds'], 0) + self.assertIsNone(next_token) + + name, page_size, options = gax_api._list_subscriptions_called_with + self.assertEqual(name, self.PROJECT_PATH) + self.assertEqual(page_size, 0) + self.assertIs(options.page_token, INITIAL_PAGE) + + def test_list_subscriptions_with_paging(self): + from google.cloud._testing import _GAXPageIterator + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + response = _GAXPageIterator([_SubscriptionPB( + self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0)], NEW_TOKEN) + gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) + api = self._makeOne(gax_api) + + subscriptions, next_token = api.list_subscriptions( + self.PROJECT, page_size=SIZE, page_token=TOKEN) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertEqual(subscription['pushConfig'], + {'pushEndpoint': self.PUSH_ENDPOINT}) + self.assertEqual(subscription['ackDeadlineSeconds'], 0) + self.assertEqual(next_token, NEW_TOKEN) + + name, page_size, options = gax_api._list_subscriptions_called_with + self.assertEqual(name, self.PROJECT_PATH) + self.assertEqual(page_size, 23) + self.assertEqual(options.page_token, TOKEN) + + def test_subscription_create(self): + sub_pb = _SubscriptionPB(self.SUB_PATH, self.TOPIC_PATH, '', 0) + gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) + api = self._makeOne(gax_api) + + resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) + + expected = { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'ackDeadlineSeconds': 0, + } + self.assertEqual(resource, expected) + name, topic, push_config, 
ack_deadline, options = ( + gax_api._create_subscription_called_with) + self.assertEqual(name, self.SUB_PATH) + self.assertEqual(topic, self.TOPIC_PATH) + self.assertIsNone(push_config) + self.assertEqual(ack_deadline, 0) + self.assertIsNone(options) + + def test_subscription_create_already_exists(self): + from google.cloud.exceptions import Conflict + DEADLINE = 600 + gax_api = _GAXSubscriberAPI(_create_subscription_conflict=True) + api = self._makeOne(gax_api) + + with self.assertRaises(Conflict): + api.subscription_create( + self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) + + name, topic, push_config, ack_deadline, options = ( + gax_api._create_subscription_called_with) + self.assertEqual(name, self.SUB_PATH) + self.assertEqual(topic, self.TOPIC_PATH) + self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) + self.assertEqual(ack_deadline, DEADLINE) + self.assertIsNone(options) + + def test_subscription_create_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) + + name, topic, push_config, ack_deadline, options = ( + gax_api._create_subscription_called_with) + self.assertEqual(name, self.SUB_PATH) + self.assertEqual(topic, self.TOPIC_PATH) + self.assertIsNone(push_config) + self.assertEqual(ack_deadline, 0) + self.assertIsNone(options) + + def test_subscription_get_hit(self): + sub_pb = _SubscriptionPB( + self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0) + gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) + api = self._makeOne(gax_api) + + resource = api.subscription_get(self.SUB_PATH) + + expected = { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'ackDeadlineSeconds': 0, + 'pushConfig': { + 'pushEndpoint': self.PUSH_ENDPOINT, + }, + } + self.assertEqual(resource, expected) + sub_path, options = gax_api._get_subscription_called_with 
+ self.assertEqual(sub_path, self.SUB_PATH) + self.assertIsNone(options) + + def test_subscription_get_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.subscription_get(self.SUB_PATH) + + sub_path, options = gax_api._get_subscription_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertIsNone(options) + + def test_subscription_get_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_get(self.SUB_PATH) + + sub_path, options = gax_api._get_subscription_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertIsNone(options) + + def test_subscription_delete_hit(self): + gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) + api = self._makeOne(gax_api) + + api.subscription_delete(self.TOPIC_PATH) + + sub_path, options = gax_api._delete_subscription_called_with + self.assertEqual(sub_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_subscription_delete_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.subscription_delete(self.TOPIC_PATH) + + sub_path, options = gax_api._delete_subscription_called_with + self.assertEqual(sub_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_subscription_delete_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_delete(self.TOPIC_PATH) + + sub_path, options = gax_api._delete_subscription_called_with + self.assertEqual(sub_path, self.TOPIC_PATH) + self.assertIsNone(options) + + def test_subscription_modify_push_config_hit(self): + gax_api 
= _GAXSubscriberAPI(_modify_push_config_ok=True) + api = self._makeOne(gax_api) + + api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) + + sub_path, config, options = gax_api._modify_push_config_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) + self.assertIsNone(options) + + def test_subscription_modify_push_config_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.subscription_modify_push_config( + self.SUB_PATH, self.PUSH_ENDPOINT) + + sub_path, config, options = gax_api._modify_push_config_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) + self.assertIsNone(options) + + def test_subscription_modify_push_config_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_modify_push_config( + self.SUB_PATH, self.PUSH_ENDPOINT) + + sub_path, config, options = gax_api._modify_push_config_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) + self.assertIsNone(options) + + def test_subscription_pull_explicit(self): + import base64 + import datetime + from google.cloud._helpers import UTC + from google.cloud._helpers import _datetime_to_pb_timestamp + from google.cloud._helpers import _datetime_to_rfc3339 + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) + NOW_PB = _datetime_to_pb_timestamp(NOW) + NOW_RFC3339 = _datetime_to_rfc3339(NOW) + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + ACK_ID = 'DEADBEEF' + MSG_ID = 'BEADCAFE' + MESSAGE = { + 'messageId': MSG_ID, + 'data': B64, + 'attributes': {'a': 'b'}, + 'publishTime': NOW_RFC3339, + } + RECEIVED = 
[{'ackId': ACK_ID, 'message': MESSAGE}] + message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB) + response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) + gax_api = _GAXSubscriberAPI(_pull_response=response_pb) + api = self._makeOne(gax_api) + MAX_MESSAGES = 10 + + received = api.subscription_pull( + self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) + + self.assertEqual(received, RECEIVED) + sub_path, max_messages, return_immediately, options = ( + gax_api._pull_called_with) + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(max_messages, MAX_MESSAGES) + self.assertTrue(return_immediately) + self.assertIsNone(options) + + def test_subscription_pull_defaults_miss(self): + from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.subscription_pull(self.SUB_PATH) + + sub_path, max_messages, return_immediately, options = ( + gax_api._pull_called_with) + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(max_messages, 1) + self.assertFalse(return_immediately) + self.assertIsNone(options) + + def test_subscription_pull_defaults_error(self): + from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_pull(self.SUB_PATH) + + sub_path, max_messages, return_immediately, options = ( + gax_api._pull_called_with) + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(max_messages, 1) + self.assertFalse(return_immediately) + self.assertIsNone(options) + + def test_subscription_acknowledge_hit(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) + api = self._makeOne(gax_api) + + api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) + + sub_path, ack_ids, options = gax_api._acknowledge_called_with + self.assertEqual(sub_path, 
self.SUB_PATH) + self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) + self.assertIsNone(options) + + def test_subscription_acknowledge_miss(self): + from google.cloud.exceptions import NotFound + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + gax_api = _GAXSubscriberAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) + + sub_path, ack_ids, options = gax_api._acknowledge_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) + self.assertIsNone(options) + + def test_subscription_acknowledge_error(self): + from google.gax.errors import GaxError + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) + + sub_path, ack_ids, options = gax_api._acknowledge_called_with + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) + self.assertIsNone(options) + + def test_subscription_modify_ack_deadline_hit(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + NEW_DEADLINE = 90 + gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) + api = self._makeOne(gax_api) + + api.subscription_modify_ack_deadline( + self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) + + sub_path, ack_ids, deadline, options = ( + gax_api._modify_ack_deadline_called_with) + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) + self.assertEqual(deadline, NEW_DEADLINE) + self.assertIsNone(options) + + def test_subscription_modify_ack_deadline_miss(self): + from google.cloud.exceptions import NotFound + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + NEW_DEADLINE = 90 + gax_api = _GAXSubscriberAPI() + api = self._makeOne(gax_api) + + with self.assertRaises(NotFound): + api.subscription_modify_ack_deadline( + self.SUB_PATH, 
[ACK_ID1, ACK_ID2], NEW_DEADLINE) + + sub_path, ack_ids, deadline, options = ( + gax_api._modify_ack_deadline_called_with) + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) + self.assertEqual(deadline, NEW_DEADLINE) + self.assertIsNone(options) + + def test_subscription_modify_ack_deadline_error(self): + from google.gax.errors import GaxError + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + NEW_DEADLINE = 90 + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + api = self._makeOne(gax_api) + + with self.assertRaises(GaxError): + api.subscription_modify_ack_deadline( + self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) + + sub_path, ack_ids, deadline, options = ( + gax_api._modify_ack_deadline_called_with) + self.assertEqual(sub_path, self.SUB_PATH) + self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) + self.assertEqual(deadline, NEW_DEADLINE) + self.assertIsNone(options) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_make_gax_publisher_api(_Base, unittest.TestCase): + + def _callFUT(self, connection): + from google.cloud.pubsub._gax import make_gax_publisher_api + return make_gax_publisher_api(connection) + + def test_live_api(self): + from google.cloud._testing import _Monkey + from google.cloud.pubsub import _gax as MUT + + channels = [] + mock_result = object() + + def mock_publisher_api(channel): + channels.append(channel) + return mock_result + + connection = _Connection(in_emulator=False) + with _Monkey(MUT, PublisherApi=mock_publisher_api): + result = self._callFUT(connection) + + self.assertIs(result, mock_result) + self.assertEqual(channels, [None]) + + def test_emulator(self): + from google.cloud._testing import _Monkey + from google.cloud.pubsub import _gax as MUT + + channels = [] + mock_result = object() + insecure_args = [] + mock_channel = object() + + def mock_publisher_api(channel): + channels.append(channel) + return mock_result + + def mock_insecure_channel(host): + insecure_args.append(host) 
+ return mock_channel + + host = 'CURR_HOST:1234' + connection = _Connection(in_emulator=True, host=host) + with _Monkey(MUT, PublisherApi=mock_publisher_api, + insecure_channel=mock_insecure_channel): + result = self._callFUT(connection) + + self.assertIs(result, mock_result) + self.assertEqual(channels, [mock_channel]) + self.assertEqual(insecure_args, [host]) + + +@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +class Test_make_gax_subscriber_api(_Base, unittest.TestCase): + + def _callFUT(self, connection): + from google.cloud.pubsub._gax import make_gax_subscriber_api + return make_gax_subscriber_api(connection) + + def test_live_api(self): + from google.cloud._testing import _Monkey + from google.cloud.pubsub import _gax as MUT + + channels = [] + mock_result = object() + + def mock_subscriber_api(channel): + channels.append(channel) + return mock_result + + connection = _Connection(in_emulator=False) + with _Monkey(MUT, SubscriberApi=mock_subscriber_api): + result = self._callFUT(connection) + + self.assertIs(result, mock_result) + self.assertEqual(channels, [None]) + + def test_emulator(self): + from google.cloud._testing import _Monkey + from google.cloud.pubsub import _gax as MUT + + channels = [] + mock_result = object() + insecure_args = [] + mock_channel = object() + + def mock_subscriber_api(channel): + channels.append(channel) + return mock_result + + def mock_insecure_channel(host): + insecure_args.append(host) + return mock_channel + + host = 'CURR_HOST:1234' + connection = _Connection(in_emulator=True, host=host) + with _Monkey(MUT, SubscriberApi=mock_subscriber_api, + insecure_channel=mock_insecure_channel): + result = self._callFUT(connection) + + self.assertIs(result, mock_result) + self.assertEqual(channels, [mock_channel]) + self.assertEqual(insecure_args, [host]) + + +class _GAXPublisherAPI(_GAXBaseAPI): + + _create_topic_conflict = False + + def list_topics(self, name, page_size, options): + self._list_topics_called_with = name, page_size, 
options + return self._list_topics_response + + def create_topic(self, name, options=None): + from google.gax.errors import GaxError + self._create_topic_called_with = name, options + if self._random_gax_error: + raise GaxError('error') + if self._create_topic_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + return self._create_topic_response + + def get_topic(self, name, options=None): + from google.gax.errors import GaxError + self._get_topic_called_with = name, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._get_topic_response + except AttributeError: + raise GaxError('miss', self._make_grpc_not_found()) + + def delete_topic(self, name, options=None): + from google.gax.errors import GaxError + self._delete_topic_called_with = name, options + if self._random_gax_error: + raise GaxError('error') + if not self._delete_topic_ok: + raise GaxError('miss', self._make_grpc_not_found()) + + def publish(self, topic, messages, options=None): + from google.gax.errors import GaxError + self._publish_called_with = topic, messages, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._publish_response + except AttributeError: + raise GaxError('miss', self._make_grpc_not_found()) + + def list_topic_subscriptions(self, topic, page_size, options=None): + from google.gax.errors import GaxError + self._list_topic_subscriptions_called_with = topic, page_size, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._list_topic_subscriptions_response + except AttributeError: + raise GaxError('miss', self._make_grpc_not_found()) + + +class _GAXSubscriberAPI(_GAXBaseAPI): + + _create_subscription_conflict = False + _modify_push_config_ok = False + _acknowledge_ok = False + _modify_ack_deadline_ok = False + + def list_subscriptions(self, project, page_size, options=None): + self._list_subscriptions_called_with = (project, page_size, options) + return 
self._list_subscriptions_response + + def create_subscription(self, name, topic, + push_config, ack_deadline_seconds, + options=None): + from google.gax.errors import GaxError + self._create_subscription_called_with = ( + name, topic, push_config, ack_deadline_seconds, options) + if self._random_gax_error: + raise GaxError('error') + if self._create_subscription_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + return self._create_subscription_response + + def get_subscription(self, name, options=None): + from google.gax.errors import GaxError + self._get_subscription_called_with = name, options + if self._random_gax_error: + raise GaxError('error') + try: + return self._get_subscription_response + except AttributeError: + raise GaxError('miss', self._make_grpc_not_found()) + + def delete_subscription(self, name, options=None): + from google.gax.errors import GaxError + self._delete_subscription_called_with = name, options + if self._random_gax_error: + raise GaxError('error') + if not self._delete_subscription_ok: + raise GaxError('miss', self._make_grpc_not_found()) + + def modify_push_config(self, name, push_config, options=None): + from google.gax.errors import GaxError + self._modify_push_config_called_with = name, push_config, options + if self._random_gax_error: + raise GaxError('error') + if not self._modify_push_config_ok: + raise GaxError('miss', self._make_grpc_not_found()) + + def pull(self, name, max_messages, return_immediately, options=None): + from google.gax.errors import GaxError + self._pull_called_with = ( + name, max_messages, return_immediately, options) + if self._random_gax_error: + raise GaxError('error') + try: + return self._pull_response + except AttributeError: + raise GaxError('miss', self._make_grpc_not_found()) + + def acknowledge(self, name, ack_ids, options=None): + from google.gax.errors import GaxError + self._acknowledge_called_with = name, ack_ids, options + if self._random_gax_error: + raise 
GaxError('error') + if not self._acknowledge_ok: + raise GaxError('miss', self._make_grpc_not_found()) + + def modify_ack_deadline(self, name, ack_ids, deadline, options=None): + from google.gax.errors import GaxError + self._modify_ack_deadline_called_with = ( + name, ack_ids, deadline, options) + if self._random_gax_error: + raise GaxError('error') + if not self._modify_ack_deadline_ok: + raise GaxError('miss', self._make_grpc_not_found()) + + +class _TopicPB(object): + + def __init__(self, name): + self.name = name + + +class _PublishResponsePB(object): + + def __init__(self, message_ids): + self.message_ids = message_ids + + +class _PushConfigPB(object): + + def __init__(self, push_endpoint): + self.push_endpoint = push_endpoint + + +class _PubsubMessagePB(object): + + def __init__(self, message_id, data, attributes, publish_time): + self.message_id = message_id + self.data = data + self.attributes = attributes + self.publish_time = publish_time + + +class _ReceivedMessagePB(object): + + def __init__(self, ack_id, message): + self.ack_id = ack_id + self.message = message + + +class _PullResponsePB(object): + + def __init__(self, received_messages): + self.received_messages = received_messages + + +class _SubscriptionPB(object): + + def __init__(self, name, topic, push_endpoint, ack_deadline_seconds): + self.name = name + self.topic = topic + self.push_config = _PushConfigPB(push_endpoint) + self.ack_deadline_seconds = ack_deadline_seconds + + +class _Connection(object): + + def __init__(self, in_emulator=False, host=None): + self.in_emulator = in_emulator + self.host = host diff --git a/packages/google-cloud-pubsub/unit_tests/test__helpers.py b/packages/google-cloud-pubsub/unit_tests/test__helpers.py new file mode 100644 index 000000000000..5ff47b7802ea --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test__helpers.py @@ -0,0 +1,57 @@ +# Copyright 2015 Google Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class Test_topic_name_from_path(unittest.TestCase): + + def _callFUT(self, path, project): + from google.cloud.pubsub._helpers import topic_name_from_path + return topic_name_from_path(path, project) + + def test_w_simple_name(self): + TOPIC_NAME = 'TOPIC_NAME' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + topic_name = self._callFUT(PATH, PROJECT) + self.assertEqual(topic_name, TOPIC_NAME) + + def test_w_name_w_all_extras(self): + TOPIC_NAME = 'TOPIC_NAME-part.one~part.two%part-three' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + topic_name = self._callFUT(PATH, PROJECT) + self.assertEqual(topic_name, TOPIC_NAME) + + +class Test_subscription_name_from_path(unittest.TestCase): + + def _callFUT(self, path, project): + from google.cloud.pubsub._helpers import subscription_name_from_path + return subscription_name_from_path(path, project) + + def test_w_simple_name(self): + SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) + subscription_name = self._callFUT(PATH, PROJECT) + self.assertEqual(subscription_name, SUBSCRIPTION_NAME) + + def test_w_name_w_all_extras(self): + SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME-part.one~part.two%part-three' + PROJECT = 'my-project-1234' + PATH = 'projects/%s/subscriptions/%s' % (PROJECT, 
SUBSCRIPTION_NAME) + subscription_name = self._callFUT(PATH, PROJECT) + self.assertEqual(subscription_name, SUBSCRIPTION_NAME) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py new file mode 100644 index 000000000000..b1929be76df3 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -0,0 +1,303 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestClient(unittest.TestCase): + PROJECT = 'PROJECT' + TOPIC_NAME = 'topic_name' + TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + SUB_NAME = 'subscription_name' + SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) + + def _getTargetClass(self): + from google.cloud.pubsub.client import Client + return Client + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_publisher_api_wo_gax(self): + from google.cloud.pubsub.connection import _PublisherAPI + from google.cloud.pubsub import client as MUT + from google.cloud._testing import _Monkey + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = object() + + with _Monkey(MUT, _USE_GAX=False): + api = client.publisher_api + + self.assertIsInstance(api, _PublisherAPI) + self.assertIs(api._connection, conn) + # API instance is cached + again = client.publisher_api + self.assertIs(again, api) + + def 
test_publisher_api_w_gax(self): + from google.cloud.pubsub import client as MUT + from google.cloud._testing import _Monkey + + wrapped = object() + _called_with = [] + + def _generated_api(*args, **kw): + _called_with.append((args, kw)) + return wrapped + + class _GaxPublisherAPI(object): + + def __init__(self, _wrapped): + self._wrapped = _wrapped + + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + + with _Monkey(MUT, + _USE_GAX=True, + make_gax_publisher_api=_generated_api, + GAXPublisherAPI=_GaxPublisherAPI): + api = client.publisher_api + + self.assertIsInstance(api, _GaxPublisherAPI) + self.assertIs(api._wrapped, wrapped) + # API instance is cached + again = client.publisher_api + self.assertIs(again, api) + args = (client.connection,) + self.assertEqual(_called_with, [(args, {})]) + + def test_subscriber_api_wo_gax(self): + from google.cloud.pubsub.connection import _SubscriberAPI + from google.cloud.pubsub import client as MUT + from google.cloud._testing import _Monkey + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = object() + + with _Monkey(MUT, _USE_GAX=False): + api = client.subscriber_api + + self.assertIsInstance(api, _SubscriberAPI) + self.assertIs(api._connection, conn) + # API instance is cached + again = client.subscriber_api + self.assertIs(again, api) + + def test_subscriber_api_w_gax(self): + from google.cloud.pubsub import client as MUT + from google.cloud._testing import _Monkey + + wrapped = object() + _called_with = [] + + def _generated_api(*args, **kw): + _called_with.append((args, kw)) + return wrapped + + class _GaxSubscriberAPI(object): + + def __init__(self, _wrapped): + self._wrapped = _wrapped + + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + + with _Monkey(MUT, + _USE_GAX=True, + make_gax_subscriber_api=_generated_api, + GAXSubscriberAPI=_GaxSubscriberAPI): + api = 
client.subscriber_api + + self.assertIsInstance(api, _GaxSubscriberAPI) + self.assertIs(api._wrapped, wrapped) + # API instance is cached + again = client.subscriber_api + self.assertIs(again, api) + args = (client.connection,) + self.assertEqual(_called_with, [(args, {})]) + + def test_iam_policy_api(self): + from google.cloud.pubsub.connection import _IAMPolicyAPI + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + conn = client.connection = object() + api = client.iam_policy_api + self.assertIsInstance(api, _IAMPolicyAPI) + self.assertIs(api._connection, conn) + # API instance is cached + again = client.iam_policy_api + self.assertIs(again, api) + + def test_list_topics_no_paging(self): + from google.cloud.pubsub.topic import Topic + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + client.connection = object() + api = client._publisher_api = _FauxPublisherAPI() + api._list_topics_response = [{'name': self.TOPIC_PATH}], None + + topics, next_page_token = client.list_topics() + + self.assertEqual(len(topics), 1) + self.assertIsInstance(topics[0], Topic) + self.assertEqual(topics[0].name, self.TOPIC_NAME) + self.assertIsNone(next_page_token) + + self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) + + def test_list_topics_with_paging(self): + from google.cloud.pubsub.topic import Topic + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + client.connection = object() + api = client._publisher_api = _FauxPublisherAPI() + api._list_topics_response = [{'name': self.TOPIC_PATH}], TOKEN2 + + topics, next_page_token = client.list_topics(SIZE, TOKEN1) + + self.assertEqual(len(topics), 1) + self.assertIsInstance(topics[0], Topic) + self.assertEqual(topics[0].name, self.TOPIC_NAME) + self.assertEqual(next_page_token, TOKEN2) + + self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) 
+ + def test_list_topics_missing_key(self): + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + client.connection = object() + api = client._publisher_api = _FauxPublisherAPI() + api._list_topics_response = (), None + + topics, next_page_token = client.list_topics() + + self.assertEqual(len(topics), 0) + self.assertIsNone(next_page_token) + + self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) + + def test_list_subscriptions_no_paging(self): + from google.cloud.pubsub.subscription import Subscription + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + client.connection = object() + api = client._subscriber_api = _FauxSubscriberAPI() + api._list_subscriptions_response = [SUB_INFO], None + + subscriptions, next_page_token = client.list_subscriptions() + + self.assertEqual(len(subscriptions), 1) + self.assertIsInstance(subscriptions[0], Subscription) + self.assertEqual(subscriptions[0].name, self.SUB_NAME) + self.assertEqual(subscriptions[0].topic.name, self.TOPIC_NAME) + self.assertIsNone(next_page_token) + + self.assertEqual(api._listed_subscriptions, + (self.PROJECT, None, None)) + + def test_list_subscriptions_with_paging(self): + from google.cloud.pubsub.subscription import Subscription + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + creds = _Credentials() + client = self._makeOne(project=self.PROJECT, credentials=creds) + ACK_DEADLINE = 42 + PUSH_ENDPOINT = 'https://push.example.com/endpoint' + SUB_INFO = {'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'ackDeadlineSeconds': ACK_DEADLINE, + 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}} + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + client.connection = object() + api = client._subscriber_api = _FauxSubscriberAPI() + api._list_subscriptions_response = [SUB_INFO], TOKEN2 + + subscriptions, next_page_token = 
client.list_subscriptions( + SIZE, TOKEN1) + + self.assertEqual(len(subscriptions), 1) + self.assertIsInstance(subscriptions[0], Subscription) + self.assertEqual(subscriptions[0].name, self.SUB_NAME) + self.assertEqual(subscriptions[0].topic.name, self.TOPIC_NAME) + self.assertEqual(subscriptions[0].ack_deadline, ACK_DEADLINE) + self.assertEqual(subscriptions[0].push_endpoint, PUSH_ENDPOINT) + self.assertEqual(next_page_token, TOKEN2) + + self.assertEqual(api._listed_subscriptions, + (self.PROJECT, SIZE, TOKEN1)) + + def test_list_subscriptions_w_missing_key(self): + PROJECT = 'PROJECT' + creds = _Credentials() + + client = self._makeOne(project=PROJECT, credentials=creds) + client.connection = object() + api = client._subscriber_api = _FauxSubscriberAPI() + api._list_subscriptions_response = (), None + + subscriptions, next_page_token = client.list_subscriptions() + + self.assertEqual(len(subscriptions), 0) + self.assertIsNone(next_page_token) + + self.assertEqual(api._listed_subscriptions, + (self.PROJECT, None, None)) + + def test_topic(self): + PROJECT = 'PROJECT' + TOPIC_NAME = 'TOPIC_NAME' + creds = _Credentials() + + client_obj = self._makeOne(project=PROJECT, credentials=creds) + new_topic = client_obj.topic(TOPIC_NAME) + self.assertEqual(new_topic.name, TOPIC_NAME) + self.assertIs(new_topic._client, client_obj) + self.assertEqual(new_topic.project, PROJECT) + self.assertEqual(new_topic.full_name, + 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) + self.assertFalse(new_topic.timestamp_messages) + + +class _Credentials(object): + + _scopes = None + + @staticmethod + def create_scoped_required(): + return True + + def create_scoped(self, scope): + self._scopes = scope + return self + + +class _FauxPublisherAPI(object): + + def list_topics(self, project, page_size, page_token): + self._listed_topics = (project, page_size, page_token) + return self._list_topics_response + + +class _FauxSubscriberAPI(object): + + def list_subscriptions(self, project, page_size, 
page_token): + self._listed_subscriptions = (project, page_size, page_token) + return self._list_subscriptions_response diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py new file mode 100644 index 000000000000..9e14e1e0d528 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -0,0 +1,737 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class _Base(unittest.TestCase): + PROJECT = 'PROJECT' + LIST_TOPICS_PATH = 'projects/%s/topics' % (PROJECT,) + LIST_SUBSCRIPTIONS_PATH = 'projects/%s/subscriptions' % (PROJECT,) + TOPIC_NAME = 'topic_name' + TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) + SUB_NAME = 'subscription_name' + SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + +class TestConnection(_Base): + + def _getTargetClass(self): + from google.cloud.pubsub.connection import Connection + return Connection + + def test_default_url(self): + conn = self._makeOne() + klass = self._getTargetClass() + self.assertEqual(conn.api_base_url, klass.API_BASE_URL) + + def test_custom_url_from_env(self): + import os + from google.cloud._testing import _Monkey + from google.cloud.environment_vars import PUBSUB_EMULATOR + + HOST = 
'localhost:8187' + fake_environ = {PUBSUB_EMULATOR: HOST} + + with _Monkey(os, getenv=fake_environ.get): + conn = self._makeOne() + + klass = self._getTargetClass() + self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) + self.assertEqual(conn.api_base_url, 'http://' + HOST) + + def test_build_api_url_no_extra_query_params(self): + conn = self._makeOne() + URI = '/'.join([ + conn.API_BASE_URL, + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(conn.build_api_url('/foo'), URI) + + def test_build_api_url_w_extra_query_params(self): + from six.moves.urllib.parse import parse_qsl + from six.moves.urllib.parse import urlsplit + conn = self._makeOne() + uri = conn.build_api_url('/foo', {'bar': 'baz'}) + scheme, netloc, path, qs, _ = urlsplit(uri) + self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) + self.assertEqual(path, + '/'.join(['', conn.API_VERSION, 'foo'])) + parms = dict(parse_qsl(qs)) + self.assertEqual(parms['bar'], 'baz') + + def test_build_api_url_w_base_url_override(self): + base_url1 = 'api-base-url1' + base_url2 = 'api-base-url2' + conn = self._makeOne() + conn.api_base_url = base_url1 + URI = '/'.join([ + base_url2, + conn.API_VERSION, + 'foo', + ]) + self.assertEqual(conn.build_api_url('/foo', api_base_url=base_url2), + URI) + + +class Test_PublisherAPI(_Base): + + def _getTargetClass(self): + from google.cloud.pubsub.connection import _PublisherAPI + return _PublisherAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = _Connection() + api = self._makeOne(connection) + self.assertIs(api._connection, connection) + + def test_list_topics_no_paging(self): + RETURNED = {'topics': [{'name': self.TOPIC_PATH}]} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + topics, next_token = api.list_topics(self.PROJECT) + + self.assertEqual(len(topics), 1) + topic = topics[0] + self.assertIsInstance(topic, dict) + self.assertEqual(topic['name'], 
self.TOPIC_PATH) + self.assertIsNone(next_token) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPICS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], {}) + + def test_list_topics_with_paging(self): + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + RETURNED = { + 'topics': [{'name': self.TOPIC_PATH}], + 'nextPageToken': 'TOKEN2', + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + topics, next_token = api.list_topics( + self.PROJECT, page_token=TOKEN1, page_size=SIZE) + + self.assertEqual(len(topics), 1) + topic = topics[0] + self.assertIsInstance(topic, dict) + self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertEqual(next_token, TOKEN2) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPICS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], + {'pageToken': TOKEN1, 'pageSize': SIZE}) + + def test_list_topics_missing_key(self): + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + topics, next_token = api.list_topics(self.PROJECT) + + self.assertEqual(len(topics), 0) + self.assertIsNone(next_token) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPICS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], {}) + + def test_topic_create(self): + RETURNED = {'name': self.TOPIC_PATH} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + resource = api.topic_create(self.TOPIC_PATH) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_topic_create_already_exists(self): + from 
google.cloud.exceptions import Conflict + connection = _Connection() + connection._no_response_error = Conflict + api = self._makeOne(connection) + + with self.assertRaises(Conflict): + api.topic_create(self.TOPIC_PATH) + + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_topic_get_hit(self): + RETURNED = {'name': self.TOPIC_PATH} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + resource = api.topic_get(self.TOPIC_PATH) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_topic_get_miss(self): + from google.cloud.exceptions import NotFound + connection = _Connection() + api = self._makeOne(connection) + + with self.assertRaises(NotFound): + api.topic_get(self.TOPIC_PATH) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_topic_delete_hit(self): + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + api.topic_delete(self.TOPIC_PATH) + + self.assertEqual(connection._called_with['method'], 'DELETE') + path = '/%s' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_topic_delete_miss(self): + from google.cloud.exceptions import NotFound + connection = _Connection() + api = self._makeOne(connection) + + with self.assertRaises(NotFound): + api.topic_delete(self.TOPIC_PATH) + + self.assertEqual(connection._called_with['method'], 'DELETE') + path = '/%s' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_topic_publish_hit(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MSGID = 
'DEADBEEF' + MESSAGE = {'data': B64, 'attributes': {}} + RETURNED = {'messageIds': [MSGID]} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + + self.assertEqual(resource, [MSGID]) + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:publish' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], + {'messages': [MESSAGE]}) + + def test_topic_publish_miss(self): + import base64 + from google.cloud.exceptions import NotFound + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MESSAGE = {'data': B64, 'attributes': {}} + connection = _Connection() + api = self._makeOne(connection) + + with self.assertRaises(NotFound): + api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:publish' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], + {'messages': [MESSAGE]}) + + def test_topic_list_subscriptions_no_paging(self): + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + RETURNED = {'subscriptions': [SUB_INFO]} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + subscriptions, next_token = api.topic_list_subscriptions( + self.TOPIC_PATH) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertIsNone(next_token) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], {}) + + def 
test_topic_list_subscriptions_with_paging(self): + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + RETURNED = { + 'subscriptions': [SUB_INFO], + 'nextPageToken': 'TOKEN2', + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + subscriptions, next_token = api.topic_list_subscriptions( + self.TOPIC_PATH, page_token=TOKEN1, page_size=SIZE) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertEqual(next_token, TOKEN2) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], + {'pageToken': TOKEN1, 'pageSize': SIZE}) + + def test_topic_list_subscriptions_missing_key(self): + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + subscriptions, next_token = api.topic_list_subscriptions( + self.TOPIC_PATH) + + self.assertEqual(len(subscriptions), 0) + self.assertIsNone(next_token) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], {}) + + def test_topic_list_subscriptions_miss(self): + from google.cloud.exceptions import NotFound + connection = _Connection() + api = self._makeOne(connection) + + with self.assertRaises(NotFound): + api.topic_list_subscriptions(self.TOPIC_PATH) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + 
self.assertEqual(connection._called_with['query_params'], {}) + + +class Test_SubscriberAPI(_Base): + + def _getTargetClass(self): + from google.cloud.pubsub.connection import _SubscriberAPI + return _SubscriberAPI + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor(self): + connection = _Connection() + api = self._makeOne(connection) + self.assertIs(api._connection, connection) + + def test_list_subscriptions_no_paging(self): + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + RETURNED = {'subscriptions': [SUB_INFO]} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + subscriptions, next_token = api.list_subscriptions(self.PROJECT) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertIsNone(next_token) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], {}) + + def test_list_subscriptions_with_paging(self): + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + RETURNED = { + 'subscriptions': [SUB_INFO], + 'nextPageToken': 'TOKEN2', + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + subscriptions, next_token = api.list_subscriptions( + self.PROJECT, page_token=TOKEN1, page_size=SIZE) + + self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, dict) + self.assertEqual(subscription['name'], self.SUB_PATH) + self.assertEqual(subscription['topic'], self.TOPIC_PATH) + self.assertEqual(next_token, TOKEN2) + + self.assertEqual(connection._called_with['method'], 'GET') + path = 
'/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], + {'pageToken': TOKEN1, 'pageSize': SIZE}) + + def test_list_subscriptions_missing_key(self): + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + subscriptions, next_token = api.list_subscriptions(self.PROJECT) + + self.assertEqual(len(subscriptions), 0) + self.assertIsNone(next_token) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], {}) + + def test_subscription_create_defaults(self): + RESOURCE = {'topic': self.TOPIC_PATH} + RETURNED = RESOURCE.copy() + RETURNED['name'] = self.SUB_PATH + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], RESOURCE) + + def test_subscription_create_explicit(self): + ACK_DEADLINE = 90 + PUSH_ENDPOINT = 'https://api.example.com/push' + RESOURCE = { + 'topic': self.TOPIC_PATH, + 'ackDeadlineSeconds': ACK_DEADLINE, + 'pushConfig': { + 'pushEndpoint': PUSH_ENDPOINT, + }, + } + RETURNED = RESOURCE.copy() + RETURNED['name'] = self.SUB_PATH + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + resource = api.subscription_create( + self.SUB_PATH, self.TOPIC_PATH, + ack_deadline=ACK_DEADLINE, push_endpoint=PUSH_ENDPOINT) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + 
self.assertEqual(connection._called_with['data'], RESOURCE) + + def test_subscription_get(self): + ACK_DEADLINE = 90 + PUSH_ENDPOINT = 'https://api.example.com/push' + RETURNED = { + 'topic': self.TOPIC_PATH, + 'name': self.SUB_PATH, + 'ackDeadlineSeconds': ACK_DEADLINE, + 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + resource = api.subscription_get(self.SUB_PATH) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_subscription_delete(self): + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + api.subscription_delete(self.SUB_PATH) + + self.assertEqual(connection._called_with['method'], 'DELETE') + path = '/%s' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_subscription_modify_push_config(self): + PUSH_ENDPOINT = 'https://api.example.com/push' + BODY = { + 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, + } + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) + + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:modifyPushConfig' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_subscription_pull_defaults(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + ACK_ID = 'DEADBEEF' + MSG_ID = 'BEADCAFE' + MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} + RETURNED = { + 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + BODY = { + 'returnImmediately': False, + 
'maxMessages': 1, + } + + received = api.subscription_pull(self.SUB_PATH) + + self.assertEqual(received, RETURNED['receivedMessages']) + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:pull' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_subscription_pull_explicit(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + ACK_ID = 'DEADBEEF' + MSG_ID = 'BEADCAFE' + MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} + RETURNED = { + 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + MAX_MESSAGES = 10 + BODY = { + 'returnImmediately': True, + 'maxMessages': MAX_MESSAGES, + } + + received = api.subscription_pull( + self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) + + self.assertEqual(received, RETURNED['receivedMessages']) + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:pull' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_subscription_acknowledge(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + BODY = { + 'ackIds': [ACK_ID1, ACK_ID2], + } + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) + + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:acknowledge' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_subscription_modify_ack_deadline(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + NEW_DEADLINE = 90 + BODY = { + 'ackIds': [ACK_ID1, ACK_ID2], + 'ackDeadlineSeconds': NEW_DEADLINE, + } + RETURNED = {} + 
connection = _Connection(RETURNED) + api = self._makeOne(connection) + + api.subscription_modify_ack_deadline( + self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) + + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:modifyAckDeadline' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + +class Test_IAMPolicyAPI(_Base): + + def _getTargetClass(self): + from google.cloud.pubsub.connection import _IAMPolicyAPI + return _IAMPolicyAPI + + def test_ctor(self): + connection = _Connection() + api = self._makeOne(connection) + self.assertIs(api._connection, connection) + + def test_get_iam_policy(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + OWNER1 = 'user:phred@example.com' + OWNER2 = 'group:cloud-logs@google.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + RETURNED = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + ], + } + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + policy = api.get_iam_policy(self.TOPIC_PATH) + + self.assertEqual(policy, RETURNED) + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s:getIamPolicy' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + + def test_set_iam_policy(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + OWNER1 = 'user:phred@example.com' + OWNER2 = 'group:cloud-logs@google.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 
'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + POLICY = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + ], + } + RETURNED = POLICY.copy() + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) + + self.assertEqual(policy, RETURNED) + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:setIamPolicy' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], + {'policy': POLICY}) + + def test_test_iam_permissions(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] + ALLOWED = ALL_ROLES[1:] + RETURNED = {'permissions': ALLOWED} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) + + self.assertEqual(allowed, ALLOWED) + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], + {'permissions': ALL_ROLES}) + + def test_test_iam_permissions_missing_key(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] + RETURNED = {} + connection = _Connection(RETURNED) + api = self._makeOne(connection) + + allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) + + self.assertEqual(allowed, []) + 
self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], + {'permissions': ALL_ROLES}) + + +class _Connection(object): + + _called_with = None + _no_response_error = None + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + from google.cloud.exceptions import NotFound + self._called_with = kw + try: + response, self._responses = self._responses[0], self._responses[1:] + except IndexError: + err_class = self._no_response_error or NotFound + raise err_class('miss') + return response diff --git a/packages/google-cloud-pubsub/unit_tests/test_iam.py b/packages/google-cloud-pubsub/unit_tests/test_iam.py new file mode 100644 index 000000000000..0a31697b2990 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test_iam.py @@ -0,0 +1,188 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import unittest + + +class TestPolicy(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.pubsub.iam import Policy + return Policy + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + policy = self._makeOne() + self.assertIsNone(policy.etag) + self.assertIsNone(policy.version) + self.assertEqual(list(policy.owners), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) + self.assertEqual(list(policy.publishers), []) + self.assertEqual(list(policy.subscribers), []) + + def test_ctor_explicit(self): + VERSION = 17 + ETAG = 'ETAG' + policy = self._makeOne(ETAG, VERSION) + self.assertEqual(policy.etag, ETAG) + self.assertEqual(policy.version, VERSION) + self.assertEqual(list(policy.owners), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) + self.assertEqual(list(policy.publishers), []) + self.assertEqual(list(policy.subscribers), []) + + def test_user(self): + EMAIL = 'phred@example.com' + MEMBER = 'user:%s' % (EMAIL,) + policy = self._makeOne() + self.assertEqual(policy.user(EMAIL), MEMBER) + + def test_service_account(self): + EMAIL = 'phred@example.com' + MEMBER = 'serviceAccount:%s' % (EMAIL,) + policy = self._makeOne() + self.assertEqual(policy.service_account(EMAIL), MEMBER) + + def test_group(self): + EMAIL = 'phred@example.com' + MEMBER = 'group:%s' % (EMAIL,) + policy = self._makeOne() + self.assertEqual(policy.group(EMAIL), MEMBER) + + def test_domain(self): + DOMAIN = 'example.com' + MEMBER = 'domain:%s' % (DOMAIN,) + policy = self._makeOne() + self.assertEqual(policy.domain(DOMAIN), MEMBER) + + def test_all_users(self): + policy = self._makeOne() + self.assertEqual(policy.all_users(), 'allUsers') + + def test_authenticated_users(self): + policy = self._makeOne() + self.assertEqual(policy.authenticated_users(), 'allAuthenticatedUsers') + + def test_from_api_repr_only_etag(self): + RESOURCE 
= { + 'etag': 'ACAB', + } + klass = self._getTargetClass() + policy = klass.from_api_repr(RESOURCE) + self.assertEqual(policy.etag, 'ACAB') + self.assertIsNone(policy.version) + self.assertEqual(list(policy.owners), []) + self.assertEqual(list(policy.editors), []) + self.assertEqual(list(policy.viewers), []) + + def test_from_api_repr_complete(self): + from google.cloud.pubsub.iam import ( + OWNER_ROLE, + EDITOR_ROLE, + VIEWER_ROLE, + PUBSUB_PUBLISHER_ROLE, + PUBSUB_SUBSCRIBER_ROLE, + ) + OWNER1 = 'user:phred@example.com' + OWNER2 = 'group:cloud-logs@google.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + PUBLISHER = 'user:phred@example.com' + SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + RESOURCE = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, + ], + } + klass = self._getTargetClass() + policy = klass.from_api_repr(RESOURCE) + self.assertEqual(policy.etag, 'DEADBEEF') + self.assertEqual(policy.version, 17) + self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) + self.assertEqual(sorted(policy.publishers), [PUBLISHER]) + self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) + + def test_from_api_repr_bad_role(self): + BOGUS1 = 'user:phred@example.com' + BOGUS2 = 'group:cloud-logs@google.com' + RESOURCE = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': 'nonesuch', 'members': [BOGUS1, BOGUS2]}, + ], + } + klass = self._getTargetClass() + with self.assertRaises(ValueError): + 
klass.from_api_repr(RESOURCE) + + def test_to_api_repr_defaults(self): + policy = self._makeOne() + self.assertEqual(policy.to_api_repr(), {}) + + def test_to_api_repr_only_etag(self): + policy = self._makeOne('DEADBEEF') + self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) + + def test_to_api_repr_full(self): + from google.cloud.pubsub.iam import ( + PUBSUB_ADMIN_ROLE, + PUBSUB_EDITOR_ROLE, + PUBSUB_VIEWER_ROLE, + PUBSUB_PUBLISHER_ROLE, + PUBSUB_SUBSCRIBER_ROLE, + ) + OWNER1 = 'group:cloud-logs@google.com' + OWNER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + PUBLISHER = 'user:phred@example.com' + SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + EXPECTED = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, + ], + } + policy = self._makeOne('DEADBEEF', 17) + policy.owners.add(OWNER1) + policy.owners.add(OWNER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) + policy.publishers.add(PUBLISHER) + policy.subscribers.add(SUBSCRIBER) + self.assertEqual(policy.to_api_repr(), EXPECTED) diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py new file mode 100644 index 000000000000..5d08972e5430 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -0,0 +1,126 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestMessage(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.pubsub.message import Message + return Message + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_no_attributes(self): + DATA = b'DEADBEEF' + MESSAGE_ID = b'12345' + message = self._makeOne(data=DATA, message_id=MESSAGE_ID) + self.assertEqual(message.data, DATA) + self.assertEqual(message.message_id, MESSAGE_ID) + self.assertEqual(message.attributes, {}) + self.assertIsNone(message.service_timestamp) + + def test_ctor_w_attributes(self): + DATA = b'DEADBEEF' + MESSAGE_ID = b'12345' + ATTRS = {'a': 'b'} + message = self._makeOne(data=DATA, message_id=MESSAGE_ID, + attributes=ATTRS) + self.assertEqual(message.data, DATA) + self.assertEqual(message.message_id, MESSAGE_ID) + self.assertEqual(message.attributes, ATTRS) + self.assertIsNone(message.service_timestamp) + + def test_timestamp_no_attributes(self): + DATA = b'DEADBEEF' + MESSAGE_ID = b'12345' + message = self._makeOne(data=DATA, message_id=MESSAGE_ID) + + def _to_fail(): + return message.timestamp + + self.assertRaises(ValueError, _to_fail) + + def test_timestamp_wo_timestamp_in_attributes(self): + DATA = b'DEADBEEF' + MESSAGE_ID = b'12345' + ATTRS = {'a': 'b'} + message = self._makeOne(data=DATA, message_id=MESSAGE_ID, + attributes=ATTRS) + + def _to_fail(): + return message.timestamp + + self.assertRaises(ValueError, _to_fail) + + def test_timestamp_w_timestamp_in_attributes(self): + from datetime import datetime + from google.cloud._helpers 
import _RFC3339_MICROS + from google.cloud._helpers import UTC + DATA = b'DEADBEEF' + MESSAGE_ID = b'12345' + TIMESTAMP = '2015-04-10T18:42:27.131956Z' + naive = datetime.strptime(TIMESTAMP, _RFC3339_MICROS) + timestamp = naive.replace(tzinfo=UTC) + ATTRS = {'timestamp': TIMESTAMP} + message = self._makeOne(data=DATA, message_id=MESSAGE_ID, + attributes=ATTRS) + self.assertEqual(message.timestamp, timestamp) + + def test_from_api_repr_missing_data(self): + MESSAGE_ID = '12345' + api_repr = {'messageId': MESSAGE_ID} + message = self._getTargetClass().from_api_repr(api_repr) + self.assertEqual(message.data, b'') + self.assertEqual(message.message_id, MESSAGE_ID) + self.assertEqual(message.attributes, {}) + self.assertIsNone(message.service_timestamp) + + def test_from_api_repr_no_attributes(self): + from base64 import b64encode as b64 + DATA = b'DEADBEEF' + B64_DATA = b64(DATA) + MESSAGE_ID = '12345' + TIMESTAMP = '2016-03-18-19:38:22.001393427Z' + api_repr = { + 'data': B64_DATA, + 'messageId': MESSAGE_ID, + 'publishTimestamp': TIMESTAMP, + } + message = self._getTargetClass().from_api_repr(api_repr) + self.assertEqual(message.data, DATA) + self.assertEqual(message.message_id, MESSAGE_ID) + self.assertEqual(message.attributes, {}) + self.assertEqual(message.service_timestamp, TIMESTAMP) + + def test_from_api_repr_w_attributes(self): + from base64 import b64encode as b64 + DATA = b'DEADBEEF' + B64_DATA = b64(DATA) + MESSAGE_ID = '12345' + ATTRS = {'a': 'b'} + TIMESTAMP = '2016-03-18-19:38:22.001393427Z' + api_repr = { + 'data': B64_DATA, + 'messageId': MESSAGE_ID, + 'publishTimestamp': TIMESTAMP, + 'attributes': ATTRS, + } + message = self._getTargetClass().from_api_repr(api_repr) + self.assertEqual(message.data, DATA) + self.assertEqual(message.message_id, MESSAGE_ID) + self.assertEqual(message.service_timestamp, TIMESTAMP) + self.assertEqual(message.attributes, ATTRS) diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py 
b/packages/google-cloud-pubsub/unit_tests/test_subscription.py new file mode 100644 index 000000000000..dbefa7d6ec1a --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -0,0 +1,811 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestSubscription(unittest.TestCase): + PROJECT = 'PROJECT' + TOPIC_NAME = 'topic_name' + TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + SUB_NAME = 'sub_name' + SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) + DEADLINE = 42 + ENDPOINT = 'https://api.example.com/push' + + def _getTargetClass(self): + from google.cloud.pubsub.subscription import Subscription + return Subscription + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + client = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIs(subscription.topic, topic) + self.assertIsNone(subscription.ack_deadline) + self.assertIsNone(subscription.push_endpoint) + + def test_ctor_explicit(self): + client = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic, + self.DEADLINE, self.ENDPOINT) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIs(subscription.topic, topic) + 
self.assertEqual(subscription.ack_deadline, self.DEADLINE) + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + + def test_ctor_w_client_wo_topic(self): + client = _Client(project=self.PROJECT) + subscription = self._makeOne(self.SUB_NAME, client=client) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsNone(subscription.topic) + + def test_ctor_w_both_topic_and_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client1) + with self.assertRaises(TypeError): + self._makeOne(self.SUB_NAME, topic, client=client2) + + def test_ctor_w_neither_topic_nor_client(self): + with self.assertRaises(TypeError): + self._makeOne(self.SUB_NAME) + + def test_from_api_repr_no_topics(self): + from google.cloud.pubsub.topic import Topic + resource = {'topic': self.TOPIC_PATH, + 'name': self.SUB_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT}} + klass = self._getTargetClass() + client = _Client(project=self.PROJECT) + subscription = klass.from_api_repr(resource, client) + self.assertEqual(subscription.name, self.SUB_NAME) + topic = subscription.topic + self.assertIsInstance(topic, Topic) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.project, self.PROJECT) + self.assertEqual(subscription.ack_deadline, self.DEADLINE) + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + + def test_from_api_repr_w_deleted_topic(self): + klass = self._getTargetClass() + resource = {'topic': klass._DELETED_TOPIC_PATH, + 'name': self.SUB_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT}} + klass = self._getTargetClass() + client = _Client(project=self.PROJECT) + subscription = klass.from_api_repr(resource, client) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsNone(subscription.topic) + self.assertEqual(subscription.ack_deadline, self.DEADLINE) + 
self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + + def test_from_api_repr_w_topics_no_topic_match(self): + from google.cloud.pubsub.topic import Topic + resource = {'topic': self.TOPIC_PATH, + 'name': self.SUB_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT}} + topics = {} + klass = self._getTargetClass() + client = _Client(project=self.PROJECT) + subscription = klass.from_api_repr(resource, client, topics=topics) + self.assertEqual(subscription.name, self.SUB_NAME) + topic = subscription.topic + self.assertIsInstance(topic, Topic) + self.assertIs(topic, topics[self.TOPIC_PATH]) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.project, self.PROJECT) + self.assertEqual(subscription.ack_deadline, self.DEADLINE) + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + + def test_from_api_repr_w_topics_w_topic_match(self): + resource = {'topic': self.TOPIC_PATH, + 'name': self.SUB_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT}} + client = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client) + topics = {self.TOPIC_PATH: topic} + klass = self._getTargetClass() + subscription = klass.from_api_repr(resource, client, topics=topics) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIs(subscription.topic, topic) + self.assertEqual(subscription.ack_deadline, self.DEADLINE) + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + + def test_full_name_and_path(self): + PROJECT = 'PROJECT' + SUB_FULL = 'projects/%s/subscriptions/%s' % (PROJECT, self.SUB_NAME) + SUB_PATH = '/%s' % (SUB_FULL,) + TOPIC_NAME = 'topic_name' + CLIENT = _Client(project=PROJECT) + topic = _Topic(TOPIC_NAME, client=CLIENT) + subscription = self._makeOne(self.SUB_NAME, topic) + self.assertEqual(subscription.full_name, SUB_FULL) + self.assertEqual(subscription.path, SUB_PATH) + + def test_autoack_defaults(self): + from 
google.cloud.pubsub.subscription import AutoAck + client = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + auto_ack = subscription.auto_ack() + self.assertIsInstance(auto_ack, AutoAck) + self.assertIs(auto_ack._subscription, subscription) + self.assertEqual(auto_ack._return_immediately, False) + self.assertEqual(auto_ack._max_messages, 1) + self.assertIsNone(auto_ack._client) + + def test_autoack_explicit(self): + from google.cloud.pubsub.subscription import AutoAck + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + auto_ack = subscription.auto_ack(True, 10, client2) + self.assertIsInstance(auto_ack, AutoAck) + self.assertIs(auto_ack._subscription, subscription) + self.assertEqual(auto_ack._return_immediately, True) + self.assertEqual(auto_ack._max_messages, 10) + self.assertIs(auto_ack._client, client2) + + def test_create_pull_wo_ack_deadline_w_bound_client(self): + RESPONSE = { + 'topic': self.TOPIC_PATH, + 'name': self.SUB_PATH, + } + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_create_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.create() + + self.assertEqual(api._subscription_created, + (self.SUB_PATH, self.TOPIC_PATH, None, None)) + + def test_create_push_w_ack_deadline_w_alternate_client(self): + RESPONSE = { + 'topic': self.TOPIC_PATH, + 'name': self.SUB_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT} + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_create_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, 
client=client1) + subscription = self._makeOne(self.SUB_NAME, topic, + self.DEADLINE, self.ENDPOINT) + + subscription.create(client=client2) + + self.assertEqual( + api._subscription_created, + (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT)) + + def test_exists_miss_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + self.assertFalse(subscription.exists()) + + self.assertEqual(api._subscription_got, self.SUB_PATH) + + def test_exists_hit_w_alternate_client(self): + RESPONSE = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_get_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + self.assertTrue(subscription.exists(client=client2)) + + self.assertEqual(api._subscription_got, self.SUB_PATH) + + def test_reload_w_bound_client(self): + RESPONSE = { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT}, + } + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_get_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.reload() + + self.assertEqual(subscription.ack_deadline, self.DEADLINE) + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + self.assertEqual(api._subscription_got, self.SUB_PATH) + + def test_reload_w_alternate_client(self): + RESPONSE = { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = 
client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_get_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic, + self.DEADLINE, self.ENDPOINT) + + subscription.reload(client=client2) + + self.assertIsNone(subscription.ack_deadline) + self.assertIsNone(subscription.push_endpoint) + self.assertEqual(api._subscription_got, self.SUB_PATH) + + def test_delete_w_bound_client(self): + RESPONSE = {} + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_delete_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.delete() + + self.assertEqual(api._subscription_deleted, self.SUB_PATH) + + def test_delete_w_alternate_client(self): + RESPONSE = {} + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_delete_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic, + self.DEADLINE, self.ENDPOINT) + + subscription.delete(client=client2) + + self.assertEqual(api._subscription_deleted, self.SUB_PATH) + + def test_modify_push_config_w_endpoint_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_modify_push_config_response = {} + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.modify_push_configuration(push_endpoint=self.ENDPOINT) + + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + self.assertEqual(api._subscription_modified_push_config, + (self.SUB_PATH, self.ENDPOINT)) + + def test_modify_push_config_wo_endpoint_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) 
+ api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_modify_push_config_response = {} + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic, + push_endpoint=self.ENDPOINT) + + subscription.modify_push_configuration(push_endpoint=None, + client=client2) + + self.assertIsNone(subscription.push_endpoint) + self.assertEqual(api._subscription_modified_push_config, + (self.SUB_PATH, None)) + + def test_pull_wo_return_immediately_max_messages_w_bound_client(self): + import base64 + from google.cloud.pubsub.message import Message + ACK_ID = 'DEADBEEF' + MSG_ID = 'BEADCAFE' + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD) + MESSAGE = {'messageId': MSG_ID, 'data': B64} + REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_pull_response = [REC_MESSAGE] + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + pulled = subscription.pull() + + self.assertEqual(len(pulled), 1) + ack_id, message = pulled[0] + self.assertEqual(ack_id, ACK_ID) + self.assertIsInstance(message, Message) + self.assertEqual(message.data, PAYLOAD) + self.assertEqual(message.message_id, MSG_ID) + self.assertEqual(message.attributes, {}) + self.assertEqual(api._subscription_pulled, + (self.SUB_PATH, False, 1)) + + def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): + import base64 + from google.cloud.pubsub.message import Message + ACK_ID = 'DEADBEEF' + MSG_ID = 'BEADCAFE' + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD) + MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} + REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + 
api._subscription_pull_response = [REC_MESSAGE] + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + pulled = subscription.pull(return_immediately=True, max_messages=3, + client=client2) + + self.assertEqual(len(pulled), 1) + ack_id, message = pulled[0] + self.assertEqual(ack_id, ACK_ID) + self.assertIsInstance(message, Message) + self.assertEqual(message.data, PAYLOAD) + self.assertEqual(message.message_id, MSG_ID) + self.assertEqual(message.attributes, {'a': 'b'}) + self.assertEqual(api._subscription_pulled, + (self.SUB_PATH, True, 3)) + + def test_pull_wo_receivedMessages(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_pull_response = {} + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + pulled = subscription.pull(return_immediately=False) + + self.assertEqual(len(pulled), 0) + self.assertEqual(api._subscription_pulled, + (self.SUB_PATH, False, 1)) + + def test_acknowledge_w_bound_client(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_acknowlege_response = {} + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.acknowledge([ACK_ID1, ACK_ID2]) + + self.assertEqual(api._subscription_acked, + (self.SUB_PATH, [ACK_ID1, ACK_ID2])) + + def test_acknowledge_w_alternate_client(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_acknowlege_response = {} + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.acknowledge([ACK_ID1, ACK_ID2], client=client2) + + 
self.assertEqual(api._subscription_acked, + (self.SUB_PATH, [ACK_ID1, ACK_ID2])) + + def test_modify_ack_deadline_w_bound_client(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_modify_ack_deadline_response = {} + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.modify_ack_deadline([ACK_ID1, ACK_ID2], self.DEADLINE) + + self.assertEqual(api._subscription_modified_ack_deadline, + (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) + + def test_modify_ack_deadline_w_alternate_client(self): + ACK_ID1 = 'DEADBEEF' + ACK_ID2 = 'BEADCAFE' + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_modify_ack_deadline_response = {} + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + subscription.modify_ack_deadline( + [ACK_ID1, ACK_ID2], self.DEADLINE, client=client2) + + self.assertEqual(api._subscription_modified_ack_deadline, + (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) + + def test_get_iam_policy_w_bound_client(self): + from google.cloud.pubsub.iam import ( + PUBSUB_ADMIN_ROLE, + PUBSUB_EDITOR_ROLE, + PUBSUB_VIEWER_ROLE, + PUBSUB_PUBLISHER_ROLE, + PUBSUB_SUBSCRIBER_ROLE, + ) + OWNER1 = 'user:phred@example.com' + OWNER2 = 'group:cloud-logs@google.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + PUBLISHER = 'user:phred@example.com' + SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + POLICY = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': 
PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, + ], + } + client = _Client(project=self.PROJECT) + api = client.iam_policy_api = _FauxIAMPolicy() + api._get_iam_policy_response = POLICY + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + policy = subscription.get_iam_policy() + + self.assertEqual(policy.etag, 'DEADBEEF') + self.assertEqual(policy.version, 17) + self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) + self.assertEqual(sorted(policy.publishers), [PUBLISHER]) + self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) + self.assertEqual(api._got_iam_policy, self.SUB_PATH) + + def test_get_iam_policy_w_alternate_client(self): + POLICY = { + 'etag': 'ACAB', + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.iam_policy_api = _FauxIAMPolicy() + api._get_iam_policy_response = POLICY + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + policy = subscription.get_iam_policy(client=client2) + + self.assertEqual(policy.etag, 'ACAB') + self.assertIsNone(policy.version) + self.assertEqual(sorted(policy.owners), []) + self.assertEqual(sorted(policy.editors), []) + self.assertEqual(sorted(policy.viewers), []) + + self.assertEqual(api._got_iam_policy, self.SUB_PATH) + + def test_set_iam_policy_w_bound_client(self): + from google.cloud.pubsub.iam import Policy + from google.cloud.pubsub.iam import ( + PUBSUB_ADMIN_ROLE, + PUBSUB_EDITOR_ROLE, + PUBSUB_VIEWER_ROLE, + PUBSUB_PUBLISHER_ROLE, + PUBSUB_SUBSCRIBER_ROLE, + ) + OWNER1 = 'group:cloud-logs@google.com' + OWNER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 
'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + PUBLISHER = 'user:phred@example.com' + SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + POLICY = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, + ], + } + RESPONSE = POLICY.copy() + RESPONSE['etag'] = 'ABACABAF' + RESPONSE['version'] = 18 + client = _Client(project=self.PROJECT) + api = client.iam_policy_api = _FauxIAMPolicy() + api._set_iam_policy_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + policy = Policy('DEADBEEF', 17) + policy.owners.add(OWNER1) + policy.owners.add(OWNER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) + policy.publishers.add(PUBLISHER) + policy.subscribers.add(SUBSCRIBER) + + new_policy = subscription.set_iam_policy(policy) + + self.assertEqual(new_policy.etag, 'ABACABAF') + self.assertEqual(new_policy.version, 18) + self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) + self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) + self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) + self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) + self.assertEqual(api._set_iam_policy, (self.SUB_PATH, POLICY)) + + def test_set_iam_policy_w_alternate_client(self): + from google.cloud.pubsub.iam import Policy + RESPONSE = {'etag': 'ACAB'} + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.iam_policy_api = 
_FauxIAMPolicy() + api._set_iam_policy_response = RESPONSE + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + policy = Policy() + new_policy = subscription.set_iam_policy(policy, client=client2) + + self.assertEqual(new_policy.etag, 'ACAB') + self.assertIsNone(new_policy.version) + self.assertEqual(sorted(new_policy.owners), []) + self.assertEqual(sorted(new_policy.editors), []) + self.assertEqual(sorted(new_policy.viewers), []) + self.assertEqual(api._set_iam_policy, (self.SUB_PATH, {})) + + def test_check_iam_permissions_w_bound_client(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] + client = _Client(project=self.PROJECT) + api = client.iam_policy_api = _FauxIAMPolicy() + api._test_iam_permissions_response = ROLES[:-1] + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._makeOne(self.SUB_NAME, topic) + + allowed = subscription.check_iam_permissions(ROLES) + + self.assertEqual(allowed, ROLES[:-1]) + self.assertEqual(api._tested_iam_permissions, + (self.SUB_PATH, ROLES)) + + def test_check_iam_permissions_w_alternate_client(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.iam_policy_api = _FauxIAMPolicy() + api._test_iam_permissions_response = [] + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._makeOne(self.SUB_NAME, topic) + + allowed = subscription.check_iam_permissions(ROLES, client=client2) + + self.assertEqual(len(allowed), 0) + self.assertEqual(api._tested_iam_permissions, + (self.SUB_PATH, ROLES)) + + +class _FauxSubscribererAPI(object): + + def 
subscription_create(self, subscription_path, topic_path, + ack_deadline=None, push_endpoint=None): + self._subscription_created = ( + subscription_path, topic_path, ack_deadline, push_endpoint) + return self._subscription_create_response + + def subscription_get(self, subscription_path): + from google.cloud.exceptions import NotFound + self._subscription_got = subscription_path + try: + return self._subscription_get_response + except AttributeError: + raise NotFound(subscription_path) + + def subscription_delete(self, subscription_path): + self._subscription_deleted = subscription_path + return self._subscription_delete_response + + def subscription_modify_push_config( + self, subscription_path, push_endpoint): + self._subscription_modified_push_config = ( + subscription_path, push_endpoint) + return self._subscription_modify_push_config_response + + def subscription_pull(self, subscription_path, return_immediately, + max_messages): + self._subscription_pulled = ( + subscription_path, return_immediately, max_messages) + return self._subscription_pull_response + + def subscription_acknowledge(self, subscription_path, ack_ids): + self._subscription_acked = (subscription_path, ack_ids) + return self._subscription_acknowlege_response + + def subscription_modify_ack_deadline(self, subscription_path, ack_ids, + ack_deadline): + self._subscription_modified_ack_deadline = ( + subscription_path, ack_ids, ack_deadline) + return self._subscription_modify_ack_deadline_response + + +class TestAutoAck(unittest.TestCase): + + def _getTargetClass(self): + from google.cloud.pubsub.subscription import AutoAck + return AutoAck + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_defaults(self): + subscription = _FauxSubscription(()) + auto_ack = self._makeOne(subscription) + self.assertEqual(auto_ack._return_immediately, False) + self.assertEqual(auto_ack._max_messages, 1) + self.assertIsNone(auto_ack._client) + + def 
test_ctor_explicit(self): + CLIENT = object() + subscription = _FauxSubscription(()) + auto_ack = self._makeOne( + subscription, return_immediately=True, max_messages=10, + client=CLIENT) + self.assertIs(auto_ack._subscription, subscription) + self.assertEqual(auto_ack._return_immediately, True) + self.assertEqual(auto_ack._max_messages, 10) + self.assertIs(auto_ack._client, CLIENT) + + def test___enter___w_defaults(self): + subscription = _FauxSubscription(()) + auto_ack = self._makeOne(subscription) + + with auto_ack as returned: + pass + + self.assertIs(returned, auto_ack) + self.assertEqual(subscription._return_immediately, False) + self.assertEqual(subscription._max_messages, 1) + self.assertIsNone(subscription._client) + + def test___enter___w_explicit(self): + CLIENT = object() + subscription = _FauxSubscription(()) + auto_ack = self._makeOne( + subscription, return_immediately=True, max_messages=10, + client=CLIENT) + + with auto_ack as returned: + pass + + self.assertIs(returned, auto_ack) + self.assertEqual(subscription._return_immediately, True) + self.assertEqual(subscription._max_messages, 10) + self.assertIs(subscription._client, CLIENT) + + def test___exit___(self): + CLIENT = object() + ACK_ID1, MESSAGE1 = 'ACK_ID1', _FallibleMessage() + ACK_ID2, MESSAGE2 = 'ACK_ID2', _FallibleMessage() + ACK_ID3, MESSAGE3 = 'ACK_ID3', _FallibleMessage(True) + ITEMS = [ + (ACK_ID1, MESSAGE1), + (ACK_ID2, MESSAGE2), + (ACK_ID3, MESSAGE3), + ] + subscription = _FauxSubscription(ITEMS) + auto_ack = self._makeOne(subscription, client=CLIENT) + with auto_ack: + for ack_id, message in list(auto_ack.items()): + if message.fail: + del auto_ack[ack_id] + self.assertEqual(sorted(subscription._acknowledged), + [ACK_ID1, ACK_ID2]) + self.assertIs(subscription._ack_client, CLIENT) + + +class _FauxIAMPolicy(object): + + def get_iam_policy(self, target_path): + self._got_iam_policy = target_path + return self._get_iam_policy_response + + def set_iam_policy(self, target_path, 
policy): + self._set_iam_policy = target_path, policy + return self._set_iam_policy_response + + def test_iam_permissions(self, target_path, permissions): + self._tested_iam_permissions = target_path, permissions + return self._test_iam_permissions_response + + +class _Topic(object): + + def __init__(self, name, client): + self.name = name + self._client = client + self.project = client.project + self.full_name = 'projects/%s/topics/%s' % (client.project, name) + self.path = '/projects/%s/topics/%s' % (client.project, name) + + +class _Client(object): + + connection = None + + def __init__(self, project): + self.project = project + + def topic(self, name, timestamp_messages=False): + from google.cloud.pubsub.topic import Topic + return Topic(name, client=self, timestamp_messages=timestamp_messages) + + +class _FallibleMessage(object): + + def __init__(self, fail=False): + self.fail = fail + + +class _FauxSubscription(object): + + def __init__(self, items): + self._items = items + self._mapping = dict(items) + self._acknowledged = set() + + def pull(self, return_immediately=False, max_messages=1, client=None): + self._return_immediately = return_immediately + self._max_messages = max_messages + self._client = client + return self._items + + def acknowledge(self, ack_ids, client=None): + self._ack_client = client + for ack_id in ack_ids: + message = self._mapping[ack_id] + assert not message.fail + self._acknowledged.add(ack_id) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py new file mode 100644 index 000000000000..58e819e187b7 --- /dev/null +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -0,0 +1,803 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + + +class TestTopic(unittest.TestCase): + PROJECT = 'PROJECT' + TOPIC_NAME = 'topic_name' + TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + + def _getTargetClass(self): + from google.cloud.pubsub.topic import Topic + return Topic + + def _makeOne(self, *args, **kw): + return self._getTargetClass()(*args, **kw) + + def test_ctor_w_explicit_timestamp(self): + client = _Client(project=self.PROJECT) + topic = self._makeOne(self.TOPIC_NAME, + client=client, + timestamp_messages=True) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.project, self.PROJECT) + self.assertEqual(topic.full_name, self.TOPIC_PATH) + self.assertTrue(topic.timestamp_messages) + + def test_from_api_repr(self): + client = _Client(project=self.PROJECT) + resource = {'name': self.TOPIC_PATH} + klass = self._getTargetClass() + topic = klass.from_api_repr(resource, client=client) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertIs(topic._client, client) + self.assertEqual(topic.project, self.PROJECT) + self.assertEqual(topic.full_name, self.TOPIC_PATH) + + def test_from_api_repr_with_bad_client(self): + PROJECT1 = 'PROJECT1' + PROJECT2 = 'PROJECT2' + client = _Client(project=PROJECT1) + PATH = 'projects/%s/topics/%s' % (PROJECT2, self.TOPIC_NAME) + resource = {'name': PATH} + klass = self._getTargetClass() + self.assertRaises(ValueError, klass.from_api_repr, + resource, client=client) + + def test_create_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + 
api._topic_create_response = {'name': self.TOPIC_PATH} + topic = self._makeOne(self.TOPIC_NAME, client=client) + + topic.create() + + self.assertEqual(api._topic_created, self.TOPIC_PATH) + + def test_create_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.publisher_api = _FauxPublisherAPI() + api._topic_create_response = {'name': self.TOPIC_PATH} + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + topic.create(client=client2) + + self.assertEqual(api._topic_created, self.TOPIC_PATH) + + def test_exists_miss_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + topic = self._makeOne(self.TOPIC_NAME, client=client) + + self.assertFalse(topic.exists()) + + self.assertEqual(api._topic_got, self.TOPIC_PATH) + + def test_exists_hit_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.publisher_api = _FauxPublisherAPI() + api._topic_get_response = {'name': self.TOPIC_PATH} + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + self.assertTrue(topic.exists(client=client2)) + + self.assertEqual(api._topic_got, self.TOPIC_PATH) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_delete_response = {} + topic = self._makeOne(self.TOPIC_NAME, client=client) + + topic.delete() + + self.assertEqual(api._topic_deleted, self.TOPIC_PATH) + + def test_delete_w_alternate_client(self): + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.publisher_api = _FauxPublisherAPI() + api._topic_delete_response = {} + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + topic.delete(client=client2) + + self.assertEqual(api._topic_deleted, self.TOPIC_PATH) + + def test_publish_single_bytes_wo_attrs_w_bound_client(self): + 
import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MSGID = 'DEADBEEF' + MESSAGE = {'data': B64, 'attributes': {}} + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID] + topic = self._makeOne(self.TOPIC_NAME, client=client) + + msgid = topic.publish(PAYLOAD) + + self.assertEqual(msgid, MSGID) + self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + + def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): + import base64 + import datetime + from google.cloud.pubsub import topic as MUT + from google.cloud._helpers import _RFC3339_MICROS + from google.cloud._testing import _Monkey + NOW = datetime.datetime.utcnow() + + def _utcnow(): + return NOW + + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MSGID = 'DEADBEEF' + MESSAGE = { + 'data': B64, + 'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID] + + topic = self._makeOne(self.TOPIC_NAME, client=client1, + timestamp_messages=True) + with _Monkey(MUT, _NOW=_utcnow): + msgid = topic.publish(PAYLOAD, client=client2) + + self.assertEqual(msgid, MSGID) + self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + + def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MSGID = 'DEADBEEF' + OVERRIDE = '2015-04-10T16:46:22.868399Z' + MESSAGE = {'data': B64, + 'attributes': {'timestamp': OVERRIDE}} + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID] + topic = self._makeOne(self.TOPIC_NAME, client=client, + 
timestamp_messages=True) + + msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) + + self.assertEqual(msgid, MSGID) + self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + + def test_publish_single_w_attrs(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MSGID = 'DEADBEEF' + MESSAGE = {'data': B64, + 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID] + topic = self._makeOne(self.TOPIC_NAME, client=client) + + msgid = topic.publish(PAYLOAD, attr1='value1', attr2='value2') + + self.assertEqual(msgid, MSGID) + self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + + def test_publish_multiple_w_bound_client(self): + import base64 + PAYLOAD1 = b'This is the first message text' + PAYLOAD2 = b'This is the second message text' + B64_1 = base64.b64encode(PAYLOAD1) + B64_2 = base64.b64encode(PAYLOAD2) + MSGID1 = 'DEADBEEF' + MSGID2 = 'BEADCAFE' + MESSAGE1 = {'data': B64_1.decode('ascii'), + 'attributes': {}} + MESSAGE2 = {'data': B64_2.decode('ascii'), + 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID1, MSGID2] + topic = self._makeOne(self.TOPIC_NAME, client=client) + + with topic.batch() as batch: + batch.publish(PAYLOAD1) + batch.publish(PAYLOAD2, attr1='value1', attr2='value2') + + self.assertEqual(list(batch), [MSGID1, MSGID2]) + self.assertEqual(list(batch.messages), []) + self.assertEqual(api._topic_published, + (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) + + def test_publish_w_no_messages(self): + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [] + topic = self._makeOne(self.TOPIC_NAME, client=client) + + with topic.batch() as batch: 
+ pass + + self.assertEqual(list(batch.messages), []) + self.assertEqual(api._api_called, 0) + + def test_publish_multiple_w_alternate_client(self): + import base64 + PAYLOAD1 = b'This is the first message text' + PAYLOAD2 = b'This is the second message text' + B64_1 = base64.b64encode(PAYLOAD1) + B64_2 = base64.b64encode(PAYLOAD2) + MSGID1 = 'DEADBEEF' + MSGID2 = 'BEADCAFE' + MESSAGE1 = {'data': B64_1.decode('ascii'), 'attributes': {}} + MESSAGE2 = { + 'data': B64_2.decode('ascii'), + 'attributes': {'attr1': 'value1', 'attr2': 'value2'}, + } + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID1, MSGID2] + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + with topic.batch(client=client2) as batch: + batch.publish(PAYLOAD1) + batch.publish(PAYLOAD2, attr1='value1', attr2='value2') + + self.assertEqual(list(batch), [MSGID1, MSGID2]) + self.assertEqual(list(batch.messages), []) + self.assertEqual(api._topic_published, + (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) + + def test_publish_multiple_error(self): + PAYLOAD1 = b'This is the first message text' + PAYLOAD2 = b'This is the second message text' + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + topic = self._makeOne(self.TOPIC_NAME, client=client) + + try: + with topic.batch() as batch: + batch.publish(PAYLOAD1) + batch.publish(PAYLOAD2, attr1='value1', attr2='value2') + raise _Bugout() + except _Bugout: + pass + + self.assertEqual(list(batch), []) + self.assertEqual(getattr(api, '_topic_published', self), self) + + def test_subscription(self): + from google.cloud.pubsub.subscription import Subscription + client = _Client(project=self.PROJECT) + topic = self._makeOne(self.TOPIC_NAME, client=client) + + SUBSCRIPTION_NAME = 'subscription_name' + subscription = topic.subscription(SUBSCRIPTION_NAME) + self.assertIsInstance(subscription, Subscription) 
+ self.assertEqual(subscription.name, SUBSCRIPTION_NAME) + self.assertIs(subscription.topic, topic) + + def test_list_subscriptions_no_paging(self): + from google.cloud.pubsub.subscription import Subscription + SUB_NAME_1 = 'subscription_1' + SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( + self.PROJECT, SUB_NAME_1) + SUB_NAME_2 = 'subscription_2' + SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( + self.PROJECT, SUB_NAME_2) + SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] + TOKEN = 'TOKEN' + + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_list_subscriptions_response = SUBS_LIST, TOKEN + topic = self._makeOne(self.TOPIC_NAME, client=client) + + subscriptions, next_page_token = topic.list_subscriptions() + + self.assertEqual(len(subscriptions), 2) + + subscription = subscriptions[0] + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscriptions[0].name, SUB_NAME_1) + self.assertIs(subscription.topic, topic) + + subscription = subscriptions[1] + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscriptions[1].name, SUB_NAME_2) + self.assertIs(subscription.topic, topic) + + self.assertEqual(next_page_token, TOKEN) + self.assertEqual(api._topic_listed, + (self.TOPIC_PATH, None, None)) + + def test_list_subscriptions_with_paging(self): + from google.cloud.pubsub.subscription import Subscription + SUB_NAME_1 = 'subscription_1' + SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( + self.PROJECT, SUB_NAME_1) + SUB_NAME_2 = 'subscription_2' + SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( + self.PROJECT, SUB_NAME_2) + SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] + PAGE_SIZE = 10 + TOKEN = 'TOKEN' + + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_list_subscriptions_response = SUBS_LIST, None + topic = self._makeOne(self.TOPIC_NAME, client=client) + + subscriptions, next_page_token = topic.list_subscriptions( + page_size=PAGE_SIZE, 
page_token=TOKEN) + + self.assertEqual(len(subscriptions), 2) + + subscription = subscriptions[0] + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscriptions[0].name, SUB_NAME_1) + self.assertIs(subscription.topic, topic) + + subscription = subscriptions[1] + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscriptions[1].name, SUB_NAME_2) + self.assertIs(subscription.topic, topic) + + self.assertIsNone(next_page_token) + self.assertEqual(api._topic_listed, + (self.TOPIC_PATH, PAGE_SIZE, TOKEN)) + + def test_list_subscriptions_missing_key(self): + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_list_subscriptions_response = (), None + topic = self._makeOne(self.TOPIC_NAME, client=client) + + subscriptions, next_page_token = topic.list_subscriptions() + + self.assertEqual(len(subscriptions), 0) + self.assertIsNone(next_page_token) + + self.assertEqual(api._topic_listed, + (self.TOPIC_PATH, None, None)) + + def test_get_iam_policy_w_bound_client(self): + from google.cloud.pubsub.iam import ( + PUBSUB_ADMIN_ROLE, + PUBSUB_EDITOR_ROLE, + PUBSUB_VIEWER_ROLE, + PUBSUB_PUBLISHER_ROLE, + PUBSUB_SUBSCRIBER_ROLE, + ) + OWNER1 = 'user:phred@example.com' + OWNER2 = 'group:cloud-logs@google.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + PUBLISHER = 'user:phred@example.com' + SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + POLICY = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, + ], + } + + client = _Client(project=self.PROJECT) 
+ api = client.iam_policy_api = _FauxIAMPolicy() + api._get_iam_policy_response = POLICY + topic = self._makeOne(self.TOPIC_NAME, client=client) + + policy = topic.get_iam_policy() + + self.assertEqual(policy.etag, 'DEADBEEF') + self.assertEqual(policy.version, 17) + self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) + self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) + self.assertEqual(sorted(policy.publishers), [PUBLISHER]) + self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) + self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) + + def test_get_iam_policy_w_alternate_client(self): + POLICY = { + 'etag': 'ACAB', + } + + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.iam_policy_api = _FauxIAMPolicy() + api._get_iam_policy_response = POLICY + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + policy = topic.get_iam_policy(client=client2) + + self.assertEqual(policy.etag, 'ACAB') + self.assertIsNone(policy.version) + self.assertEqual(sorted(policy.owners), []) + self.assertEqual(sorted(policy.editors), []) + self.assertEqual(sorted(policy.viewers), []) + + self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) + + def test_set_iam_policy_w_bound_client(self): + from google.cloud.pubsub.iam import Policy + from google.cloud.pubsub.iam import ( + PUBSUB_ADMIN_ROLE, + PUBSUB_EDITOR_ROLE, + PUBSUB_VIEWER_ROLE, + PUBSUB_PUBLISHER_ROLE, + PUBSUB_SUBSCRIBER_ROLE, + ) + OWNER1 = 'group:cloud-logs@google.com' + OWNER2 = 'user:phred@example.com' + EDITOR1 = 'domain:google.com' + EDITOR2 = 'user:phred@example.com' + VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' + VIEWER2 = 'user:phred@example.com' + PUBLISHER = 'user:phred@example.com' + SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + POLICY = { + 'etag': 'DEADBEEF', + 'version': 17, + 'bindings': [ + {'role': PUBSUB_ADMIN_ROLE, + 'members': [OWNER1, 
OWNER2]}, + {'role': PUBSUB_EDITOR_ROLE, + 'members': [EDITOR1, EDITOR2]}, + {'role': PUBSUB_VIEWER_ROLE, + 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, + 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, + 'members': [SUBSCRIBER]}, + ], + } + RESPONSE = POLICY.copy() + RESPONSE['etag'] = 'ABACABAF' + RESPONSE['version'] = 18 + + client = _Client(project=self.PROJECT) + api = client.iam_policy_api = _FauxIAMPolicy() + api._set_iam_policy_response = RESPONSE + topic = self._makeOne(self.TOPIC_NAME, client=client) + policy = Policy('DEADBEEF', 17) + policy.owners.add(OWNER1) + policy.owners.add(OWNER2) + policy.editors.add(EDITOR1) + policy.editors.add(EDITOR2) + policy.viewers.add(VIEWER1) + policy.viewers.add(VIEWER2) + policy.publishers.add(PUBLISHER) + policy.subscribers.add(SUBSCRIBER) + + new_policy = topic.set_iam_policy(policy) + + self.assertEqual(new_policy.etag, 'ABACABAF') + self.assertEqual(new_policy.version, 18) + self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) + self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) + self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) + self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) + self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) + self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, POLICY)) + + def test_set_iam_policy_w_alternate_client(self): + from google.cloud.pubsub.iam import Policy + RESPONSE = {'etag': 'ACAB'} + + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.iam_policy_api = _FauxIAMPolicy() + api._set_iam_policy_response = RESPONSE + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + policy = Policy() + new_policy = topic.set_iam_policy(policy, client=client2) + + self.assertEqual(new_policy.etag, 'ACAB') + self.assertIsNone(new_policy.version) + self.assertEqual(sorted(new_policy.owners), []) + self.assertEqual(sorted(new_policy.editors), []) + 
self.assertEqual(sorted(new_policy.viewers), []) + + self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, {})) + + def test_check_iam_permissions_w_bound_client(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] + client = _Client(project=self.PROJECT) + api = client.iam_policy_api = _FauxIAMPolicy() + api._test_iam_permissions_response = ROLES[:-1] + topic = self._makeOne(self.TOPIC_NAME, client=client) + + allowed = topic.check_iam_permissions(ROLES) + + self.assertEqual(allowed, ROLES[:-1]) + self.assertEqual(api._tested_iam_permissions, + (self.TOPIC_PATH, ROLES)) + + def test_check_iam_permissions_w_alternate_client(self): + from google.cloud.pubsub.iam import OWNER_ROLE + from google.cloud.pubsub.iam import EDITOR_ROLE + from google.cloud.pubsub.iam import VIEWER_ROLE + + ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.iam_policy_api = _FauxIAMPolicy() + api._test_iam_permissions_response = [] + topic = self._makeOne(self.TOPIC_NAME, client=client1) + + allowed = topic.check_iam_permissions(ROLES, client=client2) + + self.assertEqual(len(allowed), 0) + self.assertEqual(api._tested_iam_permissions, + (self.TOPIC_PATH, ROLES)) + + +class TestBatch(unittest.TestCase): + PROJECT = 'PROJECT' + + def _getTargetClass(self): + from google.cloud.pubsub.topic import Batch + return Batch + + def _makeOne(self, *args, **kwargs): + return self._getTargetClass()(*args, **kwargs) + + def test_ctor_defaults(self): + topic = _Topic() + client = _Client(project=self.PROJECT) + batch = self._makeOne(topic, client) + self.assertIs(batch.topic, topic) + self.assertIs(batch.client, client) + self.assertEqual(len(batch.messages), 0) + self.assertEqual(len(batch.message_ids), 0) + + def test___iter___empty(self): + topic = 
_Topic() + client = object() + batch = self._makeOne(topic, client) + self.assertEqual(list(batch), []) + + def test___iter___non_empty(self): + topic = _Topic() + client = object() + batch = self._makeOne(topic, client) + batch.message_ids[:] = ['ONE', 'TWO', 'THREE'] + self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) + + def test_publish_bytes_wo_attrs(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MESSAGE = {'data': B64, + 'attributes': {}} + client = _Client(project=self.PROJECT) + topic = _Topic() + batch = self._makeOne(topic, client=client) + batch.publish(PAYLOAD) + self.assertEqual(batch.messages, [MESSAGE]) + + def test_publish_bytes_w_add_timestamp(self): + import base64 + PAYLOAD = b'This is the message text' + B64 = base64.b64encode(PAYLOAD).decode('ascii') + MESSAGE = {'data': B64, + 'attributes': {'timestamp': 'TIMESTAMP'}} + client = _Client(project=self.PROJECT) + topic = _Topic(timestamp_messages=True) + batch = self._makeOne(topic, client=client) + batch.publish(PAYLOAD) + self.assertEqual(batch.messages, [MESSAGE]) + + def test_commit_w_bound_client(self): + import base64 + PAYLOAD1 = b'This is the first message text' + PAYLOAD2 = b'This is the second message text' + B64_1 = base64.b64encode(PAYLOAD1) + B64_2 = base64.b64encode(PAYLOAD2) + MSGID1 = 'DEADBEEF' + MSGID2 = 'BEADCAFE' + MESSAGE1 = {'data': B64_1.decode('ascii'), + 'attributes': {}} + MESSAGE2 = {'data': B64_2.decode('ascii'), + 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} + client = _Client(project='PROJECT') + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID1, MSGID2] + topic = _Topic() + batch = self._makeOne(topic, client=client) + + batch.publish(PAYLOAD1) + batch.publish(PAYLOAD2, attr1='value1', attr2='value2') + batch.commit() + + self.assertEqual(list(batch), [MSGID1, MSGID2]) + self.assertEqual(list(batch.messages), []) + 
self.assertEqual(api._topic_published, + (topic.full_name, [MESSAGE1, MESSAGE2])) + + def test_commit_w_alternate_client(self): + import base64 + PAYLOAD1 = b'This is the first message text' + PAYLOAD2 = b'This is the second message text' + B64_1 = base64.b64encode(PAYLOAD1) + B64_2 = base64.b64encode(PAYLOAD2) + MSGID1 = 'DEADBEEF' + MSGID2 = 'BEADCAFE' + MESSAGE1 = {'data': B64_1.decode('ascii'), + 'attributes': {}} + MESSAGE2 = {'data': B64_2.decode('ascii'), + 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} + client1 = _Client(project='PROJECT') + client2 = _Client(project='PROJECT') + api = client2.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID1, MSGID2] + topic = _Topic() + batch = self._makeOne(topic, client=client1) + + batch.publish(PAYLOAD1) + batch.publish(PAYLOAD2, attr1='value1', attr2='value2') + batch.commit(client=client2) + + self.assertEqual(list(batch), [MSGID1, MSGID2]) + self.assertEqual(list(batch.messages), []) + self.assertEqual(api._topic_published, + (topic.full_name, [MESSAGE1, MESSAGE2])) + + def test_context_mgr_success(self): + import base64 + PAYLOAD1 = b'This is the first message text' + PAYLOAD2 = b'This is the second message text' + B64_1 = base64.b64encode(PAYLOAD1) + B64_2 = base64.b64encode(PAYLOAD2) + MSGID1 = 'DEADBEEF' + MSGID2 = 'BEADCAFE' + MESSAGE1 = {'data': B64_1.decode('ascii'), + 'attributes': {}} + MESSAGE2 = {'data': B64_2.decode('ascii'), + 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} + client = _Client(project='PROJECT') + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID1, MSGID2] + topic = _Topic() + batch = self._makeOne(topic, client=client) + + with batch as other: + batch.publish(PAYLOAD1) + batch.publish(PAYLOAD2, attr1='value1', attr2='value2') + + self.assertIs(other, batch) + self.assertEqual(list(batch), [MSGID1, MSGID2]) + self.assertEqual(list(batch.messages), []) + self.assertEqual(api._topic_published, + 
class _FauxPublisherAPI(object):
    """Test double for the publisher API, recording each call's arguments.

    Tests pre-set a ``_<method>_response`` attribute; each method records
    what it was called with and returns that canned response.
    """

    # Count of topic_publish() invocations (class default, bumped per call).
    _api_called = 0

    def topic_create(self, topic_path):
        # Record the requested path for later assertions.
        self._topic_created = topic_path
        return self._topic_create_response

    def topic_get(self, topic_path):
        from google.cloud.exceptions import NotFound
        self._topic_got = topic_path
        try:
            return self._topic_get_response
        except AttributeError:
            # No canned response configured: behave like a missing topic.
            raise NotFound(topic_path)

    def topic_delete(self, topic_path):
        self._topic_deleted = topic_path
        return self._topic_delete_response

    def topic_publish(self, topic_path, messages):
        self._topic_published = topic_path, messages
        self._api_called += 1
        return self._topic_publish_response

    def topic_list_subscriptions(self, topic_path, page_size=None,
                                 page_token=None):
        self._topic_listed = topic_path, page_size, page_token
        return self._topic_list_subscriptions_response
self._set_iam_policy = target_path, policy + return self._set_iam_policy_response + + def test_iam_permissions(self, target_path, permissions): + self._tested_iam_permissions = target_path, permissions + return self._test_iam_permissions_response + + +class _Topic(object): + + def __init__(self, name="NAME", project="PROJECT", + timestamp_messages=False): + self.full_name = 'projects/%s/topics/%s' % (project, name) + self.path = '/%s' % (self.full_name,) + self.timestamp_messages = timestamp_messages + + def _timestamp_message(self, attrs): + if self.timestamp_messages: + attrs['timestamp'] = 'TIMESTAMP' + + +class _Client(object): + + connection = None + + def __init__(self, project): + self.project = project + + +class _Bugout(Exception): + pass From 0c06ab0d9bac329531f733043163a8d0c5a3db78 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 26 Sep 2016 12:46:48 -0700 Subject: [PATCH 0002/1197] Making pubsub subpackage into a proper package. - Adding README, setup.py, MANIFEST.in, .coveragerc and tox.ini - Adding google-cloud-pubsub as a dependency to the umbrella package - Adding the pubsub subdirectory into the list of packages for verifying the docs - Incorporating the pubsub subdirectory into the umbrella coverage report - Adding the pubsub only tox tests to the Travis config - Adding {toxinidir}/../core as a dependency for the pubsub tox config --- packages/google-cloud-pubsub/.coveragerc | 11 ++++ packages/google-cloud-pubsub/MANIFEST.in | 4 ++ packages/google-cloud-pubsub/README.rst | 61 ++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 72 ++++++++++++++++++++++++ packages/google-cloud-pubsub/tox.ini | 30 ++++++++++ 5 files changed, 178 insertions(+) create mode 100644 packages/google-cloud-pubsub/.coveragerc create mode 100644 packages/google-cloud-pubsub/MANIFEST.in create mode 100644 packages/google-cloud-pubsub/README.rst create mode 100644 packages/google-cloud-pubsub/setup.py create mode 100644 packages/google-cloud-pubsub/tox.ini diff 
--git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc new file mode 100644 index 000000000000..a54b99aa14b7 --- /dev/null +++ b/packages/google-cloud-pubsub/.coveragerc @@ -0,0 +1,11 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in new file mode 100644 index 000000000000..cb3a2b9ef4fa --- /dev/null +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -0,0 +1,4 @@ +include README.rst +graft google +graft unit_tests +global-exclude *.pyc diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst new file mode 100644 index 000000000000..201a61e3eaf2 --- /dev/null +++ b/packages/google-cloud-pubsub/README.rst @@ -0,0 +1,61 @@ +Python Client for Google Cloud Pub / Sub +======================================== + + Python idiomatic client for `Google Cloud Pub / Sub`_ + +.. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/docs + +- `Homepage`_ +- `API Documentation`_ + +.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ +.. _API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ + +Quick Start +----------- + +:: + + $ pip install --upgrade google-cloud-pubsub + +Authentication +-------------- + +With ``google-cloud-python`` we try to make authentication as painless as +possible. Check out the `Authentication section`_ in our documentation to +learn more. You may also find the `authentication document`_ shared by all +the ``google-cloud-*`` libraries to be helpful. + +.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. 
_authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication + +Using the API +------------- + +Google `Cloud Pub/Sub`_ (`Pub/Sub API docs`_) is designed to provide reliable, +many-to-many, asynchronous messaging between applications. Publisher +applications can send messages to a ``topic`` and other applications can +subscribe to that topic to receive the messages. By decoupling senders and +receivers, Google Cloud Pub/Sub allows developers to communicate between +independently written applications. + +.. _Cloud Pub/Sub: https://cloud.google.com/pubsub/docs +.. _Pub/Sub API docs: https://cloud.google.com/pubsub/reference/rest/ + +See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect +to Cloud Pub/Sub using this Client Library. + +.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html + +To get started with this API, you'll need to create + +.. code:: python + + from google.cloud import pubsub + + client = pubsub.Client() + topic = client.topic('topic_name') + topic.create() + + topic.publish('this is the message_payload', + attr1='value1', attr2='value2') diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py new file mode 100644 index 000000000000..d60b3f27b7b6 --- /dev/null +++ b/packages/google-cloud-pubsub/setup.py @@ -0,0 +1,72 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from setuptools import find_packages +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + +with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: + README = file_obj.read() + +# NOTE: This is duplicated throughout and we should try to +# consolidate. +SETUP_BASE = { + 'author': 'Google Cloud Platform', + 'author_email': 'jjg+google-cloud-python@google.com', + 'scripts': [], + 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', + 'license': 'Apache 2.0', + 'platforms': 'Posix; MacOS X; Windows', + 'include_package_data': True, + 'zip_safe': False, + 'classifiers': [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Topic :: Internet', + ], +} + + +REQUIREMENTS = [ + 'google-cloud-core', + 'grpcio >= 1.0.0', + 'google-gax >= 0.14.1, < 0.15dev', + 'gapic-google-pubsub-v1 >= 0.9.0, < 0.10dev', + 'grpc-google-pubsub-v1 >= 0.9.0, < 0.10dev', +] + +setup( + name='google-cloud-pubsub', + version='0.20.0dev', + description='Python Client for Google Cloud Pub/Sub', + long_description=README, + namespace_packages=[ + 'google', + 'google.cloud', + ], + packages=find_packages(), + install_requires=REQUIREMENTS, + **SETUP_BASE +) diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini new file mode 100644 index 000000000000..9a9990704fbe --- /dev/null +++ b/packages/google-cloud-pubsub/tox.ini @@ -0,0 +1,30 @@ +[tox] +envlist = + py27,py34,py35,cover + +[testing] +deps = + {toxinidir}/../core + pytest +covercmd = + py.test --quiet \ 
+ --cov=google.cloud.pubsub \ + --cov=unit_tests \ + --cov-config {toxinidir}/.coveragerc \ + unit_tests + +[testenv] +commands = + py.test --quiet {posargs} unit_tests +deps = + {[testing]deps} + +[testenv:cover] +basepython = + python2.7 +commands = + {[testing]covercmd} +deps = + {[testenv]deps} + coverage + pytest-cov From e1435444e56f4504a668c163223884ab11170672 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:53:24 -0700 Subject: [PATCH 0003/1197] Preparing for a release of all packages. Towards #2441. - Updating umbrella README to point at all packages - Putting upper bounds on grpcio in dependencies - Putting lower bounds on all google-cloud-* packages listed as dependencies - Adding `setup.cfg` for universal wheels --- packages/google-cloud-pubsub/setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index d60b3f27b7b6..e4e142594bcf 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,8 +50,8 @@ REQUIREMENTS = [ - 'google-cloud-core', - 'grpcio >= 1.0.0', + 'google-cloud-core >= 0.20.0', + 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', 'gapic-google-pubsub-v1 >= 0.9.0, < 0.10dev', 'grpc-google-pubsub-v1 >= 0.9.0, < 0.10dev', @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.20.0dev', + version='0.20.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 05883b40e8bfd01cc9bcbba4ac31d515e36f7138 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 08:57:43 -0700 Subject: [PATCH 0004/1197] Adding setup.cfg to all packages. 
--- packages/google-cloud-pubsub/setup.cfg | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 packages/google-cloud-pubsub/setup.cfg diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg new file mode 100644 index 000000000000..2a9acf13daa9 --- /dev/null +++ b/packages/google-cloud-pubsub/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 From dd490a777d3cb59728bd1cc8d0d788f86055e9ab Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 14:11:08 -0700 Subject: [PATCH 0005/1197] Upgrading versions of GAPIC and gRPC generated libraries. This resolves the google-gax 0.14.* conflict. --- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e4e142594bcf..6accc0ae8138 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -53,8 +53,8 @@ 'google-cloud-core >= 0.20.0', 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', - 'gapic-google-pubsub-v1 >= 0.9.0, < 0.10dev', - 'grpc-google-pubsub-v1 >= 0.9.0, < 0.10dev', + 'gapic-google-pubsub-v1 >= 0.10.1, < 0.11dev', + 'grpc-google-pubsub-v1 >= 0.10.1, < 0.11dev', ] setup( From 9d664d41bf2d0885c0d3a87012362dfad875a380 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 27 Sep 2016 15:27:14 -0700 Subject: [PATCH 0006/1197] Making sure to use kwargs when calling GAPIC surfaces. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 6d92c818f43c..18811bb93ae4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -293,7 +293,8 @@ def subscription_create(self, subscription_path, topic_path, try: sub_pb = self._gax_api.create_subscription( - subscription_path, topic_path, push_config, ack_deadline) + subscription_path, topic_path, + push_config=push_config, ack_deadline_seconds=ack_deadline) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: raise Conflict(topic_path) @@ -391,7 +392,8 @@ def subscription_pull(self, subscription_path, return_immediately=False, """ try: response_pb = self._gax_api.pull( - subscription_path, max_messages, return_immediately) + subscription_path, max_messages, + return_immediately=return_immediately) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(subscription_path) From 6d7f547a4f40465c8fb97550511f5e0a624264cc Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Sep 2016 14:13:40 -0400 Subject: [PATCH 0007/1197] Disable pylint's 'ungrouped-imports' error. We share the 'google' namespace with third-party pacakages. PEP 8 wants 'local' imports to be separated fro 'third-party' imports, which is more important than pylint's attempt to group them by name alone. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 2 -- packages/google-cloud-pubsub/google/cloud/pubsub/client.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 18811bb93ae4..0a602beab661 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -25,12 +25,10 @@ from grpc import insecure_channel from grpc import StatusCode -# pylint: disable=ungrouped-imports from google.cloud._helpers import _to_bytes from google.cloud._helpers import _pb_timestamp_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -# pylint: enable=ungrouped-imports class _PublisherAPI(object): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 09fca44cfcfc..3909d4a25f3f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -25,7 +25,6 @@ from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic -# pylint: disable=ungrouped-imports try: from google.cloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI from google.cloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI @@ -39,7 +38,6 @@ make_gax_subscriber_api = None else: _HAVE_GAX = True -# pylint: enable=ungrouped-imports _DISABLE_GAX = os.getenv(DISABLE_GRPC, False) From 3cf97b05500eae81274f34de4a2846b7d7542e65 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 29 Sep 2016 16:17:02 -0700 Subject: [PATCH 0008/1197] General clean-up after rename. - Removing "graft google" from MANIFEST for umbrella package. It isn't needed since the umbrella package has no source - Updating license year on copy-pasted namespace package __init__.py files. 
Done via: https://gist.github.com/dhermes/a0e88f891ffffc3ecea5c9bb2f13e4f5 - Removing unused HTML context from docs/conf.py - Setting GH_OWNER AND GH_PROJECT_NAME (which together make the REPO_SLUG) manually in the docs update scripts. This way the env. variables don't need to be set in the Travis UI / CLI. Also updating tox.ini to stop passing those variables through - Removing the root package from `verify_included_modules.py` since it no longer has any source - Updated a docstring reference to a moved class in the Bigtable system test - Removing redundant `GOOGLE_CLOUD_*` in `tox` system test `passenv` (already covered by `GOOGLE_*`) --- packages/google-cloud-pubsub/google/cloud/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index 8ac7b74af136..b2b833373882 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2014 Google Inc. +# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From dfd10e1e81c41dcca3f7c104c94aedf8e729f342 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 3 Oct 2016 21:32:48 -0700 Subject: [PATCH 0009/1197] Updating package README's with more useful doc links. Also removing duplicate "Homepage" links (duplicate of "API Documentation" links). --- packages/google-cloud-pubsub/README.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 201a61e3eaf2..25abe2b5b6d8 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -5,11 +5,9 @@ Python Client for Google Cloud Pub / Sub .. 
_Google Cloud Pub / Sub: https://cloud.google.com/pubsub/docs -- `Homepage`_ -- `API Documentation`_ +- `Documentation`_ -.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ -.. _API Documentation: http://googlecloudplatform.github.io/google-cloud-python/ +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html Quick Start ----------- From 8e547f1ecbb09546c643e4bedeac034bc064b616 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 12 Oct 2016 08:41:58 -0400 Subject: [PATCH 0010/1197] Use correct 'publishTime' field name for server-set message timestamp. Closes #2529. --- packages/google-cloud-pubsub/google/cloud/pubsub/message.py | 2 +- packages/google-cloud-pubsub/unit_tests/test_message.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 2f810baa5e2e..b20b901639be 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -89,5 +89,5 @@ def from_api_repr(cls, api_repr): instance = cls( data=data, message_id=api_repr['messageId'], attributes=api_repr.get('attributes')) - instance._service_timestamp = api_repr.get('publishTimestamp') + instance._service_timestamp = api_repr.get('publishTime') return instance diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 5d08972e5430..8187eea3cf06 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -98,7 +98,7 @@ def test_from_api_repr_no_attributes(self): api_repr = { 'data': B64_DATA, 'messageId': MESSAGE_ID, - 'publishTimestamp': TIMESTAMP, + 'publishTime': TIMESTAMP, } message = self._getTargetClass().from_api_repr(api_repr) self.assertEqual(message.data, DATA) @@ 
-116,7 +116,7 @@ def test_from_api_repr_w_attributes(self): api_repr = { 'data': B64_DATA, 'messageId': MESSAGE_ID, - 'publishTimestamp': TIMESTAMP, + 'publishTime': TIMESTAMP, 'attributes': ATTRS, } message = self._getTargetClass().from_api_repr(api_repr) From bb603e2915492bf512626f7a691cc101bb254568 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Sat, 8 Oct 2016 17:02:13 -0400 Subject: [PATCH 0011/1197] Fix pubsub errors when receiving unencoded message. #2513. --- .../google/cloud/pubsub/message.py | 10 +++++++++- .../google-cloud-pubsub/unit_tests/test_message.py | 13 +++++++++++++ 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 2f810baa5e2e..c5fcfb1a2a1d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -15,6 +15,7 @@ """Define API Topics.""" import base64 +import binascii from google.cloud._helpers import _rfc3339_to_datetime @@ -85,7 +86,14 @@ def from_api_repr(cls, api_repr): :rtype: :class:`Message` :returns: The message created from the response. 
""" - data = base64.b64decode(api_repr.get('data', b'')) + raw_data = api_repr.get('data', b'') + try: + data = base64.b64decode(raw_data) + except (binascii.Error, TypeError): + to_pad = (- len(raw_data)) % 4 + padded_data = raw_data + b'=' * to_pad + data = base64.b64decode(padded_data) + instance = cls( data=data, message_id=api_repr['messageId'], attributes=api_repr.get('attributes')) diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 5d08972e5430..7f760200d024 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -89,6 +89,19 @@ def test_from_api_repr_missing_data(self): self.assertEqual(message.attributes, {}) self.assertIsNone(message.service_timestamp) + def test_from_api_repr_bad_b64_data(self): + DATA = b'wefwefw' + BAD_B64_DATA = b'd2Vmd2Vmdw=' + MESSAGE_ID = '12345' + TIMESTAMP = '2016-03-18-19:38:22.001393427Z' + api_repr = { + 'data': BAD_B64_DATA, + 'messageId': MESSAGE_ID, + 'publishTimestamp': TIMESTAMP, + } + message = self._getTargetClass().from_api_repr(api_repr) + self.assertEqual(message.data, DATA) + def test_from_api_repr_no_attributes(self): from base64 import b64encode as b64 DATA = b'DEADBEEF' From eb443755521dddd95b8d0b8a5558fd88815eb54b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 14 Oct 2016 10:04:34 -0400 Subject: [PATCH 0012/1197] Fix link to pubsub API docs. Broken by back-end move w/o redirect. Hissssss. Closes #2541. --- packages/google-cloud-pubsub/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 25abe2b5b6d8..ffee47733a18 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -38,7 +38,7 @@ receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications. .. 
_Cloud Pub/Sub: https://cloud.google.com/pubsub/docs -.. _Pub/Sub API docs: https://cloud.google.com/pubsub/reference/rest/ +.. _Pub/Sub API docs: https://cloud.google.com/pubsub/docs/reference/rest/ See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect to Cloud Pub/Sub using this Client Library. From 53d4cff31543001a0561a76389df71335e5bff5e Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:34:03 -0700 Subject: [PATCH 0013/1197] Replace string with str in rtypes. Used the command: ag -l 'rtype: string' | xargs sed -i .bak 's/rtype: string/rtype: str/g' Based on this comment: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2485#discussion_r83267163 `str` is a type, `string` is a module. --- .../google/cloud/pubsub/_helpers.py | 4 ++-- .../google/cloud/pubsub/connection.py | 2 +- .../google-cloud-pubsub/google/cloud/pubsub/iam.py | 12 ++++++------ .../google/cloud/pubsub/message.py | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py index d9d144488dfc..3587f6f6acb5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py @@ -45,7 +45,7 @@ def topic_name_from_path(path, project): :param project: The project associated with the request. It is included for validation purposes. - :rtype: string + :rtype: str :returns: Topic name parsed from ``path``. :raises: :class:`ValueError` if the ``path`` is ill-formed or if the project from the ``path`` does not agree with the @@ -64,7 +64,7 @@ def subscription_name_from_path(path, project): :param project: The project associated with the request. It is included for validation purposes. - :rtype: string + :rtype: str :returns: subscription name parsed from ``path``. 
:raises: :class:`ValueError` if the ``path`` is ill-formed or if the project from the ``path`` does not agree with the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index ecff402e3990..7f40d21305e6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -83,7 +83,7 @@ def build_api_url(self, path, query_params=None, Typically you shouldn't provide this and instead use the default for the library. - :rtype: string + :rtype: str :returns: The URL assembled from the pieces provided. """ if api_base_url is None: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index 3f07b06c3862..fb7393fd24d8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -123,7 +123,7 @@ def user(email): :type email: string :param email: E-mail for this particular user. - :rtype: string + :rtype: str :returns: A member string corresponding to the given user. """ return 'user:%s' % (email,) @@ -135,7 +135,7 @@ def service_account(email): :type email: string :param email: E-mail for this particular service account. - :rtype: string + :rtype: str :returns: A member string corresponding to the given service account. """ return 'serviceAccount:%s' % (email,) @@ -147,7 +147,7 @@ def group(email): :type email: string :param email: An id or e-mail for this particular group. - :rtype: string + :rtype: str :returns: A member string corresponding to the given group. """ return 'group:%s' % (email,) @@ -159,7 +159,7 @@ def domain(domain): :type domain: string :param domain: The domain for this member. - :rtype: string + :rtype: str :returns: A member string corresponding to the given domain. 
""" return 'domain:%s' % (domain,) @@ -168,7 +168,7 @@ def domain(domain): def all_users(): """Factory method for a member representing all users. - :rtype: string + :rtype: str :returns: A member string representing all users. """ return 'allUsers' @@ -177,7 +177,7 @@ def all_users(): def authenticated_users(): """Factory method for a member representing all authenticated users. - :rtype: string + :rtype: str :returns: A member string representing all authenticated users. """ return 'allAuthenticatedUsers' diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 17e7c88289a6..a58b4c0e8024 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -71,7 +71,7 @@ def timestamp(self): def service_timestamp(self): """Return server-set timestamp. - :rtype: string + :rtype: str :returns: timestamp (in UTC timezone) in RFC 3339 format """ return self._service_timestamp From aadcd9ac2643215018b1db01619350c6339bc62f Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 15:50:55 -0700 Subject: [PATCH 0014/1197] Replace types string with str. Uses command: ag -l 'type ([^:]+): string' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): string/type \1: str/g' Note: [-r for gsed (GNU sed) is needed for group matching](http://superuser.com/a/336819/125262). 
--- .../google/cloud/pubsub/_gax.py | 40 +++++++------- .../google/cloud/pubsub/_helpers.py | 8 +-- .../google/cloud/pubsub/client.py | 8 +-- .../google/cloud/pubsub/connection.py | 52 +++++++++---------- .../google/cloud/pubsub/iam.py | 10 ++-- .../google/cloud/pubsub/message.py | 2 +- .../google/cloud/pubsub/subscription.py | 6 +-- .../google/cloud/pubsub/topic.py | 8 +-- 8 files changed, 67 insertions(+), 67 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 0a602beab661..d945a4b47d54 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -46,14 +46,14 @@ def list_topics(self, project, page_size=0, page_token=None): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - :type project: string + :type project: str :param project: project ID :type page_size: int :param page_size: maximum number of topics to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of topics. If not passed, the API will return the first page of topics. @@ -80,7 +80,7 @@ def topic_create(self, topic_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - :type topic_path: string + :type topic_path: str :param topic_path: fully-qualified path of the new topic, in format ``projects//topics/``. @@ -103,7 +103,7 @@ def topic_get(self, topic_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - :type topic_path: string + :type topic_path: str :param topic_path: fully-qualified path of the topic, in format ``projects//topics/``. 
@@ -126,7 +126,7 @@ def topic_delete(self, topic_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - :type topic_path: string + :type topic_path: str :param topic_path: fully-qualified path of the new topic, in format ``projects//topics/``. """ @@ -143,7 +143,7 @@ def topic_publish(self, topic_path, messages): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - :type topic_path: string + :type topic_path: str :param topic_path: fully-qualified path of the topic, in format ``projects//topics/``. @@ -174,7 +174,7 @@ def topic_list_subscriptions(self, topic_path, page_size=0, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - :type topic_path: string + :type topic_path: str :param topic_path: fully-qualified path of the topic, in format ``projects//topics/``. @@ -182,7 +182,7 @@ def topic_list_subscriptions(self, topic_path, page_size=0, :param page_size: maximum number of subscriptions to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of subscriptions. If not passed, the API will return the first page of subscriptions. @@ -223,14 +223,14 @@ def list_subscriptions(self, project, page_size=0, page_token=None): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - :type project: string + :type project: str :param project: project ID :type page_size: int :param page_size: maximum number of subscriptions to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of subscriptions. If not passed, the API will return the first page of subscriptions. 
@@ -259,12 +259,12 @@ def subscription_create(self, subscription_path, topic_path, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the topic being subscribed, in format ``projects//topics/``. @@ -273,7 +273,7 @@ def subscription_create(self, subscription_path, topic_path, :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. - :type push_endpoint: string, or ``NoneType`` + :type push_endpoint: str, or ``NoneType`` :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -305,7 +305,7 @@ def subscription_get(self, subscription_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the subscription, in format ``projects//subscriptions/``. @@ -327,7 +327,7 @@ def subscription_delete(self, subscription_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the subscription, in format ``projects//subscriptions/``. @@ -346,12 +346,12 @@ def subscription_modify_push_config(self, subscription_path, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. 
- :type push_endpoint: string, or ``NoneType`` + :type push_endpoint: str, or ``NoneType`` :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -371,7 +371,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. @@ -405,7 +405,7 @@ def subscription_acknowledge(self, subscription_path, ack_ids): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. @@ -427,7 +427,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py index 3587f6f6acb5..2f021f20ab3e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py @@ -38,10 +38,10 @@ def topic_name_from_path(path, project): """Validate a topic URI path and get the topic name. - :type path: string + :type path: str :param path: URI path for a topic API request. - :type project: string + :type project: str :param project: The project associated with the request. It is included for validation purposes. 
@@ -57,10 +57,10 @@ def topic_name_from_path(path, project): def subscription_name_from_path(path, project): """Validate a subscription URI path and get the subscription name. - :type path: string + :type path: str :param path: URI path for a subscription API request. - :type project: string + :type project: str :param project: The project associated with the request. It is included for validation purposes. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 3909d4a25f3f..48c1c4cf60cd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -47,7 +47,7 @@ class Client(JSONClient): """Client to bundle configuration needed for API requests. - :type project: string + :type project: str :param project: the project which the client acts on behalf of. Will be passed when creating a topic. If not passed, falls back to the default inferred from the environment. @@ -113,7 +113,7 @@ def list_topics(self, page_size=None, page_token=None): :param page_size: maximum number of topics to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of topics. If not passed, the API will return the first page of topics. @@ -147,7 +147,7 @@ def list_subscriptions(self, page_size=None, page_token=None): :param page_size: maximum number of topics to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of topics. If not passed, the API will return the first page of topics. @@ -176,7 +176,7 @@ def topic(self, name, timestamp_messages=False): :start-after: [START client_topic] :end-before: [END client_topic] - :type name: string + :type name: str :param name: the name of the topic to be constructed. 
:type timestamp_messages: boolean diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 7f40d21305e6..71898029782e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -66,7 +66,7 @@ def build_api_url(self, path, query_params=None, Typically, you shouldn't need to use this method. - :type path: string + :type path: str :param path: The path to the resource. :type query_params: dict or list @@ -74,11 +74,11 @@ def build_api_url(self, path, query_params=None, key-value pairs) to insert into the query string of the URL. - :type api_base_url: string + :type api_base_url: str :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. - :type api_version: string + :type api_version: str :param api_version: The version of the API to call. Typically you shouldn't provide this and instead use the default for the library. @@ -109,14 +109,14 @@ def list_topics(self, project, page_size=None, page_token=None): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - :type project: string + :type project: str :param project: project ID :type page_size: int :param page_size: maximum number of topics to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of topics. If not passed, the API will return the first page of topics. @@ -146,7 +146,7 @@ def topic_create(self, topic_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the new topic, in format ``projects//topics/``. 
@@ -162,7 +162,7 @@ def topic_get(self, topic_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the topic, in format ``projects//topics/``. @@ -178,7 +178,7 @@ def topic_delete(self, topic_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the topic, in format ``projects//topics/``. """ @@ -191,7 +191,7 @@ def topic_publish(self, topic_path, messages): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the topic, in format ``projects//topics/``. @@ -214,7 +214,7 @@ def topic_list_subscriptions(self, topic_path, page_size=None, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the topic, in format ``projects//topics/``. @@ -222,7 +222,7 @@ def topic_list_subscriptions(self, topic_path, page_size=None, :param page_size: maximum number of subscriptions to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of topics. If not passed, the API will return the first page of topics. @@ -261,14 +261,14 @@ def list_subscriptions(self, project, page_size=None, page_token=None): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - :type project: string + :type project: str :param project: project ID :type page_size: int :param page_size: maximum number of subscriptions to return, If not passed, defaults to a value set by the API. 
- :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of subscriptions. If not passed, the API will return the first page of subscriptions. @@ -299,12 +299,12 @@ def subscription_create(self, subscription_path, topic_path, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type topic_path: string + :type topic_path: str :param topic_path: the fully-qualified path of the topic being subscribed, in format ``projects//topics/``. @@ -313,7 +313,7 @@ def subscription_create(self, subscription_path, topic_path, :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. - :type push_endpoint: string, or ``NoneType`` + :type push_endpoint: str, or ``NoneType`` :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -339,7 +339,7 @@ def subscription_get(self, subscription_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the subscription, in format ``projects//subscriptions/``. @@ -357,7 +357,7 @@ def subscription_delete(self, subscription_path): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the subscription, in format ``projects//subscriptions/``. 
@@ -373,12 +373,12 @@ def subscription_modify_push_config(self, subscription_path, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type push_endpoint: string, or ``NoneType`` + :type push_endpoint: str, or ``NoneType`` :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -395,7 +395,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. @@ -427,7 +427,7 @@ def subscription_acknowledge(self, subscription_path, ack_ids): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. @@ -449,7 +449,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - :type subscription_path: string + :type subscription_path: str :param subscription_path: the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. 
@@ -487,7 +487,7 @@ def get_iam_policy(self, target_path): https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - :type target_path: string + :type target_path: str :param target_path: the path of the target object. :rtype: dict @@ -504,7 +504,7 @@ def set_iam_policy(self, target_path, policy): https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - :type target_path: string + :type target_path: str :param target_path: the path of the target object. :type policy: dict @@ -525,7 +525,7 @@ def test_iam_permissions(self, target_path, permissions): https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - :type target_path: string + :type target_path: str :param target_path: the path of the target object. :type permissions: list of string diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index fb7393fd24d8..53c0f36579f3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -101,7 +101,7 @@ class Policy(object): https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding - :type etag: string + :type etag: str :param etag: ETag used to identify a unique of the policy :type version: int @@ -120,7 +120,7 @@ def __init__(self, etag=None, version=None): def user(email): """Factory method for a user member. - :type email: string + :type email: str :param email: E-mail for this particular user. 
:rtype: str @@ -132,7 +132,7 @@ def user(email): def service_account(email): """Factory method for a service account member. - :type email: string + :type email: str :param email: E-mail for this particular service account. :rtype: str @@ -144,7 +144,7 @@ def service_account(email): def group(email): """Factory method for a group member. - :type email: string + :type email: str :param email: An id or e-mail for this particular group. :rtype: str @@ -156,7 +156,7 @@ def group(email): def domain(domain): """Factory method for a domain member. - :type domain: string + :type domain: str :param domain: The domain for this member. :rtype: str diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index a58b4c0e8024..8fd65b48785a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -29,7 +29,7 @@ class Message(object): :type data: bytes :param data: the payload of the message. - :type message_id: string + :type message_id: str :param message_id: An ID assigned to the message by the API. :type attributes: dict or None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index b63e77177c3f..7c2a204c3ac7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -26,7 +26,7 @@ class Subscription(object): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions - :type name: string + :type name: str :param name: the name of the subscription. :type topic: :class:`google.cloud.pubsub.topic.Topic` or ``NoneType`` @@ -37,7 +37,7 @@ class Subscription(object): :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. 
- :type push_endpoint: string + :type push_endpoint: str :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -272,7 +272,7 @@ def modify_push_configuration(self, push_endpoint, client=None): :start-after: [START subscription_pull_push] :end-before: [END subscription_pull_push] - :type push_endpoint: string + :type push_endpoint: str :param push_endpoint: URL to which messages will be pushed by the back-end. If None, the application must pull messages. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index f879d8349c89..82fe5365190e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -33,7 +33,7 @@ class Topic(object): See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics - :type name: string + :type name: str :param name: the name of the topic :type client: :class:`google.cloud.pubsub.client.Client` @@ -71,14 +71,14 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None): :start-after: [START topic_subscription_push] :end-before: [END topic_subscription_push] - :type name: string + :type name: str :param name: the name of the subscription :type ack_deadline: int :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. - :type push_endpoint: string + :type push_endpoint: str :param push_endpoint: URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -299,7 +299,7 @@ def list_subscriptions(self, page_size=None, page_token=None, client=None): :param page_size: maximum number of topics to return, If not passed, defaults to a value set by the API. - :type page_token: string + :type page_token: str :param page_token: opaque marker for the next "page" of topics. 
If not passed, the API will return the first page of topics. From 877c7f57e74974c55346391764caaf9d37bf31c0 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 20 Oct 2016 16:02:02 -0700 Subject: [PATCH 0015/1197] Replace types boolean with bool. Uses the command: ag -l 'type ([^:]+): boolean' | \ xargs gsed -r -i.bak -e 's/type ([^:]+): boolean/type \1: bool/g' --- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub/client.py | 2 +- .../google-cloud-pubsub/google/cloud/pubsub/connection.py | 2 +- .../google-cloud-pubsub/google/cloud/pubsub/subscription.py | 6 +++--- packages/google-cloud-pubsub/google/cloud/pubsub/topic.py | 2 +- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index d945a4b47d54..d346cf526c63 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -376,7 +376,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type return_immediately: boolean + :type return_immediately: bool :param return_immediately: if True, the back-end returns even if no messages are available; if False, the API call blocks until one or more messages are diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 48c1c4cf60cd..9c0742d1727a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -179,7 +179,7 @@ def topic(self, name, timestamp_messages=False): :type name: str :param name: the name of the topic to be constructed. 
- :type timestamp_messages: boolean + :type timestamp_messages: bool :param timestamp_messages: To be passed to ``Topic`` constructor. :rtype: :class:`google.cloud.pubsub.topic.Topic` diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 71898029782e..e854495b3682 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -400,7 +400,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type return_immediately: boolean + :type return_immediately: bool :param return_immediately: if True, the back-end returns even if no messages are available; if False, the API call blocks until one or more messages are diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index 7c2a204c3ac7..79d9400bdc86 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -127,7 +127,7 @@ def path(self): def auto_ack(self, return_immediately=False, max_messages=1, client=None): """:class:`AutoAck` factory - :type return_immediately: boolean + :type return_immediately: bool :param return_immediately: passed through to :meth:`Subscription.pull` :type max_messages: int @@ -299,7 +299,7 @@ def pull(self, return_immediately=False, max_messages=1, client=None): :start-after: [START subscription_pull] :end-before: [END subscription_pull] - :type return_immediately: boolean + :type return_immediately: bool :param return_immediately: if True, the back-end returns even if no messages are available; if False, the API call blocks until one or more messages are @@ -480,7 +480,7 @@ class AutoAck(dict): :type subscription: 
:class:`Subscription` :param subscription: subscription to be pulled. - :type return_immediately: boolean + :type return_immediately: bool :param return_immediately: passed through to :meth:`Subscription.pull` :type max_messages: int diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 82fe5365190e..5559b06088c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -40,7 +40,7 @@ class Topic(object): :param client: A client which holds credentials and project configuration for the topic (which requires a project). - :type timestamp_messages: boolean + :type timestamp_messages: bool :param timestamp_messages: If true, the topic will add a ``timestamp`` key to the attributes of each published message: the value will be an RFC 3339 timestamp. From bfcf71fbf8957c34023a07c857802b2a30dda7a7 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Oct 2016 17:04:00 -0700 Subject: [PATCH 0016/1197] Replace :: with `.. code-block:: console`. Towards #2404. --- packages/google-cloud-pubsub/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index ffee47733a18..08db812e6539 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -12,7 +12,7 @@ Python Client for Google Cloud Pub / Sub Quick Start ----------- -:: +.. code-block:: console $ pip install --upgrade google-cloud-pubsub From 262c02522d4ec3b4710c51b0328714f014306baa Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 09:51:22 -0700 Subject: [PATCH 0017/1197] Remove None from param types and add (Optional). This runs a script to remove None from the types for parameters, and added (Optional) to the description. Does not pass lint due to some too-long lines. I will clean those up manually. 
See: https://github.com/GoogleCloudPlatform/google-cloud-python/pull/2580#pullrequestreview-5178193 --- .../google-cloud-pubsub/google/cloud/pubsub/_gax.py | 12 ++++++------ .../google/cloud/pubsub/connection.py | 12 ++++++------ .../google/cloud/pubsub/message.py | 8 ++++---- .../google/cloud/pubsub/subscription.py | 8 ++++---- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index d346cf526c63..da4ae25625c5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -269,12 +269,12 @@ def subscription_create(self, subscription_path, topic_path, subscribed, in format ``projects//topics/``. - :type ack_deadline: int, or ``NoneType`` - :param ack_deadline: the deadline (in seconds) by which messages pulled + :type ack_deadline: int + :param ack_deadline: (Optional) the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. - :type push_endpoint: str, or ``NoneType`` - :param push_endpoint: URL to which messages will be pushed by the + :type push_endpoint: str + :param push_endpoint: (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -351,8 +351,8 @@ def subscription_modify_push_config(self, subscription_path, the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type push_endpoint: str, or ``NoneType`` - :param push_endpoint: URL to which messages will be pushed by the + :type push_endpoint: str + :param push_endpoint: (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. 
""" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index e854495b3682..1cabb127bc37 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -309,12 +309,12 @@ def subscription_create(self, subscription_path, topic_path, subscribed, in format ``projects//topics/``. - :type ack_deadline: int, or ``NoneType`` - :param ack_deadline: the deadline (in seconds) by which messages pulled + :type ack_deadline: int + :param ack_deadline: (Optional) the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. - :type push_endpoint: str, or ``NoneType`` - :param push_endpoint: URL to which messages will be pushed by the + :type push_endpoint: str + :param push_endpoint: (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. @@ -378,8 +378,8 @@ def subscription_modify_push_config(self, subscription_path, the fully-qualified path of the new subscription, in format ``projects//subscriptions/``. - :type push_endpoint: str, or ``NoneType`` - :param push_endpoint: URL to which messages will be pushed by the + :type push_endpoint: str + :param push_endpoint: (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. """ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 8fd65b48785a..035693e82b9c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -32,8 +32,8 @@ class Message(object): :type message_id: str :param message_id: An ID assigned to the message by the API. 
- :type attributes: dict or None - :param attributes: Extra metadata associated by the publisher with the + :type attributes: dict + :param attributes: (Optional) Extra metadata associated by the publisher with the message. """ _service_timestamp = None @@ -80,8 +80,8 @@ def service_timestamp(self): def from_api_repr(cls, api_repr): """Factory: construct message from API representation. - :type api_repr: dict or None - :param api_repr: The API representation of the message + :type api_repr: dict + :param api_repr: (Optional) The API representation of the message :rtype: :class:`Message` :returns: The message created from the response. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index 79d9400bdc86..bf6882eab3cf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -29,8 +29,8 @@ class Subscription(object): :type name: str :param name: the name of the subscription. - :type topic: :class:`google.cloud.pubsub.topic.Topic` or ``NoneType`` - :param topic: the topic to which the subscription belongs; if ``None``, + :type topic: :class:`google.cloud.pubsub.topic.Topic` + :param topic: (Optional) the topic to which the subscription belongs; if ``None``, the subscription's topic has been deleted. :type ack_deadline: int @@ -81,8 +81,8 @@ def from_api_repr(cls, resource, client, topics=None): :param client: Client which holds credentials and project configuration for a topic. - :type topics: dict or None - :param topics: A mapping of topic names -> topics. If not passed, + :type topics: dict + :param topics: (Optional) A mapping of topic names -> topics. If not passed, the subscription will have a newly-created topic. 
:rtype: :class:`google.cloud.pubsub.subscription.Subscription` From 9617e7d6f9b4377d5261aa61aec3e07547ca9053 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Fri, 21 Oct 2016 11:21:42 -0700 Subject: [PATCH 0018/1197] Fix lint errors caused by addition of (Optional). Mostly, lines that were too long. --- .../google/cloud/pubsub/_gax.py | 17 +++++++------ .../google/cloud/pubsub/connection.py | 17 +++++++------ .../google/cloud/pubsub/message.py | 4 +-- .../google/cloud/pubsub/subscription.py | 25 +++++++++++-------- 4 files changed, 34 insertions(+), 29 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index da4ae25625c5..23ecb55f6024 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -270,13 +270,14 @@ def subscription_create(self, subscription_path, topic_path, ``projects//topics/``. :type ack_deadline: int - :param ack_deadline: (Optional) the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. + :param ack_deadline: + (Optional) the deadline (in seconds) by which messages pulled from + the back-end must be acknowledged. :type push_endpoint: str - :param push_endpoint: (Optional) URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. + :param push_endpoint: + (Optional) URL to which messages will be pushed by the back-end. + If not set, the application must pull messages. :rtype: dict :returns: ``Subscription`` resource returned from the API. @@ -352,9 +353,9 @@ def subscription_modify_push_config(self, subscription_path, ``projects//subscriptions/``. :type push_endpoint: str - :param push_endpoint: (Optional) URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. + :param push_endpoint: + (Optional) URL to which messages will be pushed by the back-end. 
+ If not set, the application must pull messages. """ push_config = PushConfig(push_endpoint=push_endpoint) try: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 1cabb127bc37..85ce000dea91 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -310,13 +310,14 @@ def subscription_create(self, subscription_path, topic_path, ``projects//topics/``. :type ack_deadline: int - :param ack_deadline: (Optional) the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. + :param ack_deadline: + (Optional) the deadline (in seconds) by which messages pulled from + the back-end must be acknowledged. :type push_endpoint: str - :param push_endpoint: (Optional) URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. + :param push_endpoint: + (Optional) URL to which messages will be pushed by the back-end. + If not set, the application must pull messages. :rtype: dict :returns: ``Subscription`` resource returned from the API. @@ -379,9 +380,9 @@ def subscription_modify_push_config(self, subscription_path, ``projects//subscriptions/``. :type push_endpoint: str - :param push_endpoint: (Optional) URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. + :param push_endpoint: + (Optional) URL to which messages will be pushed by the back-end. + If not set, the application must pull messages. 
""" conn = self._connection path = '/%s:modifyPushConfig' % (subscription_path,) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 035693e82b9c..89ce690eccec 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -33,8 +33,8 @@ class Message(object): :param message_id: An ID assigned to the message by the API. :type attributes: dict - :param attributes: (Optional) Extra metadata associated by the publisher with the - message. + :param attributes: + (Optional) Extra metadata associated by the publisher with the message. """ _service_timestamp = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index bf6882eab3cf..c98277d660df 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -30,21 +30,23 @@ class Subscription(object): :param name: the name of the subscription. :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: (Optional) the topic to which the subscription belongs; if ``None``, - the subscription's topic has been deleted. + :param topic: + (Optional) the topic to which the subscription belongs; if ``None``, + the subscription's topic has been deleted. :type ack_deadline: int :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic. 
+ :param push_endpoint: + (Optional) URL to which messages will be pushed by the back-end. If + not set, the application must pull messages. + + :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: + (Optional) The client to use. If not passed, falls back to the + ``client`` stored on the topic. """ _DELETED_TOPIC_PATH = '_deleted-topic_' @@ -82,8 +84,9 @@ def from_api_repr(cls, resource, client, topics=None): configuration for a topic. :type topics: dict - :param topics: (Optional) A mapping of topic names -> topics. If not passed, - the subscription will have a newly-created topic. + :param topics: + (Optional) A mapping of topic names -> topics. If not passed, the + subscription will have a newly-created topic. :rtype: :class:`google.cloud.pubsub.subscription.Subscription` :returns: Subscription parsed from ``resource``. From c5b0539b535b31cb9323581b822a6401ec863aed Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Fri, 21 Oct 2016 14:42:06 -0700 Subject: [PATCH 0019/1197] Merge pull request #2553 from waprin/logging_gax_optional Allows Explicitly Enabling/Disabling GAX for Logging/Pubsub --- .../google/cloud/pubsub/client.py | 17 +++++++++-- .../unit_tests/test_client.py | 28 +++++++++++++++---- 2 files changed, 37 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 9c0742d1727a..42a5c0e0a7da 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -63,7 +63,20 @@ class Client(JSONClient): :param http: An optional HTTP object to make requests. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. + + :type use_gax: bool + :param use_gax: (Optional) Explicitly specifies whether + to use the gRPC transport (via GAX) or HTTP. 
If unset, + falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment + variable """ + def __init__(self, project=None, credentials=None, + http=None, use_gax=None): + super(Client, self).__init__(project, credentials, http) + if use_gax is None: + self._use_gax = _USE_GAX + else: + self._use_gax = use_gax _connection_class = Connection _publisher_api = _subscriber_api = _iam_policy_api = None @@ -72,7 +85,7 @@ class Client(JSONClient): def publisher_api(self): """Helper for publisher-related API calls.""" if self._publisher_api is None: - if _USE_GAX: + if self._use_gax: generated = make_gax_publisher_api(self.connection) self._publisher_api = GAXPublisherAPI(generated) else: @@ -83,7 +96,7 @@ def publisher_api(self): def subscriber_api(self): """Helper for subscriber-related API calls.""" if self._subscriber_api is None: - if _USE_GAX: + if self._use_gax: generated = make_gax_subscriber_api(self.connection) self._subscriber_api = GAXSubscriberAPI(generated) else: diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index b1929be76df3..6ed7a5a56ad3 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -34,11 +34,12 @@ def test_publisher_api_wo_gax(self): from google.cloud.pubsub import client as MUT from google.cloud._testing import _Monkey creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - conn = client.connection = object() with _Monkey(MUT, _USE_GAX=False): - api = client.publisher_api + client = self._makeOne(project=self.PROJECT, credentials=creds) + + conn = client.connection = object() + api = client.publisher_api self.assertIsInstance(api, _PublisherAPI) self.assertIs(api._connection, conn) @@ -46,6 +47,20 @@ def test_publisher_api_wo_gax(self): again = client.publisher_api self.assertIs(again, api) + def test_no_gax_ctor(self): + from google.cloud._testing import 
_Monkey + from google.cloud.pubsub.connection import _PublisherAPI + from google.cloud.pubsub import client as MUT + + creds = _Credentials() + with _Monkey(MUT, _USE_GAX=True): + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=False) + + self.assertFalse(client._use_gax) + api = client.publisher_api + self.assertIsInstance(api, _PublisherAPI) + def test_publisher_api_w_gax(self): from google.cloud.pubsub import client as MUT from google.cloud._testing import _Monkey @@ -84,11 +99,12 @@ def test_subscriber_api_wo_gax(self): from google.cloud.pubsub import client as MUT from google.cloud._testing import _Monkey creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - conn = client.connection = object() with _Monkey(MUT, _USE_GAX=False): - api = client.subscriber_api + client = self._makeOne(project=self.PROJECT, credentials=creds) + + conn = client.connection = object() + api = client.subscriber_api self.assertIsInstance(api, _SubscriberAPI) self.assertIs(api._connection, conn) From c7689d0b8d93e08686b0aecb10e473c21d19c1b5 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 20 Oct 2016 15:06:56 -0400 Subject: [PATCH 0020/1197] Do not encode pubsub message for gRPC requests. 
Fixes: 2577 --- .../google/cloud/pubsub/connection.py | 38 ++++- .../google/cloud/pubsub/message.py | 12 +- .../google/cloud/pubsub/topic.py | 7 +- .../unit_tests/test_connection.py | 45 +++++- .../unit_tests/test_message.py | 21 +-- .../unit_tests/test_subscription.py | 9 +- .../unit_tests/test_topic.py | 132 ++++++++---------- 7 files changed, 146 insertions(+), 118 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index ecff402e3990..8a4c7086da8e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -14,6 +14,7 @@ """Create / interact with Google Cloud Pub/Sub connections.""" +import base64 import os from google.cloud import connection as base_connection @@ -201,6 +202,7 @@ def topic_publish(self, topic_path, messages): :rtype: list of string :returns: list of opaque IDs for published messages. """ + _transform_messages_base64(messages, _base64_unicode) conn = self._connection data = {'messages': messages} response = conn.api_request( @@ -419,7 +421,9 @@ def subscription_pull(self, subscription_path, return_immediately=False, 'maxMessages': max_messages, } response = conn.api_request(method='POST', path=path, data=data) - return response.get('receivedMessages', ()) + messages = response.get('receivedMessages', ()) + _transform_messages_base64(messages, base64.b64decode, 'message') + return messages def subscription_acknowledge(self, subscription_path, ack_ids): """API call: acknowledge retrieved messages @@ -539,3 +543,35 @@ def test_iam_permissions(self, target_path, permissions): path = '/%s:testIamPermissions' % (target_path,) resp = conn.api_request(method='POST', path=path, data=wrapped) return resp.get('permissions', []) + + +def _base64_unicode(value): + """Helper to base64 encode and make JSON serializable. 
+ + :type value: str + :param value: String value to be base64 encoded and made serializable. + + :rtype: str + :returns: Base64 encoded string/unicode value. + """ + as_bytes = base64.b64encode(value) + return as_bytes.decode('ascii') + + +def _transform_messages_base64(messages, transform, key=None): + """Helper for base64 encoding and decoding messages. + + :type messages: list + :param messages: List of dictionaries with message data. + + :type transform: :class:`~types.FunctionType` + :param transform: Function to encode/decode the message data. + + :type key: str + :param key: Index to access messages. + """ + for message in messages: + if key is not None: + message = message[key] + if 'data' in message: + message['data'] = transform(message['data']) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 17e7c88289a6..22d0290c81b5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -14,9 +14,6 @@ """Define API Topics.""" -import base64 -import binascii - from google.cloud._helpers import _rfc3339_to_datetime @@ -86,14 +83,7 @@ def from_api_repr(cls, api_repr): :rtype: :class:`Message` :returns: The message created from the response. 
""" - raw_data = api_repr.get('data', b'') - try: - data = base64.b64decode(raw_data) - except (binascii.Error, TypeError): - to_pad = (- len(raw_data)) % 4 - padded_data = raw_data + b'=' * to_pad - data = base64.b64decode(padded_data) - + data = api_repr.get('data', b'') instance = cls( data=data, message_id=api_repr['messageId'], attributes=api_repr.get('attributes')) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index f879d8349c89..7ad8de6570d7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -14,8 +14,6 @@ """Define API Topics.""" -import base64 - from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _NOW from google.cloud.exceptions import NotFound @@ -252,8 +250,7 @@ def publish(self, message, client=None, **attrs): api = client.publisher_api self._timestamp_message(attrs) - message_b = base64.b64encode(message).decode('ascii') - message_data = {'data': message_b, 'attributes': attrs} + message_data = {'data': message, 'attributes': attrs} message_ids = api.topic_publish(self.full_name, [message_data]) return message_ids[0] @@ -449,7 +446,7 @@ def publish(self, message, **attrs): """ self.topic._timestamp_message(attrs) self.messages.append( - {'data': base64.b64encode(message).decode('ascii'), + {'data': message, 'attributes': attrs}) def commit(self, client=None): diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py index 9e14e1e0d528..4b4a544469bc 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_connection.py +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -238,9 +238,10 @@ def test_topic_delete_miss(self): def test_topic_publish_hit(self): import base64 PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') + 
B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, 'attributes': {}} + MESSAGE = {'data': PAYLOAD, 'attributes': {}} + B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} RETURNED = {'messageIds': [MSGID]} connection = _Connection(RETURNED) api = self._makeOne(connection) @@ -252,14 +253,14 @@ def test_topic_publish_hit(self): path = '/%s:publish' % (self.TOPIC_PATH,) self.assertEqual(connection._called_with['path'], path) self.assertEqual(connection._called_with['data'], - {'messages': [MESSAGE]}) + {'messages': [B64MSG]}) + msg_data = connection._called_with['data']['messages'][0]['data'] + self.assertEqual(msg_data, B64_PAYLOAD) def test_topic_publish_miss(self): - import base64 from google.cloud.exceptions import NotFound PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, 'attributes': {}} + MESSAGE = {'data': PAYLOAD, 'attributes': {}} connection = _Connection() api = self._makeOne(connection) @@ -534,6 +535,7 @@ def test_subscription_pull_defaults(self): received = api.subscription_pull(self.SUB_PATH) self.assertEqual(received, RETURNED['receivedMessages']) + self.assertEqual(received[0]['message']['data'], PAYLOAD) self.assertEqual(connection._called_with['method'], 'POST') path = '/%s:pull' % (self.SUB_PATH,) self.assertEqual(connection._called_with['path'], path) @@ -718,6 +720,37 @@ def test_test_iam_permissions_missing_key(self): {'permissions': ALL_ROLES}) +class Test__transform_messages_base64_empty(unittest.TestCase): + def _callFUT(self, messages, transform, key=None): + from google.cloud.pubsub.connection import _transform_messages_base64 + return _transform_messages_base64(messages, transform, key) + + def test__transform_messages_base64_empty_message(self): + from base64 import b64decode + DATA = [{'message': {}}] + self._callFUT(DATA, b64decode, 'message') + self.assertEqual(DATA, [{'message': {}}]) + + def 
test__transform_messages_base64_empty_data(self): + from base64 import b64decode + DATA = [{'message': {'data': b''}}] + self._callFUT(DATA, b64decode, 'message') + self.assertEqual(DATA, [{'message': {'data': b''}}]) + + def test__transform_messages_base64_pull(self): + from base64 import b64encode + DATA = [{'message': {'data': b'testing 1 2 3'}}] + self._callFUT(DATA, b64encode, 'message') + self.assertEqual(DATA[0]['message']['data'], + b64encode(b'testing 1 2 3')) + + def test__transform_messages_base64_publish(self): + from base64 import b64encode + DATA = [{'data': b'testing 1 2 3'}] + self._callFUT(DATA, b64encode) + self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3')) + + class _Connection(object): _called_with = None diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 9e0ea940a071..0d71ec9715f8 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -89,27 +89,12 @@ def test_from_api_repr_missing_data(self): self.assertEqual(message.attributes, {}) self.assertIsNone(message.service_timestamp) - def test_from_api_repr_bad_b64_data(self): - DATA = b'wefwefw' - BAD_B64_DATA = b'd2Vmd2Vmdw=' - MESSAGE_ID = '12345' - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': BAD_B64_DATA, - 'messageId': MESSAGE_ID, - 'publishTimestamp': TIMESTAMP, - } - message = self._getTargetClass().from_api_repr(api_repr) - self.assertEqual(message.data, DATA) - def test_from_api_repr_no_attributes(self): - from base64 import b64encode as b64 DATA = b'DEADBEEF' - B64_DATA = b64(DATA) MESSAGE_ID = '12345' TIMESTAMP = '2016-03-18-19:38:22.001393427Z' api_repr = { - 'data': B64_DATA, + 'data': DATA, 'messageId': MESSAGE_ID, 'publishTime': TIMESTAMP, } @@ -120,14 +105,12 @@ def test_from_api_repr_no_attributes(self): self.assertEqual(message.service_timestamp, TIMESTAMP) def 
test_from_api_repr_w_attributes(self): - from base64 import b64encode as b64 DATA = b'DEADBEEF' - B64_DATA = b64(DATA) MESSAGE_ID = '12345' ATTRS = {'a': 'b'} TIMESTAMP = '2016-03-18-19:38:22.001393427Z' api_repr = { - 'data': B64_DATA, + 'data': DATA, 'messageId': MESSAGE_ID, 'publishTime': TIMESTAMP, 'attributes': ATTRS, diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index dbefa7d6ec1a..2afa45c3cc94 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -322,13 +322,11 @@ def test_modify_push_config_wo_endpoint_w_alternate_client(self): (self.SUB_PATH, None)) def test_pull_wo_return_immediately_max_messages_w_bound_client(self): - import base64 from google.cloud.pubsub.message import Message ACK_ID = 'DEADBEEF' MSG_ID = 'BEADCAFE' PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD) - MESSAGE = {'messageId': MSG_ID, 'data': B64} + MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD} REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} client = _Client(project=self.PROJECT) api = client.subscriber_api = _FauxSubscribererAPI() @@ -349,13 +347,12 @@ def test_pull_wo_return_immediately_max_messages_w_bound_client(self): (self.SUB_PATH, False, 1)) def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): - import base64 from google.cloud.pubsub.message import Message ACK_ID = 'DEADBEEF' MSG_ID = 'BEADCAFE' PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD) - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} + MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD, + 'attributes': {'a': 'b'}} REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py 
b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 58e819e187b7..1786ed477d4e 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -120,11 +120,9 @@ def test_delete_w_alternate_client(self): self.assertEqual(api._topic_deleted, self.TOPIC_PATH) def test_publish_single_bytes_wo_attrs_w_bound_client(self): - import base64 - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') + PAYLOAD = 'This is the message text' MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, 'attributes': {}} + MESSAGE = {'data': PAYLOAD, 'attributes': {}} client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] @@ -136,7 +134,6 @@ def test_publish_single_bytes_wo_attrs_w_bound_client(self): self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): - import base64 import datetime from google.cloud.pubsub import topic as MUT from google.cloud._helpers import _RFC3339_MICROS @@ -146,11 +143,10 @@ def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): def _utcnow(): return NOW - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') + PAYLOAD = 'This is the message text' MSGID = 'DEADBEEF' MESSAGE = { - 'data': B64, + 'data': PAYLOAD, 'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}, } client1 = _Client(project=self.PROJECT) @@ -167,12 +163,10 @@ def _utcnow(): self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): - import base64 - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') + PAYLOAD = 'This is the message text' MSGID = 'DEADBEEF' OVERRIDE = '2015-04-10T16:46:22.868399Z' - MESSAGE = {'data': B64, + MESSAGE = {'data': PAYLOAD, 'attributes': 
{'timestamp': OVERRIDE}} client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() @@ -186,11 +180,9 @@ def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) def test_publish_single_w_attrs(self): - import base64 - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') + PAYLOAD = 'This is the message text' MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, + MESSAGE = {'data': PAYLOAD, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() @@ -202,17 +194,39 @@ def test_publish_single_w_attrs(self): self.assertEqual(msgid, MSGID) self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + def test_publish_with_gax(self): + PAYLOAD = 'This is the message text' + MSGID = 'DEADBEEF' + MESSAGE = {'data': PAYLOAD, 'attributes': {}} + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID] + topic = self._makeOne(self.TOPIC_NAME, client=client) + msgid = topic.publish(PAYLOAD) + + self.assertEqual(msgid, MSGID) + self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + + def test_publish_without_gax(self): + PAYLOAD = 'This is the message text' + MSGID = 'DEADBEEF' + MESSAGE = {'data': PAYLOAD, 'attributes': {}} + client = _Client(project=self.PROJECT) + api = client.publisher_api = _FauxPublisherAPI() + api._topic_publish_response = [MSGID] + topic = self._makeOne(self.TOPIC_NAME, client=client) + msgid = topic.publish(PAYLOAD) + + self.assertEqual(msgid, MSGID) + self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) + def test_publish_multiple_w_bound_client(self): - import base64 - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - B64_1 = base64.b64encode(PAYLOAD1) - B64_2 = 
base64.b64encode(PAYLOAD2) + PAYLOAD1 = 'This is the first message text' + PAYLOAD2 = 'This is the second message text' MSGID1 = 'DEADBEEF' MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': B64_1.decode('ascii'), - 'attributes': {}} - MESSAGE2 = {'data': B64_2.decode('ascii'), + MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} + MESSAGE2 = {'data': PAYLOAD2, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() @@ -241,16 +255,13 @@ def test_publish_w_no_messages(self): self.assertEqual(api._api_called, 0) def test_publish_multiple_w_alternate_client(self): - import base64 - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - B64_1 = base64.b64encode(PAYLOAD1) - B64_2 = base64.b64encode(PAYLOAD2) + PAYLOAD1 = 'This is the first message text' + PAYLOAD2 = 'This is the second message text' MSGID1 = 'DEADBEEF' MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': B64_1.decode('ascii'), 'attributes': {}} + MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} MESSAGE2 = { - 'data': B64_2.decode('ascii'), + 'data': PAYLOAD2, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}, } client1 = _Client(project=self.PROJECT) @@ -598,10 +609,8 @@ def test___iter___non_empty(self): self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) def test_publish_bytes_wo_attrs(self): - import base64 - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, + PAYLOAD = 'This is the message text' + MESSAGE = {'data': PAYLOAD, 'attributes': {}} client = _Client(project=self.PROJECT) topic = _Topic() @@ -610,10 +619,8 @@ def test_publish_bytes_wo_attrs(self): self.assertEqual(batch.messages, [MESSAGE]) def test_publish_bytes_w_add_timestamp(self): - import base64 - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, + PAYLOAD = 'This is the message text' + MESSAGE = 
{'data': PAYLOAD, 'attributes': {'timestamp': 'TIMESTAMP'}} client = _Client(project=self.PROJECT) topic = _Topic(timestamp_messages=True) @@ -622,16 +629,13 @@ def test_publish_bytes_w_add_timestamp(self): self.assertEqual(batch.messages, [MESSAGE]) def test_commit_w_bound_client(self): - import base64 - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - B64_1 = base64.b64encode(PAYLOAD1) - B64_2 = base64.b64encode(PAYLOAD2) + PAYLOAD1 = 'This is the first message text' + PAYLOAD2 = 'This is the second message text' MSGID1 = 'DEADBEEF' MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': B64_1.decode('ascii'), + MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': B64_2.decode('ascii'), + MESSAGE2 = {'data': PAYLOAD2, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} client = _Client(project='PROJECT') api = client.publisher_api = _FauxPublisherAPI() @@ -649,16 +653,12 @@ def test_commit_w_bound_client(self): (topic.full_name, [MESSAGE1, MESSAGE2])) def test_commit_w_alternate_client(self): - import base64 - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - B64_1 = base64.b64encode(PAYLOAD1) - B64_2 = base64.b64encode(PAYLOAD2) + PAYLOAD1 = 'This is the first message text' + PAYLOAD2 = 'This is the second message text' MSGID1 = 'DEADBEEF' MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': B64_1.decode('ascii'), - 'attributes': {}} - MESSAGE2 = {'data': B64_2.decode('ascii'), + MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} + MESSAGE2 = {'data': PAYLOAD2, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} client1 = _Client(project='PROJECT') client2 = _Client(project='PROJECT') @@ -677,16 +677,12 @@ def test_commit_w_alternate_client(self): (topic.full_name, [MESSAGE1, MESSAGE2])) def test_context_mgr_success(self): - import base64 - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - B64_1 = base64.b64encode(PAYLOAD1) - B64_2 = 
base64.b64encode(PAYLOAD2) + PAYLOAD1 = 'This is the first message text' + PAYLOAD2 = 'This is the second message text' MSGID1 = 'DEADBEEF' MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': B64_1.decode('ascii'), - 'attributes': {}} - MESSAGE2 = {'data': B64_2.decode('ascii'), + MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} + MESSAGE2 = {'data': PAYLOAD2, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} client = _Client(project='PROJECT') api = client.publisher_api = _FauxPublisherAPI() @@ -705,14 +701,10 @@ def test_context_mgr_success(self): (topic.full_name, [MESSAGE1, MESSAGE2])) def test_context_mgr_failure(self): - import base64 - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - B64_1 = base64.b64encode(PAYLOAD1) - B64_2 = base64.b64encode(PAYLOAD2) - MESSAGE1 = {'data': B64_1.decode('ascii'), - 'attributes': {}} - MESSAGE2 = {'data': B64_2.decode('ascii'), + PAYLOAD1 = 'This is the first message text' + PAYLOAD2 = 'This is the second message text' + MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} + MESSAGE2 = {'data': PAYLOAD2, 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} client = _Client(project='PROJECT') api = client.publisher_api = _FauxPublisherAPI() From aa9c23037e63946818ab116f80b50170b290ed59 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 24 Oct 2016 23:39:47 -0700 Subject: [PATCH 0021/1197] Using Iterators for list_topics() in Pub/Sub. In the process, had to add custom support for the GAX page iterator in our core Iterator implementation. 
--- .../google/cloud/pubsub/_gax.py | 66 ++++++++++++++++--- .../google/cloud/pubsub/client.py | 17 +++-- .../google/cloud/pubsub/connection.py | 47 ++++++++----- .../unit_tests/test__gax.py | 32 +++++++-- .../unit_tests/test_client.py | 42 +++++++++--- .../unit_tests/test_connection.py | 49 ++++++++++---- 6 files changed, 193 insertions(+), 60 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 23ecb55f6024..6ad835102fd9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -14,6 +14,8 @@ """GAX wrapper for Pubsub API requests.""" +import functools + from google.cloud.gapic.pubsub.v1.publisher_api import PublisherApi from google.cloud.gapic.pubsub.v1.subscriber_api import SubscriberApi from google.gax import CallOptions @@ -29,6 +31,12 @@ from google.cloud._helpers import _pb_timestamp_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound +from google.cloud.iterator import Iterator +from google.cloud.iterator import Page +from google.cloud.pubsub.topic import Topic + + +_FAKE_ITEMS_KEY = 'not-a-key' class _PublisherAPI(object): @@ -58,11 +66,9 @@ def list_topics(self, project, page_size=0, page_token=None): passed, the API will return the first page of topics. - :rtype: tuple, (list, str) - :returns: list of ``Topic`` resource dicts, plus a - "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` + accessible to the current API. 
""" if page_token is None: page_token = INITIAL_PAGE @@ -70,9 +76,14 @@ def list_topics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_topics( path, page_size=page_size, options=options) - topics = [{'name': topic_pb.name} for topic_pb in page_iter.next()] - token = page_iter.page_token or None - return topics, token + page_iter = functools.partial(_recast_page_iterator, page_iter) + + # NOTE: We don't currently have access to the client, so callers + # that want the client, must manually bind the client to the + # iterator instance returned. + return Iterator(client=None, path=path, + item_to_value=_item_to_topic, + page_iter=page_iter) def topic_create(self, topic_path): """API call: create a topic @@ -543,3 +554,42 @@ def make_gax_subscriber_api(connection): if connection.in_emulator: channel = insecure_channel(connection.host) return SubscriberApi(channel=channel) + + +def _item_to_topic(iterator, resource): + """Convert a JSON job to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: :class:`google.pubsub.v1.pubsub_pb2.Topic` + :param resource: A topic returned from the API. + + :rtype: :class:`~google.cloud.pubsub.topic.Topic` + :returns: The next topic in the page. + """ + return Topic.from_api_repr( + {'name': resource.name}, iterator.client) + + +def _recast_page_iterator(page_iter, iterator): + """Wrap GAX pages generator. + + In particular, wrap each page and capture some state from the + GAX iterator. + + Yields :class:`~google.cloud.iterator.Page` instances + + :type page_iter: :class:`~google.gax.PageIterator` + :param page_iter: The iterator to wrap. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that owns each page. 
+ """ + for items in page_iter: + fake_response = {_FAKE_ITEMS_KEY: items} + page = Page( + iterator, fake_response, _FAKE_ITEMS_KEY, _item_to_topic) + iterator.next_page_token = page_iter.page_token or None + iterator.num_results += page.num_items + yield page diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 42a5c0e0a7da..100a567fbc7a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -131,18 +131,17 @@ def list_topics(self, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: tuple, (list, str) - :returns: list of :class:`google.cloud.pubsub.topic.Topic`, plus a - "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` + accessible to the current API. """ api = self.publisher_api - resources, next_token = api.list_topics( + iterator = api.list_topics( self.project, page_size, page_token) - topics = [Topic.from_api_repr(resource, self) - for resource in resources] - return topics, next_token + # NOTE: Make sure to set the client since ``api.list_topics()`` may + # not have access to the current client. + iterator.client = self + return iterator def list_subscriptions(self, page_size=None, page_token=None): """List subscriptions for the project associated with this client. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 296e4ed55009..9a88670bddcf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -19,6 +19,8 @@ from google.cloud import connection as base_connection from google.cloud.environment_vars import PUBSUB_EMULATOR +from google.cloud.iterator import Iterator +from google.cloud.pubsub.topic import Topic PUBSUB_API_HOST = 'pubsub.googleapis.com' @@ -104,7 +106,8 @@ class _PublisherAPI(object): def __init__(self, connection): self._connection = connection - def list_topics(self, project, page_size=None, page_token=None): + @staticmethod + def list_topics(project, page_size=None, page_token=None): """API call: list topics for a given project See: @@ -122,24 +125,21 @@ def list_topics(self, project, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: tuple, (list, str) - :returns: list of ``Topic`` resource dicts, plus a - "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` + accessible to the current connection. """ - conn = self._connection - params = {} - + extra_params = {} if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token - + extra_params['pageSize'] = page_size path = '/projects/%s/topics' % (project,) - resp = conn.api_request(method='GET', path=path, query_params=params) - return resp.get('topics', ()), resp.get('nextPageToken') + + # NOTE: We don't currently have access to the client, so callers + # that want the client, must manually bind the client to the + # iterator instance returned. 
+ return Iterator(client=None, path=path, + items_key='topics', item_to_value=_item_to_topic, + page_token=page_token, extra_params=extra_params) def topic_create(self, topic_path): """API call: create a topic @@ -576,3 +576,18 @@ def _transform_messages_base64(messages, transform, key=None): message = message[key] if 'data' in message: message['data'] = transform(message['data']) + + +def _item_to_topic(iterator, resource): + """Convert a JSON job to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: dict + :param resource: A topic returned from the API. + + :rtype: :class:`~google.cloud.pubsub.topic.Topic` + :returns: The next topic in the page. + """ + return Topic.from_api_repr(resource, iterator.client) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 46d05a23d583..baefa61dbdae 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -56,17 +56,24 @@ def test_ctor(self): def test_list_topics_no_paging(self): from google.gax import INITIAL_PAGE from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.topic import Topic + TOKEN = 'TOKEN' response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], TOKEN) gax_api = _GAXPublisherAPI(_list_topics_response=response) api = self._makeOne(gax_api) - topics, next_token = api.list_topics(self.PROJECT) + iterator = api.list_topics(self.PROJECT) + # Add back the client to support API requests. 
+ iterator.client = _Client(self.PROJECT) + topics = list(iterator) + next_token = iterator.next_page_token self.assertEqual(len(topics), 1) topic = topics[0] - self.assertIsInstance(topic, dict) - self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertIsInstance(topic, Topic) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.full_name, self.TOPIC_PATH) self.assertEqual(next_token, TOKEN) name, page_size, options = gax_api._list_topics_called_with @@ -76,6 +83,8 @@ def test_list_topics_no_paging(self): def test_list_topics_with_paging(self): from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.topic import Topic + SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' @@ -84,13 +93,18 @@ def test_list_topics_with_paging(self): gax_api = _GAXPublisherAPI(_list_topics_response=response) api = self._makeOne(gax_api) - topics, next_token = api.list_topics( + iterator = api.list_topics( self.PROJECT, page_size=SIZE, page_token=TOKEN) + # Add back the client to support API requests. 
+ iterator.client = _Client(self.PROJECT) + topics = list(iterator) + next_token = iterator.next_page_token self.assertEqual(len(topics), 1) topic = topics[0] - self.assertIsInstance(topic, dict) - self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertIsInstance(topic, Topic) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.full_name, self.TOPIC_PATH) self.assertEqual(next_token, NEW_TOKEN) name, page_size, options = gax_api._list_topics_called_with @@ -1043,3 +1057,9 @@ class _Connection(object): def __init__(self, in_emulator=False, host=None): self.in_emulator = in_emulator self.host = host + + +class _Client(object): + + def __init__(self, project): + self.project = project diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 6ed7a5a56ad3..54f9b68599c7 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -159,13 +159,16 @@ def test_iam_policy_api(self): def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic + creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) client.connection = object() - api = client._publisher_api = _FauxPublisherAPI() - api._list_topics_response = [{'name': self.TOPIC_PATH}], None + api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) + client._publisher_api = api - topics, next_page_token = client.list_topics() + iterator = client.list_topics() + topics = list(iterator) + next_page_token = iterator.next_page_token self.assertEqual(len(topics), 1) self.assertIsInstance(topics[0], Topic) @@ -176,16 +179,19 @@ def test_list_topics_no_paging(self): def test_list_topics_with_paging(self): from google.cloud.pubsub.topic import Topic + TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) client.connection = 
object() - api = client._publisher_api = _FauxPublisherAPI() - api._list_topics_response = [{'name': self.TOPIC_PATH}], TOKEN2 + api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) + client._publisher_api = api - topics, next_page_token = client.list_topics(SIZE, TOKEN1) + iterator = client.list_topics(SIZE, TOKEN1) + topics = list(iterator) + next_page_token = iterator.next_page_token self.assertEqual(len(topics), 1) self.assertIsInstance(topics[0], Topic) @@ -198,10 +204,12 @@ def test_list_topics_missing_key(self): creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) client.connection = object() - api = client._publisher_api = _FauxPublisherAPI() - api._list_topics_response = (), None + api = _FauxPublisherAPI() + client._publisher_api = api - topics, next_page_token = client.list_topics() + iterator = client.list_topics() + topics = list(iterator) + next_page_token = iterator.next_page_token self.assertEqual(len(topics), 0) self.assertIsNone(next_page_token) @@ -305,11 +313,25 @@ def create_scoped(self, scope): return self +class _Iterator(object): + + def __init__(self, items, token): + self._items = items or () + self.next_page_token = token + + def __iter__(self): + return iter(self._items) + + class _FauxPublisherAPI(object): + def __init__(self, items=None, token=None): + self._items = items + self._token = token + def list_topics(self, project, page_size, page_token): self._listed_topics = (project, page_size, page_token) - return self._list_topics_response + return _Iterator(self._items, self._token) class _FauxSubscriberAPI(object): diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py index 4b4a544469bc..6caaf764639a 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_connection.py +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -105,16 +105,23 @@ def test_ctor(self): self.assertIs(api._connection, 
connection) def test_list_topics_no_paging(self): - RETURNED = {'topics': [{'name': self.TOPIC_PATH}]} - connection = _Connection(RETURNED) + from google.cloud.pubsub.topic import Topic + + returned = {'topics': [{'name': self.TOPIC_PATH}]} + connection = _Connection(returned) api = self._makeOne(connection) - topics, next_token = api.list_topics(self.PROJECT) + iterator = api.list_topics(self.PROJECT) + # Add back the client to support API requests. + iterator.client = _Client(connection, self.PROJECT) + topics = list(iterator) + next_token = iterator.next_page_token self.assertEqual(len(topics), 1) topic = topics[0] - self.assertIsInstance(topic, dict) - self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertIsInstance(topic, Topic) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.full_name, self.TOPIC_PATH) self.assertIsNone(next_token) self.assertEqual(connection._called_with['method'], 'GET') @@ -123,6 +130,9 @@ def test_list_topics_no_paging(self): self.assertEqual(connection._called_with['query_params'], {}) def test_list_topics_with_paging(self): + import six + from google.cloud.pubsub.topic import Topic + TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 @@ -133,13 +143,19 @@ def test_list_topics_with_paging(self): connection = _Connection(RETURNED) api = self._makeOne(connection) - topics, next_token = api.list_topics( + iterator = api.list_topics( self.PROJECT, page_token=TOKEN1, page_size=SIZE) + # Add back the client to support API requests. 
+ iterator.client = _Client(connection, self.PROJECT) + page = six.next(iterator.pages) + topics = list(page) + next_token = iterator.next_page_token self.assertEqual(len(topics), 1) topic = topics[0] - self.assertIsInstance(topic, dict) - self.assertEqual(topic['name'], self.TOPIC_PATH) + self.assertIsInstance(topic, Topic) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.full_name, self.TOPIC_PATH) self.assertEqual(next_token, TOKEN2) self.assertEqual(connection._called_with['method'], 'GET') @@ -149,11 +165,15 @@ def test_list_topics_with_paging(self): {'pageToken': TOKEN1, 'pageSize': SIZE}) def test_list_topics_missing_key(self): - RETURNED = {} - connection = _Connection(RETURNED) + returned = {} + connection = _Connection(returned) api = self._makeOne(connection) - topics, next_token = api.list_topics(self.PROJECT) + iterator = api.list_topics(self.PROJECT) + # Add back the client to support API requests. + iterator.client = _Client(connection, self.PROJECT) + topics = list(iterator) + next_token = iterator.next_page_token self.assertEqual(len(topics), 0) self.assertIsNone(next_token) @@ -768,3 +788,10 @@ def api_request(self, **kw): err_class = self._no_response_error or NotFound raise err_class('miss') return response + + +class _Client(object): + + def __init__(self, connection, project): + self.connection = connection + self.project = project From 93adf9ab6ce7b185eebe121983498529fbcf1e0f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 11:11:21 -0700 Subject: [PATCH 0022/1197] Passing client to Pub/Sub API classes. Also tweaking an exhausted _GAXPageIterator in google.cloud._testing. 
--- .../google/cloud/pubsub/_gax.py | 12 ++-- .../google/cloud/pubsub/client.py | 10 +-- .../google/cloud/pubsub/connection.py | 17 +++-- .../unit_tests/test__gax.py | 62 ++++++++++++------- .../unit_tests/test_client.py | 4 +- .../unit_tests/test_connection.py | 55 +++++++++------- 6 files changed, 92 insertions(+), 68 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 6ad835102fd9..1a39c0078b69 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -44,9 +44,14 @@ class _PublisherAPI(object): :type gax_api: :class:`google.pubsub.v1.publisher_api.PublisherApi` :param gax_api: API object used to make GAX requests. + + :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: The client that owns this API object. """ - def __init__(self, gax_api): + + def __init__(self, gax_api, client): self._gax_api = gax_api + self._client = client def list_topics(self, project, page_size=0, page_token=None): """List topics for the project associated with this API. @@ -78,10 +83,7 @@ def list_topics(self, project, page_size=0, page_token=None): path, page_size=page_size, options=options) page_iter = functools.partial(_recast_page_iterator, page_iter) - # NOTE: We don't currently have access to the client, so callers - # that want the client, must manually bind the client to the - # iterator instance returned. 
- return Iterator(client=None, path=path, + return Iterator(client=self._client, path=path, item_to_value=_item_to_topic, page_iter=page_iter) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 100a567fbc7a..8c7272266660 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -87,9 +87,9 @@ def publisher_api(self): if self._publisher_api is None: if self._use_gax: generated = make_gax_publisher_api(self.connection) - self._publisher_api = GAXPublisherAPI(generated) + self._publisher_api = GAXPublisherAPI(generated, self) else: - self._publisher_api = JSONPublisherAPI(self.connection) + self._publisher_api = JSONPublisherAPI(self) return self._publisher_api @property @@ -136,12 +136,8 @@ def list_topics(self, page_size=None, page_token=None): accessible to the current API. """ api = self.publisher_api - iterator = api.list_topics( + return api.list_topics( self.project, page_size, page_token) - # NOTE: Make sure to set the client since ``api.list_topics()`` may - # not have access to the current client. - iterator.client = self - return iterator def list_subscriptions(self, page_size=None, page_token=None): """List subscriptions for the project associated with this client. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 9a88670bddcf..722b28102dfa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -99,15 +99,15 @@ def build_api_url(self, path, query_params=None, class _PublisherAPI(object): """Helper mapping publisher-related APIs. - :type connection: :class:`Connection` - :param connection: the connection used to make API requests. 
+ :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: the client used to make API requests. """ - def __init__(self, connection): - self._connection = connection + def __init__(self, client): + self._client = client + self._connection = client.connection - @staticmethod - def list_topics(project, page_size=None, page_token=None): + def list_topics(self, project, page_size=None, page_token=None): """API call: list topics for a given project See: @@ -134,10 +134,7 @@ def list_topics(project, page_size=None, page_token=None): extra_params['pageSize'] = page_size path = '/projects/%s/topics' % (project,) - # NOTE: We don't currently have access to the client, so callers - # that want the client, must manually bind the client to the - # iterator instance returned. - return Iterator(client=None, path=path, + return Iterator(client=self._client, path=path, items_key='topics', item_to_value=_item_to_topic, page_token=page_token, extra_params=extra_params) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index baefa61dbdae..907bdecb5c89 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -50,8 +50,10 @@ def _getTargetClass(self): def test_ctor(self): gax_api = _GAXPublisherAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) self.assertIs(api._gax_api, gax_api) + self.assertIs(api._client, client) def test_list_topics_no_paging(self): from google.gax import INITIAL_PAGE @@ -61,11 +63,10 @@ def test_list_topics_no_paging(self): TOKEN = 'TOKEN' response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], TOKEN) gax_api = _GAXPublisherAPI(_list_topics_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) iterator = api.list_topics(self.PROJECT) - # Add back the client to support API 
requests. - iterator.client = _Client(self.PROJECT) topics = list(iterator) next_token = iterator.next_page_token @@ -91,12 +92,11 @@ def test_list_topics_with_paging(self): response = _GAXPageIterator( [_TopicPB(self.TOPIC_PATH)], NEW_TOKEN) gax_api = _GAXPublisherAPI(_list_topics_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) iterator = api.list_topics( self.PROJECT, page_size=SIZE, page_token=TOKEN) - # Add back the client to support API requests. - iterator.client = _Client(self.PROJECT) topics = list(iterator) next_token = iterator.next_page_token @@ -115,7 +115,8 @@ def test_list_topics_with_paging(self): def test_topic_create(self): topic_pb = _TopicPB(self.TOPIC_PATH) gax_api = _GAXPublisherAPI(_create_topic_response=topic_pb) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) resource = api.topic_create(self.TOPIC_PATH) @@ -127,7 +128,8 @@ def test_topic_create(self): def test_topic_create_already_exists(self): from google.cloud.exceptions import Conflict gax_api = _GAXPublisherAPI(_create_topic_conflict=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(Conflict): api.topic_create(self.TOPIC_PATH) @@ -139,7 +141,8 @@ def test_topic_create_already_exists(self): def test_topic_create_error(self): from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.topic_create(self.TOPIC_PATH) @@ -151,7 +154,8 @@ def test_topic_create_error(self): def test_topic_get_hit(self): topic_pb = _TopicPB(self.TOPIC_PATH) gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) resource = 
api.topic_get(self.TOPIC_PATH) @@ -163,7 +167,8 @@ def test_topic_get_hit(self): def test_topic_get_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXPublisherAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.topic_get(self.TOPIC_PATH) @@ -175,7 +180,8 @@ def test_topic_get_miss(self): def test_topic_get_error(self): from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.topic_get(self.TOPIC_PATH) @@ -186,7 +192,8 @@ def test_topic_get_error(self): def test_topic_delete_hit(self): gax_api = _GAXPublisherAPI(_delete_topic_ok=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) api.topic_delete(self.TOPIC_PATH) @@ -197,7 +204,8 @@ def test_topic_delete_hit(self): def test_topic_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXPublisherAPI(_delete_topic_ok=False) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.topic_delete(self.TOPIC_PATH) @@ -209,7 +217,8 @@ def test_topic_delete_miss(self): def test_topic_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.topic_delete(self.TOPIC_PATH) @@ -226,7 +235,8 @@ def test_topic_publish_hit(self): MESSAGE = {'data': B64, 'attributes': {}} response = _PublishResponsePB([MSGID]) gax_api = _GAXPublisherAPI(_publish_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) resource = 
api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -245,7 +255,8 @@ def test_topic_publish_miss_w_attrs_w_bytes_payload(self): B64 = base64.b64encode(PAYLOAD) MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} gax_api = _GAXPublisherAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -264,7 +275,8 @@ def test_topic_publish_error(self): B64 = base64.b64encode(PAYLOAD).decode('ascii') MESSAGE = {'data': B64, 'attributes': {}} gax_api = _GAXPublisherAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -282,7 +294,8 @@ def test_topic_list_subscriptions_no_paging(self): response = _GAXPageIterator([ {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], None) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) subscriptions, next_token = api.topic_list_subscriptions( self.TOPIC_PATH) @@ -308,7 +321,8 @@ def test_topic_list_subscriptions_with_paging(self): response = _GAXPageIterator([ {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], NEW_TOKEN) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) subscriptions, next_token = api.topic_list_subscriptions( self.TOPIC_PATH, page_size=SIZE, page_token=TOKEN) @@ -330,7 +344,8 @@ def test_topic_list_subscriptions_miss(self): from google.gax import INITIAL_PAGE from google.cloud.exceptions import NotFound gax_api = _GAXPublisherAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): 
api.topic_list_subscriptions(self.TOPIC_PATH) @@ -345,7 +360,8 @@ def test_topic_list_subscriptions_error(self): from google.gax import INITIAL_PAGE from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.topic_list_subscriptions(self.TOPIC_PATH) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 54f9b68599c7..5b12aee800e3 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -74,8 +74,9 @@ def _generated_api(*args, **kw): class _GaxPublisherAPI(object): - def __init__(self, _wrapped): + def __init__(self, _wrapped, client): self._wrapped = _wrapped + self._client = client creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) @@ -88,6 +89,7 @@ def __init__(self, _wrapped): self.assertIsInstance(api, _GaxPublisherAPI) self.assertIs(api._wrapped, wrapped) + self.assertIs(api._client, client) # API instance is cached again = client.publisher_api self.assertIs(again, api) diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py index 6caaf764639a..ed52d292ce8f 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_connection.py +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -101,7 +101,9 @@ def _makeOne(self, *args, **kw): def test_ctor(self): connection = _Connection() - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) + self.assertIs(api._client, client) self.assertIs(api._connection, connection) def test_list_topics_no_paging(self): @@ -109,11 +111,10 @@ def test_list_topics_no_paging(self): returned = {'topics': [{'name': self.TOPIC_PATH}]} connection 
= _Connection(returned) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) iterator = api.list_topics(self.PROJECT) - # Add back the client to support API requests. - iterator.client = _Client(connection, self.PROJECT) topics = list(iterator) next_token = iterator.next_page_token @@ -141,12 +142,11 @@ def test_list_topics_with_paging(self): 'nextPageToken': 'TOKEN2', } connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) iterator = api.list_topics( self.PROJECT, page_token=TOKEN1, page_size=SIZE) - # Add back the client to support API requests. - iterator.client = _Client(connection, self.PROJECT) page = six.next(iterator.pages) topics = list(page) next_token = iterator.next_page_token @@ -167,11 +167,10 @@ def test_list_topics_with_paging(self): def test_list_topics_missing_key(self): returned = {} connection = _Connection(returned) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) iterator = api.list_topics(self.PROJECT) - # Add back the client to support API requests. 
- iterator.client = _Client(connection, self.PROJECT) topics = list(iterator) next_token = iterator.next_page_token @@ -186,7 +185,8 @@ def test_list_topics_missing_key(self): def test_topic_create(self): RETURNED = {'name': self.TOPIC_PATH} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) resource = api.topic_create(self.TOPIC_PATH) @@ -199,7 +199,8 @@ def test_topic_create_already_exists(self): from google.cloud.exceptions import Conflict connection = _Connection() connection._no_response_error = Conflict - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) with self.assertRaises(Conflict): api.topic_create(self.TOPIC_PATH) @@ -211,7 +212,8 @@ def test_topic_create_already_exists(self): def test_topic_get_hit(self): RETURNED = {'name': self.TOPIC_PATH} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) resource = api.topic_get(self.TOPIC_PATH) @@ -223,7 +225,8 @@ def test_topic_get_hit(self): def test_topic_get_miss(self): from google.cloud.exceptions import NotFound connection = _Connection() - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) with self.assertRaises(NotFound): api.topic_get(self.TOPIC_PATH) @@ -235,7 +238,8 @@ def test_topic_get_miss(self): def test_topic_delete_hit(self): RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) api.topic_delete(self.TOPIC_PATH) @@ -246,7 +250,8 @@ def test_topic_delete_hit(self): def test_topic_delete_miss(self): from google.cloud.exceptions import NotFound connection = _Connection() - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) with 
self.assertRaises(NotFound): api.topic_delete(self.TOPIC_PATH) @@ -264,7 +269,8 @@ def test_topic_publish_hit(self): B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} RETURNED = {'messageIds': [MSGID]} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -282,7 +288,8 @@ def test_topic_publish_miss(self): PAYLOAD = b'This is the message text' MESSAGE = {'data': PAYLOAD, 'attributes': {}} connection = _Connection() - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) with self.assertRaises(NotFound): api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -297,7 +304,8 @@ def test_topic_list_subscriptions_no_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} RETURNED = {'subscriptions': [SUB_INFO]} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) subscriptions, next_token = api.topic_list_subscriptions( self.TOPIC_PATH) @@ -324,7 +332,8 @@ def test_topic_list_subscriptions_with_paging(self): 'nextPageToken': 'TOKEN2', } connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) subscriptions, next_token = api.topic_list_subscriptions( self.TOPIC_PATH, page_token=TOKEN1, page_size=SIZE) @@ -345,7 +354,8 @@ def test_topic_list_subscriptions_with_paging(self): def test_topic_list_subscriptions_missing_key(self): RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) subscriptions, next_token = api.topic_list_subscriptions( self.TOPIC_PATH) @@ -361,7 +371,8 @@ def test_topic_list_subscriptions_missing_key(self): def test_topic_list_subscriptions_miss(self): from 
google.cloud.exceptions import NotFound connection = _Connection() - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) with self.assertRaises(NotFound): api.topic_list_subscriptions(self.TOPIC_PATH) From df68bf37b290580590512b6812ff532c5d584499 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 12:44:53 -0700 Subject: [PATCH 0023/1197] Renaming Iterator to HTTPIterator. --- .../google-cloud-pubsub/google/cloud/pubsub/_gax.py | 8 ++++---- .../google/cloud/pubsub/connection.py | 9 +++++---- packages/google-cloud-pubsub/unit_tests/test_client.py | 10 ++++------ 3 files changed, 13 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 1a39c0078b69..06b8cec1faeb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -31,7 +31,7 @@ from google.cloud._helpers import _pb_timestamp_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -from google.cloud.iterator import Iterator +from google.cloud.iterator import HTTPIterator from google.cloud.iterator import Page from google.cloud.pubsub.topic import Topic @@ -83,9 +83,9 @@ def list_topics(self, project, page_size=0, page_token=None): path, page_size=page_size, options=options) page_iter = functools.partial(_recast_page_iterator, page_iter) - return Iterator(client=self._client, path=path, - item_to_value=_item_to_topic, - page_iter=page_iter) + return HTTPIterator( + client=self._client, path=path, item_to_value=_item_to_topic, + page_iter=page_iter) def topic_create(self, topic_path): """API call: create a topic diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 722b28102dfa..53f57f6c7435 100644 --- 
a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -19,7 +19,7 @@ from google.cloud import connection as base_connection from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.iterator import Iterator +from google.cloud.iterator import HTTPIterator from google.cloud.pubsub.topic import Topic @@ -134,9 +134,10 @@ def list_topics(self, project, page_size=None, page_token=None): extra_params['pageSize'] = page_size path = '/projects/%s/topics' % (project,) - return Iterator(client=self._client, path=path, - items_key='topics', item_to_value=_item_to_topic, - page_token=page_token, extra_params=extra_params) + return HTTPIterator( + client=self._client, path=path, item_to_value=_item_to_topic, + items_key='topics', page_token=page_token, + extra_params=extra_params) def topic_create(self, topic_path): """API call: create a topic diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 5b12aee800e3..4f4d597591fa 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -79,12 +79,11 @@ def __init__(self, _wrapped, client): self._client = client creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - with _Monkey(MUT, - _USE_GAX=True, + with _Monkey(MUT, _USE_GAX=True, make_gax_publisher_api=_generated_api, GAXPublisherAPI=_GaxPublisherAPI): + client = self._makeOne(project=self.PROJECT, credentials=creds) api = client.publisher_api self.assertIsInstance(api, _GaxPublisherAPI) @@ -131,12 +130,11 @@ def __init__(self, _wrapped): self._wrapped = _wrapped creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - with _Monkey(MUT, - _USE_GAX=True, + with _Monkey(MUT, _USE_GAX=True, make_gax_subscriber_api=_generated_api, GAXSubscriberAPI=_GaxSubscriberAPI): 
+ client = self._makeOne(project=self.PROJECT, credentials=creds) api = client.subscriber_api self.assertIsInstance(api, _GaxSubscriberAPI) From ce11de382c02c5460ffd5c49877d5e23e014eea8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:00:10 -0700 Subject: [PATCH 0024/1197] Removing items key usage from Page helper. This was HTTP/JSON specific and belongs in the HTTP subclass. --- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 06b8cec1faeb..2fd5810b6582 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -36,9 +36,6 @@ from google.cloud.pubsub.topic import Topic -_FAKE_ITEMS_KEY = 'not-a-key' - - class _PublisherAPI(object): """Helper mapping publisher-related APIs. @@ -589,9 +586,7 @@ def _recast_page_iterator(page_iter, iterator): :param iterator: The iterator that owns each page. """ for items in page_iter: - fake_response = {_FAKE_ITEMS_KEY: items} - page = Page( - iterator, fake_response, _FAKE_ITEMS_KEY, _item_to_topic) + page = Page(iterator, items, _item_to_topic) iterator.next_page_token = page_iter.page_token or None iterator.num_results += page.num_items yield page From 632c1993ce838f29c1a0712c5acf76a77659f499 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:43:27 -0700 Subject: [PATCH 0025/1197] Adding GAXIterator._wrap_gax for wrapping the GAX iterator. Also updating the _GAXPageIterator mock to allow multiple pages. 
--- .../unit_tests/test__gax.py | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 907bdecb5c89..06e2b87adb78 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -61,7 +61,8 @@ def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic TOKEN = 'TOKEN' - response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], TOKEN) + response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], + page_token=TOKEN) gax_api = _GAXPublisherAPI(_list_topics_response=response) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) @@ -90,7 +91,7 @@ def test_list_topics_with_paging(self): TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' response = _GAXPageIterator( - [_TopicPB(self.TOPIC_PATH)], NEW_TOKEN) + [_TopicPB(self.TOPIC_PATH)], page_token=NEW_TOKEN) gax_api = _GAXPublisherAPI(_list_topics_response=response) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) @@ -291,8 +292,8 @@ def test_topic_publish_error(self): def test_topic_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE from google.cloud._testing import _GAXPageIterator - response = _GAXPageIterator([ - {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], None) + response = _GAXPageIterator( + [{'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}]) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) @@ -318,8 +319,9 @@ def test_topic_list_subscriptions_with_paging(self): SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator([ - {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], NEW_TOKEN) + response = _GAXPageIterator( + [{'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], + page_token=NEW_TOKEN) gax_api = 
_GAXPublisherAPI(_list_topic_subscriptions_response=response) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) @@ -390,8 +392,10 @@ def test_ctor(self): def test_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE from google.cloud._testing import _GAXPageIterator - response = _GAXPageIterator([_SubscriptionPB( - self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0)], None) + + sub_pb = _SubscriptionPB( + self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0) + response = _GAXPageIterator([sub_pb]) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) api = self._makeOne(gax_api) @@ -417,8 +421,9 @@ def test_list_subscriptions_with_paging(self): SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator([_SubscriptionPB( - self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0)], NEW_TOKEN) + sub_pb = _SubscriptionPB( + self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0) + response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) api = self._makeOne(gax_api) From 6294d5a592eaa9afbebc91f5955dafb51ca809b5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 16:54:18 -0700 Subject: [PATCH 0026/1197] Moving pubsub _gax impl. of list_topics to GAXIterator. 
--- .../google/cloud/pubsub/_gax.py | 30 ++----------------- 1 file changed, 2 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 2fd5810b6582..fcc56838bff7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -14,8 +14,6 @@ """GAX wrapper for Pubsub API requests.""" -import functools - from google.cloud.gapic.pubsub.v1.publisher_api import PublisherApi from google.cloud.gapic.pubsub.v1.subscriber_api import SubscriberApi from google.gax import CallOptions @@ -31,7 +29,7 @@ from google.cloud._helpers import _pb_timestamp_to_rfc3339 from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -from google.cloud.iterator import HTTPIterator +from google.cloud.iterator import GAXIterator from google.cloud.iterator import Page from google.cloud.pubsub.topic import Topic @@ -78,11 +76,8 @@ def list_topics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_topics( path, page_size=page_size, options=options) - page_iter = functools.partial(_recast_page_iterator, page_iter) - return HTTPIterator( - client=self._client, path=path, item_to_value=_item_to_topic, - page_iter=page_iter) + return GAXIterator(self._client, page_iter, _item_to_topic) def topic_create(self, topic_path): """API call: create a topic @@ -569,24 +564,3 @@ def _item_to_topic(iterator, resource): """ return Topic.from_api_repr( {'name': resource.name}, iterator.client) - - -def _recast_page_iterator(page_iter, iterator): - """Wrap GAX pages generator. - - In particular, wrap each page and capture some state from the - GAX iterator. - - Yields :class:`~google.cloud.iterator.Page` instances - - :type page_iter: :class:`~google.gax.PageIterator` - :param page_iter: The iterator to wrap. 
- - :type iterator: :class:`~google.cloud.iterator.Iterator` - :param iterator: The iterator that owns each page. - """ - for items in page_iter: - page = Page(iterator, items, _item_to_topic) - iterator.next_page_token = page_iter.page_token or None - iterator.num_results += page.num_items - yield page From 640bf2f4744fb95d804260c1bd3587a77854f38b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 17:23:20 -0700 Subject: [PATCH 0027/1197] Removing page_token from GAXIterator constructor. Instead, using the page token directly from the page iterator passed in (this may occasionally be strange to a user, e.g. if the token is INITIAL_PAGE). --- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index fcc56838bff7..a392cc679bf5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -77,7 +77,11 @@ def list_topics(self, project, page_size=0, page_token=None): page_iter = self._gax_api.list_topics( path, page_size=page_size, options=options) - return GAXIterator(self._client, page_iter, _item_to_topic) + iter_kwargs = {} + if page_size: # page_size can be 0 or explicit None. + iter_kwargs['max_results'] = page_size + return GAXIterator(self._client, page_iter, _item_to_topic, + **iter_kwargs) def topic_create(self, topic_path): """API call: create a topic From 69668ca3fef0082f0025b0fd7a5ff8f258b8a8d3 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 18:36:58 -0700 Subject: [PATCH 0028/1197] Unifying Iterator paging via _next_page(). Also a lint fix for an unimorted member and a unit test fix adding a page token to allow more paging. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index a392cc679bf5..b6b357cbdc04 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -30,7 +30,6 @@ from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator -from google.cloud.iterator import Page from google.cloud.pubsub.topic import Topic From c611c6230509d45e0213ebf67dda4202a46298c4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 25 Oct 2016 23:38:34 -0700 Subject: [PATCH 0029/1197] Converting Pub/Sub topic->list_subscriptions to iterator. --- .../google/cloud/pubsub/_gax.py | 52 +++++++--- .../google/cloud/pubsub/connection.py | 48 ++++++---- .../google/cloud/pubsub/topic.py | 18 +--- .../unit_tests/test__gax.py | 57 +++++++---- .../unit_tests/test_connection.py | 67 ++++++++----- .../unit_tests/test_topic.py | 96 ++++++++++++++----- 6 files changed, 231 insertions(+), 107 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index b6b357cbdc04..0cd2e485010d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -30,6 +30,8 @@ from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator +from google.cloud.pubsub._helpers import subscription_name_from_path +from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic @@ -175,16 +177,14 @@ def topic_publish(self, topic_path, messages): raise return result.message_ids - def topic_list_subscriptions(self, topic_path, page_size=0, - 
page_token=None): + def topic_list_subscriptions(self, topic, page_size=0, page_token=None): """API call: list subscriptions bound to a topic See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. + :type topic: :class:`~google.cloud.pubsub.topic.Topic` + :param topic: The topic that owns the subscriptions. :type page_size: int :param page_size: maximum number of subscriptions to return, If not @@ -195,15 +195,17 @@ def topic_list_subscriptions(self, topic_path, page_size=0, If not passed, the API will return the first page of subscriptions. - :rtype: list of strings - :returns: fully-qualified names of subscriptions for the supplied - topic. - :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.pubsub.subscription.Subscription` + accessible to the current API. + :raises: :exc:`~google.cloud.exceptions.NotFound` if the topic does + not exist. """ if page_token is None: page_token = INITIAL_PAGE options = CallOptions(page_token=page_token) + topic_path = topic.full_name try: page_iter = self._gax_api.list_topic_subscriptions( topic_path, page_size=page_size, options=options) @@ -211,9 +213,14 @@ def topic_list_subscriptions(self, topic_path, page_size=0, if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(topic_path) raise - subs = page_iter.next() - token = page_iter.page_token or None - return subs, token + + iter_kwargs = {} + if page_size: # page_size can be 0 or explicit None. 
+ iter_kwargs['max_results'] = page_size + iterator = GAXIterator(self._client, page_iter, + _item_to_subscription, **iter_kwargs) + iterator.topic = topic + return iterator class _SubscriberAPI(object): @@ -554,7 +561,7 @@ def make_gax_subscriber_api(connection): def _item_to_topic(iterator, resource): - """Convert a JSON job to the native object. + """Convert a protobuf topic to the native object. :type iterator: :class:`~google.cloud.iterator.Iterator` :param iterator: The iterator that is currently in use. @@ -567,3 +574,20 @@ def _item_to_topic(iterator, resource): """ return Topic.from_api_repr( {'name': resource.name}, iterator.client) + + +def _item_to_subscription(iterator, subscription_path): + """Convert a subscription name to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type subscription_path: str + :param subscription_path: Subscription path returned from the API. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: The next subscription in the page. 
+ """ + subscription_name = subscription_name_from_path( + subscription_path, iterator.client.project) + return Subscription(subscription_name, iterator.topic) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 53f57f6c7435..44f4cc048206 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -20,6 +20,8 @@ from google.cloud import connection as base_connection from google.cloud.environment_vars import PUBSUB_EMULATOR from google.cloud.iterator import HTTPIterator +from google.cloud.pubsub._helpers import subscription_name_from_path +from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic @@ -207,16 +209,14 @@ def topic_publish(self, topic_path, messages): method='POST', path='/%s:publish' % (topic_path,), data=data) return response['messageIds'] - def topic_list_subscriptions(self, topic_path, page_size=None, - page_token=None): + def topic_list_subscriptions(self, topic, page_size=None, page_token=None): """API call: list subscriptions bound to a topic See: https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. + :type topic: :class:`~google.cloud.pubsub.topic.Topic` + :param topic: The topic that owns the subscriptions. :type page_size: int :param page_size: maximum number of subscriptions to return, If not @@ -231,18 +231,17 @@ def topic_list_subscriptions(self, topic_path, page_size=None, :returns: fully-qualified names of subscriptions for the supplied topic. 
""" - conn = self._connection - params = {} - + extra_params = {} if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token + extra_params['pageSize'] = page_size + path = '/%s/subscriptions' % (topic.full_name,) - path = '/%s/subscriptions' % (topic_path,) - resp = conn.api_request(method='GET', path=path, query_params=params) - return resp.get('subscriptions', ()), resp.get('nextPageToken') + iterator = HTTPIterator( + client=self._client, path=path, + item_to_value=_item_to_subscription, items_key='subscriptions', + page_token=page_token, extra_params=extra_params) + iterator.topic = topic + return iterator class _SubscriberAPI(object): @@ -577,7 +576,7 @@ def _transform_messages_base64(messages, transform, key=None): def _item_to_topic(iterator, resource): - """Convert a JSON job to the native object. + """Convert a JSON topic to the native object. :type iterator: :class:`~google.cloud.iterator.Iterator` :param iterator: The iterator that is currently in use. @@ -589,3 +588,20 @@ def _item_to_topic(iterator, resource): :returns: The next topic in the page. """ return Topic.from_api_repr(resource, iterator.client) + + +def _item_to_subscription(iterator, subscription_path): + """Convert a subscription name to the native object. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type subscription_path: str + :param subscription_path: Subscription path returned from the API. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: The next subscription in the page. 
+ """ + subscription_name = subscription_name_from_path( + subscription_path, iterator.client.project) + return Subscription(subscription_name, iterator.topic) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 9b24a3dd9928..12c6c3a68450 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -17,7 +17,6 @@ from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _NOW from google.cloud.exceptions import NotFound -from google.cloud.pubsub._helpers import subscription_name_from_path from google.cloud.pubsub._helpers import topic_name_from_path from google.cloud.pubsub.iam import Policy from google.cloud.pubsub.subscription import Subscription @@ -306,21 +305,14 @@ def list_subscriptions(self, page_size=None, page_token=None, client=None): :param client: the client to use. If not passed, falls back to the ``client`` stored on the current topic. - :rtype: tuple, (list, str) - :returns: list of :class:`~.pubsub.subscription.Subscription`, - plus a "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.pubsub.subscription.Subscription` + accessible to the current topic. """ client = self._require_client(client) api = client.publisher_api - sub_paths, next_token = api.topic_list_subscriptions( - self.full_name, page_size, page_token) - subscriptions = [] - for sub_path in sub_paths: - sub_name = subscription_name_from_path(sub_path, self.project) - subscriptions.append(Subscription(sub_name, self)) - return subscriptions, next_token + return api.topic_list_subscriptions(self, page_size, page_token) def get_iam_policy(self, client=None): """Fetch the IAM policy for the topic. 
diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 06e2b87adb78..52f6b051466b 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -292,21 +292,28 @@ def test_topic_publish_error(self): def test_topic_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE from google.cloud._testing import _GAXPageIterator - response = _GAXPageIterator( - [{'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}]) + from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic + + local_sub_path = '%s/subscriptions/%s' % ( + self.PROJECT_PATH, self.SUB_NAME) + response = _GAXPageIterator([local_sub_path]) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) - subscriptions, next_token = api.topic_list_subscriptions( - self.TOPIC_PATH) + topic = Topic(self.TOPIC_NAME, client) + iterator = api.topic_list_subscriptions(topic) + subscriptions = list(iterator) + next_token = iterator.next_page_token + self.assertIsNone(next_token) self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertIsNone(next_token) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertEqual(subscription.topic, topic) + self.assertIs(subscription._client, client) topic_path, page_size, options = ( gax_api._list_topic_subscriptions_called_with) @@ -316,25 +323,33 @@ def test_topic_list_subscriptions_no_paging(self): def test_topic_list_subscriptions_with_paging(self): from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.subscription import Subscription + from 
google.cloud.pubsub.topic import Topic + SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' + local_sub_path = '%s/subscriptions/%s' % ( + self.PROJECT_PATH, self.SUB_NAME) response = _GAXPageIterator( - [{'name': self.SUB_PATH, 'topic': self.TOPIC_PATH}], - page_token=NEW_TOKEN) + [local_sub_path], page_token=NEW_TOKEN) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) - subscriptions, next_token = api.topic_list_subscriptions( - self.TOPIC_PATH, page_size=SIZE, page_token=TOKEN) + topic = Topic(self.TOPIC_NAME, client) + iterator = api.topic_list_subscriptions( + topic, page_size=SIZE, page_token=TOKEN) + subscriptions = list(iterator) + next_token = iterator.next_page_token + self.assertEqual(next_token, NEW_TOKEN) self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertEqual(next_token, NEW_TOKEN) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertEqual(subscription.topic, topic) + self.assertIs(subscription._client, client) name, page_size, options = ( gax_api._list_topic_subscriptions_called_with) @@ -345,12 +360,15 @@ def test_topic_list_subscriptions_with_paging(self): def test_topic_list_subscriptions_miss(self): from google.gax import INITIAL_PAGE from google.cloud.exceptions import NotFound + from google.cloud.pubsub.topic import Topic + gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): - api.topic_list_subscriptions(self.TOPIC_PATH) + topic = Topic(self.TOPIC_NAME, client) + api.topic_list_subscriptions(topic) topic_path, page_size, options = ( gax_api._list_topic_subscriptions_called_with) @@ -361,12 +379,15 @@ def 
test_topic_list_subscriptions_miss(self): def test_topic_list_subscriptions_error(self): from google.gax import INITIAL_PAGE from google.gax.errors import GaxError + from google.cloud.pubsub.topic import Topic + gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): - api.topic_list_subscriptions(self.TOPIC_PATH) + topic = Topic(self.TOPIC_NAME, client) + api.topic_list_subscriptions(topic) topic_path, page_size, options = ( gax_api._list_topic_subscriptions_called_with) diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py index ed52d292ce8f..28a63025c6d7 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_connection.py +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -301,21 +301,28 @@ def test_topic_publish_miss(self): {'messages': [MESSAGE]}) def test_topic_list_subscriptions_no_paging(self): - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = {'subscriptions': [SUB_INFO]} + from google.cloud.pubsub.topic import Topic + from google.cloud.pubsub.subscription import Subscription + + local_sub_path = 'projects/%s/subscriptions/%s' % ( + self.PROJECT, self.SUB_NAME) + RETURNED = {'subscriptions': [local_sub_path]} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) api = self._makeOne(client) - subscriptions, next_token = api.topic_list_subscriptions( - self.TOPIC_PATH) + topic = Topic(self.TOPIC_NAME, client) + iterator = api.topic_list_subscriptions(topic) + subscriptions = list(iterator) + next_token = iterator.next_page_token + self.assertIsNone(next_token) self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - 
self.assertIsNone(next_token) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertEqual(subscription.topic, topic) + self.assertIs(subscription._client, client) self.assertEqual(connection._called_with['method'], 'GET') path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) @@ -323,27 +330,37 @@ def test_topic_list_subscriptions_no_paging(self): self.assertEqual(connection._called_with['query_params'], {}) def test_topic_list_subscriptions_with_paging(self): + import six + from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic + TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} + local_sub_path = 'projects/%s/subscriptions/%s' % ( + self.PROJECT, self.SUB_NAME) RETURNED = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': 'TOKEN2', + 'subscriptions': [local_sub_path], + 'nextPageToken': TOKEN2, } connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) api = self._makeOne(client) - subscriptions, next_token = api.topic_list_subscriptions( - self.TOPIC_PATH, page_token=TOKEN1, page_size=SIZE) + topic = Topic(self.TOPIC_NAME, client) + iterator = api.topic_list_subscriptions( + topic, page_token=TOKEN1, page_size=SIZE) + page = six.next(iterator.pages) + subscriptions = list(page) + next_token = iterator.next_page_token + self.assertEqual(next_token, TOKEN2) self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN2) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertEqual(subscription.topic, topic) + self.assertIs(subscription._client, client) self.assertEqual(connection._called_with['method'], 'GET') 
path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) @@ -352,13 +369,16 @@ def test_topic_list_subscriptions_with_paging(self): {'pageToken': TOKEN1, 'pageSize': SIZE}) def test_topic_list_subscriptions_missing_key(self): - RETURNED = {} - connection = _Connection(RETURNED) + from google.cloud.pubsub.topic import Topic + + connection = _Connection({}) client = _Client(connection, self.PROJECT) api = self._makeOne(client) - subscriptions, next_token = api.topic_list_subscriptions( - self.TOPIC_PATH) + topic = Topic(self.TOPIC_NAME, client) + iterator = api.topic_list_subscriptions(topic) + subscriptions = list(iterator) + next_token = iterator.next_page_token self.assertEqual(len(subscriptions), 0) self.assertIsNone(next_token) @@ -370,12 +390,15 @@ def test_topic_list_subscriptions_missing_key(self): def test_topic_list_subscriptions_miss(self): from google.cloud.exceptions import NotFound + from google.cloud.pubsub.topic import Topic + connection = _Connection() client = _Client(connection, self.PROJECT) api = self._makeOne(client) with self.assertRaises(NotFound): - api.topic_list_subscriptions(self.TOPIC_PATH) + topic = Topic(self.TOPIC_NAME, client) + list(api.topic_list_subscriptions(topic)) self.assertEqual(connection._called_with['method'], 'GET') path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 1786ed477d4e..b67d6ac3ce66 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -309,7 +309,13 @@ def test_subscription(self): self.assertIs(subscription.topic, topic) def test_list_subscriptions_no_paging(self): + import six + from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription + + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + SUB_NAME_1 = 'subscription_1' SUB_PATH_1 = 
'projects/%s/subscriptions/%s' % ( self.PROJECT, SUB_NAME_1) @@ -319,12 +325,18 @@ def test_list_subscriptions_no_paging(self): SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] TOKEN = 'TOKEN' - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_list_subscriptions_response = SUBS_LIST, TOKEN + returned = { + 'subscriptions': SUBS_LIST, + 'nextPageToken': TOKEN, + } + client.connection = _Connection(returned) + topic = self._makeOne(self.TOPIC_NAME, client=client) - subscriptions, next_page_token = topic.list_subscriptions() + iterator = topic.list_subscriptions() + page = six.next(iterator.pages) + subscriptions = list(page) + next_page_token = iterator.next_page_token self.assertEqual(len(subscriptions), 2) @@ -339,11 +351,21 @@ def test_list_subscriptions_no_paging(self): self.assertIs(subscription.topic, topic) self.assertEqual(next_page_token, TOKEN) - self.assertEqual(api._topic_listed, - (self.TOPIC_PATH, None, None)) + # Verify the mock. + called_with = client.connection._called_with + self.assertEqual(len(called_with), 3) + self.assertEqual(called_with['method'], 'GET') + path = '/%s/subscriptions' % (self.TOPIC_PATH,) + self.assertEqual(called_with['path'], path) + self.assertEqual(called_with['query_params'], {}) def test_list_subscriptions_with_paging(self): + from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription + + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + SUB_NAME_1 = 'subscription_1' SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( self.PROJECT, SUB_NAME_1) @@ -354,13 +376,17 @@ def test_list_subscriptions_with_paging(self): PAGE_SIZE = 10 TOKEN = 'TOKEN' - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_list_subscriptions_response = SUBS_LIST, None + returned = { + 'subscriptions': SUBS_LIST, + } + client.connection = _Connection(returned) + topic = 
self._makeOne(self.TOPIC_NAME, client=client) - subscriptions, next_page_token = topic.list_subscriptions( + iterator = topic.list_subscriptions( page_size=PAGE_SIZE, page_token=TOKEN) + subscriptions = list(iterator) + next_page_token = iterator.next_page_token self.assertEqual(len(subscriptions), 2) @@ -375,22 +401,36 @@ def test_list_subscriptions_with_paging(self): self.assertIs(subscription.topic, topic) self.assertIsNone(next_page_token) - self.assertEqual(api._topic_listed, - (self.TOPIC_PATH, PAGE_SIZE, TOKEN)) + # Verify the mock. + called_with = client.connection._called_with + self.assertEqual(len(called_with), 3) + self.assertEqual(called_with['method'], 'GET') + path = '/%s/subscriptions' % (self.TOPIC_PATH,) + self.assertEqual(called_with['path'], path) + self.assertEqual(called_with['query_params'], + {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) def test_list_subscriptions_missing_key(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_list_subscriptions_response = (), None + from google.cloud.pubsub.client import Client + + client = Client(project=self.PROJECT, credentials=object(), + use_gax=False) + client.connection = _Connection({}) topic = self._makeOne(self.TOPIC_NAME, client=client) - subscriptions, next_page_token = topic.list_subscriptions() + iterator = topic.list_subscriptions() + subscriptions = list(iterator) + next_page_token = iterator.next_page_token self.assertEqual(len(subscriptions), 0) self.assertIsNone(next_page_token) - - self.assertEqual(api._topic_listed, - (self.TOPIC_PATH, None, None)) + # Verify the mock. 
+ called_with = client.connection._called_with + self.assertEqual(len(called_with), 3) + self.assertEqual(called_with['method'], 'GET') + path = '/%s/subscriptions' % (self.TOPIC_PATH,) + self.assertEqual(called_with['path'], path) + self.assertEqual(called_with['query_params'], {}) def test_get_iam_policy_w_bound_client(self): from google.cloud.pubsub.iam import ( @@ -749,11 +789,6 @@ def topic_publish(self, topic_path, messages): self._api_called += 1 return self._topic_publish_response - def topic_list_subscriptions(self, topic_path, page_size=None, - page_token=None): - self._topic_listed = topic_path, page_size, page_token - return self._topic_list_subscriptions_response - class _FauxIAMPolicy(object): @@ -793,3 +828,16 @@ def __init__(self, project): class _Bugout(Exception): pass + + +class _Connection(object): + + _called_with = None + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + self._called_with = kw + response, self._responses = self._responses[0], self._responses[1:] + return response From 9017dbc7eba1f4122b2dc9e7ae0ff89cf415729a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 09:38:03 -0700 Subject: [PATCH 0030/1197] Adding client to GAX _SubscriberAPI constructor. Also replacing _subscription_pb_to_mapping with MessageToDict. 
--- .../google/cloud/pubsub/_gax.py | 31 ++-- .../google/cloud/pubsub/client.py | 4 +- .../google/cloud/pubsub/connection.py | 9 +- .../unit_tests/test__gax.py | 138 +++++++++++------- .../unit_tests/test_client.py | 4 +- .../unit_tests/test_connection.py | 40 +++-- 6 files changed, 130 insertions(+), 96 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 0cd2e485010d..54f5f800bc4c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -20,6 +20,7 @@ from google.gax import INITIAL_PAGE from google.gax.errors import GaxError from google.gax.grpc import exc_to_code +from google.protobuf.json_format import MessageToDict from google.pubsub.v1.pubsub_pb2 import PubsubMessage from google.pubsub.v1.pubsub_pb2 import PushConfig from grpc import insecure_channel @@ -228,9 +229,13 @@ class _SubscriberAPI(object): :type gax_api: :class:`google.pubsub.v1.publisher_api.SubscriberApi` :param gax_api: API object used to make GAX requests. + + :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: The client that owns this API object. """ - def __init__(self, gax_api): + def __init__(self, gax_api, client): self._gax_api = gax_api + self._client = client def list_subscriptions(self, project, page_size=0, page_token=None): """List subscriptions for the project associated with this API. 
@@ -262,7 +267,7 @@ def list_subscriptions(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_subscriptions( path, page_size=page_size, options=options) - subscriptions = [_subscription_pb_to_mapping(sub_pb) + subscriptions = [MessageToDict(sub_pb) for sub_pb in page_iter.next()] token = page_iter.page_token or None return subscriptions, token @@ -313,7 +318,7 @@ def subscription_create(self, subscription_path, topic_path, if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: raise Conflict(topic_path) raise - return _subscription_pb_to_mapping(sub_pb) + return MessageToDict(sub_pb) def subscription_get(self, subscription_path): """API call: retrieve a subscription @@ -335,7 +340,7 @@ def subscription_get(self, subscription_path): if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(subscription_path) raise - return _subscription_pb_to_mapping(sub_pb) + return MessageToDict(sub_pb) def subscription_delete(self, subscription_path): """API call: delete a subscription @@ -474,24 +479,6 @@ def _message_pb_from_mapping(message): attributes=message['attributes']) -def _subscription_pb_to_mapping(sub_pb): - """Helper for :meth:`list_subscriptions`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. 
- """ - mapping = { - 'name': sub_pb.name, - 'topic': sub_pb.topic, - 'ackDeadlineSeconds': sub_pb.ack_deadline_seconds, - } - if sub_pb.push_config.push_endpoint != '': - mapping['pushConfig'] = { - 'pushEndpoint': sub_pb.push_config.push_endpoint, - } - return mapping - - def _message_pb_to_mapping(message_pb): """Helper for :meth:`pull`, et aliae diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 8c7272266660..d720ee9c73a6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -98,9 +98,9 @@ def subscriber_api(self): if self._subscriber_api is None: if self._use_gax: generated = make_gax_subscriber_api(self.connection) - self._subscriber_api = GAXSubscriberAPI(generated) + self._subscriber_api = GAXSubscriberAPI(generated, self) else: - self._subscriber_api = JSONSubscriberAPI(self.connection) + self._subscriber_api = JSONSubscriberAPI(self) return self._subscriber_api @property diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 44f4cc048206..030f094494e9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -247,12 +247,13 @@ def topic_list_subscriptions(self, topic, page_size=None, page_token=None): class _SubscriberAPI(object): """Helper mapping subscriber-related APIs. - :type connection: :class:`Connection` - :param connection: the connection used to make API requests. + :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: the client used to make API requests. 
""" - def __init__(self, connection): - self._connection = connection + def __init__(self, client): + self._client = client + self._connection = client.connection def list_subscriptions(self, project, page_size=None, page_token=None): """API call: list subscriptions for a given project diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 52f6b051466b..7f9a9ff8e5c9 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -407,29 +407,36 @@ def _getTargetClass(self): def test_ctor(self): gax_api = _GAXSubscriberAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) self.assertIs(api._gax_api, gax_api) + self.assertIs(api._client, client) def test_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE + from google.pubsub.v1.pubsub_pb2 import PushConfig + from google.pubsub.v1.pubsub_pb2 import Subscription from google.cloud._testing import _GAXPageIterator - sub_pb = _SubscriptionPB( - self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0) + push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) + sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, + push_config=push_cfg_pb) response = _GAXPageIterator([sub_pb]) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) subscriptions, next_token = api.list_subscriptions(self.PROJECT) self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertEqual(subscription['pushConfig'], - {'pushEndpoint': self.PUSH_ENDPOINT}) - self.assertEqual(subscription['ackDeadlineSeconds'], 0) + 
self.assertEqual(subscription, { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'pushConfig': { + 'pushEndpoint': self.PUSH_ENDPOINT, + }, + }) self.assertIsNone(next_token) name, page_size, options = gax_api._list_subscriptions_called_with @@ -438,27 +445,33 @@ def test_list_subscriptions_no_paging(self): self.assertIs(options.page_token, INITIAL_PAGE) def test_list_subscriptions_with_paging(self): + from google.pubsub.v1.pubsub_pb2 import PushConfig + from google.pubsub.v1.pubsub_pb2 import Subscription from google.cloud._testing import _GAXPageIterator + SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' - sub_pb = _SubscriptionPB( - self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0) + push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) + sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, + push_config=push_cfg_pb) response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) subscriptions, next_token = api.list_subscriptions( self.PROJECT, page_size=SIZE, page_token=TOKEN) self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertEqual(subscription['pushConfig'], - {'pushEndpoint': self.PUSH_ENDPOINT}) - self.assertEqual(subscription['ackDeadlineSeconds'], 0) + self.assertEqual(subscription, { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'pushConfig': { + 'pushEndpoint': self.PUSH_ENDPOINT, + }, + }) self.assertEqual(next_token, NEW_TOKEN) name, page_size, options = gax_api._list_subscriptions_called_with @@ -467,16 +480,18 @@ def test_list_subscriptions_with_paging(self): self.assertEqual(options.page_token, TOKEN) def test_subscription_create(self): - sub_pb = 
_SubscriptionPB(self.SUB_PATH, self.TOPIC_PATH, '', 0) + from google.pubsub.v1.pubsub_pb2 import Subscription + + sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) expected = { 'name': self.SUB_PATH, 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': 0, } self.assertEqual(resource, expected) name, topic, push_config, ack_deadline, options = ( @@ -491,7 +506,8 @@ def test_subscription_create_already_exists(self): from google.cloud.exceptions import Conflict DEADLINE = 600 gax_api = _GAXSubscriberAPI(_create_subscription_conflict=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(Conflict): api.subscription_create( @@ -508,7 +524,8 @@ def test_subscription_create_already_exists(self): def test_subscription_create_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) @@ -522,17 +539,21 @@ def test_subscription_create_error(self): self.assertIsNone(options) def test_subscription_get_hit(self): - sub_pb = _SubscriptionPB( - self.SUB_PATH, self.TOPIC_PATH, self.PUSH_ENDPOINT, 0) + from google.pubsub.v1.pubsub_pb2 import PushConfig + from google.pubsub.v1.pubsub_pb2 import Subscription + + push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) + sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, + push_config=push_cfg_pb) gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) 
resource = api.subscription_get(self.SUB_PATH) expected = { 'name': self.SUB_PATH, 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': 0, 'pushConfig': { 'pushEndpoint': self.PUSH_ENDPOINT, }, @@ -545,7 +566,8 @@ def test_subscription_get_hit(self): def test_subscription_get_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.subscription_get(self.SUB_PATH) @@ -557,7 +579,8 @@ def test_subscription_get_miss(self): def test_subscription_get_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_get(self.SUB_PATH) @@ -568,7 +591,8 @@ def test_subscription_get_error(self): def test_subscription_delete_hit(self): gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) api.subscription_delete(self.TOPIC_PATH) @@ -579,7 +603,8 @@ def test_subscription_delete_hit(self): def test_subscription_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.subscription_delete(self.TOPIC_PATH) @@ -591,7 +616,8 @@ def test_subscription_delete_miss(self): def test_subscription_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_delete(self.TOPIC_PATH) @@ -602,7 +628,8 @@ def 
test_subscription_delete_error(self): def test_subscription_modify_push_config_hit(self): gax_api = _GAXSubscriberAPI(_modify_push_config_ok=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) @@ -614,7 +641,8 @@ def test_subscription_modify_push_config_hit(self): def test_subscription_modify_push_config_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.subscription_modify_push_config( @@ -628,7 +656,8 @@ def test_subscription_modify_push_config_miss(self): def test_subscription_modify_push_config_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_modify_push_config( @@ -645,6 +674,7 @@ def test_subscription_pull_explicit(self): from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_pb_timestamp from google.cloud._helpers import _datetime_to_rfc3339 + NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) NOW_PB = _datetime_to_pb_timestamp(NOW) NOW_RFC3339 = _datetime_to_rfc3339(NOW) @@ -662,7 +692,8 @@ def test_subscription_pull_explicit(self): message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB) response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) gax_api = _GAXSubscriberAPI(_pull_response=response_pb) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) MAX_MESSAGES = 10 received = api.subscription_pull( @@ -679,7 +710,8 @@ def test_subscription_pull_explicit(self): def test_subscription_pull_defaults_miss(self): from google.cloud.exceptions import 
NotFound gax_api = _GAXSubscriberAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.subscription_pull(self.SUB_PATH) @@ -694,7 +726,8 @@ def test_subscription_pull_defaults_miss(self): def test_subscription_pull_defaults_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_pull(self.SUB_PATH) @@ -710,7 +743,8 @@ def test_subscription_acknowledge_hit(self): ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -724,7 +758,8 @@ def test_subscription_acknowledge_miss(self): ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -739,7 +774,8 @@ def test_subscription_acknowledge_error(self): ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -754,7 +790,8 @@ def test_subscription_modify_ack_deadline_hit(self): ACK_ID2 = 'BEADCAFE' NEW_DEADLINE = 90 gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) api.subscription_modify_ack_deadline( self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) @@ -772,7 +809,8 @@ 
def test_subscription_modify_ack_deadline_miss(self): ACK_ID2 = 'BEADCAFE' NEW_DEADLINE = 90 gax_api = _GAXSubscriberAPI() - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(NotFound): api.subscription_modify_ack_deadline( @@ -791,7 +829,8 @@ def test_subscription_modify_ack_deadline_error(self): ACK_ID2 = 'BEADCAFE' NEW_DEADLINE = 90 gax_api = _GAXSubscriberAPI(_random_gax_error=True) - api = self._makeOne(gax_api) + client = _Client(self.PROJECT) + api = self._makeOne(gax_api, client) with self.assertRaises(GaxError): api.subscription_modify_ack_deadline( @@ -1085,15 +1124,6 @@ def __init__(self, received_messages): self.received_messages = received_messages -class _SubscriptionPB(object): - - def __init__(self, name, topic, push_endpoint, ack_deadline_seconds): - self.name = name - self.topic = topic - self.push_config = _PushConfigPB(push_endpoint) - self.ack_deadline_seconds = ack_deadline_seconds - - class _Connection(object): def __init__(self, in_emulator=False, host=None): diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 4f4d597591fa..de19a8761240 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -126,8 +126,9 @@ def _generated_api(*args, **kw): class _GaxSubscriberAPI(object): - def __init__(self, _wrapped): + def __init__(self, _wrapped, client): self._wrapped = _wrapped + self._client = client creds = _Credentials() @@ -139,6 +140,7 @@ def __init__(self, _wrapped): self.assertIsInstance(api, _GaxSubscriberAPI) self.assertIs(api._wrapped, wrapped) + self.assertIs(api._client, client) # API instance is cached again = client.subscriber_api self.assertIs(again, api) diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py index 
28a63025c6d7..a588dc693898 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_connection.py +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -417,14 +417,17 @@ def _makeOne(self, *args, **kw): def test_ctor(self): connection = _Connection() - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) self.assertIs(api._connection, connection) + self.assertIs(api._client, client) def test_list_subscriptions_no_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} RETURNED = {'subscriptions': [SUB_INFO]} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) subscriptions, next_token = api.list_subscriptions(self.PROJECT) @@ -450,7 +453,8 @@ def test_list_subscriptions_with_paging(self): 'nextPageToken': 'TOKEN2', } connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) subscriptions, next_token = api.list_subscriptions( self.PROJECT, page_token=TOKEN1, page_size=SIZE) @@ -471,7 +475,8 @@ def test_list_subscriptions_with_paging(self): def test_list_subscriptions_missing_key(self): RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) subscriptions, next_token = api.list_subscriptions(self.PROJECT) @@ -488,7 +493,8 @@ def test_subscription_create_defaults(self): RETURNED = RESOURCE.copy() RETURNED['name'] = self.SUB_PATH connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) @@ -511,7 +517,8 @@ def test_subscription_create_explicit(self): RETURNED = RESOURCE.copy() RETURNED['name'] = self.SUB_PATH connection = _Connection(RETURNED) - api = 
self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) resource = api.subscription_create( self.SUB_PATH, self.TOPIC_PATH, @@ -533,7 +540,8 @@ def test_subscription_get(self): 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, } connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) resource = api.subscription_get(self.SUB_PATH) @@ -545,7 +553,8 @@ def test_subscription_get(self): def test_subscription_delete(self): RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) api.subscription_delete(self.SUB_PATH) @@ -560,7 +569,8 @@ def test_subscription_modify_push_config(self): } RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) @@ -580,7 +590,8 @@ def test_subscription_pull_defaults(self): 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], } connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) BODY = { 'returnImmediately': False, 'maxMessages': 1, @@ -606,7 +617,8 @@ def test_subscription_pull_explicit(self): 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], } connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) MAX_MESSAGES = 10 BODY = { 'returnImmediately': True, @@ -630,7 +642,8 @@ def test_subscription_acknowledge(self): } RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -649,7 +662,8 @@ def 
test_subscription_modify_ack_deadline(self): } RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + client = _Client(connection, self.PROJECT) + api = self._makeOne(client) api.subscription_modify_ack_deadline( self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) From eba4f74ec475a0739fcb03b344590f70ba8f130f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 10:25:32 -0700 Subject: [PATCH 0031/1197] Renaming _item_to_subscription helper. Making it more specific to the methods that use it (topic_list_subscriptions) since list_subscriptions will also need to convert an item to a subscription. --- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 7 ++++--- .../google-cloud-pubsub/google/cloud/pubsub/connection.py | 5 +++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 54f5f800bc4c..63c6a2bf4090 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -218,8 +218,9 @@ def topic_list_subscriptions(self, topic, page_size=0, page_token=None): iter_kwargs = {} if page_size: # page_size can be 0 or explicit None. iter_kwargs['max_results'] = page_size - iterator = GAXIterator(self._client, page_iter, - _item_to_subscription, **iter_kwargs) + iterator = GAXIterator( + self._client, page_iter, + _item_to_subscription_for_topic, **iter_kwargs) iterator.topic = topic return iterator @@ -563,7 +564,7 @@ def _item_to_topic(iterator, resource): {'name': resource.name}, iterator.client) -def _item_to_subscription(iterator, subscription_path): +def _item_to_subscription_for_topic(iterator, subscription_path): """Convert a subscription name to the native object. 
:type iterator: :class:`~google.cloud.iterator.Iterator` diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 030f094494e9..7c4e515d9c8a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -238,7 +238,8 @@ def topic_list_subscriptions(self, topic, page_size=None, page_token=None): iterator = HTTPIterator( client=self._client, path=path, - item_to_value=_item_to_subscription, items_key='subscriptions', + item_to_value=_item_to_subscription_for_topic, + items_key='subscriptions', page_token=page_token, extra_params=extra_params) iterator.topic = topic return iterator @@ -591,7 +592,7 @@ def _item_to_topic(iterator, resource): return Topic.from_api_repr(resource, iterator.client) -def _item_to_subscription(iterator, subscription_path): +def _item_to_subscription_for_topic(iterator, subscription_path): """Convert a subscription name to the native object. :type iterator: :class:`~google.cloud.iterator.Iterator` From 8dc21fd8a0f04efc0d216d8849fccfb6660926af Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 11:29:17 -0700 Subject: [PATCH 0032/1197] Using Iterator as response for Pub/Sub list_subscriptions(). Also remove GAX confusion in Pub/Sub between max_results and page_size. 
--- .../google/cloud/pubsub/_gax.py | 68 +++++++++++++------ .../google/cloud/pubsub/client.py | 16 ++--- .../google/cloud/pubsub/connection.py | 62 +++++++++++++---- 3 files changed, 99 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 63c6a2bf4090..b11d77d54066 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -14,6 +14,8 @@ """GAX wrapper for Pubsub API requests.""" +import functools + from google.cloud.gapic.pubsub.v1.publisher_api import PublisherApi from google.cloud.gapic.pubsub.v1.subscriber_api import SubscriberApi from google.gax import CallOptions @@ -78,12 +80,7 @@ def list_topics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_topics( path, page_size=page_size, options=options) - - iter_kwargs = {} - if page_size: # page_size can be 0 or explicit None. - iter_kwargs['max_results'] = page_size - return GAXIterator(self._client, page_iter, _item_to_topic, - **iter_kwargs) + return GAXIterator(self._client, page_iter, _item_to_topic) def topic_create(self, topic_path): """API call: create a topic @@ -215,12 +212,8 @@ def topic_list_subscriptions(self, topic, page_size=0, page_token=None): raise NotFound(topic_path) raise - iter_kwargs = {} - if page_size: # page_size can be 0 or explicit None. - iter_kwargs['max_results'] = page_size - iterator = GAXIterator( - self._client, page_iter, - _item_to_subscription_for_topic, **iter_kwargs) + iterator = GAXIterator(self._client, page_iter, + _item_to_subscription_for_topic) iterator.topic = topic return iterator @@ -256,11 +249,10 @@ def list_subscriptions(self, project, page_size=0, page_token=None): If not passed, the API will return the first page of subscriptions. 
- :rtype: tuple, (list, str) - :returns: list of ``Subscription`` resource dicts, plus a - "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.pubsub.subscription.Subscription` + accessible to the current API. """ if page_token is None: page_token = INITIAL_PAGE @@ -268,10 +260,14 @@ def list_subscriptions(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_subscriptions( path, page_size=page_size, options=options) - subscriptions = [MessageToDict(sub_pb) - for sub_pb in page_iter.next()] - token = page_iter.page_token or None - return subscriptions, token + + # We attach a mutable topics dictionary so that as topic + # objects are created by Subscription.from_api_repr, they + # can be re-used by other subscriptions from the same topic. + topics = {} + item_to_value = functools.partial( + _item_to_subscription_for_client, topics=topics) + return GAXIterator(self._client, page_iter, item_to_value) def subscription_create(self, subscription_path, topic_path, ack_deadline=None, push_endpoint=None): @@ -579,3 +575,33 @@ def _item_to_subscription_for_topic(iterator, subscription_path): subscription_name = subscription_name_from_path( subscription_path, iterator.client.project) return Subscription(subscription_name, iterator.topic) + + +def _item_to_subscription_for_client(iterator, sub_pb, topics): + """Convert a subscription protobuf to the native object. + + .. note:: + + This method does not have the correct signature to be used as + the ``item_to_value`` argument to + :class:`~google.cloud.iterator.Iterator`. It is intended to be + patched with a mutable topics argument that can be updated + on subsequent calls. For an example, see how the method is + used above in :meth:`_SubscriberAPI.list_subscriptions`. 
+ + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type sub_pb: :class:`~google.pubsub.v1.pubsub_pb2.Subscription` + :param sub_pb: A subscription returned from the API. + + :type topics: dict + :param topics: A dictionary of topics to be used (and modified) + as new subscriptions are created bound to topics. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: The next subscription in the page. + """ + resource = MessageToDict(sub_pb) + return Subscription.from_api_repr( + resource, iterator.client, topics=topics) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index d720ee9c73a6..63a68b4392a2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -160,20 +160,14 @@ def list_subscriptions(self, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: tuple, (list, str) - :returns: list of :class:`~.pubsub.subscription.Subscription`, - plus a "next page token" string: if not None, indicates that - more topics can be retrieved with another call (pass that - value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.pubsub.subscription.Subscription` + accessible to the current client. """ api = self.subscriber_api - resources, next_token = api.list_subscriptions( + return api.list_subscriptions( self.project, page_size, page_token) - topics = {} - subscriptions = [Subscription.from_api_repr(resource, self, - topics=topics) - for resource in resources] - return subscriptions, next_token def topic(self, name, timestamp_messages=False): """Creates a topic bound to the current client. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 7c4e515d9c8a..1686dbeab942 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -15,6 +15,7 @@ """Create / interact with Google Cloud Pub/Sub connections.""" import base64 +import functools import os from google.cloud import connection as base_connection @@ -274,24 +275,26 @@ def list_subscriptions(self, project, page_size=None, page_token=None): If not passed, the API will return the first page of subscriptions. - :rtype: tuple, (list, str) - :returns: list of ``Subscription`` resource dicts, plus a - "next page token" string: if not None, indicates that - more subscriptions can be retrieved with another call (pass - that value as ``page_token``). + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of + :class:`~google.cloud.pubsub.subscription.Subscription` + accessible to the current API. """ - conn = self._connection - params = {} - + extra_params = {} if page_size is not None: - params['pageSize'] = page_size - - if page_token is not None: - params['pageToken'] = page_token - + extra_params['pageSize'] = page_size path = '/projects/%s/subscriptions' % (project,) - resp = conn.api_request(method='GET', path=path, query_params=params) - return resp.get('subscriptions', ()), resp.get('nextPageToken') + + # We attach a mutable topics dictionary so that as topic + # objects are created by Subscription.from_api_repr, they + # can be re-used by other subscriptions from the same topic. 
+ topics = {} + item_to_value = functools.partial( + _item_to_subscription_for_client, topics=topics) + return HTTPIterator( + client=self._client, path=path, item_to_value=item_to_value, + items_key='subscriptions', page_token=page_token, + extra_params=extra_params) def subscription_create(self, subscription_path, topic_path, ack_deadline=None, push_endpoint=None): @@ -607,3 +610,32 @@ def _item_to_subscription_for_topic(iterator, subscription_path): subscription_name = subscription_name_from_path( subscription_path, iterator.client.project) return Subscription(subscription_name, iterator.topic) + + +def _item_to_subscription_for_client(iterator, resource, topics): + """Convert a subscription to the native object. + + .. note:: + + This method does not have the correct signature to be used as + the ``item_to_value`` argument to + :class:`~google.cloud.iterator.Iterator`. It is intended to be + patched with a mutable topics argument that can be updated + on subsequent calls. For an example, see how the method is + used above in :meth:`_SubscriberAPI.list_subscriptions`. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: dict + :param resource: A subscription returned from the API. + + :type topics: dict + :param topics: A dictionary of topics to be used (and modified) + as new subscriptions are created bound to topics. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: The next subscription in the page. + """ + return Subscription.from_api_repr( + resource, iterator.client, topics=topics) From 387d33dbe84871b142f9ad37d5eef9bb19746e6d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 27 Oct 2016 12:55:13 -0700 Subject: [PATCH 0033/1197] Updating Client.list_subscriptions() unit tests. These changes are needed to cover the change to iterators. 
--- .../google/cloud/pubsub/_gax.py | 4 +- .../google/cloud/pubsub/client.py | 1 - .../google/cloud/pubsub/connection.py | 4 +- .../unit_tests/test__gax.py | 86 +++++++++----- .../unit_tests/test_client.py | 107 +++++++++++++----- .../unit_tests/test_connection.py | 67 ++++++++--- 6 files changed, 194 insertions(+), 75 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index b11d77d54066..bb700f7018c5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -266,7 +266,7 @@ def list_subscriptions(self, project, page_size=0, page_token=None): # can be re-used by other subscriptions from the same topic. topics = {} item_to_value = functools.partial( - _item_to_subscription_for_client, topics=topics) + _item_to_sub_for_client, topics=topics) return GAXIterator(self._client, page_iter, item_to_value) def subscription_create(self, subscription_path, topic_path, @@ -577,7 +577,7 @@ def _item_to_subscription_for_topic(iterator, subscription_path): return Subscription(subscription_name, iterator.topic) -def _item_to_subscription_for_client(iterator, sub_pb, topics): +def _item_to_sub_for_client(iterator, sub_pb, topics): """Convert a subscription protobuf to the native object. .. 
note:: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 63a68b4392a2..ec909fda4f2e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -22,7 +22,6 @@ from google.cloud.pubsub.connection import _PublisherAPI as JSONPublisherAPI from google.cloud.pubsub.connection import _SubscriberAPI as JSONSubscriberAPI from google.cloud.pubsub.connection import _IAMPolicyAPI -from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic try: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py index 1686dbeab942..4946ff37bd12 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py @@ -290,7 +290,7 @@ def list_subscriptions(self, project, page_size=None, page_token=None): # can be re-used by other subscriptions from the same topic. topics = {} item_to_value = functools.partial( - _item_to_subscription_for_client, topics=topics) + _item_to_sub_for_client, topics=topics) return HTTPIterator( client=self._client, path=path, item_to_value=item_to_value, items_key='subscriptions', page_token=page_token, @@ -612,7 +612,7 @@ def _item_to_subscription_for_topic(iterator, subscription_path): return Subscription(subscription_name, iterator.topic) -def _item_to_subscription_for_client(iterator, resource, topics): +def _item_to_sub_for_client(iterator, resource, topics): """Convert a subscription to the native object. .. 
note:: diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 7f9a9ff8e5c9..c47b1cd24111 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -415,29 +415,40 @@ def test_ctor(self): def test_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE from google.pubsub.v1.pubsub_pb2 import PushConfig - from google.pubsub.v1.pubsub_pb2 import Subscription + from google.pubsub.v1.pubsub_pb2 import Subscription as SubscriptionPB from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) + local_sub_path = '%s/subscriptions/%s' % ( + self.PROJECT_PATH, self.SUB_NAME) + sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, + push_config=push_cfg_pb) response = _GAXPageIterator([sub_pb]) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - client = _Client(self.PROJECT) + creds = _Credentials() + client = Client(project=self.PROJECT, credentials=creds) api = self._makeOne(gax_api, client) - subscriptions, next_token = api.list_subscriptions(self.PROJECT) + iterator = api.list_subscriptions(self.PROJECT) + subscriptions = list(iterator) + next_token = iterator.next_page_token + # Check the token returned. + self.assertIsNone(next_token) + # Check the subscription object returned. 
self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertEqual(subscription, { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'pushConfig': { - 'pushEndpoint': self.PUSH_ENDPOINT, - }, - }) - self.assertIsNone(next_token) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + self.assertIs(subscription._client, client) + self.assertEqual(subscription._project, self.PROJECT) + self.assertIsNone(subscription.ack_deadline) + self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) name, page_size, options = gax_api._list_subscriptions_called_with self.assertEqual(name, self.PROJECT_PATH) @@ -446,33 +457,45 @@ def test_list_subscriptions_no_paging(self): def test_list_subscriptions_with_paging(self): from google.pubsub.v1.pubsub_pb2 import PushConfig - from google.pubsub.v1.pubsub_pb2 import Subscription + from google.pubsub.v1.pubsub_pb2 import Subscription as SubscriptionPB from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic SIZE = 23 TOKEN = 'TOKEN' NEW_TOKEN = 'NEW_TOKEN' push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) + local_sub_path = '%s/subscriptions/%s' % ( + self.PROJECT_PATH, self.SUB_NAME) + sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, + push_config=push_cfg_pb) response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) client = _Client(self.PROJECT) + creds = _Credentials() + client = Client(project=self.PROJECT, credentials=creds) api = self._makeOne(gax_api, client) - subscriptions, next_token = 
api.list_subscriptions( + iterator = api.list_subscriptions( self.PROJECT, page_size=SIZE, page_token=TOKEN) + subscriptions = list(iterator) + next_token = iterator.next_page_token + # Check the token returned. + self.assertEqual(next_token, NEW_TOKEN) + # Check the subscription object returned. self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertEqual(subscription, { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'pushConfig': { - 'pushEndpoint': self.PUSH_ENDPOINT, - }, - }) - self.assertEqual(next_token, NEW_TOKEN) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + self.assertIs(subscription._client, client) + self.assertEqual(subscription._project, self.PROJECT) + self.assertIsNone(subscription.ack_deadline) + self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) name, page_size, options = gax_api._list_subscriptions_called_with self.assertEqual(name, self.PROJECT_PATH) @@ -1096,12 +1119,6 @@ def __init__(self, message_ids): self.message_ids = message_ids -class _PushConfigPB(object): - - def __init__(self, push_endpoint): - self.push_endpoint = push_endpoint - - class _PubsubMessagePB(object): def __init__(self, message_id, data, attributes, publish_time): @@ -1135,3 +1152,10 @@ class _Client(object): def __init__(self, project): self.project = project + + +class _Credentials(object): + + @staticmethod + def create_scoped_required(): + return False diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index de19a8761240..08ed1f29f2fc 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -220,29 +220,52 @@ def test_list_topics_missing_key(self): def test_list_subscriptions_no_paging(self): from 
google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) - client.connection = object() - api = client._subscriber_api = _FauxSubscriberAPI() - api._list_subscriptions_response = [SUB_INFO], None + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=False) + returned = {'subscriptions': [SUB_INFO]} + client.connection = _Connection(returned) - subscriptions, next_page_token = client.list_subscriptions() + iterator = client.list_subscriptions() + subscriptions = list(iterator) + next_page_token = iterator.next_page_token - self.assertEqual(len(subscriptions), 1) - self.assertIsInstance(subscriptions[0], Subscription) - self.assertEqual(subscriptions[0].name, self.SUB_NAME) - self.assertEqual(subscriptions[0].topic.name, self.TOPIC_NAME) + # Check the token returned. self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_subscriptions, - (self.PROJECT, None, None)) + # Check the subscription object returned. 
+ self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + self.assertIs(subscription._client, client) + self.assertEqual(subscription._project, self.PROJECT) + self.assertIsNone(subscription.ack_deadline) + self.assertIsNone(subscription.push_endpoint) + + called_with = client.connection._called_with + expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': expected_path, + 'query_params': {}, + }) def test_list_subscriptions_with_paging(self): + import six from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._makeOne(project=self.PROJECT, credentials=creds, + use_gax=False) + + # Set up the mock response. 
ACK_DEADLINE = 42 PUSH_ENDPOINT = 'https://push.example.com/endpoint' SUB_INFO = {'name': self.SUB_PATH, @@ -252,23 +275,42 @@ def test_list_subscriptions_with_paging(self): TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 - client.connection = object() - api = client._subscriber_api = _FauxSubscriberAPI() - api._list_subscriptions_response = [SUB_INFO], TOKEN2 + returned = { + 'subscriptions': [SUB_INFO], + 'nextPageToken': TOKEN2, + } + client.connection = _Connection(returned) - subscriptions, next_page_token = client.list_subscriptions( + iterator = client.list_subscriptions( SIZE, TOKEN1) + page = six.next(iterator.pages) + subscriptions = list(page) + next_page_token = iterator.next_page_token - self.assertEqual(len(subscriptions), 1) - self.assertIsInstance(subscriptions[0], Subscription) - self.assertEqual(subscriptions[0].name, self.SUB_NAME) - self.assertEqual(subscriptions[0].topic.name, self.TOPIC_NAME) - self.assertEqual(subscriptions[0].ack_deadline, ACK_DEADLINE) - self.assertEqual(subscriptions[0].push_endpoint, PUSH_ENDPOINT) + # Check the token returned. self.assertEqual(next_page_token, TOKEN2) - - self.assertEqual(api._listed_subscriptions, - (self.PROJECT, SIZE, TOKEN1)) + # Check the subscription object returned. 
+ self.assertEqual(len(subscriptions), 1) + subscription = subscriptions[0] + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + self.assertIs(subscription._client, client) + self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) + self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) + + called_with = client.connection._called_with + expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) + self.assertEqual(called_with, { + 'method': 'GET', + 'path': expected_path, + 'query_params': { + 'pageSize': SIZE, + 'pageToken': TOKEN1, + }, + }) def test_list_subscriptions_w_missing_key(self): PROJECT = 'PROJECT' @@ -341,3 +383,16 @@ class _FauxSubscriberAPI(object): def list_subscriptions(self, project, page_size, page_token): self._listed_subscriptions = (project, page_size, page_token) return self._list_subscriptions_response + + +class _Connection(object): + + _called_with = None + + def __init__(self, *responses): + self._responses = responses + + def api_request(self, **kw): + self._called_with = kw + response, self._responses = self._responses[0], self._responses[1:] + return response diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test_connection.py index a588dc693898..3d6f39b27113 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_connection.py +++ b/packages/google-cloud-pubsub/unit_tests/test_connection.py @@ -423,20 +423,35 @@ def test_ctor(self): self.assertIs(api._client, client) def test_list_subscriptions_no_paging(self): + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic + SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} RETURNED = 
{'subscriptions': [SUB_INFO]} connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) + creds = _Credentials() + client = Client(project=self.PROJECT, credentials=creds) + client.connection = connection api = self._makeOne(client) - subscriptions, next_token = api.list_subscriptions(self.PROJECT) + iterator = api.list_subscriptions(self.PROJECT) + subscriptions = list(iterator) + next_token = iterator.next_page_token + # Check the token returned. + self.assertIsNone(next_token) + # Check the subscription object returned. self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertIsNone(next_token) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + self.assertIs(subscription._client, client) + self.assertEqual(subscription._project, self.PROJECT) + self.assertIsNone(subscription.ack_deadline) + self.assertIsNone(subscription.push_endpoint) self.assertEqual(connection._called_with['method'], 'GET') path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) @@ -444,6 +459,11 @@ def test_list_subscriptions_no_paging(self): self.assertEqual(connection._called_with['query_params'], {}) def test_list_subscriptions_with_paging(self): + import six + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.subscription import Subscription + from google.cloud.pubsub.topic import Topic + TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 @@ -453,18 +473,30 @@ def test_list_subscriptions_with_paging(self): 'nextPageToken': 'TOKEN2', } connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) + creds = _Credentials() + client = Client(project=self.PROJECT, credentials=creds) + 
client.connection = connection api = self._makeOne(client) - subscriptions, next_token = api.list_subscriptions( + iterator = api.list_subscriptions( self.PROJECT, page_token=TOKEN1, page_size=SIZE) + page = six.next(iterator.pages) + subscriptions = list(page) + next_token = iterator.next_page_token + # Check the token returned. + self.assertEqual(next_token, TOKEN2) + # Check the subscription object returned. self.assertEqual(len(subscriptions), 1) subscription = subscriptions[0] - self.assertIsInstance(subscription, dict) - self.assertEqual(subscription['name'], self.SUB_PATH) - self.assertEqual(subscription['topic'], self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN2) + self.assertIsInstance(subscription, Subscription) + self.assertEqual(subscription.name, self.SUB_NAME) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + self.assertIs(subscription._client, client) + self.assertEqual(subscription._project, self.PROJECT) + self.assertIsNone(subscription.ack_deadline) + self.assertIsNone(subscription.push_endpoint) self.assertEqual(connection._called_with['method'], 'GET') path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) @@ -478,7 +510,9 @@ def test_list_subscriptions_missing_key(self): client = _Client(connection, self.PROJECT) api = self._makeOne(client) - subscriptions, next_token = api.list_subscriptions(self.PROJECT) + iterator = api.list_subscriptions(self.PROJECT) + subscriptions = list(iterator) + next_token = iterator.next_page_token self.assertEqual(len(subscriptions), 0) self.assertIsNone(next_token) @@ -843,3 +877,10 @@ class _Client(object): def __init__(self, connection, project): self.connection = connection self.project = project + + +class _Credentials(object): + + @staticmethod + def create_scoped_required(): + return False From 7b2a8cf5c2fbcbc964f256f9888666f80a56cd82 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 10:12:12 -0700 Subject: [PATCH 0034/1197] Adding 
PyPI badges to package READMEs. --- packages/google-cloud-pubsub/README.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 08db812e6539..6bf9d77ee82e 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -5,6 +5,8 @@ Python Client for Google Cloud Pub / Sub .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/docs +|pypi| |versions| + - `Documentation`_ .. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html @@ -57,3 +59,8 @@ To get started with this API, you'll need to create topic.publish('this is the message_payload', attr1='value1', attr2='value2') + +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg + :target: https://pypi.python.org/pypi/google-cloud-pubsub +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg + :target: https://pypi.python.org/pypi/google-cloud-pubsub From 4692a7d8636ee1d45cfb9dfc2da184616bc29c54 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 01:18:11 -0700 Subject: [PATCH 0035/1197] Making a gRPC channel with credentials from Client's. This over-rides the default behavior within GAX, which uses application default credentials. 
--- .../google/cloud/pubsub/_gax.py | 12 ++++- .../unit_tests/test__gax.py | 44 ++++++++++++++++--- 2 files changed, 47 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index bb700f7018c5..ecfba42477f4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -30,6 +30,8 @@ from google.cloud._helpers import _to_bytes from google.cloud._helpers import _pb_timestamp_to_rfc3339 +from google.cloud._helpers import make_secure_channel +from google.cloud.connection import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator @@ -518,9 +520,12 @@ def make_gax_publisher_api(connection): configuration. :rtype: :class:`~google.cloud.pubsub.v1.subscriber_api.SubscriberApi` """ - channel = None if connection.in_emulator: channel = insecure_channel(connection.host) + else: + channel = make_secure_channel( + connection.credentials, DEFAULT_USER_AGENT, + PublisherApi.SERVICE_ADDRESS) return PublisherApi(channel=channel) @@ -538,9 +543,12 @@ def make_gax_subscriber_api(connection): :returns: A subscriber API instance with the proper connection configuration. 
""" - channel = None if connection.in_emulator: channel = insecure_channel(connection.host) + else: + channel = make_secure_channel( + connection.credentials, DEFAULT_USER_AGENT, + SubscriberApi.SERVICE_ADDRESS) return SubscriberApi(channel=channel) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index c47b1cd24111..4c2957df14b0 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -879,18 +879,32 @@ def test_live_api(self): from google.cloud.pubsub import _gax as MUT channels = [] + channel_args = [] + channel_obj = object() mock_result = object() + host = 'foo.apis.invalid' def mock_publisher_api(channel): channels.append(channel) return mock_result - connection = _Connection(in_emulator=False) - with _Monkey(MUT, PublisherApi=mock_publisher_api): + def make_channel(*args): + channel_args.append(args) + return channel_obj + + mock_publisher_api.SERVICE_ADDRESS = host + + creds = _Credentials() + connection = _Connection(in_emulator=False, + credentials=creds) + with _Monkey(MUT, PublisherApi=mock_publisher_api, + make_secure_channel=make_channel): result = self._callFUT(connection) self.assertIs(result, mock_result) - self.assertEqual(channels, [None]) + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(creds, MUT.DEFAULT_USER_AGENT, host)]) def test_emulator(self): from google.cloud._testing import _Monkey @@ -932,18 +946,32 @@ def test_live_api(self): from google.cloud.pubsub import _gax as MUT channels = [] + channel_args = [] + channel_obj = object() mock_result = object() + host = 'foo.apis.invalid' def mock_subscriber_api(channel): channels.append(channel) return mock_result - connection = _Connection(in_emulator=False) - with _Monkey(MUT, SubscriberApi=mock_subscriber_api): + def make_channel(*args): + channel_args.append(args) + return channel_obj + + 
mock_subscriber_api.SERVICE_ADDRESS = host + + creds = _Credentials() + connection = _Connection(in_emulator=False, + credentials=creds) + with _Monkey(MUT, SubscriberApi=mock_subscriber_api, + make_secure_channel=make_channel): result = self._callFUT(connection) self.assertIs(result, mock_result) - self.assertEqual(channels, [None]) + self.assertEqual(channels, [channel_obj]) + self.assertEqual(channel_args, + [(creds, MUT.DEFAULT_USER_AGENT, host)]) def test_emulator(self): from google.cloud._testing import _Monkey @@ -1143,9 +1171,11 @@ def __init__(self, received_messages): class _Connection(object): - def __init__(self, in_emulator=False, host=None): + def __init__(self, in_emulator=False, host=None, + credentials=None): self.in_emulator = in_emulator self.host = host + self.credentials = credentials class _Client(object): From d145c941684224a6bb372069825cb4b97de85b84 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 12:49:02 -0700 Subject: [PATCH 0036/1197] Renaming connection module as _http in 5 packages. The packages are BigQuery, Datastore, Logging, Pub/Sub and Storage. The rename is in advance of a larger re-factor. But so long as the connections are not public, the re-factor can happen without user-facing implications. 
--- .../google/cloud/pubsub/{connection.py => _http.py} | 0 .../unit_tests/{test_connection.py => test__http.py} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename packages/google-cloud-pubsub/google/cloud/pubsub/{connection.py => _http.py} (100%) rename packages/google-cloud-pubsub/unit_tests/{test_connection.py => test__http.py} (100%) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/connection.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/pubsub/connection.py rename to packages/google-cloud-pubsub/google/cloud/pubsub/_http.py diff --git a/packages/google-cloud-pubsub/unit_tests/test_connection.py b/packages/google-cloud-pubsub/unit_tests/test__http.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test_connection.py rename to packages/google-cloud-pubsub/unit_tests/test__http.py From 9ff521970b1768c615499d0ef493f9c57e59dede Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 13:22:29 -0700 Subject: [PATCH 0037/1197] Updating imports to reflect connection->_http module rename. 
--- .../google/cloud/pubsub/__init__.py | 4 ---- .../google-cloud-pubsub/google/cloud/pubsub/client.py | 8 ++++---- packages/google-cloud-pubsub/unit_tests/test__http.py | 10 +++++----- packages/google-cloud-pubsub/unit_tests/test_client.py | 8 ++++---- 4 files changed, 13 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py index d072d315ed98..9c0fcb9d45f6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py @@ -25,9 +25,5 @@ from google.cloud.pubsub.client import Client -from google.cloud.pubsub.connection import Connection from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic - - -SCOPE = Connection.SCOPE diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index ec909fda4f2e..c357a5f8753d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -18,10 +18,10 @@ from google.cloud.client import JSONClient from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.pubsub.connection import Connection -from google.cloud.pubsub.connection import _PublisherAPI as JSONPublisherAPI -from google.cloud.pubsub.connection import _SubscriberAPI as JSONSubscriberAPI -from google.cloud.pubsub.connection import _IAMPolicyAPI +from google.cloud.pubsub._http import Connection +from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI +from google.cloud.pubsub._http import _SubscriberAPI as JSONSubscriberAPI +from google.cloud.pubsub._http import _IAMPolicyAPI from google.cloud.pubsub.topic import Topic try: diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 
3d6f39b27113..ce6abe4553ae 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -32,7 +32,7 @@ def _makeOne(self, *args, **kw): class TestConnection(_Base): def _getTargetClass(self): - from google.cloud.pubsub.connection import Connection + from google.cloud.pubsub._http import Connection return Connection def test_default_url(self): @@ -93,7 +93,7 @@ def test_build_api_url_w_base_url_override(self): class Test_PublisherAPI(_Base): def _getTargetClass(self): - from google.cloud.pubsub.connection import _PublisherAPI + from google.cloud.pubsub._http import _PublisherAPI return _PublisherAPI def _makeOne(self, *args, **kw): @@ -409,7 +409,7 @@ def test_topic_list_subscriptions_miss(self): class Test_SubscriberAPI(_Base): def _getTargetClass(self): - from google.cloud.pubsub.connection import _SubscriberAPI + from google.cloud.pubsub._http import _SubscriberAPI return _SubscriberAPI def _makeOne(self, *args, **kw): @@ -711,7 +711,7 @@ def test_subscription_modify_ack_deadline(self): class Test_IAMPolicyAPI(_Base): def _getTargetClass(self): - from google.cloud.pubsub.connection import _IAMPolicyAPI + from google.cloud.pubsub._http import _IAMPolicyAPI return _IAMPolicyAPI def test_ctor(self): @@ -824,7 +824,7 @@ def test_test_iam_permissions_missing_key(self): class Test__transform_messages_base64_empty(unittest.TestCase): def _callFUT(self, messages, transform, key=None): - from google.cloud.pubsub.connection import _transform_messages_base64 + from google.cloud.pubsub._http import _transform_messages_base64 return _transform_messages_base64(messages, transform, key) def test__transform_messages_base64_empty_message(self): diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 08ed1f29f2fc..2f2dbfec02c5 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ 
b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -30,7 +30,7 @@ def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def test_publisher_api_wo_gax(self): - from google.cloud.pubsub.connection import _PublisherAPI + from google.cloud.pubsub._http import _PublisherAPI from google.cloud.pubsub import client as MUT from google.cloud._testing import _Monkey creds = _Credentials() @@ -49,7 +49,7 @@ def test_publisher_api_wo_gax(self): def test_no_gax_ctor(self): from google.cloud._testing import _Monkey - from google.cloud.pubsub.connection import _PublisherAPI + from google.cloud.pubsub._http import _PublisherAPI from google.cloud.pubsub import client as MUT creds = _Credentials() @@ -96,7 +96,7 @@ def __init__(self, _wrapped, client): self.assertEqual(_called_with, [(args, {})]) def test_subscriber_api_wo_gax(self): - from google.cloud.pubsub.connection import _SubscriberAPI + from google.cloud.pubsub._http import _SubscriberAPI from google.cloud.pubsub import client as MUT from google.cloud._testing import _Monkey creds = _Credentials() @@ -148,7 +148,7 @@ def __init__(self, _wrapped, client): self.assertEqual(_called_with, [(args, {})]) def test_iam_policy_api(self): - from google.cloud.pubsub.connection import _IAMPolicyAPI + from google.cloud.pubsub._http import _IAMPolicyAPI creds = _Credentials() client = self._makeOne(project=self.PROJECT, credentials=creds) conn = client.connection = object() From 3c5f7a2c325b92dc13b0fe4ce9bd7fe887562ef1 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Nov 2016 14:30:24 -0700 Subject: [PATCH 0038/1197] Removing docs references to connection module. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index ecfba42477f4..2636dbea2c89 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -512,7 +512,7 @@ def make_gax_publisher_api(connection): an insecure ``channel`` is created pointing at the local Pub / Sub server. - :type connection: :class:`~google.cloud.pubsub.connection.Connection` + :type connection: :class:`~google.cloud.pubsub._http.Connection` :param connection: The connection that holds configuration details. :rtype: :class:`~google.cloud.pubsub.v1.publisher_api.PublisherApi` @@ -536,7 +536,7 @@ def make_gax_subscriber_api(connection): an insecure ``channel`` is created pointing at the local Pub / Sub server. - :type connection: :class:`~google.cloud.pubsub.connection.Connection` + :type connection: :class:`~google.cloud.pubsub._http.Connection` :param connection: The connection that holds configuration details. :rtype: :class:`~google.cloud.pubsub.v1.subscriber_api.SubscriberApi` From c738ef413933f120a27aa19fa695d645bc6eb09f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 21:26:07 -0800 Subject: [PATCH 0039/1197] Avoiding using filesystem deps in package tox.ini configs. 
--- packages/google-cloud-pubsub/tox.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini index 9a9990704fbe..a05bed81c961 100644 --- a/packages/google-cloud-pubsub/tox.ini +++ b/packages/google-cloud-pubsub/tox.ini @@ -3,8 +3,9 @@ envlist = py27,py34,py35,cover [testing] +localdeps = + pip install --upgrade {toxinidir}/../core deps = - {toxinidir}/../core pytest covercmd = py.test --quiet \ @@ -15,6 +16,7 @@ covercmd = [testenv] commands = + {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -23,6 +25,7 @@ deps = basepython = python2.7 commands = + {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From a5a2696195d4aa691492a33ef5e3350b6a09d4ac Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:20:59 -0800 Subject: [PATCH 0040/1197] Renaming _getTargetClass to _get_target_class. Done via: $ git grep -l 'def _getTargetClass(self)' | \ > xargs sed -i s/'def _getTargetClass(self)'/'@staticmethod\n def _get_target_class()'/g --- packages/google-cloud-pubsub/unit_tests/test__gax.py | 6 ++++-- .../google-cloud-pubsub/unit_tests/test__http.py | 12 ++++++++---- .../google-cloud-pubsub/unit_tests/test_client.py | 3 ++- packages/google-cloud-pubsub/unit_tests/test_iam.py | 3 ++- .../google-cloud-pubsub/unit_tests/test_message.py | 3 ++- .../unit_tests/test_subscription.py | 6 ++++-- .../google-cloud-pubsub/unit_tests/test_topic.py | 6 ++++-- 7 files changed, 26 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 4c2957df14b0..7922e84459b5 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -44,7 +44,8 @@ def _makeOne(self, *args, **kw): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_PublisherAPI(_Base, unittest.TestCase): - 
def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub._gax import _PublisherAPI return _PublisherAPI @@ -401,7 +402,8 @@ class Test_SubscriberAPI(_Base, unittest.TestCase): PUSH_ENDPOINT = 'https://api.example.com/push' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub._gax import _SubscriberAPI return _SubscriberAPI diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index ce6abe4553ae..4761d89438ff 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -31,7 +31,8 @@ def _makeOne(self, *args, **kw): class TestConnection(_Base): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub._http import Connection return Connection @@ -92,7 +93,8 @@ def test_build_api_url_w_base_url_override(self): class Test_PublisherAPI(_Base): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub._http import _PublisherAPI return _PublisherAPI @@ -408,7 +410,8 @@ def test_topic_list_subscriptions_miss(self): class Test_SubscriberAPI(_Base): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub._http import _SubscriberAPI return _SubscriberAPI @@ -710,7 +713,8 @@ def test_subscription_modify_ack_deadline(self): class Test_IAMPolicyAPI(_Base): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub._http import _IAMPolicyAPI return _IAMPolicyAPI diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 2f2dbfec02c5..cbb9d62f2d39 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -22,7 +22,8 @@ class TestClient(unittest.TestCase): SUB_NAME = 
'subscription_name' SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.client import Client return Client diff --git a/packages/google-cloud-pubsub/unit_tests/test_iam.py b/packages/google-cloud-pubsub/unit_tests/test_iam.py index 0a31697b2990..d219ce797def 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_iam.py +++ b/packages/google-cloud-pubsub/unit_tests/test_iam.py @@ -17,7 +17,8 @@ class TestPolicy(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.iam import Policy return Policy diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 0d71ec9715f8..66fcf6b2b415 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -17,7 +17,8 @@ class TestMessage(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.message import Message return Message diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index 2afa45c3cc94..e98ee69e764e 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -24,7 +24,8 @@ class TestSubscription(unittest.TestCase): DEADLINE = 42 ENDPOINT = 'https://api.example.com/push' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.subscription import Subscription return Subscription @@ -671,7 +672,8 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, class TestAutoAck(unittest.TestCase): - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.subscription import AutoAck return 
AutoAck diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index b67d6ac3ce66..a7f5dab2d617 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -20,7 +20,8 @@ class TestTopic(unittest.TestCase): TOPIC_NAME = 'topic_name' TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.topic import Topic return Topic @@ -619,7 +620,8 @@ def test_check_iam_permissions_w_alternate_client(self): class TestBatch(unittest.TestCase): PROJECT = 'PROJECT' - def _getTargetClass(self): + @staticmethod + def _get_target_class(): from google.cloud.pubsub.topic import Batch return Batch From 7019f5a2e7063bb374e03ac7dc5d91a4d329a0ce Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 20:22:12 -0800 Subject: [PATCH 0041/1197] Changing uses of _getTargetClass to _get_target_class. 
Done via: $ git grep -l _getTargetClass | \ > xargs sed -i s/_getTargetClass/_get_target_class/g --- .../google-cloud-pubsub/unit_tests/test__gax.py | 2 +- .../google-cloud-pubsub/unit_tests/test__http.py | 10 +++++----- .../google-cloud-pubsub/unit_tests/test_client.py | 2 +- .../google-cloud-pubsub/unit_tests/test_iam.py | 8 ++++---- .../google-cloud-pubsub/unit_tests/test_message.py | 8 ++++---- .../unit_tests/test_subscription.py | 14 +++++++------- .../google-cloud-pubsub/unit_tests/test_topic.py | 8 ++++---- 7 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 7922e84459b5..26c22c133fad 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -38,7 +38,7 @@ class _Base(object): SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) @unittest.skipUnless(_HAVE_GAX, 'No gax-python') diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 4761d89438ff..2064bed8463f 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -26,7 +26,7 @@ class _Base(unittest.TestCase): SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) class TestConnection(_Base): @@ -38,7 +38,7 @@ def _get_target_class(): def test_default_url(self): conn = self._makeOne() - klass = self._getTargetClass() + klass = self._get_target_class() self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): @@ -52,7 +52,7 @@ def test_custom_url_from_env(self): with _Monkey(os, 
getenv=fake_environ.get): conn = self._makeOne() - klass = self._getTargetClass() + klass = self._get_target_class() self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) self.assertEqual(conn.api_base_url, 'http://' + HOST) @@ -99,7 +99,7 @@ def _get_target_class(): return _PublisherAPI def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): connection = _Connection() @@ -416,7 +416,7 @@ def _get_target_class(): return _SubscriberAPI def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor(self): connection = _Connection() diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index cbb9d62f2d39..1ed67be22e8a 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -28,7 +28,7 @@ def _get_target_class(): return Client def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_publisher_api_wo_gax(self): from google.cloud.pubsub._http import _PublisherAPI diff --git a/packages/google-cloud-pubsub/unit_tests/test_iam.py b/packages/google-cloud-pubsub/unit_tests/test_iam.py index d219ce797def..dc6e2a8efd68 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_iam.py +++ b/packages/google-cloud-pubsub/unit_tests/test_iam.py @@ -23,7 +23,7 @@ def _get_target_class(): return Policy def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): policy = self._makeOne() @@ -83,7 +83,7 @@ def test_from_api_repr_only_etag(self): RESOURCE = { 'etag': 'ACAB', } - klass = self._getTargetClass() + klass = self._get_target_class() policy = klass.from_api_repr(RESOURCE) 
self.assertEqual(policy.etag, 'ACAB') self.assertIsNone(policy.version) @@ -118,7 +118,7 @@ def test_from_api_repr_complete(self): {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, ], } - klass = self._getTargetClass() + klass = self._get_target_class() policy = klass.from_api_repr(RESOURCE) self.assertEqual(policy.etag, 'DEADBEEF') self.assertEqual(policy.version, 17) @@ -138,7 +138,7 @@ def test_from_api_repr_bad_role(self): {'role': 'nonesuch', 'members': [BOGUS1, BOGUS2]}, ], } - klass = self._getTargetClass() + klass = self._get_target_class() with self.assertRaises(ValueError): klass.from_api_repr(RESOURCE) diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 66fcf6b2b415..6c32abbfff62 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -23,7 +23,7 @@ def _get_target_class(): return Message def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_no_attributes(self): DATA = b'DEADBEEF' @@ -84,7 +84,7 @@ def test_timestamp_w_timestamp_in_attributes(self): def test_from_api_repr_missing_data(self): MESSAGE_ID = '12345' api_repr = {'messageId': MESSAGE_ID} - message = self._getTargetClass().from_api_repr(api_repr) + message = self._get_target_class().from_api_repr(api_repr) self.assertEqual(message.data, b'') self.assertEqual(message.message_id, MESSAGE_ID) self.assertEqual(message.attributes, {}) @@ -99,7 +99,7 @@ def test_from_api_repr_no_attributes(self): 'messageId': MESSAGE_ID, 'publishTime': TIMESTAMP, } - message = self._getTargetClass().from_api_repr(api_repr) + message = self._get_target_class().from_api_repr(api_repr) self.assertEqual(message.data, DATA) self.assertEqual(message.message_id, MESSAGE_ID) self.assertEqual(message.attributes, {}) @@ -116,7 +116,7 @@ def test_from_api_repr_w_attributes(self): 
'publishTime': TIMESTAMP, 'attributes': ATTRS, } - message = self._getTargetClass().from_api_repr(api_repr) + message = self._get_target_class().from_api_repr(api_repr) self.assertEqual(message.data, DATA) self.assertEqual(message.message_id, MESSAGE_ID) self.assertEqual(message.service_timestamp, TIMESTAMP) diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index e98ee69e764e..68ffab56697e 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -30,7 +30,7 @@ def _get_target_class(): return Subscription def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): client = _Client(project=self.PROJECT) @@ -74,7 +74,7 @@ def test_from_api_repr_no_topics(self): 'name': self.SUB_PATH, 'ackDeadlineSeconds': self.DEADLINE, 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._getTargetClass() + klass = self._get_target_class() client = _Client(project=self.PROJECT) subscription = klass.from_api_repr(resource, client) self.assertEqual(subscription.name, self.SUB_NAME) @@ -86,12 +86,12 @@ def test_from_api_repr_no_topics(self): self.assertEqual(subscription.push_endpoint, self.ENDPOINT) def test_from_api_repr_w_deleted_topic(self): - klass = self._getTargetClass() + klass = self._get_target_class() resource = {'topic': klass._DELETED_TOPIC_PATH, 'name': self.SUB_PATH, 'ackDeadlineSeconds': self.DEADLINE, 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._getTargetClass() + klass = self._get_target_class() client = _Client(project=self.PROJECT) subscription = klass.from_api_repr(resource, client) self.assertEqual(subscription.name, self.SUB_NAME) @@ -106,7 +106,7 @@ def test_from_api_repr_w_topics_no_topic_match(self): 'ackDeadlineSeconds': self.DEADLINE, 'pushConfig': {'pushEndpoint': 
self.ENDPOINT}} topics = {} - klass = self._getTargetClass() + klass = self._get_target_class() client = _Client(project=self.PROJECT) subscription = klass.from_api_repr(resource, client, topics=topics) self.assertEqual(subscription.name, self.SUB_NAME) @@ -126,7 +126,7 @@ def test_from_api_repr_w_topics_w_topic_match(self): client = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client) topics = {self.TOPIC_PATH: topic} - klass = self._getTargetClass() + klass = self._get_target_class() subscription = klass.from_api_repr(resource, client, topics=topics) self.assertEqual(subscription.name, self.SUB_NAME) self.assertIs(subscription.topic, topic) @@ -678,7 +678,7 @@ def _get_target_class(): return AutoAck def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): subscription = _FauxSubscription(()) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index a7f5dab2d617..fcdb9b6e535d 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -26,7 +26,7 @@ def _get_target_class(): return Topic def _makeOne(self, *args, **kw): - return self._getTargetClass()(*args, **kw) + return self._get_target_class()(*args, **kw) def test_ctor_w_explicit_timestamp(self): client = _Client(project=self.PROJECT) @@ -41,7 +41,7 @@ def test_ctor_w_explicit_timestamp(self): def test_from_api_repr(self): client = _Client(project=self.PROJECT) resource = {'name': self.TOPIC_PATH} - klass = self._getTargetClass() + klass = self._get_target_class() topic = klass.from_api_repr(resource, client=client) self.assertEqual(topic.name, self.TOPIC_NAME) self.assertIs(topic._client, client) @@ -54,7 +54,7 @@ def test_from_api_repr_with_bad_client(self): client = _Client(project=PROJECT1) PATH = 'projects/%s/topics/%s' % (PROJECT2, 
self.TOPIC_NAME) resource = {'name': PATH} - klass = self._getTargetClass() + klass = self._get_target_class() self.assertRaises(ValueError, klass.from_api_repr, resource, client=client) @@ -626,7 +626,7 @@ def _get_target_class(): return Batch def _makeOne(self, *args, **kwargs): - return self._getTargetClass()(*args, **kwargs) + return self._get_target_class()(*args, **kwargs) def test_ctor_defaults(self): topic = _Topic() From 6495c1a33ffc8d1778af2d7b2a6236ef8874f6c0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:05:35 -0800 Subject: [PATCH 0042/1197] Changing all instances of _makeOne to _make_one. Done via: $ git grep -l _makeOne | \ > xargs sed -i s/_makeOne/_make_one/g --- .../unit_tests/test__gax.py | 88 +++++++++---------- .../unit_tests/test__http.py | 84 +++++++++--------- .../unit_tests/test_client.py | 28 +++--- .../unit_tests/test_iam.py | 24 ++--- .../unit_tests/test_message.py | 12 +-- .../unit_tests/test_subscription.py | 76 ++++++++-------- .../unit_tests/test_topic.py | 76 ++++++++-------- 7 files changed, 194 insertions(+), 194 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 26c22c133fad..d69d3b8eb987 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -37,7 +37,7 @@ class _Base(object): SUB_NAME = 'sub_name' SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -52,7 +52,7 @@ def _get_target_class(): def test_ctor(self): gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) self.assertIs(api._gax_api, gax_api) self.assertIs(api._client, client) @@ -66,7 +66,7 @@ def test_list_topics_no_paging(self): page_token=TOKEN) gax_api = 
_GAXPublisherAPI(_list_topics_response=response) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_topics(self.PROJECT) topics = list(iterator) @@ -95,7 +95,7 @@ def test_list_topics_with_paging(self): [_TopicPB(self.TOPIC_PATH)], page_token=NEW_TOKEN) gax_api = _GAXPublisherAPI(_list_topics_response=response) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_topics( self.PROJECT, page_size=SIZE, page_token=TOKEN) @@ -118,7 +118,7 @@ def test_topic_create(self): topic_pb = _TopicPB(self.TOPIC_PATH) gax_api = _GAXPublisherAPI(_create_topic_response=topic_pb) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) resource = api.topic_create(self.TOPIC_PATH) @@ -131,7 +131,7 @@ def test_topic_create_already_exists(self): from google.cloud.exceptions import Conflict gax_api = _GAXPublisherAPI(_create_topic_conflict=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(Conflict): api.topic_create(self.TOPIC_PATH) @@ -144,7 +144,7 @@ def test_topic_create_error(self): from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.topic_create(self.TOPIC_PATH) @@ -157,7 +157,7 @@ def test_topic_get_hit(self): topic_pb = _TopicPB(self.TOPIC_PATH) gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) resource = api.topic_get(self.TOPIC_PATH) @@ -170,7 +170,7 @@ def test_topic_get_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) - 
api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.topic_get(self.TOPIC_PATH) @@ -183,7 +183,7 @@ def test_topic_get_error(self): from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.topic_get(self.TOPIC_PATH) @@ -195,7 +195,7 @@ def test_topic_get_error(self): def test_topic_delete_hit(self): gax_api = _GAXPublisherAPI(_delete_topic_ok=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) api.topic_delete(self.TOPIC_PATH) @@ -207,7 +207,7 @@ def test_topic_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXPublisherAPI(_delete_topic_ok=False) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.topic_delete(self.TOPIC_PATH) @@ -220,7 +220,7 @@ def test_topic_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.topic_delete(self.TOPIC_PATH) @@ -238,7 +238,7 @@ def test_topic_publish_hit(self): response = _PublishResponsePB([MSGID]) gax_api = _GAXPublisherAPI(_publish_response=response) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -258,7 +258,7 @@ def test_topic_publish_miss_w_attrs_w_bytes_payload(self): MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with 
self.assertRaises(NotFound): api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -278,7 +278,7 @@ def test_topic_publish_error(self): MESSAGE = {'data': B64, 'attributes': {}} gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -301,7 +301,7 @@ def test_topic_list_subscriptions_no_paging(self): response = _GAXPageIterator([local_sub_path]) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) topic = Topic(self.TOPIC_NAME, client) iterator = api.topic_list_subscriptions(topic) @@ -336,7 +336,7 @@ def test_topic_list_subscriptions_with_paging(self): [local_sub_path], page_token=NEW_TOKEN) gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) topic = Topic(self.TOPIC_NAME, client) iterator = api.topic_list_subscriptions( @@ -365,7 +365,7 @@ def test_topic_list_subscriptions_miss(self): gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): topic = Topic(self.TOPIC_NAME, client) @@ -384,7 +384,7 @@ def test_topic_list_subscriptions_error(self): gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): topic = Topic(self.TOPIC_NAME, client) @@ -410,7 +410,7 @@ def _get_target_class(): def test_ctor(self): gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) self.assertIs(api._gax_api, gax_api) 
self.assertIs(api._client, client) @@ -432,7 +432,7 @@ def test_list_subscriptions_no_paging(self): gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) creds = _Credentials() client = Client(project=self.PROJECT, credentials=creds) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_subscriptions(self.PROJECT) subscriptions = list(iterator) @@ -478,7 +478,7 @@ def test_list_subscriptions_with_paging(self): client = _Client(self.PROJECT) creds = _Credentials() client = Client(project=self.PROJECT, credentials=creds) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) iterator = api.list_subscriptions( self.PROJECT, page_size=SIZE, page_token=TOKEN) @@ -510,7 +510,7 @@ def test_subscription_create(self): sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) @@ -532,7 +532,7 @@ def test_subscription_create_already_exists(self): DEADLINE = 600 gax_api = _GAXSubscriberAPI(_create_subscription_conflict=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(Conflict): api.subscription_create( @@ -550,7 +550,7 @@ def test_subscription_create_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) @@ -572,7 +572,7 @@ def test_subscription_get_hit(self): push_config=push_cfg_pb) gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + 
api = self._make_one(gax_api, client) resource = api.subscription_get(self.SUB_PATH) @@ -592,7 +592,7 @@ def test_subscription_get_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.subscription_get(self.SUB_PATH) @@ -605,7 +605,7 @@ def test_subscription_get_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_get(self.SUB_PATH) @@ -617,7 +617,7 @@ def test_subscription_get_error(self): def test_subscription_delete_hit(self): gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) api.subscription_delete(self.TOPIC_PATH) @@ -629,7 +629,7 @@ def test_subscription_delete_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.subscription_delete(self.TOPIC_PATH) @@ -642,7 +642,7 @@ def test_subscription_delete_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_delete(self.TOPIC_PATH) @@ -654,7 +654,7 @@ def test_subscription_delete_error(self): def test_subscription_modify_push_config_hit(self): gax_api = _GAXSubscriberAPI(_modify_push_config_ok=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, 
client) api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) @@ -667,7 +667,7 @@ def test_subscription_modify_push_config_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.subscription_modify_push_config( @@ -682,7 +682,7 @@ def test_subscription_modify_push_config_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_modify_push_config( @@ -718,7 +718,7 @@ def test_subscription_pull_explicit(self): response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) gax_api = _GAXSubscriberAPI(_pull_response=response_pb) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) MAX_MESSAGES = 10 received = api.subscription_pull( @@ -736,7 +736,7 @@ def test_subscription_pull_defaults_miss(self): from google.cloud.exceptions import NotFound gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.subscription_pull(self.SUB_PATH) @@ -752,7 +752,7 @@ def test_subscription_pull_defaults_error(self): from google.gax.errors import GaxError gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_pull(self.SUB_PATH) @@ -769,7 +769,7 @@ def test_subscription_acknowledge_hit(self): ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = 
self._make_one(gax_api, client) api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -784,7 +784,7 @@ def test_subscription_acknowledge_miss(self): ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -800,7 +800,7 @@ def test_subscription_acknowledge_error(self): ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -816,7 +816,7 @@ def test_subscription_modify_ack_deadline_hit(self): NEW_DEADLINE = 90 gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) api.subscription_modify_ack_deadline( self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) @@ -835,7 +835,7 @@ def test_subscription_modify_ack_deadline_miss(self): NEW_DEADLINE = 90 gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(NotFound): api.subscription_modify_ack_deadline( @@ -855,7 +855,7 @@ def test_subscription_modify_ack_deadline_error(self): NEW_DEADLINE = 90 gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) - api = self._makeOne(gax_api, client) + api = self._make_one(gax_api, client) with self.assertRaises(GaxError): api.subscription_modify_ack_deadline( diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 2064bed8463f..5918e26281ef 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ 
b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -25,7 +25,7 @@ class _Base(unittest.TestCase): SUB_NAME = 'subscription_name' SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -37,7 +37,7 @@ def _get_target_class(): return Connection def test_default_url(self): - conn = self._makeOne() + conn = self._make_one() klass = self._get_target_class() self.assertEqual(conn.api_base_url, klass.API_BASE_URL) @@ -50,14 +50,14 @@ def test_custom_url_from_env(self): fake_environ = {PUBSUB_EMULATOR: HOST} with _Monkey(os, getenv=fake_environ.get): - conn = self._makeOne() + conn = self._make_one() klass = self._get_target_class() self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) self.assertEqual(conn.api_base_url, 'http://' + HOST) def test_build_api_url_no_extra_query_params(self): - conn = self._makeOne() + conn = self._make_one() URI = '/'.join([ conn.API_BASE_URL, conn.API_VERSION, @@ -68,7 +68,7 @@ def test_build_api_url_no_extra_query_params(self): def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit - conn = self._makeOne() + conn = self._make_one() uri = conn.build_api_url('/foo', {'bar': 'baz'}) scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) @@ -80,7 +80,7 @@ def test_build_api_url_w_extra_query_params(self): def test_build_api_url_w_base_url_override(self): base_url1 = 'api-base-url1' base_url2 = 'api-base-url2' - conn = self._makeOne() + conn = self._make_one() conn.api_base_url = base_url1 URI = '/'.join([ base_url2, @@ -98,13 +98,13 @@ def _get_target_class(): from google.cloud.pubsub._http import _PublisherAPI return _PublisherAPI - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): 
connection = _Connection() client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) self.assertIs(api._client, client) self.assertIs(api._connection, connection) @@ -114,7 +114,7 @@ def test_list_topics_no_paging(self): returned = {'topics': [{'name': self.TOPIC_PATH}]} connection = _Connection(returned) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_topics(self.PROJECT) topics = list(iterator) @@ -145,7 +145,7 @@ def test_list_topics_with_paging(self): } connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_topics( self.PROJECT, page_token=TOKEN1, page_size=SIZE) @@ -170,7 +170,7 @@ def test_list_topics_missing_key(self): returned = {} connection = _Connection(returned) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_topics(self.PROJECT) topics = list(iterator) @@ -188,7 +188,7 @@ def test_topic_create(self): RETURNED = {'name': self.TOPIC_PATH} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) resource = api.topic_create(self.TOPIC_PATH) @@ -202,7 +202,7 @@ def test_topic_create_already_exists(self): connection = _Connection() connection._no_response_error = Conflict client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(Conflict): api.topic_create(self.TOPIC_PATH) @@ -215,7 +215,7 @@ def test_topic_get_hit(self): RETURNED = {'name': self.TOPIC_PATH} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) resource = api.topic_get(self.TOPIC_PATH) @@ -228,7 +228,7 @@ def test_topic_get_miss(self): from google.cloud.exceptions 
import NotFound connection = _Connection() client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.topic_get(self.TOPIC_PATH) @@ -241,7 +241,7 @@ def test_topic_delete_hit(self): RETURNED = {} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) api.topic_delete(self.TOPIC_PATH) @@ -253,7 +253,7 @@ def test_topic_delete_miss(self): from google.cloud.exceptions import NotFound connection = _Connection() client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.topic_delete(self.TOPIC_PATH) @@ -272,7 +272,7 @@ def test_topic_publish_hit(self): RETURNED = {'messageIds': [MSGID]} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -291,7 +291,7 @@ def test_topic_publish_miss(self): MESSAGE = {'data': PAYLOAD, 'attributes': {}} connection = _Connection() client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): api.topic_publish(self.TOPIC_PATH, [MESSAGE]) @@ -311,7 +311,7 @@ def test_topic_list_subscriptions_no_paging(self): RETURNED = {'subscriptions': [local_sub_path]} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) topic = Topic(self.TOPIC_NAME, client) iterator = api.topic_list_subscriptions(topic) @@ -347,7 +347,7 @@ def test_topic_list_subscriptions_with_paging(self): } connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) topic = Topic(self.TOPIC_NAME, client) iterator = api.topic_list_subscriptions( @@ -375,7 
+375,7 @@ def test_topic_list_subscriptions_missing_key(self): connection = _Connection({}) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) topic = Topic(self.TOPIC_NAME, client) iterator = api.topic_list_subscriptions(topic) @@ -396,7 +396,7 @@ def test_topic_list_subscriptions_miss(self): connection = _Connection() client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) with self.assertRaises(NotFound): topic = Topic(self.TOPIC_NAME, client) @@ -415,13 +415,13 @@ def _get_target_class(): from google.cloud.pubsub._http import _SubscriberAPI return _SubscriberAPI - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor(self): connection = _Connection() client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) self.assertIs(api._connection, connection) self.assertIs(api._client, client) @@ -436,7 +436,7 @@ def test_list_subscriptions_no_paging(self): creds = _Credentials() client = Client(project=self.PROJECT, credentials=creds) client.connection = connection - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_subscriptions(self.PROJECT) subscriptions = list(iterator) @@ -479,7 +479,7 @@ def test_list_subscriptions_with_paging(self): creds = _Credentials() client = Client(project=self.PROJECT, credentials=creds) client.connection = connection - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_subscriptions( self.PROJECT, page_token=TOKEN1, page_size=SIZE) @@ -511,7 +511,7 @@ def test_list_subscriptions_missing_key(self): RETURNED = {} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) iterator = api.list_subscriptions(self.PROJECT) subscriptions = list(iterator) @@ -531,7 +531,7 @@ def 
test_subscription_create_defaults(self): RETURNED['name'] = self.SUB_PATH connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) @@ -555,7 +555,7 @@ def test_subscription_create_explicit(self): RETURNED['name'] = self.SUB_PATH connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) resource = api.subscription_create( self.SUB_PATH, self.TOPIC_PATH, @@ -578,7 +578,7 @@ def test_subscription_get(self): } connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) resource = api.subscription_get(self.SUB_PATH) @@ -591,7 +591,7 @@ def test_subscription_delete(self): RETURNED = {} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) api.subscription_delete(self.SUB_PATH) @@ -607,7 +607,7 @@ def test_subscription_modify_push_config(self): RETURNED = {} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) @@ -628,7 +628,7 @@ def test_subscription_pull_defaults(self): } connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) BODY = { 'returnImmediately': False, 'maxMessages': 1, @@ -655,7 +655,7 @@ def test_subscription_pull_explicit(self): } connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) MAX_MESSAGES = 10 BODY = { 'returnImmediately': True, @@ -680,7 +680,7 @@ def test_subscription_acknowledge(self): RETURNED = {} connection = _Connection(RETURNED) client = 
_Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) @@ -700,7 +700,7 @@ def test_subscription_modify_ack_deadline(self): RETURNED = {} connection = _Connection(RETURNED) client = _Client(connection, self.PROJECT) - api = self._makeOne(client) + api = self._make_one(client) api.subscription_modify_ack_deadline( self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) @@ -720,7 +720,7 @@ def _get_target_class(): def test_ctor(self): connection = _Connection() - api = self._makeOne(connection) + api = self._make_one(connection) self.assertIs(api._connection, connection) def test_get_iam_policy(self): @@ -744,7 +744,7 @@ def test_get_iam_policy(self): ], } connection = _Connection(RETURNED) - api = self._makeOne(connection) + api = self._make_one(connection) policy = api.get_iam_policy(self.TOPIC_PATH) @@ -775,7 +775,7 @@ def test_set_iam_policy(self): } RETURNED = POLICY.copy() connection = _Connection(RETURNED) - api = self._makeOne(connection) + api = self._make_one(connection) policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) @@ -795,7 +795,7 @@ def test_test_iam_permissions(self): ALLOWED = ALL_ROLES[1:] RETURNED = {'permissions': ALLOWED} connection = _Connection(RETURNED) - api = self._makeOne(connection) + api = self._make_one(connection) allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) @@ -814,7 +814,7 @@ def test_test_iam_permissions_missing_key(self): ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] RETURNED = {} connection = _Connection(RETURNED) - api = self._makeOne(connection) + api = self._make_one(connection) allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 1ed67be22e8a..8ca227d243cc 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ 
b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -27,7 +27,7 @@ def _get_target_class(): from google.cloud.pubsub.client import Client return Client - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_publisher_api_wo_gax(self): @@ -37,7 +37,7 @@ def test_publisher_api_wo_gax(self): creds = _Credentials() with _Monkey(MUT, _USE_GAX=False): - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) conn = client.connection = object() api = client.publisher_api @@ -55,7 +55,7 @@ def test_no_gax_ctor(self): creds = _Credentials() with _Monkey(MUT, _USE_GAX=True): - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) self.assertFalse(client._use_gax) @@ -84,7 +84,7 @@ def __init__(self, _wrapped, client): with _Monkey(MUT, _USE_GAX=True, make_gax_publisher_api=_generated_api, GAXPublisherAPI=_GaxPublisherAPI): - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) api = client.publisher_api self.assertIsInstance(api, _GaxPublisherAPI) @@ -103,7 +103,7 @@ def test_subscriber_api_wo_gax(self): creds = _Credentials() with _Monkey(MUT, _USE_GAX=False): - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) conn = client.connection = object() api = client.subscriber_api @@ -136,7 +136,7 @@ def __init__(self, _wrapped, client): with _Monkey(MUT, _USE_GAX=True, make_gax_subscriber_api=_generated_api, GAXSubscriberAPI=_GaxSubscriberAPI): - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) api = client.subscriber_api self.assertIsInstance(api, _GaxSubscriberAPI) @@ -151,7 +151,7 @@ def 
__init__(self, _wrapped, client): def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) conn = client.connection = object() api = client.iam_policy_api self.assertIsInstance(api, _IAMPolicyAPI) @@ -164,7 +164,7 @@ def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) client.connection = object() api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) client._publisher_api = api @@ -187,7 +187,7 @@ def test_list_topics_with_paging(self): TOKEN2 = 'TOKEN2' SIZE = 1 creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) client.connection = object() api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) client._publisher_api = api @@ -205,7 +205,7 @@ def test_list_topics_with_paging(self): def test_list_topics_missing_key(self): creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds) + client = self._make_one(project=self.PROJECT, credentials=creds) client.connection = object() api = _FauxPublisherAPI() client._publisher_api = api @@ -225,7 +225,7 @@ def test_list_subscriptions_no_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _Credentials() - client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) returned = {'subscriptions': [SUB_INFO]} client.connection = _Connection(returned) @@ -263,7 +263,7 @@ def test_list_subscriptions_with_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _Credentials() - 
client = self._makeOne(project=self.PROJECT, credentials=creds, + client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) # Set up the mock response. @@ -317,7 +317,7 @@ def test_list_subscriptions_w_missing_key(self): PROJECT = 'PROJECT' creds = _Credentials() - client = self._makeOne(project=PROJECT, credentials=creds) + client = self._make_one(project=PROJECT, credentials=creds) client.connection = object() api = client._subscriber_api = _FauxSubscriberAPI() api._list_subscriptions_response = (), None @@ -335,7 +335,7 @@ def test_topic(self): TOPIC_NAME = 'TOPIC_NAME' creds = _Credentials() - client_obj = self._makeOne(project=PROJECT, credentials=creds) + client_obj = self._make_one(project=PROJECT, credentials=creds) new_topic = client_obj.topic(TOPIC_NAME) self.assertEqual(new_topic.name, TOPIC_NAME) self.assertIs(new_topic._client, client_obj) diff --git a/packages/google-cloud-pubsub/unit_tests/test_iam.py b/packages/google-cloud-pubsub/unit_tests/test_iam.py index dc6e2a8efd68..ea2703dd56b6 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_iam.py +++ b/packages/google-cloud-pubsub/unit_tests/test_iam.py @@ -22,11 +22,11 @@ def _get_target_class(): from google.cloud.pubsub.iam import Policy return Policy - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): - policy = self._makeOne() + policy = self._make_one() self.assertIsNone(policy.etag) self.assertIsNone(policy.version) self.assertEqual(list(policy.owners), []) @@ -38,7 +38,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): VERSION = 17 ETAG = 'ETAG' - policy = self._makeOne(ETAG, VERSION) + policy = self._make_one(ETAG, VERSION) self.assertEqual(policy.etag, ETAG) self.assertEqual(policy.version, VERSION) self.assertEqual(list(policy.owners), []) @@ -50,33 +50,33 @@ def test_ctor_explicit(self): def test_user(self): EMAIL = 'phred@example.com' MEMBER = 'user:%s' % 
(EMAIL,) - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.user(EMAIL), MEMBER) def test_service_account(self): EMAIL = 'phred@example.com' MEMBER = 'serviceAccount:%s' % (EMAIL,) - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.service_account(EMAIL), MEMBER) def test_group(self): EMAIL = 'phred@example.com' MEMBER = 'group:%s' % (EMAIL,) - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.group(EMAIL), MEMBER) def test_domain(self): DOMAIN = 'example.com' MEMBER = 'domain:%s' % (DOMAIN,) - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.domain(DOMAIN), MEMBER) def test_all_users(self): - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.all_users(), 'allUsers') def test_authenticated_users(self): - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.authenticated_users(), 'allAuthenticatedUsers') def test_from_api_repr_only_etag(self): @@ -143,11 +143,11 @@ def test_from_api_repr_bad_role(self): klass.from_api_repr(RESOURCE) def test_to_api_repr_defaults(self): - policy = self._makeOne() + policy = self._make_one() self.assertEqual(policy.to_api_repr(), {}) def test_to_api_repr_only_etag(self): - policy = self._makeOne('DEADBEEF') + policy = self._make_one('DEADBEEF') self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) def test_to_api_repr_full(self): @@ -177,7 +177,7 @@ def test_to_api_repr_full(self): {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, ], } - policy = self._makeOne('DEADBEEF', 17) + policy = self._make_one('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) policy.editors.add(EDITOR1) diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 6c32abbfff62..dd646afd5cc4 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ 
b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -22,13 +22,13 @@ def _get_target_class(): from google.cloud.pubsub.message import Message return Message - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_no_attributes(self): DATA = b'DEADBEEF' MESSAGE_ID = b'12345' - message = self._makeOne(data=DATA, message_id=MESSAGE_ID) + message = self._make_one(data=DATA, message_id=MESSAGE_ID) self.assertEqual(message.data, DATA) self.assertEqual(message.message_id, MESSAGE_ID) self.assertEqual(message.attributes, {}) @@ -38,7 +38,7 @@ def test_ctor_w_attributes(self): DATA = b'DEADBEEF' MESSAGE_ID = b'12345' ATTRS = {'a': 'b'} - message = self._makeOne(data=DATA, message_id=MESSAGE_ID, + message = self._make_one(data=DATA, message_id=MESSAGE_ID, attributes=ATTRS) self.assertEqual(message.data, DATA) self.assertEqual(message.message_id, MESSAGE_ID) @@ -48,7 +48,7 @@ def test_ctor_w_attributes(self): def test_timestamp_no_attributes(self): DATA = b'DEADBEEF' MESSAGE_ID = b'12345' - message = self._makeOne(data=DATA, message_id=MESSAGE_ID) + message = self._make_one(data=DATA, message_id=MESSAGE_ID) def _to_fail(): return message.timestamp @@ -59,7 +59,7 @@ def test_timestamp_wo_timestamp_in_attributes(self): DATA = b'DEADBEEF' MESSAGE_ID = b'12345' ATTRS = {'a': 'b'} - message = self._makeOne(data=DATA, message_id=MESSAGE_ID, + message = self._make_one(data=DATA, message_id=MESSAGE_ID, attributes=ATTRS) def _to_fail(): @@ -77,7 +77,7 @@ def test_timestamp_w_timestamp_in_attributes(self): naive = datetime.strptime(TIMESTAMP, _RFC3339_MICROS) timestamp = naive.replace(tzinfo=UTC) ATTRS = {'timestamp': TIMESTAMP} - message = self._makeOne(data=DATA, message_id=MESSAGE_ID, + message = self._make_one(data=DATA, message_id=MESSAGE_ID, attributes=ATTRS) self.assertEqual(message.timestamp, timestamp) diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py 
b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index 68ffab56697e..66c7aac1dd95 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -29,13 +29,13 @@ def _get_target_class(): from google.cloud.pubsub.subscription import Subscription return Subscription - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): client = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) self.assertEqual(subscription.name, self.SUB_NAME) self.assertIs(subscription.topic, topic) self.assertIsNone(subscription.ack_deadline) @@ -44,7 +44,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): client = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic, + subscription = self._make_one(self.SUB_NAME, topic, self.DEADLINE, self.ENDPOINT) self.assertEqual(subscription.name, self.SUB_NAME) self.assertIs(subscription.topic, topic) @@ -53,7 +53,7 @@ def test_ctor_explicit(self): def test_ctor_w_client_wo_topic(self): client = _Client(project=self.PROJECT) - subscription = self._makeOne(self.SUB_NAME, client=client) + subscription = self._make_one(self.SUB_NAME, client=client) self.assertEqual(subscription.name, self.SUB_NAME) self.assertIsNone(subscription.topic) @@ -62,11 +62,11 @@ def test_ctor_w_both_topic_and_client(self): client2 = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client1) with self.assertRaises(TypeError): - self._makeOne(self.SUB_NAME, topic, client=client2) + self._make_one(self.SUB_NAME, topic, client=client2) def test_ctor_w_neither_topic_nor_client(self): with self.assertRaises(TypeError): - self._makeOne(self.SUB_NAME) + 
self._make_one(self.SUB_NAME) def test_from_api_repr_no_topics(self): from google.cloud.pubsub.topic import Topic @@ -140,7 +140,7 @@ def test_full_name_and_path(self): TOPIC_NAME = 'topic_name' CLIENT = _Client(project=PROJECT) topic = _Topic(TOPIC_NAME, client=CLIENT) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) self.assertEqual(subscription.full_name, SUB_FULL) self.assertEqual(subscription.path, SUB_PATH) @@ -148,7 +148,7 @@ def test_autoack_defaults(self): from google.cloud.pubsub.subscription import AutoAck client = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) auto_ack = subscription.auto_ack() self.assertIsInstance(auto_ack, AutoAck) self.assertIs(auto_ack._subscription, subscription) @@ -161,7 +161,7 @@ def test_autoack_explicit(self): client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) auto_ack = subscription.auto_ack(True, 10, client2) self.assertIsInstance(auto_ack, AutoAck) self.assertIs(auto_ack._subscription, subscription) @@ -178,7 +178,7 @@ def test_create_pull_wo_ack_deadline_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_create_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.create() @@ -197,7 +197,7 @@ def test_create_push_w_ack_deadline_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_create_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic, + subscription = 
self._make_one(self.SUB_NAME, topic, self.DEADLINE, self.ENDPOINT) subscription.create(client=client2) @@ -210,7 +210,7 @@ def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.subscriber_api = _FauxSubscribererAPI() topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) self.assertFalse(subscription.exists()) @@ -223,7 +223,7 @@ def test_exists_hit_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_get_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) self.assertTrue(subscription.exists(client=client2)) @@ -240,7 +240,7 @@ def test_reload_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_get_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.reload() @@ -258,7 +258,7 @@ def test_reload_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_get_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic, + subscription = self._make_one(self.SUB_NAME, topic, self.DEADLINE, self.ENDPOINT) subscription.reload(client=client2) @@ -273,7 +273,7 @@ def test_delete_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_delete_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.delete() @@ -286,7 +286,7 @@ def test_delete_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() 
api._subscription_delete_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic, + subscription = self._make_one(self.SUB_NAME, topic, self.DEADLINE, self.ENDPOINT) subscription.delete(client=client2) @@ -298,7 +298,7 @@ def test_modify_push_config_w_endpoint_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_modify_push_config_response = {} topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.modify_push_configuration(push_endpoint=self.ENDPOINT) @@ -312,7 +312,7 @@ def test_modify_push_config_wo_endpoint_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_modify_push_config_response = {} topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic, + subscription = self._make_one(self.SUB_NAME, topic, push_endpoint=self.ENDPOINT) subscription.modify_push_configuration(push_endpoint=None, @@ -333,7 +333,7 @@ def test_pull_wo_return_immediately_max_messages_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_pull_response = [REC_MESSAGE] topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) pulled = subscription.pull() @@ -360,7 +360,7 @@ def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_pull_response = [REC_MESSAGE] topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) pulled = subscription.pull(return_immediately=True, max_messages=3, client=client2) @@ -380,7 +380,7 @@ def test_pull_wo_receivedMessages(self): api = 
client.subscriber_api = _FauxSubscribererAPI() api._subscription_pull_response = {} topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) pulled = subscription.pull(return_immediately=False) @@ -395,7 +395,7 @@ def test_acknowledge_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_acknowlege_response = {} topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.acknowledge([ACK_ID1, ACK_ID2]) @@ -410,7 +410,7 @@ def test_acknowledge_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_acknowlege_response = {} topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.acknowledge([ACK_ID1, ACK_ID2], client=client2) @@ -424,7 +424,7 @@ def test_modify_ack_deadline_w_bound_client(self): api = client.subscriber_api = _FauxSubscribererAPI() api._subscription_modify_ack_deadline_response = {} topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.modify_ack_deadline([ACK_ID1, ACK_ID2], self.DEADLINE) @@ -439,7 +439,7 @@ def test_modify_ack_deadline_w_alternate_client(self): api = client2.subscriber_api = _FauxSubscribererAPI() api._subscription_modify_ack_deadline_response = {} topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) subscription.modify_ack_deadline( [ACK_ID1, ACK_ID2], self.DEADLINE, client=client2) @@ -478,7 +478,7 @@ def test_get_iam_policy_w_bound_client(self): api = client.iam_policy_api = _FauxIAMPolicy() api._get_iam_policy_response = POLICY 
topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) policy = subscription.get_iam_policy() @@ -500,7 +500,7 @@ def test_get_iam_policy_w_alternate_client(self): api = client2.iam_policy_api = _FauxIAMPolicy() api._get_iam_policy_response = POLICY topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) policy = subscription.get_iam_policy(client=client2) @@ -547,7 +547,7 @@ def test_set_iam_policy_w_bound_client(self): api = client.iam_policy_api = _FauxIAMPolicy() api._set_iam_policy_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) policy = Policy('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) @@ -577,7 +577,7 @@ def test_set_iam_policy_w_alternate_client(self): api = client2.iam_policy_api = _FauxIAMPolicy() api._set_iam_policy_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) policy = Policy() new_policy = subscription.set_iam_policy(policy, client=client2) @@ -599,7 +599,7 @@ def test_check_iam_permissions_w_bound_client(self): api = client.iam_policy_api = _FauxIAMPolicy() api._test_iam_permissions_response = ROLES[:-1] topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription = self._make_one(self.SUB_NAME, topic) allowed = subscription.check_iam_permissions(ROLES) @@ -618,7 +618,7 @@ def test_check_iam_permissions_w_alternate_client(self): api = client2.iam_policy_api = _FauxIAMPolicy() api._test_iam_permissions_response = [] topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._makeOne(self.SUB_NAME, topic) + subscription 
= self._make_one(self.SUB_NAME, topic) allowed = subscription.check_iam_permissions(ROLES, client=client2) @@ -677,12 +677,12 @@ def _get_target_class(): from google.cloud.pubsub.subscription import AutoAck return AutoAck - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): subscription = _FauxSubscription(()) - auto_ack = self._makeOne(subscription) + auto_ack = self._make_one(subscription) self.assertEqual(auto_ack._return_immediately, False) self.assertEqual(auto_ack._max_messages, 1) self.assertIsNone(auto_ack._client) @@ -690,7 +690,7 @@ def test_ctor_defaults(self): def test_ctor_explicit(self): CLIENT = object() subscription = _FauxSubscription(()) - auto_ack = self._makeOne( + auto_ack = self._make_one( subscription, return_immediately=True, max_messages=10, client=CLIENT) self.assertIs(auto_ack._subscription, subscription) @@ -700,7 +700,7 @@ def test_ctor_explicit(self): def test___enter___w_defaults(self): subscription = _FauxSubscription(()) - auto_ack = self._makeOne(subscription) + auto_ack = self._make_one(subscription) with auto_ack as returned: pass @@ -713,7 +713,7 @@ def test___enter___w_defaults(self): def test___enter___w_explicit(self): CLIENT = object() subscription = _FauxSubscription(()) - auto_ack = self._makeOne( + auto_ack = self._make_one( subscription, return_immediately=True, max_messages=10, client=CLIENT) @@ -736,7 +736,7 @@ def test___exit___(self): (ACK_ID3, MESSAGE3), ] subscription = _FauxSubscription(ITEMS) - auto_ack = self._makeOne(subscription, client=CLIENT) + auto_ack = self._make_one(subscription, client=CLIENT) with auto_ack: for ack_id, message in list(auto_ack.items()): if message.fail: diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index fcdb9b6e535d..2164a0d1cadf 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ 
b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -25,12 +25,12 @@ def _get_target_class(): from google.cloud.pubsub.topic import Topic return Topic - def _makeOne(self, *args, **kw): + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_w_explicit_timestamp(self): client = _Client(project=self.PROJECT) - topic = self._makeOne(self.TOPIC_NAME, + topic = self._make_one(self.TOPIC_NAME, client=client, timestamp_messages=True) self.assertEqual(topic.name, self.TOPIC_NAME) @@ -62,7 +62,7 @@ def test_create_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) topic.create() @@ -73,7 +73,7 @@ def test_create_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.publisher_api = _FauxPublisherAPI() api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) topic.create(client=client2) @@ -82,7 +82,7 @@ def test_create_w_alternate_client(self): def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) self.assertFalse(topic.exists()) @@ -93,7 +93,7 @@ def test_exists_hit_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.publisher_api = _FauxPublisherAPI() api._topic_get_response = {'name': self.TOPIC_PATH} - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) self.assertTrue(topic.exists(client=client2)) @@ -103,7 +103,7 @@ def test_delete_w_bound_client(self): client = 
_Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_delete_response = {} - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) topic.delete() @@ -114,7 +114,7 @@ def test_delete_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.publisher_api = _FauxPublisherAPI() api._topic_delete_response = {} - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) topic.delete(client=client2) @@ -127,7 +127,7 @@ def test_publish_single_bytes_wo_attrs_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) msgid = topic.publish(PAYLOAD) @@ -155,7 +155,7 @@ def _utcnow(): api = client2.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] - topic = self._makeOne(self.TOPIC_NAME, client=client1, + topic = self._make_one(self.TOPIC_NAME, client=client1, timestamp_messages=True) with _Monkey(MUT, _NOW=_utcnow): msgid = topic.publish(PAYLOAD, client=client2) @@ -172,7 +172,7 @@ def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] - topic = self._makeOne(self.TOPIC_NAME, client=client, + topic = self._make_one(self.TOPIC_NAME, client=client, timestamp_messages=True) msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) @@ -188,7 +188,7 @@ def test_publish_single_w_attrs(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) msgid = topic.publish(PAYLOAD, 
attr1='value1', attr2='value2') @@ -202,7 +202,7 @@ def test_publish_with_gax(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) msgid = topic.publish(PAYLOAD) self.assertEqual(msgid, MSGID) @@ -215,7 +215,7 @@ def test_publish_without_gax(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) msgid = topic.publish(PAYLOAD) self.assertEqual(msgid, MSGID) @@ -232,7 +232,7 @@ def test_publish_multiple_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID1, MSGID2] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) with topic.batch() as batch: batch.publish(PAYLOAD1) @@ -247,7 +247,7 @@ def test_publish_w_no_messages(self): client = _Client(project=self.PROJECT) api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) with topic.batch() as batch: pass @@ -269,7 +269,7 @@ def test_publish_multiple_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID1, MSGID2] - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) with topic.batch(client=client2) as batch: batch.publish(PAYLOAD1) @@ -285,7 +285,7 @@ def test_publish_multiple_error(self): PAYLOAD2 = b'This is the second message text' client = _Client(project=self.PROJECT) api = client.publisher_api 
= _FauxPublisherAPI() - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) try: with topic.batch() as batch: @@ -301,7 +301,7 @@ def test_publish_multiple_error(self): def test_subscription(self): from google.cloud.pubsub.subscription import Subscription client = _Client(project=self.PROJECT) - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) SUBSCRIPTION_NAME = 'subscription_name' subscription = topic.subscription(SUBSCRIPTION_NAME) @@ -332,7 +332,7 @@ def test_list_subscriptions_no_paging(self): } client.connection = _Connection(returned) - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) iterator = topic.list_subscriptions() page = six.next(iterator.pages) @@ -382,7 +382,7 @@ def test_list_subscriptions_with_paging(self): } client.connection = _Connection(returned) - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) iterator = topic.list_subscriptions( page_size=PAGE_SIZE, page_token=TOKEN) @@ -417,7 +417,7 @@ def test_list_subscriptions_missing_key(self): client = Client(project=self.PROJECT, credentials=object(), use_gax=False) client.connection = _Connection({}) - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) iterator = topic.list_subscriptions() subscriptions = list(iterator) @@ -464,7 +464,7 @@ def test_get_iam_policy_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.iam_policy_api = _FauxIAMPolicy() api._get_iam_policy_response = POLICY - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) policy = topic.get_iam_policy() @@ -486,7 +486,7 @@ def test_get_iam_policy_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.iam_policy_api = 
_FauxIAMPolicy() api._get_iam_policy_response = POLICY - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) policy = topic.get_iam_policy(client=client2) @@ -538,7 +538,7 @@ def test_set_iam_policy_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.iam_policy_api = _FauxIAMPolicy() api._set_iam_policy_response = RESPONSE - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) policy = Policy('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) @@ -568,7 +568,7 @@ def test_set_iam_policy_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.iam_policy_api = _FauxIAMPolicy() api._set_iam_policy_response = RESPONSE - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) policy = Policy() new_policy = topic.set_iam_policy(policy, client=client2) @@ -590,7 +590,7 @@ def test_check_iam_permissions_w_bound_client(self): client = _Client(project=self.PROJECT) api = client.iam_policy_api = _FauxIAMPolicy() api._test_iam_permissions_response = ROLES[:-1] - topic = self._makeOne(self.TOPIC_NAME, client=client) + topic = self._make_one(self.TOPIC_NAME, client=client) allowed = topic.check_iam_permissions(ROLES) @@ -608,7 +608,7 @@ def test_check_iam_permissions_w_alternate_client(self): client2 = _Client(project=self.PROJECT) api = client2.iam_policy_api = _FauxIAMPolicy() api._test_iam_permissions_response = [] - topic = self._makeOne(self.TOPIC_NAME, client=client1) + topic = self._make_one(self.TOPIC_NAME, client=client1) allowed = topic.check_iam_permissions(ROLES, client=client2) @@ -625,13 +625,13 @@ def _get_target_class(): from google.cloud.pubsub.topic import Batch return Batch - def _makeOne(self, *args, **kwargs): + def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) def 
test_ctor_defaults(self): topic = _Topic() client = _Client(project=self.PROJECT) - batch = self._makeOne(topic, client) + batch = self._make_one(topic, client) self.assertIs(batch.topic, topic) self.assertIs(batch.client, client) self.assertEqual(len(batch.messages), 0) @@ -640,13 +640,13 @@ def test_ctor_defaults(self): def test___iter___empty(self): topic = _Topic() client = object() - batch = self._makeOne(topic, client) + batch = self._make_one(topic, client) self.assertEqual(list(batch), []) def test___iter___non_empty(self): topic = _Topic() client = object() - batch = self._makeOne(topic, client) + batch = self._make_one(topic, client) batch.message_ids[:] = ['ONE', 'TWO', 'THREE'] self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) @@ -656,7 +656,7 @@ def test_publish_bytes_wo_attrs(self): 'attributes': {}} client = _Client(project=self.PROJECT) topic = _Topic() - batch = self._makeOne(topic, client=client) + batch = self._make_one(topic, client=client) batch.publish(PAYLOAD) self.assertEqual(batch.messages, [MESSAGE]) @@ -666,7 +666,7 @@ def test_publish_bytes_w_add_timestamp(self): 'attributes': {'timestamp': 'TIMESTAMP'}} client = _Client(project=self.PROJECT) topic = _Topic(timestamp_messages=True) - batch = self._makeOne(topic, client=client) + batch = self._make_one(topic, client=client) batch.publish(PAYLOAD) self.assertEqual(batch.messages, [MESSAGE]) @@ -683,7 +683,7 @@ def test_commit_w_bound_client(self): api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID1, MSGID2] topic = _Topic() - batch = self._makeOne(topic, client=client) + batch = self._make_one(topic, client=client) batch.publish(PAYLOAD1) batch.publish(PAYLOAD2, attr1='value1', attr2='value2') @@ -707,7 +707,7 @@ def test_commit_w_alternate_client(self): api = client2.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID1, MSGID2] topic = _Topic() - batch = self._makeOne(topic, client=client1) + batch = self._make_one(topic, 
client=client1) batch.publish(PAYLOAD1) batch.publish(PAYLOAD2, attr1='value1', attr2='value2') @@ -730,7 +730,7 @@ def test_context_mgr_success(self): api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID1, MSGID2] topic = _Topic() - batch = self._makeOne(topic, client=client) + batch = self._make_one(topic, client=client) with batch as other: batch.publish(PAYLOAD1) @@ -751,7 +751,7 @@ def test_context_mgr_failure(self): client = _Client(project='PROJECT') api = client.publisher_api = _FauxPublisherAPI() topic = _Topic() - batch = self._makeOne(topic, client=client) + batch = self._make_one(topic, client=client) try: with batch as other: From 40d43de8154590ed714c544c8e442550246310e0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 11:06:21 -0800 Subject: [PATCH 0043/1197] Changing all instances of _callFUT to _call_fut. Done via: $ git grep -l _callFUT | \ > xargs sed -i s/_callFUT/_call_fut/g --- packages/google-cloud-pubsub/unit_tests/test__gax.py | 12 ++++++------ .../google-cloud-pubsub/unit_tests/test__helpers.py | 12 ++++++------ .../google-cloud-pubsub/unit_tests/test__http.py | 10 +++++----- 3 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index d69d3b8eb987..71888431d2e2 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -872,7 +872,7 @@ def test_subscription_modify_ack_deadline_error(self): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_make_gax_publisher_api(_Base, unittest.TestCase): - def _callFUT(self, connection): + def _call_fut(self, connection): from google.cloud.pubsub._gax import make_gax_publisher_api return make_gax_publisher_api(connection) @@ -901,7 +901,7 @@ def make_channel(*args): credentials=creds) with _Monkey(MUT, PublisherApi=mock_publisher_api, 
make_secure_channel=make_channel): - result = self._callFUT(connection) + result = self._call_fut(connection) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) @@ -929,7 +929,7 @@ def mock_insecure_channel(host): connection = _Connection(in_emulator=True, host=host) with _Monkey(MUT, PublisherApi=mock_publisher_api, insecure_channel=mock_insecure_channel): - result = self._callFUT(connection) + result = self._call_fut(connection) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) @@ -939,7 +939,7 @@ def mock_insecure_channel(host): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - def _callFUT(self, connection): + def _call_fut(self, connection): from google.cloud.pubsub._gax import make_gax_subscriber_api return make_gax_subscriber_api(connection) @@ -968,7 +968,7 @@ def make_channel(*args): credentials=creds) with _Monkey(MUT, SubscriberApi=mock_subscriber_api, make_secure_channel=make_channel): - result = self._callFUT(connection) + result = self._call_fut(connection) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) @@ -996,7 +996,7 @@ def mock_insecure_channel(host): connection = _Connection(in_emulator=True, host=host) with _Monkey(MUT, SubscriberApi=mock_subscriber_api, insecure_channel=mock_insecure_channel): - result = self._callFUT(connection) + result = self._call_fut(connection) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) diff --git a/packages/google-cloud-pubsub/unit_tests/test__helpers.py b/packages/google-cloud-pubsub/unit_tests/test__helpers.py index 5ff47b7802ea..aaf00c651709 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__helpers.py +++ b/packages/google-cloud-pubsub/unit_tests/test__helpers.py @@ -17,7 +17,7 @@ class Test_topic_name_from_path(unittest.TestCase): - def _callFUT(self, path, project): + def _call_fut(self, path, project): from 
google.cloud.pubsub._helpers import topic_name_from_path return topic_name_from_path(path, project) @@ -25,20 +25,20 @@ def test_w_simple_name(self): TOPIC_NAME = 'TOPIC_NAME' PROJECT = 'my-project-1234' PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._callFUT(PATH, PROJECT) + topic_name = self._call_fut(PATH, PROJECT) self.assertEqual(topic_name, TOPIC_NAME) def test_w_name_w_all_extras(self): TOPIC_NAME = 'TOPIC_NAME-part.one~part.two%part-three' PROJECT = 'my-project-1234' PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._callFUT(PATH, PROJECT) + topic_name = self._call_fut(PATH, PROJECT) self.assertEqual(topic_name, TOPIC_NAME) class Test_subscription_name_from_path(unittest.TestCase): - def _callFUT(self, path, project): + def _call_fut(self, path, project): from google.cloud.pubsub._helpers import subscription_name_from_path return subscription_name_from_path(path, project) @@ -46,12 +46,12 @@ def test_w_simple_name(self): SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME' PROJECT = 'my-project-1234' PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - subscription_name = self._callFUT(PATH, PROJECT) + subscription_name = self._call_fut(PATH, PROJECT) self.assertEqual(subscription_name, SUBSCRIPTION_NAME) def test_w_name_w_all_extras(self): SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME-part.one~part.two%part-three' PROJECT = 'my-project-1234' PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - topic_name = self._callFUT(PATH, PROJECT) + topic_name = self._call_fut(PATH, PROJECT) self.assertEqual(topic_name, SUBSCRIPTION_NAME) diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 5918e26281ef..f9f94fff098d 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -827,33 +827,33 @@ def test_test_iam_permissions_missing_key(self): class 
Test__transform_messages_base64_empty(unittest.TestCase): - def _callFUT(self, messages, transform, key=None): + def _call_fut(self, messages, transform, key=None): from google.cloud.pubsub._http import _transform_messages_base64 return _transform_messages_base64(messages, transform, key) def test__transform_messages_base64_empty_message(self): from base64 import b64decode DATA = [{'message': {}}] - self._callFUT(DATA, b64decode, 'message') + self._call_fut(DATA, b64decode, 'message') self.assertEqual(DATA, [{'message': {}}]) def test__transform_messages_base64_empty_data(self): from base64 import b64decode DATA = [{'message': {'data': b''}}] - self._callFUT(DATA, b64decode, 'message') + self._call_fut(DATA, b64decode, 'message') self.assertEqual(DATA, [{'message': {'data': b''}}]) def test__transform_messages_base64_pull(self): from base64 import b64encode DATA = [{'message': {'data': b'testing 1 2 3'}}] - self._callFUT(DATA, b64encode, 'message') + self._call_fut(DATA, b64encode, 'message') self.assertEqual(DATA[0]['message']['data'], b64encode(b'testing 1 2 3')) def test__transform_messages_base64_publish(self): from base64 import b64encode DATA = [{'data': b'testing 1 2 3'}] - self._callFUT(DATA, b64encode) + self._call_fut(DATA, b64encode) self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3')) From 2d7ebb85d615920cb7ab284409699f7f87ebc3fc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 8 Nov 2016 21:02:17 -0800 Subject: [PATCH 0044/1197] Manually fixing up bad indents / long lines after renames. 
--- packages/google-cloud-pubsub/unit_tests/test_client.py | 6 +++--- .../google-cloud-pubsub/unit_tests/test_message.py | 6 +++--- .../unit_tests/test_subscription.py | 10 +++++----- packages/google-cloud-pubsub/unit_tests/test_topic.py | 8 ++++---- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 8ca227d243cc..462d55abba2b 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -56,7 +56,7 @@ def test_no_gax_ctor(self): creds = _Credentials() with _Monkey(MUT, _USE_GAX=True): client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + use_gax=False) self.assertFalse(client._use_gax) api = client.publisher_api @@ -226,7 +226,7 @@ def test_list_subscriptions_no_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + use_gax=False) returned = {'subscriptions': [SUB_INFO]} client.connection = _Connection(returned) @@ -264,7 +264,7 @@ def test_list_subscriptions_with_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + use_gax=False) # Set up the mock response. 
ACK_DEADLINE = 42 diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index dd646afd5cc4..8bcbec6c87b7 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -39,7 +39,7 @@ def test_ctor_w_attributes(self): MESSAGE_ID = b'12345' ATTRS = {'a': 'b'} message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) + attributes=ATTRS) self.assertEqual(message.data, DATA) self.assertEqual(message.message_id, MESSAGE_ID) self.assertEqual(message.attributes, ATTRS) @@ -60,7 +60,7 @@ def test_timestamp_wo_timestamp_in_attributes(self): MESSAGE_ID = b'12345' ATTRS = {'a': 'b'} message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) + attributes=ATTRS) def _to_fail(): return message.timestamp @@ -78,7 +78,7 @@ def test_timestamp_w_timestamp_in_attributes(self): timestamp = naive.replace(tzinfo=UTC) ATTRS = {'timestamp': TIMESTAMP} message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) + attributes=ATTRS) self.assertEqual(message.timestamp, timestamp) def test_from_api_repr_missing_data(self): diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index 66c7aac1dd95..6d4dc1068f2a 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -45,7 +45,7 @@ def test_ctor_explicit(self): client = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client) subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) + self.DEADLINE, self.ENDPOINT) self.assertEqual(subscription.name, self.SUB_NAME) self.assertIs(subscription.topic, topic) self.assertEqual(subscription.ack_deadline, self.DEADLINE) @@ -198,7 +198,7 @@ def 
test_create_push_w_ack_deadline_w_alternate_client(self): api._subscription_create_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) + self.DEADLINE, self.ENDPOINT) subscription.create(client=client2) @@ -259,7 +259,7 @@ def test_reload_w_alternate_client(self): api._subscription_get_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) + self.DEADLINE, self.ENDPOINT) subscription.reload(client=client2) @@ -287,7 +287,7 @@ def test_delete_w_alternate_client(self): api._subscription_delete_response = RESPONSE topic = _Topic(self.TOPIC_NAME, client=client1) subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) + self.DEADLINE, self.ENDPOINT) subscription.delete(client=client2) @@ -313,7 +313,7 @@ def test_modify_push_config_wo_endpoint_w_alternate_client(self): api._subscription_modify_push_config_response = {} topic = _Topic(self.TOPIC_NAME, client=client1) subscription = self._make_one(self.SUB_NAME, topic, - push_endpoint=self.ENDPOINT) + push_endpoint=self.ENDPOINT) subscription.modify_push_configuration(push_endpoint=None, client=client2) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 2164a0d1cadf..8f4838237fd7 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -31,8 +31,8 @@ def _make_one(self, *args, **kw): def test_ctor_w_explicit_timestamp(self): client = _Client(project=self.PROJECT) topic = self._make_one(self.TOPIC_NAME, - client=client, - timestamp_messages=True) + client=client, + timestamp_messages=True) self.assertEqual(topic.name, self.TOPIC_NAME) self.assertEqual(topic.project, self.PROJECT) self.assertEqual(topic.full_name, self.TOPIC_PATH) @@ -156,7 +156,7 @@ def 
_utcnow(): api._topic_publish_response = [MSGID] topic = self._make_one(self.TOPIC_NAME, client=client1, - timestamp_messages=True) + timestamp_messages=True) with _Monkey(MUT, _NOW=_utcnow): msgid = topic.publish(PAYLOAD, client=client2) @@ -173,7 +173,7 @@ def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): api = client.publisher_api = _FauxPublisherAPI() api._topic_publish_response = [MSGID] topic = self._make_one(self.TOPIC_NAME, client=client, - timestamp_messages=True) + timestamp_messages=True) msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) From 35c74b850a0102f8668320e392328fa181abc269 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 10 Nov 2016 21:17:51 -0800 Subject: [PATCH 0045/1197] Adding quiet flag to pip command for local deps. --- packages/google-cloud-pubsub/tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini index a05bed81c961..ac8cb16f8238 100644 --- a/packages/google-cloud-pubsub/tox.ini +++ b/packages/google-cloud-pubsub/tox.ini @@ -4,7 +4,7 @@ envlist = [testing] localdeps = - pip install --upgrade {toxinidir}/../core + pip install --quiet --upgrade {toxinidir}/../core deps = pytest covercmd = From a6d1f354a14832b97e1de35b7a92354ef39d56a6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 7 Nov 2016 19:41:24 -0800 Subject: [PATCH 0046/1197] Updating connection -> _connection attribute in some packages. In particular: pubsub/resource_manager/runtimeconfig/speech/translate. 
--- .../google/cloud/pubsub/_gax.py | 2 +- .../google/cloud/pubsub/_http.py | 8 +++--- .../google/cloud/pubsub/client.py | 6 ++--- .../unit_tests/test__http.py | 6 ++--- .../unit_tests/test_client.py | 26 +++++++++---------- .../unit_tests/test_topic.py | 12 ++++----- 6 files changed, 30 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 2636dbea2c89..57bf688486aa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -31,7 +31,7 @@ from google.cloud._helpers import _to_bytes from google.cloud._helpers import _pb_timestamp_to_rfc3339 from google.cloud._helpers import make_secure_channel -from google.cloud.connection import DEFAULT_USER_AGENT +from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index 4946ff37bd12..6aef4edd6437 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -18,7 +18,7 @@ import functools import os -from google.cloud import connection as base_connection +from google.cloud import _http from google.cloud.environment_vars import PUBSUB_EMULATOR from google.cloud.iterator import HTTPIterator from google.cloud.pubsub._helpers import subscription_name_from_path @@ -30,7 +30,7 @@ """Pub / Sub API request host.""" -class Connection(base_connection.JSONConnection): +class Connection(_http.JSONConnection): """A connection to Google Cloud Pub/Sub via the JSON REST API. 
:type credentials: :class:`oauth2client.client.OAuth2Credentials` @@ -108,7 +108,7 @@ class _PublisherAPI(object): def __init__(self, client): self._client = client - self._connection = client.connection + self._connection = client._connection def list_topics(self, project, page_size=None, page_token=None): """API call: list topics for a given project @@ -255,7 +255,7 @@ class _SubscriberAPI(object): def __init__(self, client): self._client = client - self._connection = client.connection + self._connection = client._connection def list_subscriptions(self, project, page_size=None, page_token=None): """API call: list subscriptions for a given project diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index c357a5f8753d..dd323aa25fe5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -85,7 +85,7 @@ def publisher_api(self): """Helper for publisher-related API calls.""" if self._publisher_api is None: if self._use_gax: - generated = make_gax_publisher_api(self.connection) + generated = make_gax_publisher_api(self._connection) self._publisher_api = GAXPublisherAPI(generated, self) else: self._publisher_api = JSONPublisherAPI(self) @@ -96,7 +96,7 @@ def subscriber_api(self): """Helper for subscriber-related API calls.""" if self._subscriber_api is None: if self._use_gax: - generated = make_gax_subscriber_api(self.connection) + generated = make_gax_subscriber_api(self._connection) self._subscriber_api = GAXSubscriberAPI(generated, self) else: self._subscriber_api = JSONSubscriberAPI(self) @@ -106,7 +106,7 @@ def subscriber_api(self): def iam_policy_api(self): """Helper for IAM policy-related API calls.""" if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self.connection) + self._iam_policy_api = _IAMPolicyAPI(self._connection) return self._iam_policy_api def list_topics(self, 
page_size=None, page_token=None): diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index f9f94fff098d..290df47012a1 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -435,7 +435,7 @@ def test_list_subscriptions_no_paging(self): connection = _Connection(RETURNED) creds = _Credentials() client = Client(project=self.PROJECT, credentials=creds) - client.connection = connection + client._connection = connection api = self._make_one(client) iterator = api.list_subscriptions(self.PROJECT) @@ -478,7 +478,7 @@ def test_list_subscriptions_with_paging(self): connection = _Connection(RETURNED) creds = _Credentials() client = Client(project=self.PROJECT, credentials=creds) - client.connection = connection + client._connection = connection api = self._make_one(client) iterator = api.list_subscriptions( @@ -879,7 +879,7 @@ def api_request(self, **kw): class _Client(object): def __init__(self, connection, project): - self.connection = connection + self._connection = connection self.project = project diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 462d55abba2b..d5e29cd94dc7 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -39,7 +39,7 @@ def test_publisher_api_wo_gax(self): with _Monkey(MUT, _USE_GAX=False): client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client.connection = object() + conn = client._connection = object() api = client.publisher_api self.assertIsInstance(api, _PublisherAPI) @@ -93,7 +93,7 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.publisher_api self.assertIs(again, api) - args = (client.connection,) + args = (client._connection,) self.assertEqual(_called_with, [(args, {})]) def 
test_subscriber_api_wo_gax(self): @@ -105,7 +105,7 @@ def test_subscriber_api_wo_gax(self): with _Monkey(MUT, _USE_GAX=False): client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client.connection = object() + conn = client._connection = object() api = client.subscriber_api self.assertIsInstance(api, _SubscriberAPI) @@ -145,14 +145,14 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.subscriber_api self.assertIs(again, api) - args = (client.connection,) + args = (client._connection,) self.assertEqual(_called_with, [(args, {})]) def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client.connection = object() + conn = client._connection = object() api = client.iam_policy_api self.assertIsInstance(api, _IAMPolicyAPI) self.assertIs(api._connection, conn) @@ -165,7 +165,7 @@ def test_list_topics_no_paging(self): creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - client.connection = object() + client._connection = object() api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) client._publisher_api = api @@ -188,7 +188,7 @@ def test_list_topics_with_paging(self): SIZE = 1 creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - client.connection = object() + client._connection = object() api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) client._publisher_api = api @@ -206,7 +206,7 @@ def test_list_topics_with_paging(self): def test_list_topics_missing_key(self): creds = _Credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - client.connection = object() + client._connection = object() api = _FauxPublisherAPI() client._publisher_api = api @@ -228,7 +228,7 @@ def test_list_subscriptions_no_paging(self): client = self._make_one(project=self.PROJECT, credentials=creds, 
use_gax=False) returned = {'subscriptions': [SUB_INFO]} - client.connection = _Connection(returned) + client._connection = _Connection(returned) iterator = client.list_subscriptions() subscriptions = list(iterator) @@ -248,7 +248,7 @@ def test_list_subscriptions_no_paging(self): self.assertIsNone(subscription.ack_deadline) self.assertIsNone(subscription.push_endpoint) - called_with = client.connection._called_with + called_with = client._connection._called_with expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', @@ -280,7 +280,7 @@ def test_list_subscriptions_with_paging(self): 'subscriptions': [SUB_INFO], 'nextPageToken': TOKEN2, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) iterator = client.list_subscriptions( SIZE, TOKEN1) @@ -302,7 +302,7 @@ def test_list_subscriptions_with_paging(self): self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) - called_with = client.connection._called_with + called_with = client._connection._called_with expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) self.assertEqual(called_with, { 'method': 'GET', @@ -318,7 +318,7 @@ def test_list_subscriptions_w_missing_key(self): creds = _Credentials() client = self._make_one(project=PROJECT, credentials=creds) - client.connection = object() + client._connection = object() api = client._subscriber_api = _FauxSubscriberAPI() api._list_subscriptions_response = (), None diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 8f4838237fd7..06ea6bff42af 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -330,7 +330,7 @@ def test_list_subscriptions_no_paging(self): 'subscriptions': SUBS_LIST, 'nextPageToken': TOKEN, } - client.connection = _Connection(returned) + 
client._connection = _Connection(returned) topic = self._make_one(self.TOPIC_NAME, client=client) @@ -353,7 +353,7 @@ def test_list_subscriptions_no_paging(self): self.assertEqual(next_page_token, TOKEN) # Verify the mock. - called_with = client.connection._called_with + called_with = client._connection._called_with self.assertEqual(len(called_with), 3) self.assertEqual(called_with['method'], 'GET') path = '/%s/subscriptions' % (self.TOPIC_PATH,) @@ -380,7 +380,7 @@ def test_list_subscriptions_with_paging(self): returned = { 'subscriptions': SUBS_LIST, } - client.connection = _Connection(returned) + client._connection = _Connection(returned) topic = self._make_one(self.TOPIC_NAME, client=client) @@ -403,7 +403,7 @@ def test_list_subscriptions_with_paging(self): self.assertIsNone(next_page_token) # Verify the mock. - called_with = client.connection._called_with + called_with = client._connection._called_with self.assertEqual(len(called_with), 3) self.assertEqual(called_with['method'], 'GET') path = '/%s/subscriptions' % (self.TOPIC_PATH,) @@ -416,7 +416,7 @@ def test_list_subscriptions_missing_key(self): client = Client(project=self.PROJECT, credentials=object(), use_gax=False) - client.connection = _Connection({}) + client._connection = _Connection({}) topic = self._make_one(self.TOPIC_NAME, client=client) iterator = topic.list_subscriptions() @@ -426,7 +426,7 @@ def test_list_subscriptions_missing_key(self): self.assertEqual(len(subscriptions), 0) self.assertIsNone(next_page_token) # Verify the mock. - called_with = client.connection._called_with + called_with = client._connection._called_with self.assertEqual(len(called_with), 3) self.assertEqual(called_with['method'], 'GET') path = '/%s/subscriptions' % (self.TOPIC_PATH,) From ed9a301bc86b9c2046007186b692ff7273031497 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 11 Nov 2016 14:16:26 -0800 Subject: [PATCH 0047/1197] Removing use of _Monkey in pubsub and logging. 
--- packages/google-cloud-pubsub/tox.ini | 1 + .../unit_tests/test__gax.py | 46 +++++++++------ .../unit_tests/test__http.py | 5 +- .../unit_tests/test_client.py | 57 ++++++++++--------- .../unit_tests/test_topic.py | 6 +- 5 files changed, 65 insertions(+), 50 deletions(-) diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini index ac8cb16f8238..212ba8f7d619 100644 --- a/packages/google-cloud-pubsub/tox.ini +++ b/packages/google-cloud-pubsub/tox.ini @@ -7,6 +7,7 @@ localdeps = pip install --quiet --upgrade {toxinidir}/../core deps = pytest + mock covercmd = py.test --quiet \ --cov=google.cloud.pubsub \ diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 71888431d2e2..43d9804b90a9 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -877,8 +877,8 @@ def _call_fut(self, connection): return make_gax_publisher_api(connection) def test_live_api(self): - from google.cloud._testing import _Monkey - from google.cloud.pubsub import _gax as MUT + import mock + from google.cloud.pubsub._gax import DEFAULT_USER_AGENT channels = [] channel_args = [] @@ -899,18 +899,20 @@ def make_channel(*args): creds = _Credentials() connection = _Connection(in_emulator=False, credentials=creds) - with _Monkey(MUT, PublisherApi=mock_publisher_api, - make_secure_channel=make_channel): + patch = mock.patch.multiple( + 'google.cloud.pubsub._gax', + PublisherApi=mock_publisher_api, + make_secure_channel=make_channel) + with patch: result = self._call_fut(connection) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, - [(creds, MUT.DEFAULT_USER_AGENT, host)]) + [(creds, DEFAULT_USER_AGENT, host)]) def test_emulator(self): - from google.cloud._testing import _Monkey - from google.cloud.pubsub import _gax as MUT + import mock channels = [] mock_result = object() 
@@ -927,8 +929,11 @@ def mock_insecure_channel(host): host = 'CURR_HOST:1234' connection = _Connection(in_emulator=True, host=host) - with _Monkey(MUT, PublisherApi=mock_publisher_api, - insecure_channel=mock_insecure_channel): + patch = mock.patch.multiple( + 'google.cloud.pubsub._gax', + PublisherApi=mock_publisher_api, + insecure_channel=mock_insecure_channel) + with patch: result = self._call_fut(connection) self.assertIs(result, mock_result) @@ -944,8 +949,8 @@ def _call_fut(self, connection): return make_gax_subscriber_api(connection) def test_live_api(self): - from google.cloud._testing import _Monkey - from google.cloud.pubsub import _gax as MUT + import mock + from google.cloud.pubsub._gax import DEFAULT_USER_AGENT channels = [] channel_args = [] @@ -966,18 +971,20 @@ def make_channel(*args): creds = _Credentials() connection = _Connection(in_emulator=False, credentials=creds) - with _Monkey(MUT, SubscriberApi=mock_subscriber_api, - make_secure_channel=make_channel): + patch = mock.patch.multiple( + 'google.cloud.pubsub._gax', + SubscriberApi=mock_subscriber_api, + make_secure_channel=make_channel) + with patch: result = self._call_fut(connection) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, - [(creds, MUT.DEFAULT_USER_AGENT, host)]) + [(creds, DEFAULT_USER_AGENT, host)]) def test_emulator(self): - from google.cloud._testing import _Monkey - from google.cloud.pubsub import _gax as MUT + import mock channels = [] mock_result = object() @@ -994,8 +1001,11 @@ def mock_insecure_channel(host): host = 'CURR_HOST:1234' connection = _Connection(in_emulator=True, host=host) - with _Monkey(MUT, SubscriberApi=mock_subscriber_api, - insecure_channel=mock_insecure_channel): + patch = mock.patch.multiple( + 'google.cloud.pubsub._gax', + SubscriberApi=mock_subscriber_api, + insecure_channel=mock_insecure_channel) + with patch: result = self._call_fut(connection) self.assertIs(result, mock_result) diff --git 
a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index f9f94fff098d..859a952feeff 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -42,14 +42,13 @@ def test_default_url(self): self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): - import os - from google.cloud._testing import _Monkey + import mock from google.cloud.environment_vars import PUBSUB_EMULATOR HOST = 'localhost:8187' fake_environ = {PUBSUB_EMULATOR: HOST} - with _Monkey(os, getenv=fake_environ.get): + with mock.patch('os.environ', new=fake_environ): conn = self._make_one() klass = self._get_target_class() diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 462d55abba2b..7ef6e150d410 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -32,12 +32,12 @@ def _make_one(self, *args, **kw): def test_publisher_api_wo_gax(self): from google.cloud.pubsub._http import _PublisherAPI - from google.cloud.pubsub import client as MUT - from google.cloud._testing import _Monkey + creds = _Credentials() - with _Monkey(MUT, _USE_GAX=False): - client = self._make_one(project=self.PROJECT, credentials=creds) + client = self._make_one( + project=self.PROJECT, credentials=creds, + use_gax=False) conn = client.connection = object() api = client.publisher_api @@ -49,12 +49,12 @@ def test_publisher_api_wo_gax(self): self.assertIs(again, api) def test_no_gax_ctor(self): - from google.cloud._testing import _Monkey + import mock from google.cloud.pubsub._http import _PublisherAPI - from google.cloud.pubsub import client as MUT creds = _Credentials() - with _Monkey(MUT, _USE_GAX=True): + with mock.patch('google.cloud.pubsub.client._USE_GAX', + new=True): client = 
self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) @@ -63,8 +63,7 @@ def test_no_gax_ctor(self): self.assertIsInstance(api, _PublisherAPI) def test_publisher_api_w_gax(self): - from google.cloud.pubsub import client as MUT - from google.cloud._testing import _Monkey + import mock wrapped = object() _called_with = [] @@ -80,11 +79,15 @@ def __init__(self, _wrapped, client): self._client = client creds = _Credentials() - - with _Monkey(MUT, _USE_GAX=True, - make_gax_publisher_api=_generated_api, - GAXPublisherAPI=_GaxPublisherAPI): - client = self._make_one(project=self.PROJECT, credentials=creds) + client = self._make_one( + project=self.PROJECT, credentials=creds, + use_gax=True) + + patch = mock.patch.multiple( + 'google.cloud.pubsub.client', + make_gax_publisher_api=_generated_api, + GAXPublisherAPI=_GaxPublisherAPI) + with patch: api = client.publisher_api self.assertIsInstance(api, _GaxPublisherAPI) @@ -98,12 +101,11 @@ def __init__(self, _wrapped, client): def test_subscriber_api_wo_gax(self): from google.cloud.pubsub._http import _SubscriberAPI - from google.cloud.pubsub import client as MUT - from google.cloud._testing import _Monkey - creds = _Credentials() - with _Monkey(MUT, _USE_GAX=False): - client = self._make_one(project=self.PROJECT, credentials=creds) + creds = _Credentials() + client = self._make_one( + project=self.PROJECT, credentials=creds, + use_gax=False) conn = client.connection = object() api = client.subscriber_api @@ -115,8 +117,7 @@ def test_subscriber_api_wo_gax(self): self.assertIs(again, api) def test_subscriber_api_w_gax(self): - from google.cloud.pubsub import client as MUT - from google.cloud._testing import _Monkey + import mock wrapped = object() _called_with = [] @@ -132,11 +133,15 @@ def __init__(self, _wrapped, client): self._client = client creds = _Credentials() - - with _Monkey(MUT, _USE_GAX=True, - make_gax_subscriber_api=_generated_api, - GAXSubscriberAPI=_GaxSubscriberAPI): - client = 
self._make_one(project=self.PROJECT, credentials=creds) + client = self._make_one( + project=self.PROJECT, credentials=creds, + use_gax=True) + + patch = mock.patch.multiple( + 'google.cloud.pubsub.client', + make_gax_subscriber_api=_generated_api, + GAXSubscriberAPI=_GaxSubscriberAPI) + with patch: api = client.subscriber_api self.assertIsInstance(api, _GaxSubscriberAPI) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 8f4838237fd7..37eeb09f732c 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -136,9 +136,9 @@ def test_publish_single_bytes_wo_attrs_w_bound_client(self): def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): import datetime - from google.cloud.pubsub import topic as MUT + import mock from google.cloud._helpers import _RFC3339_MICROS - from google.cloud._testing import _Monkey + NOW = datetime.datetime.utcnow() def _utcnow(): @@ -157,7 +157,7 @@ def _utcnow(): topic = self._make_one(self.TOPIC_NAME, client=client1, timestamp_messages=True) - with _Monkey(MUT, _NOW=_utcnow): + with mock.patch('google.cloud.pubsub.topic._NOW', new=_utcnow): msgid = topic.publish(PAYLOAD, client=client2) self.assertEqual(msgid, MSGID) From 94673f83d77574034aa84fd2bfd8d8fe5bdb2a1c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 12:44:19 -0800 Subject: [PATCH 0048/1197] Upgrading core to version to 0.21.0. As a result, also upgrading the umbrella package and all packages to 0.21.0 (since they all depend on core). 
--- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6accc0ae8138..eb6b58cb86a0 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.20.0', + 'google-cloud-core >= 0.21.0', 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', 'gapic-google-pubsub-v1 >= 0.10.1, < 0.11dev', @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.20.0', + version='0.21.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 1fc3e951b9430c929eb05ce770fa03f24135ab25 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 14:11:34 -0800 Subject: [PATCH 0049/1197] Need to install from local deps first. The `pip install --upgrade` still is needed to ensure freshness but by removing the filesystem paths from deps we made the initial install grab from PyPI (by mistake). This way, all local package deps are grabbed from the local filesystem. 
--- packages/google-cloud-pubsub/tox.ini | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini index 212ba8f7d619..25e6fae5ea27 100644 --- a/packages/google-cloud-pubsub/tox.ini +++ b/packages/google-cloud-pubsub/tox.ini @@ -6,8 +6,9 @@ envlist = localdeps = pip install --quiet --upgrade {toxinidir}/../core deps = - pytest + {toxinidir}/../core mock + pytest covercmd = py.test --quiet \ --cov=google.cloud.pubsub \ @@ -17,7 +18,6 @@ covercmd = [testenv] commands = - {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -26,7 +26,6 @@ deps = basepython = python2.7 commands = - {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From b8351a66504f6ee42a4cc7befe6e4f88cfc18e26 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 14 Nov 2016 14:58:42 -0800 Subject: [PATCH 0050/1197] Fixing accidental removal of {localdeps} Also - adding RTD dependency for runtimeconfig. - adding local paths to umbrella tox config "deps" as was done in #2733. --- packages/google-cloud-pubsub/tox.ini | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini index 25e6fae5ea27..001886c51e9a 100644 --- a/packages/google-cloud-pubsub/tox.ini +++ b/packages/google-cloud-pubsub/tox.ini @@ -18,6 +18,7 @@ covercmd = [testenv] commands = + {[testing]localdeps} py.test --quiet {posargs} unit_tests deps = {[testing]deps} @@ -26,6 +27,7 @@ deps = basepython = python2.7 commands = + {[testing]localdeps} {[testing]covercmd} deps = {[testenv]deps} From cbe70d5a22dcaecbd16524025afc50012f7a9b93 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 16 Nov 2016 11:09:27 -0500 Subject: [PATCH 0051/1197] Set core version compatible specifier to packages. 
--- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index eb6b58cb86a0..c206a94991ed 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.21.0', + 'google-cloud-core >= 0.21.0, < 0.22dev', 'grpcio >= 1.0.0, < 2.0dev', 'google-gax >= 0.14.1, < 0.15dev', 'gapic-google-pubsub-v1 >= 0.10.1, < 0.11dev', From 8e49ee60755cdb14919b2a0b772f6e1b85c63fc6 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 20 Oct 2016 12:32:12 -0400 Subject: [PATCH 0052/1197] Add GatewayTimeout exception to pubsub subscription pull. --- .../google/cloud/pubsub/_gax.py | 9 +++++++- .../unit_tests/test__gax.py | 22 +++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 57bf688486aa..11ab7d4aac7b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -413,8 +413,15 @@ def subscription_pull(self, subscription_path, return_immediately=False, subscription_path, max_messages, return_immediately=return_immediately) except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + code = exc_to_code(exc.cause) + if code == StatusCode.NOT_FOUND: raise NotFound(subscription_path) + elif code == StatusCode.DEADLINE_EXCEEDED: + # NOTE: The JSON-over-HTTP API returns a 200 with an empty + # response when ``return_immediately`` is ``False``, so + # we "mutate" the gRPC error into a non-error to conform. 
+ if not return_immediately: + return [] raise return [_received_message_pb_to_mapping(rmpb) for rmpb in response_pb.received_messages] diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 43d9804b90a9..decf9a25c068 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -764,6 +764,24 @@ def test_subscription_pull_defaults_error(self): self.assertFalse(return_immediately) self.assertIsNone(options) + def test_subscription_pull_deadline_exceeded(self): + client = _Client(self.PROJECT) + gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) + api = self._make_one(gax_api, client) + + result = api.subscription_pull(self.SUB_PATH) + self.assertEqual(result, []) + + def test_subscription_pull_deadline_exceeded_return_immediately(self): + from google.gax.errors import GaxError + + client = _Client(self.PROJECT) + gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) + api = self._make_one(gax_api, client) + + with self.assertRaises(GaxError): + api.subscription_pull(self.SUB_PATH, return_immediately=True) + def test_subscription_acknowledge_hit(self): ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' @@ -1075,6 +1093,7 @@ class _GAXSubscriberAPI(_GAXBaseAPI): _modify_push_config_ok = False _acknowledge_ok = False _modify_ack_deadline_ok = False + _deadline_exceeded_gax_error = False def list_subscriptions(self, project, page_size, options=None): self._list_subscriptions_called_with = (project, page_size, options) @@ -1124,6 +1143,9 @@ def pull(self, name, max_messages, return_immediately, options=None): name, max_messages, return_immediately, options) if self._random_gax_error: raise GaxError('error') + if self._deadline_exceeded_gax_error: + raise GaxError('deadline exceeded', + self._make_grpc_deadline_exceeded()) try: return self._pull_response except AttributeError: From 9f0a78f62a438bd045aec351b283bf3bb34d2547 
Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Mon, 28 Nov 2016 13:15:56 -0500 Subject: [PATCH 0053/1197] Fix double encoding of messages when commit() fails. --- .../google/cloud/pubsub/_http.py | 6 +++-- .../unit_tests/test__http.py | 24 ++++++++++++++++++- 2 files changed, 27 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index 6aef4edd6437..635c43bdaab5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -15,6 +15,7 @@ """Create / interact with Google Cloud Pub/Sub connections.""" import base64 +import copy import functools import os @@ -203,9 +204,10 @@ def topic_publish(self, topic_path, messages): :rtype: list of string :returns: list of opaque IDs for published messages. """ - _transform_messages_base64(messages, _base64_unicode) + messages_to_send = copy.deepcopy(messages) + _transform_messages_base64(messages_to_send, _base64_unicode) conn = self._connection - data = {'messages': messages} + data = {'messages': messages_to_send} response = conn.api_request( method='POST', path='/%s:publish' % (topic_path,), data=data) return response['messageIds'] diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 3401d0c5869e..3a0281a03eb2 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -284,10 +284,32 @@ def test_topic_publish_hit(self): msg_data = connection._called_with['data']['messages'][0]['data'] self.assertEqual(msg_data, B64_PAYLOAD) + def test_topic_publish_twice(self): + import base64 + + PAYLOAD = b'This is the message text' + B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') + MESSAGE = {'data': PAYLOAD, 'attributes': {}} + RETURNED = {'messageIds': []} + connection = _Connection(RETURNED, 
RETURNED) + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + + messages = connection._called_with['data']['messages'] + self.assertEqual(len(messages), 1) + self.assertEqual(messages[0]['data'], B64_PAYLOAD) + def test_topic_publish_miss(self): + import base64 from google.cloud.exceptions import NotFound + PAYLOAD = b'This is the message text' + B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') MESSAGE = {'data': PAYLOAD, 'attributes': {}} + B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} connection = _Connection() client = _Client(connection, self.PROJECT) api = self._make_one(client) @@ -299,7 +321,7 @@ def test_topic_publish_miss(self): path = '/%s:publish' % (self.TOPIC_PATH,) self.assertEqual(connection._called_with['path'], path) self.assertEqual(connection._called_with['data'], - {'messages': [MESSAGE]}) + {'messages': [B64MSG]}) def test_topic_list_subscriptions_no_paging(self): from google.cloud.pubsub.topic import Topic From 077b9c139f4655576684b716d2d58b2519bda6ba Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 2 Dec 2016 12:38:43 -0800 Subject: [PATCH 0054/1197] Upgrading GAPIC deps to 0.14 series. In the process, ditching **hard** deps on protobufs, grpcio, and google-gax in those packages. 
Also had to upgrade calls to list_log_entries() based on https://github.com/googleapis/api-client-staging/pull/118/files/2bcd2875a578ae525d2aabb862cf9c131b4665f5#r90422054 --- .../google/cloud/pubsub/_gax.py | 29 +++++++++---------- packages/google-cloud-pubsub/setup.py | 5 +--- .../unit_tests/test__gax.py | 24 ++++++++------- 3 files changed, 28 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 11ab7d4aac7b..97b73db687aa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -16,15 +16,15 @@ import functools -from google.cloud.gapic.pubsub.v1.publisher_api import PublisherApi -from google.cloud.gapic.pubsub.v1.subscriber_api import SubscriberApi +from google.cloud.gapic.pubsub.v1.publisher_client import PublisherClient +from google.cloud.gapic.pubsub.v1.subscriber_client import SubscriberClient from google.gax import CallOptions from google.gax import INITIAL_PAGE from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from google.protobuf.json_format import MessageToDict -from google.pubsub.v1.pubsub_pb2 import PubsubMessage -from google.pubsub.v1.pubsub_pb2 import PushConfig +from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PubsubMessage +from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig from grpc import insecure_channel from grpc import StatusCode @@ -43,7 +43,7 @@ class _PublisherAPI(object): """Helper mapping publisher-related APIs. - :type gax_api: :class:`google.pubsub.v1.publisher_api.PublisherApi` + :type gax_api: :class:`.publisher_client.PublisherClient` :param gax_api: API object used to make GAX requests. 
:type client: :class:`~google.cloud.pubsub.client.Client` @@ -223,7 +223,7 @@ def topic_list_subscriptions(self, topic, page_size=0, page_token=None): class _SubscriberAPI(object): """Helper mapping subscriber-related APIs. - :type gax_api: :class:`google.pubsub.v1.publisher_api.SubscriberApi` + :type gax_api: :class:`.publisher_client.SubscriberClient` :param gax_api: API object used to make GAX requests. :type client: :class:`~google.cloud.pubsub.client.Client` @@ -522,18 +522,17 @@ def make_gax_publisher_api(connection): :type connection: :class:`~google.cloud.pubsub._http.Connection` :param connection: The connection that holds configuration details. - :rtype: :class:`~google.cloud.pubsub.v1.publisher_api.PublisherApi` + :rtype: :class:`.publisher_client.PublisherClient` :returns: A publisher API instance with the proper connection configuration. - :rtype: :class:`~google.cloud.pubsub.v1.subscriber_api.SubscriberApi` """ if connection.in_emulator: channel = insecure_channel(connection.host) else: channel = make_secure_channel( connection.credentials, DEFAULT_USER_AGENT, - PublisherApi.SERVICE_ADDRESS) - return PublisherApi(channel=channel) + PublisherClient.SERVICE_ADDRESS) + return PublisherClient(channel=channel) def make_gax_subscriber_api(connection): @@ -546,7 +545,7 @@ def make_gax_subscriber_api(connection): :type connection: :class:`~google.cloud.pubsub._http.Connection` :param connection: The connection that holds configuration details. - :rtype: :class:`~google.cloud.pubsub.v1.subscriber_api.SubscriberApi` + :rtype: :class:`.subscriber_client.SubscriberClient` :returns: A subscriber API instance with the proper connection configuration. 
""" @@ -555,8 +554,8 @@ def make_gax_subscriber_api(connection): else: channel = make_secure_channel( connection.credentials, DEFAULT_USER_AGENT, - SubscriberApi.SERVICE_ADDRESS) - return SubscriberApi(channel=channel) + SubscriberClient.SERVICE_ADDRESS) + return SubscriberClient(channel=channel) def _item_to_topic(iterator, resource): @@ -565,7 +564,7 @@ def _item_to_topic(iterator, resource): :type iterator: :class:`~google.cloud.iterator.Iterator` :param iterator: The iterator that is currently in use. - :type resource: :class:`google.pubsub.v1.pubsub_pb2.Topic` + :type resource: :class:`.pubsub_pb2.Topic` :param resource: A topic returned from the API. :rtype: :class:`~google.cloud.pubsub.topic.Topic` @@ -607,7 +606,7 @@ def _item_to_sub_for_client(iterator, sub_pb, topics): :type iterator: :class:`~google.cloud.iterator.Iterator` :param iterator: The iterator that is currently in use. - :type sub_pb: :class:`~google.pubsub.v1.pubsub_pb2.Subscription` + :type sub_pb: :class:`.pubsub_pb2.Subscription` :param sub_pb: A subscription returned from the API. 
:type topics: dict diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index c206a94991ed..37d9cb9d6496 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,10 +51,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.21.0, < 0.22dev', - 'grpcio >= 1.0.0, < 2.0dev', - 'google-gax >= 0.14.1, < 0.15dev', - 'gapic-google-pubsub-v1 >= 0.10.1, < 0.11dev', - 'grpc-google-pubsub-v1 >= 0.10.1, < 0.11dev', + 'gapic-google-cloud-pubsub-v1 >= 0.14.0, < 0.15dev', ] setup( diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index decf9a25c068..8faf871cfa4d 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -416,8 +416,9 @@ def test_ctor(self): def test_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE - from google.pubsub.v1.pubsub_pb2 import PushConfig - from google.pubsub.v1.pubsub_pb2 import Subscription as SubscriptionPB + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import ( + Subscription as SubscriptionPB) from google.cloud._testing import _GAXPageIterator from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription @@ -458,8 +459,9 @@ def test_list_subscriptions_no_paging(self): self.assertIs(options.page_token, INITIAL_PAGE) def test_list_subscriptions_with_paging(self): - from google.pubsub.v1.pubsub_pb2 import PushConfig - from google.pubsub.v1.pubsub_pb2 import Subscription as SubscriptionPB + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import ( + Subscription as SubscriptionPB) from google.cloud._testing import _GAXPageIterator from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription @@ -505,7 +507,7 @@ def 
test_list_subscriptions_with_paging(self): self.assertEqual(options.page_token, TOKEN) def test_subscription_create(self): - from google.pubsub.v1.pubsub_pb2 import Subscription + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import Subscription sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) @@ -564,8 +566,8 @@ def test_subscription_create_error(self): self.assertIsNone(options) def test_subscription_get_hit(self): - from google.pubsub.v1.pubsub_pb2 import PushConfig - from google.pubsub.v1.pubsub_pb2 import Subscription + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig + from google.cloud.grpc.pubsub.v1.pubsub_pb2 import Subscription push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, @@ -919,7 +921,7 @@ def make_channel(*args): credentials=creds) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', - PublisherApi=mock_publisher_api, + PublisherClient=mock_publisher_api, make_secure_channel=make_channel) with patch: result = self._call_fut(connection) @@ -949,7 +951,7 @@ def mock_insecure_channel(host): connection = _Connection(in_emulator=True, host=host) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', - PublisherApi=mock_publisher_api, + PublisherClient=mock_publisher_api, insecure_channel=mock_insecure_channel) with patch: result = self._call_fut(connection) @@ -991,7 +993,7 @@ def make_channel(*args): credentials=creds) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', - SubscriberApi=mock_subscriber_api, + SubscriberClient=mock_subscriber_api, make_secure_channel=make_channel) with patch: result = self._call_fut(connection) @@ -1021,7 +1023,7 @@ def mock_insecure_channel(host): connection = _Connection(in_emulator=True, host=host) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', - SubscriberApi=mock_subscriber_api, + SubscriberClient=mock_subscriber_api, 
insecure_channel=mock_insecure_channel) with patch: result = self._call_fut(connection) From 405ae406518d522086a82f0a5be266197cbf48af Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 2 Dec 2016 15:02:25 -0800 Subject: [PATCH 0055/1197] Switch from oauth2client to google-auth (#2726) * Removes all use of oauth2client from every package and tests. * Updates core to use google-auth's default credentials, project ID, and scoping logic. * Updates bigtable to use google-auth's scoping logic. --- .../unit_tests/test__gax.py | 15 ++----- .../unit_tests/test__http.py | 11 +----- .../unit_tests/test_client.py | 39 +++++++------------ 3 files changed, 19 insertions(+), 46 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index decf9a25c068..9f32d29a2f9a 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -430,7 +430,7 @@ def test_list_subscriptions_no_paging(self): push_config=push_cfg_pb) response = _GAXPageIterator([sub_pb]) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - creds = _Credentials() + creds = object() client = Client(project=self.PROJECT, credentials=creds) api = self._make_one(gax_api, client) @@ -476,7 +476,7 @@ def test_list_subscriptions_with_paging(self): response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) client = _Client(self.PROJECT) - creds = _Credentials() + creds = object() client = Client(project=self.PROJECT, credentials=creds) api = self._make_one(gax_api, client) @@ -914,7 +914,7 @@ def make_channel(*args): mock_publisher_api.SERVICE_ADDRESS = host - creds = _Credentials() + creds = object() connection = _Connection(in_emulator=False, credentials=creds) patch = mock.patch.multiple( @@ -986,7 +986,7 @@ def make_channel(*args): mock_subscriber_api.SERVICE_ADDRESS = host - creds = 
_Credentials() + creds = object() connection = _Connection(in_emulator=False, credentials=creds) patch = mock.patch.multiple( @@ -1216,10 +1216,3 @@ class _Client(object): def __init__(self, project): self.project = project - - -class _Credentials(object): - - @staticmethod - def create_scoped_required(): - return False diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 3a0281a03eb2..be0f69835683 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -454,7 +454,7 @@ def test_list_subscriptions_no_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} RETURNED = {'subscriptions': [SUB_INFO]} connection = _Connection(RETURNED) - creds = _Credentials() + creds = object() client = Client(project=self.PROJECT, credentials=creds) client._connection = connection api = self._make_one(client) @@ -497,7 +497,7 @@ def test_list_subscriptions_with_paging(self): 'nextPageToken': 'TOKEN2', } connection = _Connection(RETURNED) - creds = _Credentials() + creds = object() client = Client(project=self.PROJECT, credentials=creds) client._connection = connection api = self._make_one(client) @@ -902,10 +902,3 @@ class _Client(object): def __init__(self, connection, project): self._connection = connection self.project = project - - -class _Credentials(object): - - @staticmethod - def create_scoped_required(): - return False diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 5731f1082586..1e549f8f7f4b 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -33,7 +33,7 @@ def _make_one(self, *args, **kw): def test_publisher_api_wo_gax(self): from google.cloud.pubsub._http import _PublisherAPI - creds = _Credentials() + creds = object() client = self._make_one( 
project=self.PROJECT, credentials=creds, @@ -52,7 +52,7 @@ def test_no_gax_ctor(self): import mock from google.cloud.pubsub._http import _PublisherAPI - creds = _Credentials() + creds = object() with mock.patch('google.cloud.pubsub.client._USE_GAX', new=True): client = self._make_one(project=self.PROJECT, credentials=creds, @@ -78,7 +78,7 @@ def __init__(self, _wrapped, client): self._wrapped = _wrapped self._client = client - creds = _Credentials() + creds = object() client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) @@ -102,7 +102,7 @@ def __init__(self, _wrapped, client): def test_subscriber_api_wo_gax(self): from google.cloud.pubsub._http import _SubscriberAPI - creds = _Credentials() + creds = object() client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=False) @@ -132,7 +132,7 @@ def __init__(self, _wrapped, client): self._wrapped = _wrapped self._client = client - creds = _Credentials() + creds = object() client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) @@ -155,7 +155,7 @@ def __init__(self, _wrapped, client): def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = object() api = client.iam_policy_api @@ -168,7 +168,7 @@ def test_iam_policy_api(self): def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) client._connection = object() api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) @@ -191,7 +191,7 @@ def test_list_topics_with_paging(self): TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) client._connection = object() api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, 
client)], TOKEN2) @@ -209,7 +209,7 @@ def test_list_topics_with_paging(self): self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) def test_list_topics_missing_key(self): - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds) client._connection = object() api = _FauxPublisherAPI() @@ -229,7 +229,7 @@ def test_list_subscriptions_no_paging(self): from google.cloud.pubsub.topic import Topic SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) returned = {'subscriptions': [SUB_INFO]} @@ -267,7 +267,7 @@ def test_list_subscriptions_with_paging(self): from google.cloud.pubsub.topic import Topic SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _Credentials() + creds = object() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) @@ -320,7 +320,7 @@ def test_list_subscriptions_with_paging(self): def test_list_subscriptions_w_missing_key(self): PROJECT = 'PROJECT' - creds = _Credentials() + creds = object() client = self._make_one(project=PROJECT, credentials=creds) client._connection = object() @@ -338,7 +338,7 @@ def test_list_subscriptions_w_missing_key(self): def test_topic(self): PROJECT = 'PROJECT' TOPIC_NAME = 'TOPIC_NAME' - creds = _Credentials() + creds = object() client_obj = self._make_one(project=PROJECT, credentials=creds) new_topic = client_obj.topic(TOPIC_NAME) @@ -350,19 +350,6 @@ def test_topic(self): self.assertFalse(new_topic.timestamp_messages) -class _Credentials(object): - - _scopes = None - - @staticmethod - def create_scoped_required(): - return True - - def create_scoped(self, scope): - self._scopes = scope - return self - - class _Iterator(object): def __init__(self, items, token): From 899b181c13bf9a27c5f945a779d0c0c5019f7861 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 7 Dec 2016 16:00:24 
-0800 Subject: [PATCH 0056/1197] Raise ValueError if credentials are not from google-auth (#2828) --- .../unit_tests/test__gax.py | 22 ++++++----- .../unit_tests/test__http.py | 12 ++++-- .../unit_tests/test_client.py | 38 ++++++++++--------- .../unit_tests/test_topic.py | 20 ++++++---- 4 files changed, 54 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index ac13bc36758f..4f2037d7c4dd 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -14,6 +14,7 @@ import unittest +import mock try: # pylint: disable=unused-import @@ -27,6 +28,13 @@ from google.cloud._testing import _GAXBaseAPI +def _make_credentials(): + # pylint: disable=redefined-outer-name + import google.auth.credentials + # pylint: enable=redefined-outer-name + return mock.Mock(spec=google.auth.credentials.Credentials) + + class _Base(object): PROJECT = 'PROJECT' PROJECT_PATH = 'projects/%s' % (PROJECT,) @@ -431,7 +439,7 @@ def test_list_subscriptions_no_paging(self): push_config=push_cfg_pb) response = _GAXPageIterator([sub_pb]) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - creds = object() + creds = _make_credentials() client = Client(project=self.PROJECT, credentials=creds) api = self._make_one(gax_api, client) @@ -478,7 +486,7 @@ def test_list_subscriptions_with_paging(self): response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) client = _Client(self.PROJECT) - creds = object() + creds = _make_credentials() client = Client(project=self.PROJECT, credentials=creds) api = self._make_one(gax_api, client) @@ -897,7 +905,6 @@ def _call_fut(self, connection): return make_gax_publisher_api(connection) def test_live_api(self): - import mock from google.cloud.pubsub._gax import DEFAULT_USER_AGENT channels = [] @@ -916,7 +923,7 @@ def 
make_channel(*args): mock_publisher_api.SERVICE_ADDRESS = host - creds = object() + creds = _make_credentials() connection = _Connection(in_emulator=False, credentials=creds) patch = mock.patch.multiple( @@ -932,8 +939,6 @@ def make_channel(*args): [(creds, DEFAULT_USER_AGENT, host)]) def test_emulator(self): - import mock - channels = [] mock_result = object() insecure_args = [] @@ -969,7 +974,6 @@ def _call_fut(self, connection): return make_gax_subscriber_api(connection) def test_live_api(self): - import mock from google.cloud.pubsub._gax import DEFAULT_USER_AGENT channels = [] @@ -988,7 +992,7 @@ def make_channel(*args): mock_subscriber_api.SERVICE_ADDRESS = host - creds = object() + creds = _make_credentials() connection = _Connection(in_emulator=False, credentials=creds) patch = mock.patch.multiple( @@ -1004,8 +1008,6 @@ def make_channel(*args): [(creds, DEFAULT_USER_AGENT, host)]) def test_emulator(self): - import mock - channels = [] mock_result = object() insecure_args = [] diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index be0f69835683..955fc06a9104 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + class _Base(unittest.TestCase): PROJECT = 'PROJECT' @@ -42,7 +49,6 @@ def test_default_url(self): self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): - import mock from google.cloud.environment_vars import PUBSUB_EMULATOR HOST = 'localhost:8187' @@ -454,7 +460,7 @@ def test_list_subscriptions_no_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} RETURNED = {'subscriptions': [SUB_INFO]} connection = _Connection(RETURNED) - creds = object() + creds = _make_credentials() client 
= Client(project=self.PROJECT, credentials=creds) client._connection = connection api = self._make_one(client) @@ -497,7 +503,7 @@ def test_list_subscriptions_with_paging(self): 'nextPageToken': 'TOKEN2', } connection = _Connection(RETURNED) - creds = object() + creds = _make_credentials() client = Client(project=self.PROJECT, credentials=creds) client._connection = connection api = self._make_one(client) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 1e549f8f7f4b..3bde01417359 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + class TestClient(unittest.TestCase): PROJECT = 'PROJECT' @@ -33,7 +40,7 @@ def _make_one(self, *args, **kw): def test_publisher_api_wo_gax(self): from google.cloud.pubsub._http import _PublisherAPI - creds = object() + creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, @@ -49,10 +56,9 @@ def test_publisher_api_wo_gax(self): self.assertIs(again, api) def test_no_gax_ctor(self): - import mock from google.cloud.pubsub._http import _PublisherAPI - creds = object() + creds = _make_credentials() with mock.patch('google.cloud.pubsub.client._USE_GAX', new=True): client = self._make_one(project=self.PROJECT, credentials=creds, @@ -63,8 +69,6 @@ def test_no_gax_ctor(self): self.assertIsInstance(api, _PublisherAPI) def test_publisher_api_w_gax(self): - import mock - wrapped = object() _called_with = [] @@ -78,7 +82,7 @@ def __init__(self, _wrapped, client): self._wrapped = _wrapped self._client = client - creds = object() + creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) @@ -102,7 +106,7 @@ def __init__(self, _wrapped, 
client): def test_subscriber_api_wo_gax(self): from google.cloud.pubsub._http import _SubscriberAPI - creds = object() + creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=False) @@ -117,8 +121,6 @@ def test_subscriber_api_wo_gax(self): self.assertIs(again, api) def test_subscriber_api_w_gax(self): - import mock - wrapped = object() _called_with = [] @@ -132,7 +134,7 @@ def __init__(self, _wrapped, client): self._wrapped = _wrapped self._client = client - creds = object() + creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) @@ -155,7 +157,7 @@ def __init__(self, _wrapped, client): def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = object() api = client.iam_policy_api @@ -168,7 +170,7 @@ def test_iam_policy_api(self): def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) client._connection = object() api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) @@ -191,7 +193,7 @@ def test_list_topics_with_paging(self): TOKEN1 = 'TOKEN1' TOKEN2 = 'TOKEN2' SIZE = 1 - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) client._connection = object() api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) @@ -209,7 +211,7 @@ def test_list_topics_with_paging(self): self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) def test_list_topics_missing_key(self): - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) client._connection = object() api = _FauxPublisherAPI() @@ -229,7 +231,7 @@ def 
test_list_subscriptions_no_paging(self): from google.cloud.pubsub.topic import Topic SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) returned = {'subscriptions': [SUB_INFO]} @@ -267,7 +269,7 @@ def test_list_subscriptions_with_paging(self): from google.cloud.pubsub.topic import Topic SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = object() + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, use_gax=False) @@ -320,7 +322,7 @@ def test_list_subscriptions_with_paging(self): def test_list_subscriptions_w_missing_key(self): PROJECT = 'PROJECT' - creds = object() + creds = _make_credentials() client = self._make_one(project=PROJECT, credentials=creds) client._connection = object() @@ -338,7 +340,7 @@ def test_list_subscriptions_w_missing_key(self): def test_topic(self): PROJECT = 'PROJECT' TOPIC_NAME = 'TOPIC_NAME' - creds = object() + creds = _make_credentials() client_obj = self._make_one(project=PROJECT, credentials=creds) new_topic = client_obj.topic(TOPIC_NAME) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 0a7e4cede8d7..f23967081fa5 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -14,6 +14,13 @@ import unittest +import mock + + +def _make_credentials(): + import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) + class TestTopic(unittest.TestCase): PROJECT = 'PROJECT' @@ -136,7 +143,6 @@ def test_publish_single_bytes_wo_attrs_w_bound_client(self): def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): import datetime - import mock from google.cloud._helpers import _RFC3339_MICROS NOW = datetime.datetime.utcnow() @@ -314,8 +320,8 @@ def 
test_list_subscriptions_no_paging(self): from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription - client = Client(project=self.PROJECT, credentials=object(), - use_gax=False) + client = Client(project=self.PROJECT, + credentials=_make_credentials(), use_gax=False) SUB_NAME_1 = 'subscription_1' SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( @@ -364,8 +370,8 @@ def test_list_subscriptions_with_paging(self): from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription - client = Client(project=self.PROJECT, credentials=object(), - use_gax=False) + client = Client(project=self.PROJECT, + credentials=_make_credentials(), use_gax=False) SUB_NAME_1 = 'subscription_1' SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( @@ -414,8 +420,8 @@ def test_list_subscriptions_with_paging(self): def test_list_subscriptions_missing_key(self): from google.cloud.pubsub.client import Client - client = Client(project=self.PROJECT, credentials=object(), - use_gax=False) + client = Client(project=self.PROJECT, + credentials=_make_credentials(), use_gax=False) client._connection = _Connection({}) topic = self._make_one(self.TOPIC_NAME, client=client) From 3f66d40c41e5eda58be95fdb124bc9436187c45d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 8 Dec 2016 15:17:03 -0800 Subject: [PATCH 0057/1197] Update versions for mega-release. We want to update - `google-cloud-bigquery` - `google-cloud-datastore` - `google-cloud-logging` - `google-cloud-storage` - `google-cloud-core` And then update `google-cloud` to re-wrap the latest versions of each. However, to avoid having packages in `google-cloud` with conflicting versions of `google-cloud-core`, we must release all packages. 
--- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 37d9cb9d6496..2923526312a4 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,13 +50,13 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.21.0, < 0.22dev', + 'google-cloud-core >= 0.22.1, < 0.23dev', 'gapic-google-cloud-pubsub-v1 >= 0.14.0, < 0.15dev', ] setup( name='google-cloud-pubsub', - version='0.21.0', + version='0.22.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From c08ceadc4b90e929b1db4474cb19405f787d1fec Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 9 Dec 2016 16:57:17 -0800 Subject: [PATCH 0058/1197] Explicitly putting 1.0.2 lower bound on grpcio. Also upgrading logging from 0.14.x to 0.90.x --- packages/google-cloud-pubsub/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 2923526312a4..3d713235e7a8 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,6 +51,7 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.22.1, < 0.23dev', + 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.14.0, < 0.15dev', ] From b43e12b9a99e6157f3723a16d933fb16b29ae350 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 14 Dec 2016 23:43:05 -0800 Subject: [PATCH 0059/1197] Manually creating Client._connection in subclasses. 
--- .../google/cloud/pubsub/client.py | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index dd323aa25fe5..271d231c2329 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -51,15 +51,16 @@ class Client(JSONClient): passed when creating a topic. If not passed, falls back to the default inferred from the environment. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` or - :class:`NoneType` - :param credentials: The OAuth2 Credentials to use for the connection - owned by this client. If not passed (and if no ``http`` - object is passed), falls back to the default inferred - from the environment. - - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: An optional HTTP object to make requests. If not passed, an + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: (Optional) The OAuth2 Credentials to use for this + client. If not passed (and if no ``http`` object is + passed), falls back to the default inferred from the + environment. + + :type http: :class:`~httplib2.Http` + :param http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an ``http`` object is created that is bound to the ``credentials`` for the current object. 
@@ -69,17 +70,22 @@ class Client(JSONClient): falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment variable """ + + _publisher_api = None + _subscriber_api = None + _iam_policy_api = None + def __init__(self, project=None, credentials=None, http=None, use_gax=None): - super(Client, self).__init__(project, credentials, http) + super(Client, self).__init__( + project=project, credentials=credentials, http=http) + self._connection = Connection( + credentials=self._credentials, http=self._http) if use_gax is None: self._use_gax = _USE_GAX else: self._use_gax = use_gax - _connection_class = Connection - _publisher_api = _subscriber_api = _iam_policy_api = None - @property def publisher_api(self): """Helper for publisher-related API calls.""" From 3df93f233d6b96eb52984ddcd83217d8654d873c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 15 Dec 2016 10:17:07 -0800 Subject: [PATCH 0060/1197] Removing references to cnxn in Pub/Sub _gax module. --- .../google/cloud/pubsub/_gax.py | 56 +++++++++++-------- .../google/cloud/pubsub/client.py | 8 ++- .../unit_tests/test__gax.py | 31 +++------- .../unit_tests/test_client.py | 14 +++-- 4 files changed, 58 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 97b73db687aa..2ca9c1be6c02 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -512,48 +512,60 @@ def _received_message_pb_to_mapping(received_message_pb): } -def make_gax_publisher_api(connection): +def make_gax_publisher_api(credentials, host=None, secure=True): """Create an instance of the GAX Publisher API. - If the ``connection`` is intended for a local emulator, then - an insecure ``channel`` is created pointing at the local - Pub / Sub server. + If the ``secure=False`` then we create an insecure ``channel`` + pointing at the local Pub / Sub emulator. 
- :type connection: :class:`~google.cloud.pubsub._http.Connection` - :param connection: The connection that holds configuration details. + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: Credentials for getting access tokens. + + :type host: str + :param host: (Optional) The host for an insecure channel. Only + used if ``secure=False``. + + :type secure: bool + :param secure: (Optional) Indicates if we should create a secure + or insecure channel. Defaults to :data:`True`. :rtype: :class:`.publisher_client.PublisherClient` - :returns: A publisher API instance with the proper connection - configuration. + :returns: A publisher API instance with the proper channel. """ - if connection.in_emulator: - channel = insecure_channel(connection.host) + if not secure: + channel = insecure_channel(host) else: channel = make_secure_channel( - connection.credentials, DEFAULT_USER_AGENT, + credentials, DEFAULT_USER_AGENT, PublisherClient.SERVICE_ADDRESS) return PublisherClient(channel=channel) -def make_gax_subscriber_api(connection): +def make_gax_subscriber_api(credentials, host=None, secure=True): """Create an instance of the GAX Subscriber API. - If the ``connection`` is intended for a local emulator, then - an insecure ``channel`` is created pointing at the local - Pub / Sub server. + If the ``secure=False`` then we create an insecure ``channel`` + pointing at the local Pub / Sub emulator. + + :type credentials: :class:`~google.auth.credentials.Credentials` + :param credentials: Credentials for getting access tokens. + + :type host: str + :param host: (Optional) The host for an insecure channel. Only + used if ``secure=False``. - :type connection: :class:`~google.cloud.pubsub._http.Connection` - :param connection: The connection that holds configuration details. + :type secure: bool + :param secure: (Optional) Indicates if we should create a secure + or insecure channel. Defaults to :data:`True`. 
:rtype: :class:`.subscriber_client.SubscriberClient` - :returns: A subscriber API instance with the proper connection - configuration. + :returns: A subscriber API instance with the proper channel. """ - if connection.in_emulator: - channel = insecure_channel(connection.host) + if not secure: + channel = insecure_channel(host) else: channel = make_secure_channel( - connection.credentials, DEFAULT_USER_AGENT, + credentials, DEFAULT_USER_AGENT, SubscriberClient.SERVICE_ADDRESS) return SubscriberClient(channel=channel) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 271d231c2329..6388da2cbdbf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -91,7 +91,9 @@ def publisher_api(self): """Helper for publisher-related API calls.""" if self._publisher_api is None: if self._use_gax: - generated = make_gax_publisher_api(self._connection) + generated = make_gax_publisher_api( + self._credentials, host=self._connection.host, + secure=not self._connection.in_emulator) self._publisher_api = GAXPublisherAPI(generated, self) else: self._publisher_api = JSONPublisherAPI(self) @@ -102,7 +104,9 @@ def subscriber_api(self): """Helper for subscriber-related API calls.""" if self._subscriber_api is None: if self._use_gax: - generated = make_gax_subscriber_api(self._connection) + generated = make_gax_subscriber_api( + self._credentials, host=self._connection.host, + secure=not self._connection.in_emulator) self._subscriber_api = GAXSubscriberAPI(generated, self) else: self._subscriber_api = JSONSubscriberAPI(self) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 4f2037d7c4dd..f9682f02e14b 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -900,9 +900,9 @@ def 
test_subscription_modify_ack_deadline_error(self): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_make_gax_publisher_api(_Base, unittest.TestCase): - def _call_fut(self, connection): + def _call_fut(self, *args, **kwargs): from google.cloud.pubsub._gax import make_gax_publisher_api - return make_gax_publisher_api(connection) + return make_gax_publisher_api(*args, **kwargs) def test_live_api(self): from google.cloud.pubsub._gax import DEFAULT_USER_AGENT @@ -924,14 +924,12 @@ def make_channel(*args): mock_publisher_api.SERVICE_ADDRESS = host creds = _make_credentials() - connection = _Connection(in_emulator=False, - credentials=creds) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', PublisherClient=mock_publisher_api, make_secure_channel=make_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(creds) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) @@ -953,13 +951,12 @@ def mock_insecure_channel(host): return mock_channel host = 'CURR_HOST:1234' - connection = _Connection(in_emulator=True, host=host) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', PublisherClient=mock_publisher_api, insecure_channel=mock_insecure_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(None, host=host, secure=False) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) @@ -969,9 +966,9 @@ def mock_insecure_channel(host): @unittest.skipUnless(_HAVE_GAX, 'No gax-python') class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - def _call_fut(self, connection): + def _call_fut(self, *args, **kwargs): from google.cloud.pubsub._gax import make_gax_subscriber_api - return make_gax_subscriber_api(connection) + return make_gax_subscriber_api(*args, **kwargs) def test_live_api(self): from google.cloud.pubsub._gax import DEFAULT_USER_AGENT @@ -993,14 +990,12 @@ def make_channel(*args): mock_subscriber_api.SERVICE_ADDRESS = host creds = 
_make_credentials() - connection = _Connection(in_emulator=False, - credentials=creds) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', SubscriberClient=mock_subscriber_api, make_secure_channel=make_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(creds) self.assertIs(result, mock_result) self.assertEqual(channels, [channel_obj]) @@ -1022,13 +1017,12 @@ def mock_insecure_channel(host): return mock_channel host = 'CURR_HOST:1234' - connection = _Connection(in_emulator=True, host=host) patch = mock.patch.multiple( 'google.cloud.pubsub._gax', SubscriberClient=mock_subscriber_api, insecure_channel=mock_insecure_channel) with patch: - result = self._call_fut(connection) + result = self._call_fut(None, host=host, secure=False) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) @@ -1207,15 +1201,6 @@ def __init__(self, received_messages): self.received_messages = received_messages -class _Connection(object): - - def __init__(self, in_emulator=False, host=None, - credentials=None): - self.in_emulator = in_emulator - self.host = host - self.credentials = credentials - - class _Client(object): def __init__(self, project): diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 3bde01417359..7ba9a5b50d8c 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -69,6 +69,8 @@ def test_no_gax_ctor(self): self.assertIsInstance(api, _PublisherAPI) def test_publisher_api_w_gax(self): + from google.cloud.pubsub import _http + wrapped = object() _called_with = [] @@ -100,8 +102,9 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.publisher_api self.assertIs(again, api) - args = (client._connection,) - self.assertEqual(_called_with, [(args, {})]) + args = (creds,) + kwargs = {'host': _http.Connection.API_BASE_URL, 'secure': True} + 
self.assertEqual(_called_with, [(args, kwargs)]) def test_subscriber_api_wo_gax(self): from google.cloud.pubsub._http import _SubscriberAPI @@ -121,6 +124,8 @@ def test_subscriber_api_wo_gax(self): self.assertIs(again, api) def test_subscriber_api_w_gax(self): + from google.cloud.pubsub import _http + wrapped = object() _called_with = [] @@ -152,8 +157,9 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.subscriber_api self.assertIs(again, api) - args = (client._connection,) - self.assertEqual(_called_with, [(args, {})]) + args = (creds,) + kwargs = {'host': _http.Connection.API_BASE_URL, 'secure': True} + self.assertEqual(_called_with, [(args, kwargs)]) def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI From 7638eba44a309e0a225d3ccee0e0f8c74aa7c7e2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 15 Dec 2016 10:47:34 -0800 Subject: [PATCH 0061/1197] Removing many references to connection in Pub / Sub _http. Still using a method of a connection object, but this way it can be more easily swapped out for a function defined in that module doing the same task. --- .../google/cloud/pubsub/_http.py | 58 +++++++------------ .../google/cloud/pubsub/client.py | 2 +- .../unit_tests/test__http.py | 21 ++++--- .../unit_tests/test_client.py | 13 +++-- 4 files changed, 43 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index 635c43bdaab5..583413e313b6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""Create / interact with Google Cloud Pub/Sub connections.""" +"""Interact with Google Cloud Pub/Sub via JSON-over-HTTP.""" import base64 import copy @@ -109,7 +109,7 @@ class _PublisherAPI(object): def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_topics(self, project, page_size=None, page_token=None): """API call: list topics for a given project @@ -131,7 +131,7 @@ def list_topics(self, project, page_size=None, page_token=None): :rtype: :class:`~google.cloud.iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current connection. + accessible to the current client. """ extra_params = {} if page_size is not None: @@ -156,8 +156,7 @@ def topic_create(self, topic_path): :rtype: dict :returns: ``Topic`` resource returned from the API. """ - conn = self._connection - return conn.api_request(method='PUT', path='/%s' % (topic_path,)) + return self.api_request(method='PUT', path='/%s' % (topic_path,)) def topic_get(self, topic_path): """API call: retrieve a topic @@ -172,8 +171,7 @@ def topic_get(self, topic_path): :rtype: dict :returns: ``Topic`` resource returned from the API. """ - conn = self._connection - return conn.api_request(method='GET', path='/%s' % (topic_path,)) + return self.api_request(method='GET', path='/%s' % (topic_path,)) def topic_delete(self, topic_path): """API call: delete a topic @@ -185,8 +183,7 @@ def topic_delete(self, topic_path): :param topic_path: the fully-qualified path of the topic, in format ``projects//topics/``. 
""" - conn = self._connection - conn.api_request(method='DELETE', path='/%s' % (topic_path,)) + self.api_request(method='DELETE', path='/%s' % (topic_path,)) def topic_publish(self, topic_path, messages): """API call: publish one or more messages to a topic @@ -206,9 +203,8 @@ def topic_publish(self, topic_path, messages): """ messages_to_send = copy.deepcopy(messages) _transform_messages_base64(messages_to_send, _base64_unicode) - conn = self._connection data = {'messages': messages_to_send} - response = conn.api_request( + response = self.api_request( method='POST', path='/%s:publish' % (topic_path,), data=data) return response['messageIds'] @@ -257,7 +253,7 @@ class _SubscriberAPI(object): def __init__(self, client): self._client = client - self._connection = client._connection + self.api_request = client._connection.api_request def list_subscriptions(self, project, page_size=None, page_token=None): """API call: list subscriptions for a given project @@ -328,7 +324,6 @@ def subscription_create(self, subscription_path, topic_path, :rtype: dict :returns: ``Subscription`` resource returned from the API. """ - conn = self._connection path = '/%s' % (subscription_path,) resource = {'topic': topic_path} @@ -338,7 +333,7 @@ def subscription_create(self, subscription_path, topic_path, if push_endpoint is not None: resource['pushConfig'] = {'pushEndpoint': push_endpoint} - return conn.api_request(method='PUT', path=path, data=resource) + return self.api_request(method='PUT', path=path, data=resource) def subscription_get(self, subscription_path): """API call: retrieve a subscription @@ -354,9 +349,8 @@ def subscription_get(self, subscription_path): :rtype: dict :returns: ``Subscription`` resource returned from the API. 
""" - conn = self._connection path = '/%s' % (subscription_path,) - return conn.api_request(method='GET', path=path) + return self.api_request(method='GET', path=path) def subscription_delete(self, subscription_path): """API call: delete a subscription @@ -369,9 +363,8 @@ def subscription_delete(self, subscription_path): the fully-qualified path of the subscription, in format ``projects//subscriptions/``. """ - conn = self._connection path = '/%s' % (subscription_path,) - conn.api_request(method='DELETE', path=path) + self.api_request(method='DELETE', path=path) def subscription_modify_push_config(self, subscription_path, push_endpoint): @@ -390,10 +383,9 @@ def subscription_modify_push_config(self, subscription_path, (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. """ - conn = self._connection path = '/%s:modifyPushConfig' % (subscription_path,) resource = {'pushConfig': {'pushEndpoint': push_endpoint}} - conn.api_request(method='POST', path=path, data=resource) + self.api_request(method='POST', path=path, data=resource) def subscription_pull(self, subscription_path, return_immediately=False, max_messages=1): @@ -419,13 +411,12 @@ def subscription_pull(self, subscription_path, return_immediately=False, :rtype: list of dict :returns: the ``receivedMessages`` element of the response. 
""" - conn = self._connection path = '/%s:pull' % (subscription_path,) data = { 'returnImmediately': return_immediately, 'maxMessages': max_messages, } - response = conn.api_request(method='POST', path=path, data=data) + response = self.api_request(method='POST', path=path, data=data) messages = response.get('receivedMessages', ()) _transform_messages_base64(messages, base64.b64decode, 'message') return messages @@ -444,12 +435,11 @@ def subscription_acknowledge(self, subscription_path, ack_ids): :type ack_ids: list of string :param ack_ids: ack IDs of messages being acknowledged """ - conn = self._connection path = '/%s:acknowledge' % (subscription_path,) data = { 'ackIds': ack_ids, } - conn.api_request(method='POST', path=path, data=data) + self.api_request(method='POST', path=path, data=data) def subscription_modify_ack_deadline(self, subscription_path, ack_ids, ack_deadline): @@ -470,24 +460,23 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, :param ack_deadline: the deadline (in seconds) by which messages pulled from the back-end must be acknowledged. """ - conn = self._connection path = '/%s:modifyAckDeadline' % (subscription_path,) data = { 'ackIds': ack_ids, 'ackDeadlineSeconds': ack_deadline, } - conn.api_request(method='POST', path=path, data=data) + self.api_request(method='POST', path=path, data=data) class _IAMPolicyAPI(object): """Helper mapping IAM policy-related APIs. - :type connection: :class:`Connection` - :param connection: the connection used to make API requests. + :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: the client used to make API requests. 
""" - def __init__(self, connection): - self._connection = connection + def __init__(self, client): + self.api_request = client._connection.api_request def get_iam_policy(self, target_path): """API call: fetch the IAM policy for the target @@ -502,9 +491,8 @@ def get_iam_policy(self, target_path): :rtype: dict :returns: the resource returned by the ``getIamPolicy`` API request. """ - conn = self._connection path = '/%s:getIamPolicy' % (target_path,) - return conn.api_request(method='GET', path=path) + return self.api_request(method='GET', path=path) def set_iam_policy(self, target_path, policy): """API call: update the IAM policy for the target @@ -522,10 +510,9 @@ def set_iam_policy(self, target_path, policy): :rtype: dict :returns: the resource returned by the ``setIamPolicy`` API request. """ - conn = self._connection wrapped = {'policy': policy} path = '/%s:setIamPolicy' % (target_path,) - return conn.api_request(method='POST', path=path, data=wrapped) + return self.api_request(method='POST', path=path, data=wrapped) def test_iam_permissions(self, target_path, permissions): """API call: test permissions @@ -543,10 +530,9 @@ def test_iam_permissions(self, target_path, permissions): :rtype: dict :returns: the resource returned by the ``getIamPolicy`` API request. 
""" - conn = self._connection wrapped = {'permissions': permissions} path = '/%s:testIamPermissions' % (target_path,) - resp = conn.api_request(method='POST', path=path, data=wrapped) + resp = self.api_request(method='POST', path=path, data=wrapped) return resp.get('permissions', []) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 6388da2cbdbf..6e2f0e06ffd6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -116,7 +116,7 @@ def subscriber_api(self): def iam_policy_api(self): """Helper for IAM policy-related API calls.""" if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self._connection) + self._iam_policy_api = _IAMPolicyAPI(self) return self._iam_policy_api def list_topics(self, page_size=None, page_token=None): diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 955fc06a9104..e60ebf480684 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -111,7 +111,7 @@ def test_ctor(self): client = _Client(connection, self.PROJECT) api = self._make_one(client) self.assertIs(api._client, client) - self.assertIs(api._connection, connection) + self.assertEqual(api.api_request, connection.api_request) def test_list_topics_no_paging(self): from google.cloud.pubsub.topic import Topic @@ -449,8 +449,8 @@ def test_ctor(self): connection = _Connection() client = _Client(connection, self.PROJECT) api = self._make_one(client) - self.assertIs(api._connection, connection) self.assertIs(api._client, client) + self.assertEqual(api.api_request, connection.api_request) def test_list_subscriptions_no_paging(self): from google.cloud.pubsub.client import Client @@ -747,8 +747,9 @@ def _get_target_class(): def test_ctor(self): connection = 
_Connection() - api = self._make_one(connection) - self.assertIs(api._connection, connection) + client = _Client(connection, None) + api = self._make_one(client) + self.assertEqual(api.api_request, connection.api_request) def test_get_iam_policy(self): from google.cloud.pubsub.iam import OWNER_ROLE @@ -771,7 +772,8 @@ def test_get_iam_policy(self): ], } connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) policy = api.get_iam_policy(self.TOPIC_PATH) @@ -802,7 +804,8 @@ def test_set_iam_policy(self): } RETURNED = POLICY.copy() connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) @@ -822,7 +825,8 @@ def test_test_iam_permissions(self): ALLOWED = ALL_ROLES[1:] RETURNED = {'permissions': ALLOWED} connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) @@ -841,7 +845,8 @@ def test_test_iam_permissions_missing_key(self): ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] RETURNED = {} connection = _Connection(RETURNED) - api = self._make_one(connection) + client = _Client(connection, None) + api = self._make_one(client) allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 7ba9a5b50d8c..d0119b22853e 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -46,11 +46,11 @@ def test_publisher_api_wo_gax(self): project=self.PROJECT, credentials=creds, use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.publisher_api self.assertIsInstance(api, 
_PublisherAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.publisher_api self.assertIs(again, api) @@ -114,11 +114,11 @@ def test_subscriber_api_wo_gax(self): project=self.PROJECT, credentials=creds, use_gax=False) - conn = client._connection = object() + conn = client._connection = _Connection() api = client.subscriber_api self.assertIsInstance(api, _SubscriberAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.subscriber_api self.assertIs(again, api) @@ -165,10 +165,11 @@ def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = object() + conn = client._connection = _Connection() + api = client.iam_policy_api self.assertIsInstance(api, _IAMPolicyAPI) - self.assertIs(api._connection, conn) + self.assertEqual(api.api_request, conn.api_request) # API instance is cached again = client.iam_policy_api self.assertIs(again, api) From c54814475c2a9611562d57ec4f0a479ae27cf528 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 15 Dec 2016 22:27:01 -0800 Subject: [PATCH 0062/1197] Re-factor of GAX Pub / Sub channel helpers. 
--- .../google/cloud/pubsub/_gax.py | 34 ++++++++----------- .../google/cloud/pubsub/client.py | 18 ++++++---- .../unit_tests/test__gax.py | 4 +-- .../unit_tests/test_client.py | 34 ++++++++++++++----- 4 files changed, 54 insertions(+), 36 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 2ca9c1be6c02..582cb8d0e128 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -512,27 +512,24 @@ def _received_message_pb_to_mapping(received_message_pb): } -def make_gax_publisher_api(credentials, host=None, secure=True): +def make_gax_publisher_api(credentials=None, host=None): """Create an instance of the GAX Publisher API. - If the ``secure=False`` then we create an insecure ``channel`` - pointing at the local Pub / Sub emulator. + If the ``credentials`` are omitted, then we create an insecure + ``channel`` pointing at the local Pub / Sub emulator. :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: Credentials for getting access tokens. + :param credentials: (Optional) Credentials for getting access + tokens. :type host: str :param host: (Optional) The host for an insecure channel. Only - used if ``secure=False``. - - :type secure: bool - :param secure: (Optional) Indicates if we should create a secure - or insecure channel. Defaults to :data:`True`. + used if ``credentials`` are omitted. :rtype: :class:`.publisher_client.PublisherClient` :returns: A publisher API instance with the proper channel. 
""" - if not secure: + if credentials is None: channel = insecure_channel(host) else: channel = make_secure_channel( @@ -541,27 +538,24 @@ def make_gax_publisher_api(credentials, host=None, secure=True): return PublisherClient(channel=channel) -def make_gax_subscriber_api(credentials, host=None, secure=True): +def make_gax_subscriber_api(credentials=None, host=None): """Create an instance of the GAX Subscriber API. - If the ``secure=False`` then we create an insecure ``channel`` - pointing at the local Pub / Sub emulator. + If the ``credentials`` are omitted, then we create an insecure + ``channel`` pointing at the local Pub / Sub emulator. :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: Credentials for getting access tokens. + :param credentials: (Optional) Credentials for getting access + tokens. :type host: str :param host: (Optional) The host for an insecure channel. Only - used if ``secure=False``. - - :type secure: bool - :param secure: (Optional) Indicates if we should create a secure - or insecure channel. Defaults to :data:`True`. + used if ``credentials`` are omitted. :rtype: :class:`.subscriber_client.SubscriberClient` :returns: A subscriber API instance with the proper channel. 
""" - if not secure: + if credentials is None: channel = insecure_channel(host) else: channel = make_secure_channel( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 6e2f0e06ffd6..689113631b12 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -91,9 +91,12 @@ def publisher_api(self): """Helper for publisher-related API calls.""" if self._publisher_api is None: if self._use_gax: - generated = make_gax_publisher_api( - self._credentials, host=self._connection.host, - secure=not self._connection.in_emulator) + if self._connection.in_emulator: + generated = make_gax_publisher_api( + host=self._connection.host) + else: + generated = make_gax_publisher_api( + credentials=self._credentials) self._publisher_api = GAXPublisherAPI(generated, self) else: self._publisher_api = JSONPublisherAPI(self) @@ -104,9 +107,12 @@ def subscriber_api(self): """Helper for subscriber-related API calls.""" if self._subscriber_api is None: if self._use_gax: - generated = make_gax_subscriber_api( - self._credentials, host=self._connection.host, - secure=not self._connection.in_emulator) + if self._connection.in_emulator: + generated = make_gax_subscriber_api( + host=self._connection.host) + else: + generated = make_gax_subscriber_api( + credentials=self._credentials) self._subscriber_api = GAXSubscriberAPI(generated, self) else: self._subscriber_api = JSONSubscriberAPI(self) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index f9682f02e14b..aeb2cfc229cf 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -956,7 +956,7 @@ def mock_insecure_channel(host): PublisherClient=mock_publisher_api, insecure_channel=mock_insecure_channel) with patch: - result = self._call_fut(None, 
host=host, secure=False) + result = self._call_fut(host=host) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) @@ -1022,7 +1022,7 @@ def mock_insecure_channel(host): SubscriberClient=mock_subscriber_api, insecure_channel=mock_insecure_channel) with patch: - result = self._call_fut(None, host=host, secure=False) + result = self._call_fut(host=host) self.assertIs(result, mock_result) self.assertEqual(channels, [mock_channel]) diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index d0119b22853e..34b4cd4d6b8b 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -68,7 +68,7 @@ def test_no_gax_ctor(self): api = client.publisher_api self.assertIsInstance(api, _PublisherAPI) - def test_publisher_api_w_gax(self): + def _publisher_api_w_gax_helper(self, emulator=False): from google.cloud.pubsub import _http wrapped = object() @@ -88,6 +88,7 @@ def __init__(self, _wrapped, client): client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) + client._connection.in_emulator = emulator patch = mock.patch.multiple( 'google.cloud.pubsub.client', @@ -102,9 +103,17 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.publisher_api self.assertIs(again, api) - args = (creds,) - kwargs = {'host': _http.Connection.API_BASE_URL, 'secure': True} - self.assertEqual(_called_with, [(args, kwargs)]) + if emulator: + kwargs = {'host': _http.Connection.API_BASE_URL} + else: + kwargs = {'credentials': creds} + self.assertEqual(_called_with, [((), kwargs)]) + + def test_publisher_api_w_gax(self): + self._publisher_api_w_gax_helper() + + def test_publisher_api_w_gax_and_emulator(self): + self._publisher_api_w_gax_helper(emulator=True) def test_subscriber_api_wo_gax(self): from google.cloud.pubsub._http import _SubscriberAPI @@ -123,7 +132,7 @@ def 
test_subscriber_api_wo_gax(self): again = client.subscriber_api self.assertIs(again, api) - def test_subscriber_api_w_gax(self): + def _subscriber_api_w_gax_helper(self, emulator=False): from google.cloud.pubsub import _http wrapped = object() @@ -143,6 +152,7 @@ def __init__(self, _wrapped, client): client = self._make_one( project=self.PROJECT, credentials=creds, use_gax=True) + client._connection.in_emulator = emulator patch = mock.patch.multiple( 'google.cloud.pubsub.client', @@ -157,9 +167,17 @@ def __init__(self, _wrapped, client): # API instance is cached again = client.subscriber_api self.assertIs(again, api) - args = (creds,) - kwargs = {'host': _http.Connection.API_BASE_URL, 'secure': True} - self.assertEqual(_called_with, [(args, kwargs)]) + if emulator: + kwargs = {'host': _http.Connection.API_BASE_URL} + else: + kwargs = {'credentials': creds} + self.assertEqual(_called_with, [((), kwargs)]) + + def test_subscriber_api_w_gax(self): + self._subscriber_api_w_gax_helper() + + def test_subscriber_api_w_gax_and_emulator(self): + self._subscriber_api_w_gax_helper(emulator=True) def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI From ad74cb71505548d7a1840089689d5aa17efae83f Mon Sep 17 00:00:00 2001 From: quom Date: Fri, 16 Dec 2016 10:48:49 +0000 Subject: [PATCH 0063/1197] Check dict is not empty before acknowledging messages upon context exit. Closes #2878. 
--- .../google-cloud-pubsub/google/cloud/pubsub/subscription.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index c98277d660df..e76abcf94dd8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -509,4 +509,5 @@ def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): - self._subscription.acknowledge(list(self), self._client) + if self: + self._subscription.acknowledge(list(self), self._client) From 5420617c3673a2eb6b2330f69a2ef253c3e4001c Mon Sep 17 00:00:00 2001 From: quom Date: Fri, 16 Dec 2016 15:05:40 +0000 Subject: [PATCH 0064/1197] Add test to reproduce #2878 --- .../unit_tests/test_subscription.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index 6d4dc1068f2a..6078a3cc70c8 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -14,6 +14,8 @@ import unittest +import mock + class TestSubscription(unittest.TestCase): PROJECT = 'PROJECT' @@ -745,6 +747,16 @@ def test___exit___(self): [ACK_ID1, ACK_ID2]) self.assertIs(subscription._ack_client, CLIENT) + def test_empty_ack_no_acknowledge(self): + subscription = mock.Mock(_FauxSubscription) + subscription.pull = lambda *args: [] + + auto_ack = self._make_one(subscription) + with auto_ack: + pass + + subscription.acknowledge.assert_not_called() + class _FauxIAMPolicy(object): From b35c4e5b3c8df89a7bc9ef4923be68c3895b1352 Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Tue, 3 Jan 2017 11:57:33 -0500 Subject: [PATCH 0065/1197] Fix pubsub system tests failure due to credentials. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 689113631b12..a66aebc5db95 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -96,7 +96,7 @@ def publisher_api(self): host=self._connection.host) else: generated = make_gax_publisher_api( - credentials=self._credentials) + credentials=self._connection._credentials) self._publisher_api = GAXPublisherAPI(generated, self) else: self._publisher_api = JSONPublisherAPI(self) @@ -112,7 +112,7 @@ def subscriber_api(self): host=self._connection.host) else: generated = make_gax_subscriber_api( - credentials=self._credentials) + credentials=self._connection._credentials) self._subscriber_api = GAXSubscriberAPI(generated, self) else: self._subscriber_api = JSONSubscriberAPI(self) From de5653557638b4472b428c9773b1971ae4d0326f Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Tue, 17 Jan 2017 22:21:29 -0500 Subject: [PATCH 0066/1197] Update import spacing in tests. 
--- .../unit_tests/test__gax.py | 39 +++++++++++++++++++ .../unit_tests/test__helpers.py | 2 + .../unit_tests/test__http.py | 18 +++++++++ .../unit_tests/test_client.py | 3 ++ .../unit_tests/test_iam.py | 3 ++ .../unit_tests/test_message.py | 2 + .../unit_tests/test_subscription.py | 13 +++++++ .../unit_tests/test_topic.py | 8 ++++ 8 files changed, 88 insertions(+) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index aeb2cfc229cf..d7e3409a18e6 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -32,6 +32,7 @@ def _make_credentials(): # pylint: disable=redefined-outer-name import google.auth.credentials # pylint: enable=redefined-outer-name + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -55,6 +56,7 @@ class Test_PublisherAPI(_Base, unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub._gax import _PublisherAPI + return _PublisherAPI def test_ctor(self): @@ -137,6 +139,7 @@ def test_topic_create(self): def test_topic_create_already_exists(self): from google.cloud.exceptions import Conflict + gax_api = _GAXPublisherAPI(_create_topic_conflict=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -150,6 +153,7 @@ def test_topic_create_already_exists(self): def test_topic_create_error(self): from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -176,6 +180,7 @@ def test_topic_get_hit(self): def test_topic_get_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -189,6 +194,7 @@ def test_topic_get_miss(self): def test_topic_get_error(self): from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) client = 
_Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -213,6 +219,7 @@ def test_topic_delete_hit(self): def test_topic_delete_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXPublisherAPI(_delete_topic_ok=False) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -226,6 +233,7 @@ def test_topic_delete_miss(self): def test_topic_delete_error(self): from google.gax.errors import GaxError + gax_api = _GAXPublisherAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -239,6 +247,7 @@ def test_topic_delete_error(self): def test_topic_publish_hit(self): import base64 + PAYLOAD = b'This is the message text' B64 = base64.b64encode(PAYLOAD).decode('ascii') MSGID = 'DEADBEEF' @@ -261,6 +270,7 @@ def test_topic_publish_hit(self): def test_topic_publish_miss_w_attrs_w_bytes_payload(self): import base64 from google.cloud.exceptions import NotFound + PAYLOAD = b'This is the message text' B64 = base64.b64encode(PAYLOAD) MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} @@ -281,6 +291,7 @@ def test_topic_publish_miss_w_attrs_w_bytes_payload(self): def test_topic_publish_error(self): import base64 from google.gax.errors import GaxError + PAYLOAD = b'This is the message text' B64 = base64.b64encode(PAYLOAD).decode('ascii') MESSAGE = {'data': B64, 'attributes': {}} @@ -539,6 +550,7 @@ def test_subscription_create(self): def test_subscription_create_already_exists(self): from google.cloud.exceptions import Conflict + DEADLINE = 600 gax_api = _GAXSubscriberAPI(_create_subscription_conflict=True) client = _Client(self.PROJECT) @@ -558,6 +570,7 @@ def test_subscription_create_already_exists(self): def test_subscription_create_error(self): from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -600,6 +613,7 @@ def test_subscription_get_hit(self): def 
test_subscription_get_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -613,6 +627,7 @@ def test_subscription_get_miss(self): def test_subscription_get_error(self): from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -637,6 +652,7 @@ def test_subscription_delete_hit(self): def test_subscription_delete_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -650,6 +666,7 @@ def test_subscription_delete_miss(self): def test_subscription_delete_error(self): from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -675,6 +692,7 @@ def test_subscription_modify_push_config_hit(self): def test_subscription_modify_push_config_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -690,6 +708,7 @@ def test_subscription_modify_push_config_miss(self): def test_subscription_modify_push_config_error(self): from google.gax.errors import GaxError + gax_api = _GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -744,6 +763,7 @@ def test_subscription_pull_explicit(self): def test_subscription_pull_defaults_miss(self): from google.cloud.exceptions import NotFound + gax_api = _GAXSubscriberAPI() client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -760,6 +780,7 @@ def test_subscription_pull_defaults_miss(self): def test_subscription_pull_defaults_error(self): from google.gax.errors import GaxError + gax_api = 
_GAXSubscriberAPI(_random_gax_error=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -808,6 +829,7 @@ def test_subscription_acknowledge_hit(self): def test_subscription_acknowledge_miss(self): from google.cloud.exceptions import NotFound + ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI() @@ -824,6 +846,7 @@ def test_subscription_acknowledge_miss(self): def test_subscription_acknowledge_error(self): from google.gax.errors import GaxError + ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' gax_api = _GAXSubscriberAPI(_random_gax_error=True) @@ -858,6 +881,7 @@ def test_subscription_modify_ack_deadline_hit(self): def test_subscription_modify_ack_deadline_miss(self): from google.cloud.exceptions import NotFound + ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' NEW_DEADLINE = 90 @@ -878,6 +902,7 @@ def test_subscription_modify_ack_deadline_miss(self): def test_subscription_modify_ack_deadline_error(self): from google.gax.errors import GaxError + ACK_ID1 = 'DEADBEEF' ACK_ID2 = 'BEADCAFE' NEW_DEADLINE = 90 @@ -902,6 +927,7 @@ class Test_make_gax_publisher_api(_Base, unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud.pubsub._gax import make_gax_publisher_api + return make_gax_publisher_api(*args, **kwargs) def test_live_api(self): @@ -968,6 +994,7 @@ class Test_make_gax_subscriber_api(_Base, unittest.TestCase): def _call_fut(self, *args, **kwargs): from google.cloud.pubsub._gax import make_gax_subscriber_api + return make_gax_subscriber_api(*args, **kwargs) def test_live_api(self): @@ -1039,6 +1066,7 @@ def list_topics(self, name, page_size, options): def create_topic(self, name, options=None): from google.gax.errors import GaxError + self._create_topic_called_with = name, options if self._random_gax_error: raise GaxError('error') @@ -1048,6 +1076,7 @@ def create_topic(self, name, options=None): def get_topic(self, name, options=None): from google.gax.errors import GaxError + self._get_topic_called_with = name, 
options if self._random_gax_error: raise GaxError('error') @@ -1058,6 +1087,7 @@ def get_topic(self, name, options=None): def delete_topic(self, name, options=None): from google.gax.errors import GaxError + self._delete_topic_called_with = name, options if self._random_gax_error: raise GaxError('error') @@ -1066,6 +1096,7 @@ def delete_topic(self, name, options=None): def publish(self, topic, messages, options=None): from google.gax.errors import GaxError + self._publish_called_with = topic, messages, options if self._random_gax_error: raise GaxError('error') @@ -1076,6 +1107,7 @@ def publish(self, topic, messages, options=None): def list_topic_subscriptions(self, topic, page_size, options=None): from google.gax.errors import GaxError + self._list_topic_subscriptions_called_with = topic, page_size, options if self._random_gax_error: raise GaxError('error') @@ -1101,6 +1133,7 @@ def create_subscription(self, name, topic, push_config, ack_deadline_seconds, options=None): from google.gax.errors import GaxError + self._create_subscription_called_with = ( name, topic, push_config, ack_deadline_seconds, options) if self._random_gax_error: @@ -1111,6 +1144,7 @@ def create_subscription(self, name, topic, def get_subscription(self, name, options=None): from google.gax.errors import GaxError + self._get_subscription_called_with = name, options if self._random_gax_error: raise GaxError('error') @@ -1121,6 +1155,7 @@ def get_subscription(self, name, options=None): def delete_subscription(self, name, options=None): from google.gax.errors import GaxError + self._delete_subscription_called_with = name, options if self._random_gax_error: raise GaxError('error') @@ -1129,6 +1164,7 @@ def delete_subscription(self, name, options=None): def modify_push_config(self, name, push_config, options=None): from google.gax.errors import GaxError + self._modify_push_config_called_with = name, push_config, options if self._random_gax_error: raise GaxError('error') @@ -1137,6 +1173,7 @@ def 
modify_push_config(self, name, push_config, options=None): def pull(self, name, max_messages, return_immediately, options=None): from google.gax.errors import GaxError + self._pull_called_with = ( name, max_messages, return_immediately, options) if self._random_gax_error: @@ -1151,6 +1188,7 @@ def pull(self, name, max_messages, return_immediately, options=None): def acknowledge(self, name, ack_ids, options=None): from google.gax.errors import GaxError + self._acknowledge_called_with = name, ack_ids, options if self._random_gax_error: raise GaxError('error') @@ -1159,6 +1197,7 @@ def acknowledge(self, name, ack_ids, options=None): def modify_ack_deadline(self, name, ack_ids, deadline, options=None): from google.gax.errors import GaxError + self._modify_ack_deadline_called_with = ( name, ack_ids, deadline, options) if self._random_gax_error: diff --git a/packages/google-cloud-pubsub/unit_tests/test__helpers.py b/packages/google-cloud-pubsub/unit_tests/test__helpers.py index aaf00c651709..0503d68b20b9 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__helpers.py +++ b/packages/google-cloud-pubsub/unit_tests/test__helpers.py @@ -19,6 +19,7 @@ class Test_topic_name_from_path(unittest.TestCase): def _call_fut(self, path, project): from google.cloud.pubsub._helpers import topic_name_from_path + return topic_name_from_path(path, project) def test_w_simple_name(self): @@ -40,6 +41,7 @@ class Test_subscription_name_from_path(unittest.TestCase): def _call_fut(self, path, project): from google.cloud.pubsub._helpers import subscription_name_from_path + return subscription_name_from_path(path, project) def test_w_simple_name(self): diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index e60ebf480684..72847782c4f0 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -19,6 +19,7 @@ def _make_credentials(): import 
google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -41,6 +42,7 @@ class TestConnection(_Base): @staticmethod def _get_target_class(): from google.cloud.pubsub._http import Connection + return Connection def test_default_url(self): @@ -73,6 +75,7 @@ def test_build_api_url_no_extra_query_params(self): def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit + conn = self._make_one() uri = conn.build_api_url('/foo', {'bar': 'baz'}) scheme, netloc, path, qs, _ = urlsplit(uri) @@ -101,6 +104,7 @@ class Test_PublisherAPI(_Base): @staticmethod def _get_target_class(): from google.cloud.pubsub._http import _PublisherAPI + return _PublisherAPI def _make_one(self, *args, **kw): @@ -204,6 +208,7 @@ def test_topic_create(self): def test_topic_create_already_exists(self): from google.cloud.exceptions import Conflict + connection = _Connection() connection._no_response_error = Conflict client = _Client(connection, self.PROJECT) @@ -231,6 +236,7 @@ def test_topic_get_hit(self): def test_topic_get_miss(self): from google.cloud.exceptions import NotFound + connection = _Connection() client = _Client(connection, self.PROJECT) api = self._make_one(client) @@ -256,6 +262,7 @@ def test_topic_delete_hit(self): def test_topic_delete_miss(self): from google.cloud.exceptions import NotFound + connection = _Connection() client = _Client(connection, self.PROJECT) api = self._make_one(client) @@ -269,6 +276,7 @@ def test_topic_delete_miss(self): def test_topic_publish_hit(self): import base64 + PAYLOAD = b'This is the message text' B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') MSGID = 'DEADBEEF' @@ -440,6 +448,7 @@ class Test_SubscriberAPI(_Base): @staticmethod def _get_target_class(): from google.cloud.pubsub._http import _SubscriberAPI + return _SubscriberAPI def _make_one(self, *args, **kw): @@ -645,6 +654,7 @@ def test_subscription_modify_push_config(self): def 
test_subscription_pull_defaults(self): import base64 + PAYLOAD = b'This is the message text' B64 = base64.b64encode(PAYLOAD).decode('ascii') ACK_ID = 'DEADBEEF' @@ -672,6 +682,7 @@ def test_subscription_pull_defaults(self): def test_subscription_pull_explicit(self): import base64 + PAYLOAD = b'This is the message text' B64 = base64.b64encode(PAYLOAD).decode('ascii') ACK_ID = 'DEADBEEF' @@ -743,6 +754,7 @@ class Test_IAMPolicyAPI(_Base): @staticmethod def _get_target_class(): from google.cloud.pubsub._http import _IAMPolicyAPI + return _IAMPolicyAPI def test_ctor(self): @@ -861,22 +873,26 @@ def test_test_iam_permissions_missing_key(self): class Test__transform_messages_base64_empty(unittest.TestCase): def _call_fut(self, messages, transform, key=None): from google.cloud.pubsub._http import _transform_messages_base64 + return _transform_messages_base64(messages, transform, key) def test__transform_messages_base64_empty_message(self): from base64 import b64decode + DATA = [{'message': {}}] self._call_fut(DATA, b64decode, 'message') self.assertEqual(DATA, [{'message': {}}]) def test__transform_messages_base64_empty_data(self): from base64 import b64decode + DATA = [{'message': {'data': b''}}] self._call_fut(DATA, b64decode, 'message') self.assertEqual(DATA, [{'message': {'data': b''}}]) def test__transform_messages_base64_pull(self): from base64 import b64encode + DATA = [{'message': {'data': b'testing 1 2 3'}}] self._call_fut(DATA, b64encode, 'message') self.assertEqual(DATA[0]['message']['data'], @@ -884,6 +900,7 @@ def test__transform_messages_base64_pull(self): def test__transform_messages_base64_publish(self): from base64 import b64encode + DATA = [{'data': b'testing 1 2 3'}] self._call_fut(DATA, b64encode) self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3')) @@ -899,6 +916,7 @@ def __init__(self, *responses): def api_request(self, **kw): from google.cloud.exceptions import NotFound + self._called_with = kw try: response, self._responses = 
self._responses[0], self._responses[1:] diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/unit_tests/test_client.py index 34b4cd4d6b8b..d297327b2c4b 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_client.py +++ b/packages/google-cloud-pubsub/unit_tests/test_client.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -32,6 +33,7 @@ class TestClient(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.client import Client + return Client def _make_one(self, *args, **kw): @@ -181,6 +183,7 @@ def test_subscriber_api_w_gax_and_emulator(self): def test_iam_policy_api(self): from google.cloud.pubsub._http import _IAMPolicyAPI + creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) conn = client._connection = _Connection() diff --git a/packages/google-cloud-pubsub/unit_tests/test_iam.py b/packages/google-cloud-pubsub/unit_tests/test_iam.py index ea2703dd56b6..1d73277c270d 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_iam.py +++ b/packages/google-cloud-pubsub/unit_tests/test_iam.py @@ -20,6 +20,7 @@ class TestPolicy(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.iam import Policy + return Policy def _make_one(self, *args, **kw): @@ -99,6 +100,7 @@ def test_from_api_repr_complete(self): PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) + OWNER1 = 'user:phred@example.com' OWNER2 = 'group:cloud-logs@google.com' EDITOR1 = 'domain:google.com' @@ -158,6 +160,7 @@ def test_to_api_repr_full(self): PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) + OWNER1 = 'group:cloud-logs@google.com' OWNER2 = 'user:phred@example.com' EDITOR1 = 'domain:google.com' diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/unit_tests/test_message.py index 8bcbec6c87b7..b4f6abfbb1b2 100644 --- 
a/packages/google-cloud-pubsub/unit_tests/test_message.py +++ b/packages/google-cloud-pubsub/unit_tests/test_message.py @@ -20,6 +20,7 @@ class TestMessage(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.message import Message + return Message def _make_one(self, *args, **kw): @@ -71,6 +72,7 @@ def test_timestamp_w_timestamp_in_attributes(self): from datetime import datetime from google.cloud._helpers import _RFC3339_MICROS from google.cloud._helpers import UTC + DATA = b'DEADBEEF' MESSAGE_ID = b'12345' TIMESTAMP = '2015-04-10T18:42:27.131956Z' diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/unit_tests/test_subscription.py index 6078a3cc70c8..42fb23d9ae68 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_subscription.py +++ b/packages/google-cloud-pubsub/unit_tests/test_subscription.py @@ -29,6 +29,7 @@ class TestSubscription(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.subscription import Subscription + return Subscription def _make_one(self, *args, **kw): @@ -72,6 +73,7 @@ def test_ctor_w_neither_topic_nor_client(self): def test_from_api_repr_no_topics(self): from google.cloud.pubsub.topic import Topic + resource = {'topic': self.TOPIC_PATH, 'name': self.SUB_PATH, 'ackDeadlineSeconds': self.DEADLINE, @@ -103,6 +105,7 @@ def test_from_api_repr_w_deleted_topic(self): def test_from_api_repr_w_topics_no_topic_match(self): from google.cloud.pubsub.topic import Topic + resource = {'topic': self.TOPIC_PATH, 'name': self.SUB_PATH, 'ackDeadlineSeconds': self.DEADLINE, @@ -148,6 +151,7 @@ def test_full_name_and_path(self): def test_autoack_defaults(self): from google.cloud.pubsub.subscription import AutoAck + client = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client) subscription = self._make_one(self.SUB_NAME, topic) @@ -160,6 +164,7 @@ def test_autoack_defaults(self): def test_autoack_explicit(self): from 
google.cloud.pubsub.subscription import AutoAck + client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) topic = _Topic(self.TOPIC_NAME, client=client1) @@ -326,6 +331,7 @@ def test_modify_push_config_wo_endpoint_w_alternate_client(self): def test_pull_wo_return_immediately_max_messages_w_bound_client(self): from google.cloud.pubsub.message import Message + ACK_ID = 'DEADBEEF' MSG_ID = 'BEADCAFE' PAYLOAD = b'This is the message text' @@ -351,6 +357,7 @@ def test_pull_wo_return_immediately_max_messages_w_bound_client(self): def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): from google.cloud.pubsub.message import Message + ACK_ID = 'DEADBEEF' MSG_ID = 'BEADCAFE' PAYLOAD = b'This is the message text' @@ -457,6 +464,7 @@ def test_get_iam_policy_w_bound_client(self): PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) + OWNER1 = 'user:phred@example.com' OWNER2 = 'group:cloud-logs@google.com' EDITOR1 = 'domain:google.com' @@ -523,6 +531,7 @@ def test_set_iam_policy_w_bound_client(self): PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) + OWNER1 = 'group:cloud-logs@google.com' OWNER2 = 'user:phred@example.com' EDITOR1 = 'domain:google.com' @@ -573,6 +582,7 @@ def test_set_iam_policy_w_bound_client(self): def test_set_iam_policy_w_alternate_client(self): from google.cloud.pubsub.iam import Policy + RESPONSE = {'etag': 'ACAB'} client1 = _Client(project=self.PROJECT) client2 = _Client(project=self.PROJECT) @@ -639,6 +649,7 @@ def subscription_create(self, subscription_path, topic_path, def subscription_get(self, subscription_path): from google.cloud.exceptions import NotFound + self._subscription_got = subscription_path try: return self._subscription_get_response @@ -677,6 +688,7 @@ class TestAutoAck(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.subscription import AutoAck + return AutoAck def _make_one(self, *args, **kw): @@ -792,6 +804,7 @@ def __init__(self, project): def topic(self, name, 
timestamp_messages=False): from google.cloud.pubsub.topic import Topic + return Topic(name, client=self, timestamp_messages=timestamp_messages) diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index f23967081fa5..5009e53a0a89 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -19,6 +19,7 @@ def _make_credentials(): import google.auth.credentials + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -30,6 +31,7 @@ class TestTopic(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.topic import Topic + return Topic def _make_one(self, *args, **kw): @@ -306,6 +308,7 @@ def test_publish_multiple_error(self): def test_subscription(self): from google.cloud.pubsub.subscription import Subscription + client = _Client(project=self.PROJECT) topic = self._make_one(self.TOPIC_NAME, client=client) @@ -447,6 +450,7 @@ def test_get_iam_policy_w_bound_client(self): PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) + OWNER1 = 'user:phred@example.com' OWNER2 = 'group:cloud-logs@google.com' EDITOR1 = 'domain:google.com' @@ -513,6 +517,7 @@ def test_set_iam_policy_w_bound_client(self): PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) + OWNER1 = 'group:cloud-logs@google.com' OWNER2 = 'user:phred@example.com' EDITOR1 = 'domain:google.com' @@ -568,6 +573,7 @@ def test_set_iam_policy_w_bound_client(self): def test_set_iam_policy_w_alternate_client(self): from google.cloud.pubsub.iam import Policy + RESPONSE = {'etag': 'ACAB'} client1 = _Client(project=self.PROJECT) @@ -629,6 +635,7 @@ class TestBatch(unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub.topic import Batch + return Batch def _make_one(self, *args, **kwargs): @@ -782,6 +789,7 @@ def topic_create(self, topic_path): def topic_get(self, topic_path): from google.cloud.exceptions import NotFound + 
self._topic_got = topic_path try: return self._topic_get_response From af57d88e10c0febe1ddfcf17b35658b84a7ae31a Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 18 Jan 2017 13:14:03 -0500 Subject: [PATCH 0067/1197] Update import spacing part 2. --- packages/google-cloud-pubsub/unit_tests/test__gax.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index d7e3409a18e6..cb3bb0ee7c07 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -424,6 +424,7 @@ class Test_SubscriberAPI(_Base, unittest.TestCase): @staticmethod def _get_target_class(): from google.cloud.pubsub._gax import _SubscriberAPI + return _SubscriberAPI def test_ctor(self): From 9a5389306875e10356287eb0a16c01f697b37a72 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 13:13:02 -0800 Subject: [PATCH 0068/1197] Renaming JSONClient -> ClientWithProject. Done via: $ git grep -l JSONClient | xargs sed -i s/JSONClient/ClientWithProject/g Also fixing test b0rken by previous commit. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index a66aebc5db95..60c4a510d1fa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -16,7 +16,7 @@ import os -from google.cloud.client import JSONClient +from google.cloud.client import ClientWithProject from google.cloud.environment_vars import DISABLE_GRPC from google.cloud.pubsub._http import Connection from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI @@ -43,7 +43,7 @@ _USE_GAX = _HAVE_GAX and not _DISABLE_GAX -class Client(JSONClient): +class Client(ClientWithProject): """Client to bundle configuration needed for API requests. :type project: str From 0001481e0a3b5198a94fc2c989f4ee6b238f9488 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 26 Jan 2017 16:45:36 -0800 Subject: [PATCH 0069/1197] Changing Pub / Sub Connection to only accept client. --- .../google/cloud/pubsub/_http.py | 16 ++++------------ .../google/cloud/pubsub/client.py | 11 +++++++---- .../google-cloud-pubsub/unit_tests/test__http.py | 10 +++++----- 3 files changed, 16 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index 583413e313b6..bd438f72948d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -34,12 +34,8 @@ class Connection(_http.JSONConnection): """A connection to Google Cloud Pub/Sub via the JSON REST API. - :type credentials: :class:`oauth2client.client.OAuth2Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - connection. 
- - :type http: :class:`httplib2.Http` or class that defines ``request()``. - :param http: (Optional) HTTP object to make requests. + :type client: :class:`~google.cloud.pubsub.client.Client` + :param client: The client that owns the current connection. """ API_BASE_URL = 'https://' + PUBSUB_API_HOST @@ -51,12 +47,8 @@ class Connection(_http.JSONConnection): API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' """A template for the URL of a particular API call.""" - SCOPE = ('https://www.googleapis.com/auth/pubsub', - 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" - - def __init__(self, credentials=None, http=None): - super(Connection, self).__init__(credentials=credentials, http=http) + def __init__(self, client): + super(Connection, self).__init__(client) emulator_host = os.getenv(PUBSUB_EMULATOR) if emulator_host is None: self.host = self.__class__.API_BASE_URL diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 60c4a510d1fa..f92182d7ef4e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -75,12 +75,15 @@ class Client(ClientWithProject): _subscriber_api = None _iam_policy_api = None + SCOPE = ('https://www.googleapis.com/auth/pubsub', + 'https://www.googleapis.com/auth/cloud-platform') + """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" + def __init__(self, project=None, credentials=None, http=None, use_gax=None): super(Client, self).__init__( project=project, credentials=credentials, http=http) - self._connection = Connection( - credentials=self._credentials, http=self._http) + self._connection = Connection(self) if use_gax is None: self._use_gax = _USE_GAX else: @@ -96,7 +99,7 @@ def publisher_api(self): host=self._connection.host) else: generated = make_gax_publisher_api( - 
credentials=self._connection._credentials) + credentials=self._credentials) self._publisher_api = GAXPublisherAPI(generated, self) else: self._publisher_api = JSONPublisherAPI(self) @@ -112,7 +115,7 @@ def subscriber_api(self): host=self._connection.host) else: generated = make_gax_subscriber_api( - credentials=self._connection._credentials) + credentials=self._credentials) self._subscriber_api = GAXSubscriberAPI(generated, self) else: self._subscriber_api = JSONSubscriberAPI(self) diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 72847782c4f0..69130536608e 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -46,7 +46,7 @@ def _get_target_class(): return Connection def test_default_url(self): - conn = self._make_one() + conn = self._make_one(object()) klass = self._get_target_class() self.assertEqual(conn.api_base_url, klass.API_BASE_URL) @@ -57,14 +57,14 @@ def test_custom_url_from_env(self): fake_environ = {PUBSUB_EMULATOR: HOST} with mock.patch('os.environ', new=fake_environ): - conn = self._make_one() + conn = self._make_one(object()) klass = self._get_target_class() self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) self.assertEqual(conn.api_base_url, 'http://' + HOST) def test_build_api_url_no_extra_query_params(self): - conn = self._make_one() + conn = self._make_one(object()) URI = '/'.join([ conn.API_BASE_URL, conn.API_VERSION, @@ -76,7 +76,7 @@ def test_build_api_url_w_extra_query_params(self): from six.moves.urllib.parse import parse_qsl from six.moves.urllib.parse import urlsplit - conn = self._make_one() + conn = self._make_one(object()) uri = conn.build_api_url('/foo', {'bar': 'baz'}) scheme, netloc, path, qs, _ = urlsplit(uri) self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) @@ -88,7 +88,7 @@ def test_build_api_url_w_extra_query_params(self): def 
test_build_api_url_w_base_url_override(self): base_url1 = 'api-base-url1' base_url2 = 'api-base-url2' - conn = self._make_one() + conn = self._make_one(object()) conn.api_base_url = base_url1 URI = '/'.join([ base_url2, From 68cea8a559b04fd13f49ddc136783c510c0b8271 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 27 Jan 2017 11:11:28 -0800 Subject: [PATCH 0070/1197] Pubsub batch autocommitting. (#2966) * Pubsub batch autocommitting. This PR adds some functionality to the Batch object: * The ability to specify `max_messages` and have the batch automatically call `commit` when the number of messages gets that high. * The ability to specify `max_interval` and have the batch automatically commit when a publish occurs and the batch is at least as old as the specified interval. This is one of two changes requested by the PubSub team. * Addressing comments from @dhermes. * Remove unneeded -lt check @dhermes. * Make INFINITY have a leading underscore. @dhermes --- .../google/cloud/pubsub/topic.py | 45 +++++++++++- .../unit_tests/test_topic.py | 71 +++++++++++++++++++ 2 files changed, 114 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 12c6c3a68450..0dd5b4fda038 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -14,6 +14,8 @@ """Define API Topics.""" +import time + from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _NOW from google.cloud.exceptions import NotFound @@ -408,15 +410,40 @@ class Batch(object): :type topic: :class:`google.cloud.pubsub.topic.Topic` :param topic: the topic being published - :type client: :class:`google.cloud.pubsub.client.Client` :param client: The client to use. 
+ :type client: :class:`google.cloud.pubsub.client.Client` + + :param max_interval: The maximum interval, in seconds, before the batch + will automatically commit. Note that this does not + run a background loop; it just checks when each + message is published. Therefore, this is intended + for situations where messages are published at + reasonably regular intervals. Defaults to infinity + (off). + :type max_interval: float + + :param max_messages: The maximum number of messages to hold in the batch + before automatically commiting. Defaults to infinity + (off). + :type max_messages: float """ - def __init__(self, topic, client): + _INFINITY = float('inf') + + def __init__(self, topic, client, max_interval=_INFINITY, + max_messages=_INFINITY): self.topic = topic self.messages = [] self.message_ids = [] self.client = client + # Set the autocommit rules. If the interval or number of messages + # is exceeded, then the .publish() method will imply a commit. + self._max_interval = max_interval + self._max_messages = max_messages + + # Set the initial starting timestamp (used against the interval). + self._start_timestamp = time.time() + def __enter__(self): return self @@ -441,6 +468,20 @@ def publish(self, message, **attrs): {'data': message, 'attributes': attrs}) + # If too much time has elapsed since the first message + # was added, autocommit. + now = time.time() + if now - self._start_timestamp > self._max_interval: + self.commit() + self._start_timestamp = now + return + + # If the number of messages on the list is greater than the + # maximum allowed, autocommit (with the batch's client). + if len(self.messages) >= self._max_messages: + self.commit() + return + def commit(self, client=None): """Send saved messages as a single API call. 
diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index 5009e53a0a89..f264b4dcd036 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -779,6 +779,77 @@ def test_context_mgr_failure(self): self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2]) self.assertEqual(getattr(api, '_topic_published', self), self) + def test_message_count_autocommit(self): + """Establish that if the batch is assigned to take a maximum + number of messages, that it commits when it reaches that maximum. + """ + client = _Client(project='PROJECT') + topic = _Topic(name='TOPIC') + + # Track commits, but do not perform them. + Batch = self._get_target_class() + with mock.patch.object(Batch, 'commit') as commit: + with self._make_one(topic, client=client, max_messages=5) as batch: + self.assertIsInstance(batch, Batch) + + # Publish four messages and establish that the batch does + # not commit. + for i in range(0, 4): + batch.publish({ + 'attributes': {}, + 'data': 'Batch message %d.' % (i,), + }) + commit.assert_not_called() + + # Publish a fifth message and observe the commit. + batch.publish({ + 'attributes': {}, + 'data': 'The final call to trigger a commit!', + }) + commit.assert_called_once_with() + + # There should be a second commit after the context manager + # exits. + self.assertEqual(commit.call_count, 2) + + @mock.patch('time.time') + def test_message_time_autocommit(self, mock_time): + """Establish that if the batch is sufficiently old, that it commits + the next time it receives a publish. + """ + client = _Client(project='PROJECT') + topic = _Topic(name='TOPIC') + + # Track commits, but do not perform them. 
+ Batch = self._get_target_class() + with mock.patch.object(Batch, 'commit') as commit: + mock_time.return_value = 0.0 + with self._make_one(topic, client=client, max_interval=5) as batch: + self.assertIsInstance(batch, Batch) + + # Publish some messages and establish that the batch does + # not commit. + for i in range(0, 10): + batch.publish({ + 'attributes': {}, + 'data': 'Batch message %d.' % (i,), + }) + commit.assert_not_called() + + # Move time ahead so that this batch is too old. + mock_time.return_value = 10.0 + + # Publish another message and observe the commit. + batch.publish({ + 'attributes': {}, + 'data': 'The final call to trigger a commit!', + }) + commit.assert_called_once_with() + + # There should be a second commit after the context manager + # exits. + self.assertEqual(commit.call_count, 2) + class _FauxPublisherAPI(object): _api_called = 0 From f3d74387ddf4bbf7b31b279ded10773380cf963f Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Wed, 15 Feb 2017 15:34:59 -0500 Subject: [PATCH 0071/1197] Update Beta classifiers to Alpha for specified services. --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3d713235e7a8..51a9171358c5 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', From a9251b9ec77081754352e11f9aac2c04b27f12fc Mon Sep 17 00:00:00 2001 From: Thomas Schultz Date: Thu, 16 Feb 2017 14:19:53 -0500 Subject: [PATCH 0072/1197] Update core dependency to google-cloud-core >= 0.23.0, < 0.24dev. (#3028) * Update core dependency to google-cloud-core >= 0.23.0, < 0.24dev. 
--- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 51a9171358c5..6976796efeaf 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.22.1, < 0.23dev', + 'google-cloud-core >= 0.23.0, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.14.0, < 0.15dev', ] From 62eb535eb15d7eb55c5f360b3dcf3be3fb2abe4c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 22 Feb 2017 07:40:10 -0800 Subject: [PATCH 0073/1197] Adding GCCL header for HTTP APIs. (#3046) --- .../google/cloud/pubsub/__init__.py | 3 ++ .../google/cloud/pubsub/_http.py | 8 +++++ .../unit_tests/test__http.py | 30 +++++++++++++++++++ 3 files changed, 41 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py index 9c0fcb9d45f6..6e6f29644727 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py @@ -24,6 +24,9 @@ """ +from pkg_resources import get_distribution +__version__ = get_distribution('google-cloud-pubsub').version + from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index bd438f72948d..e9538dce22d4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -22,6 +22,8 @@ from google.cloud import _http from google.cloud.environment_vars import PUBSUB_EMULATOR from google.cloud.iterator import HTTPIterator + +from google.cloud.pubsub import __version__ from 
google.cloud.pubsub._helpers import subscription_name_from_path from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic @@ -30,6 +32,8 @@ PUBSUB_API_HOST = 'pubsub.googleapis.com' """Pub / Sub API request host.""" +_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) + class Connection(_http.JSONConnection): """A connection to Google Cloud Pub/Sub via the JSON REST API. @@ -47,6 +51,10 @@ class Connection(_http.JSONConnection): API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' """A template for the URL of a particular API call.""" + _EXTRA_HEADERS = { + _http.CLIENT_INFO_HEADER: _CLIENT_INFO, + } + def __init__(self, client): super(Connection, self).__init__(client) emulator_host = os.getenv(PUBSUB_EMULATOR) diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/unit_tests/test__http.py index 69130536608e..3f94b966dd66 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__http.py +++ b/packages/google-cloud-pubsub/unit_tests/test__http.py @@ -98,6 +98,36 @@ def test_build_api_url_w_base_url_override(self): self.assertEqual(conn.build_api_url('/foo', api_base_url=base_url2), URI) + def test_extra_headers(self): + from google.cloud import _http as base_http + from google.cloud.pubsub import _http as MUT + + http = mock.Mock(spec=['request']) + response = mock.Mock(status=200, spec=['status']) + data = b'brent-spiner' + http.request.return_value = response, data + client = mock.Mock(_http=http, spec=['_http']) + + conn = self._make_one(client) + req_data = 'req-data-boring' + result = conn.api_request( + 'GET', '/rainbow', data=req_data, expect_json=False) + self.assertEqual(result, data) + + expected_headers = { + 'Content-Length': str(len(req_data)), + 'Accept-Encoding': 'gzip', + base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, + 'User-Agent': conn.USER_AGENT, + } + expected_uri = conn.build_api_url('/rainbow') + http.request.assert_called_once_with( + 
body=req_data, + headers=expected_headers, + method='GET', + uri=expected_uri, + ) + class Test_PublisherAPI(_Base): From e6dc12e8a1a28b875b6e49847f24390625d77354 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 22 Feb 2017 11:15:01 -0500 Subject: [PATCH 0074/1197] Expand motivation in 'Subscription.reload' docstring. Closes #3037. --- .../google-cloud-pubsub/google/cloud/pubsub/subscription.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index e76abcf94dd8..19089380cf41 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -220,6 +220,10 @@ def reload(self, client=None): See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get + :attr:`ack_deadline` and :attr:`push_endpoint` might never have + been set locally, or might have been updated by another client. This + method fetches their values from the server. + Example: .. 
literalinclude:: pubsub_snippets.py From 6d8a71891dff871dbfcf13c218a9e751123d805a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 Feb 2017 08:50:43 -0800 Subject: [PATCH 0075/1197] GAPIC Header Consistency: Pubsub (#3052) --- .../google/cloud/pubsub/_gax.py | 11 +++-- packages/google-cloud-pubsub/setup.py | 4 +- .../unit_tests/test__gax.py | 48 ++++++++++++++----- 3 files changed, 46 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 582cb8d0e128..17c93786c442 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -23,8 +23,8 @@ from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from google.protobuf.json_format import MessageToDict -from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PubsubMessage -from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig +from google.cloud.proto.pubsub.v1.pubsub_pb2 import PubsubMessage +from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig from grpc import insecure_channel from grpc import StatusCode @@ -35,6 +35,7 @@ from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound from google.cloud.iterator import GAXIterator +from google.cloud.pubsub import __version__ from google.cloud.pubsub._helpers import subscription_name_from_path from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic @@ -535,7 +536,8 @@ def make_gax_publisher_api(credentials=None, host=None): channel = make_secure_channel( credentials, DEFAULT_USER_AGENT, PublisherClient.SERVICE_ADDRESS) - return PublisherClient(channel=channel) + return PublisherClient( + channel=channel, lib_name='gccl', lib_version=__version__) def make_gax_subscriber_api(credentials=None, host=None): @@ -561,7 +563,8 @@ def make_gax_subscriber_api(credentials=None, 
host=None): channel = make_secure_channel( credentials, DEFAULT_USER_AGENT, SubscriberClient.SERVICE_ADDRESS) - return SubscriberClient(channel=channel) + return SubscriberClient( + channel=channel, lib_name='gccl', lib_version=__version__) def _item_to_topic(iterator, resource): diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6976796efeaf..2c69849b3708 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -52,12 +52,12 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.23.0, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', - 'gapic-google-cloud-pubsub-v1 >= 0.14.0, < 0.15dev', + 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-pubsub', - version='0.22.0', + version='0.23.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index cb3bb0ee7c07..95f908f0c3f4 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -436,8 +436,8 @@ def test_ctor(self): def test_list_subscriptions_no_paging(self): from google.gax import INITIAL_PAGE - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import ( + from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig + from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( Subscription as SubscriptionPB) from google.cloud._testing import _GAXPageIterator from google.cloud.pubsub.client import Client @@ -479,8 +479,8 @@ def test_list_subscriptions_no_paging(self): self.assertIs(options.page_token, INITIAL_PAGE) def test_list_subscriptions_with_paging(self): - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import ( + from google.cloud.proto.pubsub.v1.pubsub_pb2 import 
PushConfig + from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( Subscription as SubscriptionPB) from google.cloud._testing import _GAXPageIterator from google.cloud.pubsub.client import Client @@ -527,7 +527,7 @@ def test_list_subscriptions_with_paging(self): self.assertEqual(options.page_token, TOKEN) def test_subscription_create(self): - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import Subscription + from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) @@ -588,8 +588,8 @@ def test_subscription_create_error(self): self.assertIsNone(options) def test_subscription_get_hit(self): - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.grpc.pubsub.v1.pubsub_pb2 import Subscription + from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig + from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, @@ -932,16 +932,19 @@ def _call_fut(self, *args, **kwargs): return make_gax_publisher_api(*args, **kwargs) def test_live_api(self): + from google.cloud.pubsub import __version__ from google.cloud.pubsub._gax import DEFAULT_USER_AGENT channels = [] + publisher_api_kwargs = [] channel_args = [] channel_obj = object() mock_result = object() host = 'foo.apis.invalid' - def mock_publisher_api(channel): + def mock_publisher_api(channel, **kwargs): channels.append(channel) + publisher_api_kwargs.append(kwargs) return mock_result def make_channel(*args): @@ -959,18 +962,25 @@ def make_channel(*args): result = self._call_fut(creds) self.assertIs(result, mock_result) + self.assertEqual(len(publisher_api_kwargs), 1) + self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) self.assertEqual(channels, 
[channel_obj]) self.assertEqual(channel_args, [(creds, DEFAULT_USER_AGENT, host)]) def test_emulator(self): + from google.cloud.pubsub import __version__ + channels = [] + publisher_api_kwargs = [] mock_result = object() insecure_args = [] mock_channel = object() - def mock_publisher_api(channel): + def mock_publisher_api(channel, **kwargs): channels.append(channel) + publisher_api_kwargs.append(kwargs) return mock_result def mock_insecure_channel(host): @@ -986,6 +996,9 @@ def mock_insecure_channel(host): result = self._call_fut(host=host) self.assertIs(result, mock_result) + self.assertEqual(len(publisher_api_kwargs), 1) + self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) self.assertEqual(channels, [mock_channel]) self.assertEqual(insecure_args, [host]) @@ -999,16 +1012,19 @@ def _call_fut(self, *args, **kwargs): return make_gax_subscriber_api(*args, **kwargs) def test_live_api(self): + from google.cloud.pubsub import __version__ from google.cloud.pubsub._gax import DEFAULT_USER_AGENT channels = [] + subscriber_api_kwargs = [] channel_args = [] channel_obj = object() mock_result = object() host = 'foo.apis.invalid' - def mock_subscriber_api(channel): + def mock_subscriber_api(channel, **kwargs): channels.append(channel) + subscriber_api_kwargs.append(kwargs) return mock_result def make_channel(*args): @@ -1026,18 +1042,25 @@ def make_channel(*args): result = self._call_fut(creds) self.assertIs(result, mock_result) + self.assertEqual(len(subscriber_api_kwargs), 1) + self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) self.assertEqual(channels, [channel_obj]) self.assertEqual(channel_args, [(creds, DEFAULT_USER_AGENT, host)]) def test_emulator(self): + from google.cloud.pubsub import __version__ + channels = [] + subscriber_api_kwargs = [] mock_result = object() insecure_args = [] mock_channel = 
object() - def mock_subscriber_api(channel): + def mock_subscriber_api(channel, **kwargs): channels.append(channel) + subscriber_api_kwargs.append(kwargs) return mock_result def mock_insecure_channel(host): @@ -1053,6 +1076,9 @@ def mock_insecure_channel(host): result = self._call_fut(host=host) self.assertIs(result, mock_result) + self.assertEqual(len(subscriber_api_kwargs), 1) + self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') + self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) self.assertEqual(channels, [mock_channel]) self.assertEqual(insecure_args, [host]) From 1af58ecc6890a1290303e2ee3c4109d3a768aa49 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 24 Feb 2017 11:30:18 -0800 Subject: [PATCH 0076/1197] Upgrading all versions for umbrella release. --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 2c69849b3708..9f74c1e49e71 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,7 +50,7 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.23.0, < 0.24dev', + 'google-cloud-core >= 0.23.1, < 0.24dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] From 18e3e477931d2ed52bdb925546092ce2502916ed Mon Sep 17 00:00:00 2001 From: daspecster Date: Fri, 10 Mar 2017 10:44:03 -0500 Subject: [PATCH 0077/1197] Add timeout for topic_publish() for gRPC side. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py | 7 +++++-- packages/google-cloud-pubsub/unit_tests/test__gax.py | 4 +++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 17c93786c442..599c99ef1d44 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -148,7 +148,7 @@ def topic_delete(self, topic_path): raise NotFound(topic_path) raise - def topic_publish(self, topic_path, messages): + def topic_publish(self, topic_path, messages, timeout=30): """API call: publish one or more messages to a topic See: @@ -161,12 +161,15 @@ def topic_publish(self, topic_path, messages): :type messages: list of dict :param messages: messages to be published. + :type timeout: int + :param timeout: (Optional) Timeout seconds. + :rtype: list of string :returns: list of opaque IDs for published messages. 
:raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not exist """ - options = CallOptions(is_bundling=False) + options = CallOptions(is_bundling=False, timeout=timeout) message_pbs = [_message_pb_from_mapping(message) for message in messages] try: diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/unit_tests/test__gax.py index 95f908f0c3f4..0f55c8fe298c 100644 --- a/packages/google-cloud-pubsub/unit_tests/test__gax.py +++ b/packages/google-cloud-pubsub/unit_tests/test__gax.py @@ -274,12 +274,13 @@ def test_topic_publish_miss_w_attrs_w_bytes_payload(self): PAYLOAD = b'This is the message text' B64 = base64.b64encode(PAYLOAD) MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} + timeout = 120 # 120 seconds or 2 minutes gax_api = _GAXPublisherAPI() client = _Client(self.PROJECT) api = self._make_one(gax_api, client) with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) + api.topic_publish(self.TOPIC_PATH, [MESSAGE], timeout=timeout) topic_path, message_pbs, options = gax_api._publish_called_with self.assertEqual(topic_path, self.TOPIC_PATH) @@ -287,6 +288,7 @@ def test_topic_publish_miss_w_attrs_w_bytes_payload(self): self.assertEqual(message_pb.data, B64) self.assertEqual(message_pb.attributes, {'foo': 'bar'}) self.assertEqual(options.is_bundling, False) + self.assertEqual(options.timeout, timeout) def test_topic_publish_error(self): import base64 From 6c5abcfddc13988a7f59451062b6b0da5f844823 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 20 Mar 2017 08:43:11 -0700 Subject: [PATCH 0078/1197] Add a `max_size` argument to Pub / Sub Batch. 
(#3157) --- .../google/cloud/pubsub/topic.py | 46 +++++++++-- .../unit_tests/test_topic.py | 82 ++++++++++++++----- 2 files changed, 98 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 0dd5b4fda038..b0898328cb60 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -14,10 +14,13 @@ """Define API Topics.""" +import base64 +import json import time from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _NOW +from google.cloud._helpers import _to_bytes from google.cloud.exceptions import NotFound from google.cloud.pubsub._helpers import topic_name_from_path from google.cloud.pubsub.iam import Policy @@ -255,7 +258,7 @@ def publish(self, message, client=None, **attrs): message_ids = api.topic_publish(self.full_name, [message_data]) return message_ids[0] - def batch(self, client=None): + def batch(self, client=None, **kwargs): """Return a batch to use as a context manager. Example: @@ -275,11 +278,15 @@ def batch(self, client=None): :param client: the client to use. If not passed, falls back to the ``client`` stored on the current topic. + :type kwargs: dict + :param kwargs: Keyword arguments passed to the + :class:`~google.cloud.pubsub.topic.Batch` constructor. + :rtype: :class:`Batch` :returns: A batch to use as a context manager. """ client = self._require_client(client) - return Batch(self, client) + return Batch(self, client, **kwargs) def list_subscriptions(self, page_size=None, page_token=None, client=None): """List subscriptions for the project associated with this client. @@ -426,11 +433,16 @@ class Batch(object): before automatically commiting. Defaults to infinity (off). :type max_messages: float + + :param max_size: The maximum size that the serialized messages can be + before automatically commiting. 
Defaults to 9 MB + (slightly less than the API limit). + :type max_size: int """ _INFINITY = float('inf') def __init__(self, topic, client, max_interval=_INFINITY, - max_messages=_INFINITY): + max_messages=_INFINITY, max_size=1024 * 1024 * 9): self.topic = topic self.messages = [] self.message_ids = [] @@ -440,9 +452,12 @@ def __init__(self, topic, client, max_interval=_INFINITY, # is exceeded, then the .publish() method will imply a commit. self._max_interval = max_interval self._max_messages = max_messages + self._max_size = max_size - # Set the initial starting timestamp (used against the interval). + # Set the initial starting timestamp (used against the interval) + # and initial size. self._start_timestamp = time.time() + self._current_size = 0 def __enter__(self): return self @@ -464,16 +479,24 @@ def publish(self, message, **attrs): :param attrs: key-value pairs to send as message attributes """ self.topic._timestamp_message(attrs) - self.messages.append( - {'data': message, - 'attributes': attrs}) + + # Append the message to the list of messages.. + item = {'attributes': attrs, 'data': message} + self.messages.append(item) + + # Determine the approximate size of the message, and increment + # the current batch size appropriately. + encoded = base64.b64encode(_to_bytes(message)) + encoded += base64.b64encode( + json.dumps(attrs, ensure_ascii=False).encode('utf8'), + ) + self._current_size += len(encoded) # If too much time has elapsed since the first message # was added, autocommit. now = time.time() if now - self._start_timestamp > self._max_interval: self.commit() - self._start_timestamp = now return # If the number of messages on the list is greater than the @@ -482,6 +505,11 @@ def publish(self, message, **attrs): self.commit() return + # If we have reached the max size, autocommit. + if self._current_size >= self._max_size: + self.commit() + return + def commit(self, client=None): """Send saved messages as a single API call. 
@@ -499,3 +527,5 @@ def commit(self, client=None): message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) self.message_ids.extend(message_ids) del self.messages[:] + self._start_timestamp = time.time() + self._current_size = 0 diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/unit_tests/test_topic.py index f264b4dcd036..664ede8f12b5 100644 --- a/packages/google-cloud-pubsub/unit_tests/test_topic.py +++ b/packages/google-cloud-pubsub/unit_tests/test_topic.py @@ -779,10 +779,35 @@ def test_context_mgr_failure(self): self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2]) self.assertEqual(getattr(api, '_topic_published', self), self) + def test_batch_messages(self): + # Establish that a batch actually batches messsages in the expected + # way. + client = _Client(project='PROJECT') + topic = _Topic(name='TOPIC') + + # Track commits, but do not perform them. + Batch = self._get_target_class() + with mock.patch.object(Batch, 'commit') as commit: + with self._make_one(topic, client=client) as batch: + self.assertIsInstance(batch, Batch) + + # Publish four messages and establish that the batch does + # not commit. + for i in range(0, 4): + batch.publish('Batch message %d.' % (i,)) + commit.assert_not_called() + + # Check the contents of the batch. + self.assertEqual(batch.messages, [ + {'data': 'Batch message 0.', 'attributes': {}}, + {'data': 'Batch message 1.', 'attributes': {}}, + {'data': 'Batch message 2.', 'attributes': {}}, + {'data': 'Batch message 3.', 'attributes': {}}, + ]) + def test_message_count_autocommit(self): - """Establish that if the batch is assigned to take a maximum - number of messages, that it commits when it reaches that maximum. - """ + # Establish that if the batch is assigned to take a maximum + # number of messages, that it commits when it reaches that maximum. 
client = _Client(project='PROJECT') topic = _Topic(name='TOPIC') @@ -795,17 +820,11 @@ def test_message_count_autocommit(self): # Publish four messages and establish that the batch does # not commit. for i in range(0, 4): - batch.publish({ - 'attributes': {}, - 'data': 'Batch message %d.' % (i,), - }) + batch.publish('Batch message %d.' % (i,)) commit.assert_not_called() # Publish a fifth message and observe the commit. - batch.publish({ - 'attributes': {}, - 'data': 'The final call to trigger a commit!', - }) + batch.publish('The final call to trigger a commit!') commit.assert_called_once_with() # There should be a second commit after the context manager @@ -814,9 +833,8 @@ def test_message_count_autocommit(self): @mock.patch('time.time') def test_message_time_autocommit(self, mock_time): - """Establish that if the batch is sufficiently old, that it commits - the next time it receives a publish. - """ + # Establish that if the batch is sufficiently old, that it commits + # the next time it receives a publish. client = _Client(project='PROJECT') topic = _Topic(name='TOPIC') @@ -830,20 +848,40 @@ def test_message_time_autocommit(self, mock_time): # Publish some messages and establish that the batch does # not commit. for i in range(0, 10): - batch.publish({ - 'attributes': {}, - 'data': 'Batch message %d.' % (i,), - }) + batch.publish('Batch message %d.' % (i,)) commit.assert_not_called() # Move time ahead so that this batch is too old. mock_time.return_value = 10.0 # Publish another message and observe the commit. - batch.publish({ - 'attributes': {}, - 'data': 'The final call to trigger a commit!', - }) + batch.publish('The final call to trigger a commit!') + commit.assert_called_once_with() + + # There should be a second commit after the context manager + # exits. + self.assertEqual(commit.call_count, 2) + + def test_message_size_autocommit(self): + # Establish that if the batch is sufficiently large, that it + # auto-commits. 
+ client = _Client(project='PROJECT') + topic = _Topic(name='TOPIC') + + # Track commits, but do not perform them. + Batch = self._get_target_class() + with mock.patch.object(Batch, 'commit') as commit: + with self._make_one(topic, client=client, max_size=100) as batch: + self.assertIsInstance(batch, Batch) + + # Publish a short (< 100 bytes) message and establish that + # the batch does not commit. + batch.publish(b'foo') + commit.assert_not_called() + + # Publish another message and observe the commit. + batch.publish(u'The final call to trigger a commit, because ' + u'this message is sufficiently long.') commit.assert_called_once_with() # There should be a second commit after the context manager From 658dbd7aca7b59cb1b15d508fa0b0684570e5485 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 20 Mar 2017 09:35:23 -0700 Subject: [PATCH 0079/1197] Add Batch._reset_state for DRY. (#3169) --- .../google/cloud/pubsub/topic.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index b0898328cb60..5490617a3ea5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -444,9 +444,9 @@ class Batch(object): def __init__(self, topic, client, max_interval=_INFINITY, max_messages=_INFINITY, max_size=1024 * 1024 * 9): self.topic = topic + self.client = client self.messages = [] self.message_ids = [] - self.client = client # Set the autocommit rules. If the interval or number of messages # is exceeded, then the .publish() method will imply a commit. @@ -454,10 +454,9 @@ def __init__(self, topic, client, max_interval=_INFINITY, self._max_messages = max_messages self._max_size = max_size - # Set the initial starting timestamp (used against the interval) - # and initial size. 
- self._start_timestamp = time.time() - self._current_size = 0 + # Set up the initial state, initializing messages, the starting + # timestamp, etc. + self._reset_state() def __enter__(self): return self @@ -469,6 +468,13 @@ def __exit__(self, exc_type, exc_val, exc_tb): def __iter__(self): return iter(self.message_ids) + def _reset_state(self): + """Reset the state of this batch.""" + + del self.messages[:] + self._start_timestamp = time.time() + self._current_size = 0 + def publish(self, message, **attrs): """Emulate publishing a message, but save it. @@ -526,6 +532,4 @@ def commit(self, client=None): api = client.publisher_api message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) self.message_ids.extend(message_ids) - del self.messages[:] - self._start_timestamp = time.time() - self._current_size = 0 + self._reset_state() From aec801c716f75609027f20d9e79ffc9e186a7128 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 23 Mar 2017 14:49:26 -0700 Subject: [PATCH 0080/1197] CI Rehash (#3146) --- packages/google-cloud-pubsub/.flake8 | 6 + packages/google-cloud-pubsub/LICENSE | 202 +++++++++++++ packages/google-cloud-pubsub/MANIFEST.in | 8 +- .../google/cloud/pubsub/__init__.py | 2 + packages/google-cloud-pubsub/nox.py | 87 ++++++ packages/google-cloud-pubsub/setup.py | 2 +- .../google-cloud-pubsub/tests/__init__.py | 0 packages/google-cloud-pubsub/tests/system.py | 285 ++++++++++++++++++ .../{unit_tests => tests/unit}/__init__.py | 0 .../{unit_tests => tests/unit}/test__gax.py | 0 .../unit}/test__helpers.py | 0 .../{unit_tests => tests/unit}/test__http.py | 0 .../{unit_tests => tests/unit}/test_client.py | 0 .../{unit_tests => tests/unit}/test_iam.py | 0 .../unit}/test_message.py | 0 .../unit}/test_subscription.py | 0 .../{unit_tests => tests/unit}/test_topic.py | 0 packages/google-cloud-pubsub/tox.ini | 35 --- 18 files changed, 587 insertions(+), 40 deletions(-) create mode 100644 packages/google-cloud-pubsub/.flake8 create mode 100644 
packages/google-cloud-pubsub/LICENSE create mode 100644 packages/google-cloud-pubsub/nox.py create mode 100644 packages/google-cloud-pubsub/tests/__init__.py create mode 100644 packages/google-cloud-pubsub/tests/system.py rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/__init__.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test__gax.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test__helpers.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test__http.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test_client.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test_iam.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test_message.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test_subscription.py (100%) rename packages/google-cloud-pubsub/{unit_tests => tests/unit}/test_topic.py (100%) delete mode 100644 packages/google-cloud-pubsub/tox.ini diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 new file mode 100644 index 000000000000..25168dc87605 --- /dev/null +++ b/packages/google-cloud-pubsub/.flake8 @@ -0,0 +1,6 @@ +[flake8] +exclude = + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-pubsub/LICENSE b/packages/google-cloud-pubsub/LICENSE new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/packages/google-cloud-pubsub/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index cb3a2b9ef4fa..9f7100c9528a 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,4 +1,4 @@ -include README.rst -graft google -graft unit_tests -global-exclude *.pyc +include README.rst LICENSE +recursive-include google *.json *.proto +recursive-include unit_tests * +global-exclude *.pyc __pycache__ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py index 6e6f29644727..070e8243bf2b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py @@ -30,3 +30,5 @@ from google.cloud.pubsub.client import Client from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic + +__all__ = ['__version__', 'Client', 'Subscription', 'Topic'] diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py new file mode 100644 index 000000000000..1f0ae6130bf7 --- /dev/null +++ b/packages/google-cloud-pubsub/nox.py @@ -0,0 +1,87 @@ +# Copyright 2016 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import os + +import nox + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) +def unit_tests(session, python_version): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package in-place. + session.install('mock', 'pytest', 'pytest-cov', '../core/') + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run('py.test', '--quiet', + '--cov=google.cloud.pubsub', '--cov=tests.unit', '--cov-append', + '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', + 'tests/unit', + ) + + +@nox.session +@nox.parametrize('python_version', ['2.7', '3.6']) +def system_tests(session, python_version): + """Run the system test suite.""" + + # Sanity check: Only run system tests if the environment variable is set. + if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): + return + + # Run the system tests against latest Python 2 and Python 3 only. + session.interpreter = 'python%s' % python_version + + # Install all test dependencies, then install this package into the + # virutalenv's dist-packages. + session.install('mock', 'pytest', + '../core/', '../test_utils/') + session.install('.') + + # Run py.test against the system tests. + session.run('py.test', '--quiet', 'tests/system.py') + + +@nox.session +def lint(session): + """Run flake8. + + Returns a failure if flake8 finds linting errors or sufficiently + serious code quality issues. + """ + session.interpreter = 'python3.6' + session.install('flake8') + session.install('.') + session.run('flake8', 'google/cloud/pubsub') + + +@nox.session +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9f74c1e49e71..655385112b7b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -64,7 +64,7 @@ 'google', 'google.cloud', ], - packages=find_packages(), + packages=find_packages(exclude=('unit_tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py new file mode 100644 index 000000000000..c1fad735bbf9 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/system.py @@ -0,0 +1,285 @@ +# Copyright 2015 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import unittest + +from google.gax.errors import GaxError +from google.gax.grpc import exc_to_code +from grpc import StatusCode +import httplib2 + +from google.cloud.environment_vars import PUBSUB_EMULATOR +from google.cloud.pubsub import client + +from test_utils.retry import RetryInstanceState +from test_utils.retry import RetryResult +from test_utils.retry import RetryErrors +from test_utils.system import EmulatorCreds +from test_utils.system import unique_resource_id + + +def _unavailable(exc): + return exc_to_code(exc) == StatusCode.UNAVAILABLE + + +retry_unavailable = RetryErrors(GaxError, _unavailable) + + +class Config(object): + """Run-time configuration to be modified at set-up. + + This is a mutable stand-in to allow test set-up to modify + global state. + """ + CLIENT = None + IN_EMULATOR = False + + +def setUpModule(): + Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None + if Config.IN_EMULATOR: + credentials = EmulatorCreds() + http = httplib2.Http() # Un-authorized. + Config.CLIENT = client.Client(credentials=credentials, + http=http) + else: + Config.CLIENT = client.Client() + + +def _consume_topics(pubsub_client): + """Consume entire iterator. + + :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` + :param pubsub_client: Client to use to retrieve topics. + + :rtype: list + :returns: List of all topics encountered. + """ + return list(pubsub_client.list_topics()) + + +def _consume_subscriptions(topic): + """Consume entire iterator. + + :type topic: :class:`~google.cloud.pubsub.topic.Topic` + :param topic: Topic to use to retrieve subscriptions. + + :rtype: list + :returns: List of all subscriptions encountered. 
+ """ + return list(topic.list_subscriptions()) + + +class TestPubsub(unittest.TestCase): + + def setUp(self): + self.to_delete = [] + + def tearDown(self): + for doomed in self.to_delete: + doomed.delete() + + def test_create_topic(self): + topic_name = 'a-new-topic' + unique_resource_id('-') + topic = Config.CLIENT.topic(topic_name) + self.assertFalse(topic.exists()) + topic.create() + self.to_delete.append(topic) + self.assertTrue(topic.exists()) + self.assertEqual(topic.name, topic_name) + + def test_list_topics(self): + before = _consume_topics(Config.CLIENT) + topics_to_create = [ + 'new' + unique_resource_id(), + 'newer' + unique_resource_id(), + 'newest' + unique_resource_id(), + ] + for topic_name in topics_to_create: + topic = Config.CLIENT.topic(topic_name) + topic.create() + self.to_delete.append(topic) + + # Retrieve the topics. + def _all_created(result): + return len(result) == len(before) + len(topics_to_create) + + retry = RetryResult(_all_created) + after = retry(_consume_topics)(Config.CLIENT) + + created = [topic for topic in after + if topic.name in topics_to_create and + topic.project == Config.CLIENT.project] + self.assertEqual(len(created), len(topics_to_create)) + + def test_create_subscription_defaults(self): + TOPIC_NAME = 'create-sub-def' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME) + self.assertFalse(topic.exists()) + topic.create() + self.to_delete.append(topic) + SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') + subscription = topic.subscription(SUBSCRIPTION_NAME) + self.assertFalse(subscription.exists()) + subscription.create() + self.to_delete.append(subscription) + self.assertTrue(subscription.exists()) + self.assertEqual(subscription.name, SUBSCRIPTION_NAME) + self.assertIs(subscription.topic, topic) + + def test_create_subscription_w_ack_deadline(self): + TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME) + self.assertFalse(topic.exists()) + 
topic.create() + self.to_delete.append(topic) + SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() + subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120) + self.assertFalse(subscription.exists()) + subscription.create() + self.to_delete.append(subscription) + self.assertTrue(subscription.exists()) + self.assertEqual(subscription.name, SUBSCRIPTION_NAME) + self.assertEqual(subscription.ack_deadline, 120) + self.assertIs(subscription.topic, topic) + + def test_list_subscriptions(self): + TOPIC_NAME = 'list-sub' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME) + topic.create() + self.to_delete.append(topic) + empty = _consume_subscriptions(topic) + self.assertEqual(len(empty), 0) + subscriptions_to_create = [ + 'new' + unique_resource_id(), + 'newer' + unique_resource_id(), + 'newest' + unique_resource_id(), + ] + for subscription_name in subscriptions_to_create: + subscription = topic.subscription(subscription_name) + subscription.create() + self.to_delete.append(subscription) + + # Retrieve the subscriptions. 
+ def _all_created(result): + return len(result) == len(subscriptions_to_create) + + retry = RetryResult(_all_created) + all_subscriptions = retry(_consume_subscriptions)(topic) + + created = [subscription for subscription in all_subscriptions + if subscription.name in subscriptions_to_create] + self.assertEqual(len(created), len(subscriptions_to_create)) + + def test_message_pull_mode_e2e(self): + import operator + TOPIC_NAME = 'message-e2e' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME, + timestamp_messages=True) + self.assertFalse(topic.exists()) + topic.create() + self.to_delete.append(topic) + SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') + subscription = topic.subscription(SUBSCRIPTION_NAME) + self.assertFalse(subscription.exists()) + subscription.create() + self.to_delete.append(subscription) + + MESSAGE_1 = b'MESSAGE ONE' + MESSAGE_2 = b'MESSAGE ONE' + EXTRA_1 = 'EXTRA 1' + EXTRA_2 = 'EXTRA 2' + topic.publish(MESSAGE_1, extra=EXTRA_1) + topic.publish(MESSAGE_2, extra=EXTRA_2) + + class Hoover(object): + + def __init__(self): + self.received = [] + + def done(self, *dummy): + return len(self.received) == 2 + + def suction(self): + with subscription.auto_ack(max_messages=2) as ack: + self.received.extend(ack.values()) + + hoover = Hoover() + retry = RetryInstanceState(hoover.done) + retry(hoover.suction)() + + message1, message2 = sorted(hoover.received, + key=operator.attrgetter('timestamp')) + + self.assertEqual(message1.data, MESSAGE_1) + self.assertEqual(message1.attributes['extra'], EXTRA_1) + self.assertIsNotNone(message1.service_timestamp) + + self.assertEqual(message2.data, MESSAGE_2) + self.assertEqual(message2.attributes['extra'], EXTRA_2) + self.assertIsNotNone(message2.service_timestamp) + + def _maybe_emulator_skip(self): + # NOTE: This method is necessary because ``Config.IN_EMULATOR`` + # is set at runtime rather than import time, which means we + # can't use the @unittest.skipIf decorator. 
+ if Config.IN_EMULATOR: + self.skipTest('IAM not supported by Pub/Sub emulator') + + def test_topic_iam_policy(self): + from google.cloud.pubsub.iam import PUBSUB_TOPICS_GET_IAM_POLICY + self._maybe_emulator_skip() + topic_name = 'test-topic-iam-policy-topic' + unique_resource_id('-') + topic = Config.CLIENT.topic(topic_name) + topic.create() + + # Retry / backoff up to 7 seconds (1 + 2 + 4) + retry = RetryResult(lambda result: result, max_tries=4) + retry(topic.exists)() + self.to_delete.append(topic) + + if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]): + policy = topic.get_iam_policy() + policy.viewers.add(policy.user('jjg@google.com')) + new_policy = topic.set_iam_policy(policy) + self.assertEqual(new_policy.viewers, policy.viewers) + + def test_subscription_iam_policy(self): + from google.cloud.pubsub.iam import PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY + self._maybe_emulator_skip() + topic_name = 'test-sub-iam-policy-topic' + unique_resource_id('-') + topic = Config.CLIENT.topic(topic_name) + topic.create() + + # Retry / backoff up to 7 seconds (1 + 2 + 4) + retry = RetryResult(lambda result: result, max_tries=4) + retry(topic.exists)() + self.to_delete.append(topic) + + SUB_NAME = 'test-sub-iam-policy-sub' + unique_resource_id('-') + subscription = topic.subscription(SUB_NAME) + subscription.create() + + # Retry / backoff up to 7 seconds (1 + 2 + 4) + retry = RetryResult(lambda result: result, max_tries=4) + retry(subscription.exists)() + self.to_delete.insert(0, subscription) + + if subscription.check_iam_permissions( + [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]): + policy = subscription.get_iam_policy() + policy.viewers.add(policy.user('jjg@google.com')) + new_policy = subscription.set_iam_policy(policy) + self.assertEqual(new_policy.viewers, policy.viewers) diff --git a/packages/google-cloud-pubsub/unit_tests/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py similarity index 100% rename from 
packages/google-cloud-pubsub/unit_tests/__init__.py rename to packages/google-cloud-pubsub/tests/unit/__init__.py diff --git a/packages/google-cloud-pubsub/unit_tests/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test__gax.py rename to packages/google-cloud-pubsub/tests/unit/test__gax.py diff --git a/packages/google-cloud-pubsub/unit_tests/test__helpers.py b/packages/google-cloud-pubsub/tests/unit/test__helpers.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test__helpers.py rename to packages/google-cloud-pubsub/tests/unit/test__helpers.py diff --git a/packages/google-cloud-pubsub/unit_tests/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test__http.py rename to packages/google-cloud-pubsub/tests/unit/test__http.py diff --git a/packages/google-cloud-pubsub/unit_tests/test_client.py b/packages/google-cloud-pubsub/tests/unit/test_client.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test_client.py rename to packages/google-cloud-pubsub/tests/unit/test_client.py diff --git a/packages/google-cloud-pubsub/unit_tests/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test_iam.py rename to packages/google-cloud-pubsub/tests/unit/test_iam.py diff --git a/packages/google-cloud-pubsub/unit_tests/test_message.py b/packages/google-cloud-pubsub/tests/unit/test_message.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test_message.py rename to packages/google-cloud-pubsub/tests/unit/test_message.py diff --git a/packages/google-cloud-pubsub/unit_tests/test_subscription.py b/packages/google-cloud-pubsub/tests/unit/test_subscription.py similarity index 100% rename from 
packages/google-cloud-pubsub/unit_tests/test_subscription.py rename to packages/google-cloud-pubsub/tests/unit/test_subscription.py diff --git a/packages/google-cloud-pubsub/unit_tests/test_topic.py b/packages/google-cloud-pubsub/tests/unit/test_topic.py similarity index 100% rename from packages/google-cloud-pubsub/unit_tests/test_topic.py rename to packages/google-cloud-pubsub/tests/unit/test_topic.py diff --git a/packages/google-cloud-pubsub/tox.ini b/packages/google-cloud-pubsub/tox.ini deleted file mode 100644 index 001886c51e9a..000000000000 --- a/packages/google-cloud-pubsub/tox.ini +++ /dev/null @@ -1,35 +0,0 @@ -[tox] -envlist = - py27,py34,py35,cover - -[testing] -localdeps = - pip install --quiet --upgrade {toxinidir}/../core -deps = - {toxinidir}/../core - mock - pytest -covercmd = - py.test --quiet \ - --cov=google.cloud.pubsub \ - --cov=unit_tests \ - --cov-config {toxinidir}/.coveragerc \ - unit_tests - -[testenv] -commands = - {[testing]localdeps} - py.test --quiet {posargs} unit_tests -deps = - {[testing]deps} - -[testenv:cover] -basepython = - python2.7 -commands = - {[testing]localdeps} - {[testing]covercmd} -deps = - {[testenv]deps} - coverage - pytest-cov From 39d97a14d522ebf515e8874b8baebfb5689d0d80 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Mar 2017 10:20:16 -0700 Subject: [PATCH 0081/1197] Fixing up some format strings in nox configs. Using `STRING_TEMPLATE % VARIABLE` can introduce hard-to-find bugs if `VARIABLE` is expected to be a string but ends up being a tuple. Instead of using percent formatting, just using `.format`. Also making tweaks to `get_target_packages` to make some path manipulation / checks OS-independent. 
--- packages/google-cloud-pubsub/nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 1f0ae6130bf7..13d73c4a2893 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -25,7 +25,7 @@ def unit_tests(session, python_version): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. - session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', '../core/') @@ -49,7 +49,7 @@ def system_tests(session, python_version): return # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python%s' % python_version + session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package into the # virutalenv's dist-packages. From 2839a524434fec6678cadad5c4e2cd5ab4a8ec9b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 30 Mar 2017 08:43:22 -0700 Subject: [PATCH 0082/1197] Renaming http argument(s) as _http. 
(#3235) --- .../google/cloud/pubsub/client.py | 52 ++++++++++--------- packages/google-cloud-pubsub/tests/system.py | 2 +- .../tests/unit/test__gax.py | 12 ++--- .../tests/unit/test_client.py | 18 +++---- .../tests/unit/test_topic.py | 6 +-- 5 files changed, 47 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index f92182d7ef4e..4e5b76bd118c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -30,17 +30,17 @@ from google.cloud.pubsub._gax import make_gax_publisher_api from google.cloud.pubsub._gax import make_gax_subscriber_api except ImportError: # pragma: NO COVER - _HAVE_GAX = False + _HAVE_GRPC = False GAXPublisherAPI = None GAXSubscriberAPI = None make_gax_publisher_api = None make_gax_subscriber_api = None else: - _HAVE_GAX = True + _HAVE_GRPC = True -_DISABLE_GAX = os.getenv(DISABLE_GRPC, False) -_USE_GAX = _HAVE_GAX and not _DISABLE_GAX +_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC class Client(ClientWithProject): @@ -53,22 +53,26 @@ class Client(ClientWithProject): :type credentials: :class:`~google.auth.credentials.Credentials` :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``http`` object is + client. If not passed (and if no ``_http`` object is passed), falls back to the default inferred from the environment. - :type http: :class:`~httplib2.Http` - :param http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an - ``http`` object is created that is bound to the - ``credentials`` for the current object. - - :type use_gax: bool - :param use_gax: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. 
If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` environment - variable + :type _http: :class:`~httplib2.Http` + :param _http: (Optional) HTTP object to make requests. Can be any object + that defines ``request()`` with the same interface as + :meth:`~httplib2.Http.request`. If not passed, an + ``_http`` object is created that is bound to the + ``credentials`` for the current object. + This parameter should be considered private, and could + change in the future. + + :type _use_grpc: bool + :param _use_grpc: (Optional) Explicitly specifies whether + to use the gRPC transport (via GAX) or HTTP. If unset, + falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` + environment variable. + This parameter should be considered private, and could + change in the future. """ _publisher_api = None @@ -80,20 +84,20 @@ class Client(ClientWithProject): """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" def __init__(self, project=None, credentials=None, - http=None, use_gax=None): + _http=None, _use_grpc=None): super(Client, self).__init__( - project=project, credentials=credentials, http=http) + project=project, credentials=credentials, _http=_http) self._connection = Connection(self) - if use_gax is None: - self._use_gax = _USE_GAX + if _use_grpc is None: + self._use_grpc = _USE_GRPC else: - self._use_gax = use_gax + self._use_grpc = _use_grpc @property def publisher_api(self): """Helper for publisher-related API calls.""" if self._publisher_api is None: - if self._use_gax: + if self._use_grpc: if self._connection.in_emulator: generated = make_gax_publisher_api( host=self._connection.host) @@ -109,7 +113,7 @@ def publisher_api(self): def subscriber_api(self): """Helper for subscriber-related API calls.""" if self._subscriber_api is None: - if self._use_gax: + if self._use_grpc: if self._connection.in_emulator: generated = make_gax_subscriber_api( host=self._connection.host) diff --git a/packages/google-cloud-pubsub/tests/system.py 
b/packages/google-cloud-pubsub/tests/system.py index c1fad735bbf9..54c4ba7a895c 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -53,7 +53,7 @@ def setUpModule(): credentials = EmulatorCreds() http = httplib2.Http() # Un-authorized. Config.CLIENT = client.Client(credentials=credentials, - http=http) + _http=http) else: Config.CLIENT = client.Client() diff --git a/packages/google-cloud-pubsub/tests/unit/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py index 0f55c8fe298c..8a41d3ff70f8 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__gax.py +++ b/packages/google-cloud-pubsub/tests/unit/test__gax.py @@ -21,9 +21,9 @@ import google.cloud.pubsub._gax # pylint: enable=unused-import except ImportError: # pragma: NO COVER - _HAVE_GAX = False + _HAVE_GRPC = False else: - _HAVE_GAX = True + _HAVE_GRPC = True from google.cloud._testing import _GAXBaseAPI @@ -50,7 +50,7 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_PublisherAPI(_Base, unittest.TestCase): @staticmethod @@ -418,7 +418,7 @@ def test_topic_list_subscriptions_error(self): self.assertIs(options.page_token, INITIAL_PAGE) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_SubscriberAPI(_Base, unittest.TestCase): PUSH_ENDPOINT = 'https://api.example.com/push' @@ -925,7 +925,7 @@ def test_subscription_modify_ack_deadline_error(self): self.assertIsNone(options) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') +@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_publisher_api(_Base, unittest.TestCase): def _call_fut(self, *args, **kwargs): @@ -1005,7 +1005,7 @@ def mock_insecure_channel(host): self.assertEqual(insecure_args, [host]) -@unittest.skipUnless(_HAVE_GAX, 'No gax-python') 
+@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_subscriber_api(_Base, unittest.TestCase): def _call_fut(self, *args, **kwargs): diff --git a/packages/google-cloud-pubsub/tests/unit/test_client.py b/packages/google-cloud-pubsub/tests/unit/test_client.py index d297327b2c4b..d3aa25378b38 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_client.py +++ b/packages/google-cloud-pubsub/tests/unit/test_client.py @@ -46,7 +46,7 @@ def test_publisher_api_wo_gax(self): client = self._make_one( project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) conn = client._connection = _Connection() api = client.publisher_api @@ -61,12 +61,12 @@ def test_no_gax_ctor(self): from google.cloud.pubsub._http import _PublisherAPI creds = _make_credentials() - with mock.patch('google.cloud.pubsub.client._USE_GAX', + with mock.patch('google.cloud.pubsub.client._USE_GRPC', new=True): client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) - self.assertFalse(client._use_gax) + self.assertFalse(client._use_grpc) api = client.publisher_api self.assertIsInstance(api, _PublisherAPI) @@ -89,7 +89,7 @@ def __init__(self, _wrapped, client): creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, - use_gax=True) + _use_grpc=True) client._connection.in_emulator = emulator patch = mock.patch.multiple( @@ -123,7 +123,7 @@ def test_subscriber_api_wo_gax(self): creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) conn = client._connection = _Connection() api = client.subscriber_api @@ -153,7 +153,7 @@ def __init__(self, _wrapped, client): creds = _make_credentials() client = self._make_one( project=self.PROJECT, credentials=creds, - use_gax=True) + _use_grpc=True) client._connection.in_emulator = emulator patch = mock.patch.multiple( @@ -261,7 +261,7 @@ def test_list_subscriptions_no_paging(self): 
SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) returned = {'subscriptions': [SUB_INFO]} client._connection = _Connection(returned) @@ -299,7 +299,7 @@ def test_list_subscriptions_with_paging(self): SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds, - use_gax=False) + _use_grpc=False) # Set up the mock response. ACK_DEADLINE = 42 diff --git a/packages/google-cloud-pubsub/tests/unit/test_topic.py b/packages/google-cloud-pubsub/tests/unit/test_topic.py index 664ede8f12b5..01864fa24fdd 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_topic.py +++ b/packages/google-cloud-pubsub/tests/unit/test_topic.py @@ -324,7 +324,7 @@ def test_list_subscriptions_no_paging(self): from google.cloud.pubsub.subscription import Subscription client = Client(project=self.PROJECT, - credentials=_make_credentials(), use_gax=False) + credentials=_make_credentials(), _use_grpc=False) SUB_NAME_1 = 'subscription_1' SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( @@ -374,7 +374,7 @@ def test_list_subscriptions_with_paging(self): from google.cloud.pubsub.subscription import Subscription client = Client(project=self.PROJECT, - credentials=_make_credentials(), use_gax=False) + credentials=_make_credentials(), _use_grpc=False) SUB_NAME_1 = 'subscription_1' SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( @@ -424,7 +424,7 @@ def test_list_subscriptions_missing_key(self): from google.cloud.pubsub.client import Client client = Client(project=self.PROJECT, - credentials=_make_credentials(), use_gax=False) + credentials=_make_credentials(), _use_grpc=False) client._connection = _Connection({}) topic = self._make_one(self.TOPIC_NAME, client=client) From 0a49f6f19ecaf87b2c606dc06823f76933c449b1 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 30 Mar 
2017 14:45:10 -0700 Subject: [PATCH 0083/1197] GA and Beta Promotions (#3245) * Make clients explicitly unpickleable. Closes #3211. * Make clients explicitly unpickleable. Closes #3211. * Add GA designator, add 1.0 version numbers. * Version changes. Eep. * Oops, Speech is still alpha. * 0.24.0, not 0.24.1 * Remove double __getstate__ goof. * Version changes. Eep. * Oops, Speech is still alpha. * Remove double __getstate__ goof. * Adding 3.6 classifier where missing and fixing bad versions. Done via "git grep '0\.24'" and "git grep '0\.23'". * Fix Noxfiles forlocal packages. * Fixing copy-pasta issue in error reporting nox config. Also fixing bad indent in same file. * Depend on stable logging in error reporting package. * Fixing lint errors in error_reporting. These were masked because error_reporting's lint nox session was linting the datastore codebase. This also means that the error reporting package has gained __all__. * Fixing a syntax error in nox config for logging. Also fixing an indent error while I was in there. * Revert "Add docs for 'result_index' usage and a system test." This reverts commit b5742aa160f604ec7cd81873ad24ac9aa75e548d. * Fixing docs nox session for umbrella package. Two issues: - error_reporting came BEFORE logging (which means it would try to pull in a logging dep from PyPI that doesn't exist) - dns was NOT in the list of local packages * Updating upper bound on logging in error_reporting. * Un-revert typo fix. 
--- packages/google-cloud-pubsub/nox.py | 11 +++++++---- packages/google-cloud-pubsub/setup.py | 5 +++-- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 13d73c4a2893..c9300982d6b8 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -19,6 +19,9 @@ import nox +LOCAL_DEPS = ('../core/',) + + @nox.session @nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) def unit_tests(session, python_version): @@ -28,7 +31,7 @@ def unit_tests(session, python_version): session.interpreter = 'python{}'.format(python_version) # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', '../core/') + session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') # Run py.test against the unit tests. @@ -53,8 +56,8 @@ def system_tests(session, python_version): # Install all test dependencies, then install this package into the # virutalenv's dist-packages. - session.install('mock', 'pytest', - '../core/', '../test_utils/') + session.install('mock', 'pytest', *LOCAL_DEPS) + session.install('../test_utils/') session.install('.') # Run py.test against the system tests. @@ -69,7 +72,7 @@ def lint(session): serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/pubsub') diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 655385112b7b..d18e07fcae65 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -44,20 +44,21 @@ 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', 'Topic :: Internet', ], } REQUIREMENTS = [ - 'google-cloud-core >= 0.23.1, < 0.24dev', + 'google-cloud-core >= 0.24.0, < 0.25dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-pubsub', - version='0.23.0', + version='0.24.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 13f94130816ac44b31a47e7bc75aa26b230c9646 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 22 Mar 2017 15:14:35 -0400 Subject: [PATCH 0084/1197] Factor common IAM policy bits into 'google.cloud.iam'. Pubsub-specific roles, permissions left behind in 'google.cloud.pubsub.iam'. 'google.cloud.pubsub.iam.Policy' subclasses the core one, extending it to deal with the pubsub-specific roles. 
--- .../google/cloud/pubsub/iam.py | 154 ++++-------------- .../tests/unit/test_iam.py | 130 +++------------ 2 files changed, 56 insertions(+), 228 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index 53c0f36579f3..b650f82486e0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -17,16 +17,10 @@ https://cloud.google.com/pubsub/access_control#permissions """ -# Generic IAM roles - -OWNER_ROLE = 'roles/owner' -"""Generic role implying all rights to an object.""" - -EDITOR_ROLE = 'roles/editor' -"""Generic role implying rights to modify an object.""" - -VIEWER_ROLE = 'roles/viewer' -"""Generic role implying rights to access an object.""" +from google.cloud.iam import OWNER_ROLE +from google.cloud.iam import EDITOR_ROLE +from google.cloud.iam import VIEWER_ROLE +from google.cloud.iam import Policy as _BasePolicy # Pubsub-specific IAM roles @@ -94,8 +88,8 @@ """Permission: update subscriptions.""" -class Policy(object): - """Combined IAM Policy / Bindings. +class Policy(_BasePolicy): + """IAM Policy / Bindings. See: https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy @@ -107,125 +101,44 @@ class Policy(object): :type version: int :param version: unique version of the policy """ + _OWNER_ROLES = (OWNER_ROLE, PUBSUB_ADMIN_ROLE) + _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) + _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) + def __init__(self, etag=None, version=None): - self.etag = etag - self.version = version - self.owners = set() - self.editors = set() - self.viewers = set() + super(Policy, self).__init__(etag, version) self.publishers = set() self.subscribers = set() - @staticmethod - def user(email): - """Factory method for a user member. + def _bind_custom_role(self, role, members): + """Bind an API-specific role to members. 
- :type email: str - :param email: E-mail for this particular user. + Helper for :meth:`from_api_repr`. - :rtype: str - :returns: A member string corresponding to the given user. - """ - return 'user:%s' % (email,) - - @staticmethod - def service_account(email): - """Factory method for a service account member. + :type role: str + :param role: role to bind. - :type email: str - :param email: E-mail for this particular service account. + :type members: set of str + :param members: member IDs to be bound to the role. - :rtype: str - :returns: A member string corresponding to the given service account. + Subclasses may override. """ - return 'serviceAccount:%s' % (email,) - - @staticmethod - def group(email): - """Factory method for a group member. + if role == PUBSUB_PUBLISHER_ROLE: + self.publishers |= members + elif role == PUBSUB_SUBSCRIBER_ROLE: + self.subscribers |= members + else: + super(Policy, self)._bind_custom_role(role, members) - :type email: str - :param email: An id or e-mail for this particular group. - - :rtype: str - :returns: A member string corresponding to the given group. - """ - return 'group:%s' % (email,) - - @staticmethod - def domain(domain): - """Factory method for a domain member. - - :type domain: str - :param domain: The domain for this member. - - :rtype: str - :returns: A member string corresponding to the given domain. - """ - return 'domain:%s' % (domain,) + def _role_bindings(self): + """Enumerate members bound to roles for the policy. - @staticmethod - def all_users(): - """Factory method for a member representing all users. + Helper for :meth:`to_api_repr`. - :rtype: str - :returns: A member string representing all users. + :rtype: list of mapping + :returns: zero or more mappings describing roles / members bound by + the policy. """ - return 'allUsers' - - @staticmethod - def authenticated_users(): - """Factory method for a member representing all authenticated users. 
- - :rtype: str - :returns: A member string representing all authenticated users. - """ - return 'allAuthenticatedUsers' - - @classmethod - def from_api_repr(cls, resource): - """Create a policy from the resource returned from the API. - - :type resource: dict - :param resource: resource returned from the ``getIamPolicy`` API. - - :rtype: :class:`Policy` - :returns: the parsed policy - """ - version = resource.get('version') - etag = resource.get('etag') - policy = cls(etag, version) - for binding in resource.get('bindings', ()): - role = binding['role'] - members = set(binding['members']) - if role in (OWNER_ROLE, PUBSUB_ADMIN_ROLE): - policy.owners |= members - elif role in (EDITOR_ROLE, PUBSUB_EDITOR_ROLE): - policy.editors |= members - elif role in (VIEWER_ROLE, PUBSUB_VIEWER_ROLE): - policy.viewers |= members - elif role == PUBSUB_PUBLISHER_ROLE: - policy.publishers |= members - elif role == PUBSUB_SUBSCRIBER_ROLE: - policy.subscribers |= members - else: - raise ValueError('Unknown role: %s' % (role,)) - return policy - - def to_api_repr(self): - """Construct a Policy resource. - - :rtype: dict - :returns: a resource to be passed to the ``setIamPolicy`` API. 
- """ - resource = {} - - if self.etag is not None: - resource['etag'] = self.etag - - if self.version is not None: - resource['version'] = self.version - bindings = [] if self.owners: @@ -253,7 +166,4 @@ def to_api_repr(self): {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': sorted(self.subscribers)}) - if bindings: - resource['bindings'] = bindings - - return resource + return bindings diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py index 1d73277c270d..394317218486 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ b/packages/google-cloud-pubsub/tests/unit/test_iam.py @@ -48,111 +48,33 @@ def test_ctor_explicit(self): self.assertEqual(list(policy.publishers), []) self.assertEqual(list(policy.subscribers), []) - def test_user(self): - EMAIL = 'phred@example.com' - MEMBER = 'user:%s' % (EMAIL,) - policy = self._make_one() - self.assertEqual(policy.user(EMAIL), MEMBER) - - def test_service_account(self): - EMAIL = 'phred@example.com' - MEMBER = 'serviceAccount:%s' % (EMAIL,) - policy = self._make_one() - self.assertEqual(policy.service_account(EMAIL), MEMBER) - - def test_group(self): - EMAIL = 'phred@example.com' - MEMBER = 'group:%s' % (EMAIL,) - policy = self._make_one() - self.assertEqual(policy.group(EMAIL), MEMBER) - - def test_domain(self): - DOMAIN = 'example.com' - MEMBER = 'domain:%s' % (DOMAIN,) - policy = self._make_one() - self.assertEqual(policy.domain(DOMAIN), MEMBER) - - def test_all_users(self): - policy = self._make_one() - self.assertEqual(policy.all_users(), 'allUsers') - - def test_authenticated_users(self): + def test__bind_custom_role_publisher(self): + from google.cloud.pubsub.iam import ( + PUBSUB_PUBLISHER_ROLE, + ) + PUBLISHER = 'user:phred@example.com' policy = self._make_one() - self.assertEqual(policy.authenticated_users(), 'allAuthenticatedUsers') + policy._bind_custom_role(PUBSUB_PUBLISHER_ROLE, set([PUBLISHER])) - def 
test_from_api_repr_only_etag(self): - RESOURCE = { - 'etag': 'ACAB', - } - klass = self._get_target_class() - policy = klass.from_api_repr(RESOURCE) - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.editors), []) - self.assertEqual(list(policy.viewers), []) + self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - def test_from_api_repr_complete(self): + def test__bind_custom_role_subscriber(self): from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - RESOURCE = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - klass = self._get_target_class() - policy = klass.from_api_repr(RESOURCE) - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) + policy = self._make_one() + policy._bind_custom_role(PUBSUB_SUBSCRIBER_ROLE, set([SUBSCRIBER])) - def test_from_api_repr_bad_role(self): - BOGUS1 = 'user:phred@example.com' - BOGUS2 
= 'group:cloud-logs@google.com' - RESOURCE = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': 'nonesuch', 'members': [BOGUS1, BOGUS2]}, - ], - } - klass = self._get_target_class() - with self.assertRaises(ValueError): - klass.from_api_repr(RESOURCE) + self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - def test_to_api_repr_defaults(self): + def test__bind_custom_role_unknown(self): policy = self._make_one() - self.assertEqual(policy.to_api_repr(), {}) - - def test_to_api_repr_only_etag(self): - policy = self._make_one('DEADBEEF') - self.assertEqual(policy.to_api_repr(), {'etag': 'DEADBEEF'}) + USER = 'user:phred@example.com' + with self.assertRaises(ValueError): + policy._bind_custom_role('nonesuch', set([USER])) - def test_to_api_repr_full(self): + def test__role_bindings(self): from google.cloud.pubsub.iam import ( PUBSUB_ADMIN_ROLE, PUBSUB_EDITOR_ROLE, @@ -169,17 +91,13 @@ def test_to_api_repr_full(self): VIEWER2 = 'user:phred@example.com' PUBLISHER = 'user:phred@example.com' SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - EXPECTED = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } + EXPECTED = [ + {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, + {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, + ] policy = self._make_one('DEADBEEF', 17) policy.owners.add(OWNER1) policy.owners.add(OWNER2) @@ -189,4 +107,4 @@ def test_to_api_repr_full(self): policy.viewers.add(VIEWER2) policy.publishers.add(PUBLISHER) 
policy.subscribers.add(SUBSCRIBER) - self.assertEqual(policy.to_api_repr(), EXPECTED) + self.assertEqual(policy._role_bindings(), EXPECTED) From dbcb7feb2af3fadfcefbd90901603c4bc9e1fdc7 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 23 Mar 2017 13:25:44 -0400 Subject: [PATCH 0085/1197] Accomodate (future) user-defined roles. - google.cloud.iam.Policy holds a 'bindings' mapping, which doesn't enforce using known roles. - Its 'owners', 'editors', and 'viewers' are now properties which indirect over that 'bindings' attribute. Note that this is a breaking change, as users who relied on mutating one of those sets (rather than re-assigning it) will need to update. --- .../google/cloud/pubsub/iam.py | 102 ++++++------------ packages/google-cloud-pubsub/tests/system.py | 8 +- .../tests/unit/test_iam.py | 53 ++------- .../tests/unit/test_subscription.py | 36 ++++--- .../tests/unit/test_topic.py | 36 ++++--- 5 files changed, 86 insertions(+), 149 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index b650f82486e0..d7aedc2d3d56 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -17,9 +17,11 @@ https://cloud.google.com/pubsub/access_control#permissions """ -from google.cloud.iam import OWNER_ROLE -from google.cloud.iam import EDITOR_ROLE -from google.cloud.iam import VIEWER_ROLE +# pylint: disable=unused-import +from google.cloud.iam import OWNER_ROLE # noqa - backward compat +from google.cloud.iam import EDITOR_ROLE # noqa - backward compat +from google.cloud.iam import VIEWER_ROLE # noqa - backward compat +# pylint: enable=unused-import from google.cloud.iam import Policy as _BasePolicy # Pubsub-specific IAM roles @@ -94,76 +96,32 @@ class Policy(_BasePolicy): See: https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy 
https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding - - :type etag: str - :param etag: ETag used to identify a unique of the policy - - :type version: int - :param version: unique version of the policy """ _OWNER_ROLES = (OWNER_ROLE, PUBSUB_ADMIN_ROLE) - _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) - _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) - - def __init__(self, etag=None, version=None): - super(Policy, self).__init__(etag, version) - self.publishers = set() - self.subscribers = set() - - def _bind_custom_role(self, role, members): - """Bind an API-specific role to members. - - Helper for :meth:`from_api_repr`. - - :type role: str - :param role: role to bind. - - :type members: set of str - :param members: member IDs to be bound to the role. - - Subclasses may override. - """ - if role == PUBSUB_PUBLISHER_ROLE: - self.publishers |= members - elif role == PUBSUB_SUBSCRIBER_ROLE: - self.subscribers |= members - else: - super(Policy, self)._bind_custom_role(role, members) + """Roles mapped onto our ``owners`` attribute.""" - def _role_bindings(self): - """Enumerate members bound to roles for the policy. - - Helper for :meth:`to_api_repr`. - - :rtype: list of mapping - :returns: zero or more mappings describing roles / members bound by - the policy. 
- """ - bindings = [] - - if self.owners: - bindings.append( - {'role': PUBSUB_ADMIN_ROLE, - 'members': sorted(self.owners)}) - - if self.editors: - bindings.append( - {'role': PUBSUB_EDITOR_ROLE, - 'members': sorted(self.editors)}) - - if self.viewers: - bindings.append( - {'role': PUBSUB_VIEWER_ROLE, - 'members': sorted(self.viewers)}) - - if self.publishers: - bindings.append( - {'role': PUBSUB_PUBLISHER_ROLE, - 'members': sorted(self.publishers)}) - - if self.subscribers: - bindings.append( - {'role': PUBSUB_SUBSCRIBER_ROLE, - 'members': sorted(self.subscribers)}) + _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) + """Roles mapped onto our ``editors`` attribute.""" - return bindings + _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) + """Roles mapped onto our ``viewers`` attribute.""" + + @property + def publishers(self): + """Legacy access to owner role.""" + return self.bindings.get(PUBSUB_PUBLISHER_ROLE, ()) + + @publishers.setter + def publishers(self, value): + """Update publishers.""" + self.bindings[PUBSUB_PUBLISHER_ROLE] = list(value) + + @property + def subscribers(self): + """Legacy access to owner role.""" + return self.bindings.get(PUBSUB_SUBSCRIBER_ROLE, ()) + + @subscribers.setter + def subscribers(self, value): + """Update subscribers.""" + self.bindings[PUBSUB_SUBSCRIBER_ROLE] = list(value) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 54c4ba7a895c..93ad19b76647 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -252,7 +252,9 @@ def test_topic_iam_policy(self): if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]): policy = topic.get_iam_policy() - policy.viewers.add(policy.user('jjg@google.com')) + viewers = set(policy.viewers) + viewers.add(policy.user('jjg@google.com')) + policy.viewers = viewers new_policy = topic.set_iam_policy(policy) self.assertEqual(new_policy.viewers, policy.viewers) @@ -280,6 +282,8 
@@ def test_subscription_iam_policy(self): if subscription.check_iam_permissions( [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]): policy = subscription.get_iam_policy() - policy.viewers.add(policy.user('jjg@google.com')) + viewers = set(policy.viewers) + viewers.add(policy.user('jjg@google.com')) + policy.viewers = viewers new_policy = subscription.set_iam_policy(policy) self.assertEqual(new_policy.viewers, policy.viewers) diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py index 394317218486..16231a0d73c7 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ b/packages/google-cloud-pubsub/tests/unit/test_iam.py @@ -48,63 +48,26 @@ def test_ctor_explicit(self): self.assertEqual(list(policy.publishers), []) self.assertEqual(list(policy.subscribers), []) - def test__bind_custom_role_publisher(self): + def test_publishers_setter(self): from google.cloud.pubsub.iam import ( PUBSUB_PUBLISHER_ROLE, ) PUBLISHER = 'user:phred@example.com' policy = self._make_one() - policy._bind_custom_role(PUBSUB_PUBLISHER_ROLE, set([PUBLISHER])) + policy.publishers = [PUBLISHER] self.assertEqual(sorted(policy.publishers), [PUBLISHER]) + self.assertEqual( + policy.bindings, {PUBSUB_PUBLISHER_ROLE: [PUBLISHER]}) - def test__bind_custom_role_subscriber(self): + def test_subscribers_setter(self): from google.cloud.pubsub.iam import ( PUBSUB_SUBSCRIBER_ROLE, ) SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' policy = self._make_one() - policy._bind_custom_role(PUBSUB_SUBSCRIBER_ROLE, set([SUBSCRIBER])) + policy.subscribers = [SUBSCRIBER] self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - - def test__bind_custom_role_unknown(self): - policy = self._make_one() - USER = 'user:phred@example.com' - with self.assertRaises(ValueError): - policy._bind_custom_role('nonesuch', set([USER])) - - def test__role_bindings(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - 
PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - EXPECTED = [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ] - policy = self._make_one('DEADBEEF', 17) - policy.owners.add(OWNER1) - policy.owners.add(OWNER2) - policy.editors.add(EDITOR1) - policy.editors.add(EDITOR2) - policy.viewers.add(VIEWER1) - policy.viewers.add(VIEWER2) - policy.publishers.add(PUBLISHER) - policy.subscribers.add(SUBSCRIBER) - self.assertEqual(policy._role_bindings(), EXPECTED) + self.assertEqual( + policy.bindings, {PUBSUB_SUBSCRIBER_ROLE: [SUBSCRIBER]}) diff --git a/packages/google-cloud-pubsub/tests/unit/test_subscription.py b/packages/google-cloud-pubsub/tests/unit/test_subscription.py index 42fb23d9ae68..f16f0ad9126e 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_subscription.py +++ b/packages/google-cloud-pubsub/tests/unit/test_subscription.py @@ -523,11 +523,12 @@ def test_get_iam_policy_w_alternate_client(self): self.assertEqual(api._got_iam_policy, self.SUB_PATH) def test_set_iam_policy_w_bound_client(self): + import operator from google.cloud.pubsub.iam import Policy from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, + OWNER_ROLE, + EDITOR_ROLE, + VIEWER_ROLE, PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) @@ -544,9 +545,9 @@ def test_set_iam_policy_w_bound_client(self): 'etag': 'DEADBEEF', 
'version': 17, 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, ], @@ -560,14 +561,11 @@ def test_set_iam_policy_w_bound_client(self): topic = _Topic(self.TOPIC_NAME, client=client) subscription = self._make_one(self.SUB_NAME, topic) policy = Policy('DEADBEEF', 17) - policy.owners.add(OWNER1) - policy.owners.add(OWNER2) - policy.editors.add(EDITOR1) - policy.editors.add(EDITOR2) - policy.viewers.add(VIEWER1) - policy.viewers.add(VIEWER2) - policy.publishers.add(PUBLISHER) - policy.subscribers.add(SUBSCRIBER) + policy.owners = [OWNER1, OWNER2] + policy.editors = [EDITOR1, EDITOR2] + policy.viewers = [VIEWER1, VIEWER2] + policy.publishers = [PUBLISHER] + policy.subscribers = [SUBSCRIBER] new_policy = subscription.set_iam_policy(policy) @@ -578,7 +576,15 @@ def test_set_iam_policy_w_bound_client(self): self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._set_iam_policy, (self.SUB_PATH, POLICY)) + self.assertEqual(len(api._set_iam_policy), 2) + self.assertEqual(api._set_iam_policy[0], self.SUB_PATH) + resource = api._set_iam_policy[1] + self.assertEqual(resource['etag'], POLICY['etag']) + self.assertEqual(resource['version'], POLICY['version']) + key = operator.itemgetter('role') + self.assertEqual( + sorted(resource['bindings'], key=key), + sorted(POLICY['bindings'], key=key)) def test_set_iam_policy_w_alternate_client(self): from google.cloud.pubsub.iam import Policy diff --git 
a/packages/google-cloud-pubsub/tests/unit/test_topic.py b/packages/google-cloud-pubsub/tests/unit/test_topic.py index 01864fa24fdd..2c90432195c2 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_topic.py +++ b/packages/google-cloud-pubsub/tests/unit/test_topic.py @@ -509,11 +509,12 @@ def test_get_iam_policy_w_alternate_client(self): self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) def test_set_iam_policy_w_bound_client(self): + import operator from google.cloud.pubsub.iam import Policy from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, + OWNER_ROLE, + EDITOR_ROLE, + VIEWER_ROLE, PUBSUB_PUBLISHER_ROLE, PUBSUB_SUBSCRIBER_ROLE, ) @@ -530,11 +531,11 @@ def test_set_iam_policy_w_bound_client(self): 'etag': 'DEADBEEF', 'version': 17, 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, + {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, + {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, + {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, @@ -551,14 +552,11 @@ def test_set_iam_policy_w_bound_client(self): api._set_iam_policy_response = RESPONSE topic = self._make_one(self.TOPIC_NAME, client=client) policy = Policy('DEADBEEF', 17) - policy.owners.add(OWNER1) - policy.owners.add(OWNER2) - policy.editors.add(EDITOR1) - policy.editors.add(EDITOR2) - policy.viewers.add(VIEWER1) - policy.viewers.add(VIEWER2) - policy.publishers.add(PUBLISHER) - policy.subscribers.add(SUBSCRIBER) + policy.owners = [OWNER1, OWNER2] + policy.editors = [EDITOR1, EDITOR2] + policy.viewers = [VIEWER1, VIEWER2] + policy.publishers = [PUBLISHER] + policy.subscribers = [SUBSCRIBER] new_policy = topic.set_iam_policy(policy) @@ -569,7 +567,15 @@ def test_set_iam_policy_w_bound_client(self): self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) 
self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, POLICY)) + self.assertEqual(len(api._set_iam_policy), 2) + self.assertEqual(api._set_iam_policy[0], self.TOPIC_PATH) + resource = api._set_iam_policy[1] + self.assertEqual(resource['etag'], POLICY['etag']) + self.assertEqual(resource['version'], POLICY['version']) + key = operator.itemgetter('role') + self.assertEqual( + sorted(resource['bindings'], key=key), + sorted(POLICY['bindings'], key=key)) def test_set_iam_policy_w_alternate_client(self): from google.cloud.pubsub.iam import Policy From 1ab4946d60fb99a59a69fbe5439bc37a5f130a73 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 5 Apr 2017 19:43:07 -0400 Subject: [PATCH 0086/1197] Return frozensets from named Policy properties. Updating them in place never actually worked (they were sets created on the fly), but at least we give an appropriate error now if the user tries. --- packages/google-cloud-pubsub/google/cloud/pubsub/iam.py | 4 ++-- packages/google-cloud-pubsub/tests/unit/test_iam.py | 5 +++++ 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index d7aedc2d3d56..b0093d263871 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -109,7 +109,7 @@ class Policy(_BasePolicy): @property def publishers(self): """Legacy access to owner role.""" - return self.bindings.get(PUBSUB_PUBLISHER_ROLE, ()) + return frozenset(self.bindings.get(PUBSUB_PUBLISHER_ROLE, ())) @publishers.setter def publishers(self, value): @@ -119,7 +119,7 @@ def publishers(self, value): @property def subscribers(self): """Legacy access to owner role.""" - return self.bindings.get(PUBSUB_SUBSCRIBER_ROLE, ()) + return frozenset(self.bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) @subscribers.setter def subscribers(self, 
value): diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py index 16231a0d73c7..cdb750fd2573 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ b/packages/google-cloud-pubsub/tests/unit/test_iam.py @@ -30,10 +30,15 @@ def test_ctor_defaults(self): policy = self._make_one() self.assertIsNone(policy.etag) self.assertIsNone(policy.version) + self.assertIsInstance(policy.owners, frozenset) self.assertEqual(list(policy.owners), []) + self.assertIsInstance(policy.editors, frozenset) self.assertEqual(list(policy.editors), []) + self.assertIsInstance(policy.viewers, frozenset) self.assertEqual(list(policy.viewers), []) + self.assertIsInstance(policy.publishers, frozenset) self.assertEqual(list(policy.publishers), []) + self.assertIsInstance(policy.subscribers, frozenset) self.assertEqual(list(policy.subscribers), []) def test_ctor_explicit(self): From df865c2209ba20b0547078596d02e2aac3ecfd44 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Apr 2017 11:49:10 -0400 Subject: [PATCH 0087/1197] Make IAM Policy objects dict-line. Keys are roles, values are lists of principals. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/iam.py | 8 ++++---- packages/google-cloud-pubsub/tests/unit/test_iam.py | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index b0093d263871..704da83b3d6a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -109,19 +109,19 @@ class Policy(_BasePolicy): @property def publishers(self): """Legacy access to owner role.""" - return frozenset(self.bindings.get(PUBSUB_PUBLISHER_ROLE, ())) + return frozenset(self._bindings.get(PUBSUB_PUBLISHER_ROLE, ())) @publishers.setter def publishers(self, value): """Update publishers.""" - self.bindings[PUBSUB_PUBLISHER_ROLE] = list(value) + self._bindings[PUBSUB_PUBLISHER_ROLE] = list(value) @property def subscribers(self): """Legacy access to owner role.""" - return frozenset(self.bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) + return frozenset(self._bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) @subscribers.setter def subscribers(self, value): """Update subscribers.""" - self.bindings[PUBSUB_SUBSCRIBER_ROLE] = list(value) + self._bindings[PUBSUB_SUBSCRIBER_ROLE] = list(value) diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py index cdb750fd2573..4b7b63259186 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ b/packages/google-cloud-pubsub/tests/unit/test_iam.py @@ -63,7 +63,7 @@ def test_publishers_setter(self): self.assertEqual(sorted(policy.publishers), [PUBLISHER]) self.assertEqual( - policy.bindings, {PUBSUB_PUBLISHER_ROLE: [PUBLISHER]}) + dict(policy), {PUBSUB_PUBLISHER_ROLE: [PUBLISHER]}) def test_subscribers_setter(self): from google.cloud.pubsub.iam import ( @@ -75,4 +75,4 @@ def test_subscribers_setter(self): self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) 
self.assertEqual( - policy.bindings, {PUBSUB_SUBSCRIBER_ROLE: [SUBSCRIBER]}) + dict(policy), {PUBSUB_SUBSCRIBER_ROLE: [SUBSCRIBER]}) From 23139521f99596458e6a7b4ac6c514a600cb4236 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Apr 2017 12:27:21 -0400 Subject: [PATCH 0088/1197] Deprecate assignment to legacy role attributes. --- .../google-cloud-pubsub/google/cloud/pubsub/iam.py | 11 +++++++++++ packages/google-cloud-pubsub/tests/unit/test_iam.py | 8 ++++++-- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index 704da83b3d6a..e92f2151dc05 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -17,12 +17,15 @@ https://cloud.google.com/pubsub/access_control#permissions """ +import warnings + # pylint: disable=unused-import from google.cloud.iam import OWNER_ROLE # noqa - backward compat from google.cloud.iam import EDITOR_ROLE # noqa - backward compat from google.cloud.iam import VIEWER_ROLE # noqa - backward compat # pylint: enable=unused-import from google.cloud.iam import Policy as _BasePolicy +from google.cloud.iam import _ASSIGNMENT_DEPRECATED_MSG # Pubsub-specific IAM roles @@ -114,6 +117,10 @@ def publishers(self): @publishers.setter def publishers(self, value): """Update publishers.""" + warnings.warn( + _ASSIGNMENT_DEPRECATED_MSG.format( + 'publishers', PUBSUB_PUBLISHER_ROLE), + DeprecationWarning) self._bindings[PUBSUB_PUBLISHER_ROLE] = list(value) @property @@ -124,4 +131,8 @@ def subscribers(self): @subscribers.setter def subscribers(self, value): """Update subscribers.""" + warnings.warn( + _ASSIGNMENT_DEPRECATED_MSG.format( + 'subscribers', PUBSUB_SUBSCRIBER_ROLE), + DeprecationWarning) self._bindings[PUBSUB_SUBSCRIBER_ROLE] = list(value) diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py 
b/packages/google-cloud-pubsub/tests/unit/test_iam.py index 4b7b63259186..3bf4aaa922f0 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ b/packages/google-cloud-pubsub/tests/unit/test_iam.py @@ -54,24 +54,28 @@ def test_ctor_explicit(self): self.assertEqual(list(policy.subscribers), []) def test_publishers_setter(self): + import warnings from google.cloud.pubsub.iam import ( PUBSUB_PUBLISHER_ROLE, ) PUBLISHER = 'user:phred@example.com' policy = self._make_one() - policy.publishers = [PUBLISHER] + with warnings.catch_warnings(): + policy.publishers = [PUBLISHER] self.assertEqual(sorted(policy.publishers), [PUBLISHER]) self.assertEqual( dict(policy), {PUBSUB_PUBLISHER_ROLE: [PUBLISHER]}) def test_subscribers_setter(self): + import warnings from google.cloud.pubsub.iam import ( PUBSUB_SUBSCRIBER_ROLE, ) SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' policy = self._make_one() - policy.subscribers = [SUBSCRIBER] + with warnings.catch_warnings(): + policy.subscribers = [SUBSCRIBER] self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) self.assertEqual( From b8602c6bc6169510cff5bb905ea83ea94945e55d Mon Sep 17 00:00:00 2001 From: Jacob Geiger Date: Tue, 11 Apr 2017 11:05:42 -0700 Subject: [PATCH 0089/1197] Add Pub/Sub snapshot and seek functionality Notes: - cloud.google.com links do not yet work - SeekResponse exists but is empty, so corresponding methods return None instead - It's not documented whether the deleted topic path applies to snapshots analogous to how it applies to subscriptions, but it seems logical to assume so. - The new Subscription fields are part of this release but will come shortly as a separate PR. (They can be done independently of the snap-seek work.) 
--- .../google/cloud/pubsub/_gax.py | 159 +++++++++- .../google/cloud/pubsub/_http.py | 128 +++++++++ .../google/cloud/pubsub/client.py | 26 ++ .../google/cloud/pubsub/snapshot.py | 141 +++++++++ .../google/cloud/pubsub/subscription.py | 40 +++ packages/google-cloud-pubsub/tests/system.py | 82 ++++++ .../tests/unit/test__gax.py | 272 ++++++++++++++++++ .../tests/unit/test__http.py | 171 +++++++++++ .../tests/unit/test_client.py | 13 + .../tests/unit/test_snpashot.py | 215 ++++++++++++++ .../tests/unit/test_subscription.py | 88 ++++++ 11 files changed, 1328 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py create mode 100644 packages/google-cloud-pubsub/tests/unit/test_snpashot.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 599c99ef1d44..10593beee827 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -37,6 +37,7 @@ from google.cloud.iterator import GAXIterator from google.cloud.pubsub import __version__ from google.cloud.pubsub._helpers import subscription_name_from_path +from google.cloud.pubsub.snapshot import Snapshot from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic @@ -135,10 +136,10 @@ def topic_delete(self, topic_path): """API call: delete a topic See: - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete :type topic_path: str - :param topic_path: fully-qualified path of the new topic, in format + :param topic_path: fully-qualified path of the topic, in format ``projects//topics/``. 
""" try: @@ -372,7 +373,7 @@ def subscription_modify_push_config(self, subscription_path, :type subscription_path: str :param subscription_path: - the fully-qualified path of the new subscription, in format + the fully-qualified path of the subscription to affect, in format ``projects//subscriptions/``. :type push_endpoint: str @@ -397,8 +398,8 @@ def subscription_pull(self, subscription_path, return_immediately=False, :type subscription_path: str :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. + the fully-qualified path of the subscription to pull from, in + format ``projects//subscriptions/``. :type return_immediately: bool :param return_immediately: if True, the back-end returns even if no @@ -438,7 +439,7 @@ def subscription_acknowledge(self, subscription_path, ack_ids): :type subscription_path: str :param subscription_path: - the fully-qualified path of the new subscription, in format + the fully-qualified path of the subscription to affect, in format ``projects//subscriptions/``. :type ack_ids: list of string @@ -460,7 +461,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, :type subscription_path: str :param subscription_path: - the fully-qualified path of the new subscription, in format + the fully-qualified path of the subscription to affect, in format ``projects//subscriptions/``. :type ack_ids: list of string @@ -478,6 +479,120 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, raise NotFound(subscription_path) raise + def subscription_seek(self, subscription_path, time=None, snapshot=None): + """API call: seek a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek + + :type subscription_path: str + :param subscription_path:: + the fully-qualified path of the subscription to affect, in format + ``projects//subscriptions/``. 
+ + :type time: :class:`.timestamp_pb2.Timestamp` + :param time: The time to seek to. + + :type snapshot: str + :param snapshot: The snapshot to seek to. + """ + try: + self._gax_api.seek(subscription_path, time=time, snapshot=snapshot) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + + def list_snapshots(self, project, page_size=0, page_token=None): + """List snapshots for the project associated with this API. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list + + :type project: str + :param project: project ID + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` + accessible to the current API. + """ + if page_token is None: + page_token = INITIAL_PAGE + options = CallOptions(page_token=page_token) + path = 'projects/%s' % (project,) + page_iter = self._gax_api.list_snapshots( + path, page_size=page_size, options=options) + + # We attach a mutable topics dictionary so that as topic + # objects are created by Snapshot.from_api_repr, they + # can be re-used by other snapshots of the same topic. + topics = {} + item_to_value = functools.partial( + _item_to_snapshot_for_client, topics=topics) + return GAXIterator(self._client, page_iter, item_to_value) + + def snapshot_create(self, snapshot_path, subscription_path): + """API call: create a snapshot + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create + + :type snapshot_path: str + :param snapshot_path: fully-qualified path of the snapshot, in format + ``projects//snapshots/``. 
+ + :type subscription_path: str + :param subscription_path: fully-qualified path of the subscrption that + the new snapshot captures, in format + ``projects//subscription/``. + + :rtype: dict + :returns: ``Snapshot`` resource returned from the API. + :raises: :exc:`google.cloud.exceptions.Conflict` if the snapshot + already exists + :raises: :exc:`google.cloud.exceptions.NotFound` if the subscription + does not exist + """ + try: + snapshot_pb = self._gax_api.create_snapshot( + snapshot_path, subscription_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + raise Conflict(snapshot_path) + elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(subscription_path) + raise + return MessageToDict(snapshot_pb) + + def snapshot_delete(self, snapshot_path): + """API call: delete a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete + + :type snapshot_path: str + :param snapshot_path: fully-qualified path of the snapshot, in format + ``projects//snapshots/``. + + :raises: :exc:`google.cloud.exceptions.NotFound` if the snapshot does + not exist + """ + try: + self._gax_api.delete_snapshot(snapshot_path) + except GaxError as exc: + if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: + raise NotFound(snapshot_path) + raise + def _message_pb_from_mapping(message): """Helper for :meth:`_PublisherAPI.topic_publish`. @@ -631,3 +746,33 @@ def _item_to_sub_for_client(iterator, sub_pb, topics): resource = MessageToDict(sub_pb) return Subscription.from_api_repr( resource, iterator.client, topics=topics) + + +def _item_to_snapshot_for_client(iterator, snapshot_pb, topics): + """Convert a subscription protobuf to the native object. + + .. note:: + + This method does not have the correct signature to be used as + the ``item_to_value`` argument to + :class:`~google.cloud.iterator.Iterator`. 
It is intended to be + patched with a mutable topics argument that can be updated + on subsequent calls. For an example, see how the method is + used above in :meth:`_SubscriberAPI.list_snapshots`. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type sub_pb: :class:`.pubsub_pb2.Snapshot` + :param sub_pb: A subscription returned from the API. + + :type topics: dict + :param topics: A dictionary of topics to be used (and modified) + as new subscriptions are created bound to topics. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: The next subscription in the page. + """ + resource = MessageToDict(snapshot_pb) + return Snapshot.from_api_repr( + resource, iterator.client, topics=topics) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index e9538dce22d4..21bacac9cdb6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -25,6 +25,7 @@ from google.cloud.pubsub import __version__ from google.cloud.pubsub._helpers import subscription_name_from_path +from google.cloud.pubsub.snapshot import Snapshot from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic @@ -467,6 +468,104 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, } self.api_request(method='POST', path=path, data=data) + def subscription_seek(self, subscription_path, time=None, snapshot=None): + """API call: seek a subscription + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek + + :type subscription_path: str + :param subscription_path:: + the fully-qualified path of the subscription to affect, in format + ``projects//subscriptions/``. + + :type time: str + :param time: The time to seek to, in RFC 3339 format. 
+ + :type snapshot: str + :param snapshot: The snapshot to seek to. + """ + path = '/%s:seek' % (subscription_path,) + data = {} + if time is not None: + data['time'] = time + elif snapshot is not None: + data['snapshot'] = snapshot + self.api_request(method='POST', path=path, data=data) + + def list_snapshots(self, project, page_size=None, page_token=None): + """List snapshots for the project associated with this API. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list + + :type project: str + :param project: project ID + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` + accessible to the current API. + """ + extra_params = {} + if page_size is not None: + extra_params['pageSize'] = page_size + path = '/projects/%s/snapshots' % (project,) + + # We attach a mutable topics dictionary so that as topic + # objects are created by Snapshot.from_api_repr, they + # can be re-used by other snapshots of the same topic. + topics = {} + item_to_value = functools.partial( + _item_to_snapshot_for_client, topics=topics) + return HTTPIterator( + client=self._client, path=path, item_to_value=item_to_value, + items_key='snapshots', page_token=page_token, + extra_params=extra_params) + + def snapshot_create(self, snapshot_path, subscription_path): + """API call: create a snapshot + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create + + :type snapshot_path: str + :param snapshot_path: fully-qualified path of the snapshot, in format + ``projects//snapshots/``. 
+ + :type subscription_path: str + :param subscription_path: fully-qualified path of the subscrption that + the new snapshot captures, in format + ``projects//subscription/``. + + :rtype: dict + :returns: ``Snapshot`` resource returned from the API. + """ + path = '/%s' % (snapshot_path,) + data = {'subscription': subscription_path} + return self.api_request(method='PUT', path=path, data=data) + + def snapshot_delete(self, snapshot_path): + """API call: delete a topic + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete + + :type snapshot_path: str + :param snapshot_path: fully-qualified path of the snapshot, in format + ``projects//snapshots/``. + """ + path = '/%s' % (snapshot_path,) + self.api_request(method='DELETE', path=path) + class _IAMPolicyAPI(object): """Helper mapping IAM policy-related APIs. @@ -627,3 +726,32 @@ def _item_to_sub_for_client(iterator, resource, topics): """ return Subscription.from_api_repr( resource, iterator.client, topics=topics) + + +def _item_to_snapshot_for_client(iterator, resource, topics): + """Convert a subscription to the native object. + + .. note:: + + This method does not have the correct signature to be used as + the ``item_to_value`` argument to + :class:`~google.cloud.iterator.Iterator`. It is intended to be + patched with a mutable topics argument that can be updated + on subsequent calls. For an example, see how the method is + used above in :meth:`_SubscriberAPI.list_snapshots`. + + :type iterator: :class:`~google.cloud.iterator.Iterator` + :param iterator: The iterator that is currently in use. + + :type resource: dict + :param resource: A subscription returned from the API. + + :type topics: dict + :param topics: A dictionary of topics to be used (and modified) + as new subscriptions are created bound to topics. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: The next subscription in the page. 
+ """ + return Snapshot.from_api_repr( + resource, iterator.client, topics=topics) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 4e5b76bd118c..cccecd27f4c4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -191,6 +191,32 @@ def list_subscriptions(self, page_size=None, page_token=None): return api.list_subscriptions( self.project, page_size, page_token) + def list_snapshots(self, page_size=None, page_token=None): + """List snapshots for the project associated with this API. + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list + + :type project: str + :param project: project ID + + :type page_size: int + :param page_size: maximum number of topics to return, If not passed, + defaults to a value set by the API. + + :type page_token: str + :param page_token: opaque marker for the next "page" of topics. If not + passed, the API will return the first page of + topics. + + :rtype: :class:`~google.cloud.iterator.Iterator` + :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` + accessible to the current API. + """ + api = self.subscriber_api + return api.list_snapshots( + self.project, page_size, page_token) + def topic(self, name, timestamp_messages=False): """Creates a topic bound to the current client. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py new file mode 100644 index 000000000000..fd9a78376397 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py @@ -0,0 +1,141 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Define API Snapshots.""" + +from google.cloud.pubsub._helpers import topic_name_from_path + + +class Snapshot(object): + + _DELETED_TOPIC_PATH = '_deleted-topic_' + """Value of ``projects.snapshots.topic`` when topic has been deleted.""" + + def __init__(self, name, subscription=None, topic=None, client=None): + + num_kwargs = len( + [param for param in (subscription, topic, client) if param]) + if num_kwargs != 1: + raise TypeError( + "Pass only one of 'subscription', 'topic', 'client'.") + + self.name = name + self.topic = topic or getattr(subscription, 'topic', None) + self._subscription = subscription + self._client = client or getattr( + subscription, '_client', None) or topic._client + self._project = self._client.project + + @classmethod + def from_api_repr(cls, resource, client, topics=None): + """Factory: construct a subscription given its API representation + + :type resource: dict + :param resource: snapshot resource representation returned from the + API. + + :type client: :class:`google.cloud.pubsub.client.Client` + :param client: Client which holds credentials and project + configuration. + + :type subscriptions: dict + :param subscriptions: + (Optional) A Subscription to which this snapshot belongs. If not + passed, the subscription will have a newly-created subscription. + Must have the same topic as the snapshot. + + :rtype: :class:`google.cloud.pubsub.subscription.Subscription` + :returns: Subscription parsed from ``resource``. 
+ """ + if topics is None: + topics = {} + topic_path = resource['topic'] + if topic_path == cls._DELETED_TOPIC_PATH: + topic = None + else: + topic = topics.get(topic_path) + if topic is None: + # NOTE: This duplicates behavior from Topic.from_api_repr to + # avoid an import cycle. + topic_name = topic_name_from_path(topic_path, client.project) + topic = topics[topic_path] = client.topic(topic_name) + _, _, _, name = resource['name'].split('/') + if topic is None: + return cls(name, client=client) + return cls(name, topic=topic) + + @property + def project(self): + """Project bound to the subscription.""" + return self._client.project + + @property + def full_name(self): + """Fully-qualified name used in subscription APIs""" + return 'projects/%s/snapshots/%s' % (self.project, self.name) + + @property + def path(self): + """URL path for the subscription's APIs""" + return '/%s' % (self.full_name,) + + def _require_client(self, client): + """Check client or verify over-ride. + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the topic of the + current subscription. + + :rtype: :class:`google.cloud.pubsub.client.Client` + :returns: The client passed in or the currently bound client. + """ + if client is None: + client = self._client + return client + + def create(self, client=None): + """API call: create the snapshot + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. 
+ """ + if not self._subscription: + raise RuntimeError( + 'Cannot create a snapshot not bound to a subscription') + + client = self._require_client(client) + api = client.subscriber_api + api.snapshot_create(self.full_name, self._subscription.full_name) + + def delete(self, client=None): + """API call: delete the snapshot + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete + + :type client: :class:`~google.cloud.pubsub.client.Client` or + ``NoneType`` + :param client: the client to use. If not passed, falls back to the + ``client`` stored on the current subscription's topic. + """ + client = self._require_client(client) + api = client.subscriber_api + api.snapshot_delete(self.full_name) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index 19089380cf41..4f5be33c5e82 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -15,6 +15,8 @@ """Define API Subscriptions.""" from google.cloud.exceptions import NotFound +from google.cloud._helpers import _datetime_to_rfc3339 +from google.cloud.pubsub.snapshot import Snapshot from google.cloud.pubsub._helpers import topic_name_from_path from google.cloud.pubsub.iam import Policy from google.cloud.pubsub.message import Message @@ -379,6 +381,44 @@ def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): api.subscription_modify_ack_deadline( self.full_name, ack_ids, ack_deadline) + def snapshot(self, name, client=None): + """Creates a snapshot of this subscription. + + :type name: str + :param name: the name of the subscription + + :rtype: :class:`Snapshot` + :returns: The snapshot created with the passed in arguments. 
+ """ + return Snapshot(name, subscription=self) + + def seek_snapshot(self, snapshot, client=None): + """API call: seek a subscription to a given snapshot + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek + + :type snapshot: :class:`Snapshot` + :param snapshot: The snapshot to seek to. + """ + client = self._require_client(client) + api = client.subscriber_api + api.subscription_seek(self.full_name, snapshot=snapshot.full_name) + + def seek_timestamp(self, timestamp, client=None): + """API call: seek a subscription to a given point in time + + See: + https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek + + :type time: :class:`datetime.datetime` + :param time: The time to seek to. + """ + client = self._require_client(client) + timestamp = _datetime_to_rfc3339(timestamp) + api = client.subscriber_api + api.subscription_seek(self.full_name, time=timestamp) + def get_iam_policy(self, client=None): """Fetch the IAM policy for the subscription. diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 54c4ba7a895c..ea88b0478a8c 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -70,6 +70,18 @@ def _consume_topics(pubsub_client): return list(pubsub_client.list_topics()) +def _consume_snapshots(pubsub_client): + """Consume entire iterator. + + :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` + :param pubsub_client: Client to use to retrieve snapshots. + + :rtype: list + :returns: List of all snapshots encountered. + """ + return list(pubsub_client.list_snapshots()) + + def _consume_subscriptions(topic): """Consume entire iterator. 
@@ -283,3 +295,73 @@ def test_subscription_iam_policy(self): policy.viewers.add(policy.user('jjg@google.com')) new_policy = subscription.set_iam_policy(policy) self.assertEqual(new_policy.viewers, policy.viewers) + + def test_create_snapshot(self): + TOPIC_NAME = 'create-snap-def' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME) + before_snapshots = _consume_snapshots(Config.CLIENT) + + self.assertFalse(topic.exists()) + topic.create() + self.to_delete.append(topic) + SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') + subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=600) + self.assertFalse(subscription.exists()) + subscription.create() + self.to_delete.append(subscription) + SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') + snapshot = subscription.snapshot(SNAPSHOT_NAME) + snapshot.create() + self.to_delete.append(snapshot) + + # There is no GET method for snapshot, so check existence using + # list + after_snapshots = _consume_snapshots(Config.CLIENT) + self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) + + def full_name(obj): + return obj.full_name + + self.assertIn(snapshot.full_name, map(full_name, after_snapshots)) + self.assertNotIn(snapshot.full_name, map(full_name, before_snapshots)) + + + def test_seek(self): + TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME, + timestamp_messages=True) + self.assertFalse(topic.exists()) + topic.create() + self.to_delete.append(topic) + + SUBSCRIPTION_NAME = 'subscribing-to-seek' + unique_resource_id('-') + subscription = topic.subscription(SUBSCRIPTION_NAME) + self.assertFalse(subscription.exists()) + subscription.create() + self.to_delete.append(subscription) + + SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') + snapshot = subscription.snapshot(SNAPSHOT_NAME) + snapshot.create() + self.to_delete.append(snapshot) + + MESSAGE_1 = b'MESSAGE ONE' + topic.publish(MESSAGE_1) + MESSAGE_2 = b'MESSAGE TWO' 
+ topic.publish(MESSAGE_2) + + ((ack_id_1a, recvd_1a), ) = subscription.pull() + ((ack_id_2a, recvd_2a), ) = subscription.pull() + before_data = [obj.data for obj in (recvd_1a, recvd_2a)] + self.assertIn(MESSAGE_1, before_data) + self.assertIn(MESSAGE_2, before_data) + subscription.acknowledge((ack_id_1a, ack_id_2a)) + + self.assertFalse(subscription.pull(return_immediately=True)) + + subscription.seek_snapshot(snapshot) + + ((_, recvd_1b), ) = subscription.pull() + ((_, recvd_2b), ) = subscription.pull() + after_data = [obj.data for obj in (recvd_1b, recvd_2b)] + self.assertEqual(sorted(before_data), sorted(after_data)) diff --git a/packages/google-cloud-pubsub/tests/unit/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py index 8a41d3ff70f8..055bdeabfb68 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__gax.py +++ b/packages/google-cloud-pubsub/tests/unit/test__gax.py @@ -45,6 +45,10 @@ class _Base(object): LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) SUB_NAME = 'sub_name' SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) + SNAPSHOT_NAME = 'snapshot_name' + SNAPSHOT_PATH = '%s/snapshots/%s' % (PROJECT_PATH, SNAPSHOT_NAME) + TIME = 12345 + def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) @@ -924,6 +928,238 @@ def test_subscription_modify_ack_deadline_error(self): self.assertEqual(deadline, NEW_DEADLINE) self.assertIsNone(options) + def test_list_snapshots_no_paging(self): + from google.gax import INITIAL_PAGE + from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( + Snapshot as SnapshotPB) + from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.snapshot import Snapshot + from google.cloud.pubsub.topic import Topic + + local_snapshot_path = '%s/snapshots/%s' % ( + self.PROJECT_PATH, self.SNAPSHOT_NAME) + snapshot_pb = SnapshotPB( + name=local_snapshot_path, topic=self.TOPIC_PATH) + response = 
_GAXPageIterator([snapshot_pb]) + gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) + creds = _make_credentials() + client = Client(project=self.PROJECT, credentials=creds) + api = self._make_one(gax_api, client) + + iterator = api.list_snapshots(self.PROJECT) + snapshots = list(iterator) + next_token = iterator.next_page_token + + # Check the token returned. + self.assertIsNone(next_token) + # Check the snapshot object returned. + self.assertEqual(len(snapshots), 1) + snapshot = snapshots[0] + self.assertIsInstance(snapshot, Snapshot) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertIsInstance(snapshot.topic, Topic) + self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) + self.assertIs(snapshot._client, client) + self.assertEqual(snapshot._project, self.PROJECT) + + def test_list_snapshots_with_paging(self): + from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( + Snapshot as SnapshotPB) + from google.cloud._testing import _GAXPageIterator + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.snapshot import Snapshot + from google.cloud.pubsub.topic import Topic + + SIZE = 23 + TOKEN = 'TOKEN' + NEW_TOKEN = 'NEW_TOKEN' + local_snapshot_path = '%s/snapshots/%s' % ( + self.PROJECT_PATH, self.SNAPSHOT_NAME) + snapshot_pb = SnapshotPB(name=local_snapshot_path, topic=self.TOPIC_PATH) + response = _GAXPageIterator([snapshot_pb], page_token=NEW_TOKEN) + gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) + client = _Client(self.PROJECT) + creds = _make_credentials() + client = Client(project=self.PROJECT, credentials=creds) + api = self._make_one(gax_api, client) + + iterator = api.list_snapshots( + self.PROJECT, page_size=SIZE, page_token=TOKEN) + snapshots = list(iterator) + next_token = iterator.next_page_token + + # Check the token returned. + self.assertEqual(next_token, NEW_TOKEN) + # Check the snapshot object returned. 
+ self.assertEqual(len(snapshots), 1) + snapshot = snapshots[0] + self.assertIsInstance(snapshot, Snapshot) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertIsInstance(snapshot.topic, Topic) + self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) + self.assertIs(snapshot._client, client) + self.assertEqual(snapshot._project, self.PROJECT) + + def test_subscription_seek_hit(self): + gax_api = _GAXSubscriberAPI(_seek_ok=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + api.subscription_seek( + self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) + + subscription_path, time, snapshot_path, options = ( + gax_api._seek_called_with) + self.assertEqual(subscription_path, self.SUB_PATH) + self.assertEqual(time, self.TIME) + self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) + self.assertIsNone(options) + + def test_subscription_seek_miss(self): + from google.cloud.exceptions import NotFound + + gax_api = _GAXSubscriberAPI(_seek_ok=False) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(NotFound): + api.subscription_seek( + self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) + + subscription_path, time, snapshot_path, options = ( + gax_api._seek_called_with) + self.assertEqual(subscription_path, self.SUB_PATH) + self.assertEqual(time, self.TIME) + self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) + self.assertIsNone(options) + + def test_subscription_seek_error(self): + from google.gax.errors import GaxError + + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(GaxError): + api.subscription_seek( + self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) + + subscription_path, time, snapshot_path, options = ( + gax_api._seek_called_with) + self.assertEqual(subscription_path, self.SUB_PATH) + self.assertEqual(time, self.TIME) + 
self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) + self.assertIsNone(options) + + def test_snapshot_create(self): + from google.cloud.proto.pubsub.v1.pubsub_pb2 import Snapshot + + snapshot_pb = Snapshot(name=self.SNAPSHOT_PATH, topic=self.TOPIC_PATH) + gax_api = _GAXSubscriberAPI(_create_snapshot_response=snapshot_pb) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + expected = { + 'name': self.SNAPSHOT_PATH, + 'topic': self.TOPIC_PATH, + } + self.assertEqual(resource, expected) + name, subscription, options = ( + gax_api._create_snapshot_called_with) + self.assertEqual(name, self.SNAPSHOT_PATH) + self.assertEqual(subscription, self.SUB_PATH) + self.assertIsNone(options) + + def test_snapshot_create_already_exists(self): + from google.cloud.exceptions import Conflict + + gax_api = _GAXSubscriberAPI(_create_snapshot_conflict=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(Conflict): + api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + name, subscription, options = ( + gax_api._create_snapshot_called_with) + self.assertEqual(name, self.SNAPSHOT_PATH) + self.assertEqual(subscription, self.SUB_PATH) + self.assertIsNone(options) + + def test_snapshot_create_subscrption_miss(self): + from google.cloud.exceptions import NotFound + + gax_api = _GAXSubscriberAPI(_snapshot_create_subscription_miss=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(NotFound): + api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + name, subscription, options = ( + gax_api._create_snapshot_called_with) + self.assertEqual(name, self.SNAPSHOT_PATH) + self.assertEqual(subscription, self.SUB_PATH) + self.assertIsNone(options) + + def test_snapshot_create_error(self): + from google.gax.errors import GaxError + + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + client = 
_Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(GaxError): + api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + name, subscription, options = ( + gax_api._create_snapshot_called_with) + self.assertEqual(name, self.SNAPSHOT_PATH) + self.assertEqual(subscription, self.SUB_PATH) + self.assertIsNone(options) + + def test_snapshot_delete_hit(self): + gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + api.snapshot_delete(self.SNAPSHOT_PATH) + + snapshot_path, options = gax_api._delete_snapshot_called_with + self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) + self.assertIsNone(options) + + def test_snapshot_delete_miss(self): + from google.cloud.exceptions import NotFound + + gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=False) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(NotFound): + api.snapshot_delete(self.SNAPSHOT_PATH) + + snapshot_path, options = gax_api._delete_snapshot_called_with + self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) + self.assertIsNone(options) + + def test_snapshot_delete_error(self): + from google.gax.errors import GaxError + + gax_api = _GAXSubscriberAPI(_random_gax_error=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(GaxError): + api.snapshot_delete(self.SNAPSHOT_PATH) + + snapshot_path, options = gax_api._delete_snapshot_called_with + self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) + self.assertIsNone(options) + @unittest.skipUnless(_HAVE_GRPC, 'No gax-python') class Test_make_gax_publisher_api(_Base, unittest.TestCase): @@ -1148,11 +1384,13 @@ def list_topic_subscriptions(self, topic, page_size, options=None): class _GAXSubscriberAPI(_GAXBaseAPI): + _create_snapshot_conflict = False _create_subscription_conflict = False _modify_push_config_ok = False _acknowledge_ok = False 
_modify_ack_deadline_ok = False _deadline_exceeded_gax_error = False + _snapshot_create_subscription_miss=False def list_subscriptions(self, project, page_size, options=None): self._list_subscriptions_called_with = (project, page_size, options) @@ -1234,6 +1472,40 @@ def modify_ack_deadline(self, name, ack_ids, deadline, options=None): if not self._modify_ack_deadline_ok: raise GaxError('miss', self._make_grpc_not_found()) + def list_snapshots(self, project, page_size, options=None): + self._list_snapshots_called_with = (project, page_size, options) + return self._list_snapshots_response + + def create_snapshot(self, name, subscription, options=None): + from google.gax.errors import GaxError + + self._create_snapshot_called_with = (name, subscription, options) + if self._random_gax_error: + raise GaxError('error') + if self._create_snapshot_conflict: + raise GaxError('conflict', self._make_grpc_failed_precondition()) + if self._snapshot_create_subscription_miss: + raise GaxError('miss', self._make_grpc_not_found()) + + return self._create_snapshot_response + + def delete_snapshot(self, snapshot, options=None): + from google.gax.errors import GaxError + + self._delete_snapshot_called_with = (snapshot, options) + if self._random_gax_error: + raise GaxError('error') + if not self._delete_snapshot_ok: + raise GaxError('miss', self._make_grpc_not_found()) + + def seek(self, subscription, time=None, snapshot=None, options=None): + from google.gax.errors import GaxError + + self._seek_called_with = (subscription, time, snapshot, options) + if self._random_gax_error: + raise GaxError('error') + if not self._seek_ok: + raise GaxError('miss', self._make_grpc_not_found()) class _TopicPB(object): diff --git a/packages/google-cloud-pubsub/tests/unit/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py index 3f94b966dd66..d224f2c17359 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__http.py +++ b/packages/google-cloud-pubsub/tests/unit/test__http.py @@ 
-26,10 +26,13 @@ def _make_credentials(): class _Base(unittest.TestCase): PROJECT = 'PROJECT' LIST_TOPICS_PATH = 'projects/%s/topics' % (PROJECT,) + LIST_SNAPSHOTS_PATH = 'projects/%s/snapshots' % (PROJECT,) LIST_SUBSCRIPTIONS_PATH = 'projects/%s/subscriptions' % (PROJECT,) TOPIC_NAME = 'topic_name' TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) + SNAPSHOT_NAME = 'snapshot_name' + SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) SUB_NAME = 'subscription_name' SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) @@ -778,6 +781,174 @@ def test_subscription_modify_ack_deadline(self): self.assertEqual(connection._called_with['path'], path) self.assertEqual(connection._called_with['data'], BODY) + def test_list_snapshots_no_paging(self): + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.snapshot import Snapshot + + local_snapshot_path = 'projects/%s/snapshots/%s' % ( + self.PROJECT, self.SNAPSHOT_NAME) + local_topic_path = 'projects/%s/topics/%s' % ( + self.PROJECT, self.TOPIC_NAME) + RETURNED = {'snapshots': [{ + 'name': local_snapshot_path, + 'topic': local_topic_path, + }], + } + + connection = _Connection(RETURNED) + creds = _make_credentials() + client = Client(project=self.PROJECT, credentials=creds) + client._connection = connection + api = self._make_one(client) + + iterator = api.list_snapshots(self.PROJECT) + snapshots = list(iterator) + next_token = iterator.next_page_token + + self.assertIsNone(next_token) + self.assertEqual(len(snapshots), 1) + snapshot = snapshots[0] + self.assertIsInstance(snapshot, Snapshot) + self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) + self.assertIs(snapshot._client, client) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) + self.assertEqual(connection._called_with['path'], path) + 
self.assertEqual(connection._called_with['query_params'], {}) + + def test_list_snapshots_with_paging(self): + import six + + from google.cloud.pubsub.client import Client + from google.cloud.pubsub.snapshot import Snapshot + + TOKEN1 = 'TOKEN1' + TOKEN2 = 'TOKEN2' + SIZE = 1 + local_snapshot_path = 'projects/%s/snapshots/%s' % ( + self.PROJECT, self.SNAPSHOT_NAME) + local_topic_path = 'projects/%s/topics/%s' % ( + self.PROJECT, self.TOPIC_NAME) + RETURNED = { + 'snapshots': [{ + 'name': local_snapshot_path, + 'topic': local_topic_path, + }], + 'nextPageToken': TOKEN2, + } + + connection = _Connection(RETURNED) + creds = _make_credentials() + client = Client(project=self.PROJECT, credentials=creds) + client._connection = connection + api = self._make_one(client) + + iterator = api.list_snapshots( + self.PROJECT, page_token=TOKEN1, page_size=SIZE) + page = six.next(iterator.pages) + snapshots = list(page) + next_token = iterator.next_page_token + + self.assertEqual(next_token, TOKEN2) + self.assertEqual(len(snapshots), 1) + snapshot = snapshots[0] + self.assertIsInstance(snapshot, Snapshot) + self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) + self.assertIs(snapshot._client, client) + + self.assertEqual(connection._called_with['method'], 'GET') + path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['query_params'], + {'pageToken': TOKEN1, 'pageSize': SIZE}) + + def test_subscription_seek_snapshot(self): + local_snapshot_path = 'projects/%s/snapshots/%s' % ( + self.PROJECT, self.SNAPSHOT_NAME) + RETURNED = {} + BODY = { + 'snapshot': local_snapshot_path + } + connection = _Connection(RETURNED) + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + api.subscription_seek( + self.SUB_PATH, snapshot=local_snapshot_path) + + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:seek' % (self.SUB_PATH,) + 
self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_subscription_seek_time(self): + time = '12345' + RETURNED = {} + BODY = { + 'time': time + } + connection = _Connection(RETURNED) + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + api.subscription_seek(self.SUB_PATH, time=time) + + self.assertEqual(connection._called_with['method'], 'POST') + path = '/%s:seek' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_snapshot_create(self): + RETURNED = { + 'name': self.SNAPSHOT_PATH, + 'subscription': self.SUB_PATH + } + BODY = { + 'subscription': self.SUB_PATH + } + connection = _Connection(RETURNED) + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.SNAPSHOT_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_snapshot_create_already_exists(self): + from google.cloud.exceptions import NotFound + + BODY = { + 'subscription': self.SUB_PATH + } + connection = _Connection() + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + with self.assertRaises(NotFound): + resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.SNAPSHOT_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], BODY) + + def test_snapshot_delete(self): + RETURNED = {} + connection = _Connection(RETURNED) + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + api.snapshot_delete(self.SNAPSHOT_PATH) + + 
self.assertEqual(connection._called_with['method'], 'DELETE') + path = '/%s' % (self.SNAPSHOT_PATH,) + self.assertEqual(connection._called_with['path'], path) + class Test_IAMPolicyAPI(_Base): diff --git a/packages/google-cloud-pubsub/tests/unit/test_client.py b/packages/google-cloud-pubsub/tests/unit/test_client.py index d3aa25378b38..e251a0632dc8 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_client.py +++ b/packages/google-cloud-pubsub/tests/unit/test_client.py @@ -379,7 +379,17 @@ def test_topic(self): 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) self.assertFalse(new_topic.timestamp_messages) + def test_list_snapshots(self): + creds = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=creds) + client._connection = object() + api = _FauxSubscriberAPI() + response = api._list_snapshots_response = object() + client._subscriber_api = api + self.assertEqual(client.list_snapshots(), response) + self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) + class _Iterator(object): def __init__(self, items, token): @@ -407,6 +417,9 @@ def list_subscriptions(self, project, page_size, page_token): self._listed_subscriptions = (project, page_size, page_token) return self._list_subscriptions_response + def list_snapshots(self, project, page_size, page_token): + self._listed_snapshots = (project, page_size, page_token) + return self._list_snapshots_response class _Connection(object): diff --git a/packages/google-cloud-pubsub/tests/unit/test_snpashot.py b/packages/google-cloud-pubsub/tests/unit/test_snpashot.py new file mode 100644 index 000000000000..5834a1fedd89 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/test_snpashot.py @@ -0,0 +1,215 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import unittest + +import mock + + +class TestSnapshot(unittest.TestCase): + PROJECT = 'PROJECT' + SNAPSHOT_NAME = 'snapshot_name' + SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) + SUB_NAME = 'subscription_name' + SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) + TOPIC_NAME = 'topic_name' + TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + + @staticmethod + def _get_target_class(): + from google.cloud.pubsub.snapshot import Snapshot + + return Snapshot + + def _make_one(self, *args, **kw): + return self._get_target_class()(*args, **kw) + + def test_ctor(self): + client = _Client(project=self.PROJECT) + snapshot = self._make_one(self.SNAPSHOT_NAME, + client=client) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertEqual(snapshot.project, self.PROJECT) + self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) + self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) + + def test_ctor_w_subscription(self): + client = _Client(project=self.PROJECT) + subscription = _Subscription(name=self.SUB_NAME, client=client) + snapshot = self._make_one(self.SNAPSHOT_NAME, + subscription=subscription) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertEqual(snapshot.project, self.PROJECT) + self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) + self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) + + def test_ctor_error(self): + client = _Client(project=self.PROJECT) + subscription = _Subscription(name=self.SUB_NAME, client=client) + with 
self.assertRaises(TypeError): + snapshot = self._make_one(self.SNAPSHOT_NAME, + client=client, + subscription=subscription) + + def test_from_api_repr_no_topics(self): + from google.cloud.pubsub.topic import Topic + + client = _Client(project=self.PROJECT) + resource = { + 'name': self.SNAPSHOT_PATH, + 'topic': self.TOPIC_PATH + } + klass = self._get_target_class() + snapshot = klass.from_api_repr(resource, client=client) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertIs(snapshot._client, client) + self.assertEqual(snapshot.project, self.PROJECT) + self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) + self.assertIsInstance(snapshot.topic, Topic) + + def test_from_api_repr_w_deleted_topic(self): + client = _Client(project=self.PROJECT) + klass = self._get_target_class() + resource = { + 'name': self.SNAPSHOT_PATH, + 'topic': klass._DELETED_TOPIC_PATH + } + snapshot = klass.from_api_repr(resource, client=client) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertIs(snapshot._client, client) + self.assertEqual(snapshot.project, self.PROJECT) + self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) + self.assertIsNone(snapshot.topic) + + def test_from_api_repr_w_topics_w_no_topic_match(self): + from google.cloud.pubsub.topic import Topic + + client = _Client(project=self.PROJECT) + klass = self._get_target_class() + resource = { + 'name': self.SNAPSHOT_PATH, + 'topic': self.TOPIC_PATH + } + topics = {} + snapshot = klass.from_api_repr(resource, client=client, topics=topics) + topic = snapshot.topic + self.assertIsInstance(topic, Topic) + self.assertIs(topic, topics[self.TOPIC_PATH]) + self.assertEqual(topic.name, self.TOPIC_NAME) + self.assertEqual(topic.project, self.PROJECT) + + def test_from_api_repr_w_topics_w_topic_match(self): + from google.cloud.pubsub.topic import Topic + + client = _Client(project=self.PROJECT) + klass = self._get_target_class() + resource = { + 'name': self.SNAPSHOT_PATH, + 'topic': self.TOPIC_PATH + 
} + topic = _Topic(self.TOPIC_NAME, client=client) + topics = {self.TOPIC_PATH: topic} + snapshot = klass.from_api_repr(resource, client=client, topics=topics) + self.assertIs(snapshot.topic, topic) + + def test_create_w_bound_client_error(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscriberAPI() + expected_response = api._snapshot_create_response = object() + snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) + + with self.assertRaises(RuntimeError): + snapshot.create() + + def test_create_w_bound_subscription(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscriberAPI() + expected_result = api._snapshot_create_response = object() + subscription = _Subscription(name=self.SUB_NAME, client=client) + snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) + + snapshot.create() + + self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) + + def test_create_w_bound_subscription_w_alternate_client(self): + client = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscriberAPI() + expected_result = api._snapshot_create_response = object() + subscription = _Subscription(name=self.SUB_NAME, client=client) + snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) + + snapshot.create(client=client2) + + self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) + + def test_delete_w_bound_client(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscriberAPI() + expected_result = api._snapshot_create_response = object() + snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) + + snapshot.delete() + + self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) + + def test_delete_w_alternate_client(self): + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscriberAPI() + 
expected_result = api._snapshot_create_response = object() + subscription = _Subscription(name=self.SUB_NAME, client=client) + snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) + + snapshot.delete() + + self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) + + +class _Client(object): + + connection = None + + def __init__(self, project): + self.project = project + + def topic(self, name): + from google.cloud.pubsub.topic import Topic + + return Topic(name, client=self) + + +class _Topic(object): + + def __init__(self, name, client): + self._client = client + + +class _Subscription(object): + + def __init__(self, name, client=None): + self._client = client + self.full_name = 'projects/%s/subscriptions/%s' % ( + client.project, name, ) + + +class _FauxSubscriberAPI(object): + + def snapshot_create(self, snapshot_path, subscription_path): + self._snapshot_created = (snapshot_path, subscription_path, ) + + def snapshot_delete(self, snapshot_path): + self._snapshot_deleted = (snapshot_path, ) + + diff --git a/packages/google-cloud-pubsub/tests/unit/test_subscription.py b/packages/google-cloud-pubsub/tests/unit/test_subscription.py index 42fb23d9ae68..89b6bb8d9d94 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_subscription.py +++ b/packages/google-cloud-pubsub/tests/unit/test_subscription.py @@ -21,6 +21,8 @@ class TestSubscription(unittest.TestCase): PROJECT = 'PROJECT' TOPIC_NAME = 'topic_name' TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) + SNAPSHOT_NAME = 'snapshot_name' + SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) SUB_NAME = 'sub_name' SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) DEADLINE = 42 @@ -456,6 +458,87 @@ def test_modify_ack_deadline_w_alternate_client(self): self.assertEqual(api._subscription_modified_ack_deadline, (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) + def test_snapshot(self): + from google.cloud.pubsub.snapshot import Snapshot + + 
client = _Client(project=self.PROJECT) + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._make_one(self.SUB_NAME, topic) + + snapshot = subscription.snapshot(self.SNAPSHOT_NAME) + self.assertIsInstance(snapshot, Snapshot) + self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) + self.assertIs(snapshot.topic, topic) + + def test_seek_snapshot_w_bound_client(self): + from google.cloud.pubsub.snapshot import Snapshot + + client = _Client(project=self.PROJECT) + snapshot = Snapshot + snapshot = Snapshot(self.SNAPSHOT_NAME, client=client) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_seek_response = {} + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._make_one(self.SUB_NAME, topic) + + subscription.seek_snapshot(snapshot) + + self.assertEqual(api._subscription_seeked, + (self.SUB_PATH, None, self.SNAPSHOT_PATH)) + + def test_seek_snapshot_w_alternate_client(self): + from google.cloud.pubsub.snapshot import Snapshot + + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + snapshot = Snapshot(self.SNAPSHOT_NAME, client=client1) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_seek_response = {} + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._make_one(self.SUB_NAME, topic) + + subscription.seek_snapshot(snapshot, client=client2) + + self.assertEqual(api._subscription_seeked, + (self.SUB_PATH, None, self.SNAPSHOT_PATH)) + + def test_seek_time_w_bound_client(self): + import datetime + + from google.cloud import _helpers + + time = datetime.time() + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_seek_response = {} + topic = _Topic(self.TOPIC_NAME, client=client) + subscription = self._make_one(self.SUB_NAME, topic) + + subscription.seek_timestamp(time) + + self.assertEqual( + api._subscription_seeked, + (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), 
None)) + + def test_seek_time_w_alternate_client(self): + import datetime + + from google.cloud import _helpers + + time = datetime.time() + client1 = _Client(project=self.PROJECT) + client2 = _Client(project=self.PROJECT) + api = client2.subscriber_api = _FauxSubscribererAPI() + api._subscription_seek_response = {} + topic = _Topic(self.TOPIC_NAME, client=client1) + subscription = self._make_one(self.SUB_NAME, topic) + + subscription.seek_timestamp(time, client=client2) + + self.assertEqual( + api._subscription_seeked, + (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) + def test_get_iam_policy_w_bound_client(self): from google.cloud.pubsub.iam import ( PUBSUB_ADMIN_ROLE, @@ -682,6 +765,11 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, subscription_path, ack_ids, ack_deadline) return self._subscription_modify_ack_deadline_response + def subscription_seek(self, subscription_path, time=None, snapshot=None): + self._subscription_seeked = ( + subscription_path, time, snapshot) + return self._subscription_seek_response + class TestAutoAck(unittest.TestCase): From 4e7517b15fa0da867722b909923e0bd4ce9e4fb1 Mon Sep 17 00:00:00 2001 From: Jacob Geiger Date: Mon, 17 Apr 2017 16:02:56 -0700 Subject: [PATCH 0090/1197] Add new subscription fields Also simplify a helper method to use built-in logic from google.protobuf to convert between Duration and timedelta --- .../google/cloud/pubsub/_gax.py | 26 +++++-- .../google/cloud/pubsub/_http.py | 27 +++++++- .../google/cloud/pubsub/subscription.py | 39 +++++++++-- .../google/cloud/pubsub/topic.py | 22 +++++- packages/google-cloud-pubsub/tests/system.py | 24 +++++++ .../tests/unit/test__gax.py | 67 ++++++++++++++++--- .../tests/unit/test__http.py | 28 ++++++++ .../tests/unit/test_subscription.py | 15 +++-- 8 files changed, 223 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 
599c99ef1d44..6b47b42524a3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -30,6 +30,7 @@ from google.cloud._helpers import _to_bytes from google.cloud._helpers import _pb_timestamp_to_rfc3339 +from google.cloud._helpers import _timedelta_to_duration_pb from google.cloud._helpers import make_secure_channel from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict @@ -276,7 +277,9 @@ def list_subscriptions(self, project, page_size=0, page_token=None): return GAXIterator(self._client, page_iter, item_to_value) def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None): + ack_deadline=None, push_endpoint=None, + retain_acked_messages=None, + message_retention_duration=None): """API call: create a subscription See: @@ -302,6 +305,18 @@ def subscription_create(self, subscription_path, topic_path, (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. + :type retain_acked_messages: bool + :param retain_acked_messages: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. + + :type message_retention_duration: :class:`datetime.timedelta` + :param message_retention_duration: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. If unset, defaults to 7 days. + :rtype: dict :returns: ``Subscription`` resource returned from the API. 
""" @@ -310,13 +325,16 @@ def subscription_create(self, subscription_path, topic_path, else: push_config = None - if ack_deadline is None: - ack_deadline = 0 + if message_retention_duration is not None: + message_retention_duration = _timedelta_to_duration_pb( + message_retention_duration) try: sub_pb = self._gax_api.create_subscription( subscription_path, topic_path, - push_config=push_config, ack_deadline_seconds=ack_deadline) + push_config=push_config, ack_deadline_seconds=ack_deadline, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) except GaxError as exc: if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: raise Conflict(topic_path) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index e9538dce22d4..47fa7015c60d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -20,6 +20,7 @@ import os from google.cloud import _http +from google.cloud._helpers import _timedelta_to_duration_pb from google.cloud.environment_vars import PUBSUB_EMULATOR from google.cloud.iterator import HTTPIterator @@ -295,7 +296,9 @@ def list_subscriptions(self, project, page_size=None, page_token=None): extra_params=extra_params) def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None): + ack_deadline=None, push_endpoint=None, + retain_acked_messages=None, + message_retention_duration=None): """API call: create a subscription See: @@ -321,6 +324,18 @@ def subscription_create(self, subscription_path, topic_path, (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. + :type retain_acked_messages: bool + :param retain_acked_messages: + (Optional) Whether to retain acked messages. 
If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. + + :type message_retention_duration: :class:`datetime.timedelta` + :param message_retention_duration: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. If unset, defaults to 7 days. + :rtype: dict :returns: ``Subscription`` resource returned from the API. """ @@ -333,6 +348,16 @@ def subscription_create(self, subscription_path, topic_path, if push_endpoint is not None: resource['pushConfig'] = {'pushEndpoint': push_endpoint} + if retain_acked_messages is not None: + resource['retainAckedMessages'] = retain_acked_messages + + if message_retention_duration is not None: + pb = _timedelta_to_duration_pb(message_retention_duration) + resource['messageRetentionDuration'] = { + 'seconds': pb.seconds, + 'nanos': pb.nanos + } + return self.api_request(method='PUT', path=path, data=resource) def subscription_get(self, subscription_path): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index 19089380cf41..100ac13474b6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -14,6 +14,8 @@ """Define API Subscriptions.""" +import datetime + from google.cloud.exceptions import NotFound from google.cloud.pubsub._helpers import topic_name_from_path from google.cloud.pubsub.iam import Policy @@ -43,6 +45,19 @@ class Subscription(object): (Optional) URL to which messages will be pushed by the back-end. If not set, the application must pull messages. + :type retain_acked_messages: bool + :param retain_acked_messages: + (Optional) Whether to retain acked messages. 
If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. + + :type message_retention_duration: :class:`datetime.timedelta` + :param message_retention_duration: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. If unset, defaults to 7 days. + + :type client: :class:`~google.cloud.pubsub.client.Client` :param client: (Optional) The client to use. If not passed, falls back to the @@ -57,6 +72,7 @@ class Subscription(object): """ def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, + retain_acked_messages=None, message_retention_duration=None, client=None): if client is None and topic is None: @@ -71,6 +87,8 @@ def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, self._project = self._client.project self.ack_deadline = ack_deadline self.push_endpoint = push_endpoint + self.retain_acked_messages = retain_acked_messages + self.message_retention_duration = message_retention_duration @classmethod def from_api_repr(cls, resource, client, topics=None): @@ -107,10 +125,21 @@ def from_api_repr(cls, resource, client, topics=None): ack_deadline = resource.get('ackDeadlineSeconds') push_config = resource.get('pushConfig', {}) push_endpoint = push_config.get('pushEndpoint') + retain_acked_messages = resource.get('retainAckedMessages') + resource_duration = resource.get('duration', {}) + message_retention_duration = datetime.timedelta( + seconds=resource_duration.get('seconds', 0), + microseconds=resource_duration.get('nanos', 0) / 1000) if topic is None: return cls(name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, client=client) - return cls(name, topic, ack_deadline, push_endpoint) + push_endpoint=push_endpoint, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration, + 
client=client) + return cls(name, topic=topic, ack_deadline=ack_deadline, + push_endpoint=push_endpoint, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) @property def project(self): @@ -182,8 +211,10 @@ def create(self, client=None): client = self._require_client(client) api = client.subscriber_api api.subscription_create( - self.full_name, self.topic.full_name, self.ack_deadline, - self.push_endpoint) + self.full_name, self.topic.full_name, + ack_deadline=self.ack_deadline, push_endpoint=self.push_endpoint, + retain_acked_messages=self.retain_acked_messages, + message_retention_duration=self.message_retention_duration) def exists(self, client=None): """API call: test existence of the subscription via a GET request diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 5490617a3ea5..4e038370c535 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -52,7 +52,9 @@ def __init__(self, name, client, timestamp_messages=False): self._client = client self.timestamp_messages = timestamp_messages - def subscription(self, name, ack_deadline=None, push_endpoint=None): + def subscription(self, name, ack_deadline=None, push_endpoint=None, + retain_acked_messages=None, + message_retention_duration=None): """Creates a subscription bound to the current topic. Example: pull-mode subcription, default paramter values @@ -85,11 +87,25 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None): back-end. If not set, the application must pull messages. + :type retain_acked_messages: bool + :param retain_acked_messages: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. 
+ + :type message_retention_duration: :class:`datetime.timedelta` + :param message_retention_duration: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by `message_retention_duration`. If unset, defaults to 7 days. + :rtype: :class:`Subscription` :returns: The subscription created with the passed in arguments. """ - return Subscription(name, self, ack_deadline=ack_deadline, - push_endpoint=push_endpoint) + return Subscription( + name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) @classmethod def from_api_repr(cls, resource, client): diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 54c4ba7a895c..36ca95b242a0 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime import os import unittest @@ -155,6 +156,26 @@ def test_create_subscription_w_ack_deadline(self): self.assertEqual(subscription.ack_deadline, 120) self.assertIs(subscription.topic, topic) + def test_create_subscription_w_message_retention(self): + TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') + topic = Config.CLIENT.topic(TOPIC_NAME) + self.assertFalse(topic.exists()) + topic.create() + self.to_delete.append(topic) + SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() + duration = datetime.timedelta(hours=12) + subscription = topic.subscription( + SUBSCRIPTION_NAME, retain_acked_messages=True, + message_retention_duration=duration) + self.assertFalse(subscription.exists()) + subscription.create() + self.to_delete.append(subscription) + self.assertTrue(subscription.exists()) + self.assertEqual(subscription.name, SUBSCRIPTION_NAME) + self.assertTrue(subscription.retain_acked_messages) + self.assertEqual(subscription.message_retention_duration, duration) + self.assertIs(subscription.topic, topic) + def test_list_subscriptions(self): TOPIC_NAME = 'list-sub' + unique_resource_id('-') topic = Config.CLIENT.topic(TOPIC_NAME) @@ -283,3 +304,6 @@ def test_subscription_iam_policy(self): policy.viewers.add(policy.user('jjg@google.com')) new_policy = subscription.set_iam_policy(policy) self.assertEqual(new_policy.viewers, policy.viewers) + + # TODO(geigerj): set retain_acked_messages=True in snapshot system test once + # PR #3303 is merged diff --git a/packages/google-cloud-pubsub/tests/unit/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py index 8a41d3ff70f8..2da629e92bc8 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__gax.py +++ b/packages/google-cloud-pubsub/tests/unit/test__gax.py @@ -543,12 +543,54 @@ def test_subscription_create(self): 'topic': self.TOPIC_PATH, } self.assertEqual(resource, expected) - name, topic, push_config, ack_deadline, options = ( + (name, topic, push_config, ack_deadline, 
retain_acked_messages, + message_retention_duration, options) = ( gax_api._create_subscription_called_with) self.assertEqual(name, self.SUB_PATH) self.assertEqual(topic, self.TOPIC_PATH) self.assertIsNone(push_config) - self.assertEqual(ack_deadline, 0) + self.assertEqual(ack_deadline, None) + self.assertIsNone(retain_acked_messages) + self.assertIsNone(message_retention_duration) + self.assertIsNone(options) + + def test_subscription_create_optional_params(self): + import datetime + + from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription + + sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) + gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + expected_ack_deadline = 1729 + expected_push_endpoint = 'push-endpoint' + expected_retain_acked_messages = True + expected_message_retention_duration = datetime.timedelta( + days=1, hours=7, minutes=2, seconds=9) + + resource = api.subscription_create( + self.SUB_PATH, self.TOPIC_PATH, ack_deadline=expected_ack_deadline, + push_endpoint=expected_push_endpoint, + retain_acked_messages=expected_retain_acked_messages, + message_retention_duration=expected_message_retention_duration) + + expected = { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + } + self.assertEqual(resource, expected) + (name, topic, push_config, ack_deadline, retain_acked_messages, + message_retention_duration, options) = ( + gax_api._create_subscription_called_with) + print(gax_api._create_subscription_called_with) + self.assertEqual(name, self.SUB_PATH) + self.assertEqual(topic, self.TOPIC_PATH) + self.assertEqual(push_config.push_endpoint, expected_push_endpoint) + self.assertEqual(ack_deadline, expected_ack_deadline) + self.assertEqual(retain_acked_messages, expected_retain_acked_messages) + self.assertEqual(message_retention_duration.seconds, + expected_message_retention_duration.total_seconds()) self.assertIsNone(options) def 
test_subscription_create_already_exists(self): @@ -563,12 +605,15 @@ def test_subscription_create_already_exists(self): api.subscription_create( self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - name, topic, push_config, ack_deadline, options = ( + (name, topic, push_config, ack_deadline, retain_acked_messages, + message_retention_duration, options) = ( gax_api._create_subscription_called_with) self.assertEqual(name, self.SUB_PATH) self.assertEqual(topic, self.TOPIC_PATH) self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) self.assertEqual(ack_deadline, DEADLINE) + self.assertIsNone(retain_acked_messages) + self.assertIsNone(message_retention_duration) self.assertIsNone(options) def test_subscription_create_error(self): @@ -581,12 +626,15 @@ def test_subscription_create_error(self): with self.assertRaises(GaxError): api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - name, topic, push_config, ack_deadline, options = ( + (name, topic, push_config, ack_deadline, retain_acked_messages, + message_retention_duration, options) = ( gax_api._create_subscription_called_with) self.assertEqual(name, self.SUB_PATH) self.assertEqual(topic, self.TOPIC_PATH) self.assertIsNone(push_config) - self.assertEqual(ack_deadline, 0) + self.assertEqual(ack_deadline, None) + self.assertIsNone(retain_acked_messages) + self.assertIsNone(message_retention_duration) self.assertIsNone(options) def test_subscription_get_hit(self): @@ -1158,13 +1206,16 @@ def list_subscriptions(self, project, page_size, options=None): self._list_subscriptions_called_with = (project, page_size, options) return self._list_subscriptions_response - def create_subscription(self, name, topic, - push_config, ack_deadline_seconds, + def create_subscription(self, name, topic, push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, options=None): from google.gax.errors import GaxError self._create_subscription_called_with = ( - name, 
topic, push_config, ack_deadline_seconds, options) + name, topic, push_config, ack_deadline_seconds, + retain_acked_messages, message_retention_duration, options) if self._random_gax_error: raise GaxError('error') if self._create_subscription_conflict: diff --git a/packages/google-cloud-pubsub/tests/unit/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py index 3f94b966dd66..8d6d6f05fcf6 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__http.py +++ b/packages/google-cloud-pubsub/tests/unit/test__http.py @@ -607,6 +607,34 @@ def test_subscription_create_defaults(self): self.assertEqual(connection._called_with['path'], path) self.assertEqual(connection._called_with['data'], RESOURCE) + def test_subscription_create_retain_messages(self): + import datetime + + RESOURCE = {'topic': self.TOPIC_PATH, + 'retainAckedMessages': True, + 'messageRetentionDuration': { + 'seconds': 1729, + 'nanos': 2718 * 1000 + } + } + RETURNED = RESOURCE.copy() + RETURNED['name'] = self.SUB_PATH + connection = _Connection(RETURNED) + client = _Client(connection, self.PROJECT) + api = self._make_one(client) + + resource = api.subscription_create( + self.SUB_PATH, self.TOPIC_PATH, + retain_acked_messages=True, + message_retention_duration=datetime.timedelta( + seconds=1729, microseconds=2718)) + + self.assertEqual(resource, RETURNED) + self.assertEqual(connection._called_with['method'], 'PUT') + path = '/%s' % (self.SUB_PATH,) + self.assertEqual(connection._called_with['path'], path) + self.assertEqual(connection._called_with['data'], RESOURCE) + def test_subscription_create_explicit(self): ACK_DEADLINE = 90 PUSH_ENDPOINT = 'https://api.example.com/push' diff --git a/packages/google-cloud-pubsub/tests/unit/test_subscription.py b/packages/google-cloud-pubsub/tests/unit/test_subscription.py index 42fb23d9ae68..a76516ec63c1 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_subscription.py +++ b/packages/google-cloud-pubsub/tests/unit/test_subscription.py @@ -189,8 
+189,9 @@ def test_create_pull_wo_ack_deadline_w_bound_client(self): subscription.create() - self.assertEqual(api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, None, None)) + self.assertEqual( + api._subscription_created, + (self.SUB_PATH, self.TOPIC_PATH, None, None, None, None)) def test_create_push_w_ack_deadline_w_alternate_client(self): RESPONSE = { @@ -211,7 +212,8 @@ def test_create_push_w_ack_deadline_w_alternate_client(self): self.assertEqual( api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT)) + (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT, + None, None)) def test_exists_miss_w_bound_client(self): client = _Client(project=self.PROJECT) @@ -642,9 +644,12 @@ def test_check_iam_permissions_w_alternate_client(self): class _FauxSubscribererAPI(object): def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None): + ack_deadline=None, push_endpoint=None, + retain_acked_messages=None, + message_retention_duration=None): self._subscription_created = ( - subscription_path, topic_path, ack_deadline, push_endpoint) + subscription_path, topic_path, ack_deadline, push_endpoint, + retain_acked_messages, message_retention_duration) return self._subscription_create_response def subscription_get(self, subscription_path): From 4206768cf97627f2535df821c035c084e7edc780 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 19 Apr 2017 11:16:06 -0400 Subject: [PATCH 0091/1197] Store policy bindings as sets, not frozensets. (#3308) The legacy accessors still return frozensets, as they cannot safely be mutated in plcae. 
--- .../google/cloud/pubsub/iam.py | 4 +- .../tests/unit/test_iam.py | 37 +++++++++---------- 2 files changed, 20 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index e92f2151dc05..9c7e46af222a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -121,7 +121,7 @@ def publishers(self, value): _ASSIGNMENT_DEPRECATED_MSG.format( 'publishers', PUBSUB_PUBLISHER_ROLE), DeprecationWarning) - self._bindings[PUBSUB_PUBLISHER_ROLE] = list(value) + self[PUBSUB_PUBLISHER_ROLE] = value @property def subscribers(self): @@ -135,4 +135,4 @@ def subscribers(self, value): _ASSIGNMENT_DEPRECATED_MSG.format( 'subscribers', PUBSUB_SUBSCRIBER_ROLE), DeprecationWarning) - self._bindings[PUBSUB_SUBSCRIBER_ROLE] = list(value) + self[PUBSUB_SUBSCRIBER_ROLE] = value diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py index 3bf4aaa922f0..475d375d0cd8 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ b/packages/google-cloud-pubsub/tests/unit/test_iam.py @@ -27,31 +27,28 @@ def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) def test_ctor_defaults(self): + empty = frozenset() policy = self._make_one() self.assertIsNone(policy.etag) self.assertIsNone(policy.version) - self.assertIsInstance(policy.owners, frozenset) - self.assertEqual(list(policy.owners), []) - self.assertIsInstance(policy.editors, frozenset) - self.assertEqual(list(policy.editors), []) - self.assertIsInstance(policy.viewers, frozenset) - self.assertEqual(list(policy.viewers), []) - self.assertIsInstance(policy.publishers, frozenset) - self.assertEqual(list(policy.publishers), []) - self.assertIsInstance(policy.subscribers, frozenset) - self.assertEqual(list(policy.subscribers), []) + self.assertEqual(policy.owners, empty) + 
self.assertEqual(policy.editors, empty) + self.assertEqual(policy.viewers, empty) + self.assertEqual(policy.publishers, empty) + self.assertEqual(policy.subscribers, empty) def test_ctor_explicit(self): VERSION = 17 ETAG = 'ETAG' + empty = frozenset() policy = self._make_one(ETAG, VERSION) self.assertEqual(policy.etag, ETAG) self.assertEqual(policy.version, VERSION) - self.assertEqual(list(policy.owners), []) - self.assertEqual(list(policy.editors), []) - self.assertEqual(list(policy.viewers), []) - self.assertEqual(list(policy.publishers), []) - self.assertEqual(list(policy.subscribers), []) + self.assertEqual(policy.owners, empty) + self.assertEqual(policy.editors, empty) + self.assertEqual(policy.viewers, empty) + self.assertEqual(policy.publishers, empty) + self.assertEqual(policy.subscribers, empty) def test_publishers_setter(self): import warnings @@ -59,13 +56,14 @@ def test_publishers_setter(self): PUBSUB_PUBLISHER_ROLE, ) PUBLISHER = 'user:phred@example.com' + expected = set([PUBLISHER]) policy = self._make_one() with warnings.catch_warnings(): policy.publishers = [PUBLISHER] - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) + self.assertEqual(policy.publishers, frozenset(expected)) self.assertEqual( - dict(policy), {PUBSUB_PUBLISHER_ROLE: [PUBLISHER]}) + dict(policy), {PUBSUB_PUBLISHER_ROLE: expected}) def test_subscribers_setter(self): import warnings @@ -73,10 +71,11 @@ def test_subscribers_setter(self): PUBSUB_SUBSCRIBER_ROLE, ) SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' + expected = set([SUBSCRIBER]) policy = self._make_one() with warnings.catch_warnings(): policy.subscribers = [SUBSCRIBER] - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) + self.assertEqual(policy.subscribers, frozenset(expected)) self.assertEqual( - dict(policy), {PUBSUB_SUBSCRIBER_ROLE: [SUBSCRIBER]}) + dict(policy), {PUBSUB_SUBSCRIBER_ROLE: expected}) From 4cd7aadeb591e4aef0114c4e5b5af0b4575c98d5 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer 
Date: Wed, 19 Apr 2017 08:47:44 -0700 Subject: [PATCH 0092/1197] Change if/elif to if/if. --- packages/google-cloud-pubsub/google/cloud/pubsub/_http.py | 2 +- packages/google-cloud-pubsub/tests/unit/test__http.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index 21bacac9cdb6..7fa1776050fd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -489,7 +489,7 @@ def subscription_seek(self, subscription_path, time=None, snapshot=None): data = {} if time is not None: data['time'] = time - elif snapshot is not None: + if snapshot is not None: data['snapshot'] = snapshot self.api_request(method='POST', path=path, data=data) diff --git a/packages/google-cloud-pubsub/tests/unit/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py index d224f2c17359..be5a2f7f3175 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__http.py +++ b/packages/google-cloud-pubsub/tests/unit/test__http.py @@ -819,7 +819,7 @@ def test_list_snapshots_no_paging(self): def test_list_snapshots_with_paging(self): import six - + from google.cloud.pubsub.client import Client from google.cloud.pubsub.snapshot import Snapshot From 501b4d8fb933caaf64dcbc4af9d6653f2946dd90 Mon Sep 17 00:00:00 2001 From: Jacob Geiger Date: Wed, 19 Apr 2017 12:02:13 -0700 Subject: [PATCH 0093/1197] Fix a system test TODO (#3314) This should prevent the (as of yet, unexperienced) error case where the API deletes the acked messages before we have a chance to seek to them. 
--- packages/google-cloud-pubsub/tests/system.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 41ae43f1f55d..acdbde0dffca 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -360,7 +360,8 @@ def test_seek(self): self.to_delete.append(topic) SUBSCRIPTION_NAME = 'subscribing-to-seek' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) + subscription = topic.subscription( + SUBSCRIPTION_NAME, retain_acked_messages=True) self.assertFalse(subscription.exists()) subscription.create() self.to_delete.append(subscription) From 4da4b4b6d3ff9302f26d42c5f306c403885633e4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Apr 2017 13:00:32 -0700 Subject: [PATCH 0094/1197] Adding check that **all** setup.py README's are valid RST. (#3318) * Adding check that **all** setup.py README's are valid RST. Follow up to #3316. Fixes #2446. * Fixing duplicate reference in Logging README. * Fixing duplicate reference in Monitoring README. --- packages/google-cloud-pubsub/nox.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index c9300982d6b8..209ed41f9bfc 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -77,6 +77,15 @@ def lint(session): session.run('flake8', 'google/cloud/pubsub') +@nox.session +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.interpreter = 'python3.6' + session.install('docutils', 'Pygments') + session.run( + 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + + @nox.session def cover(session): """Run the final coverage report. 
From c8692f697e6585f36938680526451505fa413586 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 21 Apr 2017 10:03:56 -0700 Subject: [PATCH 0095/1197] Ignore tests (rather than unit_tests) in setup.py files. (#3319) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index d18e07fcae65..fa6c6e8447f5 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -65,7 +65,7 @@ 'google', 'google.cloud', ], - packages=find_packages(exclude=('unit_tests*',)), + packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, **SETUP_BASE ) From 7aa659621923dcd83a2265e450cf2c798b4121b2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 28 Apr 2017 11:15:27 -0700 Subject: [PATCH 0096/1197] Cut releases of core, error reporting, pubsub, spanner and storage. (#3340) Also updating the umbrella/uber package along the way. --- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index fa6c6e8447f5..b1b1375ed870 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.0, < 0.25dev', + 'google-cloud-core >= 0.24.1, < 0.25dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-pubsub', - version='0.24.0', + version='0.25.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 0354e2652921cdc189339f93c3c0865d27569234 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 4 May 2017 12:40:39 -0700 Subject: [PATCH 0097/1197] Fixing paramter typo in a few places. 
--- packages/google-cloud-pubsub/google/cloud/pubsub/topic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 4e038370c535..f16c9d99baed 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -57,7 +57,7 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None, message_retention_duration=None): """Creates a subscription bound to the current topic. - Example: pull-mode subcription, default paramter values + Example: pull-mode subcription, default parameter values .. literalinclude:: pubsub_snippets.py :start-after: [START topic_subscription_defaults] From 11c976fb3ef84ae8c8f9307407c95fcd6ef4374f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 4 May 2017 11:35:09 -0700 Subject: [PATCH 0098/1197] Adding subscription factory on Pub / Sub client. Fixes #3369. --- .../google/cloud/pubsub/client.py | 47 +++++++++++++++++ .../tests/unit/test_client.py | 50 +++++++++++++++---- 2 files changed, 86 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index cccecd27f4c4..1b38780da7fe 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -22,6 +22,7 @@ from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI from google.cloud.pubsub._http import _SubscriberAPI as JSONSubscriberAPI from google.cloud.pubsub._http import _IAMPolicyAPI +from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic try: @@ -225,6 +226,7 @@ def topic(self, name, timestamp_messages=False): .. 
literalinclude:: pubsub_snippets.py :start-after: [START client_topic] :end-before: [END client_topic] + :dedent: 4 :type name: str :param name: the name of the topic to be constructed. @@ -236,3 +238,48 @@ def topic(self, name, timestamp_messages=False): :returns: Topic created with the current client. """ return Topic(name, client=self, timestamp_messages=timestamp_messages) + + def subscription(self, name, ack_deadline=None, push_endpoint=None, + retain_acked_messages=None, + message_retention_duration=None): + """Creates a subscription bound to the current client. + + Example: + + .. literalinclude:: pubsub_snippets.py + :start-after: [START client_subscription] + :end-before: [END client_subscription] + :dedent: 4 + + :type name: str + :param name: the name of the subscription to be constructed. + + :type ack_deadline: int + :param ack_deadline: (Optional) The deadline (in seconds) by which + messages pulledfrom the back-end must be + acknowledged. + + :type push_endpoint: str + :param push_endpoint: + (Optional) URL to which messages will be pushed by the back-end. + If not set, the application must pull messages. + + :type retain_acked_messages: bool + :param retain_acked_messages: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by ``message_retention_duration``. + + :type message_retention_duration: :class:`datetime.timedelta` + :param message_retention_duration: + (Optional) Whether to retain acked messages. If set, acked messages + are retained in the subscription's backlog for a duration indicated + by ``message_retention_duration``. If unset, defaults to 7 days. + + :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` + :returns: Subscription created with the current client. 
+ """ + return Subscription( + name, ack_deadline=ack_deadline, push_endpoint=push_endpoint, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration, client=self) diff --git a/packages/google-cloud-pubsub/tests/unit/test_client.py b/packages/google-cloud-pubsub/tests/unit/test_client.py index e251a0632dc8..f71e9ba21d0b 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_client.py +++ b/packages/google-cloud-pubsub/tests/unit/test_client.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import datetime import unittest import mock @@ -365,7 +366,17 @@ def test_list_subscriptions_w_missing_key(self): self.assertEqual(api._listed_subscriptions, (self.PROJECT, None, None)) - def test_topic(self): + def test_list_snapshots(self): + creds = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=creds) + client._connection = object() + api = _FauxSubscriberAPI() + response = api._list_snapshots_response = object() + client._subscriber_api = api + self.assertEqual(client.list_snapshots(), response) + self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) + + def test_topic_factory(self): PROJECT = 'PROJECT' TOPIC_NAME = 'TOPIC_NAME' creds = _make_credentials() @@ -379,17 +390,33 @@ def test_topic(self): 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) self.assertFalse(new_topic.timestamp_messages) - def test_list_snapshots(self): + def test_subscription_factory(self): + project = 'PROJECT' creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxSubscriberAPI() - response = api._list_snapshots_response = object() - client._subscriber_api = api - self.assertEqual(client.list_snapshots(), response) - self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) + client_obj = self._make_one(project=project, credentials=creds) 
+ + sub_name = 'hoot-n-holler' + ack_deadline = 60, + push_endpoint = 'https://api.example.com/push' + message_retention_duration = datetime.timedelta(3600) + new_subscription = client_obj.subscription( + sub_name, ack_deadline=ack_deadline, + push_endpoint=push_endpoint, + retain_acked_messages=True, + message_retention_duration=message_retention_duration) + + self.assertEqual(new_subscription.name, sub_name) + self.assertIsNone(new_subscription.topic) + self.assertIs(new_subscription._client, client_obj) + self.assertEqual(new_subscription._project, project) + self.assertEqual(new_subscription.ack_deadline, ack_deadline) + self.assertEqual(new_subscription.push_endpoint, push_endpoint) + self.assertTrue(new_subscription.retain_acked_messages) + self.assertEqual( + new_subscription.message_retention_duration, + message_retention_duration) + - class _Iterator(object): def __init__(self, items, token): @@ -419,7 +446,8 @@ def list_subscriptions(self, project, page_size, page_token): def list_snapshots(self, project, page_size, page_token): self._listed_snapshots = (project, page_size, page_token) - return self._list_snapshots_response + return self._list_snapshots_response + class _Connection(object): From 03c75b06fa3258d04cc30f8b4f980b4aef8f9392 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 16 May 2017 10:26:17 -0700 Subject: [PATCH 0099/1197] Fix "broken" docs build. 
(#3422) Examples: https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/1895 https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/1896 Caused by new release of Sphinx: https://pypi.python.org/pypi/Sphinx/1.6.1 --- packages/google-cloud-pubsub/google/cloud/pubsub/client.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 1b38780da7fe..1df95a2400de 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -226,7 +226,6 @@ def topic(self, name, timestamp_messages=False): .. literalinclude:: pubsub_snippets.py :start-after: [START client_topic] :end-before: [END client_topic] - :dedent: 4 :type name: str :param name: the name of the topic to be constructed. @@ -249,7 +248,6 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None, .. literalinclude:: pubsub_snippets.py :start-after: [START client_subscription] :end-before: [END client_subscription] - :dedent: 4 :type name: str :param name: the name of the subscription to be constructed. From a3786e31c94c5f6c3501b9671e33477ece5a09ac Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 16 May 2017 10:34:26 -0700 Subject: [PATCH 0100/1197] Making Subscription.reload() update the topic if unset. (#3397) Also: removing Subscription._project and Snapshot._project since they are never used (and they shadow data that they shouldn't). 
--- .../google/cloud/pubsub/snapshot.py | 1 - .../google/cloud/pubsub/subscription.py | 4 ++- .../tests/unit/test__gax.py | 12 ++++----- .../tests/unit/test__http.py | 8 +++--- .../tests/unit/test_client.py | 6 ++--- .../tests/unit/test_subscription.py | 25 ++++++++++++++++++- 6 files changed, 40 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py index fd9a78376397..557ea93818d6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py @@ -35,7 +35,6 @@ def __init__(self, name, subscription=None, topic=None, client=None): self._subscription = subscription self._client = client or getattr( subscription, '_client', None) or topic._client - self._project = self._client.project @classmethod def from_api_repr(cls, resource, client, topics=None): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index b597d3526f67..538913cca33e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -86,7 +86,6 @@ def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, self.name = name self.topic = topic self._client = client or topic._client - self._project = self._client.project self.ack_deadline = ack_deadline self.push_endpoint = push_endpoint self.retain_acked_messages = retain_acked_messages @@ -274,6 +273,9 @@ def reload(self, client=None): self.ack_deadline = data.get('ackDeadlineSeconds') push_config = data.get('pushConfig', {}) self.push_endpoint = push_config.get('pushEndpoint') + if self.topic is None and 'topic' in data: + topic_name = topic_name_from_path(data['topic'], client.project) + self.topic = client.topic(topic_name) def delete(self, client=None): """API call: 
delete the subscription via a DELETE request. diff --git a/packages/google-cloud-pubsub/tests/unit/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py index a93d22f95a53..2bd7983b40af 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__gax.py +++ b/packages/google-cloud-pubsub/tests/unit/test__gax.py @@ -475,7 +475,7 @@ def test_list_subscriptions_no_paging(self): self.assertIsInstance(subscription.topic, Topic) self.assertEqual(subscription.topic.name, self.TOPIC_NAME) self.assertIs(subscription._client, client) - self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.project, self.PROJECT) self.assertIsNone(subscription.ack_deadline) self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) @@ -523,7 +523,7 @@ def test_list_subscriptions_with_paging(self): self.assertIsInstance(subscription.topic, Topic) self.assertEqual(subscription.topic.name, self.TOPIC_NAME) self.assertIs(subscription._client, client) - self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.project, self.PROJECT) self.assertIsNone(subscription.ack_deadline) self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) @@ -560,7 +560,7 @@ def test_subscription_create(self): def test_subscription_create_optional_params(self): import datetime - + from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) @@ -1009,7 +1009,7 @@ def test_list_snapshots_no_paging(self): self.assertIsInstance(snapshot.topic, Topic) self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) self.assertIs(snapshot._client, client) - self.assertEqual(snapshot._project, self.PROJECT) + self.assertEqual(snapshot.project, self.PROJECT) def test_list_snapshots_with_paging(self): from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( @@ -1047,7 +1047,7 @@ def test_list_snapshots_with_paging(self): self.assertIsInstance(snapshot.topic, Topic) 
self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) self.assertIs(snapshot._client, client) - self.assertEqual(snapshot._project, self.PROJECT) + self.assertEqual(snapshot.project, self.PROJECT) def test_subscription_seek_hit(self): gax_api = _GAXSubscriberAPI(_seek_ok=True) @@ -1548,7 +1548,7 @@ def delete_snapshot(self, snapshot, options=None): raise GaxError('error') if not self._delete_snapshot_ok: raise GaxError('miss', self._make_grpc_not_found()) - + def seek(self, subscription, time=None, snapshot=None, options=None): from google.gax.errors import GaxError diff --git a/packages/google-cloud-pubsub/tests/unit/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py index 2dc14f789ed1..d4bbc29dd6dd 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__http.py +++ b/packages/google-cloud-pubsub/tests/unit/test__http.py @@ -521,7 +521,7 @@ def test_list_subscriptions_no_paging(self): self.assertIsInstance(subscription.topic, Topic) self.assertEqual(subscription.topic.name, self.TOPIC_NAME) self.assertIs(subscription._client, client) - self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.project, self.PROJECT) self.assertIsNone(subscription.ack_deadline) self.assertIsNone(subscription.push_endpoint) @@ -566,7 +566,7 @@ def test_list_subscriptions_with_paging(self): self.assertIsInstance(subscription.topic, Topic) self.assertEqual(subscription.topic.name, self.TOPIC_NAME) self.assertIs(subscription._client, client) - self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.project, self.PROJECT) self.assertIsNone(subscription.ack_deadline) self.assertIsNone(subscription.push_endpoint) @@ -612,7 +612,7 @@ def test_subscription_create_defaults(self): def test_subscription_create_retain_messages(self): import datetime - + RESOURCE = {'topic': self.TOPIC_PATH, 'retainAckedMessages': True, 'messageRetentionDuration': { @@ -637,7 +637,7 @@ def 
test_subscription_create_retain_messages(self): path = '/%s' % (self.SUB_PATH,) self.assertEqual(connection._called_with['path'], path) self.assertEqual(connection._called_with['data'], RESOURCE) - + def test_subscription_create_explicit(self): ACK_DEADLINE = 90 PUSH_ENDPOINT = 'https://api.example.com/push' diff --git a/packages/google-cloud-pubsub/tests/unit/test_client.py b/packages/google-cloud-pubsub/tests/unit/test_client.py index f71e9ba21d0b..407683606330 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_client.py +++ b/packages/google-cloud-pubsub/tests/unit/test_client.py @@ -280,7 +280,7 @@ def test_list_subscriptions_no_paging(self): self.assertIsInstance(subscription.topic, Topic) self.assertEqual(subscription.topic.name, self.TOPIC_NAME) self.assertIs(subscription._client, client) - self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.project, self.PROJECT) self.assertIsNone(subscription.ack_deadline) self.assertIsNone(subscription.push_endpoint) @@ -334,7 +334,7 @@ def test_list_subscriptions_with_paging(self): self.assertIsInstance(subscription.topic, Topic) self.assertEqual(subscription.topic.name, self.TOPIC_NAME) self.assertIs(subscription._client, client) - self.assertEqual(subscription._project, self.PROJECT) + self.assertEqual(subscription.project, self.PROJECT) self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) @@ -408,7 +408,7 @@ def test_subscription_factory(self): self.assertEqual(new_subscription.name, sub_name) self.assertIsNone(new_subscription.topic) self.assertIs(new_subscription._client, client_obj) - self.assertEqual(new_subscription._project, project) + self.assertEqual(new_subscription.project, project) self.assertEqual(new_subscription.ack_deadline, ack_deadline) self.assertEqual(new_subscription.push_endpoint, push_endpoint) self.assertTrue(new_subscription.retain_acked_messages) diff --git 
a/packages/google-cloud-pubsub/tests/unit/test_subscription.py b/packages/google-cloud-pubsub/tests/unit/test_subscription.py index c845d601dfca..ddf0ea439d77 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_subscription.py +++ b/packages/google-cloud-pubsub/tests/unit/test_subscription.py @@ -259,6 +259,29 @@ def test_reload_w_bound_client(self): self.assertEqual(subscription.push_endpoint, self.ENDPOINT) self.assertEqual(api._subscription_got, self.SUB_PATH) + def test_reload_sets_topic(self): + from google.cloud.pubsub.topic import Topic + + response = { + 'name': self.SUB_PATH, + 'topic': self.TOPIC_PATH, + 'ackDeadlineSeconds': self.DEADLINE, + 'pushConfig': {'pushEndpoint': self.ENDPOINT}, + } + client = _Client(project=self.PROJECT) + api = client.subscriber_api = _FauxSubscribererAPI() + api._subscription_get_response = response + subscription = self._make_one(self.SUB_NAME, client=client) + + self.assertIsNone(subscription.topic) + subscription.reload() + + self.assertEqual(subscription.ack_deadline, self.DEADLINE) + self.assertEqual(subscription.push_endpoint, self.ENDPOINT) + self.assertEqual(api._subscription_got, self.SUB_PATH) + self.assertIsInstance(subscription.topic, Topic) + self.assertEqual(subscription.topic.name, self.TOPIC_NAME) + def test_reload_w_alternate_client(self): RESPONSE = { 'name': self.SUB_PATH, @@ -506,7 +529,7 @@ def test_seek_snapshot_w_alternate_client(self): def test_seek_time_w_bound_client(self): import datetime - + from google.cloud import _helpers time = datetime.time() From 9d2644cab0a7241a0aa23d33af9d55fe9eda7c92 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 31 May 2017 13:07:22 -0700 Subject: [PATCH 0101/1197] Re-organize the documentation structure in preparation to split docs among subpackages (#3459) --- .../google/cloud/pubsub/client.py | 8 +++--- .../google/cloud/pubsub/subscription.py | 22 ++++++++-------- .../google/cloud/pubsub/topic.py | 26 +++++++++---------- 3 files changed, 28 
insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 1df95a2400de..17bb67cb66e2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -141,7 +141,7 @@ def list_topics(self, page_size=None, page_token=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START client_list_topics] :end-before: [END client_list_topics] @@ -170,7 +170,7 @@ def list_subscriptions(self, page_size=None, page_token=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START client_list_subscriptions] :end-before: [END client_list_subscriptions] @@ -223,7 +223,7 @@ def topic(self, name, timestamp_messages=False): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START client_topic] :end-before: [END client_topic] @@ -245,7 +245,7 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None, Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START client_subscription] :end-before: [END client_subscription] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index 538913cca33e..22f93246924c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -200,7 +200,7 @@ def create(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_create] :end-before: [END subscription_create] @@ -225,7 +225,7 @@ def exists(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. 
literalinclude:: snippets.py :start-after: [START subscription_exists] :end-before: [END subscription_exists] @@ -258,7 +258,7 @@ def reload(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_reload] :end-before: [END subscription_reload] @@ -285,7 +285,7 @@ def delete(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_delete] :end-before: [END subscription_delete] @@ -306,11 +306,11 @@ def modify_push_configuration(self, push_endpoint, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_push_pull] :end-before: [END subscription_push_pull] - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_pull_push] :end-before: [END subscription_pull_push] @@ -337,7 +337,7 @@ def pull(self, return_immediately=False, max_messages=1, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_pull] :end-before: [END subscription_pull] @@ -376,7 +376,7 @@ def acknowledge(self, ack_ids, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_acknowledge] :end-before: [END subscription_acknowledge] @@ -460,7 +460,7 @@ def get_iam_policy(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_get_iam_policy] :end-before: [END subscription_get_iam_policy] @@ -486,7 +486,7 @@ def set_iam_policy(self, policy, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. 
literalinclude:: snippets.py :start-after: [START subscription_set_iam_policy] :end-before: [END subscription_set_iam_policy] @@ -517,7 +517,7 @@ def check_iam_permissions(self, permissions, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START subscription_check_iam_permissions] :end-before: [END subscription_check_iam_permissions] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index f16c9d99baed..f9a8c28a3a09 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -59,19 +59,19 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None, Example: pull-mode subcription, default parameter values - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_subscription_defaults] :end-before: [END topic_subscription_defaults] Example: pull-mode subcription, override ``ack_deadline`` default - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_subscription_ack90] :end-before: [END topic_subscription_ack90] Example: push-mode subcription - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_subscription_push] :end-before: [END topic_subscription_push] @@ -160,7 +160,7 @@ def create(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_create] :end-before: [END topic_create] @@ -181,7 +181,7 @@ def exists(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_exists] :end-before: [END topic_exists] @@ -211,7 +211,7 @@ def delete(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. 
literalinclude:: snippets.py :start-after: [START topic_delete] :end-before: [END topic_delete] @@ -242,13 +242,13 @@ def publish(self, message, client=None, **attrs): Example without message attributes: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_publish_simple_message] :end-before: [END topic_publish_simple_message] With message attributes: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_publish_message_with_attrs] :end-before: [END topic_publish_message_with_attrs] @@ -279,7 +279,7 @@ def batch(self, client=None, **kwargs): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_batch] :end-before: [END topic_batch] @@ -312,7 +312,7 @@ def list_subscriptions(self, page_size=None, page_token=None, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_list_subscriptions] :end-before: [END topic_list_subscriptions] @@ -347,7 +347,7 @@ def get_iam_policy(self, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_get_iam_policy] :end-before: [END topic_get_iam_policy] @@ -373,7 +373,7 @@ def set_iam_policy(self, policy, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. literalinclude:: snippets.py :start-after: [START topic_set_iam_policy] :end-before: [END topic_set_iam_policy] @@ -404,7 +404,7 @@ def check_iam_permissions(self, permissions, client=None): Example: - .. literalinclude:: pubsub_snippets.py + .. 
literalinclude:: snippets.py :start-after: [START topic_check_iam_permissions] :end-before: [END topic_check_iam_permissions] From 3a6603073be56ec6af8f4517045d899eda1905c0 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 2 Jun 2017 14:36:29 -0700 Subject: [PATCH 0102/1197] Vision semi-GAPIC (#3373) --- .../google/cloud/pubsub/_gax.py | 36 ++++++++-------- .../google/cloud/pubsub/_http.py | 42 +++++++++---------- .../google/cloud/pubsub/client.py | 6 +-- .../google/cloud/pubsub/iam.py | 2 +- .../google/cloud/pubsub/message.py | 2 +- .../google/cloud/pubsub/snapshot.py | 4 +- .../google/cloud/pubsub/subscription.py | 26 ++++++------ .../google/cloud/pubsub/topic.py | 16 +++---- 8 files changed, 67 insertions(+), 67 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 730192755221..d32f8eb069a7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -60,7 +60,7 @@ def __init__(self, gax_api, client): def list_topics(self, project, page_size=0, page_token=None): """List topics for the project associated with this API. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list :type project: str @@ -90,7 +90,7 @@ def list_topics(self, project, page_size=0, page_token=None): def topic_create(self, topic_path): """API call: create a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create :type topic_path: str @@ -113,7 +113,7 @@ def topic_create(self, topic_path): def topic_get(self, topic_path): """API call: retrieve a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get :type topic_path: str @@ -136,7 +136,7 @@ def topic_get(self, topic_path): def topic_delete(self, topic_path): """API call: delete a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete :type topic_path: str @@ -153,7 +153,7 @@ def topic_delete(self, topic_path): def topic_publish(self, topic_path, messages, timeout=30): """API call: publish one or more messages to a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish :type topic_path: str @@ -186,7 +186,7 @@ def topic_publish(self, topic_path, messages, timeout=30): def topic_list_subscriptions(self, topic, page_size=0, page_token=None): """API call: list subscriptions bound to a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list :type topic: :class:`~google.cloud.pubsub.topic.Topic` @@ -242,7 +242,7 @@ def __init__(self, gax_api, client): def list_subscriptions(self, project, page_size=0, page_token=None): """List subscriptions for the project associated with this API. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list :type project: str @@ -283,7 +283,7 @@ def subscription_create(self, subscription_path, topic_path, message_retention_duration=None): """API call: create a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create :type subscription_path: str @@ -345,7 +345,7 @@ def subscription_create(self, subscription_path, topic_path, def subscription_get(self, subscription_path): """API call: retrieve a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get :type subscription_path: str @@ -367,7 +367,7 @@ def subscription_get(self, subscription_path): def subscription_delete(self, subscription_path): """API call: delete a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete :type subscription_path: str @@ -386,7 +386,7 @@ def subscription_modify_push_config(self, subscription_path, push_endpoint): """API call: update push config of a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig :type subscription_path: str @@ -411,7 +411,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, max_messages=1): """API call: retrieve messages for a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig :type subscription_path: str @@ -452,7 +452,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, def subscription_acknowledge(self, subscription_path, ack_ids): """API call: acknowledge retrieved messages - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig :type subscription_path: str @@ -474,7 +474,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, ack_deadline): """API call: 
update ack deadline for retrieved messages - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline :type subscription_path: str @@ -500,7 +500,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, def subscription_seek(self, subscription_path, time=None, snapshot=None): """API call: seek a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek :type subscription_path: str @@ -524,7 +524,7 @@ def subscription_seek(self, subscription_path, time=None, snapshot=None): def list_snapshots(self, project, page_size=0, page_token=None): """List snapshots for the project associated with this API. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list :type project: str @@ -561,7 +561,7 @@ def list_snapshots(self, project, page_size=0, page_token=None): def snapshot_create(self, snapshot_path, subscription_path): """API call: create a snapshot - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create :type snapshot_path: str @@ -594,7 +594,7 @@ def snapshot_create(self, snapshot_path, subscription_path): def snapshot_delete(self, snapshot_path): """API call: delete a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete :type snapshot_path: str diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index 0c059df7453a..f1d07237d7df 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -116,7 +116,7 @@ def __init__(self, client): def list_topics(self, project, page_size=None, page_token=None): """API call: list topics for a given project - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list :type project: str @@ -148,7 +148,7 @@ def 
list_topics(self, project, page_size=None, page_token=None): def topic_create(self, topic_path): """API call: create a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create :type topic_path: str @@ -163,7 +163,7 @@ def topic_create(self, topic_path): def topic_get(self, topic_path): """API call: retrieve a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get :type topic_path: str @@ -178,7 +178,7 @@ def topic_get(self, topic_path): def topic_delete(self, topic_path): """API call: delete a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete :type topic_path: str @@ -190,7 +190,7 @@ def topic_delete(self, topic_path): def topic_publish(self, topic_path, messages): """API call: publish one or more messages to a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish :type topic_path: str @@ -213,7 +213,7 @@ def topic_publish(self, topic_path, messages): def topic_list_subscriptions(self, topic, page_size=None, page_token=None): """API call: list subscriptions bound to a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list :type topic: :class:`~google.cloud.pubsub.topic.Topic` @@ -260,7 +260,7 @@ def __init__(self, client): def list_subscriptions(self, project, page_size=None, page_token=None): """API call: list subscriptions for a given project - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list :type project: str @@ -302,7 +302,7 @@ def subscription_create(self, subscription_path, topic_path, message_retention_duration=None): """API call: create a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create :type subscription_path: str @@ -364,7 +364,7 @@ def subscription_create(self, subscription_path, topic_path, def subscription_get(self, 
subscription_path): """API call: retrieve a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get :type subscription_path: str @@ -381,7 +381,7 @@ def subscription_get(self, subscription_path): def subscription_delete(self, subscription_path): """API call: delete a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete :type subscription_path: str @@ -396,7 +396,7 @@ def subscription_modify_push_config(self, subscription_path, push_endpoint): """API call: update push config of a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig :type subscription_path: str @@ -417,7 +417,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, max_messages=1): """API call: retrieve messages for a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig :type subscription_path: str @@ -450,7 +450,7 @@ def subscription_pull(self, subscription_path, return_immediately=False, def subscription_acknowledge(self, subscription_path, ack_ids): """API call: acknowledge retrieved messages - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig :type subscription_path: str @@ -471,7 +471,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, ack_deadline): """API call: update ack deadline for retrieved messages - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline :type subscription_path: str @@ -496,7 +496,7 @@ def subscription_modify_ack_deadline(self, subscription_path, ack_ids, def subscription_seek(self, subscription_path, time=None, snapshot=None): """API call: seek a subscription - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek :type subscription_path: 
str @@ -521,7 +521,7 @@ def subscription_seek(self, subscription_path, time=None, snapshot=None): def list_snapshots(self, project, page_size=None, page_token=None): """List snapshots for the project associated with this API. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list :type project: str @@ -559,7 +559,7 @@ def list_snapshots(self, project, page_size=None, page_token=None): def snapshot_create(self, snapshot_path, subscription_path): """API call: create a snapshot - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create :type snapshot_path: str @@ -581,7 +581,7 @@ def snapshot_create(self, snapshot_path, subscription_path): def snapshot_delete(self, snapshot_path): """API call: delete a topic - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete :type snapshot_path: str @@ -605,7 +605,7 @@ def __init__(self, client): def get_iam_policy(self, target_path): """API call: fetch the IAM policy for the target - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy @@ -621,7 +621,7 @@ def get_iam_policy(self, target_path): def set_iam_policy(self, target_path, policy): """API call: update the IAM policy for the target - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy @@ -641,7 +641,7 @@ def set_iam_policy(self, target_path, policy): def test_iam_permissions(self, target_path, permissions): """API call: test permissions - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 17bb67cb66e2..902188beaab6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -136,7 +136,7 @@ def iam_policy_api(self): def list_topics(self, page_size=None, page_token=None): """List topics for the project associated with this client. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list Example: @@ -165,7 +165,7 @@ def list_topics(self, page_size=None, page_token=None): def list_subscriptions(self, page_size=None, page_token=None): """List subscriptions for the project associated with this client. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list Example: @@ -195,7 +195,7 @@ def list_subscriptions(self, page_size=None, page_token=None): def list_snapshots(self, page_size=None, page_token=None): """List snapshots for the project associated with this API. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list :type project: str diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py index 9c7e46af222a..7dce1c2c4cfa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py @@ -96,7 +96,7 @@ class Policy(_BasePolicy): """IAM Policy / Bindings. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding """ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py index 6b93e3b890ed..e2153d5cb14f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py @@ -20,7 +20,7 @@ class Message(object): """Messages can be published to a topic and received by subscribers. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage :type data: bytes diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py index 557ea93818d6..599cd05d8765 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py @@ -108,7 +108,7 @@ def _require_client(self, client): def create(self, client=None): """API call: create the snapshot - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create :type client: :class:`~google.cloud.pubsub.client.Client` or @@ -127,7 +127,7 @@ def create(self, client=None): def delete(self, client=None): """API call: delete the snapshot - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete :type client: :class:`~google.cloud.pubsub.client.Client` or diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py index 22f93246924c..86ca1f97c230 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py @@ -27,7 +27,7 @@ class Subscription(object): """Subscriptions receive messages published to their topics. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions :type name: str @@ -69,7 +69,7 @@ class Subscription(object): _DELETED_TOPIC_PATH = '_deleted-topic_' """Value of ``projects.subscriptions.topic`` when topic has been deleted. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic """ @@ -195,7 +195,7 @@ def _require_client(self, client): def create(self, client=None): """API call: create the subscription via a PUT request - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create Example: @@ -280,7 +280,7 @@ def reload(self, client=None): def delete(self, client=None): """API call: delete the subscription via a DELETE request. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete Example: @@ -301,7 +301,7 @@ def delete(self, client=None): def modify_push_configuration(self, push_endpoint, client=None): """API call: update the push endpoint for the subscription. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig Example: @@ -332,7 +332,7 @@ def modify_push_configuration(self, push_endpoint, client=None): def pull(self, return_immediately=False, max_messages=1, client=None): """API call: retrieve messages for the subscription. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull Example: @@ -371,7 +371,7 @@ def pull(self, return_immediately=False, max_messages=1, client=None): def acknowledge(self, ack_ids, client=None): """API call: acknowledge retrieved messages for the subscription. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/acknowledge Example: @@ -395,7 +395,7 @@ def acknowledge(self, ack_ids, client=None): def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): """API call: update acknowledgement deadline for a retrieved message. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline :type ack_ids: list of string @@ -428,7 +428,7 @@ def snapshot(self, name, client=None): def seek_snapshot(self, snapshot, client=None): """API call: seek a subscription to a given snapshot - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek :type snapshot: :class:`Snapshot` @@ -441,7 +441,7 @@ def seek_snapshot(self, snapshot, client=None): def seek_timestamp(self, timestamp, client=None): """API call: seek a subscription to a given point in time - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek :type time: :class:`datetime.datetime` @@ -455,7 +455,7 @@ def seek_timestamp(self, timestamp, client=None): def get_iam_policy(self, client=None): """Fetch the IAM policy for the subscription. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy Example: @@ -481,7 +481,7 @@ def get_iam_policy(self, client=None): def set_iam_policy(self, policy, client=None): """Update the IAM policy for the subscription. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy Example: @@ -512,7 +512,7 @@ def set_iam_policy(self, policy, client=None): def check_iam_permissions(self, permissions, client=None): """Verify permissions allowed for the current user. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions Example: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index f9a8c28a3a09..92c323ed63d7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -32,7 +32,7 @@ class Topic(object): Subscribers then receive those messages. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics :type name: str @@ -155,7 +155,7 @@ def _require_client(self, client): def create(self, client=None): """API call: create the topic via a PUT request - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create Example: @@ -206,7 +206,7 @@ def exists(self, client=None): def delete(self, client=None): """API call: delete the topic via a DELETE request - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete Example: @@ -237,7 +237,7 @@ def _timestamp_message(self, attrs): def publish(self, message, client=None, **attrs): """API call: publish a message to a topic via a POST request - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish Example without message attributes: @@ -307,7 +307,7 @@ def batch(self, client=None, **kwargs): def list_subscriptions(self, page_size=None, page_token=None, client=None): """List subscriptions for the project associated with this client. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list Example: @@ -342,7 +342,7 @@ def list_subscriptions(self, page_size=None, page_token=None, client=None): def get_iam_policy(self, client=None): """Fetch the IAM policy for the topic. 
- See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy Example: @@ -368,7 +368,7 @@ def get_iam_policy(self, client=None): def set_iam_policy(self, policy, client=None): """Update the IAM policy for the topic. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy Example: @@ -399,7 +399,7 @@ def set_iam_policy(self, policy, client=None): def check_iam_permissions(self, permissions, client=None): """Verify permissions allowed for the current user. - See: + See https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions Example: From 7095a9f096f302298699891f28deadabdf75e8e4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 5 Jun 2017 12:40:16 -0700 Subject: [PATCH 0103/1197] Revert "Fix "broken" docs build. (#3422)" (#3439) This reverts commit 90565b0043d40bae9eb6a2e905aa8f22c8951029. --- packages/google-cloud-pubsub/google/cloud/pubsub/client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 902188beaab6..6a7e60a1923d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -226,6 +226,7 @@ def topic(self, name, timestamp_messages=False): .. literalinclude:: snippets.py :start-after: [START client_topic] :end-before: [END client_topic] + :dedent: 4 :type name: str :param name: the name of the topic to be constructed. @@ -248,6 +249,7 @@ def subscription(self, name, ack_deadline=None, push_endpoint=None, .. literalinclude:: snippets.py :start-after: [START client_subscription] :end-before: [END client_subscription] + :dedent: 4 :type name: str :param name: the name of the subscription to be constructed. 
From 2742e9cc090da8c3cd0fab6c9be1ccc9d576c70e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Jun 2017 16:02:22 -0400 Subject: [PATCH 0104/1197] Remap new Gax conflict error code (#3443) * Add testing support for 'ALREADY_EXISTS' gRPC error code. * Cover both possible gRPC conflict error codes. Closes #3175. * Exercise conflict-on-create in systests for topic/sub/snap. --- .../google/cloud/pubsub/_gax.py | 9 ++- packages/google-cloud-pubsub/tests/system.py | 10 +++ .../tests/unit/test__gax.py | 81 ++++++++++++++++--- 3 files changed, 88 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index d32f8eb069a7..94dc639178ef 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -42,6 +42,9 @@ from google.cloud.pubsub.subscription import Subscription from google.cloud.pubsub.topic import Topic +_CONFLICT_ERROR_CODES = ( + StatusCode.FAILED_PRECONDITION, StatusCode.ALREADY_EXISTS) + class _PublisherAPI(object): """Helper mapping publisher-related APIs. 
@@ -105,7 +108,7 @@ def topic_create(self, topic_path): try: topic_pb = self._gax_api.create_topic(topic_path) except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: raise Conflict(topic_path) raise return {'name': topic_pb.name} @@ -337,7 +340,7 @@ def subscription_create(self, subscription_path, topic_path, retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration) except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: raise Conflict(topic_path) raise return MessageToDict(sub_pb) @@ -584,7 +587,7 @@ def snapshot_create(self, snapshot_path, subscription_path): snapshot_pb = self._gax_api.create_snapshot( snapshot_path, subscription_path) except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.FAILED_PRECONDITION: + if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: raise Conflict(snapshot_path) elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: raise NotFound(subscription_path) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index acdbde0dffca..d55011a5254e 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -22,6 +22,7 @@ import httplib2 from google.cloud.environment_vars import PUBSUB_EMULATOR +from google.cloud.exceptions import Conflict from google.cloud.pubsub import client from test_utils.retry import RetryInstanceState @@ -113,6 +114,9 @@ def test_create_topic(self): self.assertTrue(topic.exists()) self.assertEqual(topic.name, topic_name) + with self.assertRaises(Conflict): + topic.create() + def test_list_topics(self): before = _consume_topics(Config.CLIENT) topics_to_create = [ @@ -152,6 +156,9 @@ def test_create_subscription_defaults(self): self.assertEqual(subscription.name, SUBSCRIPTION_NAME) 
self.assertIs(subscription.topic, topic) + with self.assertRaises(Conflict): + subscription.create() + def test_create_subscription_w_ack_deadline(self): TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') topic = Config.CLIENT.topic(TOPIC_NAME) @@ -350,6 +357,9 @@ def full_name(obj): self.assertIn(snapshot.full_name, map(full_name, after_snapshots)) self.assertNotIn(snapshot.full_name, map(full_name, before_snapshots)) + with self.assertRaises(Conflict): + snapshot.create() + def test_seek(self): TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') diff --git a/packages/google-cloud-pubsub/tests/unit/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py index 2bd7983b40af..dd2ea8077f84 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__gax.py +++ b/packages/google-cloud-pubsub/tests/unit/test__gax.py @@ -141,10 +141,24 @@ def test_topic_create(self): self.assertEqual(topic_path, self.TOPIC_PATH) self.assertIsNone(options) + def test_topic_create_failed_precondition(self): + from google.cloud.exceptions import Conflict + + gax_api = _GAXPublisherAPI(_create_topic_failed_precondition=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(Conflict): + api.topic_create(self.TOPIC_PATH) + + topic_path, options = gax_api._create_topic_called_with + self.assertEqual(topic_path, self.TOPIC_PATH) + self.assertIsNone(options) + def test_topic_create_already_exists(self): from google.cloud.exceptions import Conflict - gax_api = _GAXPublisherAPI(_create_topic_conflict=True) + gax_api = _GAXPublisherAPI(_create_topic_already_exists=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -597,11 +611,35 @@ def test_subscription_create_optional_params(self): expected_message_retention_duration.total_seconds()) self.assertIsNone(options) + def test_subscription_create_failed_precondition(self): + from google.cloud.exceptions import Conflict + + DEADLINE = 600 + gax_api = 
_GAXSubscriberAPI( + _create_subscription_failed_precondition=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(Conflict): + api.subscription_create( + self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) + + (name, topic, push_config, ack_deadline, retain_acked_messages, + message_retention_duration, options) = ( + gax_api._create_subscription_called_with) + self.assertEqual(name, self.SUB_PATH) + self.assertEqual(topic, self.TOPIC_PATH) + self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) + self.assertEqual(ack_deadline, DEADLINE) + self.assertIsNone(retain_acked_messages) + self.assertIsNone(message_retention_duration) + self.assertIsNone(options) + def test_subscription_create_already_exists(self): from google.cloud.exceptions import Conflict DEADLINE = 600 - gax_api = _GAXSubscriberAPI(_create_subscription_conflict=True) + gax_api = _GAXSubscriberAPI(_create_subscription_already_exists=True) client = _Client(self.PROJECT) api = self._make_one(gax_api, client) @@ -1121,10 +1159,26 @@ def test_snapshot_create(self): self.assertEqual(subscription, self.SUB_PATH) self.assertIsNone(options) + def test_snapshot_create_failed_precondition(self): + from google.cloud.exceptions import Conflict + + gax_api = _GAXSubscriberAPI(_create_snapshot_failed_precondition=True) + client = _Client(self.PROJECT) + api = self._make_one(gax_api, client) + + with self.assertRaises(Conflict): + api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) + + name, subscription, options = ( + gax_api._create_snapshot_called_with) + self.assertEqual(name, self.SNAPSHOT_PATH) + self.assertEqual(subscription, self.SUB_PATH) + self.assertIsNone(options) + def test_snapshot_create_already_exists(self): from google.cloud.exceptions import Conflict - gax_api = _GAXSubscriberAPI(_create_snapshot_conflict=True) + gax_api = _GAXSubscriberAPI(_create_snapshot_already_exists=True) client = _Client(self.PROJECT) api = 
self._make_one(gax_api, client) @@ -1371,7 +1425,8 @@ def mock_insecure_channel(host): class _GAXPublisherAPI(_GAXBaseAPI): - _create_topic_conflict = False + _create_topic_failed_precondition = False + _create_topic_already_exists = False def list_topics(self, name, page_size, options): self._list_topics_called_with = name, page_size, options @@ -1383,8 +1438,10 @@ def create_topic(self, name, options=None): self._create_topic_called_with = name, options if self._random_gax_error: raise GaxError('error') - if self._create_topic_conflict: + if self._create_topic_failed_precondition: raise GaxError('conflict', self._make_grpc_failed_precondition()) + if self._create_topic_already_exists: + raise GaxError('conflict', self._make_grpc_already_exists()) return self._create_topic_response def get_topic(self, name, options=None): @@ -1432,8 +1489,10 @@ def list_topic_subscriptions(self, topic, page_size, options=None): class _GAXSubscriberAPI(_GAXBaseAPI): - _create_snapshot_conflict = False - _create_subscription_conflict = False + _create_snapshot_already_exists = False + _create_snapshot_failed_precondition = False + _create_subscription_already_exists = False + _create_subscription_failed_precondition = False _modify_push_config_ok = False _acknowledge_ok = False _modify_ack_deadline_ok = False @@ -1456,8 +1515,10 @@ def create_subscription(self, name, topic, push_config=None, retain_acked_messages, message_retention_duration, options) if self._random_gax_error: raise GaxError('error') - if self._create_subscription_conflict: + if self._create_subscription_failed_precondition: raise GaxError('conflict', self._make_grpc_failed_precondition()) + if self._create_subscription_already_exists: + raise GaxError('conflict', self._make_grpc_already_exists()) return self._create_subscription_response def get_subscription(self, name, options=None): @@ -1533,7 +1594,9 @@ def create_snapshot(self, name, subscription, options=None): self._create_snapshot_called_with = (name, 
subscription, options) if self._random_gax_error: raise GaxError('error') - if self._create_snapshot_conflict: + if self._create_snapshot_already_exists: + raise GaxError('conflict', self._make_grpc_already_exists()) + if self._create_snapshot_failed_precondition: raise GaxError('conflict', self._make_grpc_failed_precondition()) if self._snapshot_create_subscription_miss: raise GaxError('miss', self._make_grpc_not_found()) From aafb423d833426d60a1ff715fd776a10aee33045 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Jun 2017 15:08:10 -0700 Subject: [PATCH 0105/1197] Re-enable pylint in info-only mode for all packages (#3519) --- packages/google-cloud-pubsub/nox.py | 13 +++++++--- packages/google-cloud-pubsub/pylint.config.py | 25 +++++++++++++++++++ 2 files changed, 35 insertions(+), 3 deletions(-) create mode 100644 packages/google-cloud-pubsub/pylint.config.py diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 209ed41f9bfc..acd70b44ce0b 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -66,15 +66,22 @@ def system_tests(session, python_version): @nox.session def lint(session): - """Run flake8. + """Run linters. - Returns a failure if flake8 finds linting errors or sufficiently + Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ session.interpreter = 'python3.6' - session.install('flake8', *LOCAL_DEPS) + session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/pubsub') + session.run( + 'gcp-devrel-py-tools', 'run-pylint', + '--config', 'pylint.config.py', + '--library-filesets', 'google', + '--test-filesets', 'tests', + # Temporarily allow this to fail. 
+ success_codes=range(0, 100)) @nox.session diff --git a/packages/google-cloud-pubsub/pylint.config.py b/packages/google-cloud-pubsub/pylint.config.py new file mode 100644 index 000000000000..d8ca7b92e85e --- /dev/null +++ b/packages/google-cloud-pubsub/pylint.config.py @@ -0,0 +1,25 @@ +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This module is used to configure gcp-devrel-py-tools run-pylint.""" + +# Library configuration + +# library_additions = {} +# library_replacements = {} + +# Test configuration + +# test_additions = copy.deepcopy(library_additions) +# test_replacements = copy.deepcopy(library_replacements) From fdce0888faaa5aa74457914c1b5b66b2138c2dc3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 26 Jun 2017 18:38:54 -0400 Subject: [PATCH 0106/1197] Prep pubsub-0.26.0 release. 
(#3532) --- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b1b1375ed870..94a854b63dee 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.24.1, < 0.25dev', + 'google-cloud-core >= 0.25.0, < 0.26dev', 'grpcio >= 1.0.2, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-pubsub', - version='0.25.0', + version='0.26.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 5a7836673bdbff8defaa8ad3513baf4330482ce2 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Jun 2017 10:32:30 -0700 Subject: [PATCH 0107/1197] Fix inclusion of tests in manifest.in (#3552) --- packages/google-cloud-pubsub/MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index 9f7100c9528a..fc77f8c82ff0 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,4 +1,4 @@ include README.rst LICENSE recursive-include google *.json *.proto -recursive-include unit_tests * +recursive-include tests * global-exclude *.pyc __pycache__ From 0402f3d3a31c4986482e98388dc4bd1024d08f89 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 28 Jun 2017 14:07:25 -0700 Subject: [PATCH 0108/1197] Making all LICENSE headers "uniform". 
(#3563) --- packages/google-cloud-pubsub/pylint.config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/pylint.config.py b/packages/google-cloud-pubsub/pylint.config.py index d8ca7b92e85e..b618319b8b61 100644 --- a/packages/google-cloud-pubsub/pylint.config.py +++ b/packages/google-cloud-pubsub/pylint.config.py @@ -4,7 +4,7 @@ # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, From 06333cb7e0120aa61555845f309e29b1c32e342a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 29 Jun 2017 10:56:09 -0700 Subject: [PATCH 0109/1197] Skipping system tests when credentials env. var is unset. (#3475) --- packages/google-cloud-pubsub/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index acd70b44ce0b..dd7b09330524 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -49,7 +49,7 @@ def system_tests(session, python_version): # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - return + session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) From dad68ec5439f30fc3599f3334d017053b7b7d82d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 6 Jul 2017 16:41:31 -0400 Subject: [PATCH 0110/1197] Shorten nox virtualenv names to avoid hashing. 
(#3585) --- packages/google-cloud-pubsub/nox.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index dd7b09330524..4bcecafe66b4 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -30,6 +30,9 @@ def unit_tests(session, python_version): # Run unit tests against all supported versions of Python. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'unit-' + python_version + # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) session.install('-e', '.') @@ -54,6 +57,9 @@ def system_tests(session, python_version): # Run the system tests against latest Python 2 and Python 3 only. session.interpreter = 'python{}'.format(python_version) + # Set the virtualenv dirname. + session.virtualenv_dirname = 'sys-' + python_version + # Install all test dependencies, then install this package into the # virutalenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) @@ -88,6 +94,10 @@ def lint(session): def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.interpreter = 'python3.6' + + # Set the virtualenv dirname. + session.virtualenv_dirname = 'setup' + session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') From 7cc0842586f1bf395a0f68795325ff1b983743ac Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 11 Jul 2017 10:51:40 -0700 Subject: [PATCH 0111/1197] Updating author_email in all setup.py. (#3598) Done via: $ git grep -l author_email | \ > xargs sed -i s/jjg+google-cloud-python@google.com/googleapis-publisher@google.com/g and manually editing `videointelligence/setup.py` and `vision/setup.py`. 
--- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 94a854b63dee..856a59824a60 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'jjg+google-cloud-python@google.com', + 'author_email': 'googleapis-publisher@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', From ee9f90a270bb9bdb2e199062316815e758d0ad49 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 19 Jul 2017 14:58:17 -0700 Subject: [PATCH 0112/1197] Fixing references to "dead" docs links. (#3631) * Fixing references to "dead" docs links. Done via: $ git grep -l 'google-cloud-auth.html' | \ > xargs sed -i s/'google-cloud-auth.html'/'core\/auth.html'/g $ git grep -l 'http\:\/\/google-cloud-python.readthedocs.io' | \ > xargs sed -i s/'http\:\/\/google-cloud-python.readthedocs.io'/\ > 'https\:\/\/google-cloud-python.readthedocs.io'/g Fixes #3531. * Fixing up other docs that were moved in #3459. --- packages/google-cloud-pubsub/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 6bf9d77ee82e..472b74eb1bf0 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Pub / Sub - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html Quick Start ----------- @@ -26,7 +26,7 @@ possible. Check out the `Authentication section`_ in our documentation to learn more. 
You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. -.. _Authentication section: http://google-cloud-python.readthedocs.io/en/latest/google-cloud-auth.html +.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html .. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication Using the API @@ -45,7 +45,7 @@ independently written applications. See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect to Cloud Pub/Sub using this Client Library. -.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub-usage.html +.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html To get started with this API, you'll need to create From eb95926d5af96d9c4c388309b872d8fb339fb4b8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 20 Jul 2017 09:33:21 -0700 Subject: [PATCH 0113/1197] Changing all pypi.python.org links to warehouse links. (#3641) Done via $ export OLD='https\:\/\/pypi.python.org\/pypi\/' $ export NEW='https\:\/\/pypi.org\/project\/' $ git grep -l ${OLD} | xargs sed -i s/${OLD}/${NEW}/g Then manually going through and adding a trailing slash to all warehouse links. (Though I did undo changes to `docs/json/`.) --- packages/google-cloud-pubsub/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 472b74eb1bf0..bf116676a440 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -61,6 +61,6 @@ To get started with this API, you'll need to create attr1='value1', attr2='value2') .. 
|pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg - :target: https://pypi.python.org/pypi/google-cloud-pubsub + :target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg - :target: https://pypi.python.org/pypi/google-cloud-pubsub + :target: https://pypi.org/project/google-cloud-pubsub/ From 405f06d05b808fc701380ab68db8d82252e96b05 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Jul 2017 11:21:30 -0700 Subject: [PATCH 0114/1197] Remove httplib2, replace with Requests (#3674) * Core: remove httplib2, replace with Requests Additionally remove make_exception in favor of from_http_status and from_http_response. * Datastore: replace httplib2 with Requests * DNS: replace httplib2 with Requests * Error Reporting: replace httplib2 with requests * Language: replace httplib2 with Requests * Logging: replace httplib2 with requests * Monitoring: replace httplib2 with Requests * Pubsub: replace httplib2 with Requests * Resource Manager: replace httplib2 with Requests * Runtimeconfig: replace httplib2 with Requests * Speech: replace httplib2 with Requests * Storage: replace httplib2 with Requests * BigQuery: replace httplib2 with Requests * Translate: replace httplib2 with Requests * Vision: replace httplib2 with Requests --- .../google/cloud/pubsub/client.py | 4 ++-- packages/google-cloud-pubsub/tests/system.py | 8 ++++---- .../google-cloud-pubsub/tests/unit/test__http.py | 15 +++++++++------ 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index 6a7e60a1923d..ae808c038b7c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -58,10 +58,10 @@ class Client(ClientWithProject): passed), falls back to the default inferred from the environment. 
- :type _http: :class:`~httplib2.Http` + :type _http: :class:`~requests.Session` :param _http: (Optional) HTTP object to make requests. Can be any object that defines ``request()`` with the same interface as - :meth:`~httplib2.Http.request`. If not passed, an + :meth:`requests.Session.request`. If not passed, an ``_http`` object is created that is bound to the ``credentials`` for the current object. This parameter should be considered private, and could diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index d55011a5254e..fd70f44165de 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -19,7 +19,7 @@ from google.gax.errors import GaxError from google.gax.grpc import exc_to_code from grpc import StatusCode -import httplib2 +import requests from google.cloud.environment_vars import PUBSUB_EMULATOR from google.cloud.exceptions import Conflict @@ -53,9 +53,9 @@ def setUpModule(): Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None if Config.IN_EMULATOR: credentials = EmulatorCreds() - http = httplib2.Http() # Un-authorized. - Config.CLIENT = client.Client(credentials=credentials, - _http=http) + http = requests.Session() # Un-authorized. 
+ Config.CLIENT = client.Client( + credentials=credentials, _http=http) else: Config.CLIENT = client.Client() diff --git a/packages/google-cloud-pubsub/tests/unit/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py index d4bbc29dd6dd..794fe093bbb3 100644 --- a/packages/google-cloud-pubsub/tests/unit/test__http.py +++ b/packages/google-cloud-pubsub/tests/unit/test__http.py @@ -102,13 +102,17 @@ def test_build_api_url_w_base_url_override(self): URI) def test_extra_headers(self): + import requests + from google.cloud import _http as base_http from google.cloud.pubsub import _http as MUT - http = mock.Mock(spec=['request']) - response = mock.Mock(status=200, spec=['status']) + http = mock.create_autospec(requests.Session, instance=True) + response = requests.Response() + response.status_code = 200 data = b'brent-spiner' - http.request.return_value = response, data + response._content = data + http.request.return_value = response client = mock.Mock(_http=http, spec=['_http']) conn = self._make_one(client) @@ -118,17 +122,16 @@ def test_extra_headers(self): self.assertEqual(result, data) expected_headers = { - 'Content-Length': str(len(req_data)), 'Accept-Encoding': 'gzip', base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, 'User-Agent': conn.USER_AGENT, } expected_uri = conn.build_api_url('/rainbow') http.request.assert_called_once_with( - body=req_data, + data=req_data, headers=expected_headers, method='GET', - uri=expected_uri, + url=expected_uri, ) From 8848d464dc0a1df8d30d9198ed67a22da2a5b0bb Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 2 Aug 2017 16:20:00 -0400 Subject: [PATCH 0115/1197] Sprinkle majyk retry fairy dust. 
(#3720) Closes #3510 --- packages/google-cloud-pubsub/tests/system.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index fd70f44165de..bbc4b527db8e 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -348,7 +348,8 @@ def test_create_snapshot(self): # There is no GET method for snapshot, so check existence using # list - after_snapshots = _consume_snapshots(Config.CLIENT) + retry = RetryResult(lambda result: result, max_tries=4) + after_snapshots = retry(_consume_snapshots)(Config.CLIENT) self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) def full_name(obj): From c5bc85b78e2f002145a0f58667f8d037c4315d0c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 4 Aug 2017 16:45:43 -0700 Subject: [PATCH 0116/1197] Updating all affected packages after google-cloud-core update. (#3730) * Updating all affected packages after google-cloud-core update. * Moving 'pip install .' **after** subpackages in nox docs. @lukesneeringer still hasn't explained why it was moved. In it's current location, the depencencies are first retrieved from PyPI (which fails here for the unreleased versions), e.g. 
https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python/2716 --- packages/google-cloud-pubsub/setup.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 856a59824a60..71fee1dd7b8f 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,14 +51,14 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.25.0, < 0.26dev', - 'grpcio >= 1.0.2, < 2.0dev', + 'google-cloud-core >= 0.26.0, < 0.27dev', + 'grpcio >= 1.2.0, < 2.0dev', 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', ] setup( name='google-cloud-pubsub', - version='0.26.0', + version='0.27.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From b590cd3a95bd71210587907d385be0007b2257b8 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 8 Aug 2017 14:50:31 -0700 Subject: [PATCH 0117/1197] Use latest/ directory for docs instead of stable/ (#3766) See also https://github.com/GoogleCloudPlatform/google-cloud-python/pull/3763 $ sed -i '' 's/googlecloudplatform.github.io\/google-cloud-python\/stable\//googlecloudplatform.github.io\/google-cloud-python\/latest\//g' **/*.rst --- packages/google-cloud-pubsub/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index bf116676a440..75611b1ff296 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Pub / Sub - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html +.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/usage.html Quick Start ----------- @@ -45,7 +45,7 @@ independently written applications. 
See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect to Cloud Pub/Sub using this Client Library. -.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/pubsub/usage.html +.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/usage.html To get started with this API, you'll need to create From 621392118114538e12f713e46124b36f5a850081 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 9 Aug 2017 10:02:05 -0700 Subject: [PATCH 0118/1197] Move google.cloud.iterator to google.api.core.page_iterator (#3770) * Move google.cloud.iterator to google.api.core.page_iterator * Re-write tests to pytest style. * Make GAXIterator private- it will soon be removed. * Pass api_request into HTTPIterator to avoid accessing private members * BigQuery: use google.api.core.page_iterator * DNS: use google.api.core.page_iterator * Logging: use google.api.core.page_iterator * PubSub: use google.api.core.page_iterator * Resource manager: use google.api.core.page_iterator * Runtimeconfig: use google.api.core.page_iterator * logging: use google.api.core._GAXIterator * Storage: use google.api.core.page_iterator * Pubsub: use google.api.core._GAXIterator * Trace: use google.api.core._GAXIterator * Spanner: use google.api.core._GAXIterator --- .../google/cloud/pubsub/_gax.py | 35 ++++++----- .../google/cloud/pubsub/_http.py | 59 ++++++++++++------- .../google/cloud/pubsub/client.py | 6 +- .../google/cloud/pubsub/topic.py | 2 +- 4 files changed, 60 insertions(+), 42 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py index 94dc639178ef..35e56717b3c2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py @@ -16,6 +16,7 @@ import functools +from google.api.core import page_iterator from 
google.cloud.gapic.pubsub.v1.publisher_client import PublisherClient from google.cloud.gapic.pubsub.v1.subscriber_client import SubscriberClient from google.gax import CallOptions @@ -35,7 +36,6 @@ from google.cloud._http import DEFAULT_USER_AGENT from google.cloud.exceptions import Conflict from google.cloud.exceptions import NotFound -from google.cloud.iterator import GAXIterator from google.cloud.pubsub import __version__ from google.cloud.pubsub._helpers import subscription_name_from_path from google.cloud.pubsub.snapshot import Snapshot @@ -78,7 +78,7 @@ def list_topics(self, project, page_size=0, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` accessible to the current API. """ @@ -88,7 +88,8 @@ def list_topics(self, project, page_size=0, page_token=None): path = 'projects/%s' % (project,) page_iter = self._gax_api.list_topics( path, page_size=page_size, options=options) - return GAXIterator(self._client, page_iter, _item_to_topic) + return page_iterator._GAXIterator( + self._client, page_iter, _item_to_topic) def topic_create(self, topic_path): """API call: create a topic @@ -204,7 +205,7 @@ def topic_list_subscriptions(self, topic, page_size=0, page_token=None): If not passed, the API will return the first page of subscriptions. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.subscription.Subscription` accessible to the current API. 
@@ -223,8 +224,8 @@ def topic_list_subscriptions(self, topic, page_size=0, page_token=None): raise NotFound(topic_path) raise - iterator = GAXIterator(self._client, page_iter, - _item_to_subscription_for_topic) + iterator = page_iterator._GAXIterator( + self._client, page_iter, _item_to_subscription_for_topic) iterator.topic = topic return iterator @@ -260,7 +261,7 @@ def list_subscriptions(self, project, page_size=0, page_token=None): If not passed, the API will return the first page of subscriptions. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.subscription.Subscription` accessible to the current API. @@ -278,7 +279,8 @@ def list_subscriptions(self, project, page_size=0, page_token=None): topics = {} item_to_value = functools.partial( _item_to_sub_for_client, topics=topics) - return GAXIterator(self._client, page_iter, item_to_value) + return page_iterator._GAXIterator( + self._client, page_iter, item_to_value) def subscription_create(self, subscription_path, topic_path, ack_deadline=None, push_endpoint=None, @@ -542,7 +544,7 @@ def list_snapshots(self, project, page_size=0, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` accessible to the current API. 
""" @@ -559,7 +561,8 @@ def list_snapshots(self, project, page_size=0, page_token=None): topics = {} item_to_value = functools.partial( _item_to_snapshot_for_client, topics=topics) - return GAXIterator(self._client, page_iter, item_to_value) + return page_iterator._GAXIterator( + self._client, page_iter, item_to_value) def snapshot_create(self, snapshot_path, subscription_path): """API call: create a snapshot @@ -709,7 +712,7 @@ def make_gax_subscriber_api(credentials=None, host=None): def _item_to_topic(iterator, resource): """Convert a protobuf topic to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: :class:`.pubsub_pb2.Topic` @@ -725,7 +728,7 @@ def _item_to_topic(iterator, resource): def _item_to_subscription_for_topic(iterator, subscription_path): """Convert a subscription name to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type subscription_path: str @@ -746,12 +749,12 @@ def _item_to_sub_for_client(iterator, sub_pb, topics): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be + :class:`~google.api.core.page_iterator.Iterator`. It is intended to be patched with a mutable topics argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_SubscriberAPI.list_subscriptions`. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. 
:type sub_pb: :class:`.pubsub_pb2.Subscription` @@ -776,12 +779,12 @@ def _item_to_snapshot_for_client(iterator, snapshot_pb, topics): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be + :class:`~google.api.core.page_iterator.Iterator`. It is intended to be patched with a mutable topics argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_SubscriberAPI.list_snapshots`. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type sub_pb: :class:`.pubsub_pb2.Snapshot` diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py index f1d07237d7df..5173b4095ca8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py @@ -19,10 +19,10 @@ import functools import os +from google.api.core import page_iterator from google.cloud import _http from google.cloud._helpers import _timedelta_to_duration_pb from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.iterator import HTTPIterator from google.cloud.pubsub import __version__ from google.cloud.pubsub._helpers import subscription_name_from_path @@ -131,7 +131,7 @@ def list_topics(self, project, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` accessible to the current client. 
""" @@ -140,9 +140,13 @@ def list_topics(self, project, page_size=None, page_token=None): extra_params['pageSize'] = page_size path = '/projects/%s/topics' % (project,) - return HTTPIterator( - client=self._client, path=path, item_to_value=_item_to_topic, - items_key='topics', page_token=page_token, + return page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=_item_to_topic, + items_key='topics', + page_token=page_token, extra_params=extra_params) def topic_create(self, topic_path): @@ -237,11 +241,14 @@ def topic_list_subscriptions(self, topic, page_size=None, page_token=None): extra_params['pageSize'] = page_size path = '/%s/subscriptions' % (topic.full_name,) - iterator = HTTPIterator( - client=self._client, path=path, + iterator = page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, item_to_value=_item_to_subscription_for_topic, items_key='subscriptions', - page_token=page_token, extra_params=extra_params) + page_token=page_token, + extra_params=extra_params) iterator.topic = topic return iterator @@ -275,7 +282,7 @@ def list_subscriptions(self, project, page_size=None, page_token=None): If not passed, the API will return the first page of subscriptions. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.subscription.Subscription` accessible to the current API. 
@@ -291,9 +298,13 @@ def list_subscriptions(self, project, page_size=None, page_token=None): topics = {} item_to_value = functools.partial( _item_to_sub_for_client, topics=topics) - return HTTPIterator( - client=self._client, path=path, item_to_value=item_to_value, - items_key='subscriptions', page_token=page_token, + return page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=item_to_value, + items_key='subscriptions', + page_token=page_token, extra_params=extra_params) def subscription_create(self, subscription_path, topic_path, @@ -536,7 +547,7 @@ def list_snapshots(self, project, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` accessible to the current API. """ @@ -551,9 +562,13 @@ def list_snapshots(self, project, page_size=None, page_token=None): topics = {} item_to_value = functools.partial( _item_to_snapshot_for_client, topics=topics) - return HTTPIterator( - client=self._client, path=path, item_to_value=item_to_value, - items_key='snapshots', page_token=page_token, + return page_iterator.HTTPIterator( + client=self._client, + api_request=self._client._connection.api_request, + path=path, + item_to_value=item_to_value, + items_key='snapshots', + page_token=page_token, extra_params=extra_params) def snapshot_create(self, snapshot_path, subscription_path): @@ -695,7 +710,7 @@ def _transform_messages_base64(messages, transform, key=None): def _item_to_topic(iterator, resource): """Convert a JSON topic to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. 
:type resource: dict @@ -710,7 +725,7 @@ def _item_to_topic(iterator, resource): def _item_to_subscription_for_topic(iterator, subscription_path): """Convert a subscription name to the native object. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type subscription_path: str @@ -731,12 +746,12 @@ def _item_to_sub_for_client(iterator, resource, topics): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be + :class:`~google.api.core.page_iterator.Iterator`. It is intended to be patched with a mutable topics argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_SubscriberAPI.list_subscriptions`. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. :type resource: dict @@ -760,12 +775,12 @@ def _item_to_snapshot_for_client(iterator, resource, topics): This method does not have the correct signature to be used as the ``item_to_value`` argument to - :class:`~google.cloud.iterator.Iterator`. It is intended to be + :class:`~google.api.core.page_iterator.Iterator`. It is intended to be patched with a mutable topics argument that can be updated on subsequent calls. For an example, see how the method is used above in :meth:`_SubscriberAPI.list_snapshots`. - :type iterator: :class:`~google.cloud.iterator.Iterator` + :type iterator: :class:`~google.api.core.page_iterator.Iterator` :param iterator: The iterator that is currently in use. 
:type resource: dict diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py index ae808c038b7c..0dc9b8fb6f38 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py @@ -154,7 +154,7 @@ def list_topics(self, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` accessible to the current API. """ @@ -183,7 +183,7 @@ def list_subscriptions(self, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.subscription.Subscription` accessible to the current client. @@ -210,7 +210,7 @@ def list_snapshots(self, page_size=None, page_token=None): passed, the API will return the first page of topics. - :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` accessible to the current API. """ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py index 92c323ed63d7..92f453bd2b2b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py @@ -330,7 +330,7 @@ def list_subscriptions(self, page_size=None, page_token=None, client=None): :param client: the client to use. If not passed, falls back to the ``client`` stored on the current topic. 
- :rtype: :class:`~google.cloud.iterator.Iterator` + :rtype: :class:`~google.api.core.page_iterator.Iterator` :returns: Iterator of :class:`~google.cloud.pubsub.subscription.Subscription` accessible to the current topic. From 5b3be379369c495909015760e5dbd9be5970f343 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Wed, 9 Aug 2017 14:54:32 -0700 Subject: [PATCH 0119/1197] De-flake the snapshot system test. (#3780) --- packages/google-cloud-pubsub/tests/system.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index bbc4b527db8e..eddfd1274da0 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -348,7 +348,10 @@ def test_create_snapshot(self): # There is no GET method for snapshot, so check existence using # list - retry = RetryResult(lambda result: result, max_tries=4) + def retry_predicate(result): + return len(result) > len(before_snapshots) + + retry = RetryResult(retry_predicate, max_tries=5) after_snapshots = retry(_consume_snapshots)(Config.CLIENT) self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) @@ -361,7 +364,6 @@ def full_name(obj): with self.assertRaises(Conflict): snapshot.create() - def test_seek(self): TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') topic = Config.CLIENT.topic(TOPIC_NAME, From bd8b340668096c86277cd7d2de2587b0e61d2838 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 10:20:24 -0700 Subject: [PATCH 0120/1197] Pub/Sub API Redesign (#3859) --- packages/google-cloud-pubsub/.coveragerc | 8 +- .../google/cloud/gapic/__init__.py | 1 + .../google/cloud/gapic/pubsub/__init__.py | 1 + .../cloud/gapic/pubsub/v1}/__init__.py | 0 .../cloud/gapic/pubsub/v1/publisher_client.py | 565 +++ .../pubsub/v1/publisher_client_config.json | 98 + .../gapic/pubsub/v1/subscriber_client.py | 1063 +++++ .../pubsub/v1/subscriber_client_config.json | 138 
+ .../google/cloud/proto/__init__.py | 1 + .../google/cloud/proto/pubsub/__init__.py | 1 + .../google/cloud/proto/pubsub/v1/__init__.py | 1 + .../cloud/proto/pubsub/v1/pubsub_pb2.py | 3594 +++++++++++++++++ .../cloud/proto/pubsub/v1/pubsub_pb2_grpc.py | 509 +++ .../google/cloud/pubsub.py | 26 + .../google/cloud/pubsub/__init__.py | 34 - .../google/cloud/pubsub/_gax.py | 802 ---- .../google/cloud/pubsub/_helpers.py | 73 - .../google/cloud/pubsub/_http.py | 797 ---- .../google/cloud/pubsub/client.py | 285 -- .../google/cloud/pubsub/iam.py | 138 - .../google/cloud/pubsub/message.py | 91 - .../google/cloud/pubsub/snapshot.py | 140 - .../google/cloud/pubsub/subscription.py | 590 --- .../google/cloud/pubsub/topic.py | 551 --- .../google/cloud/pubsub_v1/__init__.py | 25 + .../google/cloud/pubsub_v1/_gapic.py | 73 + .../cloud/pubsub_v1/publisher/__init__.py | 22 + .../pubsub_v1/publisher/batch/__init__.py | 0 .../cloud/pubsub_v1/publisher/batch/base.py | 147 + .../cloud/pubsub_v1/publisher/batch/thread.py | 245 ++ .../cloud/pubsub_v1/publisher/client.py | 161 + .../cloud/pubsub_v1/publisher/exceptions.py | 29 + .../cloud/pubsub_v1/publisher/futures.py | 169 + .../cloud/pubsub_v1/subscriber/__init__.py | 22 + .../cloud/pubsub_v1/subscriber/_consumer.py | 267 ++ .../pubsub_v1/subscriber/_helper_threads.py | 129 + .../cloud/pubsub_v1/subscriber/_histogram.py | 155 + .../cloud/pubsub_v1/subscriber/client.py | 98 + .../cloud/pubsub_v1/subscriber/message.py | 198 + .../pubsub_v1/subscriber/policy/__init__.py | 0 .../cloud/pubsub_v1/subscriber/policy/base.py | 392 ++ .../pubsub_v1/subscriber/policy/thread.py | 147 + .../google/cloud/pubsub_v1/types.py | 70 + packages/google-cloud-pubsub/nox.py | 11 +- packages/google-cloud-pubsub/setup.py | 10 +- packages/google-cloud-pubsub/tests/system.py | 472 +-- .../tests/unit/__init__.py | 13 - .../pubsub_v1/publisher/batch/test_base.py | 69 + .../pubsub_v1/publisher/batch/test_thread.py | 204 + .../unit/pubsub_v1/publisher/test_futures.py 
| 118 + .../publisher/test_publisher_client.py | 143 + .../pubsub_v1/subscriber/test_consumer.py | 117 + .../subscriber/test_helper_threads.py | 125 + .../pubsub_v1/subscriber/test_histogram.py | 84 + .../unit/pubsub_v1/subscriber/test_message.py | 102 + .../pubsub_v1/subscriber/test_policy_base.py | 231 ++ .../subscriber/test_policy_thread.py | 120 + .../subscriber/test_subscriber_client.py | 44 + .../tests/unit/test__gax.py | 1661 -------- .../tests/unit/test__helpers.py | 59 - .../tests/unit/test__http.py | 1165 ------ .../tests/unit/test_client.py | 462 --- .../tests/unit/test_iam.py | 81 - .../tests/unit/test_message.py | 125 - .../tests/unit/test_pubsub.py | 22 + .../tests/unit/test_snpashot.py | 215 - .../tests/unit/test_subscription.py | 957 ----- .../tests/unit/test_topic.py | 974 ----- 68 files changed, 9831 insertions(+), 9609 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py rename packages/google-cloud-pubsub/{tests => google/cloud/gapic/pubsub/v1}/__init__.py (100%) create mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py create mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json create mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py create mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json create mode 100644 packages/google-cloud-pubsub/google/cloud/proto/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py create mode 100644 packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py create mode 100644 
packages/google-cloud-pubsub/google/cloud/pubsub.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/_http.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/client.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/iam.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/message.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub/topic.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py create mode 100644 
packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test__gax.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test__helpers.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test__http.py delete mode 100644 
packages/google-cloud-pubsub/tests/unit/test_client.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test_iam.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test_message.py create mode 100644 packages/google-cloud-pubsub/tests/unit/test_pubsub.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test_snpashot.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test_subscription.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/test_topic.py diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index a54b99aa14b7..41ca7428e2ee 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -1,11 +1,17 @@ [run] branch = True +source = + google.cloud.pubsub + google.cloud.pubsub_v1 + tests.unit [report] -fail_under = 100 show_missing = True + exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ + # Ignore abstract methods + raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/__init__.py b/packages/google-cloud-pubsub/google/cloud/gapic/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/gapic/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/__init__.py similarity index 100% rename from packages/google-cloud-pubsub/tests/__init__.py rename to 
packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/__init__.py diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py new file mode 100644 index 000000000000..c0466e6d444b --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py @@ -0,0 +1,565 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. 
+"""Accesses the google.pubsub.v1 Publisher API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class PublisherClient(object): + """ + The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_topics': + _PageDesc('page_token', 'next_page_token', 'topics'), + 'list_topic_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. + + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. 
+ """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. 
+ app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. (Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A PublisherClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. 
+ default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'publisher_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Publisher', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.publisher_stub = config.create_stub( + pubsub_pb2.PublisherStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_topic = api_callable.create_api_call( + self.publisher_stub.CreateTopic, settings=defaults['create_topic']) + self._publish = api_callable.create_api_call( + self.publisher_stub.Publish, settings=defaults['publish']) + self._get_topic = api_callable.create_api_call( + self.publisher_stub.GetTopic, settings=defaults['get_topic']) + self._list_topics = api_callable.create_api_call( + self.publisher_stub.ListTopics, settings=defaults['list_topics']) + self._list_topic_subscriptions = api_callable.create_api_call( + self.publisher_stub.ListTopicSubscriptions, + settings=defaults['list_topic_subscriptions']) + self._delete_topic = api_callable.create_api_call( + self.publisher_stub.DeleteTopic, settings=defaults['delete_topic']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def 
create_topic(self, name, options=None): + """ + Creates the given topic with the given name. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> name = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_topic(name) + + Args: + name (string): The name of the topic. It must have the format + ``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter, + and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent + signs (``%``). It must be between 3 and 255 characters in length, and it + must not start with ``\"goog\"``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Topic(name=name) + return self._create_topic(request, options) + + def publish(self, topic, messages, options=None): + """ + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'' + >>> messages_element = pubsub_pb2.PubsubMessage(data=data) + >>> messages = [messages_element] + >>> response = client.publish(topic, messages) + + Args: + topic (string): The messages in the request will be published on this topic. 
+ Format is ``projects/{project}/topics/{topic}``. + messages (list[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PubsubMessage`]): The messages to publish. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PublishResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + return self._publish(request, options) + + def get_topic(self, topic, options=None): + """ + Gets the configuration of a topic. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_topic(topic) + + Args: + topic (string): The name of the topic to get. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetTopicRequest(topic=topic) + return self._get_topic(request, options) + + def list_topics(self, project, page_size=None, options=None): + """ + Lists matching topics. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topics(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that topics belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicsRequest( + project=project, page_size=page_size) + return self._list_topics(request, options) + + def list_topic_subscriptions(self, topic, page_size=None, options=None): + """ + Lists the name of the subscriptions for this topic. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topic_subscriptions(topic): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + topic (string): The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of string instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListTopicSubscriptionsRequest( + topic=topic, page_size=page_size) + return self._list_topic_subscriptions(request, options) + + def delete_topic(self, topic, options=None): + """ + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. 
Existing subscriptions to this topic are + not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> client.delete_topic(topic) + + Args: + topic (string): Name of the topic to delete. + Format is ``projects/{project}/topics/{topic}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteTopicRequest(topic=topic) + self._delete_topic(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> from google.iam.v1 import policy_pb2 + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. 
+ + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import publisher_client + >>> client = publisher_client.PublisherClient() + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json new file mode 100644 index 000000000000..7e8a723499e6 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json @@ -0,0 +1,98 @@ +{ + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "one_plus_delivery": [ + "CANCELLED", + "UNKNOWN", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "ABORTED", + "INTERNAL", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Publish": { + "timeout_millis": 60000, + "retry_codes_name": "one_plus_delivery", + "retry_params_name": "messaging", + "bundling": { + "element_count_threshold": 10, + "element_count_limit": 1000, + "request_byte_threshold": 1024, + "request_byte_limit": 10485760, + "delay_threshold_millis": 10 + } + }, + "GetTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopics": { + 
"timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopicSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py new file mode 100644 index 000000000000..5313e0d941a1 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -0,0 +1,1063 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. 
+# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Subscriber API.""" + +import collections +import json +import os +import pkg_resources +import platform + +from google.gax import api_callable +from google.gax import config +from google.gax import path_template +from google.gax.utils import oneof +import google.gax + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +_PageDesc = google.gax.PageDescriptor + + +class SubscriberClient(object): + """ + The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the ``Pull`` method. 
+ """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com' + """The default address of the service.""" + + DEFAULT_SERVICE_PORT = 443 + """The default port of the service.""" + + _PAGE_DESCRIPTORS = { + 'list_subscriptions': + _PageDesc('page_token', 'next_page_token', 'subscriptions'), + 'list_snapshots': + _PageDesc('page_token', 'next_page_token', 'snapshots') + } + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') + _SNAPSHOT_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/snapshots/{snapshot}') + _SUBSCRIPTION_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/subscriptions/{subscription}') + _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( + 'projects/{project}/topics/{topic}') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return cls._PROJECT_PATH_TEMPLATE.render({ + 'project': project, + }) + + @classmethod + def snapshot_path(cls, project, snapshot): + """Returns a fully-qualified snapshot resource name string.""" + return cls._SNAPSHOT_PATH_TEMPLATE.render({ + 'project': project, + 'snapshot': snapshot, + }) + + @classmethod + def subscription_path(cls, project, subscription): + """Returns a fully-qualified subscription resource name string.""" + return cls._SUBSCRIPTION_PATH_TEMPLATE.render({ + 'project': + project, + 'subscription': + subscription, + }) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return cls._TOPIC_PATH_TEMPLATE.render({ + 'project': project, + 'topic': topic, + }) + + @classmethod + def match_project_from_project_name(cls, project_name): + """Parses the project from a project resource. 
+ + Args: + project_name (string): A fully-qualified path representing a project + resource. + + Returns: + A string representing the project. + """ + return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') + + @classmethod + def match_project_from_snapshot_name(cls, snapshot_name): + """Parses the project from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the project. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('project') + + @classmethod + def match_snapshot_from_snapshot_name(cls, snapshot_name): + """Parses the snapshot from a snapshot resource. + + Args: + snapshot_name (string): A fully-qualified path representing a snapshot + resource. + + Returns: + A string representing the snapshot. + """ + return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('snapshot') + + @classmethod + def match_project_from_subscription_name(cls, subscription_name): + """Parses the project from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. + + Returns: + A string representing the project. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'project') + + @classmethod + def match_subscription_from_subscription_name(cls, subscription_name): + """Parses the subscription from a subscription resource. + + Args: + subscription_name (string): A fully-qualified path representing a subscription + resource. + + Returns: + A string representing the subscription. + """ + return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( + 'subscription') + + @classmethod + def match_project_from_topic_name(cls, topic_name): + """Parses the project from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the project. 
+ """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') + + @classmethod + def match_topic_from_topic_name(cls, topic_name): + """Parses the topic from a topic resource. + + Args: + topic_name (string): A fully-qualified path representing a topic + resource. + + Returns: + A string representing the topic. + """ + return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') + + def __init__(self, + service_path=SERVICE_ADDRESS, + port=DEFAULT_SERVICE_PORT, + channel=None, + credentials=None, + ssl_credentials=None, + scopes=None, + client_config=None, + app_name=None, + app_version='', + lib_name=None, + lib_version='', + metrics_headers=()): + """Constructor. + + Args: + service_path (string): The domain name of the API remote host. + port (int): The port on which to connect to the remote host. + channel (:class:`grpc.Channel`): A ``Channel`` instance through + which to make calls. + credentials (object): The authorization credentials to attach to + requests. These credentials identify this application to the + service. + ssl_credentials (:class:`grpc.ChannelCredentials`): A + ``ChannelCredentials`` instance for use with an SSL-enabled + channel. + scopes (list[string]): A list of OAuth2 scopes to attach to requests. + client_config (dict): + A dictionary for call options for each method. See + :func:`google.gax.construct_settings` for the structure of + this data. Falls back to the default config if not specified + or the specified config is missing data points. + app_name (string): The name of the application calling + the service. Recommended for analytics purposes. + app_version (string): The version of the application calling + the service. Recommended for analytics purposes. + lib_name (string): The API library software used for calling + the service. (Unless you are writing an API client itself, + leave this as default.) + lib_version (string): The API library software version used + for calling the service. 
(Unless you are writing an API client + itself, leave this as default.) + metrics_headers (dict): A dictionary of values for tracking + client library metrics. Ultimately serializes to a string + (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be + considered private. + + Returns: + A SubscriberClient object. + """ + # Unless the calling application specifically requested + # OAuth scopes, request everything. + if scopes is None: + scopes = self._ALL_SCOPES + + # Initialize an empty client config, if none is set. + if client_config is None: + client_config = {} + + # Initialize metrics_headers as an ordered dictionary + # (cuts down on cardinality of the resulting string slightly). + metrics_headers = collections.OrderedDict(metrics_headers) + metrics_headers['gl-python'] = platform.python_version() + + # The library may or may not be set, depending on what is + # calling this client. Newer client libraries set the library name + # and version. + if lib_name: + metrics_headers[lib_name] = lib_version + + # Finally, track the GAPIC package version. + metrics_headers['gapic'] = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + # Load the configuration defaults. 
+ default_client_config = json.loads( + pkg_resources.resource_string( + __name__, 'subscriber_client_config.json').decode()) + defaults = api_callable.construct_settings( + 'google.pubsub.v1.Subscriber', + default_client_config, + client_config, + config.STATUS_CODE_NAMES, + metrics_headers=metrics_headers, + page_descriptors=self._PAGE_DESCRIPTORS, ) + self.iam_policy_stub = config.create_stub( + iam_policy_pb2.IAMPolicyStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + self.subscriber_stub = config.create_stub( + pubsub_pb2.SubscriberStub, + channel=channel, + service_path=service_path, + service_port=port, + credentials=credentials, + scopes=scopes, + ssl_credentials=ssl_credentials) + + self._create_subscription = api_callable.create_api_call( + self.subscriber_stub.CreateSubscription, + settings=defaults['create_subscription']) + self._get_subscription = api_callable.create_api_call( + self.subscriber_stub.GetSubscription, + settings=defaults['get_subscription']) + self._update_subscription = api_callable.create_api_call( + self.subscriber_stub.UpdateSubscription, + settings=defaults['update_subscription']) + self._list_subscriptions = api_callable.create_api_call( + self.subscriber_stub.ListSubscriptions, + settings=defaults['list_subscriptions']) + self._delete_subscription = api_callable.create_api_call( + self.subscriber_stub.DeleteSubscription, + settings=defaults['delete_subscription']) + self._modify_ack_deadline = api_callable.create_api_call( + self.subscriber_stub.ModifyAckDeadline, + settings=defaults['modify_ack_deadline']) + self._acknowledge = api_callable.create_api_call( + self.subscriber_stub.Acknowledge, settings=defaults['acknowledge']) + self._pull = api_callable.create_api_call( + self.subscriber_stub.Pull, settings=defaults['pull']) + self._streaming_pull = api_callable.create_api_call( + self.subscriber_stub.StreamingPull, + 
settings=defaults['streaming_pull']) + self._modify_push_config = api_callable.create_api_call( + self.subscriber_stub.ModifyPushConfig, + settings=defaults['modify_push_config']) + self._list_snapshots = api_callable.create_api_call( + self.subscriber_stub.ListSnapshots, + settings=defaults['list_snapshots']) + self._create_snapshot = api_callable.create_api_call( + self.subscriber_stub.CreateSnapshot, + settings=defaults['create_snapshot']) + self._delete_snapshot = api_callable.create_api_call( + self.subscriber_stub.DeleteSnapshot, + settings=defaults['delete_snapshot']) + self._seek = api_callable.create_api_call( + self.subscriber_stub.Seek, settings=defaults['seek']) + self._set_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.SetIamPolicy, + settings=defaults['set_iam_policy']) + self._get_iam_policy = api_callable.create_api_call( + self.iam_policy_stub.GetIamPolicy, + settings=defaults['get_iam_policy']) + self._test_iam_permissions = api_callable.create_api_call( + self.iam_policy_stub.TestIamPermissions, + settings=defaults['test_iam_permissions']) + + # Service calls + def create_subscription(self, + name, + topic, + push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, + options=None): + """ + Creates a subscription to a given topic. + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + `resource name format `_. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> response = client.create_subscription(name, topic) + + Args: + name (string): The name of the subscription. It must have the format + ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must + start with a letter, and contain only letters (``[A-Za-z]``), numbers + (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters + in length, and it must not start with ``\"goog\"``. + topic (string): The name of the topic from which this subscription is receiving messages. + Format is ``projects/{project}/topics/{topic}``. + The value of this field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): If push delivery is used with this subscription, this field is + used to configure it. An empty ``pushConfig`` signifies that the subscriber + will pull and ack messages using API methods. + ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message + before the subscriber should acknowledge the message. After message + delivery but before the ack deadline expires and before the message is + acknowledged, it is an outstanding message and will not be delivered + again during that time (on a best-effort basis). + + For pull subscriptions, this value is used as the initial value for the ack + deadline. To override this value for a given message, call + ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using + pull. + The minimum custom deadline you can specify is 10 seconds. + The maximum custom deadline you can specify is 600 seconds (10 minutes). 
+ If this parameter is 0, a default value of 10 seconds is used. + + For push delivery, this value is also used to set the request timeout for + the call to the push endpoint. + + If the subscriber never acknowledges the message, the Pub/Sub + system will eventually redeliver the message. + retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then + messages are not expunged from the subscription's backlog, even if they are + acknowledged, until they fall out of the ``message_retention_duration`` + window. + message_retention_duration (:class:`google.protobuf.duration_pb2.Duration`): How long to retain unacknowledged messages in the subscription's backlog, + from the moment a message is published. + If ``retain_acked_messages`` is true, then this also configures the retention + of acknowledged messages, and thus configures how far back in time a ``Seek`` + can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.Subscription( + name=name, + topic=topic, + push_config=push_config, + ack_deadline_seconds=ack_deadline_seconds, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration) + return self._create_subscription(request, options) + + def get_subscription(self, subscription, options=None): + """ + Gets the configuration details of a subscription. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_subscription(subscription) + + Args: + subscription (string): The name of the subscription to get. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + return self._get_subscription(request, options) + + def update_subscription(self, subscription, update_mask, options=None): + """ + Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> from google.protobuf import field_mask_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = pubsub_pb2.Subscription() + >>> update_mask = field_mask_pb2.FieldMask() + >>> response = client.update_subscription(subscription, update_mask) + + Args: + subscription (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription`): The updated subscription object. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided subscription to update. + Must be specified and non-empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. 
+ + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.UpdateSubscriptionRequest( + subscription=subscription, update_mask=update_mask) + return self._update_subscription(request, options) + + def list_subscriptions(self, project, page_size=None, options=None): + """ + Lists matching subscriptions. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_subscriptions(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. 
+ request = pubsub_pb2.ListSubscriptionsRequest( + project=project, page_size=page_size) + return self._list_subscriptions(request, options) + + def delete_subscription(self, subscription, options=None): + """ + Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to ``Pull`` after deletion will return + ``NOT_FOUND``. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> client.delete_subscription(subscription) + + Args: + subscription (string): The subscription to delete. + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSubscriptionRequest( + subscription=subscription) + self._delete_subscription(request, options) + + def modify_ack_deadline(self, + subscription, + ack_ids, + ack_deadline_seconds, + options=None): + """ + Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level ``ackDeadlineSeconds`` used for subsequent messages. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> ack_deadline_seconds = 0 + >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) + + Args: + subscription (string): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): List of acknowledgment IDs. + ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to + the Pub/Sub system. For example, if the value is 10, the new + ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero may immediately make the message available for + another pull request. + The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 minutes). + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyAckDeadlineRequest( + subscription=subscription, + ack_ids=ack_ids, + ack_deadline_seconds=ack_deadline_seconds) + self._modify_ack_deadline(request, options) + + def acknowledge(self, subscription, ack_ids, options=None): + """ + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> client.acknowledge(subscription, ack_ids) + + Args: + subscription (string): The subscription whose message is being acknowledged. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[string]): The acknowledgment ID for the messages being acknowledged that was returned + by the Pub/Sub system in the ``Pull`` response. Must not be empty. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.AcknowledgeRequest( + subscription=subscription, ack_ids=ack_ids) + self._acknowledge(request, options) + + def pull(self, + subscription, + max_messages, + return_immediately=None, + options=None): + """ + Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return ``UNAVAILABLE`` if + there are too many concurrent pull requests pending for the given + subscription. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> max_messages = 0 + >>> response = client.pull(subscription, max_messages) + + Args: + subscription (string): The subscription from which messages should be pulled. + Format is ``projects/{project}/subscriptions/{sub}``. + max_messages (int): The maximum number of messages returned for this request. The Pub/Sub + system may return fewer than the number specified. 
+ return_immediately (bool): If this field set to true, the system will respond immediately even if + it there are no messages available to return in the ``Pull`` response. + Otherwise, the system may wait (for a bounded amount of time) until at + least one message is available, rather than returning no messages. The + client may cancel the request if it does not wish to wait any longer for + the response. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PullResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.PullRequest( + subscription=subscription, + max_messages=max_messages, + return_immediately=return_immediately) + return self._pull(request, options) + + def streaming_pull(self, requests, options=None): + """ + (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status ``OK`` to reassign + server-side resources, in which case, the client should re-establish the + stream. ``UNAVAILABLE`` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + + EXPERIMENTAL: This method interface might change in the future. 
+ + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> stream_ack_deadline_seconds = 0 + >>> request = pubsub_pb2.StreamingPullRequest(subscription=subscription, stream_ack_deadline_seconds=stream_ack_deadline_seconds) + >>> requests = [request] + >>> for element in client.streaming_pull(requests): + >>> # process element + >>> pass + + Args: + requests (iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullRequest`]): The input objects. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullResponse`]. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + return self._streaming_pull(requests, options) + + def modify_push_config(self, subscription, push_config, options=None): + """ + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty ``PushConfig``) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the ``PushConfig``. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> push_config = pubsub_pb2.PushConfig() + >>> client.modify_push_config(subscription, push_config) + + Args: + subscription (string): The name of the subscription. 
+ Format is ``projects/{project}/subscriptions/{sub}``. + push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system should + stop pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ModifyPushConfigRequest( + subscription=subscription, push_config=push_config) + self._modify_push_config(request, options) + + def list_snapshots(self, project, page_size=None, options=None): + """ + Lists the existing snapshots. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.gax import CallOptions, INITIAL_PAGE + >>> client = subscriber_client.SubscriberClient() + >>> project = client.project_path('[PROJECT]') + >>> + >>> # Iterate over all results + >>> for element in client.list_snapshots(project): + >>> # process element + >>> pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for element in page: + >>> # process element + >>> pass + + Args: + project (string): The name of the cloud project that snapshots belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.gax.PageIterator` instance. By default, this + is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instances. + This object can also be configured to iterate over the pages + of the response through the `CallOptions` parameter. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.ListSnapshotsRequest( + project=project, page_size=page_size) + return self._list_snapshots(request, options) + + def create_snapshot(self, name, subscription, options=None): + """ + Creates a snapshot from the requested subscription. + If the snapshot already exists, returns ``ALREADY_EXISTS``. + If the requested subscription doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + `resource name format `_. + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.create_snapshot(name, subscription) + + Args: + name (string): Optional user-provided name for this snapshot. + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription. + Note that for REST API requests, you must specify a name. + Format is ``projects/{project}/snapshots/{snap}``. 
+ subscription (string): The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + + - The existing backlog on the subscription. More precisely, this is + defined as the messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + `CreateSnapshot` request; as well as: + - Any messages published to the subscription's topic following the + successful completion of the CreateSnapshot request. + + Format is ``projects/{project}/subscriptions/{sub}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.CreateSnapshotRequest( + name=name, subscription=subscription) + return self._create_snapshot(request, options) + + def delete_snapshot(self, snapshot, options=None): + """ + Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> client.delete_snapshot(snapshot) + + Args: + snapshot (string): The name of the snapshot to delete. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. 
+ :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + self._delete_snapshot(request, options) + + def seek(self, subscription, time=None, snapshot=None, options=None): + """ + Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.seek(subscription) + + Args: + subscription (string): The subscription to affect. + time (:class:`google.protobuf.timestamp_pb2.Timestamp`): The time to seek to. + Messages retained in the subscription that were published before this + time are marked as acknowledged, and messages retained in the + subscription that were published after this time are marked as + unacknowledged. Note that this operation affects only those messages + retained in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). For example, + if ``time`` corresponds to a point before the message retention + window (or to a point before the system's notion of the subscription + creation time), only retained messages will be marked as unacknowledged, + and already-expunged messages will not be restored. + snapshot (string): The snapshot to seek to. The snapshot's topic must be the same as that of + the provided subscription. + Format is ``projects/{project}/snapshots/{snap}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.SeekResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. 
+ """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + oneof.check_oneof( + time=time, + snapshot=snapshot, ) + + # Create the request object. + request = pubsub_pb2.SeekRequest( + subscription=subscription, time=time, snapshot=snapshot) + return self._seek(request, options) + + def set_iam_policy(self, resource, policy, options=None): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> from google.iam.v1 import policy_pb2 + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> policy = policy_pb2.Policy() + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (string): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, options) + + def get_iam_policy(self, resource, options=None): + """ + Gets the access control policy for a resource. 
+ Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> response = client.get_iam_policy(resource) + + Args: + resource (string): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.policy_pb2.Policy` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, options) + + def test_iam_permissions(self, resource, permissions, options=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud.gapic.pubsub.v1 import subscriber_client + >>> client = subscriber_client.SubscriberClient() + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> permissions = [] + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (string): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. 
+ options (:class:`google.gax.CallOptions`): Overrides the default + settings for this call, e.g, timeout, retries etc. + + Returns: + A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. + + Raises: + :exc:`google.gax.errors.GaxError` if the RPC is aborted. + :exc:`ValueError` if the parameters are invalid. + """ + # Create the request object. + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions(request, options) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json new file mode 100644 index 000000000000..6180cc0a941f --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json @@ -0,0 +1,138 @@ +{ + "interfaces": { + "google.pubsub.v1.Subscriber": { + "retry_codes": { + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "non_idempotent": [ + "UNAVAILABLE" + ], + "pull": [ + "CANCELLED", + "DEADLINE_EXCEEDED", + "RESOURCE_EXHAUSTED", + "INTERNAL", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + }, + "streaming": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 900000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 900000, + "total_timeout_millis": 
900000 + } + }, + "methods": { + "CreateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ModifyAckDeadline": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Acknowledge": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "messaging" + }, + "Pull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "StreamingPull": { + "timeout_millis": 900000, + "retry_codes_name": "pull", + "retry_params_name": "streaming" + }, + "ModifyPushConfig": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListSnapshots": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Seek": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": 
"default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-pubsub/google/cloud/proto/__init__.py b/packages/google-cloud-pubsub/google/cloud/proto/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/proto/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py new file mode 100644 index 000000000000..de40ea7ca058 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py @@ -0,0 +1 @@ +__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py @@ -0,0 +1 @@ + diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py new file mode 100644 index 000000000000..aeee99e182d0 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py @@ -0,0 +1,3594 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/proto/pubsub/v1/pubsub.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/proto/pubsub/v1/pubsub.proto', + package='google.pubsub.v1', + syntax='proto3', + serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"y\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 
\x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 
\x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 
\x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\
x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\
xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + + + + +_TOPIC_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Topic.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Topic.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Topic.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + +_TOPIC = _descriptor.Descriptor( + name='Topic', + full_name='google.pubsub.v1.Topic', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Topic.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Topic.labels', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_TOPIC_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=221, + serialized_end=342, +) + + +_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PubsubMessage.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=515, + serialized_end=564, +) + +_PUBSUBMESSAGE = _descriptor.Descriptor( + name='PubsubMessage', + full_name='google.pubsub.v1.PubsubMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='data', full_name='google.pubsub.v1.PubsubMessage.data', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', full_name='google.pubsub.v1.PubsubMessage.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_id', full_name='google.pubsub.v1.PubsubMessage.message_id', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='publish_time', full_name='google.pubsub.v1.PubsubMessage.publish_time', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=345, + serialized_end=564, +) + + +_GETTOPICREQUEST = _descriptor.Descriptor( + name='GetTopicRequest', + full_name='google.pubsub.v1.GetTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.GetTopicRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=566, + serialized_end=598, +) + + +_UPDATETOPICREQUEST = _descriptor.Descriptor( + name='UpdateTopicRequest', + full_name='google.pubsub.v1.UpdateTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.UpdateTopicRequest.topic', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateTopicRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=600, + serialized_end=709, +) + + +_PUBLISHREQUEST = _descriptor.Descriptor( + name='PublishRequest', + full_name='google.pubsub.v1.PublishRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.PublishRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='messages', full_name='google.pubsub.v1.PublishRequest.messages', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=711, + serialized_end=793, +) + + +_PUBLISHRESPONSE = _descriptor.Descriptor( + name='PublishResponse', + full_name='google.pubsub.v1.PublishResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='message_ids', full_name='google.pubsub.v1.PublishResponse.message_ids', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=795, + serialized_end=833, +) + + +_LISTTOPICSREQUEST = _descriptor.Descriptor( + name='ListTopicsRequest', + full_name='google.pubsub.v1.ListTopicsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListTopicsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=835, + serialized_end=910, +) + + +_LISTTOPICSRESPONSE = _descriptor.Descriptor( + name='ListTopicsResponse', + full_name='google.pubsub.v1.ListTopicsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topics', 
full_name='google.pubsub.v1.ListTopicsResponse.topics', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=912, + serialized_end=998, +) + + +_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListTopicSubscriptionsRequest', + full_name='google.pubsub.v1.ListTopicSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1000, + serialized_end=1085, +) + + +_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListTopicSubscriptionsResponse', + full_name='google.pubsub.v1.ListTopicSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1087, + serialized_end=1167, +) + + +_DELETETOPICREQUEST = _descriptor.Descriptor( + name='DeleteTopicRequest', + full_name='google.pubsub.v1.DeleteTopicRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.DeleteTopicRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1169, + serialized_end=1204, +) + + +_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Subscription.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Subscription.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Subscription.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + +_SUBSCRIPTION = _descriptor.Descriptor( + name='Subscription', + full_name='google.pubsub.v1.Subscription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Subscription.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', 
full_name='google.pubsub.v1.Subscription.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.Subscription.push_config', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.Subscription.ack_deadline_seconds', index=3, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='retain_acked_messages', full_name='google.pubsub.v1.Subscription.retain_acked_messages', index=4, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message_retention_duration', full_name='google.pubsub.v1.Subscription.message_retention_duration', index=5, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Subscription.labels', index=6, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[_SUBSCRIPTION_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1207, + serialized_end=1532, +) + + +_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( + name='AttributesEntry', + full_name='google.pubsub.v1.PushConfig.AttributesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.PushConfig.AttributesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.PushConfig.AttributesEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=515, + serialized_end=564, +) + +_PUSHCONFIG = _descriptor.Descriptor( + name='PushConfig', + full_name='google.pubsub.v1.PushConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='push_endpoint', full_name='google.pubsub.v1.PushConfig.push_endpoint', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='attributes', 
full_name='google.pubsub.v1.PushConfig.attributes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1535, + serialized_end=1687, +) + + +_RECEIVEDMESSAGE = _descriptor.Descriptor( + name='ReceivedMessage', + full_name='google.pubsub.v1.ReceivedMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ack_id', full_name='google.pubsub.v1.ReceivedMessage.ack_id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='message', full_name='google.pubsub.v1.ReceivedMessage.message', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1689, + serialized_end=1772, +) + + +_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='GetSubscriptionRequest', + full_name='google.pubsub.v1.GetSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.GetSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1774, + serialized_end=1820, +) + + +_UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='UpdateSubscriptionRequest', + full_name='google.pubsub.v1.UpdateSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.UpdateSubscriptionRequest.subscription', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSubscriptionRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1823, + serialized_end=1953, +) + + +_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( + name='ListSubscriptionsRequest', + full_name='google.pubsub.v1.ListSubscriptionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSubscriptionsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1955, + serialized_end=2037, +) + + +_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( + name='ListSubscriptionsResponse', + full_name='google.pubsub.v1.ListSubscriptionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscriptions', full_name='google.pubsub.v1.ListSubscriptionsResponse.subscriptions', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSubscriptionsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2039, + serialized_end=2146, +) + + +_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name='DeleteSubscriptionRequest', + full_name='google.pubsub.v1.DeleteSubscriptionRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.DeleteSubscriptionRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2148, + serialized_end=2197, +) + + +_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( + name='ModifyPushConfigRequest', + full_name='google.pubsub.v1.ModifyPushConfigRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyPushConfigRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='push_config', full_name='google.pubsub.v1.ModifyPushConfigRequest.push_config', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2199, + 
serialized_end=2297, +) + + +_PULLREQUEST = _descriptor.Descriptor( + name='PullRequest', + full_name='google.pubsub.v1.PullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.PullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_immediately', full_name='google.pubsub.v1.PullRequest.return_immediately', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_messages', full_name='google.pubsub.v1.PullRequest.max_messages', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2299, + serialized_end=2384, +) + + +_PULLRESPONSE = _descriptor.Descriptor( + name='PullResponse', + full_name='google.pubsub.v1.PullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.PullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + 
enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2386, + serialized_end=2462, +) + + +_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( + name='ModifyAckDeadlineRequest', + full_name='google.pubsub.v1.ModifyAckDeadlineRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids', index=1, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_deadline_seconds', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2464, + serialized_end=2559, +) + + +_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( + name='AcknowledgeRequest', + full_name='google.pubsub.v1.AcknowledgeRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.AcknowledgeRequest.subscription', index=0, + number=1, type=9, cpp_type=9, 
label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.AcknowledgeRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2561, + serialized_end=2620, +) + + +_STREAMINGPULLREQUEST = _descriptor.Descriptor( + name='StreamingPullRequest', + full_name='google.pubsub.v1.StreamingPullRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.StreamingPullRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.ack_ids', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='modify_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds', index=2, + number=3, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='modify_deadline_ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids', index=3, + number=4, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream_ack_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2623, + serialized_end=2787, +) + + +_STREAMINGPULLRESPONSE = _descriptor.Descriptor( + name='StreamingPullResponse', + full_name='google.pubsub.v1.StreamingPullResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='received_messages', full_name='google.pubsub.v1.StreamingPullResponse.received_messages', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2789, + serialized_end=2874, +) + + +_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='CreateSnapshotRequest', + full_name='google.pubsub.v1.CreateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.CreateSnapshotRequest.name', index=0, + number=1, 
type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.CreateSnapshotRequest.subscription', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2876, + serialized_end=2935, +) + + +_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( + name='UpdateSnapshotRequest', + full_name='google.pubsub.v1.UpdateSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.UpdateSnapshotRequest.snapshot', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.pubsub.v1.UpdateSnapshotRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2937, + serialized_end=3055, +) + + +_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.Snapshot.LabelsEntry', + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.Snapshot.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.Snapshot.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=297, + serialized_end=342, +) + +_SNAPSHOT = _descriptor.Descriptor( + name='Snapshot', + full_name='google.pubsub.v1.Snapshot', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.pubsub.v1.Snapshot.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.Snapshot.topic', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='expire_time', full_name='google.pubsub.v1.Snapshot.expire_time', index=2, + number=3, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.Snapshot.labels', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[_SNAPSHOT_LABELSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3058, + serialized_end=3249, +) + + +_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( + name='ListSnapshotsRequest', + full_name='google.pubsub.v1.ListSnapshotsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='project', full_name='google.pubsub.v1.ListSnapshotsRequest.project', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListSnapshotsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListSnapshotsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + 
], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3251, + serialized_end=3329, +) + + +_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( + name='ListSnapshotsResponse', + full_name='google.pubsub.v1.ListSnapshotsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshots', full_name='google.pubsub.v1.ListSnapshotsResponse.snapshots', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListSnapshotsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3331, + serialized_end=3426, +) + + +_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( + name='DeleteSnapshotRequest', + full_name='google.pubsub.v1.DeleteSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.DeleteSnapshotRequest.snapshot', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + 
serialized_start=3428, + serialized_end=3469, +) + + +_SEEKREQUEST = _descriptor.Descriptor( + name='SeekRequest', + full_name='google.pubsub.v1.SeekRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subscription', full_name='google.pubsub.v1.SeekRequest.subscription', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='time', full_name='google.pubsub.v1.SeekRequest.time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.SeekRequest.snapshot', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target', full_name='google.pubsub.v1.SeekRequest.target', + index=0, containing_type=None, fields=[]), + ], + serialized_start=3471, + serialized_end=3580, +) + + +_SEEKRESPONSE = _descriptor.Descriptor( + name='SeekResponse', + full_name='google.pubsub.v1.SeekResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3582, + serialized_end=3596, +) + 
+_TOPIC_LABELSENTRY.containing_type = _TOPIC +_TOPIC.fields_by_name['labels'].message_type = _TOPIC_LABELSENTRY +_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE +_PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY +_PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATETOPICREQUEST.fields_by_name['topic'].message_type = _TOPIC +_UPDATETOPICREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE +_LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION +_SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG +_SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_SUBSCRIPTION.fields_by_name['labels'].message_type = _SUBSCRIPTION_LABELSENTRY +_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG +_PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY +_RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['subscription'].message_type = _SUBSCRIPTION +_UPDATESUBSCRIPTIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTSUBSCRIPTIONSRESPONSE.fields_by_name['subscriptions'].message_type = _SUBSCRIPTION +_MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG +_PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_UPDATESNAPSHOTREQUEST.fields_by_name['snapshot'].message_type = _SNAPSHOT +_UPDATESNAPSHOTREQUEST.fields_by_name['update_mask'].message_type = 
google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT +_SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SNAPSHOT.fields_by_name['labels'].message_type = _SNAPSHOT_LABELSENTRY +_LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT +_SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SEEKREQUEST.oneofs_by_name['target'].fields.append( + _SEEKREQUEST.fields_by_name['time']) +_SEEKREQUEST.fields_by_name['time'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +_SEEKREQUEST.oneofs_by_name['target'].fields.append( + _SEEKREQUEST.fields_by_name['snapshot']) +_SEEKREQUEST.fields_by_name['snapshot'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC +DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE +DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name['UpdateTopicRequest'] = _UPDATETOPICREQUEST +DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST +DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicsResponse'] = _LISTTOPICSRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsRequest'] = _LISTTOPICSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsResponse'] = _LISTTOPICSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST +DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG +DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE +DESCRIPTOR.message_types_by_name['GetSubscriptionRequest'] = _GETSUBSCRIPTIONREQUEST 
+DESCRIPTOR.message_types_by_name['UpdateSubscriptionRequest'] = _UPDATESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsRequest'] = _LISTSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name['ListSubscriptionsResponse'] = _LISTSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSubscriptionRequest'] = _DELETESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name['ModifyPushConfigRequest'] = _MODIFYPUSHCONFIGREQUEST +DESCRIPTOR.message_types_by_name['PullRequest'] = _PULLREQUEST +DESCRIPTOR.message_types_by_name['PullResponse'] = _PULLRESPONSE +DESCRIPTOR.message_types_by_name['ModifyAckDeadlineRequest'] = _MODIFYACKDEADLINEREQUEST +DESCRIPTOR.message_types_by_name['AcknowledgeRequest'] = _ACKNOWLEDGEREQUEST +DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST +DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE +DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['UpdateSnapshotRequest'] = _UPDATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT +DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE +DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name['SeekRequest'] = _SEEKREQUEST +DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE + +Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _TOPIC_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) + )) + , + DESCRIPTOR = _TOPIC, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A topic 
resource. + + + Attributes: + name: + The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters (``[A-Za-z]``), + numbers (``[0-9]``), dashes (``-``), underscores (``_``), + periods (``.``), tildes (``~``), plus (``+``) or percent signs + (``%``). It must be between 3 and 255 characters in length, + and it must not start with ``"goog"``. + labels: + User labels. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) + )) +_sym_db.RegisterMessage(Topic) +_sym_db.RegisterMessage(Topic.LabelsEntry) + +PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUBSUBMESSAGE_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) + )) + , + DESCRIPTOR = _PUBSUBMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message data and its attributes. The message payload must not be + empty; it must contain either a non-empty data field, or at least one + attribute. + + + Attributes: + data: + The message payload. + attributes: + Optional attributes for this message. + message_id: + ID of this message, assigned by the server when the message is + published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` call. + publish_time: + The time at which the message was published, populated by the + server when it receives the ``Publish`` call. It must not be + populated by the publisher in a ``Publish`` call. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) + )) +_sym_db.RegisterMessage(PubsubMessage) +_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) + +GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _GETTOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetTopic method. + + + Attributes: + topic: + The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) + )) +_sym_db.RegisterMessage(GetTopicRequest) + +UpdateTopicRequest = _reflection.GeneratedProtocolMessageType('UpdateTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateTopic method. + + + Attributes: + topic: + The topic to update. + update_mask: + Indicates which fields in the provided topic to update. Must + be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) + )) +_sym_db.RegisterMessage(UpdateTopicRequest) + +PublishRequest = _reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Publish method. + + + Attributes: + topic: + The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages: + The messages to publish. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) + )) +_sym_db.RegisterMessage(PublishRequest) + +PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( + DESCRIPTOR = _PUBLISHRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Publish`` method. + + + Attributes: + message_ids: + The server-assigned ID of each published message, in the same + order as the messages in the request. IDs are guaranteed to be + unique within the topic. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) + )) +_sym_db.RegisterMessage(PublishResponse) + +ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopics`` method. + + + Attributes: + project: + The name of the cloud project that topics belong to. Format is + ``projects/{project}``. + page_size: + Maximum number of topics to return. + page_token: + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) + )) +_sym_db.RegisterMessage(ListTopicsRequest) + +ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopics`` method. + + + Attributes: + topics: + The resulting topics. + next_page_token: + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) + )) +_sym_db.RegisterMessage(ListTopicsResponse) + +ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopicSubscriptions`` method. + + + Attributes: + topic: + The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size: + Maximum number of subscription names to return. + page_token: + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) + +ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopicSubscriptions`` method. + + + Attributes: + subscriptions: + The names of the subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) + +DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETETOPICREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteTopic`` method. + + + Attributes: + topic: + Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) + )) +_sym_db.RegisterMessage(DeleteTopicRequest) + +Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SUBSCRIPTION_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) + )) + , + DESCRIPTOR = _SUBSCRIPTION, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A subscription resource. + + + Attributes: + name: + The name of the subscription. It must have the format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and 255 + characters in length, and it must not start with ``"goog"``. + topic: + The name of the topic from which this subscription is + receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this field + will be ``_deleted-topic_`` if the topic has been deleted. + push_config: + If push delivery is used with this subscription, this field is + used to configure it. 
An empty ``pushConfig`` signifies that + the subscriber will pull and ack messages using API methods. + ack_deadline_seconds: + This value is the maximum time after a subscriber receives a + message before the subscriber should acknowledge the message. + After message delivery but before the ack deadline expires and + before the message is acknowledged, it is an outstanding + message and will not be delivered again during that time (on a + best-effort basis). For pull subscriptions, this value is + used as the initial value for the ack deadline. To override + this value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using pull. The minimum + custom deadline you can specify is 10 seconds. The maximum + custom deadline you can specify is 600 seconds (10 minutes). + If this parameter is 0, a default value of 10 seconds is used. + For push delivery, this value is also used to set the request + timeout for the call to the push endpoint. If the subscriber + never acknowledges the message, the Pub/Sub system will + eventually redeliver the message. + retain_acked_messages: + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out of + the ``message_retention_duration`` window. + message_retention_duration: + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. + labels: + User labels. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) + )) +_sym_db.RegisterMessage(Subscription) +_sym_db.RegisterMessage(Subscription.LabelsEntry) + +PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( + + AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( + DESCRIPTOR = _PUSHCONFIG_ATTRIBUTESENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) + )) + , + DESCRIPTOR = _PUSHCONFIG, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Configuration for a push delivery endpoint. + + + Attributes: + push_endpoint: + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + "https://example.com/push". + attributes: + Endpoint configuration attributes. Every endpoint has a set + of API supported attributes that can be used to control + different aspects of the message delivery. The currently + supported attribute is ``x-goog-version``, which you can use + to change the format of the pushed message. This attribute + indicates the version of the data expected by the endpoint. + This controls the shape of the pushed message (i.e., its + fields and metadata). The endpoint version is based on the + version of the Pub/Sub API. If not present during the + ``CreateSubscription`` call, it will default to the version of + the API used to make such call. If not present during a + ``ModifyPushConfig`` call, its value will not be changed. + ``GetSubscription`` calls will always return a valid version, + even if the subscription was created without this attribute. + The possible values for this attribute are: - ``v1beta1``: + uses the push format defined in the v1beta1 Pub/Sub API. - + ``v1`` or ``v1beta2``: uses the push format defined in the v1 + Pub/Sub API. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) + )) +_sym_db.RegisterMessage(PushConfig) +_sym_db.RegisterMessage(PushConfig.AttributesEntry) + +ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( + DESCRIPTOR = _RECEIVEDMESSAGE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A message and its corresponding acknowledgment ID. + + + Attributes: + ack_id: + This ID can be used to acknowledge the received message. + message: + The message. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) + )) +_sym_db.RegisterMessage(ReceivedMessage) + +GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the GetSubscription method. + + + Attributes: + subscription: + The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) + )) +_sym_db.RegisterMessage(GetSubscriptionRequest) + +UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSubscription method. + + + Attributes: + subscription: + The updated subscription object. + update_mask: + Indicates which fields in the provided subscription to update. + Must be specified and non-empty. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) + )) +_sym_db.RegisterMessage(UpdateSubscriptionRequest) + +ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSubscriptions`` method. + + + Attributes: + project: + The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size: + Maximum number of subscriptions to return. + page_token: + The value returned by the last ``ListSubscriptionsResponse``; + indicates that this is a continuation of a prior + ``ListSubscriptions`` call, and that the system should return + the next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) + )) +_sym_db.RegisterMessage(ListSubscriptionsRequest) + +ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSubscriptions`` method. + + + Attributes: + subscriptions: + The subscriptions that match the request. + next_page_token: + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListSubscriptionsRequest`` to get more subscriptions. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) + )) +_sym_db.RegisterMessage(ListSubscriptionsResponse) + +DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the DeleteSubscription method. + + + Attributes: + subscription: + The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) + )) +_sym_db.RegisterMessage(DeleteSubscriptionRequest) + +ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyPushConfig method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + push_config: + The push configuration for future deliveries. An empty + ``pushConfig`` indicates that the Pub/Sub system should stop + pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) + )) +_sym_db.RegisterMessage(ModifyPushConfigRequest) + +PullRequest = _reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( + DESCRIPTOR = _PULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Pull`` method. + + + Attributes: + subscription: + The subscription from which messages should be pulled. Format + is ``projects/{project}/subscriptions/{sub}``. 
+ return_immediately: + If this field set to true, the system will respond immediately + even if it there are no messages available to return in the + ``Pull`` response. Otherwise, the system may wait (for a + bounded amount of time) until at least one message is + available, rather than returning no messages. The client may + cancel the request if it does not wish to wait any longer for + the response. + max_messages: + The maximum number of messages returned for this request. The + Pub/Sub system may return fewer than the number specified. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) + )) +_sym_db.RegisterMessage(PullRequest) + +PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( + DESCRIPTOR = _PULLRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``Pull`` method. + + + Attributes: + received_messages: + Received Pub/Sub messages. The Pub/Sub system will return zero + messages if there are no more available in the backlog. The + Pub/Sub system may return fewer than the ``maxMessages`` + requested even if there are more messages available in the + backlog. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) + )) +_sym_db.RegisterMessage(PullResponse) + +ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ModifyAckDeadline method. + + + Attributes: + subscription: + The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids: + List of acknowledgment IDs. + ack_deadline_seconds: + The new ack deadline with respect to the time this request was + sent to the Pub/Sub system. 
For example, if the value is 10, + the new ack deadline will expire 10 seconds after the + ``ModifyAckDeadline`` call was made. Specifying zero may + immediately make the message available for another pull + request. The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 + minutes). + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) + )) +_sym_db.RegisterMessage(ModifyAckDeadlineRequest) + +AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( + DESCRIPTOR = _ACKNOWLEDGEREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the Acknowledge method. + + + Attributes: + subscription: + The subscription whose message is being acknowledged. Format + is ``projects/{project}/subscriptions/{sub}``. + ack_ids: + The acknowledgment ID for the messages being acknowledged that + was returned by the Pub/Sub system in the ``Pull`` response. + Must not be empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) + )) +_sym_db.RegisterMessage(AcknowledgeRequest) + +StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``StreamingPull`` streaming RPC method. This request is + used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to the + server. + + + Attributes: + subscription: + The subscription for which to initialize the new stream. This + must be provided in the first request on the stream, and must + not be set in subsequent requests from client to server. + Format is ``projects/{project}/subscriptions/{sub}``. 
+ ack_ids: + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID is + malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds: + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be the + same as the size of ``modify_deadline_ack_ids``. If it differs + the stream will be aborted with ``INVALID_ARGUMENT``. Each + element in this list is applied to the element in the same + position in ``modify_deadline_ack_ids``. The new ack deadline + is with respect to the time this request was sent to the + Pub/Sub system. Must be >= 0. For example, if the value is 10, + the new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids: + List of acknowledgement IDs whose deadline will be modified + based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if + the processing was interrupted. + stream_ack_deadline_seconds: + The ack deadline to use for the stream. This must be provided + in the first request on the stream, but it can also be updated + on subsequent requests from client to server. The minimum + deadline you can specify is 10 seconds. The maximum deadline + you can specify is 600 seconds (10 minutes). 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) + )) +_sym_db.RegisterMessage(StreamingPullRequest) + +StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( + DESCRIPTOR = _STREAMINGPULLRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + + Attributes: + received_messages: + Received Pub/Sub messages. This will not be empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) + )) +_sym_db.RegisterMessage(StreamingPullResponse) + +CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``CreateSnapshot`` method. + + + Attributes: + name: + Optional user-provided name for this snapshot. If the name is + not provided in the request, the server will assign a random + name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. Format is + ``projects/{project}/snapshots/{snap}``. + subscription: + The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + (a) The existing backlog on the subscription. More precisely, + this is defined as the messages in the subscription's backlog + that are unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the successful + completion of the CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) + )) +_sym_db.RegisterMessage(CreateSnapshotRequest) + +UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the UpdateSnapshot method. + + + Attributes: + snapshot: + The updated snpashot object. + update_mask: + Indicates which fields in the provided snapshot to update. + Must be specified and non-empty. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) + )) +_sym_db.RegisterMessage(UpdateSnapshotRequest) + +Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _SNAPSHOT_LABELSENTRY, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) + )) + , + DESCRIPTOR = _SNAPSHOT, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """A snapshot resource. + + + Attributes: + name: + The name of the snapshot. + topic: + The name of the topic from which this snapshot is retaining + messages. + expire_time: + The snapshot is guaranteed to exist up until this time. A + newly-created snapshot expires no later than 7 days from the + time of its creation. Its exact lifetime is determined at + creation by the existing backlog in the source subscription. + Specifically, the lifetime of the snapshot is ``7 days - (age + of oldest unacked message in the subscription)``. For example, + consider a subscription whose oldest unacked message is 3 days + old. 
If a snapshot is created from this subscription, the + snapshot -- which will always capture this 3-day-old backlog + as long as the snapshot exists -- will expire in 4 days. + labels: + User labels. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) + )) +_sym_db.RegisterMessage(Snapshot) +_sym_db.RegisterMessage(Snapshot.LabelsEntry) + +ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTSNAPSHOTSREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``ListSnapshots`` method. + + + Attributes: + project: + The name of the cloud project that snapshots belong to. Format + is ``projects/{project}``. + page_size: + Maximum number of snapshots to return. + page_token: + The value returned by the last ``ListSnapshotsResponse``; + indicates that this is a continuation of a prior + ``ListSnapshots`` call, and that the system should return the + next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) + )) +_sym_db.RegisterMessage(ListSnapshotsRequest) + +ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Response for the ``ListSnapshots`` method. + + + Attributes: + snapshots: + The resulting snapshots. + next_page_token: + If not empty, indicates that there may be more snapshot that + match the request; this value should be passed in a new + ``ListSnapshotsRequest``. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) + )) +_sym_db.RegisterMessage(ListSnapshotsResponse) + +DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETESNAPSHOTREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``DeleteSnapshot`` method. + + + Attributes: + snapshot: + The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) + )) +_sym_db.RegisterMessage(DeleteSnapshotRequest) + +SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( + DESCRIPTOR = _SEEKREQUEST, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + , + __doc__ = """Request for the ``Seek`` method. + + + Attributes: + subscription: + The subscription to affect. + time: + The time to seek to. Messages retained in the subscription + that were published before this time are marked as + acknowledged, and messages retained in the subscription that + were published after this time are marked as unacknowledged. + Note that this operation affects only those messages retained + in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). + For example, if ``time`` corresponds to a point before the + message retention window (or to a point before the system's + notion of the subscription creation time), only retained + messages will be marked as unacknowledged, and already- + expunged messages will not be restored. + snapshot: + The snapshot to seek to. The snapshot's topic must be the same + as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) + )) +_sym_db.RegisterMessage(SeekRequest) + +SeekResponse = _reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( + DESCRIPTOR = _SEEKRESPONSE, + __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) + )) +_sym_db.RegisterMessage(SeekResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +_TOPIC_LABELSENTRY.has_options = True +_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True +_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SUBSCRIPTION_LABELSENTRY.has_options = True +_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUSHCONFIG_ATTRIBUTESENTRY.has_options = True +_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SNAPSHOT_LABELSENTRY.has_options = True +_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +try: + # THESE ELEMENTS WILL BE DEPRECATED. + # Please use the generated *_pb2_grpc.py files instead. + import grpc + from grpc.beta import implementations as beta_implementations + from grpc.beta import interfaces as beta_interfaces + from grpc.framework.common import cardinality + from grpc.framework.interfaces.face import utilities as face_utilities + + + class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. 
+ """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=Subscription.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=GetSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=UpdateSubscriptionRequest.SerializeToString, + response_deserializer=Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=ListSubscriptionsRequest.SerializeToString, + response_deserializer=ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=PullRequest.SerializeToString, + response_deserializer=PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + 
request_serializer=StreamingPullRequest.SerializeToString, + response_deserializer=StreamingPullResponse.FromString, + ) + self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=ListSnapshotsRequest.SerializeToString, + response_deserializer=ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=CreateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=UpdateSnapshotRequest.SerializeToString, + response_deserializer=Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=SeekRequest.SerializeToString, + response_deserializer=SeekResponse.FromString, + ) + + + class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. 
+ + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. 
After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=Subscription.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=GetSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=UpdateSubscriptionRequest.FromString, + response_serializer=Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=ListSubscriptionsRequest.FromString, + response_serializer=ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + request_deserializer=ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + 
request_deserializer=AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=PullRequest.FromString, + response_serializer=PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=StreamingPullRequest.FromString, + response_serializer=StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=ListSnapshotsRequest.FromString, + response_serializer=ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=CreateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=UpdateSnapshotRequest.FromString, + response_serializer=Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=SeekRequest.FromString, + response_serializer=SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class PublisherStub(object): + """The service that an application uses to 
manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=Topic.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=UpdateTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=PublishRequest.SerializeToString, + response_deserializer=PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=GetTopicRequest.SerializeToString, + response_deserializer=Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=ListTopicsRequest.SerializeToString, + response_deserializer=ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + + class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. 
Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + + def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=Topic.FromString, + response_serializer=Topic.SerializeToString, + ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=UpdateTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=PublishRequest.FromString, + response_serializer=PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=GetTopicRequest.FromString, + response_serializer=Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=ListTopicsRequest.FromString, + response_serializer=ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=ListTopicSubscriptionsRequest.FromString, + response_serializer=ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 
'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + class BetaSubscriberServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. 
This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. 
+ Note that for REST API requests, you must specify a name in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaSubscriberStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + def CreateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. 
+ + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + raise NotImplementedError() + CreateSubscription.future = None + def GetSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration details of a subscription. + """ + raise NotImplementedError() + GetSubscription.future = None + def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateSubscription.future = None + def ListSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching subscriptions. + """ + raise NotImplementedError() + ListSubscriptions.future = None + def DeleteSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. 
+ """ + raise NotImplementedError() + DeleteSubscription.future = None + def ModifyAckDeadline(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ + raise NotImplementedError() + ModifyAckDeadline.future = None + def Acknowledge(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + raise NotImplementedError() + Acknowledge.future = None + def Pull(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + raise NotImplementedError() + Pull.future = None + def StreamingPull(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. 
The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + """ + raise NotImplementedError() + def ModifyPushConfig(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + raise NotImplementedError() + ModifyPushConfig.future = None + def ListSnapshots(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the existing snapshots. + """ + raise NotImplementedError() + ListSnapshots.future = None + def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. 
+ """ + raise NotImplementedError() + CreateSnapshot.future = None + def UpdateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateSnapshot.future = None + def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + raise NotImplementedError() + DeleteSnapshot.future = None + def Seek(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + """ + raise NotImplementedError() + Seek.future = None + + + def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 
'GetSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): face_utilities.unary_unary_inline(servicer.Acknowledge), + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): face_utilities.unary_unary_inline(servicer.CreateSnapshot), + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): face_utilities.unary_unary_inline(servicer.CreateSubscription), + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): face_utilities.unary_unary_inline(servicer.DeleteSnapshot), + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): face_utilities.unary_unary_inline(servicer.DeleteSubscription), + ('google.pubsub.v1.Subscriber', 'GetSubscription'): face_utilities.unary_unary_inline(servicer.GetSubscription), + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): face_utilities.unary_unary_inline(servicer.ListSnapshots), + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): face_utilities.unary_unary_inline(servicer.ListSubscriptions), + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): face_utilities.unary_unary_inline(servicer.ModifyAckDeadline), + ('google.pubsub.v1.Subscriber', 
'ModifyPushConfig'): face_utilities.unary_unary_inline(servicer.ModifyPushConfig), + ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), + ('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), + ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): face_utilities.unary_unary_inline(servicer.UpdateSnapshot), + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.SerializeToString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.FromString, + ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): 
google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.FromString, + ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, + ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.FromString, + ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, + } + cardinalities = { + 'Acknowledge': cardinality.Cardinality.UNARY_UNARY, + 'CreateSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'CreateSubscription': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'DeleteSubscription': cardinality.Cardinality.UNARY_UNARY, + 'GetSubscription': cardinality.Cardinality.UNARY_UNARY, + 'ListSnapshots': cardinality.Cardinality.UNARY_UNARY, + 'ListSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ModifyAckDeadline': cardinality.Cardinality.UNARY_UNARY, + 'ModifyPushConfig': cardinality.Cardinality.UNARY_UNARY, + 'Pull': cardinality.Cardinality.UNARY_UNARY, + 'Seek': cardinality.Cardinality.UNARY_UNARY, + 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, + 'UpdateSnapshot': cardinality.Cardinality.UNARY_UNARY, + 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, 
thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Subscriber', cardinalities, options=stub_options) + + + class BetaPublisherServicer(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopics(self, request, context): + """Lists matching topics. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. 
+ """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) + + + class BetaPublisherStub(object): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This class was generated + only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Creates the given topic with the given name. + """ + raise NotImplementedError() + CreateTopic.future = None + def UpdateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + raise NotImplementedError() + UpdateTopic.future = None + def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. 
+ """ + raise NotImplementedError() + Publish.future = None + def GetTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Gets the configuration of a topic. + """ + raise NotImplementedError() + GetTopic.future = None + def ListTopics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists matching topics. + """ + raise NotImplementedError() + ListTopics.future = None + def ListTopicSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Lists the name of the subscriptions for this topic. + """ + raise NotImplementedError() + ListTopicSubscriptions.future = None + def DeleteTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + raise NotImplementedError() + DeleteTopic.future = None + + + def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. 
This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.FromString, + } + response_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.SerializeToString, + } + method_implementations = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), + ('google.pubsub.v1.Publisher', 'DeleteTopic'): face_utilities.unary_unary_inline(servicer.DeleteTopic), + ('google.pubsub.v1.Publisher', 'GetTopic'): face_utilities.unary_unary_inline(servicer.GetTopic), + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), + ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), + ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), + 
('google.pubsub.v1.Publisher', 'UpdateTopic'): face_utilities.unary_unary_inline(servicer.UpdateTopic), + } + server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) + return beta_implementations.server(method_implementations, options=server_options) + + + def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): + """The Beta API is deprecated for 0.15.0 and later. + + It is recommended to use the GA API (classes and functions in this + file not marked beta) for all further purposes. This function was + generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" + request_serializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, + ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.SerializeToString, + } + response_deserializers = { + ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.FromString, + ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, + ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, + ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, + 
('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.FromString, + } + cardinalities = { + 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, + 'DeleteTopic': cardinality.Cardinality.UNARY_UNARY, + 'GetTopic': cardinality.Cardinality.UNARY_UNARY, + 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, + 'ListTopics': cardinality.Cardinality.UNARY_UNARY, + 'Publish': cardinality.Cardinality.UNARY_UNARY, + 'UpdateTopic': cardinality.Cardinality.UNARY_UNARY, + } + stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) + return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) +except ImportError: + pass +# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py new file mode 100644 index 000000000000..06dd470470d8 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py @@ -0,0 +1,509 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + +import google.cloud.proto.pubsub.v1.pubsub_pb2 as google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2 +import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class SubscriberStub(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.GetSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.UpdateSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + ) + self.ListSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + ) + self.DeleteSubscription = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ModifyAckDeadline = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + 
self.Acknowledge = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Pull = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.FromString, + ) + self.StreamingPull = channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.FromString, + ) + self.ModifyPushConfig = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.ListSnapshots = channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + ) + self.CreateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) + self.UpdateSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + 
request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + ) + self.DeleteSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.Seek = channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.FromString, + ) + + +class SubscriberServicer(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method. + """ + + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: "subscription" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Pull(self, request, context): + """Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return `UNAVAILABLE` if + there are too many concurrent pull requests pending for the given + subscription. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def StreamingPull(self, request_iterator, context): + """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `OK` to reassign + server-side resources, in which case, the client should re-establish the + stream. `UNAVAILABLE` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. 
Flow + control can be achieved by configuring the underlying RPC channel. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListSnapshots(self, request, context): + """Lists the existing snapshots. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription. + If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateSnapshot(self, request, context): + """Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. 
+ NOTE: The style guide requires body: "snapshot" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteSnapshot(self, request, context): + """Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_SubscriberServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'GetSubscription': grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + ), + 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, + ), + 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Acknowledge': grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Pull': grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.SerializeToString, + ), + 'StreamingPull': grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, + ), + 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, + ), + 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, + 
response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'Seek': grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Subscriber', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + +class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + 
'/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: "topic" instead of body: "*". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the name of the subscriptions for this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.SerializeToString, + ), + 
'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py new file mode 100644 index 000000000000..bf094f6cf03a --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -0,0 +1,26 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import PublisherClient +from google.cloud.pubsub_v1 import SubscriberClient +from google.cloud.pubsub_v1 import types + + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', + 'types', +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py deleted file mode 100644 index 070e8243bf2b..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google Cloud Pubsub API wrapper. - -The main concepts with this API are: - -- :class:`~google.cloud.pubsub.topic.Topic` represents an endpoint to which - messages can be published using the Cloud Storage Pubsub API. - -- :class:`~google.cloud.pubsub.subscription.Subscription` represents a named - subscription (either pull or push) to a topic. 
-""" - - -from pkg_resources import get_distribution -__version__ = get_distribution('google-cloud-pubsub').version - -from google.cloud.pubsub.client import Client -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -__all__ = ['__version__', 'Client', 'Subscription', 'Topic'] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py deleted file mode 100644 index 35e56717b3c2..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_gax.py +++ /dev/null @@ -1,802 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""GAX wrapper for Pubsub API requests.""" - -import functools - -from google.api.core import page_iterator -from google.cloud.gapic.pubsub.v1.publisher_client import PublisherClient -from google.cloud.gapic.pubsub.v1.subscriber_client import SubscriberClient -from google.gax import CallOptions -from google.gax import INITIAL_PAGE -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from google.protobuf.json_format import MessageToDict -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PubsubMessage -from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig -from grpc import insecure_channel -from grpc import StatusCode - -from google.cloud._helpers import _to_bytes -from google.cloud._helpers import _pb_timestamp_to_rfc3339 -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud._helpers import make_secure_channel -from google.cloud._http import DEFAULT_USER_AGENT -from google.cloud.exceptions import Conflict -from google.cloud.exceptions import NotFound -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -_CONFLICT_ERROR_CODES = ( - StatusCode.FAILED_PRECONDITION, StatusCode.ALREADY_EXISTS) - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type gax_api: :class:`.publisher_client.PublisherClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_topics(self, project, page_size=0, page_token=None): - """List topics for the project associated with this API. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_topics( - path, page_size=page_size, options=options) - return page_iterator._GAXIterator( - self._client, page_iter, _item_to_topic) - - def topic_create(self, topic_path): - """API call: create a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the topic already - exists - """ - try: - topic_pb = self._gax_api.create_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: - raise Conflict(topic_path) - raise - return {'name': topic_pb.name} - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. 
- :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - try: - topic_pb = self._gax_api.get_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return {'name': topic_pb.name} - - def topic_delete(self, topic_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - """ - try: - self._gax_api.delete_topic(topic_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - def topic_publish(self, topic_path, messages, timeout=30): - """API call: publish one or more messages to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :type timeout: int - :param timeout: (Optional) Timeout seconds. - - :rtype: list of string - :returns: list of opaque IDs for published messages. 
- :raises: :exc:`google.cloud.exceptions.NotFound` if the topic does not - exist - """ - options = CallOptions(is_bundling=False, timeout=timeout) - message_pbs = [_message_pb_from_mapping(message) - for message in messages] - try: - result = self._gax_api.publish(topic_path, message_pbs, - options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - return result.message_ids - - def topic_list_subscriptions(self, topic, page_size=0, page_token=None): - """API call: list subscriptions bound to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - :raises: :exc:`~google.cloud.exceptions.NotFound` if the topic does - not exist. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - topic_path = topic.full_name - try: - page_iter = self._gax_api.list_topic_subscriptions( - topic_path, page_size=page_size, options=options) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(topic_path) - raise - - iterator = page_iterator._GAXIterator( - self._client, page_iter, _item_to_subscription_for_topic) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. 
- - :type gax_api: :class:`.publisher_client.SubscriberClient` - :param gax_api: API object used to make GAX requests. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns this API object. - """ - def __init__(self, gax_api, client): - self._gax_api = gax_api - self._client = client - - def list_subscriptions(self, project, page_size=0, page_token=None): - """List subscriptions for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_subscriptions( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. 
- topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return page_iterator._GAXIterator( - self._client, page_iter, item_to_value) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. 
- """ - if push_endpoint is not None: - push_config = PushConfig(push_endpoint=push_endpoint) - else: - push_config = None - - if message_retention_duration is not None: - message_retention_duration = _timedelta_to_duration_pb( - message_retention_duration) - - try: - sub_pb = self._gax_api.create_subscription( - subscription_path, topic_path, - push_config=push_config, ack_deadline_seconds=ack_deadline, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - except GaxError as exc: - if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: - raise Conflict(topic_path) - raise - return MessageToDict(sub_pb) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - try: - sub_pb = self._gax_api.get_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(sub_pb) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. 
- """ - try: - self._gax_api.delete_subscription(subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - push_config = PushConfig(push_endpoint=push_endpoint) - try: - self._gax_api.modify_push_config(subscription_path, push_config) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to pull from, in - format ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. 
- """ - try: - response_pb = self._gax_api.pull( - subscription_path, max_messages, - return_immediately=return_immediately) - except GaxError as exc: - code = exc_to_code(exc.cause) - if code == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - elif code == StatusCode.DEADLINE_EXCEEDED: - # NOTE: The JSON-over-HTTP API returns a 200 with an empty - # response when ``return_immediately`` is ``False``, so - # we "mutate" the gRPC error into a non-error to conform. - if not return_immediately: - return [] - raise - return [_received_message_pb_to_mapping(rmpb) - for rmpb in response_pb.received_messages] - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - try: - self._gax_api.acknowledge(subscription_path, ack_ids) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. 
- """ - try: - self._gax_api.modify_ack_deadline( - subscription_path, ack_ids, ack_deadline) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: :class:`.timestamp_pb2.Timestamp` - :param time: The time to seek to. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - try: - self._gax_api.seek(subscription_path, time=time, snapshot=snapshot) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - - def list_snapshots(self, project, page_size=0, page_token=None): - """List snapshots for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. 
- """ - if page_token is None: - page_token = INITIAL_PAGE - options = CallOptions(page_token=page_token) - path = 'projects/%s' % (project,) - page_iter = self._gax_api.list_snapshots( - path, page_size=page_size, options=options) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return page_iterator._GAXIterator( - self._client, page_iter, item_to_value) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. - :raises: :exc:`google.cloud.exceptions.Conflict` if the snapshot - already exists - :raises: :exc:`google.cloud.exceptions.NotFound` if the subscription - does not exist - """ - try: - snapshot_pb = self._gax_api.create_snapshot( - snapshot_path, subscription_path) - except GaxError as exc: - if exc_to_code(exc.cause) in _CONFLICT_ERROR_CODES: - raise Conflict(snapshot_path) - elif exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(subscription_path) - raise - return MessageToDict(snapshot_pb) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. 
- - :raises: :exc:`google.cloud.exceptions.NotFound` if the snapshot does - not exist - """ - try: - self._gax_api.delete_snapshot(snapshot_path) - except GaxError as exc: - if exc_to_code(exc.cause) == StatusCode.NOT_FOUND: - raise NotFound(snapshot_path) - raise - - -def _message_pb_from_mapping(message): - """Helper for :meth:`_PublisherAPI.topic_publish`. - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return PubsubMessage(data=_to_bytes(message['data']), - attributes=message['attributes']) - - -def _message_pb_to_mapping(message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'messageId': message_pb.message_id, - 'data': message_pb.data, - 'attributes': message_pb.attributes, - 'publishTime': _pb_timestamp_to_rfc3339(message_pb.publish_time), - } - - -def _received_message_pb_to_mapping(received_message_pb): - """Helper for :meth:`pull`, et aliae - - Performs "impedance matching" between the protobuf attrs and the keys - expected in the JSON API. - """ - return { - 'ackId': received_message_pb.ack_id, - 'message': _message_pb_to_mapping( - received_message_pb.message), - } - - -def make_gax_publisher_api(credentials=None, host=None): - """Create an instance of the GAX Publisher API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.publisher_client.PublisherClient` - :returns: A publisher API instance with the proper channel. 
- """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - PublisherClient.SERVICE_ADDRESS) - return PublisherClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def make_gax_subscriber_api(credentials=None, host=None): - """Create an instance of the GAX Subscriber API. - - If the ``credentials`` are omitted, then we create an insecure - ``channel`` pointing at the local Pub / Sub emulator. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) Credentials for getting access - tokens. - - :type host: str - :param host: (Optional) The host for an insecure channel. Only - used if ``credentials`` are omitted. - - :rtype: :class:`.subscriber_client.SubscriberClient` - :returns: A subscriber API instance with the proper channel. - """ - if credentials is None: - channel = insecure_channel(host) - else: - channel = make_secure_channel( - credentials, DEFAULT_USER_AGENT, - SubscriberClient.SERVICE_ADDRESS) - return SubscriberClient( - channel=channel, lib_name='gccl', lib_version=__version__) - - -def _item_to_topic(iterator, resource): - """Convert a protobuf topic to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: :class:`.pubsub_pb2.Topic` - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr( - {'name': resource.name}, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, sub_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type sub_pb: :class:`.pubsub_pb2.Subscription` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(sub_pb) - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, snapshot_pb, topics): - """Convert a subscription protobuf to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. 
- - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type sub_pb: :class:`.pubsub_pb2.Snapshot` - :param sub_pb: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - resource = MessageToDict(snapshot_pb) - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py deleted file mode 100644 index 2f021f20ab3e..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_helpers.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Helper functions for shared behavior.""" - -import re - -from google.cloud._helpers import _name_from_project_path - - -_TOPIC_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /topics/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -_SUBSCRIPTION_TEMPLATE = re.compile(r""" - projects/ # static prefix - (?P[^/]+) # initial letter, wordchars + hyphen - /subscriptions/ # static midfix - (?P[^/]+) # initial letter, wordchars + allowed punc -""", re.VERBOSE) - - -def topic_name_from_path(path, project): - """Validate a topic URI path and get the topic name. - - :type path: str - :param path: URI path for a topic API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: Topic name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. - """ - return _name_from_project_path(path, project, _TOPIC_TEMPLATE) - - -def subscription_name_from_path(path, project): - """Validate a subscription URI path and get the subscription name. - - :type path: str - :param path: URI path for a subscription API request. - - :type project: str - :param project: The project associated with the request. It is - included for validation purposes. - - :rtype: str - :returns: subscription name parsed from ``path``. - :raises: :class:`ValueError` if the ``path`` is ill-formed or if - the project from the ``path`` does not agree with the - ``project`` passed in. 
- """ - return _name_from_project_path(path, project, _SUBSCRIPTION_TEMPLATE) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py b/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py deleted file mode 100644 index 5173b4095ca8..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/_http.py +++ /dev/null @@ -1,797 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Interact with Google Cloud Pub/Sub via JSON-over-HTTP.""" - -import base64 -import copy -import functools -import os - -from google.api.core import page_iterator -from google.cloud import _http -from google.cloud._helpers import _timedelta_to_duration_pb -from google.cloud.environment_vars import PUBSUB_EMULATOR - -from google.cloud.pubsub import __version__ -from google.cloud.pubsub._helpers import subscription_name_from_path -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - - -PUBSUB_API_HOST = 'pubsub.googleapis.com' -"""Pub / Sub API request host.""" - -_CLIENT_INFO = _http.CLIENT_INFO_TEMPLATE.format(__version__) - - -class Connection(_http.JSONConnection): - """A connection to Google Cloud Pub/Sub via the JSON REST API. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: The client that owns the current connection. 
- """ - - API_BASE_URL = 'https://' + PUBSUB_API_HOST - """The base of the API call URL.""" - - API_VERSION = 'v1' - """The version of the API, used in building the API call's URL.""" - - API_URL_TEMPLATE = '{api_base_url}/{api_version}{path}' - """A template for the URL of a particular API call.""" - - _EXTRA_HEADERS = { - _http.CLIENT_INFO_HEADER: _CLIENT_INFO, - } - - def __init__(self, client): - super(Connection, self).__init__(client) - emulator_host = os.getenv(PUBSUB_EMULATOR) - if emulator_host is None: - self.host = self.__class__.API_BASE_URL - self.api_base_url = self.__class__.API_BASE_URL - self.in_emulator = False - else: - self.host = emulator_host - self.api_base_url = 'http://' + emulator_host - self.in_emulator = True - - def build_api_url(self, path, query_params=None, - api_base_url=None, api_version=None): - """Construct an API url given a few components, some optional. - - Typically, you shouldn't need to use this method. - - :type path: str - :param path: The path to the resource. - - :type query_params: dict or list - :param query_params: A dictionary of keys and values (or list of - key-value pairs) to insert into the query - string of the URL. - - :type api_base_url: str - :param api_base_url: The base URL for the API endpoint. - Typically you won't have to provide this. - - :type api_version: str - :param api_version: The version of the API to call. - Typically you shouldn't provide this and instead - use the default for the library. - - :rtype: str - :returns: The URL assembled from the pieces provided. - """ - if api_base_url is None: - api_base_url = self.api_base_url - return super(Connection, self.__class__).build_api_url( - path, query_params=query_params, - api_base_url=api_base_url, api_version=api_version) - - -class _PublisherAPI(object): - """Helper mapping publisher-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. 
- """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_topics(self, project, page_size=None, page_token=None): - """API call: list topics for a given project - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current client. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/topics' % (project,) - - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_topic, - items_key='topics', - page_token=page_token, - extra_params=extra_params) - - def topic_create(self, topic_path): - """API call: create a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - :type topic_path: str - :param topic_path: the fully-qualified path of the new topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. - """ - return self.api_request(method='PUT', path='/%s' % (topic_path,)) - - def topic_get(self, topic_path): - """API call: retrieve a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :rtype: dict - :returns: ``Topic`` resource returned from the API. 
- """ - return self.api_request(method='GET', path='/%s' % (topic_path,)) - - def topic_delete(self, topic_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - """ - self.api_request(method='DELETE', path='/%s' % (topic_path,)) - - def topic_publish(self, topic_path, messages): - """API call: publish one or more messages to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic, in format - ``projects//topics/``. - - :type messages: list of dict - :param messages: messages to be published. - - :rtype: list of string - :returns: list of opaque IDs for published messages. - """ - messages_to_send = copy.deepcopy(messages) - _transform_messages_base64(messages_to_send, _base64_unicode) - data = {'messages': messages_to_send} - response = self.api_request( - method='POST', path='/%s:publish' % (topic_path,), data=data) - return response['messageIds'] - - def topic_list_subscriptions(self, topic, page_size=None, page_token=None): - """API call: list subscriptions bound to a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: The topic that owns the subscriptions. - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: list of strings - :returns: fully-qualified names of subscriptions for the supplied - topic. 
- """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/%s/subscriptions' % (topic.full_name,) - - iterator = page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=_item_to_subscription_for_topic, - items_key='subscriptions', - page_token=page_token, - extra_params=extra_params) - iterator.topic = topic - return iterator - - -class _SubscriberAPI(object): - """Helper mapping subscriber-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self._client = client - self.api_request = client._connection.api_request - - def list_subscriptions(self, project, page_size=None, page_token=None): - """API call: list subscriptions for a given project - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of subscriptions to return, If not - passed, defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of subscriptions. - If not passed, the API will return the first page - of subscriptions. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/subscriptions' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Subscription.from_api_repr, they - # can be re-used by other subscriptions from the same topic. 
- topics = {} - item_to_value = functools.partial( - _item_to_sub_for_client, topics=topics) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=item_to_value, - items_key='subscriptions', - page_token=page_token, - extra_params=extra_params) - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """API call: create a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type topic_path: str - :param topic_path: the fully-qualified path of the topic being - subscribed, in format - ``projects//topics/``. - - :type ack_deadline: int - :param ack_deadline: - (Optional) the deadline (in seconds) by which messages pulled from - the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. 
- """ - path = '/%s' % (subscription_path,) - resource = {'topic': topic_path} - - if ack_deadline is not None: - resource['ackDeadlineSeconds'] = ack_deadline - - if push_endpoint is not None: - resource['pushConfig'] = {'pushEndpoint': push_endpoint} - - if retain_acked_messages is not None: - resource['retainAckedMessages'] = retain_acked_messages - - if message_retention_duration is not None: - pb = _timedelta_to_duration_pb(message_retention_duration) - resource['messageRetentionDuration'] = { - 'seconds': pb.seconds, - 'nanos': pb.nanos - } - - return self.api_request(method='PUT', path=path, data=resource) - - def subscription_get(self, subscription_path): - """API call: retrieve a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - - :rtype: dict - :returns: ``Subscription`` resource returned from the API. - """ - path = '/%s' % (subscription_path,) - return self.api_request(method='GET', path=path) - - def subscription_delete(self, subscription_path): - """API call: delete a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the subscription, in format - ``projects//subscriptions/``. - """ - path = '/%s' % (subscription_path,) - self.api_request(method='DELETE', path=path) - - def subscription_modify_push_config(self, subscription_path, - push_endpoint): - """API call: update push config of a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - """ - path = '/%s:modifyPushConfig' % (subscription_path,) - resource = {'pushConfig': {'pushEndpoint': push_endpoint}} - self.api_request(method='POST', path=path, data=resource) - - def subscription_pull(self, subscription_path, return_immediately=False, - max_messages=1): - """API call: retrieve messages for a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. - - :rtype: list of dict - :returns: the ``receivedMessages`` element of the response. - """ - path = '/%s:pull' % (subscription_path,) - data = { - 'returnImmediately': return_immediately, - 'maxMessages': max_messages, - } - response = self.api_request(method='POST', path=path, data=data) - messages = response.get('receivedMessages', ()) - _transform_messages_base64(messages, base64.b64decode, 'message') - return messages - - def subscription_acknowledge(self, subscription_path, ack_ids): - """API call: acknowledge retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. 
- - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - """ - path = '/%s:acknowledge' % (subscription_path,) - data = { - 'ackIds': ack_ids, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - """API call: update ack deadline for retrieved messages - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type subscription_path: str - :param subscription_path: - the fully-qualified path of the new subscription, in format - ``projects//subscriptions/``. - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - """ - path = '/%s:modifyAckDeadline' % (subscription_path,) - data = { - 'ackIds': ack_ids, - 'ackDeadlineSeconds': ack_deadline, - } - self.api_request(method='POST', path=path, data=data) - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - """API call: seek a subscription - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type subscription_path: str - :param subscription_path:: - the fully-qualified path of the subscription to affect, in format - ``projects//subscriptions/``. - - :type time: str - :param time: The time to seek to, in RFC 3339 format. - - :type snapshot: str - :param snapshot: The snapshot to seek to. - """ - path = '/%s:seek' % (subscription_path,) - data = {} - if time is not None: - data['time'] = time - if snapshot is not None: - data['snapshot'] = snapshot - self.api_request(method='POST', path=path, data=data) - - def list_snapshots(self, project, page_size=None, page_token=None): - """List snapshots for the project associated with this API. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - extra_params = {} - if page_size is not None: - extra_params['pageSize'] = page_size - path = '/projects/%s/snapshots' % (project,) - - # We attach a mutable topics dictionary so that as topic - # objects are created by Snapshot.from_api_repr, they - # can be re-used by other snapshots of the same topic. - topics = {} - item_to_value = functools.partial( - _item_to_snapshot_for_client, topics=topics) - return page_iterator.HTTPIterator( - client=self._client, - api_request=self._client._connection.api_request, - path=path, - item_to_value=item_to_value, - items_key='snapshots', - page_token=page_token, - extra_params=extra_params) - - def snapshot_create(self, snapshot_path, subscription_path): - """API call: create a snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - - :type subscription_path: str - :param subscription_path: fully-qualified path of the subscrption that - the new snapshot captures, in format - ``projects//subscription/``. - - :rtype: dict - :returns: ``Snapshot`` resource returned from the API. 
- """ - path = '/%s' % (snapshot_path,) - data = {'subscription': subscription_path} - return self.api_request(method='PUT', path=path, data=data) - - def snapshot_delete(self, snapshot_path): - """API call: delete a topic - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type snapshot_path: str - :param snapshot_path: fully-qualified path of the snapshot, in format - ``projects//snapshots/``. - """ - path = '/%s' % (snapshot_path,) - self.api_request(method='DELETE', path=path) - - -class _IAMPolicyAPI(object): - """Helper mapping IAM policy-related APIs. - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: the client used to make API requests. - """ - - def __init__(self, client): - self.api_request = client._connection.api_request - - def get_iam_policy(self, target_path): - """API call: fetch the IAM policy for the target - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - path = '/%s:getIamPolicy' % (target_path,) - return self.api_request(method='GET', path=path) - - def set_iam_policy(self, target_path, policy): - """API call: update the IAM policy for the target - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - :type target_path: str - :param target_path: the path of the target object. - - :type policy: dict - :param policy: the new policy resource. - - :rtype: dict - :returns: the resource returned by the ``setIamPolicy`` API request. 
- """ - wrapped = {'policy': policy} - path = '/%s:setIamPolicy' % (target_path,) - return self.api_request(method='POST', path=path, data=wrapped) - - def test_iam_permissions(self, target_path, permissions): - """API call: test permissions - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - :type target_path: str - :param target_path: the path of the target object. - - :type permissions: list of string - :param permissions: the permissions to check - - :rtype: dict - :returns: the resource returned by the ``getIamPolicy`` API request. - """ - wrapped = {'permissions': permissions} - path = '/%s:testIamPermissions' % (target_path,) - resp = self.api_request(method='POST', path=path, data=wrapped) - return resp.get('permissions', []) - - -def _base64_unicode(value): - """Helper to base64 encode and make JSON serializable. - - :type value: str - :param value: String value to be base64 encoded and made serializable. - - :rtype: str - :returns: Base64 encoded string/unicode value. - """ - as_bytes = base64.b64encode(value) - return as_bytes.decode('ascii') - - -def _transform_messages_base64(messages, transform, key=None): - """Helper for base64 encoding and decoding messages. - - :type messages: list - :param messages: List of dictionaries with message data. - - :type transform: :class:`~types.FunctionType` - :param transform: Function to encode/decode the message data. - - :type key: str - :param key: Index to access messages. - """ - for message in messages: - if key is not None: - message = message[key] - if 'data' in message: - message['data'] = transform(message['data']) - - -def _item_to_topic(iterator, resource): - """Convert a JSON topic to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. 
- - :type resource: dict - :param resource: A topic returned from the API. - - :rtype: :class:`~google.cloud.pubsub.topic.Topic` - :returns: The next topic in the page. - """ - return Topic.from_api_repr(resource, iterator.client) - - -def _item_to_subscription_for_topic(iterator, subscription_path): - """Convert a subscription name to the native object. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type subscription_path: str - :param subscription_path: Subscription path returned from the API. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - subscription_name = subscription_name_from_path( - subscription_path, iterator.client.project) - return Subscription(subscription_name, iterator.topic) - - -def _item_to_sub_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_subscriptions`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. 
- """ - return Subscription.from_api_repr( - resource, iterator.client, topics=topics) - - -def _item_to_snapshot_for_client(iterator, resource, topics): - """Convert a subscription to the native object. - - .. note:: - - This method does not have the correct signature to be used as - the ``item_to_value`` argument to - :class:`~google.api.core.page_iterator.Iterator`. It is intended to be - patched with a mutable topics argument that can be updated - on subsequent calls. For an example, see how the method is - used above in :meth:`_SubscriberAPI.list_snapshots`. - - :type iterator: :class:`~google.api.core.page_iterator.Iterator` - :param iterator: The iterator that is currently in use. - - :type resource: dict - :param resource: A subscription returned from the API. - - :type topics: dict - :param topics: A dictionary of topics to be used (and modified) - as new subscriptions are created bound to topics. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: The next subscription in the page. - """ - return Snapshot.from_api_repr( - resource, iterator.client, topics=topics) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub/client.py deleted file mode 100644 index 0dc9b8fb6f38..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/client.py +++ /dev/null @@ -1,285 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Client for interacting with the Google Cloud Pub/Sub API.""" - -import os - -from google.cloud.client import ClientWithProject -from google.cloud.environment_vars import DISABLE_GRPC -from google.cloud.pubsub._http import Connection -from google.cloud.pubsub._http import _PublisherAPI as JSONPublisherAPI -from google.cloud.pubsub._http import _SubscriberAPI as JSONSubscriberAPI -from google.cloud.pubsub._http import _IAMPolicyAPI -from google.cloud.pubsub.subscription import Subscription -from google.cloud.pubsub.topic import Topic - -try: - from google.cloud.pubsub._gax import _PublisherAPI as GAXPublisherAPI - from google.cloud.pubsub._gax import _SubscriberAPI as GAXSubscriberAPI - from google.cloud.pubsub._gax import make_gax_publisher_api - from google.cloud.pubsub._gax import make_gax_subscriber_api -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - GAXPublisherAPI = None - GAXSubscriberAPI = None - make_gax_publisher_api = None - make_gax_subscriber_api = None -else: - _HAVE_GRPC = True - - -_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) -_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC - - -class Client(ClientWithProject): - """Client to bundle configuration needed for API requests. - - :type project: str - :param project: the project which the client acts on behalf of. Will be - passed when creating a topic. If not passed, - falls back to the default inferred from the environment. - - :type credentials: :class:`~google.auth.credentials.Credentials` - :param credentials: (Optional) The OAuth2 Credentials to use for this - client. If not passed (and if no ``_http`` object is - passed), falls back to the default inferred from the - environment. - - :type _http: :class:`~requests.Session` - :param _http: (Optional) HTTP object to make requests. Can be any object - that defines ``request()`` with the same interface as - :meth:`requests.Session.request`. 
If not passed, an - ``_http`` object is created that is bound to the - ``credentials`` for the current object. - This parameter should be considered private, and could - change in the future. - - :type _use_grpc: bool - :param _use_grpc: (Optional) Explicitly specifies whether - to use the gRPC transport (via GAX) or HTTP. If unset, - falls back to the ``GOOGLE_CLOUD_DISABLE_GRPC`` - environment variable. - This parameter should be considered private, and could - change in the future. - """ - - _publisher_api = None - _subscriber_api = None - _iam_policy_api = None - - SCOPE = ('https://www.googleapis.com/auth/pubsub', - 'https://www.googleapis.com/auth/cloud-platform') - """The scopes required for authenticating as a Cloud Pub/Sub consumer.""" - - def __init__(self, project=None, credentials=None, - _http=None, _use_grpc=None): - super(Client, self).__init__( - project=project, credentials=credentials, _http=_http) - self._connection = Connection(self) - if _use_grpc is None: - self._use_grpc = _USE_GRPC - else: - self._use_grpc = _use_grpc - - @property - def publisher_api(self): - """Helper for publisher-related API calls.""" - if self._publisher_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_publisher_api( - host=self._connection.host) - else: - generated = make_gax_publisher_api( - credentials=self._credentials) - self._publisher_api = GAXPublisherAPI(generated, self) - else: - self._publisher_api = JSONPublisherAPI(self) - return self._publisher_api - - @property - def subscriber_api(self): - """Helper for subscriber-related API calls.""" - if self._subscriber_api is None: - if self._use_grpc: - if self._connection.in_emulator: - generated = make_gax_subscriber_api( - host=self._connection.host) - else: - generated = make_gax_subscriber_api( - credentials=self._credentials) - self._subscriber_api = GAXSubscriberAPI(generated, self) - else: - self._subscriber_api = JSONSubscriberAPI(self) - return 
self._subscriber_api - - @property - def iam_policy_api(self): - """Helper for IAM policy-related API calls.""" - if self._iam_policy_api is None: - self._iam_policy_api = _IAMPolicyAPI(self) - return self._iam_policy_api - - def list_topics(self, page_size=None, page_token=None): - """List topics for the project associated with this client. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_list_topics] - :end-before: [END client_list_topics] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.topic.Topic` - accessible to the current API. - """ - api = self.publisher_api - return api.list_topics( - self.project, page_size, page_token) - - def list_subscriptions(self, page_size=None, page_token=None): - """List subscriptions for the project associated with this client. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_list_subscriptions] - :end-before: [END client_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current client. 
- """ - api = self.subscriber_api - return api.list_subscriptions( - self.project, page_size, page_token) - - def list_snapshots(self, page_size=None, page_token=None): - """List snapshots for the project associated with this API. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/list - - :type project: str - :param project: project ID - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of :class:`~google.cloud.pubsub.snapshot.Snapshot` - accessible to the current API. - """ - api = self.subscriber_api - return api.list_snapshots( - self.project, page_size, page_token) - - def topic(self, name, timestamp_messages=False): - """Creates a topic bound to the current client. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_topic] - :end-before: [END client_topic] - :dedent: 4 - - :type name: str - :param name: the name of the topic to be constructed. - - :type timestamp_messages: bool - :param timestamp_messages: To be passed to ``Topic`` constructor. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic created with the current client. - """ - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current client. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START client_subscription] - :end-before: [END client_subscription] - :dedent: 4 - - :type name: str - :param name: the name of the subscription to be constructed. 
- - :type ack_deadline: int - :param ack_deadline: (Optional) The deadline (in seconds) by which - messages pulledfrom the back-end must be - acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. - If not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by ``message_retention_duration``. If unset, defaults to 7 days. - - :rtype: :class:`~google.cloud.pubsub.subscription.Subscription` - :returns: Subscription created with the current client. - """ - return Subscription( - name, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, client=self) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py b/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py deleted file mode 100644 index 7dce1c2c4cfa..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/iam.py +++ /dev/null @@ -1,138 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -"""PubSub API IAM policy definitions - -For allowed roles / permissions, see: -https://cloud.google.com/pubsub/access_control#permissions -""" - -import warnings - -# pylint: disable=unused-import -from google.cloud.iam import OWNER_ROLE # noqa - backward compat -from google.cloud.iam import EDITOR_ROLE # noqa - backward compat -from google.cloud.iam import VIEWER_ROLE # noqa - backward compat -# pylint: enable=unused-import -from google.cloud.iam import Policy as _BasePolicy -from google.cloud.iam import _ASSIGNMENT_DEPRECATED_MSG - -# Pubsub-specific IAM roles - -PUBSUB_ADMIN_ROLE = 'roles/pubsub.admin' -"""Role implying all rights to an object.""" - -PUBSUB_EDITOR_ROLE = 'roles/pubsub.editor' -"""Role implying rights to modify an object.""" - -PUBSUB_VIEWER_ROLE = 'roles/pubsub.viewer' -"""Role implying rights to access an object.""" - -PUBSUB_PUBLISHER_ROLE = 'roles/pubsub.publisher' -"""Role implying rights to publish to a topic.""" - -PUBSUB_SUBSCRIBER_ROLE = 'roles/pubsub.subscriber' -"""Role implying rights to subscribe to a topic.""" - - -# Pubsub-specific permissions - -PUBSUB_TOPICS_CONSUME = 'pubsub.topics.consume' -"""Permission: consume events from a subscription.""" - -PUBSUB_TOPICS_CREATE = 'pubsub.topics.create' -"""Permission: create topics.""" - -PUBSUB_TOPICS_DELETE = 'pubsub.topics.delete' -"""Permission: delete topics.""" - -PUBSUB_TOPICS_GET = 'pubsub.topics.get' -"""Permission: retrieve topics.""" - -PUBSUB_TOPICS_GET_IAM_POLICY = 'pubsub.topics.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_TOPICS_LIST = 'pubsub.topics.list' -"""Permission: list topics.""" - -PUBSUB_TOPICS_SET_IAM_POLICY = 'pubsub.topics.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_CONSUME = 'pubsub.subscriptions.consume' -"""Permission: consume events from a subscription.""" - 
-PUBSUB_SUBSCRIPTIONS_CREATE = 'pubsub.subscriptions.create' -"""Permission: create subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_DELETE = 'pubsub.subscriptions.delete' -"""Permission: delete subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET = 'pubsub.subscriptions.get' -"""Permission: retrieve subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY = 'pubsub.subscriptions.getIamPolicy' -"""Permission: retrieve subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_LIST = 'pubsub.subscriptions.list' -"""Permission: list subscriptions.""" - -PUBSUB_SUBSCRIPTIONS_SET_IAM_POLICY = 'pubsub.subscriptions.setIamPolicy' -"""Permission: update subscription IAM policies.""" - -PUBSUB_SUBSCRIPTIONS_UPDATE = 'pubsub.subscriptions.update' -"""Permission: update subscriptions.""" - - -class Policy(_BasePolicy): - """IAM Policy / Bindings. - - See - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Policy - https://cloud.google.com/pubsub/docs/reference/rest/Shared.Types/Binding - """ - _OWNER_ROLES = (OWNER_ROLE, PUBSUB_ADMIN_ROLE) - """Roles mapped onto our ``owners`` attribute.""" - - _EDITOR_ROLES = (EDITOR_ROLE, PUBSUB_EDITOR_ROLE) - """Roles mapped onto our ``editors`` attribute.""" - - _VIEWER_ROLES = (VIEWER_ROLE, PUBSUB_VIEWER_ROLE) - """Roles mapped onto our ``viewers`` attribute.""" - - @property - def publishers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_PUBLISHER_ROLE, ())) - - @publishers.setter - def publishers(self, value): - """Update publishers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'publishers', PUBSUB_PUBLISHER_ROLE), - DeprecationWarning) - self[PUBSUB_PUBLISHER_ROLE] = value - - @property - def subscribers(self): - """Legacy access to owner role.""" - return frozenset(self._bindings.get(PUBSUB_SUBSCRIBER_ROLE, ())) - - @subscribers.setter - def subscribers(self, value): - """Update subscribers.""" - warnings.warn( - _ASSIGNMENT_DEPRECATED_MSG.format( - 'subscribers', 
PUBSUB_SUBSCRIBER_ROLE), - DeprecationWarning) - self[PUBSUB_SUBSCRIBER_ROLE] = value diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub/message.py deleted file mode 100644 index e2153d5cb14f..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/message.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -from google.cloud._helpers import _rfc3339_to_datetime - - -class Message(object): - """Messages can be published to a topic and received by subscribers. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage - - :type data: bytes - :param data: the payload of the message. - - :type message_id: str - :param message_id: An ID assigned to the message by the API. - - :type attributes: dict - :param attributes: - (Optional) Extra metadata associated by the publisher with the message. - """ - _service_timestamp = None - - def __init__(self, data, message_id, attributes=None): - self.data = data - self.message_id = message_id - self._attributes = attributes - - @property - def attributes(self): - """Lazily-constructed attribute dictionary.""" - if self._attributes is None: - self._attributes = {} - return self._attributes - - @property - def timestamp(self): - """Return sortable timestamp from attributes, if passed. 
- - Allows sorting messages in publication order (assuming consistent - clocks across all publishers). - - :rtype: :class:`datetime.datetime` - :returns: timestamp (in UTC timezone) parsed from RFC 3339 timestamp - :raises: ValueError if timestamp not in ``attributes``, or if it does - not match the RFC 3339 format. - """ - stamp = self.attributes.get('timestamp') - if stamp is None: - raise ValueError('No timestamp') - return _rfc3339_to_datetime(stamp) - - @property - def service_timestamp(self): - """Return server-set timestamp. - - :rtype: str - :returns: timestamp (in UTC timezone) in RFC 3339 format - """ - return self._service_timestamp - - @classmethod - def from_api_repr(cls, api_repr): - """Factory: construct message from API representation. - - :type api_repr: dict - :param api_repr: (Optional) The API representation of the message - - :rtype: :class:`Message` - :returns: The message created from the response. - """ - data = api_repr.get('data', b'') - instance = cls( - data=data, message_id=api_repr['messageId'], - attributes=api_repr.get('attributes')) - instance._service_timestamp = api_repr.get('publishTime') - return instance diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py b/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py deleted file mode 100644 index 599cd05d8765..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/snapshot.py +++ /dev/null @@ -1,140 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Snapshots.""" - -from google.cloud.pubsub._helpers import topic_name_from_path - - -class Snapshot(object): - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.snapshots.topic`` when topic has been deleted.""" - - def __init__(self, name, subscription=None, topic=None, client=None): - - num_kwargs = len( - [param for param in (subscription, topic, client) if param]) - if num_kwargs != 1: - raise TypeError( - "Pass only one of 'subscription', 'topic', 'client'.") - - self.name = name - self.topic = topic or getattr(subscription, 'topic', None) - self._subscription = subscription - self._client = client or getattr( - subscription, '_client', None) or topic._client - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a subscription given its API representation - - :type resource: dict - :param resource: snapshot resource representation returned from the - API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration. - - :type subscriptions: dict - :param subscriptions: - (Optional) A Subscription to which this snapshot belongs. If not - passed, the subscription will have a newly-created subscription. - Must have the same topic as the snapshot. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - if topic is None: - return cls(name, client=client) - return cls(name, topic=topic) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/snapshots/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/create - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. 
- """ - if not self._subscription: - raise RuntimeError( - 'Cannot create a snapshot not bound to a subscription') - - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_create(self.full_name, self._subscription.full_name) - - def delete(self, client=None): - """API call: delete the snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.snapshots/delete - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.snapshot_delete(self.full_name) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py b/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py deleted file mode 100644 index 86ca1f97c230..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/subscription.py +++ /dev/null @@ -1,590 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Define API Subscriptions.""" - -import datetime - -from google.cloud.exceptions import NotFound -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud.pubsub.snapshot import Snapshot -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.message import Message - - -class Subscription(object): - """Subscriptions receive messages published to their topics. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions - - :type name: str - :param name: the name of the subscription. - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: - (Optional) the topic to which the subscription belongs; if ``None``, - the subscription's topic has been deleted. - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: - (Optional) URL to which messages will be pushed by the back-end. If - not set, the application must pull messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - - :type client: :class:`~google.cloud.pubsub.client.Client` - :param client: - (Optional) The client to use. If not passed, falls back to the - ``client`` stored on the topic. - """ - - _DELETED_TOPIC_PATH = '_deleted-topic_' - """Value of ``projects.subscriptions.topic`` when topic has been deleted. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions#Subscription.FIELDS.topic - """ - - def __init__(self, name, topic=None, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, message_retention_duration=None, - client=None): - - if client is None and topic is None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - if client is not None and topic is not None: - raise TypeError("Pass only one of 'topic' or 'client'.") - - self.name = name - self.topic = topic - self._client = client or topic._client - self.ack_deadline = ack_deadline - self.push_endpoint = push_endpoint - self.retain_acked_messages = retain_acked_messages - self.message_retention_duration = message_retention_duration - - @classmethod - def from_api_repr(cls, resource, client, topics=None): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API. - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for a topic. - - :type topics: dict - :param topics: - (Optional) A mapping of topic names -> topics. If not passed, the - subscription will have a newly-created topic. - - :rtype: :class:`google.cloud.pubsub.subscription.Subscription` - :returns: Subscription parsed from ``resource``. - """ - if topics is None: - topics = {} - topic_path = resource['topic'] - if topic_path == cls._DELETED_TOPIC_PATH: - topic = None - else: - topic = topics.get(topic_path) - if topic is None: - # NOTE: This duplicates behavior from Topic.from_api_repr to - # avoid an import cycle. 
- topic_name = topic_name_from_path(topic_path, client.project) - topic = topics[topic_path] = client.topic(topic_name) - _, _, _, name = resource['name'].split('/') - ack_deadline = resource.get('ackDeadlineSeconds') - push_config = resource.get('pushConfig', {}) - push_endpoint = push_config.get('pushEndpoint') - retain_acked_messages = resource.get('retainAckedMessages') - resource_duration = resource.get('duration', {}) - message_retention_duration = datetime.timedelta( - seconds=resource_duration.get('seconds', 0), - microseconds=resource_duration.get('nanos', 0) / 1000) - if topic is None: - return cls(name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - client=client) - return cls(name, topic=topic, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @property - def project(self): - """Project bound to the subscription.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in subscription APIs""" - return 'projects/%s/subscriptions/%s' % (self.project, self.name) - - @property - def path(self): - """URL path for the subscription's APIs""" - return '/%s' % (self.full_name,) - - def auto_ack(self, return_immediately=False, max_messages=1, client=None): - """:class:`AutoAck` factory - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. 
- - :rtype: :class:`AutoAck` - :returns: the instance created for the given ``ack_id`` and ``message`` - """ - return AutoAck(self, return_immediately, max_messages, client) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the topic of the - current subscription. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the subscription via a PUT request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/create - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_create] - :end-before: [END subscription_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_create( - self.full_name, self.topic.full_name, - ack_deadline=self.ack_deadline, push_endpoint=self.push_endpoint, - retain_acked_messages=self.retain_acked_messages, - message_retention_duration=self.message_retention_duration) - - def exists(self, client=None): - """API call: test existence of the subscription via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_exists] - :end-before: [END subscription_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: bool - :returns: Boolean indicating existence of the subscription. - """ - client = self._require_client(client) - api = client.subscriber_api - try: - api.subscription_get(self.full_name) - except NotFound: - return False - else: - return True - - def reload(self, client=None): - """API call: sync local subscription configuration via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/get - - :attr:`ack_deadline` and :attr:`push_endpoint` might never have - been set locally, or might have been updated by another client. This - method fetches their values from the server. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_reload] - :end-before: [END subscription_reload] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - data = api.subscription_get(self.full_name) - self.ack_deadline = data.get('ackDeadlineSeconds') - push_config = data.get('pushConfig', {}) - self.push_endpoint = push_config.get('pushEndpoint') - if self.topic is None and 'topic' in data: - topic_name = topic_name_from_path(data['topic'], client.project) - self.topic = client.topic(topic_name) - - def delete(self, client=None): - """API call: delete the subscription via a DELETE request. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/delete - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_delete] - :end-before: [END subscription_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_delete(self.full_name) - - def modify_push_configuration(self, push_endpoint, client=None): - """API call: update the push endpoint for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyPushConfig - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_push_pull] - :end-before: [END subscription_push_pull] - - .. literalinclude:: snippets.py - :start-after: [START subscription_pull_push] - :end-before: [END subscription_pull_push] - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If None, the application must pull - messages. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_push_config(self.full_name, push_endpoint) - self.push_endpoint = push_endpoint - - def pull(self, return_immediately=False, max_messages=1, client=None): - """API call: retrieve messages for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_pull] - :end-before: [END subscription_pull] - - :type return_immediately: bool - :param return_immediately: if True, the back-end returns even if no - messages are available; if False, the API - call blocks until one or more messages are - available. - - :type max_messages: int - :param max_messages: the maximum number of messages to return. 
- - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: list of (ack_id, message) tuples - :returns: sequence of tuples: ``ack_id`` is the ID to be used in a - subsequent call to :meth:`acknowledge`, and ``message`` - is an instance of - :class:`~google.cloud.pubsub.message.Message`. - """ - client = self._require_client(client) - api = client.subscriber_api - response = api.subscription_pull( - self.full_name, return_immediately, max_messages) - return [(info['ackId'], Message.from_api_repr(info['message'])) - for info in response] - - def acknowledge(self, ack_ids, client=None): - """API call: acknowledge retrieved messages for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/acknowledge - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_acknowledge] - :end-before: [END subscription_acknowledge] - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being acknowledged - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_acknowledge(self.full_name, ack_ids) - - def modify_ack_deadline(self, ack_ids, ack_deadline, client=None): - """API call: update acknowledgement deadline for a retrieved message. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/modifyAckDeadline - - :type ack_ids: list of string - :param ack_ids: ack IDs of messages being updated - - :type ack_deadline: int - :param ack_deadline: new deadline for the message, in seconds - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_modify_ack_deadline( - self.full_name, ack_ids, ack_deadline) - - def snapshot(self, name, client=None): - """Creates a snapshot of this subscription. - - :type name: str - :param name: the name of the subscription - - :rtype: :class:`Snapshot` - :returns: The snapshot created with the passed in arguments. - """ - return Snapshot(name, subscription=self) - - def seek_snapshot(self, snapshot, client=None): - """API call: seek a subscription to a given snapshot - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type snapshot: :class:`Snapshot` - :param snapshot: The snapshot to seek to. - """ - client = self._require_client(client) - api = client.subscriber_api - api.subscription_seek(self.full_name, snapshot=snapshot.full_name) - - def seek_timestamp(self, timestamp, client=None): - """API call: seek a subscription to a given point in time - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/seek - - :type time: :class:`datetime.datetime` - :param time: The time to seek to. - """ - client = self._require_client(client) - timestamp = _datetime_to_rfc3339(timestamp) - api = client.subscriber_api - api.subscription_seek(self.full_name, time=timestamp) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the subscription. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/getIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_get_iam_policy] - :end-before: [END subscription_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the subscription. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/setIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_set_iam_policy] - :end-before: [END subscription_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/testIamPermissions - - Example: - - .. literalinclude:: snippets.py - :start-after: [START subscription_check_iam_permissions] - :end-before: [END subscription_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current subscription's topic. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class AutoAck(dict): - """Wrapper for :meth:`Subscription.pull` results. - - Mapping, tracks messages still-to-be-acknowledged. - - When used as a context manager, acknowledges all messages still in the - mapping on `__exit__`. When processing the pulled messages, application - code MUST delete messages from the :class:`AutoAck` mapping which are not - successfully processed, e.g.: - - .. code-block: python - - with AutoAck(subscription) as ack: # calls ``subscription.pull`` - for ack_id, message in ack.items(): - try: - do_something_with(message): - except: - del ack[ack_id] - - :type subscription: :class:`Subscription` - :param subscription: subscription to be pulled. - - :type return_immediately: bool - :param return_immediately: passed through to :meth:`Subscription.pull` - - :type max_messages: int - :param max_messages: passed through to :meth:`Subscription.pull` - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: passed through to :meth:`Subscription.pull` and - :meth:`Subscription.acknowledge`. 
- """ - def __init__(self, subscription, - return_immediately=False, max_messages=1, client=None): - super(AutoAck, self).__init__() - self._subscription = subscription - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - - def __enter__(self): - items = self._subscription.pull( - self._return_immediately, self._max_messages, self._client) - self.update(items) - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if self: - self._subscription.acknowledge(list(self), self._client) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py b/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py deleted file mode 100644 index 92f453bd2b2b..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/topic.py +++ /dev/null @@ -1,551 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Define API Topics.""" - -import base64 -import json -import time - -from google.cloud._helpers import _datetime_to_rfc3339 -from google.cloud._helpers import _NOW -from google.cloud._helpers import _to_bytes -from google.cloud.exceptions import NotFound -from google.cloud.pubsub._helpers import topic_name_from_path -from google.cloud.pubsub.iam import Policy -from google.cloud.pubsub.subscription import Subscription - - -class Topic(object): - """Topics are targets to which messages can be published. - - Subscribers then receive those messages. 
- - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics - - :type name: str - :param name: the name of the topic - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: A client which holds credentials and project configuration - for the topic (which requires a project). - - :type timestamp_messages: bool - :param timestamp_messages: If true, the topic will add a ``timestamp`` key - to the attributes of each published message: - the value will be an RFC 3339 timestamp. - """ - def __init__(self, name, client, timestamp_messages=False): - self.name = name - self._client = client - self.timestamp_messages = timestamp_messages - - def subscription(self, name, ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - """Creates a subscription bound to the current topic. - - Example: pull-mode subcription, default parameter values - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_defaults] - :end-before: [END topic_subscription_defaults] - - Example: pull-mode subcription, override ``ack_deadline`` default - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_ack90] - :end-before: [END topic_subscription_ack90] - - Example: push-mode subcription - - .. literalinclude:: snippets.py - :start-after: [START topic_subscription_push] - :end-before: [END topic_subscription_push] - - :type name: str - :param name: the name of the subscription - - :type ack_deadline: int - :param ack_deadline: the deadline (in seconds) by which messages pulled - from the back-end must be acknowledged. - - :type push_endpoint: str - :param push_endpoint: URL to which messages will be pushed by the - back-end. If not set, the application must pull - messages. - - :type retain_acked_messages: bool - :param retain_acked_messages: - (Optional) Whether to retain acked messages. 
If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. - - :type message_retention_duration: :class:`datetime.timedelta` - :param message_retention_duration: - (Optional) Whether to retain acked messages. If set, acked messages - are retained in the subscription's backlog for a duration indicated - by `message_retention_duration`. If unset, defaults to 7 days. - - :rtype: :class:`Subscription` - :returns: The subscription created with the passed in arguments. - """ - return Subscription( - name, self, ack_deadline=ack_deadline, push_endpoint=push_endpoint, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - - @classmethod - def from_api_repr(cls, resource, client): - """Factory: construct a topic given its API representation - - :type resource: dict - :param resource: topic resource representation returned from the API - - :type client: :class:`google.cloud.pubsub.client.Client` - :param client: Client which holds credentials and project - configuration for the topic. - - :rtype: :class:`google.cloud.pubsub.topic.Topic` - :returns: Topic parsed from ``resource``. - :raises: :class:`ValueError` if ``client`` is not ``None`` and the - project from the resource does not agree with the project - from the client. - """ - topic_name = topic_name_from_path(resource['name'], client.project) - return cls(topic_name, client=client) - - @property - def project(self): - """Project bound to the topic.""" - return self._client.project - - @property - def full_name(self): - """Fully-qualified name used in topic / subscription APIs""" - return 'projects/%s/topics/%s' % (self.project, self.name) - - def _require_client(self, client): - """Check client or verify over-ride. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. 
If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`google.cloud.pubsub.client.Client` - :returns: The client passed in or the currently bound client. - """ - if client is None: - client = self._client - return client - - def create(self, client=None): - """API call: create the topic via a PUT request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/create - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_create] - :end-before: [END topic_create] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_create(topic_path=self.full_name) - - def exists(self, client=None): - """API call: test for the existence of the topic via a GET request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/get - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_exists] - :end-before: [END topic_exists] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: bool - :returns: Boolean indicating existence of the topic. - """ - client = self._require_client(client) - api = client.publisher_api - - try: - api.topic_get(topic_path=self.full_name) - except NotFound: - return False - else: - return True - - def delete(self, client=None): - """API call: delete the topic via a DELETE request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/delete - - Example: - - .. 
literalinclude:: snippets.py - :start-after: [START topic_delete] - :end-before: [END topic_delete] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - api.topic_delete(topic_path=self.full_name) - - def _timestamp_message(self, attrs): - """Add a timestamp to ``attrs``, if the topic is so configured. - - If ``attrs`` already has the key, do nothing. - - Helper method for ``publish``/``Batch.publish``. - """ - if self.timestamp_messages and 'timestamp' not in attrs: - attrs['timestamp'] = _datetime_to_rfc3339(_NOW()) - - def publish(self, message, client=None, **attrs): - """API call: publish a message to a topic via a POST request - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/publish - - Example without message attributes: - - .. literalinclude:: snippets.py - :start-after: [START topic_publish_simple_message] - :end-before: [END topic_publish_simple_message] - - With message attributes: - - .. literalinclude:: snippets.py - :start-after: [START topic_publish_message_with_attrs] - :end-before: [END topic_publish_message_with_attrs] - - :type message: bytes - :param message: the message payload - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. 
- - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - - :rtype: str - :returns: message ID assigned by the server to the published message - """ - client = self._require_client(client) - api = client.publisher_api - - self._timestamp_message(attrs) - message_data = {'data': message, 'attributes': attrs} - message_ids = api.topic_publish(self.full_name, [message_data]) - return message_ids[0] - - def batch(self, client=None, **kwargs): - """Return a batch to use as a context manager. - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_batch] - :end-before: [END topic_batch] - - .. note:: - - The only API request happens during the ``__exit__()`` of the topic - used as a context manager, and only if the block exits without - raising an exception. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :type kwargs: dict - :param kwargs: Keyword arguments passed to the - :class:`~google.cloud.pubsub.topic.Batch` constructor. - - :rtype: :class:`Batch` - :returns: A batch to use as a context manager. - """ - client = self._require_client(client) - return Batch(self, client, **kwargs) - - def list_subscriptions(self, page_size=None, page_token=None, client=None): - """List subscriptions for the project associated with this client. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics.subscriptions/list - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_list_subscriptions] - :end-before: [END topic_list_subscriptions] - - :type page_size: int - :param page_size: maximum number of topics to return, If not passed, - defaults to a value set by the API. - - :type page_token: str - :param page_token: opaque marker for the next "page" of topics. If not - passed, the API will return the first page of - topics. 
- - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current topic. - - :rtype: :class:`~google.api.core.page_iterator.Iterator` - :returns: Iterator of - :class:`~google.cloud.pubsub.subscription.Subscription` - accessible to the current topic. - """ - client = self._require_client(client) - api = client.publisher_api - return api.topic_list_subscriptions(self, page_size, page_token) - - def get_iam_policy(self, client=None): - """Fetch the IAM policy for the topic. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/getIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_get_iam_policy] - :end-before: [END topic_get_iam_policy] - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: policy created from the resource returned by the - ``getIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resp = api.get_iam_policy(self.full_name) - return Policy.from_api_repr(resp) - - def set_iam_policy(self, policy, client=None): - """Update the IAM policy for the topic. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/setIamPolicy - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_set_iam_policy] - :end-before: [END topic_set_iam_policy] - - :type policy: :class:`google.cloud.pubsub.iam.Policy` - :param policy: the new policy, typically fetched via - :meth:`get_iam_policy` and updated in place. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. 
- - :rtype: :class:`google.cloud.pubsub.iam.Policy` - :returns: updated policy created from the resource returned by the - ``setIamPolicy`` API request. - """ - client = self._require_client(client) - api = client.iam_policy_api - resource = policy.to_api_repr() - resp = api.set_iam_policy(self.full_name, resource) - return Policy.from_api_repr(resp) - - def check_iam_permissions(self, permissions, client=None): - """Verify permissions allowed for the current user. - - See - https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics/testIamPermissions - - Example: - - .. literalinclude:: snippets.py - :start-after: [START topic_check_iam_permissions] - :end-before: [END topic_check_iam_permissions] - - :type permissions: list of string - :param permissions: list of permissions to be tested - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - - :rtype: sequence of string - :returns: subset of ``permissions`` allowed by current IAM policy. - """ - client = self._require_client(client) - api = client.iam_policy_api - return api.test_iam_permissions( - self.full_name, list(permissions)) - - -class Batch(object): - """Context manager: collect messages to publish via a single API call. - - Helper returned by :meth:Topic.batch - - :type topic: :class:`google.cloud.pubsub.topic.Topic` - :param topic: the topic being published - - :param client: The client to use. - :type client: :class:`google.cloud.pubsub.client.Client` - - :param max_interval: The maximum interval, in seconds, before the batch - will automatically commit. Note that this does not - run a background loop; it just checks when each - message is published. Therefore, this is intended - for situations where messages are published at - reasonably regular intervals. Defaults to infinity - (off). 
- :type max_interval: float - - :param max_messages: The maximum number of messages to hold in the batch - before automatically commiting. Defaults to infinity - (off). - :type max_messages: float - - :param max_size: The maximum size that the serialized messages can be - before automatically commiting. Defaults to 9 MB - (slightly less than the API limit). - :type max_size: int - """ - _INFINITY = float('inf') - - def __init__(self, topic, client, max_interval=_INFINITY, - max_messages=_INFINITY, max_size=1024 * 1024 * 9): - self.topic = topic - self.client = client - self.messages = [] - self.message_ids = [] - - # Set the autocommit rules. If the interval or number of messages - # is exceeded, then the .publish() method will imply a commit. - self._max_interval = max_interval - self._max_messages = max_messages - self._max_size = max_size - - # Set up the initial state, initializing messages, the starting - # timestamp, etc. - self._reset_state() - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - if exc_type is None: - self.commit() - - def __iter__(self): - return iter(self.message_ids) - - def _reset_state(self): - """Reset the state of this batch.""" - - del self.messages[:] - self._start_timestamp = time.time() - self._current_size = 0 - - def publish(self, message, **attrs): - """Emulate publishing a message, but save it. - - :type message: bytes - :param message: the message payload - - :type attrs: dict (string -> string) - :param attrs: key-value pairs to send as message attributes - """ - self.topic._timestamp_message(attrs) - - # Append the message to the list of messages.. - item = {'attributes': attrs, 'data': message} - self.messages.append(item) - - # Determine the approximate size of the message, and increment - # the current batch size appropriately. 
- encoded = base64.b64encode(_to_bytes(message)) - encoded += base64.b64encode( - json.dumps(attrs, ensure_ascii=False).encode('utf8'), - ) - self._current_size += len(encoded) - - # If too much time has elapsed since the first message - # was added, autocommit. - now = time.time() - if now - self._start_timestamp > self._max_interval: - self.commit() - return - - # If the number of messages on the list is greater than the - # maximum allowed, autocommit (with the batch's client). - if len(self.messages) >= self._max_messages: - self.commit() - return - - # If we have reached the max size, autocommit. - if self._current_size >= self._max_size: - self.commit() - return - - def commit(self, client=None): - """Send saved messages as a single API call. - - :type client: :class:`~google.cloud.pubsub.client.Client` or - ``NoneType`` - :param client: the client to use. If not passed, falls back to the - ``client`` stored on the current batch. - """ - if not self.messages: - return - - if client is None: - client = self.client - api = client.publisher_api - message_ids = api.topic_publish(self.topic.full_name, self.messages[:]) - self.message_ids.extend(message_ids) - self._reset_state() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py new file mode 100644 index 000000000000..21706f6eee5e --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py @@ -0,0 +1,25 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import Client as PublisherClient +from google.cloud.pubsub_v1.subscriber import Client as SubscriberClient + +__all__ = ( + 'PublisherClient', + 'SubscriberClient', + 'types', +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py new file mode 100644 index 000000000000..79aac7de8941 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -0,0 +1,73 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import functools + + +def add_methods(source_class, blacklist=()): + """Add wrapped versions of the `api` member's methods to the class. + + Any methods passed in `blacklist` are not added. + Additionally, any methods explicitly defined on the wrapped class are + not added. + """ + def wrap(wrapped_fx): + """Wrap a GAPIC method; preserve its name and docstring.""" + # If this is a static or class method, then we need to *not* + # send self as the first argument. + # + # Similarly, for instance methods, we need to send self.api rather + # than self, since that is where the actual methods were declared. 
+ instance_method = True + self = getattr(wrapped_fx, '__self__', None) + if issubclass(type(self), type): + instance_method = False + + # Okay, we have figured out what kind of method this is; send + # down the correct wrapper function. + if instance_method: + fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) + return functools.wraps(wrapped_fx)(fx) + fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) + return functools.wraps(wrapped_fx)(fx) + + def actual_decorator(cls): + # Reflectively iterate over most of the methods on the source class + # (the GAPIC) and make wrapped versions available on this client. + for name in dir(source_class): + # Ignore all private and magic methods. + if name.startswith('_'): + continue + + # Ignore anything on our blacklist. + if name in blacklist: + continue + + # Retrieve the attribute, and ignore it if it is not callable. + attr = getattr(source_class, name) + if not callable(attr): + continue + + # Add a wrapper method to this object. + fx = wrap(getattr(source_class, name)) + setattr(cls, name, fx) + + # Return the augmented class. + return cls + + # Simply return the actual decorator; this is returned from this method + # and actually used to decorate the class. + return actual_decorator diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py new file mode 100644 index 000000000000..76d54649448f --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
@six.add_metaclass(abc.ABCMeta)
class Batch(object):
    """The base batching class for Pub/Sub publishing.

    Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class,
    based on :class:`threading.Thread`, is fine for most cases, advanced
    users may need to implement something based on a different concurrency
    model.

    This class defines the interface for the Batch implementation;
    subclasses may be passed as the ``batch_class`` argument to
    :class:`~.pubsub_v1.client.PublisherClient`.

    The batching behavior works like this: When the
    :class:`~.pubsub_v1.publisher.client.Client` is asked to publish a new
    message, it requires a batch. The client will see if there is an
    already-opened batch for the given topic; if there is, then the message
    is sent to that batch. If there is not, then a new batch is created
    and the message put there.

    When a new batch is created, it automatically starts a timer counting
    down to the maximum latency before the batch should commit.
    Essentially, if enough time passes, the batch automatically commits
    regardless of how much is in it. However, if either the message count or
    size thresholds are encountered first, then the batch will commit early.
    """
    def __len__(self):
        """Return the number of messages currently in the batch."""
        return len(self.messages)

    @property
    @abc.abstractmethod
    def messages(self):
        """Return the messages currently in the batch.

        Returns:
            Sequence: The messages currently in the batch.
        """
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def size(self):
        """Return the total size of all of the messages currently in the batch.

        Returns:
            int: The total size of all of the messages currently
                in the batch, in bytes.
        """
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def settings(self):
        """Return the batch settings.

        Returns:
            ~.pubsub_v1.types.BatchSettings: The batch settings. These are
                considered immutable once the batch has been opened.
        """
        raise NotImplementedError

    @property
    @abc.abstractmethod
    def status(self):
        """Return the status of this batch.

        Returns:
            str: The status of this batch. All statuses are human-readable,
                all-lowercase strings. The ones represented in the
                :class:`BatchStatus` enum are special, but other statuses
                are permitted.
        """
        raise NotImplementedError

    def will_accept(self, message):
        """Return True if the batch is able to accept the message.

        Args:
            message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message.

        Returns:
            bool: Whether this batch can accept the message.
        """
        # A batch that is no longer accepting messages rejects everything.
        if self.status != BatchStatus.ACCEPTING_MESSAGES:
            return False

        # Reject the message if adding it would push the batch over its
        # configured byte limit.
        if self.size + message.ByteSize() > self.settings.max_bytes:
            return False

        # Okay, everything is good.
        return True

    @abc.abstractmethod
    def publish(self, message):
        """Publish a single message.

        Add the given message to this object; this will cause it to be
        published once the batch either has enough messages or a sufficient
        period of time has elapsed.

        This method is called by :meth:`~.PublisherClient.publish`.

        Args:
            message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message.

        Returns:
            ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the
                :class:`concurrent.futures.Future` interface.
        """
        raise NotImplementedError


class BatchStatus(str, enum.Enum):
    """An enum of the statuses on which the library hooks in functionality.

    It is acceptable for an implementation to use a status that is not a
    member of this enum; these members are merely the statuses that the
    existing library code reacts to.

    Because this enum mixes in :class:`str`, each member compares equal to
    its plain string value, so existing plain-string status comparisons
    continue to work unchanged. (This also puts the module's ``enum``
    import to use; previously this was an "enum-like" plain class that
    could not be iterated or validated against.)
    """
    ACCEPTING_MESSAGES = 'accepting messages'
    ERROR = 'error'
    SUCCESS = 'success'
class Batch(base.Batch):
    """A batch of messages.

    The batch is the internal group of messages which are either awaiting
    publication or currently in-flight.

    A batch is automatically created by the PublisherClient when the first
    message to be published is received; subsequent messages are added to
    that batch until the process of actual publishing _starts_.

    Once this occurs, any new messages sent to :meth:`publish` open a new
    batch.

    If you are using this library, you most likely do not need to instantiate
    batch objects directly; they will be created for you. If you want to
    change the actual batching settings, see the ``batching`` argument on
    :class:`~.pubsub_v1.PublisherClient`.

    Any properties or methods on this class which are not defined in
    :class:`~.pubsub_v1.publisher.batch.BaseBatch` should be considered
    implementation details.

    Args:
        client (~.pubsub_v1.PublisherClient): The publisher client used to
            create this batch.
        topic (str): The topic. The format for this is
            ``projects/{project}/topics/{topic}``.
        settings (~.pubsub_v1.types.BatchSettings): The settings for batch
            publishing. These should be considered immutable once the batch
            has been opened.
        autocommit (bool): Whether to autocommit the batch when the time
            has elapsed. Defaults to True unless ``settings.max_latency`` is
            inf.
    """
    def __init__(self, client, topic, settings, autocommit=True):
        self._client = client

        # These objects are all communicated between threads; ensure that
        # any writes to them are atomic.
        self._futures = []
        self._messages = []
        self._size = 0
        self._settings = settings
        self._status = base.BatchStatus.ACCEPTING_MESSAGES
        self._topic = topic

        # If max latency is specified, start a thread to monitor the batch and
        # commit when the max latency is reached.
        self._thread = None
        self._commit_lock = threading.Lock()
        if autocommit and self._settings.max_latency < float('inf'):
            self._thread = threading.Thread(target=self.monitor)
            self._thread.start()

    @property
    def client(self):
        """~.pubsub_v1.client.PublisherClient: A publisher client."""
        return self._client

    @property
    def messages(self):
        """Sequence: The messages currently in the batch."""
        return self._messages

    @property
    def settings(self):
        """Return the batch settings.

        Returns:
            ~.pubsub_v1.types.BatchSettings: The batch settings. These are
                considered immutable once the batch has been opened.
        """
        return self._settings

    @property
    def size(self):
        """Return the total size of all of the messages currently in the batch.

        Returns:
            int: The total size of all of the messages currently
                in the batch, in bytes.
        """
        return self._size

    @property
    def status(self):
        """Return the status of this batch.

        Returns:
            str: The status of this batch. All statuses are human-readable,
                all-lowercase strings.
        """
        return self._status

    def commit(self):
        """Actually publish all of the messages on the active batch.

        This synchronously sets the batch status to a non-accepting state,
        and then opens a new thread, which handles actually sending the
        messages to Pub/Sub.

        .. note::

            This method is non-blocking. It opens a new thread, which calls
            :meth:`_commit`, which does block.
        """
        # Mark the batch as starting to commit, so that it will necessarily
        # not accept new messages (``will_accept`` only accepts while the
        # status is ACCEPTING_MESSAGES).
        #
        # BUGFIX: this must be a value *distinct* from the 'in-flight'
        # status that `_commit` sets. Previously this method set
        # 'in-flight' directly, which made `_commit` take its early-return
        # path (status no longer ACCEPTING_MESSAGES), so an explicit
        # `commit()` never actually published anything.
        self._status = 'starting'

        # Start a new thread to actually handle the commit.
        commit_thread = threading.Thread(target=self._commit)
        commit_thread.start()

    def _commit(self):
        """Actually publish all of the messages on the active batch.

        This moves the batch out from being the active batch to an in-flight
        batch on the publisher, and then the batch is discarded upon
        completion.

        .. note::

            This method blocks. The :meth:`commit` method is the non-blocking
            version, which calls this one.
        """
        with self._commit_lock:
            # If, in the intervening period, the batch started to be
            # committed or completed a commit, then no-op at this point.
            # A 'starting' status (set by `commit`) is still committable;
            # only one thread can pass this check because the status is
            # flipped to 'in-flight' while the lock is held.
            if self._status not in (
                    base.BatchStatus.ACCEPTING_MESSAGES, 'starting'):
                return

            # Update the status.
            self._status = 'in-flight'

            # Sanity check: If there are no messages, no-op.
            if not self._messages:
                return

            # Begin the request to publish these messages, and log how long
            # the underlying request takes.
            start = time.time()
            response = self.client.api.publish(
                self._topic,
                self.messages,
            )
            end = time.time()
            logging.getLogger().debug('gRPC Publish took {s} seconds.'.format(
                s=end - start,
            ))

            # We got a response from Pub/Sub; denote that we are processing.
            self._status = 'processing results'

            # Sanity check: If the number of message IDs is not equal to the
            # number of futures I have, then something went wrong; mark the
            # batch as errored and fail every pending future.
            if len(response.message_ids) != len(self._futures):
                self._status = base.BatchStatus.ERROR
                for future in self._futures:
                    future.set_exception(exceptions.PublishError(
                        'Some messages were not successfully published.',
                    ))
                return

            # Iterate over the futures on the queue and return the response
            # IDs. We are trusting that there is a 1:1 mapping.
            self._status = base.BatchStatus.SUCCESS
            for message_id, future in zip(response.message_ids, self._futures):
                future.set_result(message_id)

    def monitor(self):
        """Commit this batch after sufficient time has elapsed.

        This simply sleeps for ``self._settings.max_latency`` seconds,
        and then calls commit unless the batch has already been committed.
        """
        # Note: This thread blocks; it is up to the calling code to call it
        # in a separate thread.
        #
        # Sleep for however long we should be waiting.
        time.sleep(self._settings.max_latency)

        # Commit (this no-ops if a commit already ran or is running).
        return self._commit()

    def publish(self, message):
        """Publish a single message.

        Add the given message to this object; this will cause it to be
        published once the batch either has enough messages or a sufficient
        period of time has elapsed.

        This method is called by :meth:`~.PublisherClient.publish`.

        Args:
            message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message.

        Returns:
            ~.pubsub_v1.publisher.futures.Future: An object conforming to
                the :class:`concurrent.futures.Future` interface.
        """
        # Coerce the type, just in case.
        if not isinstance(message, types.PubsubMessage):
            message = types.PubsubMessage(**message)

        # NOTE(review): the status is not re-checked here, so a message can
        # race into a batch that `_commit` is concurrently draining; the
        # client's `will_accept` check narrows but does not close that
        # window — confirm whether this is acceptable.
        #
        # Add the size to the running total of the size, so we know
        # if future messages need to be rejected.
        self._size += message.ByteSize()

        # Store the actual message in the batch's message queue.
        self._messages.append(message)

        # Return a Future. That future needs to be aware of the status
        # of this batch.
        f = futures.Future()
        self._futures.append(f)
        return f
@_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',))
class Client(object):
    """A publisher client for Google Cloud Pub/Sub.

    This creates an object that is capable of publishing messages.
    Generally, you can instantiate this client with no arguments, and you
    get sensible defaults.

    Args:
        batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The
            settings for batch publishing.
        batch_class (class): A class that describes how to handle
            batches. You may subclass the
            :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in
            order to define your own batcher. This is primarily provided to
            allow use of different concurrency models; the default
            is based on :class:`threading.Thread`.
        kwargs (dict): Any additional arguments provided are sent as keyword
            arguments to the underlying
            :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`.
            Generally, you should not need to set additional keyword
            arguments.
    """
    def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs):
        # Attach the metrics headers, then hand everything else through to
        # the underlying GAPIC client.
        kwargs['lib_name'] = 'gccl'
        kwargs['lib_version'] = __VERSION__
        self.api = publisher_client.PublisherClient(**kwargs)
        self.batch_settings = types.BatchSettings(*batch_settings)

        # At most one open batch exists per topic; the batches are
        # responsible for holding messages awaiting publication.
        self._batch_class = batch_class
        self._batch_lock = threading.Lock()
        self._batches = {}

    def batch(self, topic, message, create=True, autocommit=True):
        """Return the current batch for the provided topic.

        This will create a new batch only if no batch currently exists
        that is able to accept the message.

        Args:
            topic (str): A string representing the topic.
            message (~google.cloud.pubsub_v1.types.PubsubMessage): The message
                that will be committed.
            create (bool): Whether to create a new batch if no batch is
                found. Defaults to True.
            autocommit (bool): Whether to autocommit this batch.
                This is primarily useful for debugging.

        Returns:
            ~.pubsub_v1.batch.Batch: The batch object.
        """
        with self._batch_lock:
            current = self._batches.get(topic)

            # Reuse the already-open batch if it can hold this message.
            if current is not None and current.will_accept(message):
                return current

            # There is no usable batch; either decline or open a new one
            # and record it as the active batch for this topic.
            if not create:
                return None
            current = self._batch_class(
                autocommit=autocommit,
                client=self,
                settings=self.batch_settings,
                topic=topic,
            )
            self._batches[topic] = current
            return current

    def publish(self, topic, data, **attrs):
        """Publish a single message.

        .. note::
            Messages in Pub/Sub are blobs of bytes. They are *binary* data,
            not text. You must send data as a bytestring
            (``bytes`` in Python 3; ``str`` in Python 2), and this library
            will raise an exception if you send a text string.

            The reason that this is so important (and why we do not try to
            coerce for you) is because Pub/Sub is also platform independent
            and there is no way to know how to decode messages properly on
            the other side; therefore, encoding and decoding is a required
            exercise for the developer.

        Add the given message to this object; this will cause it to be
        published once the batch either has enough messages or a sufficient
        period of time has elapsed.

        Example:
            >>> from google.cloud.pubsub_v1 import publisher_client
            >>> client = publisher_client.PublisherClient()
            >>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
            >>> data = b'The rain in Wales falls mainly on the snails.'
            >>> response = client.publish(topic, data, username='guido')

        Args:
            topic (str): The topic to publish messages to.
            data (bytes): A bytestring representing the message body. This
                must be a bytestring.
            attrs (Mapping[str, str]): A dictionary of attributes to be
                sent as metadata. (These may be text strings or byte
                strings.)

        Returns:
            ~concurrent.futures.Future: An object conforming to the
                ``concurrent.futures.Future`` interface.

        Raises:
            TypeError: If ``data`` is not a bytestring, or if any attribute
                value is neither a text string nor a byte string.
        """
        # Sanity check: Is the data being sent as a bytestring?
        # If it is literally anything else, complain loudly about it.
        if not isinstance(data, six.binary_type):
            raise TypeError('Data being published to Pub/Sub must be sent '
                            'as a bytestring.')

        # Coerce all attributes to text strings; byte strings are decoded
        # as UTF-8, and anything else is rejected.
        coerced_attrs = {}
        for key, value in attrs.items():
            if isinstance(value, six.text_type):
                coerced_attrs[key] = value
            elif isinstance(value, six.binary_type):
                coerced_attrs[key] = value.decode('utf-8')
            else:
                raise TypeError('All attributes being published to Pub/Sub '
                                'must be sent as text strings.')

        # Create the Pub/Sub message object and delegate the publishing
        # to the appropriate batch for this topic.
        message = types.PubsubMessage(data=data, attributes=coerced_attrs)
        return self.batch(topic, message=message).publish(message)
class Future(google.api.core.future.Future):
    """Encapsulation of the asynchronous execution of an action.

    This object is returned from asynchronous Pub/Sub calls, and is the
    interface to determine the status of those calls.

    This object should not be created directly, but is returned by other
    methods in this library.
    """
    def __init__(self):
        self._callbacks = []
        self._result = None
        self._exception = None
        # Set exactly once (by `_trigger`) after either the result or the
        # exception has been stored; `result`/`exception` block on it.
        self._completed = threading.Event()
        # Guards the transition to "completed" so that a callback added
        # concurrently with `_trigger` is invoked exactly once.
        self._completion_lock = threading.Lock()

    def cancel(self):
        """Publishes in Pub/Sub currently may not be canceled.

        This method always returns False.
        """
        return False

    def cancelled(self):
        """Publishes in Pub/Sub currently may not be canceled.

        This method always returns False.
        """
        return False

    def running(self):
        """Publishes in Pub/Sub currently may not be canceled.

        This method always returns True.
        """
        return True

    def done(self):
        """Return True if the publish has completed, False otherwise.

        This still returns True in failure cases; checking :meth:`result` or
        :meth:`exception` is the canonical way to assess success or failure.
        """
        # BUGFIX: use the completion event rather than inspecting
        # `_result`/`_exception` directly. The event is what `result` and
        # `exception` wait on, and checking it cannot mis-report a falsy
        # result value as "not done".
        return self._completed.is_set()

    def result(self, timeout=None):
        """Return the message ID, or raise an exception.

        This blocks until the message has successfully been published, and
        returns the message ID.

        Args:
            timeout (Union[int, float]): The number of seconds before this call
                times out and raises TimeoutError.

        Returns:
            str: The message ID.

        Raises:
            ~.pubsub_v1.TimeoutError: If the request times out.
            Exception: For undefined exceptions in the underlying
                call execution.
        """
        # Attempt to get the exception if there is one.
        # If there is not one, then we know everything worked, and we can
        # return an appropriate value.
        err = self.exception(timeout=timeout)
        if err is None:
            return self._result
        raise err

    def exception(self, timeout=None, _wait=1):
        """Return the exception raised by the call, if any.

        This blocks until the message has successfully been published, and
        returns the exception. If the call succeeded, return None.

        Args:
            timeout (Union[int, float]): The number of seconds before this call
                times out and raises TimeoutError.

        Raises:
            TimeoutError: If the request times out.

        Returns:
            Exception: The exception raised by the call, if any.
        """
        # NOTE: ``_wait`` is unused; it is retained so that the signature
        # stays backward compatible for any caller passing it.
        #
        # Wait until the future is done.
        if not self._completed.wait(timeout=timeout):
            raise exceptions.TimeoutError('Timed out waiting for result.')

        # If the batch completed successfully, this should return None.
        if self._result is not None:
            return None

        # Okay, this batch had an error; this should return it.
        return self._exception

    def add_done_callback(self, fn):
        """Attach the provided callable to the future.

        The provided function is called, with this future as its only
        argument, when the future finishes running. If the future has
        already finished, the function is called immediately.

        Args:
            fn (Callable): The callback to execute.
        """
        # BUGFIX: the previous implementation both invoked the callback
        # immediately *and* registered it when the future was already done,
        # which could invoke it twice; it also raced with `_trigger`, which
        # could drop a callback entirely. Register under the lock, or (if
        # already completed) invoke outside the lock exactly once.
        with self._completion_lock:
            if not self._completed.is_set():
                self._callbacks.append(fn)
                return
        fn(self)

    def set_result(self, result):
        """Set the result of the future to the provided result.

        Args:
            result (str): The message ID.
        """
        # Sanity check: A future can only complete once.
        if self._result is not None or self._exception is not None:
            raise RuntimeError('set_result can only be called once.')

        # Set the result and trigger the future.
        self._result = result
        self._trigger()

    def set_exception(self, exception):
        """Set the result of the future to the given exception.

        Args:
            exception (:exc:`Exception`): The exception raised.
        """
        # Sanity check: A future can only complete once.
        if self._result is not None or self._exception is not None:
            raise RuntimeError('set_exception can only be called once.')

        # Set the exception and trigger the future.
        self._exception = exception
        self._trigger()

    def _trigger(self):
        """Mark the future complete and invoke all registered callbacks.

        This method is called internally (by :meth:`set_result` or
        :meth:`set_exception`) once the outcome is known.
        """
        # Snapshot the callback list under the lock so that no callback can
        # be added after the event is set without being invoked.
        with self._completion_lock:
            self._completed.set()
            callbacks = list(self._callbacks)
        for callback in callbacks:
            callback(self)
How does this work? The first part of the implementation, fanning out
responses, is actually quite straightforward and can be done with just a
:class:`concurrent.futures.Executor`:
The final piece of the puzzle, allowing things from anywhere to queue new
requests, is a bit more complex. If we were only dealing with threads, then
the response workers could just directly interact with the policy/consumer
to queue new requests. But, because this does not dictate any particular
concurrent strategy for dealing with the responses, it's possible that a
response could be processed in a different thread, process, or even on a
different machine. Because of this, we need an intermediary queue between
the callbacks and the gRPC request queue to bridge the "concurrency gap".
To pump items from the concurrency-safe queue into the gRPC request queue,
we need another worker thread. Putting this all together looks like this:
This part is actually up to the Policy to enable. The consumer just provides
a thread-safe queue for requests. The :class:`QueueCallbackThread` can be
used by the Policy implementation to spin up the worker thread to pump the
concurrency-safe queue. See the Pub/Sub subscriber implementation for an
example of this.
+ Having the request generator thread allows the consumer to hold the stream + open indefinitely. Now gRPC will send responses as fast as the consumer can + ask for them. The consumer hands these off to the :cls:`Policy` via + :meth:`Policy.on_response`, which should not block. + + Finally, we do not want to block the main thread, so the consumer actually + invokes the RPC itself in a separate thread. This thread is called the + *response consumer helper thread*. + + So all in all there are three threads: + + 1. gRPC's internal I/O thread. + 2. The request generator thread, created by gRPC. + 3. The response consumer helper thread, created by the Consumer. + + In addition, the Consumer likely uses some sort of concurreny to prevent + blocking on processing responses. The Policy may also use another thread to + deal with pumping messages from an external queue into the request queue + here. + + It may seem strange to use threads for something "high performance" + considering the GIL. However, the threads here are not CPU bound. They are + simple threads that are blocked by I/O and generally just move around some + simple objects between queues. The overhead for these helper threads is + low. The Consumer and end-user can configure any sort of executor they want + for the actual processing of the responses, which may be CPU intensive. + """ + def __init__(self, policy): + """ + Args: + policy (Consumer): The consumer policy, which defines how + requests and responses are handled. + """ + self._policy = policy + self._request_queue = queue.Queue() + self._exiting = threading.Event() + + self.active = False + self.helper_threads = _helper_threads.HelperThreadRegistry() + """:cls:`_helper_threads.HelperThreads`: manages the helper threads. + The policy may use this to schedule its own helper threads. + """ + + def send_request(self, request): + """Queue a request to be sent to gRPC. + + Args: + request (Any): The request protobuf. 
+ """ + self._request_queue.put(request) + + def _request_generator_thread(self): + """Generate requests for the stream. + + This blocks for new requests on the request queue and yields them to + gRPC. + """ + # First, yield the initial request. This occurs on every new + # connection, fundamentally including a resumed connection. + initial_request = self._policy.get_initial_request(ack_queue=True) + _LOGGER.debug('Sending initial request: {initial_request}'.format( + initial_request=initial_request, + )) + yield initial_request + + # Now yield each of the items on the request queue, and block if there + # are none. This can and must block to keep the stream open. + while True: + request = self._request_queue.get() + if request == _helper_threads.STOP: + _LOGGER.debug('Request generator signaled to stop.') + break + + _LOGGER.debug('Sending request: {}'.format(request)) + yield request + + def _blocking_consume(self): + """Consume the stream indefinitely.""" + while True: + # It is possible that a timeout can cause the stream to not + # exit cleanly when the user has called stop_consuming(). This + # checks to make sure we're not exiting before opening a new + # stream. + if self._exiting.is_set(): + _LOGGER.debug('Event signalled consumer exit.') + break + + request_generator = self._request_generator_thread() + response_generator = self._policy.call_rpc(request_generator) + try: + for response in response_generator: + _LOGGER.debug('Received response: {0}'.format(response)) + self._policy.on_response(response) + + # If the loop above exits without an exception, then the + # request stream terminated cleanly, which should only happen + # when it was signaled to do so by stop_consuming. In this + # case, break out of the while loop and exit this thread. 
+ _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') + break + except KeyboardInterrupt: + self.stop_consuming() + except Exception as exc: + try: + self._policy.on_exception(exc) + except: + self.active = False + raise + + def start_consuming(self): + """Start consuming the stream.""" + self.active = True + self._exiting.clear() + self.helper_threads.start( + 'consume bidirectional stream', + self._request_queue, + self._blocking_consume, + ) + + def stop_consuming(self): + """Signal the stream to stop and block until it completes.""" + self.active = False + self._exiting.set() + self.helper_threads.stop_all() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py new file mode 100644 index 000000000000..21e812a0d2ad --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -0,0 +1,129 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections +import logging +import threading +import uuid + +import six + +__all__ = ( + 'HelperThreadRegistry', + 'QueueCallbackThread', + 'STOP', +) + +_LOGGER = logging.getLogger(__name__) + +_HelperThread = collections.namedtuple( + 'HelperThreads', + ['name', 'thread', 'queue'], +) + + +# Helper thread stop indicator. 
This could be a sentinel object or None, +# but the sentinel object's ID can change if the process is forked, and +# None has the possibility of a user accidentally killing the helper +# thread. +STOP = uuid.uuid4() + + +class HelperThreadRegistry(object): + def __init__(self): + self._helper_threads = {} + + def __contains__(self, needle): + return needle in self._helper_threads + + def start(self, name, queue, target, *args, **kwargs): + """Create and start a helper thread. + + Args: + name (str): The name of the helper thread. + queue (Queue): A concurrency-safe queue. + target (Callable): The target of the thread. + args: Additional args passed to the thread constructor. + kwargs: Additional kwargs passed to the thread constructor. + + Returns: + threading.Thread: The created thread. + """ + # Create and start the helper thread. + thread = threading.Thread( + name='Consumer helper: {}'.format(name), + target=target, + *args, **kwargs + ) + thread.daemon = True + thread.start() + + # Keep track of the helper thread, so we are able to stop it. + self._helper_threads[name] = _HelperThread(name, thread, queue) + _LOGGER.debug('Started helper thread {}'.format(name)) + return thread + + def stop(self, name): + """Stops a helper thread. + + Sends the stop message and blocks until the thread joins. + + Args: + name (str): The name of the thread. + """ + # Attempt to retrieve the thread; if it is gone already, no-op. + helper_thread = self._helper_threads.get(name) + if helper_thread is None: + return + + # Join the thread if it is still alive. + if helper_thread.thread.is_alive(): + _LOGGER.debug('Stopping helper thread {}'.format(name)) + helper_thread.queue.put(STOP) + helper_thread.thread.join() + + # Remove the thread from our tracking. + self._helper_threads.pop(name, None) + + def stop_all(self): + """Stop all helper threads.""" + # This could be more efficient by sending the stop signal to all + # threads before joining any of them. 
+ for name in list(six.iterkeys(self._helper_threads)): + self.stop(name) + + +class QueueCallbackThread(object): + """A helper thread that executes a callback for every item in + the queue. + """ + def __init__(self, queue, callback): + self.queue = queue + self._callback = callback + + def __call__(self): + while True: + item = self.queue.get() + if item == STOP: + break + + # Run the callback. If any exceptions occur, log them and + # continue. + try: + self._callback(item) + except Exception as exc: + _LOGGER.error('{class_}: {message}'.format( + class_=exc.__class__.__name__, + message=str(exc), + )) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py new file mode 100644 index 000000000000..09f047495896 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py @@ -0,0 +1,155 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + + +class Histogram(object): + """Representation of a single histogram. + + The purpose of this class is to store actual ack timing information + in order to predict how long to renew leases. 
+ + The default implementation uses the 99th percentile of previous ack + times to implicitly lease messages; however, custom + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer` subclasses + are free to use a different formula. + + The precision of data stored is to the nearest integer. Additionally, + values outside the range of ``10 <= x <= 600`` are stored as ``10`` or + ``600``, since these are the boundaries of leases in the actual API. + """ + def __init__(self, data=None): + """Instantiate the histogram. + + Args: + data (Mapping[str, int]): The data strucure to be used to store + the underlying data. The default is an empty dictionary. + This can be set to a dictionary-like object if required + (for example, if a special object is needed for + concurrency reasons). + """ + # The data is stored as a dictionary, with the keys being the + # value being added and the values being the number of times that + # value was added to the dictionary. + # + # This is depending on the Python interpreter's implicit ordering + # of dictionaries, which is a bitwise sort by the key's ``hash()`` + # value. Because ``hash(int i) -> i`` and all of our keys are + # positive integers (negatives would be a problem because the sort + # is bitwise), we can rely on this. + if data is None: + data = {} + self._data = data + self._len = 0 + + def __len__(self): + """Return the total number of data points in this histogram. + + This is cached on a separate counter (rather than computing it using + ``sum([v for v in self._data.values()])``) to optimize lookup. + + Returns: + int: The total number of data points in this histogram. + """ + return self._len + + def __contains__(self, needle): + """Return True if needle is present in the histogram, False otherwise. 
+ + Returns: + bool: True or False + """ + return needle in self._data + + def __repr__(self): + return ''.format( + len=len(self), + max=self.max, + min=self.min, + ) + + @property + def max(self): + """Return the maximum value in this histogram. + + If there are no values in the histogram at all, return 600. + + Returns: + int: The maximum value in the histogram. + """ + if len(self._data) == 0: + return 600 + return next(iter(reversed(sorted(self._data.keys())))) + + @property + def min(self): + """Return the minimum value in this histogram. + + If there are no values in the histogram at all, return 10. + + Returns: + int: The minimum value in the histogram. + """ + if len(self._data) == 0: + return 10 + return next(iter(sorted(self._data.keys()))) + + def add(self, value): + """Add the value to this histogram. + + Args: + value (int): The value. Values outside of ``10 <= x <= 600`` + will be raised to ``10`` or reduced to ``600``. + """ + # If the value is out of bounds, bring it in bounds. + value = int(value) + if value < 10: + value = 10 + if value > 600: + value = 600 + + # Add the value to the histogram's data dictionary. + self._data.setdefault(value, 0) + self._data[value] += 1 + self._len += 1 + + def percentile(self, percent): + """Return the value that is the Nth precentile in the histogram. + + Args: + percent (Union[int, float]): The precentile being sought. The + default consumer implementations use consistently use ``99``. + + Returns: + int: The value corresponding to the requested percentile. + """ + # Sanity check: Any value over 100 should become 100. + if percent >= 100: + percent = 100 + + # Determine the actual target number. + target = len(self) - len(self) * (percent / 100) + + # Iterate over the values in reverse, dropping the target by the + # number of times each value has been seen. When the target passes + # 0, return the value we are currently viewing. 
+ for k in reversed(sorted(self._data.keys())): + target -= self._data[k] + if target < 0: + return k + + # The only way to get here is if there was no data. + # In this case, just return 10 seconds. + return 10 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py new file mode 100644 index 000000000000..afb9f7d7ca75 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -0,0 +1,98 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import pkg_resources + +from google.cloud.gapic.pubsub.v1 import subscriber_client + +from google.cloud.pubsub_v1 import _gapic +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version + + +@_gapic.add_methods(subscriber_client.SubscriberClient, + blacklist=('pull', 'streaming_pull')) +class Client(object): + """A subscriber client for Google Cloud Pub/Sub. + + This creates an object that is capable of subscribing to messages. + Generally, you can instantiate this client with no arguments, and you + get sensible defaults. + + Args: + policy_class (class): A class that describes how to handle + subscriptions. 
You may subclass the + :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` + class in order to define your own consumer. This is primarily + provided to allow use of different concurrency models; the default + is based on :class:`threading.Thread`. + kwargs (dict): Any additional arguments provided are sent as keyword + keyword arguments to the underlying + :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. + Generally, you should not need to set additional keyword + arguments. + """ + def __init__(self, policy_class=thread.Policy, **kwargs): + # Add the metrics headers, and instantiate the underlying GAPIC + # client. + kwargs['lib_name'] = 'gccl' + kwargs['lib_version'] = __VERSION__ + self.api = subscriber_client.SubscriberClient(**kwargs) + + # The subcription class is responsible to retrieving and dispatching + # messages. + self._policy_class = policy_class + + def subscribe(self, subscription, callback=None, flow_control=()): + """Return a representation of an individual subscription. + + This method creates and returns a ``Consumer`` object (that is, a + :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer`) + subclass) bound to the topic. It does `not` create the subcription + on the backend (or do any API call at all); it simply returns an + object capable of doing these things. + + If the ``callback`` argument is provided, then the :meth:`open` method + is automatically called on the returned object. If ``callback`` is + not provided, the subscription is returned unopened. + + .. note:: + It only makes sense to provide ``callback`` here if you have + already created the subscription manually in the API. + + Args: + subscription (str): The name of the subscription. The + subscription should have already been created (for example, + by using :meth:`create_subscription`). + callback (function): The callback function. This function receives + the :class:`~.pubsub_v1.types.PubsubMessage` as its only + argument. 
+ flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. Use this to prevent situations where you are + inundated with too many messages at once. + + Returns: + ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance + of the defined ``consumer_class`` on the client. + """ + flow_control = types.FlowControl(*flow_control) + subscr = self._policy_class(self, subscription, flow_control) + if callable(callback): + subscr.open(callback) + return subscr diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py new file mode 100644 index 000000000000..1015149cfbbf --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -0,0 +1,198 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import math +import time + + +class Message(object): + """A representation of a single Pub/Sub message. + + The common way to interact with + :class:`~.pubsub_v1.subscriber.message.Message` objects is to receive + them in callbacks on subscriptions; most users should never have a need + to instantiate them by hand. (The exception to this is if you are + implementing a custom subclass to + :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) + + Attributes: + message_id (str): The message ID. 
In general, you should not need + to use this directly. + data (bytes): The data in the message. Note that this will be a + :class:`bytes`, not a text string. + attributes (dict): The attributes sent along with the message. + publish_time (datetime): The time that this message was originally + published. + """ + def __init__(self, message, ack_id, request_queue): + """Construct the Message. + + .. note:: + + This class should not be constructed directly; it is the + responsibility of :class:`BasePolicy` subclasses to do so. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The message received + from Pub/Sub. + ack_id (str): The ack_id received from Pub/Sub. + request_queue (queue.Queue): A queue provided by the policy that + can accept requests; the policy is responsible for handling + those requests. + """ + self._message = message + self._ack_id = ack_id + self._request_queue = request_queue + self.message_id = message.message_id + + # The instantiation time is the time that this message + # was received. Tracking this provides us a way to be smart about + # the default lease deadline. + self._received_timestamp = time.time() + + # The policy should lease this message, telling PubSub that it has + # it until it is acked or otherwise dropped. + self.lease() + + def __repr__(self): + # Get an abbreviated version of the data. + abbv_data = self._message.data + if len(abbv_data) > 50: + abbv_data = abbv_data[0:50] + b'...' + + # Return a useful representation. + answer = 'Message {\n' + answer += ' data: {0!r}\n'.format(abbv_data) + answer += ' attributes: {0!r}\n'.format(self.attributes) + answer += '}' + return answer + + @property + def attributes(self): + """Return the attributes of the underlying Pub/Sub Message. + + Returns: + dict: The message's attributes. + """ + return self._message.attributes + + @property + def data(self): + """Return the data for the underlying Pub/Sub Message. + + Returns: + bytes: The message data. 
This is always a bytestring; if you + want a text string, call :meth:`bytes.decode`. + """ + return self._message.data + + @property + def publish_time(self): + """Return the time that the message was originally published. + + Returns: + datetime: The date and time that the message was published. + """ + return self._message.publish_time + + @property + def size(self): + """Return the size of the underlying message, in bytes.""" + return self._message.ByteSize() + + def ack(self): + """Acknowledge the given message. + + Acknowledging a message in Pub/Sub means that you are done + with it, and it will not be delivered to this subscription again. + You should avoid acknowledging messages until you have + *finished* processing them, so that in the event of a failure, + you receive the message again. + + .. warning:: + Acks in Pub/Sub are best effort. You should always + ensure that your processing code is idempotent, as you may + receive any given message more than once. + """ + time_to_ack = math.ceil(time.time() - self._received_timestamp) + self._request_queue.put(('ack', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + 'time_to_ack': time_to_ack, + })) + + def drop(self): + """Release the message from lease management. + + This informs the policy to no longer hold on to the lease for this + message. Pub/Sub will re-deliver the message if it is not acknowledged + before the existing lease expires. + + .. warning:: + For most use cases, the only reason to drop a message from + lease management is on :meth:`ack` or :meth:`nack`; these methods + both call this one. You probably do not want to call this method + directly. + """ + self._request_queue.put(('drop', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) + + def lease(self): + """Inform the policy to lease this message continually. + + .. note:: + This method is called by the constructor, and you should never + need to call it manually. 
+ """ + self._request_queue.put(('lease', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) + + def modify_ack_deadline(self, seconds): + """Set the deadline for acknowledgement to the given value. + + The default implementation handles this for you; you should not need + to manually deal with setting ack deadlines. The exception case is + if you are implementing your own custom subclass of + :class:`~.pubsub_v1.subcriber.consumer.BaseConsumer`. + + .. note:: + This is not an extension; it *sets* the deadline to the given + number of seconds from right now. It is even possible to use this + method to make a deadline shorter. + + Args: + seconds (int): The number of seconds to set the lease deadline + to. This should be between 0 and 600. Due to network latency, + values below 10 are advised against. + """ + self._request_queue.put(('modify_ack_deadline', { + 'ack_id': self._ack_id, + 'seconds': seconds, + })) + + def nack(self): + """Decline to acknowldge the given message. + + This will cause the message to be re-delivered to the subscription. + """ + self._request_queue.put(('nack', { + 'ack_id': self._ack_id, + 'byte_size': self.size, + })) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py new file mode 100644 index 000000000000..85d047eb9439 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -0,0 +1,392 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division + +import abc +import logging +import random +import time + +import six + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber import _histogram + +logger = logging.getLogger(__name__) + + +@six.add_metaclass(abc.ABCMeta) +class BasePolicy(object): + """Abstract class defining a subscription policy. + + Although the :class:`~.pubsub_v1.subscriber.policy.thread.Policy` class, + based on :class:`threading.Thread`, is fine for most cases, + advanced users may need to implement something based on a different + concurrency model. + + This class defines the interface for the policy implementation; + subclasses may be passed as the ``policy_class`` argument to + :class:`~.pubsub_v1.client.SubscriberClient`. + """ + def __init__(self, client, subscription, + flow_control=types.FlowControl(), histogram_data=None): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. + histogram_data (dict): Optional: A structure to store the histogram + data for predicting appropriate ack times. If set, this should + be a dictionary-like object. + + .. 
note:: + Additionally, the histogram relies on the assumption + that the dictionary will properly sort keys provided + that all keys are positive integers. If you are sending + your own dictionary class, ensure this assumption holds + or you will get strange behavior. + """ + self._client = client + self._subscription = subscription + self._consumer = _consumer.Consumer(self) + self._ack_deadline = 10 + self._last_histogram_size = 0 + self.flow_control = flow_control + self.histogram = _histogram.Histogram(data=histogram_data) + + # These are for internal flow control tracking. + # They should not need to be used by subclasses. + self._bytes = 0 + self._ack_on_resume = set() + self._paused = False + + @property + def ack_deadline(self): + """Return the appropriate ack deadline. + + This method is "sticky". It will only perform the computations to + check on the right ack deadline if the histogram has gained a + significant amount of new information. + + Returns: + int: The correct ack deadline. + """ + target = min([ + self._last_histogram_size * 2, + self._last_histogram_size + 100, + ]) + if len(self.histogram) > target: + self._ack_deadline = self.histogram.percentile(percent=99) + return self._ack_deadline + + @property + def managed_ack_ids(self): + """Return the ack IDs currently being managed by the policy. + + Returns: + set: The set of ack IDs being managed. + """ + if not hasattr(self, '_managed_ack_ids'): + self._managed_ack_ids = set() + return self._managed_ack_ids + + @property + def subscription(self): + """Return the subscription. + + Returns: + str: The subscription + """ + return self._subscription + + @property + def _load(self): + """Return the current load. + + The load is represented as a float, where 1.0 represents having + hit one of the flow control limits, and values between 0.0 and 1.0 + represent how close we are to them. (0.5 means we have exactly half + of what the flow control setting allows, for example.) 
+ + There are (currently) two flow control settings; this property + computes how close the subscriber is to each of them, and returns + whichever value is higher. (It does not matter that we have lots of + running room on setting A if setting B is over.) + + Returns: + float: The load value. + """ + return max([ + len(self.managed_ack_ids) / self.flow_control.max_messages, + self._bytes / self.flow_control.max_bytes, + ]) + + def ack(self, ack_id, time_to_ack=None, byte_size=None): + """Acknowledge the message corresponding to the given ack_id. + + Args: + ack_id (str): The ack ID. + time_to_ack (int): The time it took to ack the message, measured + from when it was received from the subscription. This is used + to improve the automatic ack timing. + byte_size (int): The size of the PubSub message, in bytes. + """ + # If we got timing information, add it to the histogram. + if time_to_ack is not None: + self.histogram.add(int(time_to_ack)) + + # Send the request to ack the message. + # However, if the consumer is inactive, then queue the ack_id here + # instead; it will be acked as part of the initial request when the + # consumer is started again. + if self._consumer.active: + request = types.StreamingPullRequest(ack_ids=[ack_id]) + self._consumer.send_request(request) + else: + self._ack_on_resume.add(ack_id) + + # Remove the message from lease management. + self.drop(ack_id=ack_id, byte_size=byte_size) + + def call_rpc(self, request_generator): + """Invoke the Pub/Sub streaming pull RPC. + + Args: + request_generator (Generator): A generator that yields requests, + and blocks if there are no outstanding requests (until such + time as there are). + """ + return self._client.api.streaming_pull(request_generator) + + def drop(self, ack_id, byte_size): + """Remove the given ack ID from lease management. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. 
+ """ + # Remove the ack ID from lease management, and decrement the + # byte counter. + if ack_id in self.managed_ack_ids: + self.managed_ack_ids.remove(ack_id) + self._bytes -= byte_size + self._bytes = min([self._bytes, 0]) + + # If we have been paused by flow control, check and see if we are + # back within our limits. + # + # In order to not thrash too much, require us to have passed below + # the resume threshold (80% by default) of each flow control setting + # before restarting. + if self._paused and self._load < self.flow_control.resume_threshold: + self._paused = False + self.open(self._callback) + + def get_initial_request(self, ack_queue=False): + """Return the initial request. + + This defines the initial request that must always be sent to Pub/Sub + immediately upon opening the subscription. + + Args: + ack_queue (bool): Whether to include any acks that were sent + while the connection was paused. + + Returns: + ~.pubsub_v1.types.StreamingPullRequest: A request suitable + for being the first request on the stream (and not suitable + for any other purpose). + + .. note:: + If ``ack_queue`` is set to True, this includes the ack_ids, but + also clears the internal set. + + This means that calls to :meth:`get_initial_request` with + ``ack_queue`` set to True are not idempotent. + """ + # Any ack IDs that are under lease management and not being acked + # need to have their deadline extended immediately. + ack_ids = set() + lease_ids = self.managed_ack_ids + if ack_queue: + ack_ids = self._ack_on_resume + lease_ids = lease_ids.difference(ack_ids) + + # Put the request together. + request = types.StreamingPullRequest( + ack_ids=list(ack_ids), + modify_deadline_ack_ids=list(lease_ids), + modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), + stream_ack_deadline_seconds=self.histogram.percentile(99), + subscription=self.subscription, + ) + + # Clear the ack_ids set. 
+ # Note: If `ack_queue` is False, this just ends up being a no-op, + # since the set is just an empty set. + ack_ids.clear() + + # Return the initial request. + return request + + def lease(self, ack_id, byte_size): + """Add the given ack ID to lease management. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. + if ack_id not in self.managed_ack_ids: + self.managed_ack_ids.add(ack_id) + self._bytes += byte_size + + # Sanity check: Do we have too many things in our inventory? + # If we do, we need to stop the stream. + if self._load >= 1.0: + self._paused = True + self.close() + + def maintain_leases(self): + """Maintain all of the leases being managed by the policy. + + This method modifies the ack deadline for all of the managed + ack IDs, then waits for most of that time (but with jitter), and + then calls itself. + + .. warning:: + This method blocks, and generally should be run in a separate + thread or process. + + Additionally, you should not have to call this method yourself, + unless you are implementing your own policy. If you are + implementing your own policy, you _should_ call this method + in an appropriate form of subprocess. + """ + while True: + # Sanity check: Should this infinitely loop quit? + if not self._consumer.active: + return + + # Determine the appropriate duration for the lease. This is + # based off of how long previous messages have taken to ack, with + # a sensible default and within the ranges allowed by Pub/Sub. + p99 = self.histogram.percentile(99) + logger.debug('The current p99 value is %d seconds.' % p99) + + # Create a streaming pull request. + # We do not actually call `modify_ack_deadline` over and over + # because it is more efficient to make a single request. + ack_ids = list(self.managed_ack_ids) + logger.debug('Renewing lease for %d ack IDs.' 
% len(ack_ids)) + if len(ack_ids) > 0 and self._consumer.active: + request = types.StreamingPullRequest( + modify_deadline_ack_ids=ack_ids, + modify_deadline_seconds=[p99] * len(ack_ids), + ) + self._consumer.send_request(request) + + # Now wait an appropriate period of time and do this again. + # + # We determine the appropriate period of time based on a random + # period between 0 seconds and 90% of the lease. This use of + # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # where there are many clients. + snooze = random.uniform(0.0, p99 * 0.9) + logger.debug('Snoozing lease management for %f seconds.' % snooze) + time.sleep(snooze) + + def modify_ack_deadline(self, ack_id, seconds): + """Modify the ack deadline for the given ack_id. + + Args: + ack_id (str): The ack ID + seconds (int): The number of seconds to set the new deadline to. + """ + request = types.StreamingPullRequest( + modify_deadline_ack_ids=[ack_id], + modify_deadline_seconds=[seconds], + ) + self._consumer.send_request(request) + + def nack(self, ack_id, byte_size=None): + """Explicitly deny receipt of a message. + + Args: + ack_id (str): The ack ID. + byte_size (int): The size of the PubSub message, in bytes. + """ + self.modify_ack_deadline(ack_id=ack_id, seconds=0) + self.drop(ack_id=ack_id, byte_size=byte_size) + + @abc.abstractmethod + def close(self): + """Close the existing connection.""" + raise NotImplementedError + + @abc.abstractmethod + def on_exception(self, exception): + """Called when a gRPC exception occurs. + + If this method does nothing, then the stream is re-started. If this + raises an exception, it will stop the consumer thread. + This is executed on the response consumer helper thread. + + Args: + exception (Exception): The exception raised by the RPC. + """ + raise NotImplementedError + + @abc.abstractmethod + def on_response(self, response): + """Process a response from gRPC. 
+ + This gives the consumer control over how responses are scheduled to + be processed. This method is expected to not block and instead + schedule the response to be consumed by some sort of concurrency. + + For example, if a the Policy implementation takes a callback in its + constructor, you can schedule the callback using a + :cls:`concurrent.futures.ThreadPoolExecutor`:: + + self._pool.submit(self._callback, response) + + This is called from the response consumer helper thread. + + Args: + response (Any): The protobuf response from the RPC. + """ + raise NotImplementedError + + @abc.abstractmethod + def open(self, callback): + """Open a streaming pull connection and begin receiving messages. + + For each message received, the ``callback`` function is fired with + a :class:`~.pubsub_v1.subscriber.message.Message` as its only + argument. + + Args: + callback (Callable[Message]): A callable that receives a + Pub/Sub Message. + """ + raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py new file mode 100644 index 000000000000..df0f965748de --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -0,0 +1,147 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +from concurrent import futures +from queue import Queue +import logging +import threading + +import grpc + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber.policy import base +from google.cloud.pubsub_v1.subscriber.message import Message + + +logger = logging.getLogger(__name__) + + +class Policy(base.BasePolicy): + """A consumer class based on :class:`threading.Thread`. + + This consumer handles the connection to the Pub/Sub service and all of + the concurrency needs. + """ + def __init__(self, client, subscription, flow_control=types.FlowControl(), + executor=None, queue=None): + """Instantiate the policy. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. + executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A + ThreadPoolExecutor instance, or anything duck-type compatible + with it. + queue (~queue.Queue): (Optional.) A Queue instance, appropriate + for crossing the concurrency boundary implemented by + ``executor``. + """ + # Default the callback to a no-op; it is provided by `.open`. + self._callback = lambda message: None + + # Create a queue for keeping track of shared state. + if queue is None: + queue = Queue() + self._request_queue = Queue() + + # Call the superclass constructor. + super(Policy, self).__init__( + client=client, + flow_control=flow_control, + subscription=subscription, + ) + + # Also maintain a request queue and an executor. 
+ logger.debug('Creating callback requests thread (not starting).') + if executor is None: + executor = futures.ThreadPoolExecutor(max_workers=10) + self._executor = executor + self._callback_requests = _helper_threads.QueueCallbackThread( + self._request_queue, + self.on_callback_request, + ) + + def close(self): + """Close the existing connection.""" + # Close the main subscription connection. + self._consumer.helper_threads.stop('callback requests worker') + self._consumer.stop_consuming() + + def open(self, callback): + """Open a streaming pull connection and begin receiving messages. + + For each message received, the ``callback`` function is fired with + a :class:`~.pubsub_v1.subscriber.message.Message` as its only + argument. + + Args: + callback (Callable): The callback function. + """ + # Start the thread to pass the requests. + logger.debug('Starting callback requests worker.') + self._callback = callback + self._consumer.helper_threads.start( + 'callback requests worker', + self._request_queue, + self._callback_requests, + ) + + # Actually start consuming messages. + self._consumer.start_consuming() + + # Spawn a helper thread that maintains all of the leases for + # this policy. + logger.debug('Spawning lease maintenance worker.') + self._leaser = threading.Thread(target=self.maintain_leases) + self._leaser.daemon = True + self._leaser.start() + + def on_callback_request(self, callback_request): + """Map the callback request to the appropriate GRPC request.""" + action, kwargs = callback_request[0], callback_request[1] + getattr(self, action)(**kwargs) + + def on_exception(self, exception): + """Bubble the exception. + + This will cause the stream to exit loudly. + """ + # If this is DEADLINE_EXCEEDED, then we want to retry. + # That entails just returning None. + deadline_exceeded = grpc.StatusCode.DEADLINE_EXCEEDED + if getattr(exception, 'code', lambda: None)() == deadline_exceeded: + return + + # Raise any other exception. 
+ raise exception + + def on_response(self, response): + """Process all received Pub/Sub messages. + + For each message, schedule a callback with the executor. + """ + for msg in response.received_messages: + logger.debug('New message received from Pub/Sub: %r', msg) + logger.debug(self._callback) + message = Message(msg.message, msg.ack_id, self._request_queue) + future = self._executor.submit(self._callback, message) + logger.debug('Result: %s' % future.result()) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py new file mode 100644 index 000000000000..a9de4a88f7f8 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -0,0 +1,70 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import collections +import sys + +import psutil + +from google.cloud.proto.pubsub.v1 import pubsub_pb2 +from google.gax.utils.messages import get_messages +from google.protobuf import timestamp_pb2 + + +# Define the default values for batching. +# +# This class is used when creating a publisher or subscriber client, and +# these settings can be altered to tweak Pub/Sub behavior. +# The defaults should be fine for most use cases. 
+BatchSettings = collections.namedtuple( + 'BatchSettings', + ['max_bytes', 'max_latency', 'max_messages'], +) +BatchSettings.__new__.__defaults__ = ( + 1024 * 1024 * 5, # max_bytes: 5 MB + 0.05, # max_latency: 0.05 seconds + 1000, # max_messages: 1,000 +) + +# Define the type class and default values for flow control settings. +# +# This class is used when creating a publisher or subscriber client, and +# these settings can be altered to tweak Pub/Sub behavior. +# The defaults should be fine for most use cases. +FlowControl = collections.namedtuple( + 'FlowControl', + ['max_bytes', 'max_messages', 'resume_threshold'], +) +FlowControl.__new__.__defaults__ = ( + psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM + float('inf'), # max_messages: no limit + 0.8, # resume_threshold: 80% +) + + +# Pub/Sub uses timestamps from the common protobuf package. +# Do not make users import from there. +Timestamp = timestamp_pb2.Timestamp + + +_names = ['BatchSettings', 'FlowControl', 'Timestamp'] +for name, message in get_messages(pubsub_pb2).items(): + message.__module__ = 'google.cloud.pubsub_v1.types' + setattr(sys.modules[__name__], name, message) + _names.append(name) + + +__all__ = tuple(sorted(_names)) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 4bcecafe66b4..c860e0741fe6 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -38,10 +38,10 @@ def unit_tests(session, python_version): session.install('-e', '.') # Run py.test against the unit tests. 
- session.run('py.test', '--quiet', - '--cov=google.cloud.pubsub', '--cov=tests.unit', '--cov-append', - '--cov-config=.coveragerc', '--cov-report=', '--cov-fail-under=97', - 'tests/unit', + session.run( + 'py.test', '--quiet', '--cov-append', '--cov-report=', + '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', + '--cov-config=.coveragerc', 'tests/unit', ) @@ -87,7 +87,8 @@ def lint(session): '--library-filesets', 'google', '--test-filesets', 'tests', # Temporarily allow this to fail. - success_codes=range(0, 100)) + success_codes=range(0, 100), + ) @nox.session diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 71fee1dd7b8f..91bbeb8e2a8c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -27,7 +27,7 @@ # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', + 'author_email': 'googleapis-packages@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', @@ -51,9 +51,11 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.26.0, < 0.27dev', - 'grpcio >= 1.2.0, < 2.0dev', - 'gapic-google-cloud-pubsub-v1 >= 0.15.0, < 0.16dev', + 'google-gax >= 0.15.13, < 0.16dev', + 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', + 'grpcio >= 1.0.2, < 2.0dev', + 'psutil >= 5.2.2, < 6.0dev', ] setup( diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index eddfd1274da0..02666eae676a 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -1,4 +1,4 @@ -# Copyright 2015 Google Inc. +# Copyright 2017, Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -12,395 +12,95 @@ # See the License for the specific language governing permissions and # limitations under the License. -import datetime -import os -import unittest +from __future__ import absolute_import -from google.gax.errors import GaxError -from google.gax.grpc import exc_to_code -from grpc import StatusCode -import requests +import time +import uuid -from google.cloud.environment_vars import PUBSUB_EMULATOR -from google.cloud.exceptions import Conflict -from google.cloud.pubsub import client +import mock +import six -from test_utils.retry import RetryInstanceState -from test_utils.retry import RetryResult -from test_utils.retry import RetryErrors -from test_utils.system import EmulatorCreds -from test_utils.system import unique_resource_id +from google import auth +from google.cloud import pubsub_v1 -def _unavailable(exc): - return exc_to_code(exc) == StatusCode.UNAVAILABLE +def _resource_name(resource_type): + """Return a randomly selected name for a resource. - -retry_unavailable = RetryErrors(GaxError, _unavailable) - - -class Config(object): - """Run-time configuration to be modified at set-up. - - This is a mutable stand-in to allow test set-up to modify - global state. - """ - CLIENT = None - IN_EMULATOR = False - - -def setUpModule(): - Config.IN_EMULATOR = os.getenv(PUBSUB_EMULATOR) is not None - if Config.IN_EMULATOR: - credentials = EmulatorCreds() - http = requests.Session() # Un-authorized. - Config.CLIENT = client.Client( - credentials=credentials, _http=http) - else: - Config.CLIENT = client.Client() - - -def _consume_topics(pubsub_client): - """Consume entire iterator. - - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve topics. - - :rtype: list - :returns: List of all topics encountered. - """ - return list(pubsub_client.list_topics()) - - -def _consume_snapshots(pubsub_client): - """Consume entire iterator. 
- - :type pubsub_client: :class:`~google.cloud.pubsub.client.Client` - :param pubsub_client: Client to use to retrieve snapshots. - - :rtype: list - :returns: List of all snapshots encountered. - """ - return list(pubsub_client.list_snapshots()) - - -def _consume_subscriptions(topic): - """Consume entire iterator. - - :type topic: :class:`~google.cloud.pubsub.topic.Topic` - :param topic: Topic to use to retrieve subscriptions. - - :rtype: list - :returns: List of all subscriptions encountered. + Args: + resource_type (str): The resource for which a name is being + generated. Should be singular (e.g. "topic", "subscription") """ - return list(topic.list_subscriptions()) - - -class TestPubsub(unittest.TestCase): - - def setUp(self): - self.to_delete = [] - - def tearDown(self): - for doomed in self.to_delete: - doomed.delete() - - def test_create_topic(self): - topic_name = 'a-new-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - self.assertTrue(topic.exists()) - self.assertEqual(topic.name, topic_name) - - with self.assertRaises(Conflict): - topic.create() - - def test_list_topics(self): - before = _consume_topics(Config.CLIENT) - topics_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for topic_name in topics_to_create: - topic = Config.CLIENT.topic(topic_name) - topic.create() - self.to_delete.append(topic) - - # Retrieve the topics. 
- def _all_created(result): - return len(result) == len(before) + len(topics_to_create) - - retry = RetryResult(_all_created) - after = retry(_consume_topics)(Config.CLIENT) - - created = [topic for topic in after - if topic.name in topics_to_create and - topic.project == Config.CLIENT.project] - self.assertEqual(len(created), len(topics_to_create)) - - def test_create_subscription_defaults(self): - TOPIC_NAME = 'create-sub-def' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - with self.assertRaises(Conflict): - subscription.create() - - def test_create_subscription_w_ack_deadline(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=120) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertEqual(subscription.ack_deadline, 120) - self.assertIs(subscription.topic, topic) - - def test_create_subscription_w_message_retention(self): - TOPIC_NAME = 'create-sub-ack' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id() - duration 
= datetime.timedelta(hours=12) - subscription = topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True, - message_retention_duration=duration) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - self.assertTrue(subscription.exists()) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertTrue(subscription.retain_acked_messages) - self.assertEqual(subscription.message_retention_duration, duration) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions(self): - TOPIC_NAME = 'list-sub' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - topic.create() - self.to_delete.append(topic) - empty = _consume_subscriptions(topic) - self.assertEqual(len(empty), 0) - subscriptions_to_create = [ - 'new' + unique_resource_id(), - 'newer' + unique_resource_id(), - 'newest' + unique_resource_id(), - ] - for subscription_name in subscriptions_to_create: - subscription = topic.subscription(subscription_name) - subscription.create() - self.to_delete.append(subscription) - - # Retrieve the subscriptions. 
- def _all_created(result): - return len(result) == len(subscriptions_to_create) - - retry = RetryResult(_all_created) - all_subscriptions = retry(_consume_subscriptions)(topic) - - created = [subscription for subscription in all_subscriptions - if subscription.name in subscriptions_to_create] - self.assertEqual(len(created), len(subscriptions_to_create)) - - def test_message_pull_mode_e2e(self): - import operator - TOPIC_NAME = 'message-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - MESSAGE_1 = b'MESSAGE ONE' - MESSAGE_2 = b'MESSAGE ONE' - EXTRA_1 = 'EXTRA 1' - EXTRA_2 = 'EXTRA 2' - topic.publish(MESSAGE_1, extra=EXTRA_1) - topic.publish(MESSAGE_2, extra=EXTRA_2) - - class Hoover(object): - - def __init__(self): - self.received = [] - - def done(self, *dummy): - return len(self.received) == 2 - - def suction(self): - with subscription.auto_ack(max_messages=2) as ack: - self.received.extend(ack.values()) - - hoover = Hoover() - retry = RetryInstanceState(hoover.done) - retry(hoover.suction)() - - message1, message2 = sorted(hoover.received, - key=operator.attrgetter('timestamp')) - - self.assertEqual(message1.data, MESSAGE_1) - self.assertEqual(message1.attributes['extra'], EXTRA_1) - self.assertIsNotNone(message1.service_timestamp) - - self.assertEqual(message2.data, MESSAGE_2) - self.assertEqual(message2.attributes['extra'], EXTRA_2) - self.assertIsNotNone(message2.service_timestamp) - - def _maybe_emulator_skip(self): - # NOTE: This method is necessary because ``Config.IN_EMULATOR`` - # is set at runtime rather than import time, which means we - # can't use the @unittest.skipIf decorator. 
- if Config.IN_EMULATOR: - self.skipTest('IAM not supported by Pub/Sub emulator') - - def test_topic_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_TOPICS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-topic-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - if topic.check_iam_permissions([PUBSUB_TOPICS_GET_IAM_POLICY]): - policy = topic.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = topic.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_subscription_iam_policy(self): - from google.cloud.pubsub.iam import PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY - self._maybe_emulator_skip() - topic_name = 'test-sub-iam-policy-topic' + unique_resource_id('-') - topic = Config.CLIENT.topic(topic_name) - topic.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(topic.exists)() - self.to_delete.append(topic) - - SUB_NAME = 'test-sub-iam-policy-sub' + unique_resource_id('-') - subscription = topic.subscription(SUB_NAME) - subscription.create() - - # Retry / backoff up to 7 seconds (1 + 2 + 4) - retry = RetryResult(lambda result: result, max_tries=4) - retry(subscription.exists)() - self.to_delete.insert(0, subscription) - - if subscription.check_iam_permissions( - [PUBSUB_SUBSCRIPTIONS_GET_IAM_POLICY]): - policy = subscription.get_iam_policy() - viewers = set(policy.viewers) - viewers.add(policy.user('jjg@google.com')) - policy.viewers = viewers - new_policy = subscription.set_iam_policy(policy) - self.assertEqual(new_policy.viewers, policy.viewers) - - def test_create_snapshot(self): - TOPIC_NAME = 'create-snap-def' + 
unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME) - before_snapshots = _consume_snapshots(Config.CLIENT) - - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - SUBSCRIPTION_NAME = 'subscribing-now' + unique_resource_id('-') - subscription = topic.subscription(SUBSCRIPTION_NAME, ack_deadline=600) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - # There is no GET method for snapshot, so check existence using - # list - def retry_predicate(result): - return len(result) > len(before_snapshots) - - retry = RetryResult(retry_predicate, max_tries=5) - after_snapshots = retry(_consume_snapshots)(Config.CLIENT) - self.assertEqual(len(before_snapshots) + 1, len(after_snapshots)) - - def full_name(obj): - return obj.full_name - - self.assertIn(snapshot.full_name, map(full_name, after_snapshots)) - self.assertNotIn(snapshot.full_name, map(full_name, before_snapshots)) - - with self.assertRaises(Conflict): - snapshot.create() - - def test_seek(self): - TOPIC_NAME = 'seek-e2e' + unique_resource_id('-') - topic = Config.CLIENT.topic(TOPIC_NAME, - timestamp_messages=True) - self.assertFalse(topic.exists()) - topic.create() - self.to_delete.append(topic) - - SUBSCRIPTION_NAME = 'subscribing-to-seek' + unique_resource_id('-') - subscription = topic.subscription( - SUBSCRIPTION_NAME, retain_acked_messages=True) - self.assertFalse(subscription.exists()) - subscription.create() - self.to_delete.append(subscription) - - SNAPSHOT_NAME = 'new-snapshot' + unique_resource_id('-') - snapshot = subscription.snapshot(SNAPSHOT_NAME) - snapshot.create() - self.to_delete.append(snapshot) - - MESSAGE_1 = b'MESSAGE ONE' - topic.publish(MESSAGE_1) - MESSAGE_2 = b'MESSAGE TWO' - topic.publish(MESSAGE_2) - - ((ack_id_1a, recvd_1a), ) = 
subscription.pull() - ((ack_id_2a, recvd_2a), ) = subscription.pull() - before_data = [obj.data for obj in (recvd_1a, recvd_2a)] - self.assertIn(MESSAGE_1, before_data) - self.assertIn(MESSAGE_2, before_data) - subscription.acknowledge((ack_id_1a, ack_id_2a)) - - self.assertFalse(subscription.pull(return_immediately=True)) - - subscription.seek_snapshot(snapshot) - - ((_, recvd_1b), ) = subscription.pull() - ((_, recvd_2b), ) = subscription.pull() - after_data = [obj.data for obj in (recvd_1b, recvd_2b)] - self.assertEqual(sorted(before_data), sorted(after_data)) + return 'projects/{project}/{resource_type}s/st-n{random}'.format( + project=auth.default()[1], + random=str(uuid.uuid4())[0:8], + resource_type=resource_type, + ) + + +def test_publish_messages(): + publisher = pubsub_v1.PublisherClient() + topic_name = _resource_name('topic') + futures = [] + + try: + publisher.create_topic(topic_name) + for i in range(0, 500): + futures.append( + publisher.publish( + topic_name, + b'The hail in Wales falls mainly on the snails.', + num=str(i), + ), + ) + for future in futures: + result = future.result() + assert isinstance(result, (six.text_type, six.binary_type)) + finally: + publisher.delete_topic(topic_name) + + +def test_subscribe_to_messages(): + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_name = _resource_name('topic') + sub_name = _resource_name('subscription') + + try: + # Create a topic. + publisher.create_topic(topic_name) + + # Subscribe to the topic. This must happen before the messages + # are published. + subscriber.create_subscription(sub_name, topic_name) + subscription = subscriber.subscribe(sub_name) + + # Publish some messages. + futures = [publisher.publish( + topic_name, + b'Wooooo! The claaaaaw!', + num=str(i), + ) for i in range(0, 50)] + + # Make sure the publish completes. 
+ [f.result() for f in futures] + + # The callback should process the message numbers to prove + # that we got everything at least once. + callback = mock.Mock(wraps=lambda message: message.ack()) + + # Actually open the subscription and hold it open for a few seconds. + subscription.open(callback) + for second in range(0, 10): + time.sleep(1) + + # The callback should have fired at least fifty times, but it + # may take some time. + if callback.call_count >= 50: + return + + # Okay, we took too long; fail out. + assert callback.call_count >= 50 + finally: + publisher.delete_topic(topic_name) + subscriber.delete_subscription(sub_name) diff --git a/packages/google-cloud-pubsub/tests/unit/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py index 58e0d9153632..e69de29bb2d1 100644 --- a/packages/google-cloud-pubsub/tests/unit/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/__init__.py @@ -1,13 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py new file mode 100644 index 000000000000..05a749d58425 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -0,0 +1,69 @@ +# Copyright 2017, Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_batch(status=None, settings=types.BatchSettings()): + """Create a batch object, which does not commit. + + Args: + status (str): If provided, the batch's internal status will be set + to the provided status. 
+ + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: The batch object + """ + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + batch = Batch(client, 'topic_name', settings, autocommit=False) + if status: + batch._status = status + return batch + + +def test_len(): + batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) + assert len(batch) == 0 + batch.publish(types.PubsubMessage(data=b'foo')) + assert len(batch) == 1 + + +def test_will_accept(): + batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) + message = types.PubsubMessage() + assert batch.will_accept(message) is True + + +def test_will_not_accept_status(): + batch = create_batch(status='talk to the hand') + message = types.PubsubMessage() + assert batch.will_accept(message) is False + + +def test_will_not_accept_size(): + batch = create_batch( + settings=types.BatchSettings(max_bytes=10), + status=BatchStatus.ACCEPTING_MESSAGES, + ) + message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + assert batch.will_accept(message) is False diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py new file mode 100644 index 000000000000..00b761f52b96 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -0,0 +1,204 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import threading +import time + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch.thread import Batch + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def create_batch(autocommit=False, **batch_settings): + """Return a batch object suitable for testing. + + Args: + autocommit (bool): Whether the batch should commit after + ``max_latency`` seconds. By default, this is ``False`` + for unit testing. + kwargs (dict): Arguments passed on to the + :class:``~.pubsub_v1.types.BatchSettings`` constructor. + + Returns: + ~.pubsub_v1.publisher.batch.thread.Batch: A batch object. + """ + client = create_client() + settings = types.BatchSettings(**batch_settings) + return Batch(client, 'topic_name', settings, autocommit=autocommit) + + +def test_init(): + """Establish that a monitor thread is usually created on init.""" + client = create_client() + + # Do not actually create a thread, but do verify that one was created; + # it should be running the batch's "monitor" method (which commits the + # batch once time elapses). + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch = Batch(client, 'topic_name', types.BatchSettings()) + Thread.assert_called_once_with(target=batch.monitor) + + # New batches start able to accept messages by default. 
+ assert batch.status == BatchStatus.ACCEPTING_MESSAGES + + +def test_init_infinite_latency(): + batch = create_batch(max_latency=float('inf')) + assert batch._thread is None + + +def test_client(): + client = create_client() + settings = types.BatchSettings() + batch = Batch(client, 'topic_name', settings, autocommit=False) + assert batch.client is client + + +def test_commit(): + batch = create_batch() + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch.commit() + + # A thread should have been created to do the actual commit. + Thread.assert_called_once_with(target=batch._commit) + Thread.return_value.start.assert_called_once_with() + + # The batch's status needs to be something other than "accepting messages", + # since the commit started. + assert batch.status != BatchStatus.ACCEPTING_MESSAGES + + +def test_blocking_commit(): + batch = create_batch() + futures = ( + batch.publish({'data': b'This is my message.'}), + batch.publish({'data': b'This is another message.'}), + ) + + # Set up the underlying API publish method to return a PublishResponse. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a', 'b']) + + # Actually commit the batch. + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. + publish.assert_called_once_with('topic_name', [ + types.PubsubMessage(data=b'This is my message.'), + types.PubsubMessage(data=b'This is another message.'), + ]) + + # Establish that all of the futures are done, and that they have the + # expected values. 
+ assert all([f.done() for f in futures]) + assert futures[0].result() == 'a' + assert futures[1].result() == 'b' + + +def test_blocking_commit_no_messages(): + batch = create_batch() + with mock.patch.object(type(batch.client.api), 'publish') as publish: + batch._commit() + assert publish.call_count == 0 + + +def test_blocking_commit_wrong_messageid_length(): + batch = create_batch() + futures = ( + batch.publish({'data': b'blah blah blah'}), + batch.publish({'data': b'blah blah blah blah'}), + ) + + # Set up a PublishResponse that only returns one message ID. + with mock.patch.object(type(batch.client.api), 'publish') as publish: + publish.return_value = types.PublishResponse(message_ids=['a']) + batch._commit() + for future in futures: + assert future.done() + assert isinstance(future.exception(), exceptions.PublishError) + + +def test_monitor(): + batch = create_batch(max_latency=5.0) + with mock.patch.object(time, 'sleep') as sleep: + with mock.patch.object(type(batch), '_commit') as _commit: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # Since `monitor` runs in its own thread, it should call + # the blocking commit implementation. + _commit.assert_called_once_with() + + +def test_monitor_already_committed(): + batch = create_batch(max_latency=5.0) + batch._status = 'something else' + with mock.patch.object(time, 'sleep') as sleep: + batch.monitor() + + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) + + # The status should not have changed. + assert batch._status == 'something else' + + +def test_publish(): + batch = create_batch() + messages = ( + types.PubsubMessage(data=b'foobarbaz'), + types.PubsubMessage(data=b'spameggs'), + types.PubsubMessage(data=b'1335020400'), + ) + + # Publish each of the messages, which should save them to the batch. 
+ for message in messages: + batch.publish(message) + + # There should be three messages on the batch, and three futures. + assert len(batch.messages) == 3 + assert len(batch._futures) == 3 + + # The size should have been incremented by the sum of the size of the + # messages. + assert batch.size == sum([m.ByteSize() for m in messages]) + assert batch.size > 0 # I do not always trust protobuf. + + +def test_publish_dict(): + batch = create_batch() + batch.publish({'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) + + # There should be one message on the batch. + assert len(batch.messages) == 1 + + # It should be an actual protobuf Message at this point, with the + # expected values. + message = batch.messages[0] + assert isinstance(message, types.PubsubMessage) + assert message.data == b'foobarbaz' + assert message.attributes == {'spam': 'eggs'} diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py new file mode 100644 index 000000000000..e9b64a202e94 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py @@ -0,0 +1,118 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import mock + +import pytest + +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.futures import Future + + +def test_cancel(): + assert Future().cancel() is False + + +def test_cancelled(): + assert Future().cancelled() is False + + +def test_running(): + assert Future().running() is True + + +def test_done(): + future = Future() + assert future.done() is False + future.set_result('12345') + assert future.done() is True + + +def test_exception_no_error(): + future = Future() + future.set_result('12345') + assert future.exception() is None + + +def test_exception_with_error(): + future = Future() + error = RuntimeError('Something really bad happened.') + future.set_exception(error) + + # Make sure that the exception that is returned is the batch's error. + # Also check the type to ensure the batch's error did not somehow + # change internally. + assert future.exception() is error + assert isinstance(future.exception(), RuntimeError) + with pytest.raises(RuntimeError): + future.result() + + +def test_exception_timeout(): + future = Future() + with pytest.raises(exceptions.TimeoutError): + future.exception(timeout=0.01) + + +def test_result_no_error(): + future = Future() + future.set_result('42') + assert future.result() == '42' + + +def test_result_with_error(): + future = Future() + future.set_exception(RuntimeError('Something really bad happened.')) + with pytest.raises(RuntimeError): + future.result() + + +def test_add_done_callback_pending_batch(): + future = Future() + callback = mock.Mock() + future.add_done_callback(callback) + assert len(future._callbacks) == 1 + assert callback in future._callbacks + assert callback.call_count == 0 + + +def test_add_done_callback_completed_batch(): + future = Future() + future.set_result('12345') + callback = mock.Mock(spec=()) + future.add_done_callback(callback) + callback.assert_called_once_with(future) + + +def test_trigger(): + future = Future() + callback = 
mock.Mock(spec=()) + future.add_done_callback(callback) + assert callback.call_count == 0 + future.set_result('12345') + callback.assert_called_once_with(future) + + +def test_set_result_once_only(): + future = Future() + future.set_result('12345') + with pytest.raises(RuntimeError): + future.set_result('67890') + + +def test_set_exception_once_only(): + future = Future() + future.set_exception(ValueError('wah wah')) + with pytest.raises(RuntimeError): + future.set_exception(TypeError('other wah wah')) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py new file mode 100644 index 000000000000..0054b25262b5 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -0,0 +1,143 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.gapic.pubsub.v1 import publisher_client +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def test_init(): + client = create_client() + + # A plain client should have an `api` (the underlying GAPIC) and a + # batch settings object, which should have the defaults. 
+ assert isinstance(client.api, publisher_client.PublisherClient) + assert client.batch_settings.max_bytes == 5 * (2 ** 20) + assert client.batch_settings.max_latency == 0.05 + assert client.batch_settings.max_messages == 1000 + + +def test_batch_accepting(): + """Establish that an existing batch is returned if it accepts messages.""" + client = create_client() + message = types.PubsubMessage(data=b'foo') + + # At first, there are no batches, so this should return a new batch + # which is also saved to the object. + ante = len(client._batches) + batch = client.batch('topic_name', message, autocommit=False) + assert len(client._batches) == ante + 1 + assert batch is client._batches['topic_name'] + + # A subsequent request should return the same batch. + batch2 = client.batch('topic_name', message, autocommit=False) + assert batch is batch2 + assert batch2 is client._batches['topic_name'] + + +def test_batch_without_autocreate(): + client = create_client() + message = types.PubsubMessage(data=b'foo') + + # If `create=False` is sent, then when the batch is not found, None + # is returned instead. + ante = len(client._batches) + batch = client.batch('topic_name', message, create=False) + assert batch is None + assert len(client._batches) == ante + + +def test_publish(): + client = create_client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'spam') + client.publish('topic_name', b'foo', bar='baz') + + # The batch's publish method should have been called twice. + assert batch.publish.call_count == 2 + + # In both cases + # The first call should correspond to the first message. 
+ _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'spam' + assert not args[0].attributes + + # The second call should correspond to the second message. + _, args, _ = batch.publish.mock_calls[1] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_data_not_bytestring_error(): + client = create_client() + with pytest.raises(TypeError): + client.publish('topic_name', u'This is a text string.') + with pytest.raises(TypeError): + client.publish('topic_name', 42) + + +def test_publish_attrs_bytestring(): + client = create_client() + + # Use a mock in lieu of the actual batch class; set the mock up to claim + # indiscriminately that it accepts all messages. + batch = mock.Mock(spec=client._batch_class) + batch.will_accept.return_value = True + client._batches['topic_name'] = batch + + # Begin publishing. + client.publish('topic_name', b'foo', bar=b'baz') + + # The attributes should have been sent as text. + _, args, _ = batch.publish.mock_calls[0] + assert args[0].data == b'foo' + assert args[0].attributes == {u'bar': u'baz'} + + +def test_publish_attrs_type_error(): + client = create_client() + with pytest.raises(TypeError): + client.publish('topic_name', b'foo', answer=42) + + +def test_gapic_instance_method(): + client = create_client() + with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: + client.create_topic('projects/foo/topics/bar') + assert ct.call_count == 1 + _, args, _ = ct.mock_calls[0] + assert args[0] == types.Topic(name='projects/foo/topics/bar') + + +def test_gapic_class_method(): + client = create_client() + answer = client.topic_path('foo', 'bar') + assert answer == 'projects/foo/topics/bar' diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py new file mode 100644 index 000000000000..2a3429fbc5b3 --- /dev/null +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -0,0 +1,117 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import queue + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_consumer(): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + subscription = client.subscribe('sub_name_e') + return _consumer.Consumer(policy=subscription) + + +def test_send_request(): + consumer = create_consumer() + request = types.StreamingPullRequest(subscription='foo') + with mock.patch.object(queue.Queue, 'put') as put: + consumer.send_request(request) + put.assert_called_once_with(request) + + +def test_request_generator_thread(): + consumer = create_consumer() + generator = consumer._request_generator_thread() + + # The first request that comes from the request generator thread + # should always be the initial request. + initial_request = next(generator) + assert initial_request.subscription == 'sub_name_e' + assert initial_request.stream_ack_deadline_seconds == 10 + + # Subsequent requests correspond to items placed in the request queue. 
+ consumer.send_request(types.StreamingPullRequest(ack_ids=['i'])) + request = next(generator) + assert request.ack_ids == ['i'] + + # The poison pill should stop the loop. + consumer.send_request(_helper_threads.STOP) + with pytest.raises(StopIteration): + next(generator) + + +def test_blocking_consume(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we run out of them. + with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + consumer._blocking_consume() + assert on_res.call_count == 2 + assert on_res.mock_calls[0][1][1] == mock.sentinel.A + assert on_res.mock_calls[1][1][1] == mock.sentinel.B + + +def test_blocking_consume_keyboard_interrupt(): + consumer = create_consumer() + Policy = type(consumer._policy) + + # Establish that we get responses until we are sent the exiting event. + with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: + call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: + on_res.side_effect = KeyboardInterrupt + consumer._blocking_consume() + on_res.assert_called_once_with(consumer._policy, mock.sentinel.A) + + +@mock.patch.object(thread.Policy, 'call_rpc', autospec=True) +@mock.patch.object(thread.Policy, 'on_response', autospec=True) +@mock.patch.object(thread.Policy, 'on_exception', autospec=True) +def test_blocking_consume_exception_reraise(on_exc, on_res, call_rpc): + consumer = create_consumer() + + # Establish that we get responses until we are sent the exiting event. 
+ call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + on_res.side_effect = TypeError('Bad things!') + on_exc.side_effect = on_res.side_effect + with pytest.raises(TypeError): + consumer._blocking_consume() + + +def test_start_consuming(): + consumer = create_consumer() + helper_threads = consumer.helper_threads + with mock.patch.object(helper_threads, 'start', autospec=True) as start: + consumer.start_consuming() + assert consumer._exiting.is_set() is False + assert consumer.active is True + start.assert_called_once_with( + 'consume bidirectional stream', + consumer._request_queue, + consumer._blocking_consume, + ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py new file mode 100644 index 000000000000..84775f0be2c1 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -0,0 +1,125 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import queue +import threading + +import mock + +from google.cloud.pubsub_v1.subscriber import _helper_threads + + +def test_start(): + registry = _helper_threads.HelperThreadRegistry() + queue_ = queue.Queue() + target = mock.Mock(spec=()) + with mock.patch.object(threading.Thread, 'start', autospec=True) as start: + registry.start('foo', queue_, target) + assert start.called + + +def test_stop_noop(): + registry = _helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +def test_stop_dead_thread(): + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + +@mock.patch.object(queue.Queue, 'put') +@mock.patch.object(threading.Thread, 'is_alive') +@mock.patch.object(threading.Thread, 'join') +def test_stop_alive_thread(join, is_alive, put): + is_alive.return_value = True + + # Set up a registry with a helper thread in it. + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=queue.Queue(), + thread=threading.Thread(target=lambda: None), + ) + + # Assert that the helper thread is present, and removed correctly + # on stop. + assert len(registry._helper_threads) == 1 + registry.stop('foo') + assert len(registry._helper_threads) == 0 + + # Assert that all of our mocks were called in the expected manner. 
+ is_alive.assert_called_once_with() + join.assert_called_once_with() + put.assert_called_once_with(_helper_threads.STOP) + + +def test_stop_all(): + registry = _helper_threads.HelperThreadRegistry() + registry._helper_threads['foo'] = _helper_threads._HelperThread( + name='foo', + queue=None, + thread=threading.Thread(target=lambda: None), + ) + assert len(registry._helper_threads) == 1 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_stop_all_noop(): + registry = _helper_threads.HelperThreadRegistry() + assert len(registry._helper_threads) == 0 + registry.stop_all() + assert len(registry._helper_threads) == 0 + + +def test_queue_callback_thread(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = _helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 2 + callback.assert_called_once_with(mock.sentinel.A) + + +def test_queue_callback_thread_exception(): + queue_ = queue.Queue() + callback = mock.Mock(spec=(), side_effect=(Exception,)) + qct = _helper_threads.QueueCallbackThread(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + qct() + + # Assert that we got the expected calls. 
+ assert get.call_count == 2 + callback.assert_called_once_with(mock.sentinel.A) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py new file mode 100644 index 000000000000..23474a19d116 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -0,0 +1,84 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.cloud.pubsub_v1.subscriber import _histogram + + +def test_init(): + data = {} + histo = _histogram.Histogram(data=data) + assert histo._data is data + assert len(histo) == 0 + + +def test_contains(): + histo = _histogram.Histogram() + histo.add(10) + histo.add(20) + assert 10 in histo + assert 20 in histo + assert 30 not in histo + + +def test_max(): + histo = _histogram.Histogram() + assert histo.max == 600 + histo.add(120) + assert histo.max == 120 + histo.add(150) + assert histo.max == 150 + histo.add(20) + assert histo.max == 150 + + +def test_min(): + histo = _histogram.Histogram() + assert histo.min == 10 + histo.add(60) + assert histo.min == 60 + histo.add(30) + assert histo.min == 30 + histo.add(120) + assert histo.min == 30 + + +def test_add(): + histo = _histogram.Histogram() + histo.add(60) + assert histo._data[60] == 1 + histo.add(60) + assert histo._data[60] == 2 + + +def test_add_lower_limit(): + histo = _histogram.Histogram() + histo.add(5) + assert 5 not in histo + assert 10 in histo + + +def test_add_upper_limit(): + histo = _histogram.Histogram() + histo.add(12000) + assert 12000 not in histo + assert 600 in histo + + +def test_percentile(): + histo = _histogram.Histogram() + [histo.add(i) for i in range(101, 201)] + assert histo.percentile(100) == 200 + assert histo.percentile(101) == 200 + assert histo.percentile(99) == 199 + assert histo.percentile(1) == 101 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py new file mode 100644 index 000000000000..a3a1e16f027e --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -0,0 +1,102 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import queue +import time + +import mock + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import message + + +def create_message(data, ack_id='ACKID', **attrs): + with mock.patch.object(message.Message, 'lease') as lease: + with mock.patch.object(time, 'time') as time_: + time_.return_value = 1335020400 + msg = message.Message(types.PubsubMessage( + attributes=attrs, + data=data, + message_id='message_id', + publish_time=types.Timestamp(seconds=1335020400 - 86400), + ), ack_id, queue.Queue()) + lease.assert_called_once_with() + return msg + + +def test_attributes(): + msg = create_message(b'foo', baz='bacon', spam='eggs') + assert msg.attributes == {'baz': 'bacon', 'spam': 'eggs'} + + +def test_data(): + msg = create_message(b'foo') + assert msg.data == b'foo' + + +def test_publish_time(): + msg = create_message(b'foo') + assert msg.publish_time == types.Timestamp(seconds=1335020400 - 86400) + + +def test_ack(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + with mock.patch.object(message.Message, 'drop') as drop: + msg.ack() + put.assert_called_once_with(('ack', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + 'time_to_ack': mock.ANY, + })) + + +def test_drop(): + msg = create_message(b'foo', ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.drop() + put.assert_called_once_with(('drop', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) + + +def test_lease(): + msg = create_message(b'foo', 
ack_id='bogus_ack_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.lease() + put.assert_called_once_with(('lease', { + 'ack_id': 'bogus_ack_id', + 'byte_size': 25, + })) + + +def test_modify_ack_deadline(): + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.modify_ack_deadline(60) + put.assert_called_once_with(('modify_ack_deadline', { + 'ack_id': 'bogus_id', + 'seconds': 60, + })) + + +def test_nack(): + msg = create_message(b'foo', ack_id='bogus_id') + with mock.patch.object(msg._request_queue, 'put') as put: + msg.nack() + put.assert_called_once_with(('nack', { + 'ack_id': 'bogus_id', + 'byte_size': 25, + })) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py new file mode 100644 index 000000000000..df963424ccb9 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -0,0 +1,231 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import time + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(flow_control=types.FlowControl()): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_d', flow_control=flow_control) + + +def test_ack_deadline(): + policy = create_policy() + assert policy.ack_deadline == 10 + policy.histogram.add(20) + assert policy.ack_deadline == 20 + policy.histogram.add(10) + assert policy.ack_deadline == 20 + + +def test_get_initial_request(): + policy = create_policy() + initial_request = policy.get_initial_request() + assert isinstance(initial_request, types.StreamingPullRequest) + assert initial_request.subscription == 'sub_name_d' + assert initial_request.stream_ack_deadline_seconds == 10 + + +def test_managed_ack_ids(): + policy = create_policy() + + # Ensure we always get a set back, even if the property is not yet set. + managed_ack_ids = policy.managed_ack_ids + assert isinstance(managed_ack_ids, set) + + # Ensure that multiple calls give the same actual object back. 
+ assert managed_ack_ids is policy.managed_ack_ids + + +def test_subscription(): + policy = create_policy() + assert policy.subscription == 'sub_name_d' + + +def test_ack(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string', 20) + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 1 + assert 20 in policy.histogram + + +def test_ack_no_time(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.ack('ack_id_string') + send_request.assert_called_once_with(types.StreamingPullRequest( + ack_ids=['ack_id_string'], + )) + assert len(policy.histogram) == 0 + + +def test_ack_paused(): + policy = create_policy() + policy._paused = True + policy._consumer.active = False + with mock.patch.object(policy, 'open') as open_: + policy.ack('ack_id_string') + open_.assert_called() + assert 'ack_id_string' in policy._ack_on_resume + + +def test_call_rpc(): + policy = create_policy() + with mock.patch.object(policy._client.api, 'streaming_pull') as pull: + policy.call_rpc(mock.sentinel.GENERATOR) + pull.assert_called_once_with(mock.sentinel.GENERATOR) + + +def test_drop(): + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + # Do this again to establish idempotency. + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + + +def test_drop_below_threshold(): + """Establish that we resume a paused subscription. + + If the subscription is paused, and we drop sufficiently below + the flow control thresholds, it should resume. 
+ """ + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 20 + policy._paused = True + with mock.patch.object(policy, 'open') as open_: + policy.drop(ack_id='ack_id_string', byte_size=20) + open_.assert_called_once_with(policy._callback) + assert policy._paused is False + + +def test_load(): + flow_control = types.FlowControl(max_messages=10, max_bytes=1000) + policy = create_policy(flow_control=flow_control) + + # This should mean that our messages count is at 10%, and our bytes + # are at 15%; the ._load property should return the higher (0.15). + policy.lease(ack_id='one', byte_size=150) + assert policy._load == 0.15 + + # After this message is added, the messages should be higher at 20% + # (versus 16% for bytes). + policy.lease(ack_id='two', byte_size=10) + assert policy._load == 0.2 + + # Returning a number above 100% is fine. + policy.lease(ack_id='three', byte_size=1000) + assert policy._load == 1.16 + + +def test_modify_ack_deadline(): + policy = create_policy() + with mock.patch.object(policy._consumer, 'send_request') as send_request: + policy.modify_ack_deadline('ack_id_string', 60) + send_request.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['ack_id_string'], + modify_deadline_seconds=[60], + )) + + +def test_maintain_leases_inactive_consumer(): + policy = create_policy() + policy._consumer.active = False + assert policy.maintain_leases() is None + + +def test_maintain_leases_ack_ids(): + policy = create_policy() + policy._consumer.active = True + policy.lease('my ack id', 50) + + # Mock the sleep object. + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + + # Also mock the consumer, which sends the request. 
+ with mock.patch.object(policy._consumer, 'send_request') as send: + policy.maintain_leases() + send.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['my ack id'], + modify_deadline_seconds=[10], + )) + sleep.assert_called() + + +def test_maintain_leases_no_ack_ids(): + policy = create_policy() + policy._consumer.active = True + with mock.patch.object(time, 'sleep', autospec=True) as sleep: + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer.active = False + sleep.side_effect = trigger_inactive + policy.maintain_leases() + sleep.assert_called() + + +def test_lease(): + policy = create_policy() + policy.lease(ack_id='ack_id_string', byte_size=20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + # Do this again to prove idempotency. + policy.lease(ack_id='ack_id_string', byte_size=20) + assert len(policy.managed_ack_ids) == 1 + assert policy._bytes == 20 + + +def test_lease_above_threshold(): + flow_control = types.FlowControl(max_messages=2) + policy = create_policy(flow_control=flow_control) + with mock.patch.object(policy, 'close') as close: + policy.lease(ack_id='first_ack_id', byte_size=20) + assert close.call_count == 0 + policy.lease(ack_id='second_ack_id', byte_size=25) + close.assert_called_once_with() + + +def test_nack(): + policy = create_policy() + with mock.patch.object(policy, 'modify_ack_deadline') as mad: + with mock.patch.object(policy, 'drop') as drop: + policy.nack(ack_id='ack_id_string', byte_size=10) + drop.assert_called_once_with(ack_id='ack_id_string', byte_size=10) + mad.assert_called_once_with(ack_id='ack_id_string', seconds=0) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py new file mode 100644 index 000000000000..76aec184815e --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py 
@@ -0,0 +1,120 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from concurrent import futures +import queue +import threading + +import grpc + +import mock + +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(**kwargs): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_c', **kwargs) + + +def test_init(): + policy = create_policy() + policy._callback(None) + + +def test_init_with_executor(): + executor = futures.ThreadPoolExecutor(max_workers=25) + policy = create_policy(executor=executor, queue=queue.Queue()) + assert policy._executor is executor + + +def test_close(): + policy = create_policy() + consumer = policy._consumer + with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: + policy.close() + stop_consuming.assert_called_once_with() + assert 'callback request worker' not in policy._consumer.helper_threads + + +@mock.patch.object(_helper_threads.HelperThreadRegistry, 'start') +@mock.patch.object(threading.Thread, 'start') +def test_open(thread_start, htr_start): + policy = 
create_policy() + with mock.patch.object(policy._consumer, 'start_consuming') as consuming: + policy.open(mock.sentinel.CALLBACK) + assert policy._callback is mock.sentinel.CALLBACK + consuming.assert_called_once_with() + htr_start.assert_called() + thread_start.assert_called() + + +def test_on_callback_request(): + policy = create_policy() + with mock.patch.object(policy, 'call_rpc') as call_rpc: + policy.on_callback_request(('call_rpc', {'something': 42})) + call_rpc.assert_called_once_with(something=42) + + +def test_on_exception_deadline_exceeded(): + policy = create_policy() + exc = mock.Mock(spec=('code',)) + exc.code.return_value = grpc.StatusCode.DEADLINE_EXCEEDED + assert policy.on_exception(exc) is None + + +def test_on_exception_other(): + policy = create_policy() + exc = TypeError('wahhhhhh') + with pytest.raises(TypeError): + policy.on_exception(exc) + + +def test_on_response(): + callback = mock.Mock(spec=()) + + # Set up the policy. + policy = create_policy() + policy._callback = callback + + # Set up the messages to send. + messages = ( + types.PubsubMessage(data=b'foo', message_id='1'), + types.PubsubMessage(data=b'bar', message_id='2'), + ) + + # Set up a valid response. + response = types.StreamingPullResponse( + received_messages=[ + {'ack_id': 'fack', 'message': messages[0]}, + {'ack_id': 'back', 'message': messages[1]}, + ], + ) + + # Actually run the method and prove that the callback was + # called in the expected way. 
+ policy.on_response(response) + assert callback.call_count == 2 + for call in callback.mock_calls: + assert isinstance(call[1][0], message.Message) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py new file mode 100644 index 000000000000..50e90fead181 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -0,0 +1,44 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return subscriber.Client(credentials=creds) + + +def test_init(): + client = create_client() + assert client._policy_class is thread.Policy + + +def test_subscribe(): + client = create_client() + subscription = client.subscribe('sub_name_a') + assert isinstance(subscription, thread.Policy) + + +def test_subscribe_with_callback(): + client = create_client() + callback = mock.Mock() + with mock.patch.object(thread.Policy, 'open') as open_: + subscription = client.subscribe('sub_name_b', callback) + open_.assert_called_once_with(callback) + assert isinstance(subscription, thread.Policy) diff --git a/packages/google-cloud-pubsub/tests/unit/test__gax.py b/packages/google-cloud-pubsub/tests/unit/test__gax.py deleted file mode 100644 index dd2ea8077f84..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test__gax.py +++ /dev/null @@ -1,1661 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - -try: - # pylint: disable=unused-import - import google.cloud.pubsub._gax - # pylint: enable=unused-import -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False -else: - _HAVE_GRPC = True - -from google.cloud._testing import _GAXBaseAPI - - -def _make_credentials(): - # pylint: disable=redefined-outer-name - import google.auth.credentials - # pylint: enable=redefined-outer-name - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(object): - PROJECT = 'PROJECT' - PROJECT_PATH = 'projects/%s' % (PROJECT,) - LIST_TOPICS_PATH = '%s/topics' % (PROJECT_PATH,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SUB_NAME = 'sub_name' - SUB_PATH = '%s/subscriptions/%s' % (TOPIC_PATH, SUB_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = '%s/snapshots/%s' % (PROJECT_PATH, SNAPSHOT_NAME) - TIME = 12345 - - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_PublisherAPI(_Base, unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _PublisherAPI - - return _PublisherAPI - - def test_ctor(self): - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_topics_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - TOKEN = 'TOKEN' - response = _GAXPageIterator([_TopicPB(self.TOPIC_PATH)], - page_token=TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - 
next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_topics_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - response = _GAXPageIterator( - [_TopicPB(self.TOPIC_PATH)], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topics_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - iterator = api.list_topics( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, NEW_TOKEN) - - name, page_size, options = gax_api._list_topics_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_create(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_create_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_failed_precondition(self): - from google.cloud.exceptions import 
Conflict - - gax_api = _GAXPublisherAPI(_create_topic_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXPublisherAPI(_create_topic_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_create(self.TOPIC_PATH) - - topic_path, options = gax_api._create_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_hit(self): - topic_pb = _TopicPB(self.TOPIC_PATH) - gax_api = _GAXPublisherAPI(_get_topic_response=topic_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_get(self.TOPIC_PATH) - - self.assertEqual(resource, {'name': self.TOPIC_PATH}) - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - 
self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_get(self.TOPIC_PATH) - - topic_path, options = gax_api._get_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_hit(self): - gax_api = _GAXPublisherAPI(_delete_topic_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXPublisherAPI(_delete_topic_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_delete(self.TOPIC_PATH) - - topic_path, options = gax_api._delete_topic_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': B64, 'attributes': {}} - response = _PublishResponsePB([MSGID]) - gax_api = _GAXPublisherAPI(_publish_response=response) - client = 
_Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - self.assertEqual(options.is_bundling, False) - - def test_topic_publish_miss_w_attrs_w_bytes_payload(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD) - MESSAGE = {'data': B64, 'attributes': {'foo': 'bar'}} - timeout = 120 # 120 seconds or 2 minutes - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE], timeout=timeout) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data, B64) - self.assertEqual(message_pb.attributes, {'foo': 'bar'}) - self.assertEqual(options.is_bundling, False) - self.assertEqual(options.timeout, timeout) - - def test_topic_publish_error(self): - import base64 - from google.gax.errors import GaxError - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': B64, 'attributes': {}} - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - topic_path, message_pbs, options = gax_api._publish_called_with - self.assertEqual(topic_path, self.TOPIC_PATH) - message_pb, = message_pbs - self.assertEqual(message_pb.data.decode('ascii'), B64) - self.assertEqual(message_pb.attributes, {}) - 
self.assertEqual(options.is_bundling, False) - - def test_topic_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator([local_sub_path]) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_with_paging(self): - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - response = _GAXPageIterator( - [local_sub_path], page_token=NEW_TOKEN) - gax_api = _GAXPublisherAPI(_list_topic_subscriptions_response=response) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_size=SIZE, page_token=TOKEN) - subscriptions = 
list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(next_token, NEW_TOKEN) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - name, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(name, self.TOPIC_PATH) - self.assertEqual(page_size, SIZE) - self.assertEqual(options.page_token, TOKEN) - - def test_topic_list_subscriptions_miss(self): - from google.gax import INITIAL_PAGE - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_topic_list_subscriptions_error(self): - from google.gax import INITIAL_PAGE - from google.gax.errors import GaxError - from google.cloud.pubsub.topic import Topic - - gax_api = _GAXPublisherAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - topic = Topic(self.TOPIC_NAME, client) - api.topic_list_subscriptions(topic) - - topic_path, page_size, options = ( - gax_api._list_topic_subscriptions_called_with) - self.assertEqual(topic_path, self.TOPIC_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_SubscriberAPI(_Base, unittest.TestCase): - - PUSH_ENDPOINT = 'https://api.example.com/push' 
- - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._gax import _SubscriberAPI - - return _SubscriberAPI - - def test_ctor(self): - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - self.assertIs(api._gax_api, gax_api) - self.assertIs(api._client, client) - - def test_list_subscriptions_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb]) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 0) - self.assertIs(options.page_token, INITIAL_PAGE) - - def test_list_subscriptions_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Subscription as SubscriptionPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - local_sub_path = '%s/subscriptions/%s' % ( - self.PROJECT_PATH, self.SUB_NAME) - sub_pb = SubscriptionPB(name=local_sub_path, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - response = _GAXPageIterator([sub_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_subscriptions_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_subscriptions( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertEqual(subscription.push_endpoint, self.PUSH_ENDPOINT) - - name, page_size, options = gax_api._list_subscriptions_called_with - self.assertEqual(name, self.PROJECT_PATH) - self.assertEqual(page_size, 23) - self.assertEqual(options.page_token, TOKEN) - - def test_subscription_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_optional_params(self): - import datetime - - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - sub_pb = Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - 
expected_ack_deadline = 1729 - expected_push_endpoint = 'push-endpoint' - expected_retain_acked_messages = True - expected_message_retention_duration = datetime.timedelta( - days=1, hours=7, minutes=2, seconds=9) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, ack_deadline=expected_ack_deadline, - push_endpoint=expected_push_endpoint, - retain_acked_messages=expected_retain_acked_messages, - message_retention_duration=expected_message_retention_duration) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - print(gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, expected_push_endpoint) - self.assertEqual(ack_deadline, expected_ack_deadline) - self.assertEqual(retain_acked_messages, expected_retain_acked_messages) - self.assertEqual(message_retention_duration.seconds, - expected_message_retention_duration.total_seconds()) - self.assertIsNone(options) - - def test_subscription_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI( - _create_subscription_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - 
self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_already_exists(self): - from google.cloud.exceptions import Conflict - - DEADLINE = 600 - gax_api = _GAXSubscriberAPI(_create_subscription_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, DEADLINE, self.PUSH_ENDPOINT) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertEqual(push_config.push_endpoint, self.PUSH_ENDPOINT) - self.assertEqual(ack_deadline, DEADLINE) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - (name, topic, push_config, ack_deadline, retain_acked_messages, - message_retention_duration, options) = ( - gax_api._create_subscription_called_with) - self.assertEqual(name, self.SUB_PATH) - self.assertEqual(topic, self.TOPIC_PATH) - self.assertIsNone(push_config) - self.assertEqual(ack_deadline, None) - self.assertIsNone(retain_acked_messages) - self.assertIsNone(message_retention_duration) - self.assertIsNone(options) - - def test_subscription_get_hit(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import PushConfig - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Subscription - - push_cfg_pb = PushConfig(push_endpoint=self.PUSH_ENDPOINT) - sub_pb = 
Subscription(name=self.SUB_PATH, topic=self.TOPIC_PATH, - push_config=push_cfg_pb) - gax_api = _GAXSubscriberAPI(_get_subscription_response=sub_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.subscription_get(self.SUB_PATH) - - expected = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'pushConfig': { - 'pushEndpoint': self.PUSH_ENDPOINT, - }, - } - self.assertEqual(resource, expected) - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_get_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_get(self.SUB_PATH) - - sub_path, options = gax_api._get_subscription_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertIsNone(options) - - def test_subscription_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_subscription_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) 
- - with self.assertRaises(NotFound): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_delete(self.TOPIC_PATH) - - sub_path, options = gax_api._delete_subscription_called_with - self.assertEqual(sub_path, self.TOPIC_PATH) - self.assertIsNone(options) - - def test_subscription_modify_push_config_hit(self): - gax_api = _GAXSubscriberAPI(_modify_push_config_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_push_config(self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) - - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_modify_push_config_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_push_config( - self.SUB_PATH, self.PUSH_ENDPOINT) 
- - sub_path, config, options = gax_api._modify_push_config_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(config.push_endpoint, self.PUSH_ENDPOINT) - self.assertIsNone(options) - - def test_subscription_pull_explicit(self): - import base64 - import datetime - from google.cloud._helpers import UTC - from google.cloud._helpers import _datetime_to_pb_timestamp - from google.cloud._helpers import _datetime_to_rfc3339 - - NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) - NOW_PB = _datetime_to_pb_timestamp(NOW) - NOW_RFC3339 = _datetime_to_rfc3339(NOW) - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = { - 'messageId': MSG_ID, - 'data': B64, - 'attributes': {'a': 'b'}, - 'publishTime': NOW_RFC3339, - } - RECEIVED = [{'ackId': ACK_ID, 'message': MESSAGE}] - message_pb = _PubsubMessagePB(MSG_ID, B64, {'a': 'b'}, NOW_PB) - response_pb = _PullResponsePB([_ReceivedMessagePB(ACK_ID, message_pb)]) - gax_api = _GAXSubscriberAPI(_pull_response=response_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - MAX_MESSAGES = 10 - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RECEIVED) - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, MAX_MESSAGES) - self.assertTrue(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - 
self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_defaults_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH) - - sub_path, max_messages, return_immediately, options = ( - gax_api._pull_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(max_messages, 1) - self.assertFalse(return_immediately) - self.assertIsNone(options) - - def test_subscription_pull_deadline_exceeded(self): - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - result = api.subscription_pull(self.SUB_PATH) - self.assertEqual(result, []) - - def test_subscription_pull_deadline_exceeded_return_immediately(self): - from google.gax.errors import GaxError - - client = _Client(self.PROJECT) - gax_api = _GAXSubscriberAPI(_deadline_exceeded_gax_error=True) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_pull(self.SUB_PATH, return_immediately=True) - - def test_subscription_acknowledge_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_acknowledge_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - 
- with self.assertRaises(NotFound): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_acknowledge_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - sub_path, ack_ids, options = gax_api._acknowledge_called_with - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_hit(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_modify_ack_deadline_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_miss(self): - from google.cloud.exceptions import NotFound - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI() - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - 
self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_subscription_modify_ack_deadline_error(self): - from google.gax.errors import GaxError - - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - sub_path, ack_ids, deadline, options = ( - gax_api._modify_ack_deadline_called_with) - self.assertEqual(sub_path, self.SUB_PATH) - self.assertEqual(ack_ids, [ACK_ID1, ACK_ID2]) - self.assertEqual(deadline, NEW_DEADLINE) - self.assertIsNone(options) - - def test_list_snapshots_no_paging(self): - from google.gax import INITIAL_PAGE - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB( - name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb]) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the snapshot object returned. 
- self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_list_snapshots_with_paging(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import ( - Snapshot as SnapshotPB) - from google.cloud._testing import _GAXPageIterator - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - from google.cloud.pubsub.topic import Topic - - SIZE = 23 - TOKEN = 'TOKEN' - NEW_TOKEN = 'NEW_TOKEN' - local_snapshot_path = '%s/snapshots/%s' % ( - self.PROJECT_PATH, self.SNAPSHOT_NAME) - snapshot_pb = SnapshotPB(name=local_snapshot_path, topic=self.TOPIC_PATH) - response = _GAXPageIterator([snapshot_pb], page_token=NEW_TOKEN) - gax_api = _GAXSubscriberAPI(_list_snapshots_response=response) - client = _Client(self.PROJECT) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - api = self._make_one(gax_api, client) - - iterator = api.list_snapshots( - self.PROJECT, page_size=SIZE, page_token=TOKEN) - snapshots = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, NEW_TOKEN) - # Check the snapshot object returned. 
- self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot.topic, Topic) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - - def test_subscription_seek_hit(self): - gax_api = _GAXSubscriberAPI(_seek_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_seek_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_subscription_seek_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.subscription_seek( - self.SUB_PATH, time=self.TIME, snapshot=self.SNAPSHOT_PATH) - - subscription_path, time, snapshot_path, options = ( - gax_api._seek_called_with) - self.assertEqual(subscription_path, self.SUB_PATH) - self.assertEqual(time, self.TIME) - 
self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_create(self): - from google.cloud.proto.pubsub.v1.pubsub_pb2 import Snapshot - - snapshot_pb = Snapshot(name=self.SNAPSHOT_PATH, topic=self.TOPIC_PATH) - gax_api = _GAXSubscriberAPI(_create_snapshot_response=snapshot_pb) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - expected = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH, - } - self.assertEqual(resource, expected) - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_failed_precondition(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_failed_precondition=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import Conflict - - gax_api = _GAXSubscriberAPI(_create_snapshot_already_exists=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(Conflict): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_subscrption_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = 
_GAXSubscriberAPI(_snapshot_create_subscription_miss=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_create_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - name, subscription, options = ( - gax_api._create_snapshot_called_with) - self.assertEqual(name, self.SNAPSHOT_PATH) - self.assertEqual(subscription, self.SUB_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_hit(self): - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_miss(self): - from google.cloud.exceptions import NotFound - - gax_api = _GAXSubscriberAPI(_delete_snapshot_ok=False) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(NotFound): - api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - def test_snapshot_delete_error(self): - from google.gax.errors import GaxError - - gax_api = _GAXSubscriberAPI(_random_gax_error=True) - client = _Client(self.PROJECT) - api = self._make_one(gax_api, client) - - with self.assertRaises(GaxError): - 
api.snapshot_delete(self.SNAPSHOT_PATH) - - snapshot_path, options = gax_api._delete_snapshot_called_with - self.assertEqual(snapshot_path, self.SNAPSHOT_PATH) - self.assertIsNone(options) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_publisher_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_publisher_api - - return make_gax_publisher_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - publisher_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_publisher_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - publisher_api_kwargs = [] - mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_publisher_api(channel, **kwargs): - channels.append(channel) - publisher_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch 
= mock.patch.multiple( - 'google.cloud.pubsub._gax', - PublisherClient=mock_publisher_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(publisher_api_kwargs), 1) - self.assertEqual(publisher_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(publisher_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -@unittest.skipUnless(_HAVE_GRPC, 'No gax-python') -class Test_make_gax_subscriber_api(_Base, unittest.TestCase): - - def _call_fut(self, *args, **kwargs): - from google.cloud.pubsub._gax import make_gax_subscriber_api - - return make_gax_subscriber_api(*args, **kwargs) - - def test_live_api(self): - from google.cloud.pubsub import __version__ - from google.cloud.pubsub._gax import DEFAULT_USER_AGENT - - channels = [] - subscriber_api_kwargs = [] - channel_args = [] - channel_obj = object() - mock_result = object() - host = 'foo.apis.invalid' - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def make_channel(*args): - channel_args.append(args) - return channel_obj - - mock_subscriber_api.SERVICE_ADDRESS = host - - creds = _make_credentials() - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - make_secure_channel=make_channel) - with patch: - result = self._call_fut(creds) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [channel_obj]) - self.assertEqual(channel_args, - [(creds, DEFAULT_USER_AGENT, host)]) - - def test_emulator(self): - from google.cloud.pubsub import __version__ - - channels = [] - subscriber_api_kwargs = [] - 
mock_result = object() - insecure_args = [] - mock_channel = object() - - def mock_subscriber_api(channel, **kwargs): - channels.append(channel) - subscriber_api_kwargs.append(kwargs) - return mock_result - - def mock_insecure_channel(host): - insecure_args.append(host) - return mock_channel - - host = 'CURR_HOST:1234' - patch = mock.patch.multiple( - 'google.cloud.pubsub._gax', - SubscriberClient=mock_subscriber_api, - insecure_channel=mock_insecure_channel) - with patch: - result = self._call_fut(host=host) - - self.assertIs(result, mock_result) - self.assertEqual(len(subscriber_api_kwargs), 1) - self.assertEqual(subscriber_api_kwargs[0]['lib_name'], 'gccl') - self.assertEqual(subscriber_api_kwargs[0]['lib_version'], __version__) - self.assertEqual(channels, [mock_channel]) - self.assertEqual(insecure_args, [host]) - - -class _GAXPublisherAPI(_GAXBaseAPI): - - _create_topic_failed_precondition = False - _create_topic_already_exists = False - - def list_topics(self, name, page_size, options): - self._list_topics_called_with = name, page_size, options - return self._list_topics_response - - def create_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._create_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if self._create_topic_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._create_topic_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - return self._create_topic_response - - def get_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._get_topic_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_topic_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_topic(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_topic_called_with = name, 
options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_topic_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def publish(self, topic, messages, options=None): - from google.gax.errors import GaxError - - self._publish_called_with = topic, messages, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._publish_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_topic_subscriptions(self, topic, page_size, options=None): - from google.gax.errors import GaxError - - self._list_topic_subscriptions_called_with = topic, page_size, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._list_topic_subscriptions_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - -class _GAXSubscriberAPI(_GAXBaseAPI): - - _create_snapshot_already_exists = False - _create_snapshot_failed_precondition = False - _create_subscription_already_exists = False - _create_subscription_failed_precondition = False - _modify_push_config_ok = False - _acknowledge_ok = False - _modify_ack_deadline_ok = False - _deadline_exceeded_gax_error = False - _snapshot_create_subscription_miss=False - - def list_subscriptions(self, project, page_size, options=None): - self._list_subscriptions_called_with = (project, page_size, options) - return self._list_subscriptions_response - - def create_subscription(self, name, topic, push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - options=None): - from google.gax.errors import GaxError - - self._create_subscription_called_with = ( - name, topic, push_config, ack_deadline_seconds, - retain_acked_messages, message_retention_duration, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_subscription_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if 
self._create_subscription_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - return self._create_subscription_response - - def get_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._get_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - try: - return self._get_subscription_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def delete_subscription(self, name, options=None): - from google.gax.errors import GaxError - - self._delete_subscription_called_with = name, options - if self._random_gax_error: - raise GaxError('error') - if not self._delete_subscription_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_push_config(self, name, push_config, options=None): - from google.gax.errors import GaxError - - self._modify_push_config_called_with = name, push_config, options - if self._random_gax_error: - raise GaxError('error') - if not self._modify_push_config_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def pull(self, name, max_messages, return_immediately, options=None): - from google.gax.errors import GaxError - - self._pull_called_with = ( - name, max_messages, return_immediately, options) - if self._random_gax_error: - raise GaxError('error') - if self._deadline_exceeded_gax_error: - raise GaxError('deadline exceeded', - self._make_grpc_deadline_exceeded()) - try: - return self._pull_response - except AttributeError: - raise GaxError('miss', self._make_grpc_not_found()) - - def acknowledge(self, name, ack_ids, options=None): - from google.gax.errors import GaxError - - self._acknowledge_called_with = name, ack_ids, options - if self._random_gax_error: - raise GaxError('error') - if not self._acknowledge_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def modify_ack_deadline(self, name, ack_ids, deadline, options=None): - from google.gax.errors import 
GaxError - - self._modify_ack_deadline_called_with = ( - name, ack_ids, deadline, options) - if self._random_gax_error: - raise GaxError('error') - if not self._modify_ack_deadline_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def list_snapshots(self, project, page_size, options=None): - self._list_snapshots_called_with = (project, page_size, options) - return self._list_snapshots_response - - def create_snapshot(self, name, subscription, options=None): - from google.gax.errors import GaxError - - self._create_snapshot_called_with = (name, subscription, options) - if self._random_gax_error: - raise GaxError('error') - if self._create_snapshot_already_exists: - raise GaxError('conflict', self._make_grpc_already_exists()) - if self._create_snapshot_failed_precondition: - raise GaxError('conflict', self._make_grpc_failed_precondition()) - if self._snapshot_create_subscription_miss: - raise GaxError('miss', self._make_grpc_not_found()) - - return self._create_snapshot_response - - def delete_snapshot(self, snapshot, options=None): - from google.gax.errors import GaxError - - self._delete_snapshot_called_with = (snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._delete_snapshot_ok: - raise GaxError('miss', self._make_grpc_not_found()) - - def seek(self, subscription, time=None, snapshot=None, options=None): - from google.gax.errors import GaxError - - self._seek_called_with = (subscription, time, snapshot, options) - if self._random_gax_error: - raise GaxError('error') - if not self._seek_ok: - raise GaxError('miss', self._make_grpc_not_found()) - -class _TopicPB(object): - - def __init__(self, name): - self.name = name - - -class _PublishResponsePB(object): - - def __init__(self, message_ids): - self.message_ids = message_ids - - -class _PubsubMessagePB(object): - - def __init__(self, message_id, data, attributes, publish_time): - self.message_id = message_id - self.data = data - self.attributes = attributes - 
self.publish_time = publish_time - - -class _ReceivedMessagePB(object): - - def __init__(self, ack_id, message): - self.ack_id = ack_id - self.message = message - - -class _PullResponsePB(object): - - def __init__(self, received_messages): - self.received_messages = received_messages - - -class _Client(object): - - def __init__(self, project): - self.project = project diff --git a/packages/google-cloud-pubsub/tests/unit/test__helpers.py b/packages/google-cloud-pubsub/tests/unit/test__helpers.py deleted file mode 100644 index 0503d68b20b9..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test__helpers.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class Test_topic_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import topic_name_from_path - - return topic_name_from_path(path, project) - - def test_w_simple_name(self): - TOPIC_NAME = 'TOPIC_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - def test_w_name_w_all_extras(self): - TOPIC_NAME = 'TOPIC_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, TOPIC_NAME) - - -class Test_subscription_name_from_path(unittest.TestCase): - - def _call_fut(self, path, project): - from google.cloud.pubsub._helpers import subscription_name_from_path - - return subscription_name_from_path(path, project) - - def test_w_simple_name(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - subscription_name = self._call_fut(PATH, PROJECT) - self.assertEqual(subscription_name, SUBSCRIPTION_NAME) - - def test_w_name_w_all_extras(self): - SUBSCRIPTION_NAME = 'SUBSCRIPTION_NAME-part.one~part.two%part-three' - PROJECT = 'my-project-1234' - PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUBSCRIPTION_NAME) - topic_name = self._call_fut(PATH, PROJECT) - self.assertEqual(topic_name, SUBSCRIPTION_NAME) diff --git a/packages/google-cloud-pubsub/tests/unit/test__http.py b/packages/google-cloud-pubsub/tests/unit/test__http.py deleted file mode 100644 index 794fe093bbb3..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test__http.py +++ /dev/null @@ -1,1165 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class _Base(unittest.TestCase): - PROJECT = 'PROJECT' - LIST_TOPICS_PATH = 'projects/%s/topics' % (PROJECT,) - LIST_SNAPSHOTS_PATH = 'projects/%s/snapshots' % (PROJECT,) - LIST_SUBSCRIPTIONS_PATH = 'projects/%s/subscriptions' % (PROJECT,) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - LIST_TOPIC_SUBSCRIPTIONS_PATH = '%s/subscriptions' % (TOPIC_PATH,) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - -class TestConnection(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import Connection - - return Connection - - def test_default_url(self): - conn = self._make_one(object()) - klass = self._get_target_class() - self.assertEqual(conn.api_base_url, klass.API_BASE_URL) - - def test_custom_url_from_env(self): - from google.cloud.environment_vars import PUBSUB_EMULATOR - - HOST = 'localhost:8187' - fake_environ = {PUBSUB_EMULATOR: HOST} - - with mock.patch('os.environ', new=fake_environ): - conn = self._make_one(object()) - - klass = self._get_target_class() - self.assertNotEqual(conn.api_base_url, klass.API_BASE_URL) - self.assertEqual(conn.api_base_url, 'http://' + HOST) - - def 
test_build_api_url_no_extra_query_params(self): - conn = self._make_one(object()) - URI = '/'.join([ - conn.API_BASE_URL, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo'), URI) - - def test_build_api_url_w_extra_query_params(self): - from six.moves.urllib.parse import parse_qsl - from six.moves.urllib.parse import urlsplit - - conn = self._make_one(object()) - uri = conn.build_api_url('/foo', {'bar': 'baz'}) - scheme, netloc, path, qs, _ = urlsplit(uri) - self.assertEqual('%s://%s' % (scheme, netloc), conn.API_BASE_URL) - self.assertEqual(path, - '/'.join(['', conn.API_VERSION, 'foo'])) - parms = dict(parse_qsl(qs)) - self.assertEqual(parms['bar'], 'baz') - - def test_build_api_url_w_base_url_override(self): - base_url1 = 'api-base-url1' - base_url2 = 'api-base-url2' - conn = self._make_one(object()) - conn.api_base_url = base_url1 - URI = '/'.join([ - base_url2, - conn.API_VERSION, - 'foo', - ]) - self.assertEqual(conn.build_api_url('/foo', api_base_url=base_url2), - URI) - - def test_extra_headers(self): - import requests - - from google.cloud import _http as base_http - from google.cloud.pubsub import _http as MUT - - http = mock.create_autospec(requests.Session, instance=True) - response = requests.Response() - response.status_code = 200 - data = b'brent-spiner' - response._content = data - http.request.return_value = response - client = mock.Mock(_http=http, spec=['_http']) - - conn = self._make_one(client) - req_data = 'req-data-boring' - result = conn.api_request( - 'GET', '/rainbow', data=req_data, expect_json=False) - self.assertEqual(result, data) - - expected_headers = { - 'Accept-Encoding': 'gzip', - base_http.CLIENT_INFO_HEADER: MUT._CLIENT_INFO, - 'User-Agent': conn.USER_AGENT, - } - expected_uri = conn.build_api_url('/rainbow') - http.request.assert_called_once_with( - data=req_data, - headers=expected_headers, - method='GET', - url=expected_uri, - ) - - -class Test_PublisherAPI(_Base): - - @staticmethod - def 
_get_target_class(): - from google.cloud.pubsub._http import _PublisherAPI - - return _PublisherAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - returned = {'topics': [{'name': self.TOPIC_PATH}]} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_topics_with_paging(self): - import six - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - RETURNED = { - 'topics': [{'name': self.TOPIC_PATH}], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - topics = list(page) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - topic = topics[0] - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertEqual(next_token, 
TOKEN2) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_topics_missing_key(self): - returned = {} - connection = _Connection(returned) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_topics(self.PROJECT) - topics = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPICS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_create(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_create(self.TOPIC_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_create_already_exists(self): - from google.cloud.exceptions import Conflict - - connection = _Connection() - connection._no_response_error = Conflict - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(Conflict): - api.topic_create(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_hit(self): - RETURNED = {'name': self.TOPIC_PATH} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_get(self.TOPIC_PATH) - - 
self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_get_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_get(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_hit(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_delete_miss(self): - from google.cloud.exceptions import NotFound - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_delete(self.TOPIC_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_topic_publish_hit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': [MSGID]} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(resource, [MSGID]) - self.assertEqual(connection._called_with['method'], 'POST') - path = 
'/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - msg_data = connection._called_with['data']['messages'][0]['data'] - self.assertEqual(msg_data, B64_PAYLOAD) - - def test_topic_publish_twice(self): - import base64 - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - RETURNED = {'messageIds': []} - connection = _Connection(RETURNED, RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - messages = connection._called_with['data']['messages'] - self.assertEqual(len(messages), 1) - self.assertEqual(messages[0]['data'], B64_PAYLOAD) - - def test_topic_publish_miss(self): - import base64 - from google.cloud.exceptions import NotFound - - PAYLOAD = b'This is the message text' - B64_PAYLOAD = base64.b64encode(PAYLOAD).decode('ascii') - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - B64MSG = {'data': B64_PAYLOAD, 'attributes': {}} - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - api.topic_publish(self.TOPIC_PATH, [MESSAGE]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:publish' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'messages': [B64MSG]}) - - def test_topic_list_subscriptions_no_paging(self): - from google.cloud.pubsub.topic import Topic - from google.cloud.pubsub.subscription import Subscription - - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = {'subscriptions': [local_sub_path]} - connection = _Connection(RETURNED) - client = _Client(connection, 
self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_sub_path = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, self.SUB_NAME) - RETURNED = { - 'subscriptions': [local_sub_path], - 'nextPageToken': TOKEN2, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions( - topic, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertEqual(subscription.topic, topic) - self.assertIs(subscription._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - 
self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_topic_list_subscriptions_missing_key(self): - from google.cloud.pubsub.topic import Topic - - connection = _Connection({}) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - topic = Topic(self.TOPIC_NAME, client) - iterator = api.topic_list_subscriptions(topic) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_topic_list_subscriptions_miss(self): - from google.cloud.exceptions import NotFound - from google.cloud.pubsub.topic import Topic - - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - topic = Topic(self.TOPIC_NAME, client) - list(api.topic_list_subscriptions(topic)) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_TOPIC_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - -class Test_SubscriberAPI(_Base): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub._http import _SubscriberAPI - - return _SubscriberAPI - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - self.assertIs(api._client, client) - self.assertEqual(api.api_request, connection.api_request) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.client import Client - from 
google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = {'subscriptions': [SUB_INFO]} - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertIsNone(next_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - RETURNED = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': 'TOKEN2', - } - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_subscriptions( - 
self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - subscriptions = list(page) - next_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_token, TOKEN2) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_list_subscriptions_missing_key(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - iterator = api.list_subscriptions(self.PROJECT) - subscriptions = list(iterator) - next_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_token) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SUBSCRIPTIONS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_subscription_create_defaults(self): - RESOURCE = {'topic': self.TOPIC_PATH} - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create(self.SUB_PATH, self.TOPIC_PATH) - - 
self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_retain_messages(self): - import datetime - - RESOURCE = {'topic': self.TOPIC_PATH, - 'retainAckedMessages': True, - 'messageRetentionDuration': { - 'seconds': 1729, - 'nanos': 2718 * 1000 - } - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - retain_acked_messages=True, - message_retention_duration=datetime.timedelta( - seconds=1729, microseconds=2718)) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_create_explicit(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT = 'https://api.example.com/push' - RESOURCE = { - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': { - 'pushEndpoint': PUSH_ENDPOINT, - }, - } - RETURNED = RESOURCE.copy() - RETURNED['name'] = self.SUB_PATH - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_create( - self.SUB_PATH, self.TOPIC_PATH, - ack_deadline=ACK_DEADLINE, push_endpoint=PUSH_ENDPOINT) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], RESOURCE) - - def test_subscription_get(self): - ACK_DEADLINE = 90 - PUSH_ENDPOINT 
= 'https://api.example.com/push' - RETURNED = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.subscription_get(self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_delete(self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_subscription_modify_push_config(self): - PUSH_ENDPOINT = 'https://api.example.com/push' - BODY = { - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_push_config(self.SUB_PATH, PUSH_ENDPOINT) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyPushConfig' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_defaults(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - BODY = { - 
'returnImmediately': False, - 'maxMessages': 1, - } - - received = api.subscription_pull(self.SUB_PATH) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(received[0]['message']['data'], PAYLOAD) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_pull_explicit(self): - import base64 - - PAYLOAD = b'This is the message text' - B64 = base64.b64encode(PAYLOAD).decode('ascii') - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - MESSAGE = {'messageId': MSG_ID, 'data': B64, 'attributes': {'a': 'b'}} - RETURNED = { - 'receivedMessages': [{'ackId': ACK_ID, 'message': MESSAGE}], - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - MAX_MESSAGES = 10 - BODY = { - 'returnImmediately': True, - 'maxMessages': MAX_MESSAGES, - } - - received = api.subscription_pull( - self.SUB_PATH, return_immediately=True, max_messages=MAX_MESSAGES) - - self.assertEqual(received, RETURNED['receivedMessages']) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:pull' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_acknowledge(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_acknowledge(self.SUB_PATH, [ACK_ID1, ACK_ID2]) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:acknowledge' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def 
test_subscription_modify_ack_deadline(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - NEW_DEADLINE = 90 - BODY = { - 'ackIds': [ACK_ID1, ACK_ID2], - 'ackDeadlineSeconds': NEW_DEADLINE, - } - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_modify_ack_deadline( - self.SUB_PATH, [ACK_ID1, ACK_ID2], NEW_DEADLINE) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:modifyAckDeadline' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_list_snapshots_no_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot import Snapshot - - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = {'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots(self.PROJECT) - snapshots = list(iterator) - next_token = iterator.next_page_token - - self.assertIsNone(next_token) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], {}) - - def test_list_snapshots_with_paging(self): - import six - - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.snapshot 
import Snapshot - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - local_topic_path = 'projects/%s/topics/%s' % ( - self.PROJECT, self.TOPIC_NAME) - RETURNED = { - 'snapshots': [{ - 'name': local_snapshot_path, - 'topic': local_topic_path, - }], - 'nextPageToken': TOKEN2, - } - - connection = _Connection(RETURNED) - creds = _make_credentials() - client = Client(project=self.PROJECT, credentials=creds) - client._connection = connection - api = self._make_one(client) - - iterator = api.list_snapshots( - self.PROJECT, page_token=TOKEN1, page_size=SIZE) - page = six.next(iterator.pages) - snapshots = list(page) - next_token = iterator.next_page_token - - self.assertEqual(next_token, TOKEN2) - self.assertEqual(len(snapshots), 1) - snapshot = snapshots[0] - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.topic.name, self.TOPIC_NAME) - self.assertIs(snapshot._client, client) - - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s' % (self.LIST_SNAPSHOTS_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['query_params'], - {'pageToken': TOKEN1, 'pageSize': SIZE}) - - def test_subscription_seek_snapshot(self): - local_snapshot_path = 'projects/%s/snapshots/%s' % ( - self.PROJECT, self.SNAPSHOT_NAME) - RETURNED = {} - BODY = { - 'snapshot': local_snapshot_path - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek( - self.SUB_PATH, snapshot=local_snapshot_path) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_subscription_seek_time(self): - time = '12345' - RETURNED = {} - BODY = { - 'time': time - } - connection = 
_Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.subscription_seek(self.SUB_PATH, time=time) - - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:seek' % (self.SUB_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create(self): - RETURNED = { - 'name': self.SNAPSHOT_PATH, - 'subscription': self.SUB_PATH - } - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(resource, RETURNED) - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_create_already_exists(self): - from google.cloud.exceptions import NotFound - - BODY = { - 'subscription': self.SUB_PATH - } - connection = _Connection() - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - with self.assertRaises(NotFound): - resource = api.snapshot_create(self.SNAPSHOT_PATH, self.SUB_PATH) - - self.assertEqual(connection._called_with['method'], 'PUT') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], BODY) - - def test_snapshot_delete(self): - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, self.PROJECT) - api = self._make_one(client) - - api.snapshot_delete(self.SNAPSHOT_PATH) - - self.assertEqual(connection._called_with['method'], 'DELETE') - path = '/%s' % (self.SNAPSHOT_PATH,) - self.assertEqual(connection._called_with['path'], path) - - -class Test_IAMPolicyAPI(_Base): - - @staticmethod - def 
_get_target_class(): - from google.cloud.pubsub._http import _IAMPolicyAPI - - return _IAMPolicyAPI - - def test_ctor(self): - connection = _Connection() - client = _Client(connection, None) - api = self._make_one(client) - self.assertEqual(api.api_request, connection.api_request) - - def test_get_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - RETURNED = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.get_iam_policy(self.TOPIC_PATH) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'GET') - path = '/%s:getIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - - def test_set_iam_policy(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - ], - } - RETURNED 
= POLICY.copy() - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - policy = api.set_iam_policy(self.TOPIC_PATH, POLICY) - - self.assertEqual(policy, RETURNED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:setIamPolicy' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'policy': POLICY}) - - def test_test_iam_permissions(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - ALLOWED = ALL_ROLES[1:] - RETURNED = {'permissions': ALLOWED} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, ALLOWED) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - def test_test_iam_permissions_missing_key(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ALL_ROLES = [OWNER_ROLE, EDITOR_ROLE, VIEWER_ROLE] - RETURNED = {} - connection = _Connection(RETURNED) - client = _Client(connection, None) - api = self._make_one(client) - - allowed = api.test_iam_permissions(self.TOPIC_PATH, ALL_ROLES) - - self.assertEqual(allowed, []) - self.assertEqual(connection._called_with['method'], 'POST') - path = '/%s:testIamPermissions' % (self.TOPIC_PATH,) - self.assertEqual(connection._called_with['path'], path) - self.assertEqual(connection._called_with['data'], - {'permissions': ALL_ROLES}) - - -class 
Test__transform_messages_base64_empty(unittest.TestCase): - def _call_fut(self, messages, transform, key=None): - from google.cloud.pubsub._http import _transform_messages_base64 - - return _transform_messages_base64(messages, transform, key) - - def test__transform_messages_base64_empty_message(self): - from base64 import b64decode - - DATA = [{'message': {}}] - self._call_fut(DATA, b64decode, 'message') - self.assertEqual(DATA, [{'message': {}}]) - - def test__transform_messages_base64_empty_data(self): - from base64 import b64decode - - DATA = [{'message': {'data': b''}}] - self._call_fut(DATA, b64decode, 'message') - self.assertEqual(DATA, [{'message': {'data': b''}}]) - - def test__transform_messages_base64_pull(self): - from base64 import b64encode - - DATA = [{'message': {'data': b'testing 1 2 3'}}] - self._call_fut(DATA, b64encode, 'message') - self.assertEqual(DATA[0]['message']['data'], - b64encode(b'testing 1 2 3')) - - def test__transform_messages_base64_publish(self): - from base64 import b64encode - - DATA = [{'data': b'testing 1 2 3'}] - self._call_fut(DATA, b64encode) - self.assertEqual(DATA[0]['data'], b64encode(b'testing 1 2 3')) - - -class _Connection(object): - - _called_with = None - _no_response_error = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - from google.cloud.exceptions import NotFound - - self._called_with = kw - try: - response, self._responses = self._responses[0], self._responses[1:] - except IndexError: - err_class = self._no_response_error or NotFound - raise err_class('miss') - return response - - -class _Client(object): - - def __init__(self, connection, project): - self._connection = connection - self.project = project diff --git a/packages/google-cloud-pubsub/tests/unit/test_client.py b/packages/google-cloud-pubsub/tests/unit/test_client.py deleted file mode 100644 index 407683606330..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test_client.py +++ 
/dev/null @@ -1,462 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import datetime -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestClient(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.client import Client - - return Client - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_publisher_api_wo_gax(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.publisher_api - - self.assertIsInstance(api, _PublisherAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - - def test_no_gax_ctor(self): - from google.cloud.pubsub._http import _PublisherAPI - - creds = _make_credentials() - with mock.patch('google.cloud.pubsub.client._USE_GRPC', - new=True): - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - 
self.assertFalse(client._use_grpc) - api = client.publisher_api - self.assertIsInstance(api, _PublisherAPI) - - def _publisher_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def _generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxPublisherAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_publisher_api=_generated_api, - GAXPublisherAPI=_GaxPublisherAPI) - with patch: - api = client.publisher_api - - self.assertIsInstance(api, _GaxPublisherAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.publisher_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_publisher_api_w_gax(self): - self._publisher_api_w_gax_helper() - - def test_publisher_api_w_gax_and_emulator(self): - self._publisher_api_w_gax_helper(emulator=True) - - def test_subscriber_api_wo_gax(self): - from google.cloud.pubsub._http import _SubscriberAPI - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=False) - - conn = client._connection = _Connection() - api = client.subscriber_api - - self.assertIsInstance(api, _SubscriberAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - - def _subscriber_api_w_gax_helper(self, emulator=False): - from google.cloud.pubsub import _http - - wrapped = object() - _called_with = [] - - def 
_generated_api(*args, **kw): - _called_with.append((args, kw)) - return wrapped - - class _GaxSubscriberAPI(object): - - def __init__(self, _wrapped, client): - self._wrapped = _wrapped - self._client = client - - creds = _make_credentials() - client = self._make_one( - project=self.PROJECT, credentials=creds, - _use_grpc=True) - client._connection.in_emulator = emulator - - patch = mock.patch.multiple( - 'google.cloud.pubsub.client', - make_gax_subscriber_api=_generated_api, - GAXSubscriberAPI=_GaxSubscriberAPI) - with patch: - api = client.subscriber_api - - self.assertIsInstance(api, _GaxSubscriberAPI) - self.assertIs(api._wrapped, wrapped) - self.assertIs(api._client, client) - # API instance is cached - again = client.subscriber_api - self.assertIs(again, api) - if emulator: - kwargs = {'host': _http.Connection.API_BASE_URL} - else: - kwargs = {'credentials': creds} - self.assertEqual(_called_with, [((), kwargs)]) - - def test_subscriber_api_w_gax(self): - self._subscriber_api_w_gax_helper() - - def test_subscriber_api_w_gax_and_emulator(self): - self._subscriber_api_w_gax_helper(emulator=True) - - def test_iam_policy_api(self): - from google.cloud.pubsub._http import _IAMPolicyAPI - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - conn = client._connection = _Connection() - - api = client.iam_policy_api - self.assertIsInstance(api, _IAMPolicyAPI) - self.assertEqual(api.api_request, conn.api_request) - # API instance is cached - again = client.iam_policy_api - self.assertIs(again, api) - - def test_list_topics_no_paging(self): - from google.cloud.pubsub.topic import Topic - - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI(items=[Topic(self.TOPIC_NAME, client)]) - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - 
self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_topics_with_paging(self): - from google.cloud.pubsub.topic import Topic - - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI([Topic(self.TOPIC_NAME, client)], TOKEN2) - client._publisher_api = api - - iterator = client.list_topics(SIZE, TOKEN1) - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 1) - self.assertIsInstance(topics[0], Topic) - self.assertEqual(topics[0].name, self.TOPIC_NAME) - self.assertEqual(next_page_token, TOKEN2) - - self.assertEqual(api._listed_topics, (self.PROJECT, 1, TOKEN1)) - - def test_list_topics_missing_key(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxPublisherAPI() - client._publisher_api = api - - iterator = client.list_topics() - topics = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(topics), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_topics, (self.PROJECT, None, None)) - - def test_list_subscriptions_no_paging(self): - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - returned = {'subscriptions': [SUB_INFO]} - client._connection = _Connection(returned) - - iterator = client.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - # 
Check the token returned. - self.assertIsNone(next_page_token) - # Check the subscription object returned. - self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': {}, - }) - - def test_list_subscriptions_with_paging(self): - import six - from google.cloud.pubsub.subscription import Subscription - from google.cloud.pubsub.topic import Topic - - SUB_INFO = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds, - _use_grpc=False) - - # Set up the mock response. - ACK_DEADLINE = 42 - PUSH_ENDPOINT = 'https://push.example.com/endpoint' - SUB_INFO = {'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': ACK_DEADLINE, - 'pushConfig': {'pushEndpoint': PUSH_ENDPOINT}} - TOKEN1 = 'TOKEN1' - TOKEN2 = 'TOKEN2' - SIZE = 1 - returned = { - 'subscriptions': [SUB_INFO], - 'nextPageToken': TOKEN2, - } - client._connection = _Connection(returned) - - iterator = client.list_subscriptions( - SIZE, TOKEN1) - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - # Check the token returned. - self.assertEqual(next_page_token, TOKEN2) - # Check the subscription object returned. 
- self.assertEqual(len(subscriptions), 1) - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsInstance(subscription.topic, Topic) - self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - self.assertIs(subscription._client, client) - self.assertEqual(subscription.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, ACK_DEADLINE) - self.assertEqual(subscription.push_endpoint, PUSH_ENDPOINT) - - called_with = client._connection._called_with - expected_path = '/projects/%s/subscriptions' % (self.PROJECT,) - self.assertEqual(called_with, { - 'method': 'GET', - 'path': expected_path, - 'query_params': { - 'pageSize': SIZE, - 'pageToken': TOKEN1, - }, - }) - - def test_list_subscriptions_w_missing_key(self): - PROJECT = 'PROJECT' - creds = _make_credentials() - - client = self._make_one(project=PROJECT, credentials=creds) - client._connection = object() - api = client._subscriber_api = _FauxSubscriberAPI() - api._list_subscriptions_response = (), None - - subscriptions, next_page_token = client.list_subscriptions() - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - - self.assertEqual(api._listed_subscriptions, - (self.PROJECT, None, None)) - - def test_list_snapshots(self): - creds = _make_credentials() - client = self._make_one(project=self.PROJECT, credentials=creds) - client._connection = object() - api = _FauxSubscriberAPI() - response = api._list_snapshots_response = object() - client._subscriber_api = api - self.assertEqual(client.list_snapshots(), response) - self.assertEqual(api._listed_snapshots, (self.PROJECT, None, None)) - - def test_topic_factory(self): - PROJECT = 'PROJECT' - TOPIC_NAME = 'TOPIC_NAME' - creds = _make_credentials() - - client_obj = self._make_one(project=PROJECT, credentials=creds) - new_topic = client_obj.topic(TOPIC_NAME) - self.assertEqual(new_topic.name, TOPIC_NAME) - 
self.assertIs(new_topic._client, client_obj) - self.assertEqual(new_topic.project, PROJECT) - self.assertEqual(new_topic.full_name, - 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME)) - self.assertFalse(new_topic.timestamp_messages) - - def test_subscription_factory(self): - project = 'PROJECT' - creds = _make_credentials() - client_obj = self._make_one(project=project, credentials=creds) - - sub_name = 'hoot-n-holler' - ack_deadline = 60, - push_endpoint = 'https://api.example.com/push' - message_retention_duration = datetime.timedelta(3600) - new_subscription = client_obj.subscription( - sub_name, ack_deadline=ack_deadline, - push_endpoint=push_endpoint, - retain_acked_messages=True, - message_retention_duration=message_retention_duration) - - self.assertEqual(new_subscription.name, sub_name) - self.assertIsNone(new_subscription.topic) - self.assertIs(new_subscription._client, client_obj) - self.assertEqual(new_subscription.project, project) - self.assertEqual(new_subscription.ack_deadline, ack_deadline) - self.assertEqual(new_subscription.push_endpoint, push_endpoint) - self.assertTrue(new_subscription.retain_acked_messages) - self.assertEqual( - new_subscription.message_retention_duration, - message_retention_duration) - - -class _Iterator(object): - - def __init__(self, items, token): - self._items = items or () - self.next_page_token = token - - def __iter__(self): - return iter(self._items) - - -class _FauxPublisherAPI(object): - - def __init__(self, items=None, token=None): - self._items = items - self._token = token - - def list_topics(self, project, page_size, page_token): - self._listed_topics = (project, page_size, page_token) - return _Iterator(self._items, self._token) - - -class _FauxSubscriberAPI(object): - - def list_subscriptions(self, project, page_size, page_token): - self._listed_subscriptions = (project, page_size, page_token) - return self._list_subscriptions_response - - def list_snapshots(self, project, page_size, page_token): - 
self._listed_snapshots = (project, page_size, page_token) - return self._list_snapshots_response - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response diff --git a/packages/google-cloud-pubsub/tests/unit/test_iam.py b/packages/google-cloud-pubsub/tests/unit/test_iam.py deleted file mode 100644 index 475d375d0cd8..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test_iam.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2016 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - - -class TestPolicy(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.iam import Policy - - return Policy - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - empty = frozenset() - policy = self._make_one() - self.assertIsNone(policy.etag) - self.assertIsNone(policy.version) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_ctor_explicit(self): - VERSION = 17 - ETAG = 'ETAG' - empty = frozenset() - policy = self._make_one(ETAG, VERSION) - self.assertEqual(policy.etag, ETAG) - self.assertEqual(policy.version, VERSION) - self.assertEqual(policy.owners, empty) - self.assertEqual(policy.editors, empty) - self.assertEqual(policy.viewers, empty) - self.assertEqual(policy.publishers, empty) - self.assertEqual(policy.subscribers, empty) - - def test_publishers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_PUBLISHER_ROLE, - ) - PUBLISHER = 'user:phred@example.com' - expected = set([PUBLISHER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.publishers = [PUBLISHER] - - self.assertEqual(policy.publishers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_PUBLISHER_ROLE: expected}) - - def test_subscribers_setter(self): - import warnings - from google.cloud.pubsub.iam import ( - PUBSUB_SUBSCRIBER_ROLE, - ) - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - expected = set([SUBSCRIBER]) - policy = self._make_one() - with warnings.catch_warnings(): - policy.subscribers = [SUBSCRIBER] - - self.assertEqual(policy.subscribers, frozenset(expected)) - self.assertEqual( - dict(policy), {PUBSUB_SUBSCRIBER_ROLE: expected}) diff --git a/packages/google-cloud-pubsub/tests/unit/test_message.py 
b/packages/google-cloud-pubsub/tests/unit/test_message.py deleted file mode 100644 index b4f6abfbb1b2..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test_message.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - - -class TestMessage(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.message import Message - - return Message - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_ctor_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, ATTRS) - self.assertIsNone(message.service_timestamp) - - def test_timestamp_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - message = self._make_one(data=DATA, message_id=MESSAGE_ID) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def 
test_timestamp_wo_timestamp_in_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - ATTRS = {'a': 'b'} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - - def _to_fail(): - return message.timestamp - - self.assertRaises(ValueError, _to_fail) - - def test_timestamp_w_timestamp_in_attributes(self): - from datetime import datetime - from google.cloud._helpers import _RFC3339_MICROS - from google.cloud._helpers import UTC - - DATA = b'DEADBEEF' - MESSAGE_ID = b'12345' - TIMESTAMP = '2015-04-10T18:42:27.131956Z' - naive = datetime.strptime(TIMESTAMP, _RFC3339_MICROS) - timestamp = naive.replace(tzinfo=UTC) - ATTRS = {'timestamp': TIMESTAMP} - message = self._make_one(data=DATA, message_id=MESSAGE_ID, - attributes=ATTRS) - self.assertEqual(message.timestamp, timestamp) - - def test_from_api_repr_missing_data(self): - MESSAGE_ID = '12345' - api_repr = {'messageId': MESSAGE_ID} - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, b'') - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertIsNone(message.service_timestamp) - - def test_from_api_repr_no_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - } - message = self._get_target_class().from_api_repr(api_repr) - self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(message.service_timestamp, TIMESTAMP) - - def test_from_api_repr_w_attributes(self): - DATA = b'DEADBEEF' - MESSAGE_ID = '12345' - ATTRS = {'a': 'b'} - TIMESTAMP = '2016-03-18-19:38:22.001393427Z' - api_repr = { - 'data': DATA, - 'messageId': MESSAGE_ID, - 'publishTime': TIMESTAMP, - 'attributes': ATTRS, - } - message = self._get_target_class().from_api_repr(api_repr) - 
self.assertEqual(message.data, DATA) - self.assertEqual(message.message_id, MESSAGE_ID) - self.assertEqual(message.service_timestamp, TIMESTAMP) - self.assertEqual(message.attributes, ATTRS) diff --git a/packages/google-cloud-pubsub/tests/unit/test_pubsub.py b/packages/google-cloud-pubsub/tests/unit/test_pubsub.py new file mode 100644 index 000000000000..605dbddd7601 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/test_pubsub.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud import pubsub +from google.cloud import pubsub_v1 + + +def test_exported_things(): + assert pubsub.PublisherClient is pubsub_v1.PublisherClient + assert pubsub.SubscriberClient is pubsub_v1.SubscriberClient + assert pubsub.types is pubsub_v1.types diff --git a/packages/google-cloud-pubsub/tests/unit/test_snpashot.py b/packages/google-cloud-pubsub/tests/unit/test_snpashot.py deleted file mode 100644 index 5834a1fedd89..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test_snpashot.py +++ /dev/null @@ -1,215 +0,0 @@ -# Copyright 2017 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import unittest - -import mock - - -class TestSnapshot(unittest.TestCase): - PROJECT = 'PROJECT' - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'subscription_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.snapshot import Snapshot - - return Snapshot - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - client = _Client(project=self.PROJECT) - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_w_subscription(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, - subscription=subscription) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertEqual(snapshot.path, '/%s' % (self.SNAPSHOT_PATH, )) - - def test_ctor_error(self): - client = _Client(project=self.PROJECT) - subscription = _Subscription(name=self.SUB_NAME, client=client) - with 
self.assertRaises(TypeError): - snapshot = self._make_one(self.SNAPSHOT_NAME, - client=client, - subscription=subscription) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - klass = self._get_target_class() - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsInstance(snapshot.topic, Topic) - - def test_from_api_repr_w_deleted_topic(self): - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': klass._DELETED_TOPIC_PATH - } - snapshot = klass.from_api_repr(resource, client=client) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot._client, client) - self.assertEqual(snapshot.project, self.PROJECT) - self.assertEqual(snapshot.full_name, self.SNAPSHOT_PATH) - self.assertIsNone(snapshot.topic) - - def test_from_api_repr_w_topics_w_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - } - topics = {} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - topic = snapshot.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - - def test_from_api_repr_w_topics_w_topic_match(self): - from google.cloud.pubsub.topic import Topic - - client = _Client(project=self.PROJECT) - klass = self._get_target_class() - resource = { - 'name': self.SNAPSHOT_PATH, - 'topic': self.TOPIC_PATH - 
} - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - snapshot = klass.from_api_repr(resource, client=client, topics=topics) - self.assertIs(snapshot.topic, topic) - - def test_create_w_bound_client_error(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_response = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - with self.assertRaises(RuntimeError): - snapshot.create() - - def test_create_w_bound_subscription(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create() - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_create_w_bound_subscription_w_alternate_client(self): - client = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.create(client=client2) - - self.assertEqual(api._snapshot_created, (self.SNAPSHOT_PATH, self.SUB_PATH, )) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - expected_result = api._snapshot_create_response = object() - snapshot = self._make_one(self.SNAPSHOT_NAME, client=client) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - def test_delete_w_alternate_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscriberAPI() - 
expected_result = api._snapshot_create_response = object() - subscription = _Subscription(name=self.SUB_NAME, client=client) - snapshot = self._make_one(self.SNAPSHOT_NAME, subscription=subscription) - - snapshot.delete() - - self.assertEqual(api._snapshot_deleted, (self.SNAPSHOT_PATH, )) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self) - - -class _Topic(object): - - def __init__(self, name, client): - self._client = client - - -class _Subscription(object): - - def __init__(self, name, client=None): - self._client = client - self.full_name = 'projects/%s/subscriptions/%s' % ( - client.project, name, ) - - -class _FauxSubscriberAPI(object): - - def snapshot_create(self, snapshot_path, subscription_path): - self._snapshot_created = (snapshot_path, subscription_path, ) - - def snapshot_delete(self, snapshot_path): - self._snapshot_deleted = (snapshot_path, ) - - diff --git a/packages/google-cloud-pubsub/tests/unit/test_subscription.py b/packages/google-cloud-pubsub/tests/unit/test_subscription.py deleted file mode 100644 index ddf0ea439d77..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test_subscription.py +++ /dev/null @@ -1,957 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -class TestSubscription(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - SNAPSHOT_NAME = 'snapshot_name' - SNAPSHOT_PATH = 'projects/%s/snapshots/%s' % (PROJECT, SNAPSHOT_NAME) - SUB_NAME = 'sub_name' - SUB_PATH = 'projects/%s/subscriptions/%s' % (PROJECT, SUB_NAME) - DEADLINE = 42 - ENDPOINT = 'https://api.example.com/push' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import Subscription - - return Subscription - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - - def test_ctor_explicit(self): - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_ctor_w_client_wo_topic(self): - client = _Client(project=self.PROJECT) - subscription = self._make_one(self.SUB_NAME, client=client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - - def test_ctor_w_both_topic_and_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME, topic, client=client2) - - def 
test_ctor_w_neither_topic_nor_client(self): - with self.assertRaises(TypeError): - self._make_one(self.SUB_NAME) - - def test_from_api_repr_no_topics(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_deleted_topic(self): - klass = self._get_target_class() - resource = {'topic': klass._DELETED_TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIsNone(subscription.topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_no_topic_match(self): - from google.cloud.pubsub.topic import Topic - - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - topics = {} - klass = self._get_target_class() - client = _Client(project=self.PROJECT) - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - topic = subscription.topic - self.assertIsInstance(topic, Topic) - self.assertIs(topic, 
topics[self.TOPIC_PATH]) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_from_api_repr_w_topics_w_topic_match(self): - resource = {'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}} - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - topics = {self.TOPIC_PATH: topic} - klass = self._get_target_class() - subscription = klass.from_api_repr(resource, client, topics=topics) - self.assertEqual(subscription.name, self.SUB_NAME) - self.assertIs(subscription.topic, topic) - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - - def test_full_name_and_path(self): - PROJECT = 'PROJECT' - SUB_FULL = 'projects/%s/subscriptions/%s' % (PROJECT, self.SUB_NAME) - SUB_PATH = '/%s' % (SUB_FULL,) - TOPIC_NAME = 'topic_name' - CLIENT = _Client(project=PROJECT) - topic = _Topic(TOPIC_NAME, client=CLIENT) - subscription = self._make_one(self.SUB_NAME, topic) - self.assertEqual(subscription.full_name, SUB_FULL) - self.assertEqual(subscription.path, SUB_PATH) - - def test_autoack_defaults(self): - from google.cloud.pubsub.subscription import AutoAck - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack() - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_autoack_explicit(self): - from google.cloud.pubsub.subscription import AutoAck - - client1 = _Client(project=self.PROJECT) - client2 = 
_Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - auto_ack = subscription.auto_ack(True, 10, client2) - self.assertIsInstance(auto_ack, AutoAck) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, client2) - - def test_create_pull_wo_ack_deadline_w_bound_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.create() - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, None, None, None, None)) - - def test_create_push_w_ack_deadline_w_alternate_client(self): - RESPONSE = { - 'topic': self.TOPIC_PATH, - 'name': self.SUB_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT} - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_create_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.create(client=client2) - - self.assertEqual( - api._subscription_created, - (self.SUB_PATH, self.TOPIC_PATH, self.DEADLINE, self.ENDPOINT, - None, None)) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertFalse(subscription.exists()) - - 
self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_exists_hit_w_alternate_client(self): - RESPONSE = {'name': self.SUB_PATH, 'topic': self.TOPIC_PATH} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - self.assertTrue(subscription.exists(client=client2)) - - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_w_bound_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_reload_sets_topic(self): - from google.cloud.pubsub.topic import Topic - - response = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - 'ackDeadlineSeconds': self.DEADLINE, - 'pushConfig': {'pushEndpoint': self.ENDPOINT}, - } - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = response - subscription = self._make_one(self.SUB_NAME, client=client) - - self.assertIsNone(subscription.topic) - subscription.reload() - - self.assertEqual(subscription.ack_deadline, self.DEADLINE) - self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_got, self.SUB_PATH) - self.assertIsInstance(subscription.topic, Topic) - 
self.assertEqual(subscription.topic.name, self.TOPIC_NAME) - - def test_reload_w_alternate_client(self): - RESPONSE = { - 'name': self.SUB_PATH, - 'topic': self.TOPIC_PATH, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_get_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.reload(client=client2) - - self.assertIsNone(subscription.ack_deadline) - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_got, self.SUB_PATH) - - def test_delete_w_bound_client(self): - RESPONSE = {} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.delete() - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_delete_w_alternate_client(self): - RESPONSE = {} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_delete_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - self.DEADLINE, self.ENDPOINT) - - subscription.delete(client=client2) - - self.assertEqual(api._subscription_deleted, self.SUB_PATH) - - def test_modify_push_config_w_endpoint_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_push_configuration(push_endpoint=self.ENDPOINT) - - 
self.assertEqual(subscription.push_endpoint, self.ENDPOINT) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, self.ENDPOINT)) - - def test_modify_push_config_wo_endpoint_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_push_config_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic, - push_endpoint=self.ENDPOINT) - - subscription.modify_push_configuration(push_endpoint=None, - client=client2) - - self.assertIsNone(subscription.push_endpoint) - self.assertEqual(api._subscription_modified_push_config, - (self.SUB_PATH, None)) - - def test_pull_wo_return_immediately_max_messages_w_bound_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull() - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_pull_w_return_immediately_w_max_messages_w_alt_client(self): - from google.cloud.pubsub.message import Message - - ACK_ID = 'DEADBEEF' - MSG_ID = 'BEADCAFE' - PAYLOAD = b'This is the message text' - MESSAGE = {'messageId': MSG_ID, 'data': PAYLOAD, - 'attributes': {'a': 
'b'}} - REC_MESSAGE = {'ackId': ACK_ID, 'message': MESSAGE} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = [REC_MESSAGE] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=True, max_messages=3, - client=client2) - - self.assertEqual(len(pulled), 1) - ack_id, message = pulled[0] - self.assertEqual(ack_id, ACK_ID) - self.assertIsInstance(message, Message) - self.assertEqual(message.data, PAYLOAD) - self.assertEqual(message.message_id, MSG_ID) - self.assertEqual(message.attributes, {'a': 'b'}) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, True, 3)) - - def test_pull_wo_receivedMessages(self): - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_pull_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - pulled = subscription.pull(return_immediately=False) - - self.assertEqual(len(pulled), 0) - self.assertEqual(api._subscription_pulled, - (self.SUB_PATH, False, 1)) - - def test_acknowledge_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2]) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_acknowledge_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_acknowlege_response 
= {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.acknowledge([ACK_ID1, ACK_ID2], client=client2) - - self.assertEqual(api._subscription_acked, - (self.SUB_PATH, [ACK_ID1, ACK_ID2])) - - def test_modify_ack_deadline_w_bound_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline([ACK_ID1, ACK_ID2], self.DEADLINE) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_modify_ack_deadline_w_alternate_client(self): - ACK_ID1 = 'DEADBEEF' - ACK_ID2 = 'BEADCAFE' - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_modify_ack_deadline_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.modify_ack_deadline( - [ACK_ID1, ACK_ID2], self.DEADLINE, client=client2) - - self.assertEqual(api._subscription_modified_ack_deadline, - (self.SUB_PATH, [ACK_ID1, ACK_ID2], self.DEADLINE)) - - def test_snapshot(self): - from google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - snapshot = subscription.snapshot(self.SNAPSHOT_NAME) - self.assertIsInstance(snapshot, Snapshot) - self.assertEqual(snapshot.name, self.SNAPSHOT_NAME) - self.assertIs(snapshot.topic, topic) - - def test_seek_snapshot_w_bound_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client = _Client(project=self.PROJECT) - snapshot = Snapshot - 
snapshot = Snapshot(self.SNAPSHOT_NAME, client=client) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_snapshot_w_alternate_client(self): - from google.cloud.pubsub.snapshot import Snapshot - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - snapshot = Snapshot(self.SNAPSHOT_NAME, client=client1) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_snapshot(snapshot, client=client2) - - self.assertEqual(api._subscription_seeked, - (self.SUB_PATH, None, self.SNAPSHOT_PATH)) - - def test_seek_time_w_bound_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client = _Client(project=self.PROJECT) - api = client.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time) - - self.assertEqual( - api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_seek_time_w_alternate_client(self): - import datetime - - from google.cloud import _helpers - - time = datetime.time() - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.subscriber_api = _FauxSubscribererAPI() - api._subscription_seek_response = {} - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - subscription.seek_timestamp(time, client=client2) - - self.assertEqual( - 
api._subscription_seeked, - (self.SUB_PATH, _helpers._datetime_to_rfc3339(time), None)) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - 
api._get_iam_policy_response = POLICY - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = subscription.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.SUB_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = 
subscription.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.SUB_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - policy = Policy() - new_policy = subscription.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - self.assertEqual(api._set_iam_policy, (self.SUB_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - 
api._test_iam_permissions_response = ROLES[:-1] - topic = _Topic(self.TOPIC_NAME, client=client) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = _Topic(self.TOPIC_NAME, client=client1) - subscription = self._make_one(self.SUB_NAME, topic) - - allowed = subscription.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.SUB_PATH, ROLES)) - - -class _FauxSubscribererAPI(object): - - def subscription_create(self, subscription_path, topic_path, - ack_deadline=None, push_endpoint=None, - retain_acked_messages=None, - message_retention_duration=None): - self._subscription_created = ( - subscription_path, topic_path, ack_deadline, push_endpoint, - retain_acked_messages, message_retention_duration) - return self._subscription_create_response - - def subscription_get(self, subscription_path): - from google.cloud.exceptions import NotFound - - self._subscription_got = subscription_path - try: - return self._subscription_get_response - except AttributeError: - raise NotFound(subscription_path) - - def subscription_delete(self, subscription_path): - self._subscription_deleted = subscription_path - return self._subscription_delete_response - - def subscription_modify_push_config( - self, subscription_path, push_endpoint): - self._subscription_modified_push_config = ( - 
subscription_path, push_endpoint) - return self._subscription_modify_push_config_response - - def subscription_pull(self, subscription_path, return_immediately, - max_messages): - self._subscription_pulled = ( - subscription_path, return_immediately, max_messages) - return self._subscription_pull_response - - def subscription_acknowledge(self, subscription_path, ack_ids): - self._subscription_acked = (subscription_path, ack_ids) - return self._subscription_acknowlege_response - - def subscription_modify_ack_deadline(self, subscription_path, ack_ids, - ack_deadline): - self._subscription_modified_ack_deadline = ( - subscription_path, ack_ids, ack_deadline) - return self._subscription_modify_ack_deadline_response - - def subscription_seek(self, subscription_path, time=None, snapshot=None): - self._subscription_seeked = ( - subscription_path, time, snapshot) - return self._subscription_seek_response - - -class TestAutoAck(unittest.TestCase): - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.subscription import AutoAck - - return AutoAck - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - self.assertEqual(auto_ack._return_immediately, False) - self.assertEqual(auto_ack._max_messages, 1) - self.assertIsNone(auto_ack._client) - - def test_ctor_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - self.assertIs(auto_ack._subscription, subscription) - self.assertEqual(auto_ack._return_immediately, True) - self.assertEqual(auto_ack._max_messages, 10) - self.assertIs(auto_ack._client, CLIENT) - - def test___enter___w_defaults(self): - subscription = _FauxSubscription(()) - auto_ack = self._make_one(subscription) - - with auto_ack as returned: - pass - - self.assertIs(returned, 
auto_ack) - self.assertEqual(subscription._return_immediately, False) - self.assertEqual(subscription._max_messages, 1) - self.assertIsNone(subscription._client) - - def test___enter___w_explicit(self): - CLIENT = object() - subscription = _FauxSubscription(()) - auto_ack = self._make_one( - subscription, return_immediately=True, max_messages=10, - client=CLIENT) - - with auto_ack as returned: - pass - - self.assertIs(returned, auto_ack) - self.assertEqual(subscription._return_immediately, True) - self.assertEqual(subscription._max_messages, 10) - self.assertIs(subscription._client, CLIENT) - - def test___exit___(self): - CLIENT = object() - ACK_ID1, MESSAGE1 = 'ACK_ID1', _FallibleMessage() - ACK_ID2, MESSAGE2 = 'ACK_ID2', _FallibleMessage() - ACK_ID3, MESSAGE3 = 'ACK_ID3', _FallibleMessage(True) - ITEMS = [ - (ACK_ID1, MESSAGE1), - (ACK_ID2, MESSAGE2), - (ACK_ID3, MESSAGE3), - ] - subscription = _FauxSubscription(ITEMS) - auto_ack = self._make_one(subscription, client=CLIENT) - with auto_ack: - for ack_id, message in list(auto_ack.items()): - if message.fail: - del auto_ack[ack_id] - self.assertEqual(sorted(subscription._acknowledged), - [ACK_ID1, ACK_ID2]) - self.assertIs(subscription._ack_client, CLIENT) - - def test_empty_ack_no_acknowledge(self): - subscription = mock.Mock(_FauxSubscription) - subscription.pull = lambda *args: [] - - auto_ack = self._make_one(subscription) - with auto_ack: - pass - - subscription.acknowledge.assert_not_called() - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name, client): - 
self.name = name - self._client = client - self.project = client.project - self.full_name = 'projects/%s/topics/%s' % (client.project, name) - self.path = '/projects/%s/topics/%s' % (client.project, name) - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - def topic(self, name, timestamp_messages=False): - from google.cloud.pubsub.topic import Topic - - return Topic(name, client=self, timestamp_messages=timestamp_messages) - - -class _FallibleMessage(object): - - def __init__(self, fail=False): - self.fail = fail - - -class _FauxSubscription(object): - - def __init__(self, items): - self._items = items - self._mapping = dict(items) - self._acknowledged = set() - - def pull(self, return_immediately=False, max_messages=1, client=None): - self._return_immediately = return_immediately - self._max_messages = max_messages - self._client = client - return self._items - - def acknowledge(self, ack_ids, client=None): - self._ack_client = client - for ack_id in ack_ids: - message = self._mapping[ack_id] - assert not message.fail - self._acknowledged.add(ack_id) diff --git a/packages/google-cloud-pubsub/tests/unit/test_topic.py b/packages/google-cloud-pubsub/tests/unit/test_topic.py deleted file mode 100644 index 2c90432195c2..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/test_topic.py +++ /dev/null @@ -1,974 +0,0 @@ -# Copyright 2015 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import unittest - -import mock - - -def _make_credentials(): - import google.auth.credentials - - return mock.Mock(spec=google.auth.credentials.Credentials) - - -class TestTopic(unittest.TestCase): - PROJECT = 'PROJECT' - TOPIC_NAME = 'topic_name' - TOPIC_PATH = 'projects/%s/topics/%s' % (PROJECT, TOPIC_NAME) - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Topic - - return Topic - - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor_w_explicit_timestamp(self): - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, - client=client, - timestamp_messages=True) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - self.assertTrue(topic.timestamp_messages) - - def test_from_api_repr(self): - client = _Client(project=self.PROJECT) - resource = {'name': self.TOPIC_PATH} - klass = self._get_target_class() - topic = klass.from_api_repr(resource, client=client) - self.assertEqual(topic.name, self.TOPIC_NAME) - self.assertIs(topic._client, client) - self.assertEqual(topic.project, self.PROJECT) - self.assertEqual(topic.full_name, self.TOPIC_PATH) - - def test_from_api_repr_with_bad_client(self): - PROJECT1 = 'PROJECT1' - PROJECT2 = 'PROJECT2' - client = _Client(project=PROJECT1) - PATH = 'projects/%s/topics/%s' % (PROJECT2, self.TOPIC_NAME) - resource = {'name': PATH} - klass = self._get_target_class() - self.assertRaises(ValueError, klass.from_api_repr, - resource, client=client) - - def test_create_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.create() - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_create_w_alternate_client(self): - client1 = 
_Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_create_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.create(client=client2) - - self.assertEqual(api._topic_created, self.TOPIC_PATH) - - def test_exists_miss_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - self.assertFalse(topic.exists()) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_exists_hit_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_get_response = {'name': self.TOPIC_PATH} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - self.assertTrue(topic.exists(client=client2)) - - self.assertEqual(api._topic_got, self.TOPIC_PATH) - - def test_delete_w_bound_client(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client) - - topic.delete() - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_delete_w_alternate_client(self): - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_delete_response = {} - topic = self._make_one(self.TOPIC_NAME, client=client1) - - topic.delete(client=client2) - - self.assertEqual(api._topic_deleted, self.TOPIC_PATH) - - def test_publish_single_bytes_wo_attrs_w_bound_client(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic 
= self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_wo_attrs_w_add_timestamp_alt_client(self): - import datetime - from google.cloud._helpers import _RFC3339_MICROS - - NOW = datetime.datetime.utcnow() - - def _utcnow(): - return NOW - - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = { - 'data': PAYLOAD, - 'attributes': {'timestamp': NOW.strftime(_RFC3339_MICROS)}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - - topic = self._make_one(self.TOPIC_NAME, client=client1, - timestamp_messages=True) - with mock.patch('google.cloud.pubsub.topic._NOW', new=_utcnow): - msgid = topic.publish(PAYLOAD, client=client2) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_bytes_w_add_timestamp_w_ts_in_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - OVERRIDE = '2015-04-10T16:46:22.868399Z' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': OVERRIDE}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client, - timestamp_messages=True) - - msgid = topic.publish(PAYLOAD, timestamp=OVERRIDE) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_single_w_attrs(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - 
topic = self._make_one(self.TOPIC_NAME, client=client) - - msgid = topic.publish(PAYLOAD, attr1='value1', attr2='value2') - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_with_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_without_gax(self): - PAYLOAD = 'This is the message text' - MSGID = 'DEADBEEF' - MESSAGE = {'data': PAYLOAD, 'attributes': {}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID] - topic = self._make_one(self.TOPIC_NAME, client=client) - msgid = topic.publish(PAYLOAD) - - self.assertEqual(msgid, MSGID) - self.assertEqual(api._topic_published, (self.TOPIC_PATH, [MESSAGE])) - - def test_publish_multiple_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) 
- - def test_publish_w_no_messages(self): - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client) - - with topic.batch() as batch: - pass - - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._api_called, 0) - - def test_publish_multiple_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = { - 'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}, - } - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - with topic.batch(client=client2) as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (self.TOPIC_PATH, [MESSAGE1, MESSAGE2])) - - def test_publish_multiple_error(self): - PAYLOAD1 = b'This is the first message text' - PAYLOAD2 = b'This is the second message text' - client = _Client(project=self.PROJECT) - api = client.publisher_api = _FauxPublisherAPI() - topic = self._make_one(self.TOPIC_NAME, client=client) - - try: - with topic.batch() as batch: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertEqual(list(batch), []) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_subscription(self): - from google.cloud.pubsub.subscription import Subscription - - client = _Client(project=self.PROJECT) - topic = self._make_one(self.TOPIC_NAME, client=client) - - 
SUBSCRIPTION_NAME = 'subscription_name' - subscription = topic.subscription(SUBSCRIPTION_NAME) - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscription.name, SUBSCRIPTION_NAME) - self.assertIs(subscription.topic, topic) - - def test_list_subscriptions_no_paging(self): - import six - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - 'nextPageToken': TOKEN, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - page = six.next(iterator.pages) - subscriptions = list(page) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertEqual(next_page_token, TOKEN) - # Verify the mock. 
- called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_list_subscriptions_with_paging(self): - from google.cloud.pubsub.client import Client - from google.cloud.pubsub.subscription import Subscription - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - - SUB_NAME_1 = 'subscription_1' - SUB_PATH_1 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_1) - SUB_NAME_2 = 'subscription_2' - SUB_PATH_2 = 'projects/%s/subscriptions/%s' % ( - self.PROJECT, SUB_NAME_2) - SUBS_LIST = [SUB_PATH_1, SUB_PATH_2] - PAGE_SIZE = 10 - TOKEN = 'TOKEN' - - returned = { - 'subscriptions': SUBS_LIST, - } - client._connection = _Connection(returned) - - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions( - page_size=PAGE_SIZE, page_token=TOKEN) - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 2) - - subscription = subscriptions[0] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[0].name, SUB_NAME_1) - self.assertIs(subscription.topic, topic) - - subscription = subscriptions[1] - self.assertIsInstance(subscription, Subscription) - self.assertEqual(subscriptions[1].name, SUB_NAME_2) - self.assertIs(subscription.topic, topic) - - self.assertIsNone(next_page_token) - # Verify the mock. 
- called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], - {'pageSize': PAGE_SIZE, 'pageToken': TOKEN}) - - def test_list_subscriptions_missing_key(self): - from google.cloud.pubsub.client import Client - - client = Client(project=self.PROJECT, - credentials=_make_credentials(), _use_grpc=False) - client._connection = _Connection({}) - topic = self._make_one(self.TOPIC_NAME, client=client) - - iterator = topic.list_subscriptions() - subscriptions = list(iterator) - next_page_token = iterator.next_page_token - - self.assertEqual(len(subscriptions), 0) - self.assertIsNone(next_page_token) - # Verify the mock. - called_with = client._connection._called_with - self.assertEqual(len(called_with), 3) - self.assertEqual(called_with['method'], 'GET') - path = '/%s/subscriptions' % (self.TOPIC_PATH,) - self.assertEqual(called_with['path'], path) - self.assertEqual(called_with['query_params'], {}) - - def test_get_iam_policy_w_bound_client(self): - from google.cloud.pubsub.iam import ( - PUBSUB_ADMIN_ROLE, - PUBSUB_EDITOR_ROLE, - PUBSUB_VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'user:phred@example.com' - OWNER2 = 'group:cloud-logs@google.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': PUBSUB_ADMIN_ROLE, 'members': [OWNER1, OWNER2]}, - {'role': PUBSUB_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]}, - {'role': PUBSUB_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, 'members': [PUBLISHER]}, - 
{'role': PUBSUB_SUBSCRIBER_ROLE, 'members': [SUBSCRIBER]}, - ], - } - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client) - - policy = topic.get_iam_policy() - - self.assertEqual(policy.etag, 'DEADBEEF') - self.assertEqual(policy.version, 17) - self.assertEqual(sorted(policy.owners), [OWNER2, OWNER1]) - self.assertEqual(sorted(policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(policy.subscribers), [SUBSCRIBER]) - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_get_iam_policy_w_alternate_client(self): - POLICY = { - 'etag': 'ACAB', - } - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._get_iam_policy_response = POLICY - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = topic.get_iam_policy(client=client2) - - self.assertEqual(policy.etag, 'ACAB') - self.assertIsNone(policy.version) - self.assertEqual(sorted(policy.owners), []) - self.assertEqual(sorted(policy.editors), []) - self.assertEqual(sorted(policy.viewers), []) - - self.assertEqual(api._got_iam_policy, self.TOPIC_PATH) - - def test_set_iam_policy_w_bound_client(self): - import operator - from google.cloud.pubsub.iam import Policy - from google.cloud.pubsub.iam import ( - OWNER_ROLE, - EDITOR_ROLE, - VIEWER_ROLE, - PUBSUB_PUBLISHER_ROLE, - PUBSUB_SUBSCRIBER_ROLE, - ) - - OWNER1 = 'group:cloud-logs@google.com' - OWNER2 = 'user:phred@example.com' - EDITOR1 = 'domain:google.com' - EDITOR2 = 'user:phred@example.com' - VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com' - VIEWER2 = 'user:phred@example.com' - PUBLISHER = 'user:phred@example.com' - SUBSCRIBER = 'serviceAccount:1234-abcdef@service.example.com' - 
POLICY = { - 'etag': 'DEADBEEF', - 'version': 17, - 'bindings': [ - {'role': OWNER_ROLE, - 'members': [OWNER1, OWNER2]}, - {'role': EDITOR_ROLE, - 'members': [EDITOR1, EDITOR2]}, - {'role': VIEWER_ROLE, - 'members': [VIEWER1, VIEWER2]}, - {'role': PUBSUB_PUBLISHER_ROLE, - 'members': [PUBLISHER]}, - {'role': PUBSUB_SUBSCRIBER_ROLE, - 'members': [SUBSCRIBER]}, - ], - } - RESPONSE = POLICY.copy() - RESPONSE['etag'] = 'ABACABAF' - RESPONSE['version'] = 18 - - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client) - policy = Policy('DEADBEEF', 17) - policy.owners = [OWNER1, OWNER2] - policy.editors = [EDITOR1, EDITOR2] - policy.viewers = [VIEWER1, VIEWER2] - policy.publishers = [PUBLISHER] - policy.subscribers = [SUBSCRIBER] - - new_policy = topic.set_iam_policy(policy) - - self.assertEqual(new_policy.etag, 'ABACABAF') - self.assertEqual(new_policy.version, 18) - self.assertEqual(sorted(new_policy.owners), [OWNER1, OWNER2]) - self.assertEqual(sorted(new_policy.editors), [EDITOR1, EDITOR2]) - self.assertEqual(sorted(new_policy.viewers), [VIEWER1, VIEWER2]) - self.assertEqual(sorted(new_policy.publishers), [PUBLISHER]) - self.assertEqual(sorted(new_policy.subscribers), [SUBSCRIBER]) - self.assertEqual(len(api._set_iam_policy), 2) - self.assertEqual(api._set_iam_policy[0], self.TOPIC_PATH) - resource = api._set_iam_policy[1] - self.assertEqual(resource['etag'], POLICY['etag']) - self.assertEqual(resource['version'], POLICY['version']) - key = operator.itemgetter('role') - self.assertEqual( - sorted(resource['bindings'], key=key), - sorted(POLICY['bindings'], key=key)) - - def test_set_iam_policy_w_alternate_client(self): - from google.cloud.pubsub.iam import Policy - - RESPONSE = {'etag': 'ACAB'} - - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - 
api._set_iam_policy_response = RESPONSE - topic = self._make_one(self.TOPIC_NAME, client=client1) - - policy = Policy() - new_policy = topic.set_iam_policy(policy, client=client2) - - self.assertEqual(new_policy.etag, 'ACAB') - self.assertIsNone(new_policy.version) - self.assertEqual(sorted(new_policy.owners), []) - self.assertEqual(sorted(new_policy.editors), []) - self.assertEqual(sorted(new_policy.viewers), []) - - self.assertEqual(api._set_iam_policy, (self.TOPIC_PATH, {})) - - def test_check_iam_permissions_w_bound_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client = _Client(project=self.PROJECT) - api = client.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = ROLES[:-1] - topic = self._make_one(self.TOPIC_NAME, client=client) - - allowed = topic.check_iam_permissions(ROLES) - - self.assertEqual(allowed, ROLES[:-1]) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - def test_check_iam_permissions_w_alternate_client(self): - from google.cloud.pubsub.iam import OWNER_ROLE - from google.cloud.pubsub.iam import EDITOR_ROLE - from google.cloud.pubsub.iam import VIEWER_ROLE - - ROLES = [VIEWER_ROLE, EDITOR_ROLE, OWNER_ROLE] - client1 = _Client(project=self.PROJECT) - client2 = _Client(project=self.PROJECT) - api = client2.iam_policy_api = _FauxIAMPolicy() - api._test_iam_permissions_response = [] - topic = self._make_one(self.TOPIC_NAME, client=client1) - - allowed = topic.check_iam_permissions(ROLES, client=client2) - - self.assertEqual(len(allowed), 0) - self.assertEqual(api._tested_iam_permissions, - (self.TOPIC_PATH, ROLES)) - - -class TestBatch(unittest.TestCase): - PROJECT = 'PROJECT' - - @staticmethod - def _get_target_class(): - from google.cloud.pubsub.topic import Batch - - return Batch - - def _make_one(self, *args, **kwargs): - return 
self._get_target_class()(*args, **kwargs) - - def test_ctor_defaults(self): - topic = _Topic() - client = _Client(project=self.PROJECT) - batch = self._make_one(topic, client) - self.assertIs(batch.topic, topic) - self.assertIs(batch.client, client) - self.assertEqual(len(batch.messages), 0) - self.assertEqual(len(batch.message_ids), 0) - - def test___iter___empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - self.assertEqual(list(batch), []) - - def test___iter___non_empty(self): - topic = _Topic() - client = object() - batch = self._make_one(topic, client) - batch.message_ids[:] = ['ONE', 'TWO', 'THREE'] - self.assertEqual(list(batch), ['ONE', 'TWO', 'THREE']) - - def test_publish_bytes_wo_attrs(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {}} - client = _Client(project=self.PROJECT) - topic = _Topic() - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_publish_bytes_w_add_timestamp(self): - PAYLOAD = 'This is the message text' - MESSAGE = {'data': PAYLOAD, - 'attributes': {'timestamp': 'TIMESTAMP'}} - client = _Client(project=self.PROJECT) - topic = _Topic(timestamp_messages=True) - batch = self._make_one(topic, client=client) - batch.publish(PAYLOAD) - self.assertEqual(batch.messages, [MESSAGE]) - - def test_commit_w_bound_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, - 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - 
batch.commit() - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_commit_w_alternate_client(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client1 = _Client(project='PROJECT') - client2 = _Client(project='PROJECT') - api = client2.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client1) - - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - batch.commit(client=client2) - - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_success(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 'This is the second message text' - MSGID1 = 'DEADBEEF' - MSGID2 = 'BEADCAFE' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - api._topic_publish_response = [MSGID1, MSGID2] - topic = _Topic() - batch = self._make_one(topic, client=client) - - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - - self.assertIs(other, batch) - self.assertEqual(list(batch), [MSGID1, MSGID2]) - self.assertEqual(list(batch.messages), []) - self.assertEqual(api._topic_published, - (topic.full_name, [MESSAGE1, MESSAGE2])) - - def test_context_mgr_failure(self): - PAYLOAD1 = 'This is the first message text' - PAYLOAD2 = 
'This is the second message text' - MESSAGE1 = {'data': PAYLOAD1, 'attributes': {}} - MESSAGE2 = {'data': PAYLOAD2, - 'attributes': {'attr1': 'value1', 'attr2': 'value2'}} - client = _Client(project='PROJECT') - api = client.publisher_api = _FauxPublisherAPI() - topic = _Topic() - batch = self._make_one(topic, client=client) - - try: - with batch as other: - batch.publish(PAYLOAD1) - batch.publish(PAYLOAD2, attr1='value1', attr2='value2') - raise _Bugout() - except _Bugout: - pass - - self.assertIs(other, batch) - self.assertEqual(list(batch), []) - self.assertEqual(list(batch.messages), [MESSAGE1, MESSAGE2]) - self.assertEqual(getattr(api, '_topic_published', self), self) - - def test_batch_messages(self): - # Establish that a batch actually batches messsages in the expected - # way. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Check the contents of the batch. - self.assertEqual(batch.messages, [ - {'data': 'Batch message 0.', 'attributes': {}}, - {'data': 'Batch message 1.', 'attributes': {}}, - {'data': 'Batch message 2.', 'attributes': {}}, - {'data': 'Batch message 3.', 'attributes': {}}, - ]) - - def test_message_count_autocommit(self): - # Establish that if the batch is assigned to take a maximum - # number of messages, that it commits when it reaches that maximum. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_messages=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish four messages and establish that the batch does - # not commit. - for i in range(0, 4): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Publish a fifth message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - @mock.patch('time.time') - def test_message_time_autocommit(self, mock_time): - # Establish that if the batch is sufficiently old, that it commits - # the next time it receives a publish. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. - Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - mock_time.return_value = 0.0 - with self._make_one(topic, client=client, max_interval=5) as batch: - self.assertIsInstance(batch, Batch) - - # Publish some messages and establish that the batch does - # not commit. - for i in range(0, 10): - batch.publish('Batch message %d.' % (i,)) - commit.assert_not_called() - - # Move time ahead so that this batch is too old. - mock_time.return_value = 10.0 - - # Publish another message and observe the commit. - batch.publish('The final call to trigger a commit!') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - def test_message_size_autocommit(self): - # Establish that if the batch is sufficiently large, that it - # auto-commits. - client = _Client(project='PROJECT') - topic = _Topic(name='TOPIC') - - # Track commits, but do not perform them. 
- Batch = self._get_target_class() - with mock.patch.object(Batch, 'commit') as commit: - with self._make_one(topic, client=client, max_size=100) as batch: - self.assertIsInstance(batch, Batch) - - # Publish a short (< 100 bytes) message and establish that - # the batch does not commit. - batch.publish(b'foo') - commit.assert_not_called() - - # Publish another message and observe the commit. - batch.publish(u'The final call to trigger a commit, because ' - u'this message is sufficiently long.') - commit.assert_called_once_with() - - # There should be a second commit after the context manager - # exits. - self.assertEqual(commit.call_count, 2) - - -class _FauxPublisherAPI(object): - _api_called = 0 - - def topic_create(self, topic_path): - self._topic_created = topic_path - return self._topic_create_response - - def topic_get(self, topic_path): - from google.cloud.exceptions import NotFound - - self._topic_got = topic_path - try: - return self._topic_get_response - except AttributeError: - raise NotFound(topic_path) - - def topic_delete(self, topic_path): - self._topic_deleted = topic_path - return self._topic_delete_response - - def topic_publish(self, topic_path, messages): - self._topic_published = topic_path, messages - self._api_called += 1 - return self._topic_publish_response - - -class _FauxIAMPolicy(object): - - def get_iam_policy(self, target_path): - self._got_iam_policy = target_path - return self._get_iam_policy_response - - def set_iam_policy(self, target_path, policy): - self._set_iam_policy = target_path, policy - return self._set_iam_policy_response - - def test_iam_permissions(self, target_path, permissions): - self._tested_iam_permissions = target_path, permissions - return self._test_iam_permissions_response - - -class _Topic(object): - - def __init__(self, name="NAME", project="PROJECT", - timestamp_messages=False): - self.full_name = 'projects/%s/topics/%s' % (project, name) - self.path = '/%s' % (self.full_name,) - self.timestamp_messages = 
timestamp_messages - - def _timestamp_message(self, attrs): - if self.timestamp_messages: - attrs['timestamp'] = 'TIMESTAMP' - - -class _Client(object): - - connection = None - - def __init__(self, project): - self.project = project - - -class _Bugout(Exception): - pass - - -class _Connection(object): - - _called_with = None - - def __init__(self, *responses): - self._responses = responses - - def api_request(self, **kw): - self._called_with = kw - response, self._responses = self._responses[0], self._responses[1:] - return response From 5b4f1e8cbfa53a5d9941ac1eab4be0ee58a1f534 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 10:47:34 -0700 Subject: [PATCH 0121/1197] Maek Pub/Sub as beta; bump version number. (#3862) --- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 91bbeb8e2a8c..8f1f31ed6d4f 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -35,7 +35,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 3 - Alpha', + 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -60,7 +60,7 @@ setup( name='google-cloud-pubsub', - version='0.27.0', + version='0.28.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 68feb6dadacc858d0bf4eafdd8c01e78f091e584 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 13:42:33 -0700 Subject: [PATCH 0122/1197] Re-add core dependency (#3865) --- packages/google-cloud-pubsub/setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8f1f31ed6d4f..85f001dfbd09 100644 --- 
a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,6 +51,7 @@ REQUIREMENTS = [ + 'google-cloud-core >= 0.27.0, < 0.28dev', 'google-gax >= 0.15.13, < 0.16dev', 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', @@ -60,7 +61,7 @@ setup( name='google-cloud-pubsub', - version='0.28.0', + version='0.28.1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From d5071ac460b94dc4cffac603d1527fe8d391cc10 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 15:09:57 -0700 Subject: [PATCH 0123/1197] Documentation fix. (#3876) --- packages/google-cloud-pubsub/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 75611b1ff296..26338cf07491 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -45,7 +45,7 @@ independently written applications. See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect to Cloud Pub/Sub using this Client Library. -.. _Pub/Sub documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/usage.html +.. _Pub/Sub documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/index.html To get started with this API, you'll need to create From 552fc011eee1480324f6df23c20db2a95ae05f77 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 16:17:37 -0700 Subject: [PATCH 0124/1197] Update README.rst sample. 
(#3879) --- packages/google-cloud-pubsub/README.rst | 63 ++++++++++++++++++++++--- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 57 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 26338cf07491..d5b06d9020e0 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -47,18 +47,67 @@ to Cloud Pub/Sub using this Client Library. .. _Pub/Sub documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/index.html -To get started with this API, you'll need to create -.. code:: python +Publishing +---------- +To publish data to Cloud Pub/Sub you must create a topic, and then publish +messages to it + +.. code-block:: python + + import os from google.cloud import pubsub - client = pubsub.Client() - topic = client.topic('topic_name') - topic.create() + publisher = pubsub.PublisherClient() + topic = 'projects/{project_id}/topics/{topic}'.format( + project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + topic='MY_TOPIC_NAME', # Set this to something appropriate. + ) + publisher.create_topic() + publisher.publish(topic, b'My first message!', spam='eggs') + +To learn more, consult the :doc:`publishing documentation`_. + +.. _publishing documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/publisher/index.html + + +Subscribing +----------- + +To subscribe to data in Cloud Pub/Sub, you create a subscription based on +the topic, and subscribe to that. + +.. code-block:: python + + import os + from google.cloud import pubsub + + subscriber = pubsub.SubscriberClient() + topic = 'projects/{project_id}/topics/{topic}'.format( + project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + topic='MY_TOPIC_NAME', # Set this to something appropriate. 
+ ) + subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format( + project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), + sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. + ) + subscription = subscriber.create_subscription(topic, subscription) + +The subscription is opened asychronously, and messages are processed by +use of a callback. + +.. code-block:: python + + def callback(message): + print(message.data) + message.ack() + subscription.open(callback) + +To learn more, consult the :doc:`subscriber documentation`_. + +.. _subscriber documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/subscriber/index.html - topic.publish('this is the message_payload', - attr1='value1', attr2='value2') .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 85f001dfbd09..a8e17f6c48e1 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -61,7 +61,7 @@ setup( name='google-cloud-pubsub', - version='0.28.1', + version='0.28.2', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 54a034908d3acea79bce944c32c0fab54caeb7fb Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 Aug 2017 22:56:18 -0700 Subject: [PATCH 0125/1197] Fix RST ambiguity. --- packages/google-cloud-pubsub/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index d5b06d9020e0..00af83948f27 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -67,7 +67,7 @@ messages to it publisher.create_topic() publisher.publish(topic, b'My first message!', spam='eggs') -To learn more, consult the :doc:`publishing documentation`_. 
+To learn more, consult the `publishing documentation`_. .. _publishing documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/publisher/index.html @@ -104,7 +104,7 @@ use of a callback. message.ack() subscription.open(callback) -To learn more, consult the :doc:`subscriber documentation`_. +To learn more, consult the `subscriber documentation`_. .. _subscriber documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/subscriber/index.html From 71f1866f7c54c68a174a94678f3d1303dcceda05 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 5 Sep 2017 14:25:05 -0700 Subject: [PATCH 0126/1197] Pubsub 0.28.3 (#3921) --- packages/google-cloud-pubsub/setup.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index a8e17f6c48e1..5316b5363469 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -61,12 +61,16 @@ setup( name='google-cloud-pubsub', - version='0.28.2', + version='0.28.3', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ 'google', 'google.cloud', + 'google.cloud.gapic', + 'google.cloud.gapic.pubsub', + 'google.cloud.proto', + 'google.cloud.proto.pubsub', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, From 2328182311ff39b8d59896ee909cc68a1904b814 Mon Sep 17 00:00:00 2001 From: Gary Krige Date: Wed, 6 Sep 2017 20:07:21 +0200 Subject: [PATCH 0127/1197] Use queue if provided in kwargs (#3924) Fix for a new Queue being instantiated regardless of whether a queue is passed in or not. 
--- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index df0f965748de..bdca8dec004b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -61,7 +61,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), # Create a queue for keeping track of shared state. if queue is None: queue = Queue() - self._request_queue = Queue() + self._request_queue = queue # Call the superclass constructor. super(Policy, self).__init__( From 9b0f5dd2809e431d42940c7780743fb3d7040ca5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 2 Oct 2017 10:15:57 -0700 Subject: [PATCH 0128/1197] Avoiding `grpcio==1.6.0` in deps. (#4096) This is due to `google-gax` doing the same, which has broken RTD builds: https://readthedocs.org/projects/google-cloud-python/builds/6063446/ The motivation for avoiding `grpcio==1.6.0` is: https://github.com/grpc/grpc/issues/12455 --- packages/google-cloud-pubsub/setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 5316b5363469..35235ba30cc6 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -53,9 +53,9 @@ REQUIREMENTS = [ 'google-cloud-core >= 0.27.0, < 0.28dev', 'google-gax >= 0.15.13, < 0.16dev', - 'googleapis-common-protos[grpc] >= 1.5.2, < 2.0dev', + 'googleapis-common-protos[grpc] >= 1.5.3, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', - 'grpcio >= 1.0.2, < 2.0dev', + 'grpcio >= 1.2.0, < 1.6dev', 'psutil >= 5.2.2, < 6.0dev', ] From bc8b92822bc04162b594d9f39c322123ad63caf8 Mon Sep 17 00:00:00 2001 From: Danny Hermes 
Date: Tue, 3 Oct 2017 13:02:49 -0700 Subject: [PATCH 0129/1197] Fixing virutal->virtual typo. (#4108) Done via: $ git grep -l virutal | xargs sed -i s/virutal/virtual/g --- packages/google-cloud-pubsub/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index c860e0741fe6..b3df2538156b 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -61,7 +61,7 @@ def system_tests(session, python_version): session.virtualenv_dirname = 'sys-' + python_version # Install all test dependencies, then install this package into the - # virutalenv's dist-packages. + # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) session.install('../test_utils/') session.install('.') From c93b23e57adc21e2022f532eb06c7c5f761ffb47 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 4 Oct 2017 12:45:40 -0700 Subject: [PATCH 0130/1197] Removing `googleapis-common-protos` from deps in non-`core` packages. (#4098) * Removing `googleapis-common-protos` from deps in non-`core` packages. Also - removing `grpcio` from non-`core` packages. - manually specifying the `grpcio` dep in core (rather than getting it from `googleapis-common-protos[grpc]`) * Making `grpc` an extra for `core`. * Adding `googleapis-common-protos` back to `videointelligence`. 
--- packages/google-cloud-pubsub/setup.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 35235ba30cc6..702b99849db1 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,11 +51,9 @@ REQUIREMENTS = [ - 'google-cloud-core >= 0.27.0, < 0.28dev', + 'google-cloud-core[grpc] >= 0.27.1, < 0.28dev', 'google-gax >= 0.15.13, < 0.16dev', - 'googleapis-common-protos[grpc] >= 1.5.3, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', - 'grpcio >= 1.2.0, < 1.6dev', 'psutil >= 5.2.2, < 6.0dev', ] From 6324c39b41fa3e1e040c8074bbd3e45b58947c3b Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 12 Oct 2017 17:13:19 -0700 Subject: [PATCH 0131/1197] s/gcloud-common/google-cloud-common/g (#4180) The gcloud-common repo moved to https://github.com/GoogleCloudPlatform/google-cloud-common --- packages/google-cloud-pubsub/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 00af83948f27..eebd6df78f58 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -27,7 +27,7 @@ learn more. You may also find the `authentication document`_ shared by all the ``google-cloud-*`` libraries to be helpful. .. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html -.. _authentication document: https://github.com/GoogleCloudPlatform/gcloud-common/tree/master/authentication +.. 
_authentication document: https://github.com/GoogleCloudPlatform/google-cloud-common/tree/master/authentication Using the API ------------- From 7346d9c9d6f97044234b972dd4702abacf827738 Mon Sep 17 00:00:00 2001 From: Jeff Payne Date: Fri, 13 Oct 2017 07:09:33 -0700 Subject: [PATCH 0132/1197] Move debug logging call in Future.add_done_callback (#4174) --- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index bdca8dec004b..b53706083fe0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -30,6 +30,11 @@ logger = logging.getLogger(__name__) +def _callback_completed(future): + """Simple callback that just logs a `Future`'s result.""" + logger.debug('Result: %s', future.result()) + + class Policy(base.BasePolicy): """A consumer class based on :class:`threading.Thread`. 
@@ -144,4 +149,4 @@ def on_response(self, response): logger.debug(self._callback) message = Message(msg.message, msg.ack_id, self._request_queue) future = self._executor.submit(self._callback, message) - logger.debug('Result: %s' % future.result()) + future.add_done_callback(_callback_completed) From f17c185e5fa67157d9b73ca2391673bf7963eabc Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Fri, 13 Oct 2017 07:49:29 -0700 Subject: [PATCH 0133/1197] Pub/Sub version bump: 0.28.4 (#4183) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 702b99849db1..42c98394dd8d 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.28.3', + version='0.28.4', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 14398edb71caf7309a6fe89c4d921bcaa1ff59bc Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Fri, 13 Oct 2017 13:46:24 -0700 Subject: [PATCH 0134/1197] Update Docs with Python Setup Guide (#4187) --- packages/google-cloud-pubsub/README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index eebd6df78f58..b883698996fc 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -18,6 +18,10 @@ Quick Start $ pip install --upgrade google-cloud-pubsub +Fore more information on setting up your Python development environment, such as installing ``pip`` and on your system, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform. + +.. 
_Python Development Environment Setup Guide: https://cloud.google.com/python/setup + Authentication -------------- From 5f8e65fdeb8a87d0166c79c798aaeb6fd016fd8b Mon Sep 17 00:00:00 2001 From: Jeff Payne Date: Tue, 17 Oct 2017 07:52:18 -0700 Subject: [PATCH 0135/1197] Add PubSub system test for fix in #4174 (#4190) --- packages/google-cloud-pubsub/tests/system.py | 64 ++++++++++++++++++- .../subscriber/test_policy_thread.py | 35 ++++++++-- 2 files changed, 92 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 02666eae676a..ec38927f3a9d 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import datetime import time import uuid @@ -103,4 +104,65 @@ def test_subscribe_to_messages(): assert callback.call_count >= 50 finally: publisher.delete_topic(topic_name) - subscriber.delete_subscription(sub_name) + + +def test_subscribe_to_messages_async_callbacks(): + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_name = _resource_name('topic') + sub_name = _resource_name('subscription') + + try: + # Create a topic. + publisher.create_topic(topic_name) + + # Subscribe to the topic. This must happen before the messages + # are published. + subscriber.create_subscription(sub_name, topic_name) + subscription = subscriber.subscribe(sub_name) + + # Publish some messages. + futures = [publisher.publish( + topic_name, + b'Wooooo! The claaaaaw!', + num=str(i), + ) for i in range(0, 2)] + + # Make sure the publish completes. + [f.result() for f in futures] + + # We want to make sure that the callback was called asynchronously. So + # track when each call happened and make sure below. + call_times = [] + + def process_message(message): + # list.append() is thread-safe. 
+ call_times.append(datetime.datetime.now()) + time.sleep(2) + message.ack() + + callback = mock.Mock(wraps=process_message) + side_effect = mock.Mock() + callback.side_effect = side_effect + + # Actually open the subscription and hold it open for a few seconds. + subscription.open(callback) + for second in range(0, 5): + time.sleep(4) + + # The callback should have fired at least two times, but it may + # take some time. + if callback.call_count >= 2 and side_effect.call_count >= 2: + first = min(call_times[:2]) + last = max(call_times[:2]) + diff = last - first + # "Ensure" the first two callbacks were executed asynchronously + # (sequentially would have resulted in a difference of 2+ + # seconds). + assert diff.days == 0 + assert diff.seconds < 2 + + # Okay, we took too long; fail out. + assert callback.call_count >= 2 + finally: + publisher.delete_topic(topic_name) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 76aec184815e..c86b54e4edd2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -94,8 +94,15 @@ def test_on_exception_other(): def test_on_response(): callback = mock.Mock(spec=()) + # Create mock ThreadPoolExecutor, pass into create_policy(), and verify + # that both executor.submit() and future.add_done_callback are called + # twice. + future = mock.Mock() + attrs = {'submit.return_value': future} + executor = mock.Mock(**attrs) + # Set up the policy. - policy = create_policy() + policy = create_policy(executor=executor) policy._callback = callback # Set up the messages to send. @@ -112,9 +119,25 @@ def test_on_response(): ], ) - # Actually run the method and prove that the callback was - # called in the expected way. 
+ # Actually run the method and prove that executor.submit and + # future.add_done_callback were called in the expected way. policy.on_response(response) - assert callback.call_count == 2 - for call in callback.mock_calls: - assert isinstance(call[1][0], message.Message) + + submit_calls = [m for m in executor.method_calls if m[0] == 'submit'] + assert len(submit_calls) == 2 + for call in submit_calls: + assert call[1][0] == callback + assert isinstance(call[1][1], message.Message) + + add_done_callback_calls = [ + m for m in future.method_calls if m[0] == 'add_done_callback'] + assert len(add_done_callback_calls) == 2 + for call in add_done_callback_calls: + assert call[1][0] == thread._callback_completed + + +def test__callback_completed(): + future = mock.Mock() + thread._callback_completed(future) + result_calls = [m for m in future.method_calls if m[0] == 'result'] + assert len(result_calls) == 1 From 8943189e12b8a89151f79578691b8ea45774b447 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 18 Oct 2017 15:36:57 -0700 Subject: [PATCH 0136/1197] Replace usage of google.api.core with google.api_core (#4221) * Remove api.core packages from google.cloud.core, make google.cloud.core depend on api_core. 
* s/google.api.core/google.api_core/g and nox updates * Fixing core tests, addressing review feedback * Fix bigquery --- .../google/cloud/pubsub_v1/publisher/exceptions.py | 2 +- .../google/cloud/pubsub_v1/publisher/futures.py | 4 ++-- packages/google-cloud-pubsub/nox.py | 5 ++++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index bae090ceb9d7..8de6f660e4e8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -16,7 +16,7 @@ from concurrent.futures import TimeoutError -from google.api.core.exceptions import GoogleAPICallError +from google.api_core.exceptions import GoogleAPICallError class PublishError(GoogleAPICallError): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index cbc67d9e55c3..c926bb6e577f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -16,11 +16,11 @@ import threading -import google.api.core.future +import google.api_core.future from google.cloud.pubsub_v1.publisher import exceptions -class Future(google.api.core.future.Future): +class Future(google.api_core.future.Future): """Encapsulation of the asynchronous execution of an action. 
This object is returned from asychronous Pub/Sub calls, and is the diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index b3df2538156b..b938a9019a26 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -19,7 +19,10 @@ import nox -LOCAL_DEPS = ('../core/',) +LOCAL_DEPS = ( + os.path.join('..', 'api_core'), + os.path.join('..', 'core'), +) @nox.session From 880c4f6a80622cc629eae9a92fb0e38ff988318a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 26 Oct 2017 09:43:41 -0700 Subject: [PATCH 0137/1197] Honor max_messages always. (#4262) * Honor max_messages always. Fixes a bug where `max_messages` is not honored when autocommit is on. * Remove pdb comment. --- .../cloud/pubsub_v1/publisher/batch/base.py | 1 - .../cloud/pubsub_v1/publisher/batch/thread.py | 2 ++ .../pubsub_v1/publisher/batch/test_thread.py | 21 +++++++++++++++++++ 3 files changed, 23 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 61eea2bb9ad5..b5a23a681bc1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -15,7 +15,6 @@ from __future__ import absolute_import import abc -import enum import six diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index f5c08a76f315..b684c6e45e97 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -237,6 +237,8 @@ def publish(self, message): # Store the actual message in the batch's message queue. 
self._messages.append(message) + if len(self._messages) >= self.settings.max_messages: + self.commit() # Return a Future. That future needs to be aware of the status # of this batch. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 00b761f52b96..f731e2a151bb 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -189,6 +189,27 @@ def test_publish(): assert batch.size > 0 # I do not always trust protobuf. +def test_publish_max_messages(): + batch = create_batch(max_messages=4) + messages = ( + types.PubsubMessage(data=b'foobarbaz'), + types.PubsubMessage(data=b'spameggs'), + types.PubsubMessage(data=b'1335020400'), + ) + + # Publish each of the messages, which should save them to the batch. + with mock.patch.object(batch, 'commit') as commit: + for message in messages: + batch.publish(message) + + # Commit should not yet have been called. + assert commit.call_count == 0 + + # When a fourth message is published, commit should be called. + batch.publish(types.PubsubMessage(data=b'last one')) + commit.assert_called_once_with() + + def test_publish_dict(): batch = create_batch() batch.publish({'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) From 8cd03d72c99066edbe83ab7ee606cc3d853c7647 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 26 Oct 2017 15:00:58 -0700 Subject: [PATCH 0138/1197] Add futures for subscriptions. 
(#4265) --- .../google/cloud/pubsub_v1/exceptions.py | 22 +++ .../google/cloud/pubsub_v1/futures.py | 176 ++++++++++++++++++ .../cloud/pubsub_v1/publisher/batch/base.py | 2 +- .../cloud/pubsub_v1/publisher/batch/thread.py | 2 +- .../cloud/pubsub_v1/publisher/exceptions.py | 3 +- .../cloud/pubsub_v1/publisher/futures.py | 161 ++-------------- .../cloud/pubsub_v1/subscriber/futures.py | 54 ++++++ .../cloud/pubsub_v1/subscriber/policy/base.py | 16 ++ .../pubsub_v1/subscriber/policy/thread.py | 29 ++- .../unit/pubsub_v1/publisher/test_futures.py | 5 +- .../subscriber/test_futures_subscriber.py | 44 +++++ .../subscriber/test_policy_thread.py | 15 ++ 12 files changed, 376 insertions(+), 153 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py new file mode 100644 index 000000000000..b902978cfd60 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py @@ -0,0 +1,22 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +from concurrent.futures import TimeoutError + + +__all__ = ( + 'TimeoutError', +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py new file mode 100644 index 000000000000..7b5f4df9534f --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -0,0 +1,176 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import threading + +import google.api_core.future +from google.cloud.pubsub_v1.publisher import exceptions + + +class Future(google.api_core.future.Future): + """Encapsulation of the asynchronous execution of an action. + + This object is returned from asychronous Pub/Sub calls, and is the + interface to determine the status of those calls. + + This object should not be created directly, but is returned by other + methods in this library. + """ + _SENTINEL = object() + + def __init__(self): + self._result = self._SENTINEL + self._exception = self._SENTINEL + self._callbacks = [] + self._completed = threading.Event() + + def cancel(self): + """Actions in Pub/Sub generally may not be canceled. + + This method always returns False. + """ + return False + + def cancelled(self): + """Actions in Pub/Sub generally may not be canceled. + + This method always returns False. 
+ """ + return False + + def running(self): + """Actions in Pub/Sub generally may not be canceled. + + Returns: + bool: ``True`` if this method has not yet completed, or + ``False`` if it has completed. + """ + if self.done(): + return False + return True + + def done(self): + """Return True the future is done, False otherwise. + + This still returns True in failure cases; checking :meth:`result` or + :meth:`exception` is the canonical way to assess success or failure. + """ + return (self._exception is not self._SENTINEL or + self._result is not self._SENTINEL) + + def result(self, timeout=None): + """Return the message ID, or raise an exception. + + This blocks until the message has successfully been published, and + returns the message ID. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Returns: + str: The message ID. + + Raises: + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying + call execution. + """ + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self._result + raise err + + def exception(self, timeout=None): + """Return the exception raised by the call, if any. + + This blocks until the message has successfully been published, and + returns the exception. If the call succeeded, return None. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Raises: + TimeoutError: If the request times out. + + Returns: + Exception: The exception raised by the call, if any. + """ + # Wait until the future is done. + if not self._completed.wait(timeout=timeout): + raise exceptions.TimeoutError('Timed out waiting for result.') + + # If the batch completed successfully, this should return None. 
+ if self._result is not self._SENTINEL: + return None + + # Okay, this batch had an error; this should return it. + return self._exception + + def add_done_callback(self, fn): + """Attach the provided callable to the future. + + The provided function is called, with this future as its only argument, + when the future finishes running. + """ + if self.done(): + return fn(self) + self._callbacks.append(fn) + + def set_result(self, result): + """Set the result of the future to the provided result. + + Args: + result (Any): The result + """ + # Sanity check: A future can only complete once. + if self.done(): + raise RuntimeError('set_result can only be called once.') + + # Set the result and trigger the future. + self._result = result + self._trigger() + + def set_exception(self, exception): + """Set the result of the future to the given exception. + + Args: + exception (:exc:`Exception`): The exception raised. + """ + # Sanity check: A future can only complete once. + if self.done(): + raise RuntimeError('set_exception can only be called once.') + + # Set the exception and trigger the future. + self._exception = exception + self._trigger() + + def _trigger(self): + """Trigger all callbacks registered to this Future. + + This method is called internally by the batch once the batch + completes. + + Args: + message_id (str): The message ID, as a string. + """ + self._completed.set() + for callback in self._callbacks: + callback(self) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index b5a23a681bc1..bad45ff782a7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -128,7 +128,7 @@ def publish(self, message): message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. 
Returns: - ~.pubsub_v1.publisher.batch.mp.Future: An object conforming to the + ~google.api_core.future.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index b684c6e45e97..f7171e64d2d4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -224,7 +224,7 @@ def publish(self, message): message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. Returns: - ~.pubsub_v1.publisher.futures.Future: An object conforming to + ~google.api_core.future.Future: An object conforming to the :class:`concurrent.futures.Future` interface. """ # Coerce the type, just in case. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index 8de6f660e4e8..079eccd1f07a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -14,9 +14,8 @@ from __future__ import absolute_import -from concurrent.futures import TimeoutError - from google.api_core.exceptions import GoogleAPICallError +from google.cloud.pubsub_v1.exceptions import TimeoutError class PublishError(GoogleAPICallError): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index c926bb6e577f..a1273bb76e62 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -14,156 +14,27 @@ from __future__ import absolute_import -import threading 
+from google.cloud.pubsub_v1 import futures -import google.api_core.future -from google.cloud.pubsub_v1.publisher import exceptions - -class Future(google.api_core.future.Future): +class Future(futures.Future): """Encapsulation of the asynchronous execution of an action. - This object is returned from asychronous Pub/Sub calls, and is the - interface to determine the status of those calls. + This object is returned from asychronous Pub/Sub publishing calls, and is + the interface to determine the status of those calls. This object should not be created directly, but is returned by other methods in this library. """ - def __init__(self): - self._callbacks = [] - self._result = None - self._exception = None - self._completed = threading.Event() - - def cancel(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns False. - """ - return False - - def cancelled(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns False. - """ - return False - - def running(self): - """Publishes in Pub/Sub currently may not be canceled. - - This method always returns True. - """ - return True - - def done(self): - """Return True if the publish has completed, False otherwise. - - This still returns True in failure cases; checking :meth:`result` or - :meth:`exception` is the canonical way to assess success or failure. - """ - return self._exception is not None or self._result is not None - - def result(self, timeout=None): - """Return the message ID, or raise an exception. - - This blocks until the message has successfully been published, and - returns the message ID. - - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Returns: - str: The message ID. - - Raises: - ~.pubsub_v1.TimeoutError: If the request times out. - Exception: For undefined exceptions in the underlying - call execution. 
- """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._result - raise err - - def exception(self, timeout=None, _wait=1): - """Return the exception raised by the call, if any. - - This blocks until the message has successfully been published, and - returns the exception. If the call succeeded, return None. - - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - TimeoutError: If the request times out. - - Returns: - Exception: The exception raised by the call, if any. - """ - # Wait until the future is done. - if not self._completed.wait(timeout=timeout): - raise exceptions.TimeoutError('Timed out waiting for result.') - - # If the batch completed successfully, this should return None. - if self._result is not None: - return None - - # Okay, this batch had an error; this should return it. - return self._exception - - def add_done_callback(self, fn): - """Attach the provided callable to the future. - - The provided function is called, with this future as its only argument, - when the future finishes running. - """ - if self.done(): - fn(self) - self._callbacks.append(fn) - - def set_result(self, result): - """Set the result of the future to the provided result. - - Args: - result (str): The message ID. - """ - # Sanity check: A future can only complete once. - if self._result is not None or self._exception is not None: - raise RuntimeError('set_result can only be called once.') - - # Set the result and trigger the future. - self._result = result - self._trigger() - - def set_exception(self, exception): - """Set the result of the future to the given exception. - - Args: - exception (:exc:`Exception`): The exception raised. - """ - # Sanity check: A future can only complete once. 
- if self._result is not None or self._exception is not None: - raise RuntimeError('set_exception can only be called once.') - - # Set the exception and trigger the future. - self._exception = exception - self._trigger() - - def _trigger(self): - """Trigger all callbacks registered to this Future. - - This method is called internally by the batch once the batch - completes. - - Args: - message_id (str): The message ID, as a string. - """ - self._completed.set() - for callback in self._callbacks: - callback(self) + # The publishing-side subclass does not need any special behavior + # at this time. + # + # However, there is still a subclass so that if someone attempts + # isinstance checks against a publisher-returned or subscriber-returned + # future, trying either one against the other returns False. + pass + + +__all__ = ( + 'Future', +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py new file mode 100644 index 000000000000..04f8d15a0380 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -0,0 +1,54 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.pubsub_v1 import futures + + +class Future(futures.Future): + """Encapsulation of the asynchronous execution of an action. 
+ + This object is returned from opening a Pub/Sub subscription, and is the + interface to block on the subscription or query its status. + + This object should not be created directly, but is returned by other + methods in this library. + + Args: + policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy + that creates this Future. + """ + def __init__(self, policy): + self._policy = policy + super(Future, self).__init__() + + def running(self): + """Return whether this subscription is opened with this Future. + + .. note:: + + A ``False`` value here does not necessarily mean that the + subscription is closed; it merely means that _this_ future is + not the future applicable to it. + + Since futures have a single result (or exception) and there is + not a concept of resetting them, a closing re-opening of a + subscription will therefore return a new future. + + Returns: + bool: ``True`` if this subscription is opened with this future, + ``False`` otherwise. + """ + return self._policy.future is self diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 85d047eb9439..511b1a32b75a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -69,6 +69,7 @@ def __init__(self, client, subscription, self._consumer = _consumer.Consumer(self) self._ack_deadline = 10 self._last_histogram_size = 0 + self._future = None self.flow_control = flow_control self.histogram = _histogram.Histogram(data=histogram_data) @@ -97,6 +98,16 @@ def ack_deadline(self): self._ack_deadline = self.histogram.percentile(percent=99) return self._ack_deadline + @property + def future(self): + """Return the Future in use, if any. + + Returns: + ~.pubsub_v1.subscriber.future.Future: A Future conforming to the + ``~concurrent.futures.Future`` interface. 
+ """ + return self._future + @property def managed_ack_ids(self): """Return the ack IDs currently being managed by the policy. @@ -388,5 +399,10 @@ def open(self, callback): Args: callback (Callable[Message]): A callable that receives a Pub/Sub Message. + + Returns: + ~google.api_core.future.Future: A future that provides + an interface to block on the subscription if desired, and + handle errors. """ raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index b53706083fe0..616a5d472f1a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber.futures import Future from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.message import Message @@ -63,6 +64,9 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), # Default the callback to a no-op; it is provided by `.open`. self._callback = lambda message: None + # Default the future to None; it is provided by `.open`. + self._future = None + # Create a queue for keeping track of shared state. if queue is None: queue = Queue() @@ -87,10 +91,16 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), def close(self): """Close the existing connection.""" - # Close the main subscription connection. + # Stop consuming messages. self._consumer.helper_threads.stop('callback requests worker') self._consumer.stop_consuming() + # The subscription is closing cleanly; resolve the future if it is not + # resolved already. 
+ if self._future and not self._future.done(): + self._future.set_result(None) + self._future = None + def open(self, callback): """Open a streaming pull connection and begin receiving messages. @@ -100,7 +110,17 @@ def open(self, callback): Args: callback (Callable): The callback function. + + Returns: + ~google.api_core.future.Future: A future that provides + an interface to block on the subscription if desired, and + handle errors. """ + # Create the Future that this method will return. + # This future is the main thread's interface to handle exceptions, + # block on the subscription, etc. + self._future = Future(policy=self) + # Start the thread to pass the requests. logger.debug('Starting callback requests worker.') self._callback = callback @@ -120,6 +140,9 @@ def open(self, callback): self._leaser.daemon = True self._leaser.start() + # Return the future. + return self._future + def on_callback_request(self, callback_request): """Map the callback request to the appropriate GRPC request.""" action, kwargs = callback_request[0], callback_request[1] @@ -136,8 +159,8 @@ def on_exception(self, exception): if getattr(exception, 'code', lambda: None)() == deadline_exceeded: return - # Raise any other exception. - raise exception + # Set any other exception on the future. + self._future.set_exception(exception) def on_response(self, response): """Process all received Pub/Sub messages. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py index e9b64a202e94..0e6315bb620a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py @@ -29,7 +29,10 @@ def test_cancelled(): def test_running(): - assert Future().running() is True + future = Future() + assert future.running() is True + future.set_result('foobar') + assert future.running() is False def test_done(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py new file mode 100644 index 000000000000..f4aa0f75a108 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -0,0 +1,44 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import mock + +from google.auth import credentials +from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.subscriber import futures +from google.cloud.pubsub_v1.subscriber.policy import thread + + +def create_policy(**kwargs): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + return thread.Policy(client, 'sub_name_c', **kwargs) + + +def create_future(policy=None): + if policy is None: + policy = create_policy() + future = futures.Future(policy=policy) + policy._future = future + return future + + +def test_running(): + policy = create_policy() + future = create_future(policy=policy) + assert future.running() is True + policy._future = None + assert future.running() is False diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index c86b54e4edd2..fb1b1b80b35b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -29,6 +29,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _helper_threads from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.futures import Future from google.cloud.pubsub_v1.subscriber.policy import thread @@ -58,6 +59,18 @@ def test_close(): assert 'callback request worker' not in policy._consumer.helper_threads +def test_close_with_future(): + policy = create_policy() + policy._future = Future(policy=policy) + consumer = policy._consumer + with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: + future = policy.future + policy.close() + stop_consuming.assert_called_once_with() + assert policy.future != future + assert future.result() is None + + 
@mock.patch.object(_helper_threads.HelperThreadRegistry, 'start') @mock.patch.object(threading.Thread, 'start') def test_open(thread_start, htr_start): @@ -86,9 +99,11 @@ def test_on_exception_deadline_exceeded(): def test_on_exception_other(): policy = create_policy() + policy._future = Future(policy=policy) exc = TypeError('wahhhhhh') with pytest.raises(TypeError): policy.on_exception(exc) + policy.future.result() def test_on_response(): From 712e9baf00fbb42b7183907fb3e569d22a8eeb1e Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Mon, 30 Oct 2017 08:51:43 -0700 Subject: [PATCH 0139/1197] Set gRPC message options and keepalive. (#4269) --- .../cloud/pubsub_v1/publisher/client.py | 37 ++++++++++++++++++ .../cloud/pubsub_v1/subscriber/client.py | 39 +++++++++++++++++++ .../publisher/test_publisher_client.py | 15 +++++++ .../subscriber/test_subscriber_client.py | 12 ++++++ 4 files changed, 103 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index e80662a715ef..4e0deecae942 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -15,11 +15,14 @@ from __future__ import absolute_import import copy +import os import pkg_resources import threading +import grpc import six +from google.api_core import grpc_helpers from google.cloud.gapic.pubsub.v1 import publisher_client from google.cloud.pubsub_v1 import _gapic @@ -53,6 +56,28 @@ class Client(object): Generally, you should not need to set additional keyword arguments. """ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): + # Sanity check: Is our goal to use the emulator? + # If so, create a grpc insecure channel with the emulator host + # as the target. 
+ if os.environ.get('PUBSUB_EMULATOR_HOST'): + kwargs['channel'] = grpc.insecure_channel( + target=os.environ.get('PUBSUB_EMULATOR_HOST'), + ) + + # Use a custom channel. + # We need this in order to set appropriate default message size and + # keepalive options. + if 'channel' not in kwargs: + kwargs['channel'] = grpc_helpers.create_channel( + credentials=kwargs.get('credentials', None), + target=self.target, + scopes=publisher_client.PublisherClient._ALL_SCOPES, + options={ + 'grpc.max_send_message_length': -1, + 'grpc.max_receive_message_length': -1, + }.items(), + ) + # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' @@ -66,6 +91,18 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): self._batch_lock = threading.Lock() self._batches = {} + @property + def target(self): + """Return the target (where the API is). + + Returns: + str: The location of the API. + """ + return '{host}:{port}'.format( + host=publisher_client.PublisherClient.SERVICE_ADDRESS, + port=publisher_client.PublisherClient.DEFAULT_SERVICE_PORT, + ) + def batch(self, topic, message, create=True, autocommit=True): """Return the current batch for the provided topic. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index afb9f7d7ca75..173ad7f2aee0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -15,7 +15,11 @@ from __future__ import absolute_import import pkg_resources +import os +import grpc + +from google.api_core import grpc_helpers from google.cloud.gapic.pubsub.v1 import subscriber_client from google.cloud.pubsub_v1 import _gapic @@ -49,6 +53,29 @@ class in order to define your own consumer. This is primarily arguments. 
""" def __init__(self, policy_class=thread.Policy, **kwargs): + # Sanity check: Is our goal to use the emulator? + # If so, create a grpc insecure channel with the emulator host + # as the target. + if os.environ.get('PUBSUB_EMULATOR_HOST'): + kwargs['channel'] = grpc.insecure_channel( + target=os.environ.get('PUBSUB_EMULATOR_HOST'), + ) + + # Use a custom channel. + # We need this in order to set appropriate default message size and + # keepalive options. + if 'channel' not in kwargs: + kwargs['channel'] = grpc_helpers.create_channel( + credentials=kwargs.get('credentials', None), + target=self.target, + scopes=subscriber_client.SubscriberClient._ALL_SCOPES, + options={ + 'grpc.max_send_message_length': -1, + 'grpc.max_receive_message_length': -1, + 'grpc.keepalive_time_ms': 30000, + }.items(), + ) + # Add the metrics headers, and instantiate the underlying GAPIC # client. kwargs['lib_name'] = 'gccl' @@ -59,6 +86,18 @@ def __init__(self, policy_class=thread.Policy, **kwargs): # messages. self._policy_class = policy_class + @property + def target(self): + """Return the target (where the API is). + + Returns: + str: The location of the API. + """ + return '{host}:{port}'.format( + host=subscriber_client.SubscriberClient.SERVICE_ADDRESS, + port=subscriber_client.SubscriberClient.DEFAULT_SERVICE_PORT, + ) + def subscribe(self, subscription, callback=None, flow_control=()): """Return a representation of an individual subscription. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 0054b25262b5..b7deac66ef98 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import absolute_import +import os + import mock import pytest @@ -38,6 +41,18 @@ def test_init(): assert client.batch_settings.max_messages == 1000 +def test_init_emulator(monkeypatch): + monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/foo/bar/') + client = create_client() + + # Establish that a gRPC request would attempt to hit the emulator host. + # + # Sadly, there seems to be no good way to do this without poking at + # the private API of gRPC. + channel = client.api.publisher_stub.Publish._channel + assert channel.target().decode('utf8') == '/foo/bar/' + + def test_batch_accepting(): """Establish that an existing batch is returned if it accepts messages.""" client = create_client() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 50e90fead181..d0f40860a23c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -29,6 +29,18 @@ def test_init(): assert client._policy_class is thread.Policy +def test_init_emulator(monkeypatch): + monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/baz/bacon/') + client = create_client() + + # Establish that a gRPC request would attempt to hit the emulator host. + # + # Sadly, there seems to be no good way to do this without poking at + # the private API of gRPC. + channel = client.api.subscriber_stub.Pull._channel + assert channel.target().decode('utf8') == '/baz/bacon/' + + def test_subscribe(): client = create_client() subscription = client.subscribe('sub_name_a') From 25a6a748cc52f0d1fa2a65f09b25445748626c96 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 30 Oct 2017 14:41:42 -0700 Subject: [PATCH 0140/1197] Cutting version 0.28.0 of `google-cloud-core`. (#4280) Also - updating all dependencies of `grpcio` to `>= 1.7.0`. 
This was due to an issue [1] with `1.6.0`. - updating the version of `google-api-core` (also to be released, This is required since the bounds on `grpcio` of `google-cloud-core==0.28.0` and `google-api-core==0.1.0` are mutually exclusive.) - Updating `google-api-core` CHANGELOG for release. - Updating packages to depend on `google-cloud-core>=0.28.0`. - Installing `nox -s lint` deps locally for vision. [1]: https://github.com/grpc/grpc/issues/12455 --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 42c98394dd8d..9456a0516a0c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-cloud-core[grpc] >= 0.27.1, < 0.28dev', + 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', 'google-gax >= 0.15.13, < 0.16dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', From 7d82cdb95c1a556e1b9f350b661e165dd2f6f57a Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Tue, 31 Oct 2017 08:57:09 -0700 Subject: [PATCH 0141/1197] Switch copyright holder to "Google LLC" (#4287) --- packages/google-cloud-pubsub/google/__init__.py | 2 +- packages/google-cloud-pubsub/google/cloud/__init__.py | 2 +- .../google/cloud/gapic/pubsub/v1/publisher_client.py | 2 +- .../google/cloud/gapic/pubsub/v1/subscriber_client.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py | 2 +- .../google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py | 2 +- .../google/cloud/pubsub_v1/publisher/__init__.py | 2 +- .../google/cloud/pubsub_v1/publisher/batch/base.py | 2 +- .../google/cloud/pubsub_v1/publisher/batch/thread.py | 2 +- 
.../google/cloud/pubsub_v1/publisher/client.py | 2 +- .../google/cloud/pubsub_v1/publisher/exceptions.py | 2 +- .../google/cloud/pubsub_v1/publisher/futures.py | 2 +- .../google/cloud/pubsub_v1/subscriber/__init__.py | 2 +- .../google/cloud/pubsub_v1/subscriber/_consumer.py | 2 +- .../google/cloud/pubsub_v1/subscriber/_helper_threads.py | 2 +- .../google/cloud/pubsub_v1/subscriber/_histogram.py | 2 +- .../google/cloud/pubsub_v1/subscriber/client.py | 2 +- .../google/cloud/pubsub_v1/subscriber/futures.py | 2 +- .../google/cloud/pubsub_v1/subscriber/message.py | 2 +- .../google/cloud/pubsub_v1/subscriber/policy/base.py | 2 +- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py | 2 +- packages/google-cloud-pubsub/nox.py | 2 +- packages/google-cloud-pubsub/pylint.config.py | 2 +- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/tests/system.py | 2 +- .../tests/unit/pubsub_v1/publisher/batch/test_base.py | 2 +- .../tests/unit/pubsub_v1/publisher/batch/test_thread.py | 2 +- .../tests/unit/pubsub_v1/publisher/test_futures.py | 2 +- .../tests/unit/pubsub_v1/publisher/test_publisher_client.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_consumer.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_helper_threads.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_histogram.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_message.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_policy_base.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_policy_thread.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_subscriber_client.py | 2 +- packages/google-cloud-pubsub/tests/unit/test_pubsub.py | 2 +- 42 files changed, 42 insertions(+), 42 deletions(-) diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py index b2b833373882..9ee9bf4342ab 100644 --- 
a/packages/google-cloud-pubsub/google/__init__.py +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index b2b833373882..9ee9bf4342ab 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py index c0466e6d444b..176f27b22f95 100644 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py index 5313e0d941a1..b2b86cd7f799 100644 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index bf094f6cf03a..afaa55e4ef81 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py index 21706f6eee5e..7343acde9d49 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index 79aac7de8941..a4a0edea955d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py index b902978cfd60..806bb204aeef 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 7b5f4df9534f..f73893503301 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py index 76d54649448f..ca5f04d582c2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index bad45ff782a7..9e08ea132e00 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index f7171e64d2d4..df7d23ffe7d8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 4e0deecae942..d1375372df28 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index 079eccd1f07a..b6bb0256966f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index a1273bb76e62..cca1c97f5f2f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py index d98a7bb75be4..d82e3da96286 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 9fb2567176bc..127675683af1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index 21e812a0d2ad..50710e2114db 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py index 09f047495896..ddf468457931 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 173ad7f2aee0..8e5f377834bc 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 04f8d15a0380..15a932f2478f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 1015149cfbbf..6b65da1c0a21 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 511b1a32b75a..d5cd07c41b44 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 616a5d472f1a..30bb540cea2f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index a9de4a88f7f8..23a055a31954 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index b938a9019a26..3d1f8fc3047e 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/pylint.config.py b/packages/google-cloud-pubsub/pylint.config.py index b618319b8b61..5d64b9d2f256 100644 --- a/packages/google-cloud-pubsub/pylint.config.py +++ b/packages/google-cloud-pubsub/pylint.config.py @@ -1,4 +1,4 @@ -# Copyright 2017 Google Inc. +# Copyright 2017 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9456a0516a0c..493df4f16952 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. +# Copyright 2016 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index ec38927f3a9d..e89b68b35d38 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 05a749d58425..49cd31f82714 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index f731e2a151bb..588f4bfe689f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py index 0e6315bb620a..f179afa7012b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index b7deac66ef98..ce010e69a8cd 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 2a3429fbc5b3..53f81296fe92 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index f4aa0f75a108..3e6b24501594 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 84775f0be2c1..6b104c874617 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py index 23474a19d116..e3c0e55dbaaf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index a3a1e16f027e..0cde86169417 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index df963424ccb9..3ebf1e6e6d18 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index fb1b1b80b35b..5adfb7817086 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index d0f40860a23c..fb5faaf6b10c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/test_pubsub.py b/packages/google-cloud-pubsub/tests/unit/test_pubsub.py index 605dbddd7601..76d590a492c5 100644 --- a/packages/google-cloud-pubsub/tests/unit/test_pubsub.py +++ b/packages/google-cloud-pubsub/tests/unit/test_pubsub.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2017, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 516c2635e6e9e7735df14a84219593232198396c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 14:28:55 -0700 Subject: [PATCH 0142/1197] Making release for most packages. (#4296) * Making release for most packages. Every package except those that have already been released (`google-cloud-core`, `google-api-core`, `google-cloud-bigquery`): - `google-cloud` - `google-cloud-bigtable` - `google-cloud-datastore` - `google-cloud-dns` - `google-cloud-error-reporting` - `google-cloud-firestore` - `google-cloud-language` - `google-cloud-logging` - `google-cloud-monitoring` - `google-cloud-resource-manager` - `google-cloud-runtimeconfig` - `google-cloud-spanner` - `google-cloud-speech` - `google-cloud-storage` - `google-cloud-trace` - `google-cloud-translate` - `google-cloud-videointelligence` - `google-cloud-vision` * Adding changelog files for each package. 
--- packages/google-cloud-pubsub/CHANGELOG.md | 28 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 3 ++- 2 files changed, 30 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-pubsub/CHANGELOG.md diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md new file mode 100644 index 000000000000..3624f59945cc --- /dev/null +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -0,0 +1,28 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-pubsub/#history + +## 0.29.0 + +### Notable Implementation Changes + +- Honor `max_messages` always (#4262) +- Add futures for subscriptions (#4265) +- Set gRPC message options and keepalive (#4269) +- Updating autogenerated packages (#4297) + +### Documentation + +- Added link to "Python Development Environment Setup Guide" in + project README (#4187, h/t to @michaelawyu) + +### Dependencies + +- Upgrading to `google-cloud-core >= 0.28.0` and adding dependency + on `google-api-core` (#4221, #4280) +- Deferring to `google-api-core` for `grpcio` and + `googleapis-common-protos`dependencies (#4096, #4098) + +PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.0/ diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 493df4f16952..5f8be063328e 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -52,6 +52,7 @@ REQUIREMENTS = [ 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', + 'google-api-core >= 0.1.1, < 0.2.0dev', 'google-gax >= 0.15.13, < 0.16dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', @@ -59,7 +60,7 @@ setup( name='google-cloud-pubsub', - version='0.28.4', + version='0.29.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From d84baae48c0786cdf4d25cf53500a1979fcd7d04 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 15:43:51 -0700 Subject: 
[PATCH 0143/1197] Marking all remaining versions as "dev". (#4299) This is to make it clear the code is between releases. Any code that relies on a **new** feature (e.g. of `google-api-core`) will then be able to **explicitly** make this clear by using the lower bound of the `devN` version. Fixes #4208. See: https://snarky.ca/how-i-manage-package-version-numbers/ --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 5f8be063328e..c347de589917 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -60,7 +60,7 @@ setup( name='google-cloud-pubsub', - version='0.29.0', + version='0.29.1.dev1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 70522a12f04791fa120ce461a4c6253b7ce50048 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 31 Oct 2017 16:18:41 -0700 Subject: [PATCH 0144/1197] Removing autogen update from Pub/Sub changelog. (#4303) That PR didn't end up making it in the release. --- packages/google-cloud-pubsub/CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 3624f59945cc..e326c60deef2 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -11,7 +11,6 @@ - Honor `max_messages` always (#4262) - Add futures for subscriptions (#4265) - Set gRPC message options and keepalive (#4269) -- Updating autogenerated packages (#4297) ### Documentation From a50f31e452cb31e061ae420f16cc8ff40f622107 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 1 Nov 2017 12:43:23 -0700 Subject: [PATCH 0145/1197] Fixing "Fore" -> "For" typo in README docs. 
(#4317) Also obeying an 80-column limit for the content and adding a missing "``virtualenv``" in the phrase "``pip`` and ``virtualenv``" in some of the docs. --- packages/google-cloud-pubsub/README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index b883698996fc..2c5248ce5ec9 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -18,7 +18,9 @@ Quick Start $ pip install --upgrade google-cloud-pubsub -Fore more information on setting up your Python development environment, such as installing ``pip`` and on your system, please refer to `Python Development Environment Setup Guide`_ for Google Cloud Platform. +For more information on setting up your Python development environment, +such as installing ``pip`` and ``virtualenv`` on your system, please refer +to `Python Development Environment Setup Guide`_ for Google Cloud Platform. .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup From 19eac8519a2ae0436029fe24bbf17c101480b81a Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 1 Nov 2017 16:53:46 -0700 Subject: [PATCH 0146/1197] Closes #4319 - shorten test names (#4321) * Closes #4319 - shorten test names * #4319 update docs and config files --- packages/google-cloud-pubsub/nox.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 3d1f8fc3047e..18af0e748020 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -26,15 +26,15 @@ @nox.session -@nox.parametrize('python_version', ['2.7', '3.4', '3.5', '3.6']) -def unit_tests(session, python_version): +@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +def unit(session, py): """Run the unit test suite.""" # Run unit tests against all supported versions of Python. 
- session.interpreter = 'python{}'.format(python_version) + session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + python_version + session.virtualenv_dirname = 'unit-' + py # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) @@ -49,8 +49,8 @@ def unit_tests(session, python_version): @nox.session -@nox.parametrize('python_version', ['2.7', '3.6']) -def system_tests(session, python_version): +@nox.parametrize('py', ['2.7', '3.6']) +def system(session, py): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. @@ -58,10 +58,10 @@ def system_tests(session, python_version): session.skip('Credentials must be set via environment variable.') # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python{}'.format(python_version) + session.interpreter = 'python{}'.format(py) # Set the virtualenv dirname. - session.virtualenv_dirname = 'sys-' + python_version + session.virtualenv_dirname = 'sys-' + py # Install all test dependencies, then install this package into the # virtualenv's dist-packages. From f47a3f8abd8d29186386dccc55ea07e1f3e3e19a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 1 Nov 2017 21:47:55 -0700 Subject: [PATCH 0147/1197] Making a `nox -s default` session for all packages. (#4324) * Making a `nox -s default` session for all packages. * Using "default" `nox` session on AppVeyor. This was 32-bit or 64-bit Python can be used, depending on which is the active `python` / the active `nox.exe`. 
--- packages/google-cloud-pubsub/nox.py | 37 +++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 18af0e748020..6fb6acb31d5a 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -25,6 +25,32 @@ ) +@nox.session +def default(session): + """Default unit test session. + + This is intended to be run **without** an interpreter set, so + that the current ``python`` (on the ``PATH``) or the version of + Python corresponding to the ``nox`` binary the ``PATH`` can + run the tests. + """ + # Install all test dependencies, then install this package in-place. + session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run( + 'py.test', + '--quiet', + '--cov-append', + '--cov-report=', + '--cov=google.cloud.pubsub', + '--cov=google.cloud.pubsub_v1', + '--cov-config=.coveragerc', + 'tests/unit', + ) + + @nox.session @nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) def unit(session, py): @@ -36,16 +62,7 @@ def unit(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'unit-' + py - # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) - session.install('-e', '.') - - # Run py.test against the unit tests. - session.run( - 'py.test', '--quiet', '--cov-append', '--cov-report=', - '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', - '--cov-config=.coveragerc', 'tests/unit', - ) + default(session) @nox.session From 28243fa8ddbafdc76fc881a8c8c54248ab8ff019 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 15 Nov 2017 15:57:42 -0500 Subject: [PATCH 0148/1197] Ensure that quickstart examples actually work. (#4398) Closes #4394. 
--- packages/google-cloud-pubsub/README.rst | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 2c5248ce5ec9..28df0ad363ee 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -66,12 +66,12 @@ messages to it from google.cloud import pubsub publisher = pubsub.PublisherClient() - topic = 'projects/{project_id}/topics/{topic}'.format( + topic_name = 'projects/{project_id}/topics/{topic}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. ) - publisher.create_topic() - publisher.publish(topic, b'My first message!', spam='eggs') + publisher.create_topic(topic_name) + publisher.publish(topic_name, b'My first message!', spam='eggs') To learn more, consult the `publishing documentation`_. @@ -90,7 +90,7 @@ the topic, and subscribe to that. from google.cloud import pubsub subscriber = pubsub.SubscriberClient() - topic = 'projects/{project_id}/topics/{topic}'.format( + topic_name = 'projects/{project_id}/topics/{topic}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. ) @@ -98,7 +98,8 @@ the topic, and subscribe to that. project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. ) - subscription = subscriber.create_subscription(topic, subscription) + subscription = subscriber.create_subscription( + name=subscription_name, topic=topic_name) The subscription is opened asychronously, and messages are processed by use of a callback. 
From 453fdb34a7696c052337095d7a465d2f227f826f Mon Sep 17 00:00:00 2001 From: Mehmet Bora Ezer Date: Thu, 16 Nov 2017 23:08:46 +0300 Subject: [PATCH 0149/1197] Subscription is returned from 'subscribe', not 'create_subscription' (#4402) --- packages/google-cloud-pubsub/README.rst | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 28df0ad363ee..3266b2e4c545 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -98,8 +98,9 @@ the topic, and subscribe to that. project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. ) - subscription = subscriber.create_subscription( + subscriber.create_subscription( name=subscription_name, topic=topic_name) + subscription = subscriber.subscribe(subscription_name) The subscription is opened asychronously, and messages are processed by use of a callback. From 87ebeb31b3d50f83c5b16b462451e1af82695ce1 Mon Sep 17 00:00:00 2001 From: Mehmet Bora Ezer Date: Fri, 17 Nov 2017 19:15:50 +0300 Subject: [PATCH 0150/1197] Unreachable links in the readme files have been fixed. (#4406) --- packages/google-cloud-pubsub/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 3266b2e4c545..39699fd47919 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -9,7 +9,7 @@ Python Client for Google Cloud Pub / Sub - `Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/usage.html +.. 
_Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/ Quick Start ----------- From 3de101070ed5be1d931c113a5fa264b7375a4ca3 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 22 Nov 2017 12:59:00 -0800 Subject: [PATCH 0151/1197] Pub/Sub autogen refresh (#4438) --- packages/google-cloud-pubsub/.coveragerc | 3 + .../google/cloud/gapic/__init__.py | 1 - .../google/cloud/gapic/pubsub/__init__.py | 1 - .../cloud/gapic/pubsub/v1/publisher_client.py | 565 -------- .../pubsub/v1/publisher_client_config.json | 98 -- .../gapic/pubsub/v1/subscriber_client.py | 1063 -------------- .../pubsub/v1/subscriber_client_config.json | 138 -- .../google/cloud/proto/__init__.py | 1 - .../google/cloud/proto/pubsub/__init__.py | 1 - .../google/cloud/proto/pubsub/v1/__init__.py | 1 - .../google/cloud/pubsub.py | 3 +- .../google/cloud/pubsub_v1/__init__.py | 15 +- .../pubsub/v1 => pubsub_v1/gapic}/__init__.py | 0 .../cloud/pubsub_v1/gapic/publisher_client.py | 681 +++++++++ .../gapic/publisher_client_config.py | 93 ++ .../pubsub_v1/gapic/subscriber_client.py | 1239 +++++++++++++++++ .../gapic/subscriber_client_config.py | 135 ++ .../google/cloud/pubsub_v1/proto/__init__.py | 0 .../v1 => pubsub_v1/proto}/pubsub_pb2.py | 86 +- .../v1 => pubsub_v1/proto}/pubsub_pb2_grpc.py | 156 +-- .../cloud/pubsub_v1/publisher/client.py | 15 +- .../cloud/pubsub_v1/subscriber/_consumer.py | 3 +- .../cloud/pubsub_v1/subscriber/client.py | 15 +- .../pubsub_v1/subscriber/policy/thread.py | 4 +- .../google/cloud/pubsub_v1/types.py | 43 +- packages/google-cloud-pubsub/setup.py | 9 +- .../unit/gapic/v1/test_publisher_client_v1.py | 415 ++++++ .../gapic/v1/test_subscriber_client_v1.py | 712 ++++++++++ .../publisher/test_publisher_client.py | 27 +- .../pubsub_v1/subscriber/test_consumer.py | 6 +- .../subscriber/test_helper_threads.py | 2 +- .../unit/pubsub_v1/subscriber/test_message.py | 2 +- .../subscriber/test_policy_thread.py | 6 +- .../subscriber/test_subscriber_client.py | 
13 +- 34 files changed, 3480 insertions(+), 2072 deletions(-) delete mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json delete mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json delete mode 100644 packages/google-cloud-pubsub/google/cloud/proto/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py rename packages/google-cloud-pubsub/google/cloud/{gapic/pubsub/v1 => pubsub_v1/gapic}/__init__.py (100%) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/__init__.py rename packages/google-cloud-pubsub/google/cloud/{proto/pubsub/v1 => pubsub_v1/proto}/pubsub_pb2.py (98%) rename packages/google-cloud-pubsub/google/cloud/{proto/pubsub/v1 => pubsub_v1/proto}/pubsub_pb2_grpc.py (65%) create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 
41ca7428e2ee..4bea65589ecb 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -6,6 +6,9 @@ source = tests.unit [report] +omit = + */gapic/* + */proto/* show_missing = True exclude_lines = diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/__init__.py b/packages/google-cloud-pubsub/google/cloud/gapic/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/gapic/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py deleted file mode 100644 index 176f27b22f95..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client.py +++ /dev/null @@ -1,565 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, -# and updates to that file get reflected here through a refresh process. -# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. -"""Accesses the google.pubsub.v1 Publisher API.""" - -import collections -import json -import os -import pkg_resources -import platform - -from google.gax import api_callable -from google.gax import config -from google.gax import path_template -import google.gax - -from google.cloud.proto.pubsub.v1 import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 - -_PageDesc = google.gax.PageDescriptor - - -class PublisherClient(object): - """ - The service that an application uses to manipulate topics, and to send - messages to a topic. 
- """ - - SERVICE_ADDRESS = 'pubsub.googleapis.com' - """The default address of the service.""" - - DEFAULT_SERVICE_PORT = 443 - """The default port of the service.""" - - _PAGE_DESCRIPTORS = { - 'list_topics': - _PageDesc('page_token', 'next_page_token', 'topics'), - 'list_topic_subscriptions': - _PageDesc('page_token', 'next_page_token', 'subscriptions') - } - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) - - _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') - _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( - 'projects/{project}/topics/{topic}') - - @classmethod - def project_path(cls, project): - """Returns a fully-qualified project resource name string.""" - return cls._PROJECT_PATH_TEMPLATE.render({ - 'project': project, - }) - - @classmethod - def topic_path(cls, project, topic): - """Returns a fully-qualified topic resource name string.""" - return cls._TOPIC_PATH_TEMPLATE.render({ - 'project': project, - 'topic': topic, - }) - - @classmethod - def match_project_from_project_name(cls, project_name): - """Parses the project from a project resource. - - Args: - project_name (string): A fully-qualified path representing a project - resource. - - Returns: - A string representing the project. - """ - return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') - - @classmethod - def match_project_from_topic_name(cls, topic_name): - """Parses the project from a topic resource. - - Args: - topic_name (string): A fully-qualified path representing a topic - resource. - - Returns: - A string representing the project. - """ - return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') - - @classmethod - def match_topic_from_topic_name(cls, topic_name): - """Parses the topic from a topic resource. 
- - Args: - topic_name (string): A fully-qualified path representing a topic - resource. - - Returns: - A string representing the topic. - """ - return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') - - def __init__(self, - service_path=SERVICE_ADDRESS, - port=DEFAULT_SERVICE_PORT, - channel=None, - credentials=None, - ssl_credentials=None, - scopes=None, - client_config=None, - app_name=None, - app_version='', - lib_name=None, - lib_version='', - metrics_headers=()): - """Constructor. - - Args: - service_path (string): The domain name of the API remote host. - port (int): The port on which to connect to the remote host. - channel (:class:`grpc.Channel`): A ``Channel`` instance through - which to make calls. - credentials (object): The authorization credentials to attach to - requests. These credentials identify this application to the - service. - ssl_credentials (:class:`grpc.ChannelCredentials`): A - ``ChannelCredentials`` instance for use with an SSL-enabled - channel. - scopes (list[string]): A list of OAuth2 scopes to attach to requests. - client_config (dict): - A dictionary for call options for each method. See - :func:`google.gax.construct_settings` for the structure of - this data. Falls back to the default config if not specified - or the specified config is missing data points. - app_name (string): The name of the application calling - the service. Recommended for analytics purposes. - app_version (string): The version of the application calling - the service. Recommended for analytics purposes. - lib_name (string): The API library software used for calling - the service. (Unless you are writing an API client itself, - leave this as default.) - lib_version (string): The API library software version used - for calling the service. (Unless you are writing an API client - itself, leave this as default.) - metrics_headers (dict): A dictionary of values for tracking - client library metrics. Ultimately serializes to a string - (e.g. 
'foo/1.2.3 bar/3.14.1'). This argument should be - considered private. - - Returns: - A PublisherClient object. - """ - # Unless the calling application specifically requested - # OAuth scopes, request everything. - if scopes is None: - scopes = self._ALL_SCOPES - - # Initialize an empty client config, if none is set. - if client_config is None: - client_config = {} - - # Initialize metrics_headers as an ordered dictionary - # (cuts down on cardinality of the resulting string slightly). - metrics_headers = collections.OrderedDict(metrics_headers) - metrics_headers['gl-python'] = platform.python_version() - - # The library may or may not be set, depending on what is - # calling this client. Newer client libraries set the library name - # and version. - if lib_name: - metrics_headers[lib_name] = lib_version - - # Finally, track the GAPIC package version. - metrics_headers['gapic'] = pkg_resources.get_distribution( - 'google-cloud-pubsub', ).version - - # Load the configuration defaults. - default_client_config = json.loads( - pkg_resources.resource_string( - __name__, 'publisher_client_config.json').decode()) - defaults = api_callable.construct_settings( - 'google.pubsub.v1.Publisher', - default_client_config, - client_config, - config.STATUS_CODE_NAMES, - metrics_headers=metrics_headers, - page_descriptors=self._PAGE_DESCRIPTORS, ) - self.iam_policy_stub = config.create_stub( - iam_policy_pb2.IAMPolicyStub, - channel=channel, - service_path=service_path, - service_port=port, - credentials=credentials, - scopes=scopes, - ssl_credentials=ssl_credentials) - self.publisher_stub = config.create_stub( - pubsub_pb2.PublisherStub, - channel=channel, - service_path=service_path, - service_port=port, - credentials=credentials, - scopes=scopes, - ssl_credentials=ssl_credentials) - - self._create_topic = api_callable.create_api_call( - self.publisher_stub.CreateTopic, settings=defaults['create_topic']) - self._publish = api_callable.create_api_call( - 
self.publisher_stub.Publish, settings=defaults['publish']) - self._get_topic = api_callable.create_api_call( - self.publisher_stub.GetTopic, settings=defaults['get_topic']) - self._list_topics = api_callable.create_api_call( - self.publisher_stub.ListTopics, settings=defaults['list_topics']) - self._list_topic_subscriptions = api_callable.create_api_call( - self.publisher_stub.ListTopicSubscriptions, - settings=defaults['list_topic_subscriptions']) - self._delete_topic = api_callable.create_api_call( - self.publisher_stub.DeleteTopic, settings=defaults['delete_topic']) - self._set_iam_policy = api_callable.create_api_call( - self.iam_policy_stub.SetIamPolicy, - settings=defaults['set_iam_policy']) - self._get_iam_policy = api_callable.create_api_call( - self.iam_policy_stub.GetIamPolicy, - settings=defaults['get_iam_policy']) - self._test_iam_permissions = api_callable.create_api_call( - self.iam_policy_stub.TestIamPermissions, - settings=defaults['test_iam_permissions']) - - # Service calls - def create_topic(self, name, options=None): - """ - Creates the given topic with the given name. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> client = publisher_client.PublisherClient() - >>> name = client.topic_path('[PROJECT]', '[TOPIC]') - >>> response = client.create_topic(name) - - Args: - name (string): The name of the topic. It must have the format - ``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter, - and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent - signs (``%``). It must be between 3 and 255 characters in length, and it - must not start with ``\"goog\"``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. 
- - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.Topic(name=name) - return self._create_topic(request, options) - - def publish(self, topic, messages, options=None): - """ - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 - >>> client = publisher_client.PublisherClient() - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> data = b'' - >>> messages_element = pubsub_pb2.PubsubMessage(data=data) - >>> messages = [messages_element] - >>> response = client.publish(topic, messages) - - Args: - topic (string): The messages in the request will be published on this topic. - Format is ``projects/{project}/topics/{topic}``. - messages (list[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PubsubMessage`]): The messages to publish. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PublishResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) - return self._publish(request, options) - - def get_topic(self, topic, options=None): - """ - Gets the configuration of a topic. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> client = publisher_client.PublisherClient() - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> response = client.get_topic(topic) - - Args: - topic (string): The name of the topic to get. - Format is ``projects/{project}/topics/{topic}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.GetTopicRequest(topic=topic) - return self._get_topic(request, options) - - def list_topics(self, project, page_size=None, options=None): - """ - Lists matching topics. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> from google.gax import CallOptions, INITIAL_PAGE - >>> client = publisher_client.PublisherClient() - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topics(project): - >>> # process element - >>> pass - >>> - >>> # Or iterate over results one page at a time - >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): - >>> for element in page: - >>> # process element - >>> pass - - Args: - project (string): The name of the cloud project that topics belong to. - Format is ``projects/{project}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. 
- - Returns: - A :class:`google.gax.PageIterator` instance. By default, this - is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Topic` instances. - This object can also be configured to iterate over the pages - of the response through the `CallOptions` parameter. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.ListTopicsRequest( - project=project, page_size=page_size) - return self._list_topics(request, options) - - def list_topic_subscriptions(self, topic, page_size=None, options=None): - """ - Lists the name of the subscriptions for this topic. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> from google.gax import CallOptions, INITIAL_PAGE - >>> client = publisher_client.PublisherClient() - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_subscriptions(topic): - >>> # process element - >>> pass - >>> - >>> # Or iterate over results one page at a time - >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): - >>> for element in page: - >>> # process element - >>> pass - - Args: - topic (string): The name of the topic that subscriptions are attached to. - Format is ``projects/{project}/topics/{topic}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.gax.PageIterator` instance. By default, this - is an iterable of string instances. 
- This object can also be configured to iterate over the pages - of the response through the `CallOptions` parameter. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, page_size=page_size) - return self._list_topic_subscriptions(request, options) - - def delete_topic(self, topic, options=None): - """ - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their ``topic`` field is set to ``_deleted-topic_``. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> client = publisher_client.PublisherClient() - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> client.delete_topic(topic) - - Args: - topic (string): Name of the topic to delete. - Format is ``projects/{project}/topics/{topic}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.DeleteTopicRequest(topic=topic) - self._delete_topic(request, options) - - def set_iam_policy(self, resource, policy, options=None): - """ - Sets the access control policy on the specified resource. Replaces any - existing policy. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> from google.iam.v1 import policy_pb2 - >>> client = publisher_client.PublisherClient() - >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') - >>> policy = policy_pb2.Policy() - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (string): REQUIRED: The resource for which the policy is being specified. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. - policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of - the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.iam.v1.policy_pb2.Policy` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy) - return self._set_iam_policy(request, options) - - def get_iam_policy(self, resource, options=None): - """ - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does not have a policy - set. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> client = publisher_client.PublisherClient() - >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') - >>> response = client.get_iam_policy(resource) - - Args: - resource (string): REQUIRED: The resource for which the policy is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. 
- options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.iam.v1.policy_pb2.Policy` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - return self._get_iam_policy(request, options) - - def test_iam_permissions(self, resource, permissions, options=None): - """ - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import publisher_client - >>> client = publisher_client.PublisherClient() - >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') - >>> permissions = [] - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (string): REQUIRED: The resource for which the policy detail is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. - permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with - wildcards (such as '*' or 'storage.*') are not allowed. For more - information see - `IAM Overview `_. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. 
- request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions) - return self._test_iam_permissions(request, options) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json deleted file mode 100644 index 7e8a723499e6..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/publisher_client_config.json +++ /dev/null @@ -1,98 +0,0 @@ -{ - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "one_plus_delivery": [ - "CANCELLED", - "UNKNOWN", - "DEADLINE_EXCEEDED", - "RESOURCE_EXHAUSTED", - "ABORTED", - "INTERNAL", - "UNAVAILABLE" - ], - "non_idempotent": [ - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 12000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 12000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "Publish": { - "timeout_millis": 60000, - "retry_codes_name": "one_plus_delivery", - "retry_params_name": "messaging", - "bundling": { - "element_count_threshold": 10, - "element_count_limit": 1000, - "request_byte_threshold": 1024, - "request_byte_limit": 10485760, - "delay_threshold_millis": 10 - } - }, - "GetTopic": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ListTopics": { - 
"timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ListTopicSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "DeleteTopic": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py deleted file mode 100644 index b2b86cd7f799..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client.py +++ /dev/null @@ -1,1063 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, -# and updates to that file get reflected here through a refresh process. 
-# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. -"""Accesses the google.pubsub.v1 Subscriber API.""" - -import collections -import json -import os -import pkg_resources -import platform - -from google.gax import api_callable -from google.gax import config -from google.gax import path_template -from google.gax.utils import oneof -import google.gax - -from google.cloud.proto.pubsub.v1 import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - -_PageDesc = google.gax.PageDescriptor - - -class SubscriberClient(object): - """ - The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the ``Pull`` method. 
- """ - - SERVICE_ADDRESS = 'pubsub.googleapis.com' - """The default address of the service.""" - - DEFAULT_SERVICE_PORT = 443 - """The default port of the service.""" - - _PAGE_DESCRIPTORS = { - 'list_subscriptions': - _PageDesc('page_token', 'next_page_token', 'subscriptions'), - 'list_snapshots': - _PageDesc('page_token', 'next_page_token', 'snapshots') - } - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _ALL_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) - - _PROJECT_PATH_TEMPLATE = path_template.PathTemplate('projects/{project}') - _SNAPSHOT_PATH_TEMPLATE = path_template.PathTemplate( - 'projects/{project}/snapshots/{snapshot}') - _SUBSCRIPTION_PATH_TEMPLATE = path_template.PathTemplate( - 'projects/{project}/subscriptions/{subscription}') - _TOPIC_PATH_TEMPLATE = path_template.PathTemplate( - 'projects/{project}/topics/{topic}') - - @classmethod - def project_path(cls, project): - """Returns a fully-qualified project resource name string.""" - return cls._PROJECT_PATH_TEMPLATE.render({ - 'project': project, - }) - - @classmethod - def snapshot_path(cls, project, snapshot): - """Returns a fully-qualified snapshot resource name string.""" - return cls._SNAPSHOT_PATH_TEMPLATE.render({ - 'project': project, - 'snapshot': snapshot, - }) - - @classmethod - def subscription_path(cls, project, subscription): - """Returns a fully-qualified subscription resource name string.""" - return cls._SUBSCRIPTION_PATH_TEMPLATE.render({ - 'project': - project, - 'subscription': - subscription, - }) - - @classmethod - def topic_path(cls, project, topic): - """Returns a fully-qualified topic resource name string.""" - return cls._TOPIC_PATH_TEMPLATE.render({ - 'project': project, - 'topic': topic, - }) - - @classmethod - def match_project_from_project_name(cls, project_name): - """Parses the project from a project resource. 
- - Args: - project_name (string): A fully-qualified path representing a project - resource. - - Returns: - A string representing the project. - """ - return cls._PROJECT_PATH_TEMPLATE.match(project_name).get('project') - - @classmethod - def match_project_from_snapshot_name(cls, snapshot_name): - """Parses the project from a snapshot resource. - - Args: - snapshot_name (string): A fully-qualified path representing a snapshot - resource. - - Returns: - A string representing the project. - """ - return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('project') - - @classmethod - def match_snapshot_from_snapshot_name(cls, snapshot_name): - """Parses the snapshot from a snapshot resource. - - Args: - snapshot_name (string): A fully-qualified path representing a snapshot - resource. - - Returns: - A string representing the snapshot. - """ - return cls._SNAPSHOT_PATH_TEMPLATE.match(snapshot_name).get('snapshot') - - @classmethod - def match_project_from_subscription_name(cls, subscription_name): - """Parses the project from a subscription resource. - - Args: - subscription_name (string): A fully-qualified path representing a subscription - resource. - - Returns: - A string representing the project. - """ - return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( - 'project') - - @classmethod - def match_subscription_from_subscription_name(cls, subscription_name): - """Parses the subscription from a subscription resource. - - Args: - subscription_name (string): A fully-qualified path representing a subscription - resource. - - Returns: - A string representing the subscription. - """ - return cls._SUBSCRIPTION_PATH_TEMPLATE.match(subscription_name).get( - 'subscription') - - @classmethod - def match_project_from_topic_name(cls, topic_name): - """Parses the project from a topic resource. - - Args: - topic_name (string): A fully-qualified path representing a topic - resource. - - Returns: - A string representing the project. 
- """ - return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('project') - - @classmethod - def match_topic_from_topic_name(cls, topic_name): - """Parses the topic from a topic resource. - - Args: - topic_name (string): A fully-qualified path representing a topic - resource. - - Returns: - A string representing the topic. - """ - return cls._TOPIC_PATH_TEMPLATE.match(topic_name).get('topic') - - def __init__(self, - service_path=SERVICE_ADDRESS, - port=DEFAULT_SERVICE_PORT, - channel=None, - credentials=None, - ssl_credentials=None, - scopes=None, - client_config=None, - app_name=None, - app_version='', - lib_name=None, - lib_version='', - metrics_headers=()): - """Constructor. - - Args: - service_path (string): The domain name of the API remote host. - port (int): The port on which to connect to the remote host. - channel (:class:`grpc.Channel`): A ``Channel`` instance through - which to make calls. - credentials (object): The authorization credentials to attach to - requests. These credentials identify this application to the - service. - ssl_credentials (:class:`grpc.ChannelCredentials`): A - ``ChannelCredentials`` instance for use with an SSL-enabled - channel. - scopes (list[string]): A list of OAuth2 scopes to attach to requests. - client_config (dict): - A dictionary for call options for each method. See - :func:`google.gax.construct_settings` for the structure of - this data. Falls back to the default config if not specified - or the specified config is missing data points. - app_name (string): The name of the application calling - the service. Recommended for analytics purposes. - app_version (string): The version of the application calling - the service. Recommended for analytics purposes. - lib_name (string): The API library software used for calling - the service. (Unless you are writing an API client itself, - leave this as default.) - lib_version (string): The API library software version used - for calling the service. 
(Unless you are writing an API client - itself, leave this as default.) - metrics_headers (dict): A dictionary of values for tracking - client library metrics. Ultimately serializes to a string - (e.g. 'foo/1.2.3 bar/3.14.1'). This argument should be - considered private. - - Returns: - A SubscriberClient object. - """ - # Unless the calling application specifically requested - # OAuth scopes, request everything. - if scopes is None: - scopes = self._ALL_SCOPES - - # Initialize an empty client config, if none is set. - if client_config is None: - client_config = {} - - # Initialize metrics_headers as an ordered dictionary - # (cuts down on cardinality of the resulting string slightly). - metrics_headers = collections.OrderedDict(metrics_headers) - metrics_headers['gl-python'] = platform.python_version() - - # The library may or may not be set, depending on what is - # calling this client. Newer client libraries set the library name - # and version. - if lib_name: - metrics_headers[lib_name] = lib_version - - # Finally, track the GAPIC package version. - metrics_headers['gapic'] = pkg_resources.get_distribution( - 'google-cloud-pubsub', ).version - - # Load the configuration defaults. 
- default_client_config = json.loads( - pkg_resources.resource_string( - __name__, 'subscriber_client_config.json').decode()) - defaults = api_callable.construct_settings( - 'google.pubsub.v1.Subscriber', - default_client_config, - client_config, - config.STATUS_CODE_NAMES, - metrics_headers=metrics_headers, - page_descriptors=self._PAGE_DESCRIPTORS, ) - self.iam_policy_stub = config.create_stub( - iam_policy_pb2.IAMPolicyStub, - channel=channel, - service_path=service_path, - service_port=port, - credentials=credentials, - scopes=scopes, - ssl_credentials=ssl_credentials) - self.subscriber_stub = config.create_stub( - pubsub_pb2.SubscriberStub, - channel=channel, - service_path=service_path, - service_port=port, - credentials=credentials, - scopes=scopes, - ssl_credentials=ssl_credentials) - - self._create_subscription = api_callable.create_api_call( - self.subscriber_stub.CreateSubscription, - settings=defaults['create_subscription']) - self._get_subscription = api_callable.create_api_call( - self.subscriber_stub.GetSubscription, - settings=defaults['get_subscription']) - self._update_subscription = api_callable.create_api_call( - self.subscriber_stub.UpdateSubscription, - settings=defaults['update_subscription']) - self._list_subscriptions = api_callable.create_api_call( - self.subscriber_stub.ListSubscriptions, - settings=defaults['list_subscriptions']) - self._delete_subscription = api_callable.create_api_call( - self.subscriber_stub.DeleteSubscription, - settings=defaults['delete_subscription']) - self._modify_ack_deadline = api_callable.create_api_call( - self.subscriber_stub.ModifyAckDeadline, - settings=defaults['modify_ack_deadline']) - self._acknowledge = api_callable.create_api_call( - self.subscriber_stub.Acknowledge, settings=defaults['acknowledge']) - self._pull = api_callable.create_api_call( - self.subscriber_stub.Pull, settings=defaults['pull']) - self._streaming_pull = api_callable.create_api_call( - self.subscriber_stub.StreamingPull, - 
settings=defaults['streaming_pull']) - self._modify_push_config = api_callable.create_api_call( - self.subscriber_stub.ModifyPushConfig, - settings=defaults['modify_push_config']) - self._list_snapshots = api_callable.create_api_call( - self.subscriber_stub.ListSnapshots, - settings=defaults['list_snapshots']) - self._create_snapshot = api_callable.create_api_call( - self.subscriber_stub.CreateSnapshot, - settings=defaults['create_snapshot']) - self._delete_snapshot = api_callable.create_api_call( - self.subscriber_stub.DeleteSnapshot, - settings=defaults['delete_snapshot']) - self._seek = api_callable.create_api_call( - self.subscriber_stub.Seek, settings=defaults['seek']) - self._set_iam_policy = api_callable.create_api_call( - self.iam_policy_stub.SetIamPolicy, - settings=defaults['set_iam_policy']) - self._get_iam_policy = api_callable.create_api_call( - self.iam_policy_stub.GetIamPolicy, - settings=defaults['get_iam_policy']) - self._test_iam_permissions = api_callable.create_api_call( - self.iam_policy_stub.TestIamPermissions, - settings=defaults['test_iam_permissions']) - - # Service calls - def create_subscription(self, - name, - topic, - push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - options=None): - """ - Creates a subscription to a given topic. - If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - `resource name format `_. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> response = client.create_subscription(name, topic) - - Args: - name (string): The name of the subscription. It must have the format - ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must - start with a letter, and contain only letters (``[A-Za-z]``), numbers - (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters - in length, and it must not start with ``\"goog\"``. - topic (string): The name of the topic from which this subscription is receiving messages. - Format is ``projects/{project}/topics/{topic}``. - The value of this field will be ``_deleted-topic_`` if the topic has been - deleted. - push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): If push delivery is used with this subscription, this field is - used to configure it. An empty ``pushConfig`` signifies that the subscriber - will pull and ack messages using API methods. - ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message - before the subscriber should acknowledge the message. After message - delivery but before the ack deadline expires and before the message is - acknowledged, it is an outstanding message and will not be delivered - again during that time (on a best-effort basis). - - For pull subscriptions, this value is used as the initial value for the ack - deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using - pull. - The minimum custom deadline you can specify is 10 seconds. - The maximum custom deadline you can specify is 600 seconds (10 minutes). 
- If this parameter is 0, a default value of 10 seconds is used. - - For push delivery, this value is also used to set the request timeout for - the call to the push endpoint. - - If the subscriber never acknowledges the message, the Pub/Sub - system will eventually redeliver the message. - retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then - messages are not expunged from the subscription's backlog, even if they are - acknowledged, until they fall out of the ``message_retention_duration`` - window. - message_retention_duration (:class:`google.protobuf.duration_pb2.Duration`): How long to retain unacknowledged messages in the subscription's backlog, - from the moment a message is published. - If ``retain_acked_messages`` is true, then this also configures the retention - of acknowledged messages, and thus configures how far back in time a ``Seek`` - can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.Subscription( - name=name, - topic=topic, - push_config=push_config, - ack_deadline_seconds=ack_deadline_seconds, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration) - return self._create_subscription(request, options) - - def get_subscription(self, subscription, options=None): - """ - Gets the configuration details of a subscription. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> response = client.get_subscription(subscription) - - Args: - subscription (string): The name of the subscription to get. - Format is ``projects/{project}/subscriptions/{sub}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) - return self._get_subscription(request, options) - - def update_subscription(self, subscription, update_mask, options=None): - """ - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 - >>> from google.protobuf import field_mask_pb2 - >>> client = subscriber_client.SubscriberClient() - >>> subscription = pubsub_pb2.Subscription() - >>> update_mask = field_mask_pb2.FieldMask() - >>> response = client.update_subscription(subscription, update_mask) - - Args: - subscription (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription`): The updated subscription object. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Indicates which fields in the provided subscription to update. - Must be specified and non-empty. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instance. 
- - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask) - return self._update_subscription(request, options) - - def list_subscriptions(self, project, page_size=None, options=None): - """ - Lists matching subscriptions. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> from google.gax import CallOptions, INITIAL_PAGE - >>> client = subscriber_client.SubscriberClient() - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_subscriptions(project): - >>> # process element - >>> pass - >>> - >>> # Or iterate over results one page at a time - >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): - >>> for element in page: - >>> # process element - >>> pass - - Args: - project (string): The name of the cloud project that subscriptions belong to. - Format is ``projects/{project}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.gax.PageIterator` instance. By default, this - is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Subscription` instances. - This object can also be configured to iterate over the pages - of the response through the `CallOptions` parameter. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. 
- request = pubsub_pb2.ListSubscriptionsRequest( - project=project, page_size=page_size) - return self._list_subscriptions(request, options) - - def delete_subscription(self, subscription, options=None): - """ - Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to ``Pull`` after deletion will return - ``NOT_FOUND``. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> client.delete_subscription(subscription) - - Args: - subscription (string): The subscription to delete. - Format is ``projects/{project}/subscriptions/{sub}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription) - self._delete_subscription(request, options) - - def modify_ack_deadline(self, - subscription, - ack_ids, - ack_deadline_seconds, - options=None): - """ - Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> ack_ids = [] - >>> ack_deadline_seconds = 0 - >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - Args: - subscription (string): The name of the subscription. - Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[string]): List of acknowledgment IDs. - ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to - the Pub/Sub system. For example, if the value is 10, the new - ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero may immediately make the message available for - another pull request. - The minimum deadline you can specify is 0 seconds. - The maximum deadline you can specify is 600 seconds (10 minutes). - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds) - self._modify_ack_deadline(request, options) - - def acknowledge(self, subscription, ack_ids, options=None): - """ - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> ack_ids = [] - >>> client.acknowledge(subscription, ack_ids) - - Args: - subscription (string): The subscription whose message is being acknowledged. - Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[string]): The acknowledgment ID for the messages being acknowledged that was returned - by the Pub/Sub system in the ``Pull`` response. Must not be empty. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids) - self._acknowledge(request, options) - - def pull(self, - subscription, - max_messages, - return_immediately=None, - options=None): - """ - Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return ``UNAVAILABLE`` if - there are too many concurrent pull requests pending for the given - subscription. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> max_messages = 0 - >>> response = client.pull(subscription, max_messages) - - Args: - subscription (string): The subscription from which messages should be pulled. - Format is ``projects/{project}/subscriptions/{sub}``. - max_messages (int): The maximum number of messages returned for this request. The Pub/Sub - system may return fewer than the number specified. 
- return_immediately (bool): If this field set to true, the system will respond immediately even if - it there are no messages available to return in the ``Pull`` response. - Otherwise, the system may wait (for a bounded amount of time) until at - least one message is available, rather than returning no messages. The - client may cancel the request if it does not wish to wait any longer for - the response. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PullResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.PullRequest( - subscription=subscription, - max_messages=max_messages, - return_immediately=return_immediately) - return self._pull(request, options) - - def streaming_pull(self, requests, options=None): - """ - (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will - respond with UNIMPLEMENTED errors unless you have been invited to test - this feature. Contact cloud-pubsub@google.com with any questions. - - Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status ``OK`` to reassign - server-side resources, in which case, the client should re-establish the - stream. ``UNAVAILABLE`` may also be returned in the case of a transient error - (e.g., a server restart). These should also be retried by the client. Flow - control can be achieved by configuring the underlying RPC channel. - - EXPERIMENTAL: This method interface might change in the future. 
- - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> stream_ack_deadline_seconds = 0 - >>> request = pubsub_pb2.StreamingPullRequest(subscription=subscription, stream_ack_deadline_seconds=stream_ack_deadline_seconds) - >>> requests = [request] - >>> for element in client.streaming_pull(requests): - >>> # process element - >>> pass - - Args: - requests (iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullRequest`]): The input objects. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - iterator[:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.StreamingPullResponse`]. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - return self._streaming_pull(requests, options) - - def modify_push_config(self, subscription, push_config, options=None): - """ - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified by - an empty ``PushConfig``) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the ``PushConfig``. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> from google.cloud.proto.pubsub.v1 import pubsub_pb2 - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> push_config = pubsub_pb2.PushConfig() - >>> client.modify_push_config(subscription, push_config) - - Args: - subscription (string): The name of the subscription. 
- Format is ``projects/{project}/subscriptions/{sub}``. - push_config (:class:`google.cloud.proto.pubsub.v1.pubsub_pb2.PushConfig`): The push configuration for future deliveries. - - An empty ``pushConfig`` indicates that the Pub/Sub system should - stop pushing messages from the given subscription and allow - messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` is not called. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config) - self._modify_push_config(request, options) - - def list_snapshots(self, project, page_size=None, options=None): - """ - Lists the existing snapshots. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> from google.gax import CallOptions, INITIAL_PAGE - >>> client = subscriber_client.SubscriberClient() - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_snapshots(project): - >>> # process element - >>> pass - >>> - >>> # Or iterate over results one page at a time - >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): - >>> for element in page: - >>> # process element - >>> pass - - Args: - project (string): The name of the cloud project that snapshots belong to. - Format is ``projects/{project}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. 
- options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.gax.PageIterator` instance. By default, this - is an iterable of :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instances. - This object can also be configured to iterate over the pages - of the response through the `CallOptions` parameter. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.ListSnapshotsRequest( - project=project, page_size=page_size) - return self._list_snapshots(request, options) - - def create_snapshot(self, name, subscription, options=None): - """ - Creates a snapshot from the requested subscription. - If the snapshot already exists, returns ``ALREADY_EXISTS``. - If the requested subscription doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - `resource name format `_. - The generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> response = client.create_snapshot(name, subscription) - - Args: - name (string): Optional user-provided name for this snapshot. - If the name is not provided in the request, the server will assign a random - name for this snapshot on the same project as the subscription. - Note that for REST API requests, you must specify a name. - Format is ``projects/{project}/snapshots/{snap}``. 
- subscription (string): The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: - - - The existing backlog on the subscription. More precisely, this is - defined as the messages in the subscription's backlog that are - unacknowledged upon the successful completion of the - `CreateSnapshot` request; as well as: - - Any messages published to the subscription's topic following the - successful completion of the CreateSnapshot request. - - Format is ``projects/{project}/subscriptions/{sub}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.Snapshot` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription) - return self._create_snapshot(request, options) - - def delete_snapshot(self, snapshot, options=None): - """ - Removes an existing snapshot. All messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> client.delete_snapshot(snapshot) - - Args: - snapshot (string): The name of the snapshot to delete. - Format is ``projects/{project}/snapshots/{snap}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. 
- :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) - self._delete_snapshot(request, options) - - def seek(self, subscription, time=None, snapshot=None, options=None): - """ - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> response = client.seek(subscription) - - Args: - subscription (string): The subscription to affect. - time (:class:`google.protobuf.timestamp_pb2.Timestamp`): The time to seek to. - Messages retained in the subscription that were published before this - time are marked as acknowledged, and messages retained in the - subscription that were published after this time are marked as - unacknowledged. Note that this operation affects only those messages - retained in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). For example, - if ``time`` corresponds to a point before the message retention - window (or to a point before the system's notion of the subscription - creation time), only retained messages will be marked as unacknowledged, - and already-expunged messages will not be restored. - snapshot (string): The snapshot to seek to. The snapshot's topic must be the same as that of - the provided subscription. - Format is ``projects/{project}/snapshots/{snap}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.cloud.proto.pubsub.v1.pubsub_pb2.SeekResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. 
- """ - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - oneof.check_oneof( - time=time, - snapshot=snapshot, ) - - # Create the request object. - request = pubsub_pb2.SeekRequest( - subscription=subscription, time=time, snapshot=snapshot) - return self._seek(request, options) - - def set_iam_policy(self, resource, policy, options=None): - """ - Sets the access control policy on the specified resource. Replaces any - existing policy. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> from google.iam.v1 import policy_pb2 - >>> client = subscriber_client.SubscriberClient() - >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> policy = policy_pb2.Policy() - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (string): REQUIRED: The resource for which the policy is being specified. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. - policy (:class:`google.iam.v1.policy_pb2.Policy`): REQUIRED: The complete policy to be applied to the ``resource``. The size of - the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.iam.v1.policy_pb2.Policy` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy) - return self._set_iam_policy(request, options) - - def get_iam_policy(self, resource, options=None): - """ - Gets the access control policy for a resource. 
- Returns an empty policy if the resource exists and does not have a policy - set. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> response = client.get_iam_policy(resource) - - Args: - resource (string): REQUIRED: The resource for which the policy is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. - options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.iam.v1.policy_pb2.Policy` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - return self._get_iam_policy(request, options) - - def test_iam_permissions(self, resource, permissions, options=None): - """ - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. - - Example: - >>> from google.cloud.gapic.pubsub.v1 import subscriber_client - >>> client = subscriber_client.SubscriberClient() - >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> permissions = [] - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (string): REQUIRED: The resource for which the policy detail is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. - permissions (list[string]): The set of permissions to check for the ``resource``. Permissions with - wildcards (such as '*' or 'storage.*') are not allowed. For more - information see - `IAM Overview `_. 
- options (:class:`google.gax.CallOptions`): Overrides the default - settings for this call, e.g, timeout, retries etc. - - Returns: - A :class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse` instance. - - Raises: - :exc:`google.gax.errors.GaxError` if the RPC is aborted. - :exc:`ValueError` if the parameters are invalid. - """ - # Create the request object. - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions) - return self._test_iam_permissions(request, options) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json b/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json deleted file mode 100644 index 6180cc0a941f..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/subscriber_client_config.json +++ /dev/null @@ -1,138 +0,0 @@ -{ - "interfaces": { - "google.pubsub.v1.Subscriber": { - "retry_codes": { - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "non_idempotent": [ - "UNAVAILABLE" - ], - "pull": [ - "CANCELLED", - "DEADLINE_EXCEEDED", - "RESOURCE_EXHAUSTED", - "INTERNAL", - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 12000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 12000, - "total_timeout_millis": 600000 - }, - "streaming": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 900000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 900000, - 
"total_timeout_millis": 900000 - } - }, - "methods": { - "CreateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "GetSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "UpdateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ListSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "DeleteSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ModifyAckDeadline": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "Acknowledge": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "messaging" - }, - "Pull": { - "timeout_millis": 60000, - "retry_codes_name": "pull", - "retry_params_name": "messaging" - }, - "StreamingPull": { - "timeout_millis": 900000, - "retry_codes_name": "pull", - "retry_params_name": "streaming" - }, - "ModifyPushConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "CreateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "DeleteSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "Seek": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - 
"retry_params_name": "default" - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/packages/google-cloud-pubsub/google/cloud/proto/__init__.py b/packages/google-cloud-pubsub/google/cloud/proto/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/proto/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py deleted file mode 100644 index de40ea7ca058..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/__init__.py +++ /dev/null @@ -1 +0,0 @@ -__import__('pkg_resources').declare_namespace(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py deleted file mode 100644 index 8b137891791f..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index afaa55e4ef81..dba2ad09a3fd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -18,9 +18,8 @@ from google.cloud.pubsub_v1 import SubscriberClient from google.cloud.pubsub_v1 import types - __all__ = ( + 'types', 'PublisherClient', 'SubscriberClient', - 'types', ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py index 7343acde9d49..e6f3c0aae6e7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py @@ 
-15,11 +15,20 @@ from __future__ import absolute_import from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.publisher import Client as PublisherClient -from google.cloud.pubsub_v1.subscriber import Client as SubscriberClient +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import subscriber + + +class PublisherClient(publisher.Client): + __doc__ = publisher.Client.__doc__ + + +class SubscriberClient(subscriber.Client): + __doc__ = subscriber.Client.__doc__ + __all__ = ( + 'types', 'PublisherClient', 'SubscriberClient', - 'types', ) diff --git a/packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/__init__.py similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/gapic/pubsub/v1/__init__.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/__init__.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py new file mode 100644 index 000000000000..0e61814166dd --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -0,0 +1,681 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Publisher API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template + +from google.cloud.pubsub_v1.gapic import publisher_client_config +from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import field_mask_pb2 + + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-pubsub').version + + +class PublisherClient(object): + """ + The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + # The name of the interface for this client. 
This is the key used to find + # method configuration in the client_config dictionary + _INTERFACE_NAME = ('google.pubsub.v1.Publisher') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, ) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return google.api_core.path_template.expand( + 'projects/{project}/topics/{topic}', + project=project, + topic=topic, ) + + def __init__(self, + channel=None, + credentials=None, + client_config=publisher_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. If specified, then the ``credentials`` + argument is ignored. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): + A dictionary of call options for each method. If not specified + the default configuration is used. Generally, you only need + to set this if you're developing your own client library. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ """ + if channel is not None and credentials is not None: + raise ValueError( + 'channel and credentials arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__)) + + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES) + + self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel)) + self.publisher_stub = (pubsub_pb2.PublisherStub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + interface_config = client_config['interfaces'][self._INTERFACE_NAME] + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + interface_config) + + self._create_topic = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.CreateTopic, + default_retry=method_configs['CreateTopic'].retry, + default_timeout=method_configs['CreateTopic'].timeout, + client_info=client_info) + self._update_topic = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.UpdateTopic, + default_retry=method_configs['UpdateTopic'].retry, + default_timeout=method_configs['UpdateTopic'].timeout, + client_info=client_info) + self._publish = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.Publish, + default_retry=method_configs['Publish'].retry, + default_timeout=method_configs['Publish'].timeout, + client_info=client_info) + self._get_topic = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.GetTopic, + default_retry=method_configs['GetTopic'].retry, + default_timeout=method_configs['GetTopic'].timeout, + client_info=client_info) + self._list_topics = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.ListTopics, + default_retry=method_configs['ListTopics'].retry, + default_timeout=method_configs['ListTopics'].timeout, + client_info=client_info) + 
self._list_topic_subscriptions = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.ListTopicSubscriptions, + default_retry=method_configs['ListTopicSubscriptions'].retry, + default_timeout=method_configs['ListTopicSubscriptions'].timeout, + client_info=client_info) + self._delete_topic = google.api_core.gapic_v1.method.wrap_method( + self.publisher_stub.DeleteTopic, + default_retry=method_configs['DeleteTopic'].retry, + default_timeout=method_configs['DeleteTopic'].timeout, + client_info=client_info) + self._set_iam_policy = google.api_core.gapic_v1.method.wrap_method( + self.iam_policy_stub.SetIamPolicy, + default_retry=method_configs['SetIamPolicy'].retry, + default_timeout=method_configs['SetIamPolicy'].timeout, + client_info=client_info) + self._get_iam_policy = google.api_core.gapic_v1.method.wrap_method( + self.iam_policy_stub.GetIamPolicy, + default_retry=method_configs['GetIamPolicy'].retry, + default_timeout=method_configs['GetIamPolicy'].timeout, + client_info=client_info) + self._test_iam_permissions = google.api_core.gapic_v1.method.wrap_method( + self.iam_policy_stub.TestIamPermissions, + default_retry=method_configs['TestIamPermissions'].retry, + default_timeout=method_configs['TestIamPermissions'].timeout, + client_info=client_info) + + # Service calls + def create_topic(self, + name, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Creates the given topic with the given name. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> name = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> response = client.create_topic(name) + + Args: + name (str): The name of the topic. It must have the format + ``\"projects/{project}/topics/{topic}\"``. 
``{topic}`` must start with a letter, + and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent + signs (``%``). It must be between 3 and 255 characters in length, and it + must not start with ``\"goog\"``. + labels (dict[str -> str]): User labels. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Topic` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.Topic(name=name, labels=labels) + return self._create_topic(request, retry=retry, timeout=timeout) + + def update_topic(self, + topic, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Updates an existing topic. Note that certain properties of a topic are not + modifiable. Options settings follow the style guide: + NOTE: The style guide requires body: \"topic\" instead of body: \"*\". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> topic = {} + >>> update_mask = {} + >>> + >>> response = client.update_topic(topic, update_mask) + + Args: + topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): The topic to update. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Topic` + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. + Must be specified and non-empty. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Topic` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.UpdateTopicRequest( + topic=topic, update_mask=update_mask) + return self._update_topic(request, retry=retry, timeout=timeout) + + def publish(self, + topic, + messages, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> data = b'' + >>> messages_element = {'data': data} + >>> messages = [messages_element] + >>> + >>> response = client.publish(topic, messages) + + Args: + topic (str): The messages in the request will be published on this topic. 
+ Format is ``projects/{project}/topics/{topic}``. + messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): The messages to publish. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.PublishResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + return self._publish(request, retry=retry, timeout=timeout) + + def get_topic(self, + topic, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Gets the configuration of a topic. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> response = client.get_topic(topic) + + Args: + topic (str): The name of the topic to get. + Format is ``projects/{project}/topics/{topic}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ + Returns: + A :class:`~google.cloud.pubsub_v1.types.Topic` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.GetTopicRequest(topic=topic) + return self._get_topic(request, retry=retry, timeout=timeout) + + def list_topics(self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Lists matching topics. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> project = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_topics(project): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + project (str): The name of the cloud project that topics belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.gax.PageIterator` instance. 
By default, this + is an iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.ListTopicsRequest( + project=project, page_size=page_size) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_topics, retry=retry, timeout=timeout), + request=request, + items_field='topics', + request_token_field='page_token', + response_token_field='next_page_token') + return iterator + + def list_topic_subscriptions( + self, + topic, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Lists the name of the subscriptions for this topic. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_topic_subscriptions(topic): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + topic (str): The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. 
If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`str` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.ListTopicSubscriptionsRequest( + topic=topic, page_size=page_size) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_topic_subscriptions, retry=retry, timeout=timeout), + request=request, + items_field='subscriptions', + request_token_field='page_token', + response_token_field='next_page_token') + return iterator + + def delete_topic(self, + topic, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their ``topic`` field is set to ``_deleted-topic_``. 
+ + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> client.delete_topic(topic) + + Args: + topic (str): Name of the topic to delete. + Format is ``projects/{project}/topics/{topic}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.DeleteTopicRequest(topic=topic) + self._delete_topic(request, retry=retry, timeout=timeout) + + def set_iam_policy(self, + resource, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> policy = {} + >>> + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (str): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. 
An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Policy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, retry=retry, timeout=timeout) + + def get_iam_policy(self, + resource, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> response = client.get_iam_policy(resource) + + Args: + resource (str): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, retry=retry, timeout=timeout) + + def test_iam_permissions(self, + resource, + permissions, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> permissions = [] + >>> + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (str): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`_. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions( + request, retry=retry, timeout=timeout) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py new file mode 100644 index 000000000000..38aa0ca0ffbf --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -0,0 +1,93 @@ +config = { + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "one_plus_delivery": [ + "ABORTED", "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", + "RESOURCE_EXHAUSTED", "UNAVAILABLE", "UNKNOWN" + ], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 30000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateTopic": 
{ + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Publish": { + "timeout_millis": 60000, + "retry_codes_name": "one_plus_delivery", + "retry_params_name": "messaging", + "bundling": { + "element_count_threshold": 10, + "element_count_limit": 1000, + "request_byte_threshold": 1024, + "request_byte_limit": 10485760, + "delay_threshold_millis": 10 + } + }, + "GetTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopics": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTopicSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteTopic": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py new file mode 100644 index 000000000000..c019bd9e295f --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -0,0 +1,1239 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# EDITING INSTRUCTIONS +# This file was generated from the file +# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, +# and updates to that file get reflected here through a refresh process. +# For the short term, the refresh process will only be runnable by Google engineers. +# +# The only allowed edits are to method and file documentation. A 3-way +# merge preserves those additions if the generated source changes. +"""Accesses the google.pubsub.v1 Subscriber API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import google.api_core.protobuf_helpers + +from google.cloud.pubsub_v1.gapic import subscriber_client_config +from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-pubsub', ).version + + +class SubscriberClient(object): + """ + The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the ``Pull`` method. 
+ """ + + SERVICE_ADDRESS = 'pubsub.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary + _INTERFACE_NAME = ('google.pubsub.v1.Subscriber') + + @classmethod + def project_path(cls, project): + """Returns a fully-qualified project resource name string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, ) + + @classmethod + def snapshot_path(cls, project, snapshot): + """Returns a fully-qualified snapshot resource name string.""" + return google.api_core.path_template.expand( + 'projects/{project}/snapshots/{snapshot}', + project=project, + snapshot=snapshot, ) + + @classmethod + def subscription_path(cls, project, subscription): + """Returns a fully-qualified subscription resource name string.""" + return google.api_core.path_template.expand( + 'projects/{project}/subscriptions/{subscription}', + project=project, + subscription=subscription, ) + + @classmethod + def topic_path(cls, project, topic): + """Returns a fully-qualified topic resource name string.""" + return google.api_core.path_template.expand( + 'projects/{project}/topics/{topic}', + project=project, + topic=topic, ) + + def __init__(self, + channel=None, + credentials=None, + client_config=subscriber_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. If specified, then the ``credentials`` + argument is ignored. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): + A dictionary of call options for each method. If not specified + the default configuration is used. Generally, you only need + to set this if you're developing your own client library. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + if channel is not None and credentials is not None: + raise ValueError( + 'channel and credentials arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__)) + + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES) + + self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel)) + self.subscriber_stub = (pubsub_pb2.SubscriberStub(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + interface_config = client_config['interfaces'][self._INTERFACE_NAME] + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + interface_config) + + self._create_subscription = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.CreateSubscription, + default_retry=method_configs['CreateSubscription'].retry, + default_timeout=method_configs['CreateSubscription'].timeout, + client_info=client_info) + self._get_subscription = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.GetSubscription, + default_retry=method_configs['GetSubscription'].retry, + default_timeout=method_configs['GetSubscription'].timeout, + client_info=client_info) + self._update_subscription = google.api_core.gapic_v1.method.wrap_method( + 
self.subscriber_stub.UpdateSubscription, + default_retry=method_configs['UpdateSubscription'].retry, + default_timeout=method_configs['UpdateSubscription'].timeout, + client_info=client_info) + self._list_subscriptions = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.ListSubscriptions, + default_retry=method_configs['ListSubscriptions'].retry, + default_timeout=method_configs['ListSubscriptions'].timeout, + client_info=client_info) + self._delete_subscription = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.DeleteSubscription, + default_retry=method_configs['DeleteSubscription'].retry, + default_timeout=method_configs['DeleteSubscription'].timeout, + client_info=client_info) + self._modify_ack_deadline = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.ModifyAckDeadline, + default_retry=method_configs['ModifyAckDeadline'].retry, + default_timeout=method_configs['ModifyAckDeadline'].timeout, + client_info=client_info) + self._acknowledge = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.Acknowledge, + default_retry=method_configs['Acknowledge'].retry, + default_timeout=method_configs['Acknowledge'].timeout, + client_info=client_info) + self._pull = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.Pull, + default_retry=method_configs['Pull'].retry, + default_timeout=method_configs['Pull'].timeout, + client_info=client_info) + self._streaming_pull = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.StreamingPull, + default_retry=method_configs['StreamingPull'].retry, + default_timeout=method_configs['StreamingPull'].timeout, + client_info=client_info) + self._modify_push_config = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.ModifyPushConfig, + default_retry=method_configs['ModifyPushConfig'].retry, + default_timeout=method_configs['ModifyPushConfig'].timeout, + client_info=client_info) + self._list_snapshots = 
google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.ListSnapshots, + default_retry=method_configs['ListSnapshots'].retry, + default_timeout=method_configs['ListSnapshots'].timeout, + client_info=client_info) + self._create_snapshot = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.CreateSnapshot, + default_retry=method_configs['CreateSnapshot'].retry, + default_timeout=method_configs['CreateSnapshot'].timeout, + client_info=client_info) + self._update_snapshot = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.UpdateSnapshot, + default_retry=method_configs['UpdateSnapshot'].retry, + default_timeout=method_configs['UpdateSnapshot'].timeout, + client_info=client_info) + self._delete_snapshot = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.DeleteSnapshot, + default_retry=method_configs['DeleteSnapshot'].retry, + default_timeout=method_configs['DeleteSnapshot'].timeout, + client_info=client_info) + self._seek = google.api_core.gapic_v1.method.wrap_method( + self.subscriber_stub.Seek, + default_retry=method_configs['Seek'].retry, + default_timeout=method_configs['Seek'].timeout, + client_info=client_info) + self._set_iam_policy = google.api_core.gapic_v1.method.wrap_method( + self.iam_policy_stub.SetIamPolicy, + default_retry=method_configs['SetIamPolicy'].retry, + default_timeout=method_configs['SetIamPolicy'].timeout, + client_info=client_info) + self._get_iam_policy = google.api_core.gapic_v1.method.wrap_method( + self.iam_policy_stub.GetIamPolicy, + default_retry=method_configs['GetIamPolicy'].retry, + default_timeout=method_configs['GetIamPolicy'].timeout, + client_info=client_info) + self._test_iam_permissions = google.api_core.gapic_v1.method.wrap_method( + self.iam_policy_stub.TestIamPermissions, + default_retry=method_configs['TestIamPermissions'].retry, + default_timeout=method_configs['TestIamPermissions'].timeout, + client_info=client_info) + + # Service calls + def 
create_subscription(self, + name, + topic, + push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Creates a subscription to a given topic. + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + `resource name format <https://cloud.google.com/pubsub/docs/overview#names>`_. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> response = client.create_subscription(name, topic) + + Args: + name (str): The name of the subscription. It must have the format + ``\"projects/{project}/subscriptions/{subscription}\"``. ``{subscription}`` must + start with a letter, and contain only letters (``[A-Za-z]``), numbers + (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters + in length, and it must not start with ``\"goog\"``. + topic (str): The name of the topic from which this subscription is receiving messages. + Format is ``projects/{project}/topics/{topic}``. + The value of this field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is + used to configure it. 
An empty ``pushConfig`` signifies that the subscriber + will pull and ack messages using API methods. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.PushConfig` + ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message + before the subscriber should acknowledge the message. After message + delivery but before the ack deadline expires and before the message is + acknowledged, it is an outstanding message and will not be delivered + again during that time (on a best-effort basis). + + For pull subscriptions, this value is used as the initial value for the ack + deadline. To override this value for a given message, call + ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using + pull. + The minimum custom deadline you can specify is 10 seconds. + The maximum custom deadline you can specify is 600 seconds (10 minutes). + If this parameter is 0, a default value of 10 seconds is used. + + For push delivery, this value is also used to set the request timeout for + the call to the push endpoint. + + If the subscriber never acknowledges the message, the Pub/Sub + system will eventually redeliver the message. + retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then + messages are not expunged from the subscription's backlog, even if they are + acknowledged, until they fall out of the ``message_retention_duration`` + window. + message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's backlog, + from the moment a message is published. + If ``retain_acked_messages`` is true, then this also configures the retention + of acknowledged messages, and thus configures how far back in time a ``Seek`` + can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + minutes. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Duration` + labels (dict[str -> str]): User labels. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.Subscription( + name=name, + topic=topic, + push_config=push_config, + ack_deadline_seconds=ack_deadline_seconds, + retain_acked_messages=retain_acked_messages, + message_retention_duration=message_retention_duration, + labels=labels) + return self._create_subscription(request, retry=retry, timeout=timeout) + + def get_subscription(self, + subscription, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Gets the configuration details of a subscription. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> response = client.get_subscription(subscription) + + Args: + subscription (str): The name of the subscription to get. + Format is ``projects/{project}/subscriptions/{sub}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + return self._get_subscription(request, retry=retry, timeout=timeout) + + def update_subscription(self, + subscription, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Updates an existing subscription. Note that certain properties of a + subscription, such as its topic, are not modifiable. + NOTE: The style guide requires body: \"subscription\" instead of body: \"*\". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = {} + >>> update_mask = {} + >>> + >>> response = client.update_subscription(subscription, update_mask) + + Args: + subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): The updated subscription object. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Subscription` + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided subscription to update. + Must be specified and non-empty. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.UpdateSubscriptionRequest( + subscription=subscription, update_mask=update_mask) + return self._update_subscription(request, retry=retry, timeout=timeout) + + def list_subscriptions(self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Lists matching subscriptions. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> project = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_subscriptions(project): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + project (str): The name of the cloud project that subscriptions belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. 
If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.ListSubscriptionsRequest( + project=project, page_size=page_size) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_subscriptions, retry=retry, timeout=timeout), + request=request, + items_field='subscriptions', + request_token_field='page_token', + response_token_field='next_page_token') + return iterator + + def delete_subscription(self, + subscription, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Deletes an existing subscription. All messages retained in the subscription + are immediately dropped. Calls to ``Pull`` after deletion will return + ``NOT_FOUND``. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. 
+ + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> client.delete_subscription(subscription) + + Args: + subscription (str): The subscription to delete. + Format is ``projects/{project}/subscriptions/{sub}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.DeleteSubscriptionRequest( + subscription=subscription) + self._delete_subscription(request, retry=retry, timeout=timeout) + + def modify_ack_deadline(self, + subscription, + ack_ids, + ack_deadline_seconds, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Modifies the ack deadline for a specific message. This method is useful + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level ``ackDeadlineSeconds`` used for subsequent messages. 
+ + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> ack_deadline_seconds = 0 + >>> + >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) + + Args: + subscription (str): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[str]): List of acknowledgment IDs. + ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to + the Pub/Sub system. For example, if the value is 10, the new + ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero may immediately make the message available for + another pull request. + The minimum deadline you can specify is 0 seconds. + The maximum deadline you can specify is 600 seconds (10 minutes). + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + request = pubsub_pb2.ModifyAckDeadlineRequest( + subscription=subscription, + ack_ids=ack_ids, + ack_deadline_seconds=ack_deadline_seconds) + self._modify_ack_deadline(request, retry=retry, timeout=timeout) + + def acknowledge(self, + subscription, + ack_ids, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages + from the subscription. + + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> ack_ids = [] + >>> + >>> client.acknowledge(subscription, ack_ids) + + Args: + subscription (str): The subscription whose message is being acknowledged. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[str]): The acknowledgment ID for the messages being acknowledged that was returned + by the Pub/Sub system in the ``Pull`` response. Must not be empty. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + request = pubsub_pb2.AcknowledgeRequest( + subscription=subscription, ack_ids=ack_ids) + self._acknowledge(request, retry=retry, timeout=timeout) + + def pull(self, + subscription, + max_messages, + return_immediately=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Pulls messages from the server. Returns an empty list if there are no + messages available in the backlog. The server may return ``UNAVAILABLE`` if + there are too many concurrent pull requests pending for the given + subscription. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> max_messages = 0 + >>> + >>> response = client.pull(subscription, max_messages) + + Args: + subscription (str): The subscription from which messages should be pulled. + Format is ``projects/{project}/subscriptions/{sub}``. + max_messages (int): The maximum number of messages returned for this request. The Pub/Sub + system may return fewer than the number specified. + return_immediately (bool): If this field set to true, the system will respond immediately even if + it there are no messages available to return in the ``Pull`` response. + Otherwise, the system may wait (for a bounded amount of time) until at + least one message is available, rather than returning no messages. The + client may cancel the request if it does not wish to wait any longer for + the response. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.PullResponse` instance. 
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.PullRequest( + subscription=subscription, + max_messages=max_messages, + return_immediately=return_immediately) + return self._pull(request, retry=retry, timeout=timeout) + + def streaming_pull(self, + requests, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will + respond with UNIMPLEMENTED errors unless you have been invited to test + this feature. Contact cloud-pubsub@google.com with any questions. + + Establishes a stream with the server, which sends messages down to the + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status ``OK`` to reassign + server-side resources, in which case, the client should re-establish the + stream. ``UNAVAILABLE`` may also be returned in the case of a transient error + (e.g., a server restart). These should also be retried by the client. Flow + control can be achieved by configuring the underlying RPC channel. + + EXPERIMENTAL: This method interface might change in the future. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> stream_ack_deadline_seconds = 0 + >>> request = {'subscription': subscription, 'stream_ack_deadline_seconds': stream_ack_deadline_seconds} + >>> + >>> requests = [request] + >>> for element in client.streaming_pull(requests): + ... # process element + ... 
pass + + Args: + requests (iterator[dict|google.cloud.pubsub_v1.proto.pubsub_pb2.StreamingPullRequest]): The input objects. If a dict is provided, it must be of the + same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.StreamingPullRequest` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + return self._streaming_pull(requests, retry=retry, timeout=timeout) + + def modify_push_config(self, + subscription, + push_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one (signified by + an empty ``PushConfig``) or vice versa, or change the endpoint URL and other + attributes of a push subscription. Messages will accumulate for delivery + continuously through the call regardless of changes to the ``PushConfig``. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> push_config = {} + >>> + >>> client.modify_push_config(subscription, push_config) + + Args: + subscription (str): The name of the subscription. + Format is ``projects/{project}/subscriptions/{sub}``. 
+ push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system should + stop pushing messages from the given subscription and allow + messages to be pulled and acknowledged - effectively pausing + the subscription if ``Pull`` is not called. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.PushConfig` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.ModifyPushConfigRequest( + subscription=subscription, push_config=push_config) + self._modify_push_config(request, retry=retry, timeout=timeout) + + def list_snapshots(self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Lists the existing snapshots. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> project = client.project_path('[PROJECT]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_snapshots(project): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... 
pass + + Args: + project (str): The name of the cloud project that snapshots belong to. + Format is ``projects/{project}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.ListSnapshotsRequest( + project=project, page_size=page_size) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._list_snapshots, retry=retry, timeout=timeout), + request=request, + items_field='snapshots', + request_token_field='page_token', + response_token_field='next_page_token') + return iterator + + def create_snapshot(self, + name, + subscription, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Creates a snapshot from the requested subscription. + If the snapshot already exists, returns ``ALREADY_EXISTS``. 
+ If the requested subscription doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + `resource name format <https://cloud.google.com/pubsub/docs/overview#names>`_. + The generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the request. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> response = client.create_snapshot(name, subscription) + + Args: + name (str): Optional user-provided name for this snapshot. + If the name is not provided in the request, the server will assign a random + name for this snapshot on the same project as the subscription. + Note that for REST API requests, you must specify a name. + Format is ``projects/{project}/snapshots/{snap}``. + subscription (str): The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: + + * The existing backlog on the subscription. More precisely, this is + defined as the messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + `CreateSnapshot` request; as well as: + * Any messages published to the subscription's topic following the + successful completion of the CreateSnapshot request. + + Format is ``projects/{project}/subscriptions/{sub}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt.
+ + Returns: + A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.CreateSnapshotRequest( + name=name, subscription=subscription) + return self._create_snapshot(request, retry=retry, timeout=timeout) + + def update_snapshot(self, + snapshot, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Updates an existing snapshot. Note that certain properties of a snapshot + are not modifiable. + NOTE: The style guide requires body: \"snapshot\" instead of body: \"*\". + Keeping the latter for internal consistency in V1, however it should be + corrected in V2. See + https://cloud.google.com/apis/design/standard_methods#update for details. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> snapshot = {} + >>> update_mask = {} + >>> + >>> response = client.update_snapshot(snapshot, update_mask) + + Args: + snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): The updated snapshot object. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Snapshot` + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided snapshot to update. + Must be specified and non-empty. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried.
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.UpdateSnapshotRequest( + snapshot=snapshot, update_mask=update_mask) + return self._update_snapshot(request, retry=retry, timeout=timeout) + + def delete_snapshot(self, + snapshot, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Removes an existing snapshot. All messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> + >>> client.delete_snapshot(snapshot) + + Args: + snapshot (str): The name of the snapshot to delete. + Format is ``projects/{project}/snapshots/{snap}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + self._delete_snapshot(request, retry=retry, timeout=timeout) + + def seek(self, + subscription, + time=None, + snapshot=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Seeks an existing subscription to a point in time or to a given snapshot, + whichever is provided in the request. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> response = client.seek(subscription) + + Args: + subscription (str): The subscription to affect. + time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. + Messages retained in the subscription that were published before this + time are marked as acknowledged, and messages retained in the + subscription that were published after this time are marked as + unacknowledged. Note that this operation affects only those messages + retained in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). For example, + if ``time`` corresponds to a point before the message retention + window (or to a point before the system's notion of the subscription + creation time), only retained messages will be marked as unacknowledged, + and already-expunged messages will not be restored. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Timestamp` + snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as that of + the provided subscription. + Format is ``projects/{project}/snapshots/{snap}``. 
+ retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.SeekResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Sanity check: We have some fields which are mutually exclusive; + # raise ValueError if more than one is sent. + google.api_core.protobuf_helpers.check_oneof( + time=time, + snapshot=snapshot, ) + + request = pubsub_pb2.SeekRequest( + subscription=subscription, time=time, snapshot=snapshot) + return self._seek(request, retry=retry, timeout=timeout) + + def set_iam_policy(self, + resource, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Sets the access control policy on the specified resource. Replaces any + existing policy. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> policy = {} + >>> + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (str): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. 
An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.Policy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + return self._set_iam_policy(request, retry=retry, timeout=timeout) + + def get_iam_policy(self, + resource, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> response = client.get_iam_policy(resource) + + Args: + resource (str): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._get_iam_policy(request, retry=retry, timeout=timeout) + + def test_iam_permissions(self, + resource, + permissions, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> permissions = [] + >>> + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (str): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. 
Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + return self._test_iam_permissions( + request, retry=retry, timeout=timeout) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py new file mode 100644 index 000000000000..896d2002cb12 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -0,0 +1,135 @@ +config = { + "interfaces": { + "google.pubsub.v1.Subscriber": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], + "pull": [ + "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", + "RESOURCE_EXHAUSTED", "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + }, + "messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 12000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 12000, + "total_timeout_millis": 600000 + }, + "streaming_messaging": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 600000, + 
"rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 600000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListSubscriptions": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ModifyAckDeadline": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "Acknowledge": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "messaging" + }, + "Pull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "messaging" + }, + "StreamingPull": { + "timeout_millis": 60000, + "retry_codes_name": "pull", + "retry_params_name": "streaming_messaging" + }, + "ModifyPushConfig": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListSnapshots": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "UpdateSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "Seek": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + 
"SetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py similarity index 98% rename from packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index aeee99e182d0..932c7c1a7ac0 100644 --- a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -1,5 +1,5 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/proto/pubsub/v1/pubsub.proto +# source: google/cloud/pubsub_v1/proto/pubsub.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) @@ -21,13 +21,12 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/proto/pubsub/v1/pubsub.proto', + name='google/cloud/pubsub_v1/proto/pubsub.proto', package='google.pubsub.v1', syntax='proto3', - serialized_pb=_b('\n)google/cloud/proto/pubsub/v1/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"y\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 
\x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\
x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + serialized_pb=_b('\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"y\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 
\x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 
\x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 
\x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\
x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\
xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -1643,17 +1642,18 @@ DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST DESCRIPTOR.message_types_by_name['SeekRequest'] = _SEEKREQUEST DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( LabelsEntry = 
_reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( DESCRIPTOR = _TOPIC_LABELSENTRY, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) )) , DESCRIPTOR = _TOPIC, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """A topic resource. @@ -1679,12 +1679,12 @@ AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( DESCRIPTOR = _PUBSUBMESSAGE_ATTRIBUTESENTRY, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) )) , DESCRIPTOR = _PUBSUBMESSAGE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """A message data and its attributes. The message payload must not be empty; it must contain either a non-empty data field, or at least one @@ -1714,7 +1714,7 @@ GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( DESCRIPTOR = _GETTOPICREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the GetTopic method. @@ -1730,7 +1730,7 @@ UpdateTopicRequest = _reflection.GeneratedProtocolMessageType('UpdateTopicRequest', (_message.Message,), dict( DESCRIPTOR = _UPDATETOPICREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the UpdateTopic method. 
@@ -1748,7 +1748,7 @@ PublishRequest = _reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( DESCRIPTOR = _PUBLISHREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the Publish method. @@ -1766,7 +1766,7 @@ PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( DESCRIPTOR = _PUBLISHRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``Publish`` method. @@ -1783,7 +1783,7 @@ ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``ListTopics`` method. @@ -1806,7 +1806,7 @@ ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``ListTopics`` method. @@ -1825,7 +1825,7 @@ ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``ListTopicSubscriptions`` method. 
@@ -1848,7 +1848,7 @@ ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``ListTopicSubscriptions`` method. @@ -1867,7 +1867,7 @@ DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( DESCRIPTOR = _DELETETOPICREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``DeleteTopic`` method. @@ -1885,12 +1885,12 @@ LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( DESCRIPTOR = _SUBSCRIPTION_LABELSENTRY, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) )) , DESCRIPTOR = _SUBSCRIPTION, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """A subscription resource. @@ -1955,12 +1955,12 @@ AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( DESCRIPTOR = _PUSHCONFIG_ATTRIBUTESENTRY, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) )) , DESCRIPTOR = _PUSHCONFIG, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Configuration for a push delivery endpoint. 
@@ -1997,7 +1997,7 @@ ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( DESCRIPTOR = _RECEIVEDMESSAGE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """A message and its corresponding acknowledgment ID. @@ -2014,7 +2014,7 @@ GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the GetSubscription method. @@ -2030,7 +2030,7 @@ UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the UpdateSubscription method. @@ -2048,7 +2048,7 @@ ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``ListSubscriptions`` method. @@ -2071,7 +2071,7 @@ ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``ListSubscriptions`` method. 
@@ -2090,7 +2090,7 @@ DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the DeleteSubscription method. @@ -2106,7 +2106,7 @@ ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ModifyPushConfig method. @@ -2128,7 +2128,7 @@ PullRequest = _reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( DESCRIPTOR = _PULLREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``Pull`` method. @@ -2155,7 +2155,7 @@ PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( DESCRIPTOR = _PULLRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``Pull`` method. @@ -2174,7 +2174,7 @@ ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ModifyAckDeadline method. 
@@ -2201,7 +2201,7 @@ AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( DESCRIPTOR = _ACKNOWLEDGEREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the Acknowledge method. @@ -2221,7 +2221,7 @@ StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( DESCRIPTOR = _STREAMINGPULLREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``StreamingPull`` streaming RPC method. This request is used to establish the initial stream as well as to stream @@ -2277,7 +2277,7 @@ StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( DESCRIPTOR = _STREAMINGPULLRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``StreamingPull`` method. This response is used to stream messages from the server to the client. @@ -2293,7 +2293,7 @@ CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _CREATESNAPSHOTREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``CreateSnapshot`` method. @@ -2323,7 +2323,7 @@ UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _UPDATESNAPSHOTREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the UpdateSnapshot method. 
@@ -2343,12 +2343,12 @@ LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( DESCRIPTOR = _SNAPSHOT_LABELSENTRY, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) )) , DESCRIPTOR = _SNAPSHOT, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """A snapshot resource. @@ -2380,7 +2380,7 @@ ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``ListSnapshots`` method. @@ -2403,7 +2403,7 @@ ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Response for the ``ListSnapshots`` method. @@ -2422,7 +2422,7 @@ DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _DELETESNAPSHOTREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``DeleteSnapshot`` method. @@ -2438,7 +2438,7 @@ SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( DESCRIPTOR = _SEEKREQUEST, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , __doc__ = """Request for the ``Seek`` method. 
@@ -2470,7 +2470,7 @@ SeekResponse = _reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( DESCRIPTOR = _SEEKRESPONSE, - __module__ = 'google.cloud.proto.pubsub.v1.pubsub_pb2' + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) )) _sym_db.RegisterMessage(SeekResponse) diff --git a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py similarity index 65% rename from packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index 06dd470470d8..b0e76ca0fa44 100644 --- a/packages/google-cloud-pubsub/google/cloud/proto/pubsub/v1/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -1,8 +1,8 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
import grpc -import google.cloud.proto.pubsub.v1.pubsub_pb2 as google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2 -import google.protobuf.empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.cloud.pubsub_v1.proto import pubsub_pb2 as google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class SubscriberStub(object): @@ -18,78 +18,78 @@ def __init__(self, channel): """ self.CreateSubscription = channel.unary_unary( '/google.pubsub.v1.Subscriber/CreateSubscription', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, ) self.GetSubscription = channel.unary_unary( '/google.pubsub.v1.Subscriber/GetSubscription', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, ) self.UpdateSubscription = channel.unary_unary( '/google.pubsub.v1.Subscriber/UpdateSubscription', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, + 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, ) self.ListSubscriptions = channel.unary_unary( '/google.pubsub.v1.Subscriber/ListSubscriptions', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, ) self.DeleteSubscription = channel.unary_unary( '/google.pubsub.v1.Subscriber/DeleteSubscription', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.ModifyAckDeadline = channel.unary_unary( '/google.pubsub.v1.Subscriber/ModifyAckDeadline', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.Acknowledge = channel.unary_unary( '/google.pubsub.v1.Subscriber/Acknowledge', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, 
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.Pull = channel.unary_unary( '/google.pubsub.v1.Subscriber/Pull', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, ) self.StreamingPull = channel.stream_stream( '/google.pubsub.v1.Subscriber/StreamingPull', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, ) self.ModifyPushConfig = channel.unary_unary( '/google.pubsub.v1.Subscriber/ModifyPushConfig', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.ListSnapshots = channel.unary_unary( '/google.pubsub.v1.Subscriber/ListSnapshots', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, ) self.CreateSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/CreateSnapshot', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, ) self.UpdateSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/UpdateSnapshot', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, ) self.DeleteSnapshot = channel.unary_unary( '/google.pubsub.v1.Subscriber/DeleteSnapshot', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.Seek = channel.unary_unary( '/google.pubsub.v1.Subscriber/Seek', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, ) @@ -273,78 +273,78 @@ def add_SubscriberServicer_to_server(servicer, server): rpc_method_handlers = { 'CreateSubscription': grpc.unary_unary_rpc_method_handler( servicer.CreateSubscription, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, ), 'GetSubscription': grpc.unary_unary_rpc_method_handler( servicer.GetSubscription, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, ), 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( servicer.UpdateSubscription, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Subscription.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, + 
response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, ), 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( servicer.ListSubscriptions, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, ), 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( servicer.DeleteSubscription, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( servicer.ModifyAckDeadline, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'Acknowledge': grpc.unary_unary_rpc_method_handler( servicer.Acknowledge, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.AcknowledgeRequest.FromString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'Pull': grpc.unary_unary_rpc_method_handler( servicer.Pull, - 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PullResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.SerializeToString, ), 'StreamingPull': grpc.stream_stream_rpc_method_handler( servicer.StreamingPull, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, ), 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( servicer.ModifyPushConfig, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'ListSnapshots': grpc.unary_unary_rpc_method_handler( servicer.ListSnapshots, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, ), 'CreateSnapshot': 
grpc.unary_unary_rpc_method_handler( servicer.CreateSnapshot, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.CreateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, ), 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( servicer.UpdateSnapshot, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Snapshot.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, ), 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( servicer.DeleteSnapshot, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), 'Seek': grpc.unary_unary_rpc_method_handler( servicer.Seek, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.SeekResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.SerializeToString, ), } generic_handler = 
grpc.method_handlers_generic_handler( @@ -365,37 +365,37 @@ def __init__(self, channel): """ self.CreateTopic = channel.unary_unary( '/google.pubsub.v1.Publisher/CreateTopic', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, ) self.UpdateTopic = channel.unary_unary( '/google.pubsub.v1.Publisher/UpdateTopic', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, ) self.Publish = channel.unary_unary( '/google.pubsub.v1.Publisher/Publish', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, ) self.GetTopic = channel.unary_unary( '/google.pubsub.v1.Publisher/GetTopic', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, + 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, ) self.ListTopics = channel.unary_unary( '/google.pubsub.v1.Publisher/ListTopics', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, ) self.ListTopicSubscriptions = channel.unary_unary( '/google.pubsub.v1.Publisher/ListTopicSubscriptions', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, ) self.DeleteTopic = channel.unary_unary( '/google.pubsub.v1.Publisher/DeleteTopic', - request_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) @@ -470,37 +470,37 @@ def add_PublisherServicer_to_server(servicer, server): rpc_method_handlers = { 'CreateTopic': grpc.unary_unary_rpc_method_handler( servicer.CreateTopic, - 
request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, ), 'UpdateTopic': grpc.unary_unary_rpc_method_handler( servicer.UpdateTopic, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.UpdateTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, ), 'Publish': grpc.unary_unary_rpc_method_handler( servicer.Publish, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.PublishResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, ), 'GetTopic': grpc.unary_unary_rpc_method_handler( servicer.GetTopic, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.GetTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.Topic.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, ), 'ListTopics': 
grpc.unary_unary_rpc_method_handler( servicer.ListTopics, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, ), 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( servicer.ListTopicSubscriptions, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, ), 'DeleteTopic': grpc.unary_unary_rpc_method_handler( servicer.DeleteTopic, - request_deserializer=google_dot_cloud_dot_proto_dot_pubsub_dot_v1_dot_pubsub__pb2.DeleteTopicRequest.FromString, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index d1375372df28..4d00abe22504 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -23,14 +23,14 @@ import six from google.api_core import grpc_helpers -from google.cloud.gapic.pubsub.v1 import 
publisher_client from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1.publisher.batch import thread -__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version +__version__ = pkg_resources.get_distribution('google-cloud-pubsub').version @_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) @@ -69,9 +69,9 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # keepalive options. if 'channel' not in kwargs: kwargs['channel'] = grpc_helpers.create_channel( - credentials=kwargs.get('credentials', None), + credentials=kwargs.pop('credentials', None), target=self.target, - scopes=publisher_client.PublisherClient._ALL_SCOPES, + scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, options={ 'grpc.max_send_message_length': -1, 'grpc.max_receive_message_length': -1, @@ -80,8 +80,6 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. - kwargs['lib_name'] = 'gccl' - kwargs['lib_version'] = __VERSION__ self.api = publisher_client.PublisherClient(**kwargs) self.batch_settings = types.BatchSettings(*batch_settings) @@ -98,10 +96,7 @@ def target(self): Returns: str: The location of the API. """ - return '{host}:{port}'.format( - host=publisher_client.PublisherClient.SERVICE_ADDRESS, - port=publisher_client.PublisherClient.DEFAULT_SERVICE_PORT, - ) + return publisher_client.PublisherClient.SERVICE_ADDRESS def batch(self, topic, message, create=True, autocommit=True): """Return the current batch for the provided topic. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 127675683af1..099f86336b4a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -117,9 +117,10 @@ """ import logging -import queue import threading +from six.moves import queue + from google.cloud.pubsub_v1.subscriber import _helper_threads _LOGGER = logging.getLogger(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 8e5f377834bc..6c9b2fccb10d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -20,14 +20,14 @@ import grpc from google.api_core import grpc_helpers -from google.cloud.gapic.pubsub.v1 import subscriber_client from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.gapic import subscriber_client from google.cloud.pubsub_v1.subscriber.policy import thread -__VERSION__ = pkg_resources.get_distribution('google-cloud-pubsub').version +__version__ = pkg_resources.get_distribution('google-cloud-pubsub').version @_gapic.add_methods(subscriber_client.SubscriberClient, @@ -66,9 +66,9 @@ def __init__(self, policy_class=thread.Policy, **kwargs): # keepalive options. 
if 'channel' not in kwargs: kwargs['channel'] = grpc_helpers.create_channel( - credentials=kwargs.get('credentials', None), + credentials=kwargs.pop('credentials', None), target=self.target, - scopes=subscriber_client.SubscriberClient._ALL_SCOPES, + scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, options={ 'grpc.max_send_message_length': -1, 'grpc.max_receive_message_length': -1, @@ -78,8 +78,6 @@ def __init__(self, policy_class=thread.Policy, **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. - kwargs['lib_name'] = 'gccl' - kwargs['lib_version'] = __VERSION__ self.api = subscriber_client.SubscriberClient(**kwargs) # The subcription class is responsible to retrieving and dispatching @@ -93,10 +91,7 @@ def target(self): Returns: str: The location of the API. """ - return '{host}:{port}'.format( - host=subscriber_client.SubscriberClient.SERVICE_ADDRESS, - port=subscriber_client.SubscriberClient.DEFAULT_SERVICE_PORT, - ) + return subscriber_client.SubscriberClient.SERVICE_ADDRESS def subscribe(self, subscription, callback=None, flow_control=()): """Return a representation of an individual subscription. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 30bb540cea2f..8444132c1bc5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -15,11 +15,11 @@ from __future__ import absolute_import from concurrent import futures -from queue import Queue import logging import threading import grpc +from six.moves import queue as queue_mod from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _helper_threads @@ -69,7 +69,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), # Create a queue for keeping track of shared state. 
if queue is None: - queue = Queue() + queue = queue_mod.Queue() self._request_queue = queue # Call the superclass constructor. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 23a055a31954..f601915d0a60 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -13,14 +13,21 @@ # limitations under the License. from __future__ import absolute_import - import collections +import psutil import sys -import psutil +from google.api_core.protobuf_helpers import get_messages -from google.cloud.proto.pubsub.v1 import pubsub_pb2 -from google.gax.utils.messages import get_messages +from google.api import http_pb2 +from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.iam.v1.logging import audit_data_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 from google.protobuf import timestamp_pb2 @@ -55,16 +62,28 @@ ) -# Pub/Sub uses timestamps from the common protobuf package. -# Do not make users import from there. 
-Timestamp = timestamp_pb2.Timestamp - - -_names = ['BatchSettings', 'FlowControl', 'Timestamp'] +names = ['BatchSettings', 'FlowControl'] for name, message in get_messages(pubsub_pb2).items(): message.__module__ = 'google.cloud.pubsub_v1.types' setattr(sys.modules[__name__], name, message) - _names.append(name) + names.append(name) + + +for module in ( + http_pb2, + pubsub_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, ): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.pubsub_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) -__all__ = tuple(sorted(_names)) +__all__ = tuple(sorted(names)) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index c347de589917..b7f45a7c38db 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,9 +51,8 @@ REQUIREMENTS = [ - 'google-cloud-core[grpc] >= 0.28.0, < 0.29dev', - 'google-api-core >= 0.1.1, < 0.2.0dev', - 'google-gax >= 0.15.13, < 0.16dev', + 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', + 'google-auth >= 1.0.2, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', ] @@ -66,10 +65,6 @@ namespace_packages=[ 'google', 'google.cloud', - 'google.cloud.gapic', - 'google.cloud.gapic.pubsub', - 'google.cloud.proto', - 'google.cloud.proto.pubsub', ], packages=find_packages(exclude=('tests*',)), install_requires=REQUIREMENTS, diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py new file mode 100644 index 000000000000..13b3bc6fbc1c --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -0,0 +1,415 @@ +# Copyright 2017, Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Unit tests.""" + +import pytest + +from google.cloud.pubsub_v1.gapic import publisher_client +from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestPublisherClient(object): + def test_create_topic(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = pubsub_pb2.Topic(**expected_response) + + # Mock the API 
response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + name = client.topic_path('[PROJECT]', '[TOPIC]') + + response = client.create_topic(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.Topic(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_topic_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + name = client.topic_path('[PROJECT]', '[TOPIC]') + + with pytest.raises(CustomException): + client.create_topic(name) + + def test_update_topic(self): + # Setup Expected Response + name = 'name3373707' + expected_response = {'name': name} + expected_response = pubsub_pb2.Topic(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + topic = {} + update_mask = {} + + response = client.update_topic(topic, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.UpdateTopicRequest( + topic=topic, update_mask=update_mask) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_topic_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + topic = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_topic(topic, update_mask) + + def test_publish(self): + # Setup Expected Response + message_ids_element = 'messageIdsElement-744837059' + message_ids = [message_ids_element] + expected_response = {'message_ids': message_ids} + expected_response 
= pubsub_pb2.PublishResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + data = b'-86' + messages_element = {'data': data} + messages = [messages_element] + + response = client.publish(topic, messages) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.PublishRequest( + topic=topic, messages=messages) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_publish_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + data = b'-86' + messages_element = {'data': data} + messages = [messages_element] + + with pytest.raises(CustomException): + client.publish(topic, messages) + + def test_get_topic(self): + # Setup Expected Response + name = 'name3373707' + expected_response = {'name': name} + expected_response = pubsub_pb2.Topic(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + response = client.get_topic(topic) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.GetTopicRequest(topic=topic) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_topic_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + with 
pytest.raises(CustomException): + client.get_topic(topic) + + def test_list_topics(self): + # Setup Expected Response + next_page_token = '' + topics_element = {} + topics = [topics_element] + expected_response = { + 'next_page_token': next_page_token, + 'topics': topics + } + expected_response = pubsub_pb2.ListTopicsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + project = client.project_path('[PROJECT]') + + paged_list_response = client.list_topics(project) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.topics[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.ListTopicsRequest(project=project) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_topics_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + project = client.project_path('[PROJECT]') + + paged_list_response = client.list_topics(project) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_list_topic_subscriptions(self): + # Setup Expected Response + next_page_token = '' + subscriptions_element = 'subscriptionsElement1698708147' + subscriptions = [subscriptions_element] + expected_response = { + 'next_page_token': next_page_token, + 'subscriptions': subscriptions + } + expected_response = pubsub_pb2.ListTopicSubscriptionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + paged_list_response = client.list_topic_subscriptions(topic) + resources = list(paged_list_response) + assert len(resources) 
== 1 + + assert expected_response.subscriptions[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.ListTopicSubscriptionsRequest( + topic=topic) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_topic_subscriptions_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + paged_list_response = client.list_topic_subscriptions(topic) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_delete_topic(self): + channel = ChannelStub() + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + client.delete_topic(topic) + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.DeleteTopicRequest(topic=topic) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_topic_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + with pytest.raises(CustomException): + client.delete_topic(topic) + + def test_set_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + resource = client.topic_path('[PROJECT]', '[TOPIC]') + policy = {} + + response = client.set_iam_policy(resource, policy) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = 
iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_set_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + resource = client.topic_path('[PROJECT]', '[TOPIC]') + policy = {} + + with pytest.raises(CustomException): + client.set_iam_policy(resource, policy) + + def test_get_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + resource = client.topic_path('[PROJECT]', '[TOPIC]') + + response = client.get_iam_policy(resource) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + resource = client.topic_path('[PROJECT]', '[TOPIC]') + + with pytest.raises(CustomException): + client.get_iam_policy(resource) + + def test_test_iam_permissions(self): + # Setup Expected Response + expected_response = {} + expected_response = iam_policy_pb2.TestIamPermissionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup Request + resource = client.topic_path('[PROJECT]', '[TOPIC]') + permissions = [] 
+ + response = client.test_iam_permissions(resource, permissions) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_test_iam_permissions_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = publisher_client.PublisherClient(channel=channel) + + # Setup request + resource = client.topic_path('[PROJECT]', '[TOPIC]') + permissions = [] + + with pytest.raises(CustomException): + client.test_iam_permissions(resource, permissions) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py new file mode 100644 index 000000000000..fd3e04c044fd --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -0,0 +1,712 @@ +# Copyright 2017, Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Unit tests.""" + +import pytest + +from google.cloud.pubsub_v1.gapic import subscriber_client +from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + def stream_stream(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestSubscriberClient(object): + def test_create_subscription(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + topic_2 = 'topic2-1139259102' + ack_deadline_seconds = 2135351438 + retain_acked_messages = False + expected_response = { + 'name': name_2, + 'topic': topic_2, + 'ack_deadline_seconds': ack_deadline_seconds, + 'retain_acked_messages': retain_acked_messages + } + expected_response = pubsub_pb2.Subscription(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + name = 
client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + response = client.create_subscription(name, topic) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.Subscription(name=name, topic=topic) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_subscription_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + topic = client.topic_path('[PROJECT]', '[TOPIC]') + + with pytest.raises(CustomException): + client.create_subscription(name, topic) + + def test_get_subscription(self): + # Setup Expected Response + name = 'name3373707' + topic = 'topic110546223' + ack_deadline_seconds = 2135351438 + retain_acked_messages = False + expected_response = { + 'name': name, + 'topic': topic, + 'ack_deadline_seconds': ack_deadline_seconds, + 'retain_acked_messages': retain_acked_messages + } + expected_response = pubsub_pb2.Subscription(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + response = client.get_subscription(subscription) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.GetSubscriptionRequest( + subscription=subscription) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_subscription_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = 
client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + with pytest.raises(CustomException): + client.get_subscription(subscription) + + def test_update_subscription(self): + # Setup Expected Response + name = 'name3373707' + topic = 'topic110546223' + ack_deadline_seconds = 2135351438 + retain_acked_messages = False + expected_response = { + 'name': name, + 'topic': topic, + 'ack_deadline_seconds': ack_deadline_seconds, + 'retain_acked_messages': retain_acked_messages + } + expected_response = pubsub_pb2.Subscription(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = {} + update_mask = {} + + response = client.update_subscription(subscription, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.UpdateSubscriptionRequest( + subscription=subscription, update_mask=update_mask) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_subscription_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_subscription(subscription, update_mask) + + def test_list_subscriptions(self): + # Setup Expected Response + next_page_token = '' + subscriptions_element = {} + subscriptions = [subscriptions_element] + expected_response = { + 'next_page_token': next_page_token, + 'subscriptions': subscriptions + } + expected_response = pubsub_pb2.ListSubscriptionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + project = 
client.project_path('[PROJECT]') + + paged_list_response = client.list_subscriptions(project) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.subscriptions[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.ListSubscriptionsRequest(project=project) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_subscriptions_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + project = client.project_path('[PROJECT]') + + paged_list_response = client.list_subscriptions(project) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_delete_subscription(self): + channel = ChannelStub() + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + client.delete_subscription(subscription) + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.DeleteSubscriptionRequest( + subscription=subscription) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_subscription_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + with pytest.raises(CustomException): + client.delete_subscription(subscription) + + def test_modify_ack_deadline(self): + channel = ChannelStub() + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + ack_ids = [] + ack_deadline_seconds = 2135351438 + + client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) + + assert 
len(channel.requests) == 1 + expected_request = pubsub_pb2.ModifyAckDeadlineRequest( + subscription=subscription, + ack_ids=ack_ids, + ack_deadline_seconds=ack_deadline_seconds) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_modify_ack_deadline_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + ack_ids = [] + ack_deadline_seconds = 2135351438 + + with pytest.raises(CustomException): + client.modify_ack_deadline(subscription, ack_ids, + ack_deadline_seconds) + + def test_acknowledge(self): + channel = ChannelStub() + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + ack_ids = [] + + client.acknowledge(subscription, ack_ids) + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.AcknowledgeRequest( + subscription=subscription, ack_ids=ack_ids) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_acknowledge_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + ack_ids = [] + + with pytest.raises(CustomException): + client.acknowledge(subscription, ack_ids) + + def test_pull(self): + # Setup Expected Response + expected_response = {} + expected_response = pubsub_pb2.PullResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + max_messages = 
496131527 + + response = client.pull(subscription, max_messages) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.PullRequest( + subscription=subscription, max_messages=max_messages) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_pull_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + max_messages = 496131527 + + with pytest.raises(CustomException): + client.pull(subscription, max_messages) + + def test_streaming_pull(self): + # Setup Expected Response + received_messages_element = {} + received_messages = [received_messages_element] + expected_response = {'received_messages': received_messages} + expected_response = pubsub_pb2.StreamingPullResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[iter([expected_response])]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + stream_ack_deadline_seconds = 1875467245 + request = { + 'subscription': subscription, + 'stream_ack_deadline_seconds': stream_ack_deadline_seconds + } + request = pubsub_pb2.StreamingPullRequest(**request) + requests = [request] + + response = client.streaming_pull(requests) + resources = list(response) + assert len(resources) == 1 + assert expected_response == resources[0] + + assert len(channel.requests) == 1 + actual_requests = channel.requests[0][1] + assert len(actual_requests) == 1 + actual_request = list(actual_requests)[0] + assert request == actual_request + + def test_streaming_pull_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = 
subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + stream_ack_deadline_seconds = 1875467245 + request = { + 'subscription': subscription, + 'stream_ack_deadline_seconds': stream_ack_deadline_seconds + } + + request = pubsub_pb2.StreamingPullRequest(**request) + requests = [request] + + with pytest.raises(CustomException): + client.streaming_pull(requests) + + def test_modify_push_config(self): + channel = ChannelStub() + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + push_config = {} + + client.modify_push_config(subscription, push_config) + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.ModifyPushConfigRequest( + subscription=subscription, push_config=push_config) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_modify_push_config_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + push_config = {} + + with pytest.raises(CustomException): + client.modify_push_config(subscription, push_config) + + def test_list_snapshots(self): + # Setup Expected Response + next_page_token = '' + snapshots_element = {} + snapshots = [snapshots_element] + expected_response = { + 'next_page_token': next_page_token, + 'snapshots': snapshots + } + expected_response = pubsub_pb2.ListSnapshotsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + project = client.project_path('[PROJECT]') + + paged_list_response = client.list_snapshots(project) + resources = 
list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.snapshots[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.ListSnapshotsRequest(project=project) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_snapshots_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + project = client.project_path('[PROJECT]') + + paged_list_response = client.list_snapshots(project) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_create_snapshot(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + topic = 'topic110546223' + expected_response = {'name': name_2, 'topic': topic} + expected_response = pubsub_pb2.Snapshot(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + response = client.create_snapshot(name, subscription) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.CreateSnapshotRequest( + name=name, subscription=subscription) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_snapshot_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + with pytest.raises(CustomException): + client.create_snapshot(name, subscription) + + def test_update_snapshot(self): + # Setup Expected Response 
+ name = 'name3373707' + topic = 'topic110546223' + expected_response = {'name': name, 'topic': topic} + expected_response = pubsub_pb2.Snapshot(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + snapshot = {} + update_mask = {} + + response = client.update_snapshot(snapshot, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.UpdateSnapshotRequest( + snapshot=snapshot, update_mask=update_mask) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_snapshot_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + snapshot = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_snapshot(snapshot, update_mask) + + def test_delete_snapshot(self): + channel = ChannelStub() + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + + client.delete_snapshot(snapshot) + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_snapshot_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + + with pytest.raises(CustomException): + client.delete_snapshot(snapshot) + + def test_seek(self): + # Setup Expected Response + expected_response = {} + expected_response = pubsub_pb2.SeekResponse(**expected_response) + + # Mock the API response + channel = 
ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + response = client.seek(subscription) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.SeekRequest(subscription=subscription) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_seek_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + with pytest.raises(CustomException): + client.seek(subscription) + + def test_set_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + policy = {} + + response = client.set_iam_policy(resource, policy) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_set_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + policy = {} + + with pytest.raises(CustomException): + client.set_iam_policy(resource, policy) + + def 
test_get_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + response = client.get_iam_policy(resource) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + + with pytest.raises(CustomException): + client.get_iam_policy(resource) + + def test_test_iam_permissions(self): + # Setup Expected Response + expected_response = {} + expected_response = iam_policy_pb2.TestIamPermissionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = subscriber_client.SubscriberClient(channel=channel) + + # Setup Request + resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + permissions = [] + + response = client.test_iam_permissions(resource, permissions) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_test_iam_permissions_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + 
client = subscriber_client.SubscriberClient(channel=channel) + + # Setup request + resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + permissions = [] + + with pytest.raises(CustomException): + client.test_iam_permissions(resource, permissions) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index ce010e69a8cd..c817fb45887f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -20,18 +20,13 @@ import pytest from google.auth import credentials -from google.cloud.gapic.pubsub.v1 import publisher_client +from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types -def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) - - def test_init(): - client = create_client() + client = publisher.Client() # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. @@ -43,7 +38,7 @@ def test_init(): def test_init_emulator(monkeypatch): monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/foo/bar/') - client = create_client() + client = publisher.Client() # Establish that a gRPC request would attempt to hit the emulator host. 
# @@ -55,7 +50,7 @@ def test_init_emulator(monkeypatch): def test_batch_accepting(): """Establish that an existing batch is returned if it accepts messages.""" - client = create_client() + client = publisher.Client() message = types.PubsubMessage(data=b'foo') # At first, there are no batches, so this should return a new batch @@ -72,7 +67,7 @@ def test_batch_accepting(): def test_batch_without_autocreate(): - client = create_client() + client = publisher.Client() message = types.PubsubMessage(data=b'foo') # If `create=False` is sent, then when the batch is not found, None @@ -84,7 +79,7 @@ def test_batch_without_autocreate(): def test_publish(): - client = create_client() + client = publisher.Client() # Use a mock in lieu of the actual batch class; set the mock up to claim # indiscriminately that it accepts all messages. @@ -112,7 +107,7 @@ def test_publish(): def test_publish_data_not_bytestring_error(): - client = create_client() + client = publisher.Client() with pytest.raises(TypeError): client.publish('topic_name', u'This is a text string.') with pytest.raises(TypeError): @@ -120,7 +115,7 @@ def test_publish_data_not_bytestring_error(): def test_publish_attrs_bytestring(): - client = create_client() + client = publisher.Client() # Use a mock in lieu of the actual batch class; set the mock up to claim # indiscriminately that it accepts all messages. 
@@ -138,13 +133,13 @@ def test_publish_attrs_bytestring(): def test_publish_attrs_type_error(): - client = create_client() + client = publisher.Client() with pytest.raises(TypeError): client.publish('topic_name', b'foo', answer=42) def test_gapic_instance_method(): - client = create_client() + client = publisher.Client() with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: client.create_topic('projects/foo/topics/bar') assert ct.call_count == 1 @@ -153,6 +148,6 @@ def test_gapic_instance_method(): def test_gapic_class_method(): - client = create_client() + client = publisher.Client() answer = client.topic_path('foo', 'bar') assert answer == 'projects/foo/topics/bar' diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 53f81296fe92..c0a0828c8a1e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -12,13 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import queue - +from google.auth import credentials import mock - import pytest +from six.moves import queue -from google.auth import credentials from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _consumer diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 6b104c874617..6a8bff2a7f66 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import queue import threading import mock +from six.moves import queue from google.cloud.pubsub_v1.subscriber import _helper_threads diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 0cde86169417..5a564a52eee3 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -12,10 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import queue import time import mock +from six.moves import queue from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 5adfb7817086..106e735a7110 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -15,16 +15,14 @@ from __future__ import absolute_import from concurrent import futures -import queue import threading +from google.auth import credentials import grpc - import mock - import pytest +from six.moves import queue -from google.auth import credentials from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _helper_threads diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index fb5faaf6b10c..8620081a8b44 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -19,19 +19,14 @@ from google.cloud.pubsub_v1.subscriber.policy import thread -def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return subscriber.Client(credentials=creds) - - def test_init(): - client = create_client() + client = subscriber.Client() assert client._policy_class is thread.Policy def test_init_emulator(monkeypatch): monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/baz/bacon/') - client = create_client() + client = subscriber.Client() # Establish that a gRPC request would attempt to hit the emulator host. 
# @@ -42,13 +37,13 @@ def test_init_emulator(monkeypatch): def test_subscribe(): - client = create_client() + client = subscriber.Client() subscription = client.subscribe('sub_name_a') assert isinstance(subscription, thread.Policy) def test_subscribe_with_callback(): - client = create_client() + client = subscriber.Client() callback = mock.Mock() with mock.patch.object(thread.Policy, 'open') as open_: subscription = client.subscribe('sub_name_b', callback) From af2d57ac780a39567d2d47d7a147d1a6137d82ce Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 11:00:16 -0800 Subject: [PATCH 0152/1197] Making Pub/Sub unit tests use mock credentials. (#4454) --- .../publisher/test_publisher_client.py | 31 +++++++++++++------ .../subscriber/test_subscriber_client.py | 13 +++++--- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index c817fb45887f..4d8b1643adab 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -15,18 +15,19 @@ from __future__ import absolute_import import os +from google.auth import credentials import mock import pytest -from google.auth import credentials from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types def test_init(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. 
@@ -38,6 +39,8 @@ def test_init(): def test_init_emulator(monkeypatch): monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/foo/bar/') + # NOTE: When the emulator host is set, a custom channel will be used, so + # no credentials (mock ot otherwise) can be passed in. client = publisher.Client() # Establish that a gRPC request would attempt to hit the emulator host. @@ -50,7 +53,8 @@ def test_init_emulator(monkeypatch): def test_batch_accepting(): """Establish that an existing batch is returned if it accepts messages.""" - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) message = types.PubsubMessage(data=b'foo') # At first, there are no batches, so this should return a new batch @@ -67,7 +71,8 @@ def test_batch_accepting(): def test_batch_without_autocreate(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) message = types.PubsubMessage(data=b'foo') # If `create=False` is sent, then when the batch is not found, None @@ -79,7 +84,8 @@ def test_batch_without_autocreate(): def test_publish(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) # Use a mock in lieu of the actual batch class; set the mock up to claim # indiscriminately that it accepts all messages. 
@@ -107,7 +113,8 @@ def test_publish(): def test_publish_data_not_bytestring_error(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) with pytest.raises(TypeError): client.publish('topic_name', u'This is a text string.') with pytest.raises(TypeError): @@ -115,7 +122,8 @@ def test_publish_data_not_bytestring_error(): def test_publish_attrs_bytestring(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) # Use a mock in lieu of the actual batch class; set the mock up to claim # indiscriminately that it accepts all messages. @@ -133,13 +141,15 @@ def test_publish_attrs_bytestring(): def test_publish_attrs_type_error(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) with pytest.raises(TypeError): client.publish('topic_name', b'foo', answer=42) def test_gapic_instance_method(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: client.create_topic('projects/foo/topics/bar') assert ct.call_count == 1 @@ -148,6 +158,7 @@ def test_gapic_instance_method(): def test_gapic_class_method(): - client = publisher.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) answer = client.topic_path('foo', 'bar') assert answer == 'projects/foo/topics/bar' diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 8620081a8b44..70da18cd477d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -12,20 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +from google.auth import credentials import mock -from google.auth import credentials from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1.subscriber.policy import thread def test_init(): - client = subscriber.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) assert client._policy_class is thread.Policy def test_init_emulator(monkeypatch): monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/baz/bacon/') + # NOTE: When the emulator host is set, a custom channel will be used, so + # no credentials (mock ot otherwise) can be passed in. client = subscriber.Client() # Establish that a gRPC request would attempt to hit the emulator host. @@ -37,13 +40,15 @@ def test_init_emulator(monkeypatch): def test_subscribe(): - client = subscriber.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) subscription = client.subscribe('sub_name_a') assert isinstance(subscription, thread.Policy) def test_subscribe_with_callback(): - client = subscriber.Client() + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) callback = mock.Mock() with mock.patch.object(thread.Policy, 'open') as open_: subscription = client.subscribe('sub_name_b', callback) From 32d2b482318bbc70249f4d1bc86fb3c3dc1108c5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 11:31:55 -0800 Subject: [PATCH 0153/1197] Some lint hygiene for Pub/Sub. 
(#4455) - Configured PyLint to ignore generated code - Addressed all PyLint failures in `subscriber.policy.base` --- .../cloud/pubsub_v1/subscriber/policy/base.py | 94 +++++++++++-------- packages/google-cloud-pubsub/pylint.config.py | 9 +- 2 files changed, 65 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index d5cd07c41b44..b3a1af540064 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +"""Base class for concurrency policy.""" + from __future__ import absolute_import, division import abc @@ -25,7 +27,8 @@ from google.cloud.pubsub_v1.subscriber import _consumer from google.cloud.pubsub_v1.subscriber import _histogram -logger = logging.getLogger(__name__) + +_LOGGER = logging.getLogger(__name__) @six.add_metaclass(abc.ABCMeta) @@ -40,30 +43,31 @@ class BasePolicy(object): This class defines the interface for the policy implementation; subclasses may be passed as the ``policy_class`` argument to :class:`~.pubsub_v1.client.SubscriberClient`. + + Args: + client (google.cloud.pubsub_v1.subscriber.client.Client): The + subscriber client used to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. + histogram_data (dict): Optional: A structure to store the histogram + data for predicting appropriate ack times. If set, this should + be a dictionary-like object. + + .. 
note:: + Additionally, the histogram relies on the assumption + that the dictionary will properly sort keys provided + that all keys are positive integers. If you are sending + your own dictionary class, ensure this assumption holds + or you will get strange behavior. """ + + _managed_ack_ids = None + def __init__(self, client, subscription, flow_control=types.FlowControl(), histogram_data=None): - """Instantiate the policy. - - Args: - client (~.pubsub_v1.subscriber.client): The subscriber client used - to create this instance. - subscription (str): The name of the subscription. The canonical - format for this is - ``projects/{project}/subscriptions/{subscription}``. - flow_control (~.pubsub_v1.types.FlowControl): The flow control - settings. - histogram_data (dict): Optional: A structure to store the histogram - data for predicting appropriate ack times. If set, this should - be a dictionary-like object. - - .. note:: - Additionally, the histogram relies on the assumption - that the dictionary will properly sort keys provided - that all keys are positive integers. If you are sending - your own dictionary class, ensure this assumption holds - or you will get strange behavior. - """ self._client = client self._subscription = subscription self._consumer = _consumer.Consumer(self) @@ -103,8 +107,8 @@ def future(self): """Return the Future in use, if any. Returns: - ~.pubsub_v1.subscriber.future.Future: A Future conforming to the - ``~concurrent.futures.Future`` interface. + google.cloud.pubsub_v1.subscriber.futures.Future: A Future + conforming to the :class:`~concurrent.futures.Future` interface. """ return self._future @@ -115,7 +119,7 @@ def managed_ack_ids(self): Returns: set: The set of ack IDs being managed. 
""" - if not hasattr(self, '_managed_ack_ids'): + if self._managed_ack_ids is None: self._managed_ack_ids = set() return self._managed_ack_ids @@ -184,6 +188,10 @@ def call_rpc(self, request_generator): request_generator (Generator): A generator that yields requests, and blocks if there are no outstanding requests (until such time as there are). + + Returns: + Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]: An + iterable of pull responses. """ return self._client.api.streaming_pull(request_generator) @@ -222,9 +230,9 @@ def get_initial_request(self, ack_queue=False): while the connection was paused. Returns: - ~.pubsub_v1.types.StreamingPullRequest: A request suitable - for being the first request on the stream (and not suitable - for any other purpose). + google.cloud.pubsub_v1.types.StreamingPullRequest: A request + suitable for being the first request on the stream (and not + suitable for any other purpose). .. note:: If ``ack_queue`` is set to True, this includes the ack_ids, but @@ -302,14 +310,14 @@ def maintain_leases(self): # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. p99 = self.histogram.percentile(99) - logger.debug('The current p99 value is %d seconds.' % p99) + _LOGGER.debug('The current p99 value is %d seconds.', p99) # Create a streaming pull request. # We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. ack_ids = list(self.managed_ack_ids) - logger.debug('Renewing lease for %d ack IDs.' 
% len(ack_ids)) - if len(ack_ids) > 0 and self._consumer.active: + _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) + if ack_ids and self._consumer.active: request = types.StreamingPullRequest( modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=[p99] * len(ack_ids), @@ -323,7 +331,7 @@ def maintain_leases(self): # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. snooze = random.uniform(0.0, p99 * 0.9) - logger.debug('Snoozing lease management for %f seconds.' % snooze) + _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) time.sleep(snooze) def modify_ack_deadline(self, ack_id, seconds): @@ -351,7 +359,11 @@ def nack(self, ack_id, byte_size=None): @abc.abstractmethod def close(self): - """Close the existing connection.""" + """Close the existing connection. + + Raises: + NotImplementedError: Always + """ raise NotImplementedError @abc.abstractmethod @@ -364,6 +376,9 @@ def on_exception(self, exception): Args: exception (Exception): The exception raised by the RPC. + + Raises: + NotImplementedError: Always """ raise NotImplementedError @@ -385,6 +400,9 @@ def on_response(self, response): Args: response (Any): The protobuf response from the RPC. + + Raises: + NotImplementedError: Always """ raise NotImplementedError @@ -396,13 +414,15 @@ def open(self, callback): a :class:`~.pubsub_v1.subscriber.message.Message` as its only argument. + This method is virtual, but concrete implementations should return + a :class:`~google.api_core.future.Future` that provides an interface + to block on the subscription if desired, and handle errors. + Args: callback (Callable[Message]): A callable that receives a Pub/Sub Message. - Returns: - ~google.api_core.future.Future: A future that provides - an interface to block on the subscription if desired, and - handle errors. 
+ Raises: + NotImplementedError: Always """ raise NotImplementedError diff --git a/packages/google-cloud-pubsub/pylint.config.py b/packages/google-cloud-pubsub/pylint.config.py index 5d64b9d2f256..625e52cc9bbb 100644 --- a/packages/google-cloud-pubsub/pylint.config.py +++ b/packages/google-cloud-pubsub/pylint.config.py @@ -14,10 +14,17 @@ """This module is used to configure gcp-devrel-py-tools run-pylint.""" +import copy + +from gcp_devrel.tools import pylint + # Library configuration # library_additions = {} -# library_replacements = {} +# Ignore generated code +library_replacements = copy.deepcopy(pylint.DEFAULT_LIBRARY_RC_REPLACEMENTS) +library_replacements['MASTER']['ignore'].append('gapic') +library_replacements['MASTER']['ignore'].append('proto') # Test configuration From aedb335ea83b7ac8ea94dd7b6a48758904581e20 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 11:59:23 -0800 Subject: [PATCH 0154/1197] Using `pytest` fixtures in Pub/Sub system tests. (#4440) Also - Using `topic_path()` and `subscription_path()` (along with `unique_resource_id()`) rather than a hard-coded path in `_resource_name()` - Dropping the usage of `mock` in favor of `__call__`-able types - Re-using the **same** publisher and subscriber via module-level fixtures - Adding a `cleanup` fixture instead of the `try/finally` indirection - Making sure that `call_count` (now named `calls`) is thread safe - Cleaning up subscriptions that are created (they were previously leaked) --- packages/google-cloud-pubsub/tests/system.py | 322 +++++++++++-------- 1 file changed, 184 insertions(+), 138 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index e89b68b35d38..2a90e7802c58 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -15,154 +15,200 @@ from __future__ import absolute_import import datetime +import threading import time -import uuid -import mock 
+import pytest import six -from google import auth +import google.auth from google.cloud import pubsub_v1 -def _resource_name(resource_type): - """Return a randomly selected name for a resource. +from test_utils.system import unique_resource_id - Args: - resource_type (str): The resource for which a name is being - generated. Should be singular (e.g. "topic", "subscription") - """ - return 'projects/{project}/{resource_type}s/st-n{random}'.format( - project=auth.default()[1], - random=str(uuid.uuid4())[0:8], - resource_type=resource_type, - ) +@pytest.fixture(scope=u'module') +def project(): + _, default_project = google.auth.default() + yield default_project + + +@pytest.fixture(scope=u'module') +def publisher(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope=u'module') +def subscriber(): + yield pubsub_v1.SubscriberClient() + + +@pytest.fixture +def topic_path(project, publisher): + topic_name = 't' + unique_resource_id('-') + yield publisher.topic_path(project, topic_name) -def test_publish_messages(): - publisher = pubsub_v1.PublisherClient() - topic_name = _resource_name('topic') - futures = [] - try: - publisher.create_topic(topic_name) - for i in range(0, 500): - futures.append( - publisher.publish( - topic_name, - b'The hail in Wales falls mainly on the snails.', - num=str(i), - ), - ) - for future in futures: - result = future.result() - assert isinstance(result, (six.text_type, six.binary_type)) - finally: - publisher.delete_topic(topic_name) - - -def test_subscribe_to_messages(): - publisher = pubsub_v1.PublisherClient() - subscriber = pubsub_v1.SubscriberClient() - topic_name = _resource_name('topic') - sub_name = _resource_name('subscription') - - try: - # Create a topic. - publisher.create_topic(topic_name) - - # Subscribe to the topic. This must happen before the messages - # are published. - subscriber.create_subscription(sub_name, topic_name) - subscription = subscriber.subscribe(sub_name) - - # Publish some messages. 
- futures = [publisher.publish( - topic_name, +@pytest.fixture +def subscription_path(project, subscriber): + sub_name = 's' + unique_resource_id('-') + yield subscriber.subscription_path(project, sub_name) + + +@pytest.fixture +def cleanup(): + registry = [] + yield registry + + # Perform all clean up. + for to_call, argument in registry: + to_call(argument) + + +def test_publish_messages(publisher, topic_path, cleanup): + futures = [] + # Make sure the topic gets deleted. + cleanup.append((publisher.delete_topic, topic_path)) + + publisher.create_topic(topic_path) + for index in six.moves.range(500): + futures.append( + publisher.publish( + topic_path, + b'The hail in Wales falls mainly on the snails.', + num=str(index), + ), + ) + + for future in futures: + result = future.result() + assert isinstance(result, six.string_types) + + +def test_subscribe_to_messages( + publisher, topic_path, subscriber, subscription_path, cleanup): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic. + publisher.create_topic(topic_path) + + # Subscribe to the topic. This must happen before the messages + # are published. + subscriber.create_subscription(subscription_path, topic_path) + subscription = subscriber.subscribe(subscription_path) + + # Publish some messages. + futures = [ + publisher.publish( + topic_path, b'Wooooo! The claaaaaw!', - num=str(i), - ) for i in range(0, 50)] - - # Make sure the publish completes. - [f.result() for f in futures] - - # The callback should process the message numbers to prove - # that we got everything at least once. - callback = mock.Mock(wraps=lambda message: message.ack()) - - # Actually open the subscription and hold it open for a few seconds. 
- subscription.open(callback) - for second in range(0, 10): - time.sleep(1) - - # The callback should have fired at least fifty times, but it - # may take some time. - if callback.call_count >= 50: - return - - # Okay, we took too long; fail out. - assert callback.call_count >= 50 - finally: - publisher.delete_topic(topic_name) - - -def test_subscribe_to_messages_async_callbacks(): - publisher = pubsub_v1.PublisherClient() - subscriber = pubsub_v1.SubscriberClient() - topic_name = _resource_name('topic') - sub_name = _resource_name('subscription') - - try: - # Create a topic. - publisher.create_topic(topic_name) - - # Subscribe to the topic. This must happen before the messages - # are published. - subscriber.create_subscription(sub_name, topic_name) - subscription = subscriber.subscribe(sub_name) - - # Publish some messages. - futures = [publisher.publish( - topic_name, + num=str(index), + ) + for index in six.moves.range(50) + ] + + # Make sure the publish completes. + for future in futures: + future.result() + + # Actually open the subscription and hold it open for a few seconds. + # The callback should process the message numbers to prove + # that we got everything at least once. + callback = AckCallback() + subscription.open(callback) + for second in six.moves.range(10): + time.sleep(1) + + # The callback should have fired at least fifty times, but it + # may take some time. + if callback.calls >= 50: + return + + # Okay, we took too long; fail out. + assert callback.calls >= 50 + + +def test_subscribe_to_messages_async_callbacks( + publisher, topic_path, subscriber, subscription_path, cleanup): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic. + publisher.create_topic(topic_path) + + # Subscribe to the topic. This must happen before the messages + # are published. 
+ subscriber.create_subscription(subscription_path, topic_path) + subscription = subscriber.subscribe(subscription_path) + + # Publish some messages. + futures = [ + publisher.publish( + topic_path, b'Wooooo! The claaaaaw!', - num=str(i), - ) for i in range(0, 2)] - - # Make sure the publish completes. - [f.result() for f in futures] - - # We want to make sure that the callback was called asynchronously. So - # track when each call happened and make sure below. - call_times = [] - - def process_message(message): - # list.append() is thread-safe. - call_times.append(datetime.datetime.now()) - time.sleep(2) - message.ack() - - callback = mock.Mock(wraps=process_message) - side_effect = mock.Mock() - callback.side_effect = side_effect - - # Actually open the subscription and hold it open for a few seconds. - subscription.open(callback) - for second in range(0, 5): - time.sleep(4) - - # The callback should have fired at least two times, but it may - # take some time. - if callback.call_count >= 2 and side_effect.call_count >= 2: - first = min(call_times[:2]) - last = max(call_times[:2]) - diff = last - first - # "Ensure" the first two callbacks were executed asynchronously - # (sequentially would have resulted in a difference of 2+ - # seconds). - assert diff.days == 0 - assert diff.seconds < 2 - - # Okay, we took too long; fail out. - assert callback.call_count >= 2 - finally: - publisher.delete_topic(topic_name) + num=str(index), + ) + for index in six.moves.range(2) + ] + + # Make sure the publish completes. + for future in futures: + future.result() + + # We want to make sure that the callback was called asynchronously. So + # track when each call happened and make sure below. + callback = TimesCallback(2) + + # Actually open the subscription and hold it open for a few seconds. + subscription.open(callback) + for second in six.moves.range(5): + time.sleep(4) + + # The callback should have fired at least two times, but it may + # take some time. 
+ if callback.calls >= 2: + first, last = sorted(callback.call_times[:2]) + diff = last - first + # "Ensure" the first two callbacks were executed asynchronously + # (sequentially would have resulted in a difference of 2+ + # seconds). + assert diff.days == 0 + assert diff.seconds < callback.sleep_time + + # Okay, we took too long; fail out. + assert callback.calls >= 2 + + +class AckCallback(object): + + def __init__(self): + self.calls = 0 + + def __call__(self, message): + message.ack() + # Only increment the number of calls **after** finishing. + with threading.Lock(): + self.calls += 1 + + +class TimesCallback(object): + + def __init__(self, sleep_time): + self.sleep_time = sleep_time + self.calls = 0 + self.call_times = [] + + def __call__(self, message): + now = datetime.datetime.now() + time.sleep(self.sleep_time) + message.ack() + # Only increment the number of calls **after** finishing. + with threading.Lock(): + # list.append() is thread-safe, but we still wait until + # ``calls`` is incremented to do it. + self.call_times.append(now) + self.calls += 1 From b7ddc691a8f9e64caa79949cdae5391df774c666 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 14:09:42 -0800 Subject: [PATCH 0155/1197] PubSub: Renaming `logger` -> `_LOGGER` since at module scope. 
(#4459) --- .../cloud/pubsub_v1/subscriber/policy/thread.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 8444132c1bc5..a4cd9136e0e9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -28,12 +28,12 @@ from google.cloud.pubsub_v1.subscriber.message import Message -logger = logging.getLogger(__name__) +_LOGGER = logging.getLogger(__name__) def _callback_completed(future): """Simple callback that just logs a `Future`'s result.""" - logger.debug('Result: %s', future.result()) + _LOGGER.debug('Result: %s', future.result()) class Policy(base.BasePolicy): @@ -80,7 +80,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), ) # Also maintain a request queue and an executor. - logger.debug('Creating callback requests thread (not starting).') + _LOGGER.debug('Creating callback requests thread (not starting).') if executor is None: executor = futures.ThreadPoolExecutor(max_workers=10) self._executor = executor @@ -122,7 +122,7 @@ def open(self, callback): self._future = Future(policy=self) # Start the thread to pass the requests. - logger.debug('Starting callback requests worker.') + _LOGGER.debug('Starting callback requests worker.') self._callback = callback self._consumer.helper_threads.start( 'callback requests worker', @@ -135,7 +135,7 @@ def open(self, callback): # Spawn a helper thread that maintains all of the leases for # this policy. 
- logger.debug('Spawning lease maintenance worker.') + _LOGGER.debug('Spawning lease maintenance worker.') self._leaser = threading.Thread(target=self.maintain_leases) self._leaser.daemon = True self._leaser.start() @@ -168,8 +168,8 @@ def on_response(self, response): For each message, schedule a callback with the executor. """ for msg in response.received_messages: - logger.debug('New message received from Pub/Sub: %r', msg) - logger.debug(self._callback) + _LOGGER.debug('New message received from Pub/Sub: %r', msg) + _LOGGER.debug(self._callback) message = Message(msg.message, msg.ack_id, self._request_queue) future = self._executor.submit(self._callback, message) future.add_done_callback(_callback_completed) From b8b01bb685973d4dab9d4c8c669e0bf6064e555d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 14:10:11 -0800 Subject: [PATCH 0156/1197] PubSub: Making `thread.Policy.on_exception` more robust. (#4444) - Adding special handling for API core exceptions - Retrying on both types of idempotent error Towards #4234. 
--- .../cloud/pubsub_v1/subscriber/policy/base.py | 5 ++++ .../pubsub_v1/subscriber/policy/thread.py | 7 +++--- .../pubsub_v1/subscriber/test_policy_base.py | 23 ++++++++++++++++++- .../subscriber/test_policy_thread.py | 17 +++++++++++--- 4 files changed, 44 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index b3a1af540064..3caff7043e2a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -21,6 +21,7 @@ import random import time +from google.api_core import exceptions import six from google.cloud.pubsub_v1 import types @@ -65,6 +66,10 @@ class BasePolicy(object): """ _managed_ack_ids = None + _RETRYABLE_STREAM_ERRORS = ( + exceptions.DeadlineExceeded, + exceptions.ServiceUnavailable, + ) def __init__(self, client, subscription, flow_control=types.FlowControl(), histogram_data=None): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index a4cd9136e0e9..b30dd8107814 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -153,10 +153,9 @@ def on_exception(self, exception): This will cause the stream to exit loudly. """ - # If this is DEADLINE_EXCEEDED, then we want to retry. - # That entails just returning None. - deadline_exceeded = grpc.StatusCode.DEADLINE_EXCEEDED - if getattr(exception, 'code', lambda: None)() == deadline_exceeded: + # If this is in the list of idempotent exceptions, then we want to + # retry. That entails just returning None. 
+ if isinstance(exception, self._RETRYABLE_STREAM_ERRORS): return # Set any other exception on the future. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 3ebf1e6e6d18..5dd082ec2de7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -14,11 +14,15 @@ import time +from google.api_core import exceptions +from google.auth import credentials +import grpc import mock -from google.auth import credentials from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.gapic import subscriber_client_config +from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.policy import thread @@ -28,6 +32,23 @@ def create_policy(flow_control=types.FlowControl()): return thread.Policy(client, 'sub_name_d', flow_control=flow_control) +def test_idempotent_retry_codes(): + # Make sure the config matches our hard-coded tuple of exceptions. 
+ interfaces = subscriber_client_config.config['interfaces'] + retry_codes = interfaces['google.pubsub.v1.Subscriber']['retry_codes'] + idempotent = retry_codes['idempotent'] + + status_codes = tuple( + getattr(grpc.StatusCode, name, None) + for name in idempotent + ) + expected = tuple( + exceptions.exception_class_for_grpc_status(status_code) + for status_code in status_codes + ) + assert base.BasePolicy._RETRYABLE_STREAM_ERRORS == expected + + def test_ack_deadline(): policy = create_policy() assert policy.ack_deadline == 10 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 106e735a7110..e31315900c97 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -17,8 +17,8 @@ from concurrent import futures import threading +from google.api_core import exceptions from google.auth import credentials -import grpc import mock import pytest from six.moves import queue @@ -90,8 +90,19 @@ def test_on_callback_request(): def test_on_exception_deadline_exceeded(): policy = create_policy() - exc = mock.Mock(spec=('code',)) - exc.code.return_value = grpc.StatusCode.DEADLINE_EXCEEDED + + details = 'Bad thing happened. Time out, go sit in the corner.' + exc = exceptions.DeadlineExceeded(details) + + assert policy.on_exception(exc) is None + + +def test_on_exception_unavailable(): + policy = create_policy() + + details = 'UNAVAILABLE. Service taking nap.' + exc = exceptions.ServiceUnavailable(details) + assert policy.on_exception(exc) is None From 328b15b6fd640bc2d368d223ad6410073b0687e9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 14:39:47 -0800 Subject: [PATCH 0157/1197] Preparing release 0.29.1 of Pub / Sub. 
(#4460) --- packages/google-cloud-pubsub/CHANGELOG.md | 24 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index e326c60deef2..c358fe2d2513 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.29.1 + +### Notable Implementation Changes + +- **Bug fix** (#4234): Adding retries for connection `UNAVAILABLE`. This + bug made the Pub / Sub client mostly unusable for subscribers to topics + that don't have a steady stream of messages. After ~2 minutes of inactivity, + the gRPC connection would timeout and raise `UNAVAILABLE` locally, i.e. not + due to a response from the backend. (#4444) +- Updating autogenerated packages (#4438) + +### Documentation + +- Fixing broken examples in quick start (#4398) +- Fixing broken example in README (#4402, h/t to @mehmetboraezer) +- Updating old/dead link to usage doc in README (#4406, h/t to @mehmetboraezer) + +### Dependencies + +- Dropping dependency on `google-cloud-core` in exchange for + `google-api-core` (#4438) + +PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.1/ + ## 0.29.0 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b7f45a7c38db..babded331ecd 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.1.dev1', + version='0.29.1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 99918bf3918f387a5d9b630b7de4e847a0e689d5 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 14:49:17 -0800 Subject: [PATCH 0158/1197] Passing `posargs` to `nox -s unit` in Pub / Sub. 
(#4461) Also modifying a unit test to call `policy.on_exception()` **outside** of an "assert raises". I noticed these issues during review of #4380. --- packages/google-cloud-pubsub/nox.py | 3 ++- .../tests/unit/pubsub_v1/subscriber/test_policy_thread.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 6fb6acb31d5a..00945bdccbc7 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -47,7 +47,8 @@ def default(session): '--cov=google.cloud.pubsub', '--cov=google.cloud.pubsub_v1', '--cov-config=.coveragerc', - 'tests/unit', + os.path.join('tests', 'unit'), + *session.posargs ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index e31315900c97..8e9d41138fc7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -110,8 +110,8 @@ def test_on_exception_other(): policy = create_policy() policy._future = Future(policy=policy) exc = TypeError('wahhhhhh') + assert policy.on_exception(exc) is None with pytest.raises(TypeError): - policy.on_exception(exc) policy.future.result() From 146b3000db73a923c229773f6fb75569b93fb81f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 15:10:12 -0800 Subject: [PATCH 0159/1197] Adding `.dev` to Pub / Sub version after release. 
(#4462) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index babded331ecd..1d28890be846 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.1', + version='0.29.2.dev', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 6e2e72240d2b60b3da75a78eb93bd6df857154cb Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 27 Nov 2017 20:03:34 -0800 Subject: [PATCH 0160/1197] Changing `dev` to `dev1` in Pub / Sub version. (#4469) This is a follow-up to #4462, where I forgot to add it. In the current form, `pip` adds a 0 at the end for us: $ virtualenv venv $ venv/bin/pip install pubsub/ $ venv/bin/pip show google-cloud-pubsub Name: google-cloud-pubsub Version: 0.29.2.dev0 ... --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 1d28890be846..962d05018ea6 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.2.dev', + version='0.29.2.dev1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From eedee588bea34c3cf6f8c08ce22c744d8d16475c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 28 Nov 2017 12:15:45 -0800 Subject: [PATCH 0161/1197] Pub/Sub logging hygiene. (#4471) Uses newline character when logging protobufs that may end up having newlines in them. These are harder to grok in the log output since it "blends" the protobuf and the description. For example. Received response: received_messages { ack_id: "Pn41MEV..." message { data: ... vs. 
Received response: received_messages { ack_id: "Pn41MEV..." message { data: ... Also - replaced a lambda with a simple function - added docstrings to two default callbacks used by the `policy.thread.Policy` implementation. - updated grammatical error in a comment - replaced a bare `if future` with `if future is not None` - using repr() of protobufs (rather than str()) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 8 ++--- .../cloud/pubsub_v1/subscriber/policy/base.py | 2 +- .../pubsub_v1/subscriber/policy/thread.py | 32 ++++++++++++++++--- 3 files changed, 32 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 099f86336b4a..77c96f06db22 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -202,9 +202,7 @@ def _request_generator_thread(self): # First, yield the initial request. This occurs on every new # connection, fundamentally including a resumed connection. 
initial_request = self._policy.get_initial_request(ack_queue=True) - _LOGGER.debug('Sending initial request: {initial_request}'.format( - initial_request=initial_request, - )) + _LOGGER.debug('Sending initial request:\n%r', initial_request) yield initial_request # Now yield each of the items on the request queue, and block if there @@ -215,7 +213,7 @@ def _request_generator_thread(self): _LOGGER.debug('Request generator signaled to stop.') break - _LOGGER.debug('Sending request: {}'.format(request)) + _LOGGER.debug('Sending request:\n%r', request) yield request def _blocking_consume(self): @@ -233,7 +231,7 @@ def _blocking_consume(self): response_generator = self._policy.call_rpc(request_generator) try: for response in response_generator: - _LOGGER.debug('Received response: {0}'.format(response)) + _LOGGER.debug('Received response:\n%r', response) self._policy.on_response(response) # If the loop above exits without an exception, then the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 3caff7043e2a..c0b4afde2ec0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -307,7 +307,7 @@ def maintain_leases(self): in an appropriate form of subprocess. """ while True: - # Sanity check: Should this infinitely loop quit? + # Sanity check: Should this infinite loop quit? 
if not self._consumer.active: return diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index b30dd8107814..8092c861f3d6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -32,10 +32,34 @@ def _callback_completed(future): - """Simple callback that just logs a `Future`'s result.""" + """Simple callback that just logs a future's result. + + Used on completion of processing a message received by a + subscriber. + + Args: + future (concurrent.futures.Future): A future returned + from :meth:`~concurrent.futures.Executor.submit`. + """ _LOGGER.debug('Result: %s', future.result()) +def _do_nothing_callback(message): + """Default callback for messages received by subscriber. + + Does nothing with the message and returns :data:`None`. + + Args: + message (~google.cloud.pubsub_v1.subscriber.message.Message): A + protobuf message returned by the backend and parsed into + our high level message type. + + Returns: + NoneType: Always. + """ + return None + + class Policy(base.BasePolicy): """A consumer class based on :class:`threading.Thread`. @@ -62,7 +86,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), ``executor``. """ # Default the callback to a no-op; it is provided by `.open`. - self._callback = lambda message: None + self._callback = _do_nothing_callback # Default the future to None; it is provided by `.open`. self._future = None @@ -97,7 +121,7 @@ def close(self): # The subscription is closing cleanly; resolve the future if it is not # resolved already. 
- if self._future and not self._future.done(): + if self._future is not None and not self._future.done(): self._future.set_result(None) self._future = None @@ -167,7 +191,7 @@ def on_response(self, response): For each message, schedule a callback with the executor. """ for msg in response.received_messages: - _LOGGER.debug('New message received from Pub/Sub: %r', msg) + _LOGGER.debug('New message received from Pub/Sub:\n%r', msg) _LOGGER.debug(self._callback) message = Message(msg.message, msg.ack_id, self._request_queue) future = self._executor.submit(self._callback, message) From f4373eb40e74d2c66e2c00d3df7edee07cc29ba2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 28 Nov 2017 16:13:54 -0800 Subject: [PATCH 0162/1197] Using logger for module in Pub/Sub publisher. (#4473) --- .../google/cloud/pubsub_v1/publisher/batch/thread.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index df7d23ffe7d8..8ffe1b626515 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -24,6 +24,9 @@ from google.cloud.pubsub_v1.publisher.batch import base +_LOGGER = logging.getLogger(__name__) + + class Batch(base.Batch): """A batch of messages. 
@@ -173,7 +176,7 @@ def _commit(self): self.messages, ) end = time.time() - logging.getLogger().debug('gRPC Publish took {s} seconds.'.format( + _LOGGER.debug('gRPC Publish took {s} seconds.'.format( s=end - start, )) From ab80d932a8d4897369156b843717b6acb1dd5bd3 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 28 Nov 2017 20:03:19 -0800 Subject: [PATCH 0163/1197] PubSub: Naming the explicitly spawned threads (#4474) --- .../google/cloud/pubsub_v1/publisher/batch/thread.py | 10 ++++++++-- .../cloud/pubsub_v1/subscriber/_helper_threads.py | 3 ++- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 5 ++++- .../unit/pubsub_v1/publisher/batch/test_thread.py | 10 ++++++++-- 4 files changed, 22 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 8ffe1b626515..c898d30bc724 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -78,7 +78,10 @@ def __init__(self, client, topic, settings, autocommit=True): self._thread = None self._commit_lock = threading.Lock() if autocommit and self._settings.max_latency < float('inf'): - self._thread = threading.Thread(target=self.monitor) + self._thread = threading.Thread( + name='Thread-MonitorBatchPublisher', + target=self.monitor, + ) self._thread.start() @property @@ -140,7 +143,10 @@ def commit(self): self._status = 'in-flight' # Start a new thread to actually handle the commit. 
- commit_thread = threading.Thread(target=self._commit) + commit_thread = threading.Thread( + name='Thread-CommitBatchPublisher', + target=self._commit, + ) commit_thread.start() def _commit(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index 50710e2114db..054afd76727f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -64,7 +64,8 @@ def start(self, name, queue, target, *args, **kwargs): thread = threading.Thread( name='Consumer helper: {}'.format(name), target=target, - *args, **kwargs + *args, + **kwargs ) thread.daemon = True thread.start() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 8092c861f3d6..ec2710cb9096 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -160,7 +160,10 @@ def open(self, callback): # Spawn a helper thread that maintains all of the leases for # this policy. 
_LOGGER.debug('Spawning lease maintenance worker.') - self._leaser = threading.Thread(target=self.maintain_leases) + self._leaser = threading.Thread( + name='Thread-LeaseMaintenance', + target=self.maintain_leases, + ) self._leaser.daemon = True self._leaser.start() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 588f4bfe689f..ca931e97df5b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -57,7 +57,10 @@ def test_init(): # batch once time elapses). with mock.patch.object(threading, 'Thread', autospec=True) as Thread: batch = Batch(client, 'topic_name', types.BatchSettings()) - Thread.assert_called_once_with(target=batch.monitor) + Thread.assert_called_once_with( + name='Thread-MonitorBatchPublisher', + target=batch.monitor, + ) # New batches start able to accept messages by default. assert batch.status == BatchStatus.ACCEPTING_MESSAGES @@ -81,7 +84,10 @@ def test_commit(): batch.commit() # A thread should have been created to do the actual commit. - Thread.assert_called_once_with(target=batch._commit) + Thread.assert_called_once_with( + name='Thread-CommitBatchPublisher', + target=batch._commit, + ) Thread.return_value.start.assert_called_once_with() # The batch's status needs to be something other than "accepting messages", From 04e927b361f27d42772f976e6d06935575f4b9ac Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 08:44:36 -0800 Subject: [PATCH 0164/1197] Renaming "Consumer helper" threads in Pub/Sub. 
(#4476) Now names will show up in logs as Thread-ConsumerHelper-CallbackRequestsWorker Thread-ConsumerHelper-ConsumeBidirectionalStream instead of as Consumer helper: callback requests worker Consumer helper: consume bidirectional stream --- .../google/cloud/pubsub_v1/subscriber/_consumer.py | 2 +- .../google/cloud/pubsub_v1/subscriber/_helper_threads.py | 2 +- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_consumer.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 77c96f06db22..c3c690a92c91 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -254,7 +254,7 @@ def start_consuming(self): self.active = True self._exiting.clear() self.helper_threads.start( - 'consume bidirectional stream', + 'ConsumeBidirectionalStream', self._request_queue, self._blocking_consume, ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index 054afd76727f..c88ca2487a89 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -62,7 +62,7 @@ def start(self, name, queue, target, *args, **kwargs): """ # Create and start the helper thread. 
thread = threading.Thread( - name='Consumer helper: {}'.format(name), + name='Thread-ConsumerHelper-{}'.format(name), target=target, *args, **kwargs diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index ec2710cb9096..6c678dda3cd4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -149,7 +149,7 @@ def open(self, callback): _LOGGER.debug('Starting callback requests worker.') self._callback = callback self._consumer.helper_threads.start( - 'callback requests worker', + 'CallbackRequestsWorker', self._request_queue, self._callback_requests, ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index c0a0828c8a1e..52038a891d94 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -109,7 +109,7 @@ def test_start_consuming(): assert consumer._exiting.is_set() is False assert consumer.active is True start.assert_called_once_with( - 'consume bidirectional stream', + 'ConsumeBidirectionalStream', consumer._request_queue, consumer._blocking_consume, ) From b53467d0eb6ec12e981c06b97a175b2f68538492 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 09:58:52 -0800 Subject: [PATCH 0165/1197] PubSub: `Policy.on_exception` actually used to make consumer go inactive. 
(#4472) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 6 +- .../cloud/pubsub_v1/subscriber/futures.py | 13 ++-- .../cloud/pubsub_v1/subscriber/policy/base.py | 7 +- .../pubsub_v1/subscriber/policy/thread.py | 15 ++++- .../pubsub_v1/subscriber/test_consumer.py | 65 ++++++++++++++++--- .../subscriber/test_policy_thread.py | 6 +- 6 files changed, 84 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index c3c690a92c91..5907a1c7e1f1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -243,11 +243,7 @@ def _blocking_consume(self): except KeyboardInterrupt: self.stop_consuming() except Exception as exc: - try: - self._policy.on_exception(exc) - except: - self.active = False - raise + self.active = self._policy.on_exception(exc) def start_consuming(self): """Start consuming the stream.""" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 15a932f2478f..fa1f457a2602 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -39,16 +39,19 @@ def running(self): .. note:: - A ``False`` value here does not necessarily mean that the + A :data:`False` value here does not necessarily mean that the subscription is closed; it merely means that _this_ future is not the future applicable to it. Since futures have a single result (or exception) and there is - not a concept of resetting them, a closing re-opening of a + not a concept of resetting them, a closing / re-opening of a subscription will therefore return a new future. 
Returns: - bool: ``True`` if this subscription is opened with this future, - ``False`` otherwise. + bool: :data:`True` if this subscription is opened with this + future, :data:`False` otherwise. """ - return self._policy.future is self + if self._policy.future is not self: + return False + + return super(Future, self).running() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index c0b4afde2ec0..181f671fcc5b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -376,8 +376,11 @@ def on_exception(self, exception): """Called when a gRPC exception occurs. If this method does nothing, then the stream is re-started. If this - raises an exception, it will stop the consumer thread. - This is executed on the response consumer helper thread. + raises an exception, it will stop the consumer thread. This is + executed on the response consumer helper thread. + + Implementations should return :data:`True` if they want the consumer + thread to remain active, otherwise they should return :data:`False`. Args: exception (Exception): The exception raised by the RPC. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 6c678dda3cd4..70cd227f68a8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -176,17 +176,26 @@ def on_callback_request(self, callback_request): getattr(self, action)(**kwargs) def on_exception(self, exception): - """Bubble the exception. + """Handle the exception. - This will cause the stream to exit loudly. 
+ If the exception is one of the retryable exceptions, this will signal + to the consumer thread that it should remain active. + + This will cause the stream to exit when it returns :data:`False`. + + Returns: + bool: Indicates if the caller should remain active or shut down. + Will be :data:`True` if the ``exception`` is "acceptable", i.e. + in a list of retryable / idempotent exceptions. """ # If this is in the list of idempotent exceptions, then we want to # retry. That entails just returning None. if isinstance(exception, self._RETRYABLE_STREAM_ERRORS): - return + return True # Set any other exception on the future. self._future.set_exception(exception) + return False def on_response(self, response): """Process all received Pub/Sub messages. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 52038a891d94..3eb820d6c418 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import types as base_types + from google.auth import credentials import mock import pytest @@ -87,18 +89,61 @@ def test_blocking_consume_keyboard_interrupt(): on_res.assert_called_once_with(consumer._policy, mock.sentinel.A) -@mock.patch.object(thread.Policy, 'call_rpc', autospec=True) -@mock.patch.object(thread.Policy, 'on_response', autospec=True) -@mock.patch.object(thread.Policy, 'on_exception', autospec=True) -def test_blocking_consume_exception_reraise(on_exc, on_res, call_rpc): - consumer = create_consumer() +class OnException(object): + + def __init__(self, exiting_event, acceptable=None): + self.exiting_event = exiting_event + self.acceptable = acceptable + + def __call__(self, exception): + if exception is self.acceptable: + return True + else: + self.exiting_event.set() + return False + + +def test_blocking_consume_on_exception(): + policy = mock.Mock(spec=('call_rpc', 'on_response', 'on_exception')) + policy.call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + exc = TypeError('Bad things!') + policy.on_response.side_effect = exc + + consumer = _consumer.Consumer(policy=policy) + policy.on_exception.side_effect = OnException(consumer._exiting) + + # Establish that we get responses until we are sent the exiting event. + consumer._blocking_consume() + + # Check mocks. + policy.call_rpc.assert_called_once() + policy.on_response.assert_called_once_with(mock.sentinel.A) + policy.on_exception.assert_called_once_with(exc) + + +def test_blocking_consume_two_exceptions(): + policy = mock.Mock(spec=('call_rpc', 'on_response', 'on_exception')) + policy.call_rpc.side_effect = ( + (mock.sentinel.A,), + (mock.sentinel.B,), + ) + exc1 = NameError('Oh noes.') + exc2 = ValueError('Something grumble.') + policy.on_response.side_effect = (exc1, exc2) + + consumer = _consumer.Consumer(policy=policy) + policy.on_exception.side_effect = OnException( + consumer._exiting, acceptable=exc1) # Establish that we get responses until we are sent the exiting event. 
- call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) - on_res.side_effect = TypeError('Bad things!') - on_exc.side_effect = on_res.side_effect - with pytest.raises(TypeError): - consumer._blocking_consume() + consumer._blocking_consume() + + # Check mocks. + assert policy.call_rpc.call_count == 2 + policy.on_response.assert_has_calls( + [mock.call(mock.sentinel.A), mock.call(mock.sentinel.B)]) + policy.on_exception.assert_has_calls( + [mock.call(exc1), mock.call(exc2)]) def test_start_consuming(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 8e9d41138fc7..fef7df01dea0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -94,7 +94,7 @@ def test_on_exception_deadline_exceeded(): details = 'Bad thing happened. Time out, go sit in the corner.' exc = exceptions.DeadlineExceeded(details) - assert policy.on_exception(exc) is None + assert policy.on_exception(exc) is True def test_on_exception_unavailable(): @@ -103,14 +103,14 @@ def test_on_exception_unavailable(): details = 'UNAVAILABLE. Service taking nap.' exc = exceptions.ServiceUnavailable(details) - assert policy.on_exception(exc) is None + assert policy.on_exception(exc) is True def test_on_exception_other(): policy = create_policy() policy._future = Future(policy=policy) exc = TypeError('wahhhhhh') - assert policy.on_exception(exc) is None + assert policy.on_exception(exc) is False with pytest.raises(TypeError): policy.future.result() From 293ec68f19809dd4032c6d7e8dd522f3682bd7b0 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 10:54:51 -0800 Subject: [PATCH 0166/1197] Removing unused "extra" arguments in Pub/Sub helper thread start. 
(#4483) --- .../google/cloud/pubsub_v1/subscriber/_helper_threads.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index c88ca2487a89..dab617650749 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -47,15 +47,13 @@ def __init__(self): def __contains__(self, needle): return needle in self._helper_threads - def start(self, name, queue, target, *args, **kwargs): + def start(self, name, queue, target): """Create and start a helper thread. Args: name (str): The name of the helper thread. queue (Queue): A concurrency-safe queue. target (Callable): The target of the thread. - args: Additional args passed to the thread constructor. - kwargs: Additional kwargs passed to the thread constructor. Returns: threading.Thread: The created thread. @@ -64,8 +62,6 @@ def start(self, name, queue, target, *args, **kwargs): thread = threading.Thread( name='Thread-ConsumerHelper-{}'.format(name), target=target, - *args, - **kwargs ) thread.daemon = True thread.start() From 8afced53a6a10d97d8749e1515e731dd69bf316f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 11:28:12 -0800 Subject: [PATCH 0167/1197] Adding log message when Pub/Sub `maintain_leases` exits. 
(#4484) --- .../google/cloud/pubsub_v1/subscriber/policy/base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 181f671fcc5b..087e9a1cb670 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -309,6 +309,7 @@ def maintain_leases(self): while True: # Sanity check: Should this infinite loop quit? if not self._consumer.active: + _LOGGER.debug('Consumer inactive, ending lease maintenance.') return # Determine the appropriate duration for the lease. This is From c6d016712662f46c3d31d2c9772e497bd0d53b6d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 15:25:44 -0800 Subject: [PATCH 0168/1197] Various hygiene changes to Pub / Sub subscriber. (#4494) - Using `%`-formatting in all `logging.log()` calls (e.g. `info()`). I am not opposed to using `.format()` but `logging` "prefers" `%`-formatting (and I wanted to be consistent because hobgoblins). - Adding non-public globals for helper thread names, this was especially needed because I accidentally broke this in #4476 when I changed `callback requests worker` to `CallbackRequestsWorker` in one place, not two. - Adding docstring to `QueueCallbackThread` that explains what it does (I'll refactor this class in a follow-up) - Adding a logging statement when a `QueueCallbackThread` exits - Changing indents / using much more vertical space in calls to `request_queue.put()` in `subscriber.message.Message`. (I came across these when trying to understand how `QueueCallbackThread` interacts with `Policy.on_callback_request`) - Changing `GPRC` to `gRPC` in a docstring - Moving "Creating callback requests thread (not starting)." 
until right before the resource is created - Changing "Spawning" to "Starting" in a log message to match others --- .../cloud/pubsub_v1/subscriber/_consumer.py | 4 +- .../pubsub_v1/subscriber/_helper_threads.py | 32 ++++++--- .../cloud/pubsub_v1/subscriber/message.py | 68 +++++++++++++------ .../pubsub_v1/subscriber/policy/thread.py | 11 +-- 4 files changed, 79 insertions(+), 36 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 5907a1c7e1f1..21f2fefc537d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -123,7 +123,9 @@ from google.cloud.pubsub_v1.subscriber import _helper_threads + _LOGGER = logging.getLogger(__name__) +_BIDIRECTIONAL_CONSUMER_NAME = 'ConsumeBidirectionalStream' class Consumer(object): @@ -250,7 +252,7 @@ def start_consuming(self): self.active = True self._exiting.clear() self.helper_threads.start( - 'ConsumeBidirectionalStream', + _BIDIRECTIONAL_CONSUMER_NAME, self._request_queue, self._blocking_consume, ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index dab617650749..16d90e5c0f52 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -68,7 +68,7 @@ def start(self, name, queue, target): # Keep track of the helper thread, so we are able to stop it. self._helper_threads[name] = _HelperThread(name, thread, queue) - _LOGGER.debug('Started helper thread {}'.format(name)) + _LOGGER.debug('Started helper thread %s', name) return thread def stop(self, name): @@ -86,7 +86,7 @@ def stop(self, name): # Join the thread if it is still alive. 
if helper_thread.thread.is_alive(): - _LOGGER.debug('Stopping helper thread {}'.format(name)) + _LOGGER.debug('Stopping helper thread %s', name) helper_thread.queue.put(STOP) helper_thread.thread.join() @@ -102,9 +102,25 @@ def stop_all(self): class QueueCallbackThread(object): - """A helper thread that executes a callback for every item in - the queue. + """A helper that executes a callback for every item in the queue. + + .. note:: + + This is not actually a thread, but it is intended to be a target + for a thread. + + Calls a blocking ``get()`` on the ``queue`` until it encounters + :attr:`STOP`. + + Args: + queue (~queue.Queue): A Queue instance, appropriate for crossing the + concurrency boundary implemented by ``executor``. Items will + be popped off (with a blocking ``get()``) until :attr:`STOP` + is encountered. + callback (Callable): A callback that can process items pulled off + of the queue. """ + def __init__(self, queue, callback): self.queue = queue self._callback = callback @@ -113,14 +129,12 @@ def __call__(self): while True: item = self.queue.get() if item == STOP: - break + _LOGGER.debug('Exiting the QueueCallbackThread.') + return # Run the callback. If any exceptions occur, log them and # continue. try: self._callback(item) except Exception as exc: - _LOGGER.error('{class_}: {message}'.format( - class_=exc.__class__.__name__, - message=str(exc), - )) + _LOGGER.error('%s: %s', exc.__class__.__name__, exc) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 6b65da1c0a21..33bf04502225 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -37,6 +37,7 @@ class Message(object): publish_time (datetime): The time that this message was originally published. 
""" + def __init__(self, message, ack_id, request_queue): """Construct the Message. @@ -128,11 +129,16 @@ def ack(self): receive any given message more than once. """ time_to_ack = math.ceil(time.time() - self._received_timestamp) - self._request_queue.put(('ack', { - 'ack_id': self._ack_id, - 'byte_size': self.size, - 'time_to_ack': time_to_ack, - })) + self._request_queue.put( + ( + 'ack', + { + 'ack_id': self._ack_id, + 'byte_size': self.size, + 'time_to_ack': time_to_ack, + }, + ), + ) def drop(self): """Release the message from lease management. @@ -147,10 +153,15 @@ def drop(self): both call this one. You probably do not want to call this method directly. """ - self._request_queue.put(('drop', { - 'ack_id': self._ack_id, - 'byte_size': self.size, - })) + self._request_queue.put( + ( + 'drop', + { + 'ack_id': self._ack_id, + 'byte_size': self.size, + }, + ), + ) def lease(self): """Inform the policy to lease this message continually. @@ -159,10 +170,15 @@ def lease(self): This method is called by the constructor, and you should never need to call it manually. """ - self._request_queue.put(('lease', { - 'ack_id': self._ack_id, - 'byte_size': self.size, - })) + self._request_queue.put( + ( + 'lease', + { + 'ack_id': self._ack_id, + 'byte_size': self.size, + }, + ), + ) def modify_ack_deadline(self, seconds): """Set the deadline for acknowledgement to the given value. @@ -182,17 +198,27 @@ def modify_ack_deadline(self, seconds): to. This should be between 0 and 600. Due to network latency, values below 10 are advised against. """ - self._request_queue.put(('modify_ack_deadline', { - 'ack_id': self._ack_id, - 'seconds': seconds, - })) + self._request_queue.put( + ( + 'modify_ack_deadline', + { + 'ack_id': self._ack_id, + 'seconds': seconds, + }, + ), + ) def nack(self): """Decline to acknowldge the given message. This will cause the message to be re-delivered to the subscription. 
""" - self._request_queue.put(('nack', { - 'ack_id': self._ack_id, - 'byte_size': self.size, - })) + self._request_queue.put( + ( + 'nack', + { + 'ack_id': self._ack_id, + 'byte_size': self.size, + }, + ), + ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 70cd227f68a8..ee5c97709553 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -29,6 +29,7 @@ _LOGGER = logging.getLogger(__name__) +_CALLBACK_WORKER_NAME = 'CallbackRequestsWorker' def _callback_completed(future): @@ -104,10 +105,10 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), ) # Also maintain a request queue and an executor. - _LOGGER.debug('Creating callback requests thread (not starting).') if executor is None: executor = futures.ThreadPoolExecutor(max_workers=10) self._executor = executor + _LOGGER.debug('Creating callback requests thread (not starting).') self._callback_requests = _helper_threads.QueueCallbackThread( self._request_queue, self.on_callback_request, @@ -116,7 +117,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), def close(self): """Close the existing connection.""" # Stop consuming messages. 
- self._consumer.helper_threads.stop('callback requests worker') + self._consumer.helper_threads.stop(_CALLBACK_WORKER_NAME) self._consumer.stop_consuming() # The subscription is closing cleanly; resolve the future if it is not @@ -149,7 +150,7 @@ def open(self, callback): _LOGGER.debug('Starting callback requests worker.') self._callback = callback self._consumer.helper_threads.start( - 'CallbackRequestsWorker', + _CALLBACK_WORKER_NAME, self._request_queue, self._callback_requests, ) @@ -159,7 +160,7 @@ def open(self, callback): # Spawn a helper thread that maintains all of the leases for # this policy. - _LOGGER.debug('Spawning lease maintenance worker.') + _LOGGER.debug('Starting lease maintenance worker.') self._leaser = threading.Thread( name='Thread-LeaseMaintenance', target=self.maintain_leases, @@ -171,7 +172,7 @@ def open(self, callback): return self._future def on_callback_request(self, callback_request): - """Map the callback request to the appropriate GRPC request.""" + """Map the callback request to the appropriate gRPC request.""" action, kwargs = callback_request[0], callback_request[1] getattr(self, action)(**kwargs) From e09a0394509b50225f172ea987d7ef6d1e08e618 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 16:02:42 -0800 Subject: [PATCH 0169/1197] Actually **stopping** the Pub / Sub consumer on an exception. (#4498) Fixes #4463. 
--- .../cloud/pubsub_v1/subscriber/_consumer.py | 6 ++--- .../pubsub_v1/subscriber/_helper_threads.py | 23 +++++++++++++++++++ .../pubsub_v1/subscriber/policy/thread.py | 4 ++-- .../pubsub_v1/subscriber/test_consumer.py | 13 ----------- .../subscriber/test_helper_threads.py | 23 +++++++++++++++++++ 5 files changed, 51 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 21f2fefc537d..651da085c97a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -242,10 +242,10 @@ def _blocking_consume(self): # case, break out of the while loop and exit this thread. _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') break - except KeyboardInterrupt: - self.stop_consuming() except Exception as exc: - self.active = self._policy.on_exception(exc) + recover = self._policy.on_exception(exc) + if not recover: + self.stop_consuming() def start_consuming(self): """Start consuming the stream.""" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index 16d90e5c0f52..fa32c1ae8605 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -40,6 +40,19 @@ STOP = uuid.uuid4() +def _current_thread(): + """Get the currently active thread. + + This is provided as a test helper so that it can be mocked easily. + Mocking ``threading.current_thread()`` directly may have unintended + consequences on code that relies on it. + + Returns: + threading.Thread: The current thread. 
+ """ + return threading.current_thread() + + class HelperThreadRegistry(object): def __init__(self): self._helper_threads = {} @@ -84,6 +97,16 @@ def stop(self, name): if helper_thread is None: return + if helper_thread.thread is _current_thread(): + # The current thread cannot ``join()`` itself but it can + # still send a signal to stop. + _LOGGER.debug('Cannot stop current thread %s', name) + helper_thread.queue.put(STOP) + # We return and stop short of ``pop()``-ing so that the + # thread that invoked the current helper can properly stop + # it. + return + # Join the thread if it is still alive. if helper_thread.thread.is_alive(): _LOGGER.debug('Stopping helper thread %s', name) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index ee5c97709553..ac6f2f46cbc3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -180,12 +180,12 @@ def on_exception(self, exception): """Handle the exception. If the exception is one of the retryable exceptions, this will signal - to the consumer thread that it should remain active. + to the consumer thread that it should "recover" from the failure. This will cause the stream to exit when it returns :data:`False`. Returns: - bool: Indicates if the caller should remain active or shut down. + bool: Indicates if the caller should recover or shut down. Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of retryable / idempotent exceptions. 
""" diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 3eb820d6c418..c80b436e941b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -76,19 +76,6 @@ def test_blocking_consume(): assert on_res.mock_calls[1][1][1] == mock.sentinel.B -def test_blocking_consume_keyboard_interrupt(): - consumer = create_consumer() - Policy = type(consumer._policy) - - # Establish that we get responses until we are sent the exiting event. - with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: - call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) - with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: - on_res.side_effect = KeyboardInterrupt - consumer._blocking_consume() - on_res.assert_called_once_with(consumer._policy, mock.sentinel.A) - - class OnException(object): def __init__(self, exiting_event, acceptable=None): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 6a8bff2a7f66..9d585d4c63fa 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -36,6 +36,29 @@ def test_stop_noop(): assert len(registry._helper_threads) == 0 +@mock.patch.object( + _helper_threads, '_current_thread', return_value=mock.sentinel.thread) +def test_stop_current_thread(_current_thread): + registry = _helper_threads.HelperThreadRegistry() + queue_ = mock.Mock(spec=('put',)) + + name = 'here' + registry._helper_threads[name] = _helper_threads._HelperThread( + name=name, + queue=queue_, + thread=_current_thread.return_value, + ) + assert 
list(registry._helper_threads.keys()) == [name] + registry.stop(name) + # Make sure it hasn't been removed from the registry ... + assert list(registry._helper_threads.keys()) == [name] + # ... but it did receive the STOP signal. + queue_.put.assert_called_once_with(_helper_threads.STOP) + + # Verify that our mock was only called once. + _current_thread.assert_called_once_with() + + def test_stop_dead_thread(): registry = _helper_threads.HelperThreadRegistry() registry._helper_threads['foo'] = _helper_threads._HelperThread( From f43f3b2b03dd3f244461304951ad216642ea525b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 16:21:18 -0800 Subject: [PATCH 0170/1197] Adding thread prefix for ThreadPoolExecutor in Pub/Sub. (#4480) --- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index ac6f2f46cbc3..c2fa8c34520a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -16,6 +16,7 @@ from concurrent import futures import logging +import sys import threading import grpc @@ -106,7 +107,14 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), # Also maintain a request queue and an executor. 
if executor is None: - executor = futures.ThreadPoolExecutor(max_workers=10) + executor_kwargs = {} + if sys.version_info >= (3, 6): + executor_kwargs['thread_name_prefix'] = ( + 'ThreadPoolExecutor-SubscriberPolicy') + executor = futures.ThreadPoolExecutor( + max_workers=10, + **executor_kwargs + ) self._executor = executor _LOGGER.debug('Creating callback requests thread (not starting).') self._callback_requests = _helper_threads.QueueCallbackThread( From d59f7fe7a6703867fb507136e03601563d2c4f6f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 29 Nov 2017 17:42:06 -0800 Subject: [PATCH 0171/1197] Preparing release 0.29.2 of Pub / Sub. (#4500) --- packages/google-cloud-pubsub/CHANGELOG.md | 29 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index c358fe2d2513..cb5bf612bf53 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,35 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.29.2 + +### Notable Implementation Changes + +- **Bug fix** (#4463): Making a subscription consumer actually stop + running after encountering an exception (#4472, #4498). This bug + is the **only** reason for the `0.29.2` release. +- Thread Changes + - Added names to all threads created directly by Pub / Sub (#4474, + #4476, #4480). Also removing spaces and colons from thread + names (#4476). +- Logging changes + - Adding debug logs when lease management exits (#4484) + - Adding debug logs when hen `QueueCallbackThread` exits (#4494). + Instances handle theprocessing of messages in a + subscription (e.g. to `ack`). 
+ - Using a named logger in `publisher.batch.thread` (#4473) + - Adding newlines before logging protobuf payloads (#4471) + +### Documentation + +- Fixing broken examples in quick start (#4398) + +### Dependencies + +- Dropping + +PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.2/ + ## 0.29.1 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 962d05018ea6..91af558d75b9 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.2.dev1', + version='0.29.2', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 32c258efd431cf8aa04e1069d532a2ac9d5d254b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 30 Nov 2017 09:32:23 -0800 Subject: [PATCH 0172/1197] Adding `.dev1` suffix to recently released packages. (#4501) Also removing content from Pub / Sub changelog that was accidentally included. 
--- packages/google-cloud-pubsub/CHANGELOG.md | 8 -------- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index cb5bf612bf53..51adc776fffc 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -23,14 +23,6 @@ - Using a named logger in `publisher.batch.thread` (#4473) - Adding newlines before logging protobuf payloads (#4471) -### Documentation - -- Fixing broken examples in quick start (#4398) - -### Dependencies - -- Dropping - PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.2/ ## 0.29.1 diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 91af558d75b9..57f5c2182def 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.2', + version='0.29.3.dev1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 534d3bb54d58fc812ddf368ee22d0605f55fb036 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Fri, 1 Dec 2017 13:09:48 -0800 Subject: [PATCH 0173/1197] Closes #4487: Give error if Pub/Sub subscribe() receives a non-callable callback (#4497) --- .../google/cloud/pubsub_v1/subscriber/client.py | 6 ++++++ .../pubsub_v1/subscriber/test_subscriber_client.py | 10 ++++++++++ 2 files changed, 16 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 6c9b2fccb10d..912490893d89 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -124,9 +124,15 @@ def subscribe(self, subscription, callback=None, flow_control=()): Returns: 
~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance of the defined ``consumer_class`` on the client. + + Raises: + TypeError: If ``callback`` is not callable. """ flow_control = types.FlowControl(*flow_control) subscr = self._policy_class(self, subscription, flow_control) if callable(callback): subscr.open(callback) + elif callback is not None: + error = '{!r} is not callable, please check input'.format(callback) + raise TypeError(error) return subscr diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 70da18cd477d..1836a44682de 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -14,6 +14,7 @@ from google.auth import credentials import mock +import pytest from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1.subscriber.policy import thread @@ -54,3 +55,12 @@ def test_subscribe_with_callback(): subscription = client.subscribe('sub_name_b', callback) open_.assert_called_once_with(callback) assert isinstance(subscription, thread.Policy) + + +def test_subscribe_with_failed_callback(): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + callback = 'abcdefg' + with pytest.raises(TypeError) as exc_info: + subscription = client.subscribe('sub_name_b', callback) + assert callback in str(exc_info.value) From ad4d22205861777f05c144fb8e8e888d6024bfcf Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 1 Dec 2017 14:39:49 -0800 Subject: [PATCH 0174/1197] Make sure inactive request generator is stopped before spawning new one in Pub / Sub consumer. 
(#4503) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 78 ++++++++- .../pubsub_v1/subscriber/_helper_threads.py | 13 +- .../pubsub_v1/subscriber/policy/thread.py | 2 +- .../pubsub_v1/subscriber/test_consumer.py | 158 +++++++++++++++++- .../subscriber/test_helper_threads.py | 9 +- 5 files changed, 240 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 651da085c97a..394c6e67d39f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -180,6 +180,7 @@ def __init__(self, policy): self._policy = policy self._request_queue = queue.Queue() self._exiting = threading.Event() + self._put_lock = threading.Lock() self.active = False self.helper_threads = _helper_threads.HelperThreadRegistry() @@ -193,13 +194,17 @@ def send_request(self, request): Args: request (Any): The request protobuf. """ - self._request_queue.put(request) + with self._put_lock: + self._request_queue.put(request) def _request_generator_thread(self): """Generate requests for the stream. This blocks for new requests on the request queue and yields them to gRPC. + + Yields: + google.cloud.pubsub_v1.types.StreamingPullRequest: Requests """ # First, yield the initial request. This occurs on every new # connection, fundamentally including a resumed connection. @@ -218,6 +223,73 @@ def _request_generator_thread(self): _LOGGER.debug('Sending request:\n%r', request) yield request + def _stop_request_generator(self, request_generator): + """Ensure a request generator is closed. + + This **must** be done when recovering from a retry-able exception. + If not, then an inactive request generator (i.e. not attached to any + actual RPC) will be trying to access the same request queue as the + active request generator. 
+ + In addition, we want the gRPC thread consuming to cleanly exit so + that system resources are not wasted. + + Args: + request_generator (Generator): A streaming pull request generator + returned from :meth:`_request_generator_thread`. + + Returns: + bool: Indicates if the generator was successfully stopped. Will + be :data:`True` unless the queue is not empty and the generator + is running. + """ + with self._put_lock: + try: + request_generator.close() + except ValueError: + # Should be ``ValueError('generator already executing')`` + if not self._request_queue.empty(): + # This case may be a false negative in **very** rare + # cases. We **assume** that the generator can't be + # ``close()``-ed because it is blocking on ``get()``. + # It's **very unlikely** that the generator was not + # blocking, but instead **in between** the blocking + # ``get()`` and the next ``yield`` / ``break``. However, + # for practical purposes, we only need to stop the request + # generator if the connection has timed out due to + # inactivity, which indicates an empty queue. + _LOGGER.debug( + 'Request generator could not be closed but ' + 'request queue is not empty.') + return False + # At this point we know: + # 1. The queue is empty and we hold the ``put()`` lock + # 2. We **cannot** ``close()`` the request generator. + # This means that the request generator is blocking at + # ``get()`` from the queue and will continue to block since + # we have locked ``.put()``. + self._request_queue.put(_helper_threads.STOP) + # Wait for the request generator to ``.get()`` the ``STOP``. + _LOGGER.debug( + 'Waiting for active request generator to receive STOP') + while not self._request_queue.empty(): + pass + # We would **like** to call ``request_generator.close()`` here + # but can't guarantee that the generator is paused, since it + # has a few instructions to complete between the ``get()`` + # and the ``break``. However, we are confident that + # 1. 
The queue was empty and we hold the ``put()`` lock + # 2. We added ``STOP`` + # 3. We waited until the request generator consumed ``STOP`` + # so we know the request generator **will** stop within a + # few cycles. + except Exception as exc: + _LOGGER.error('Failed to close request generator: %r', exc) + return False + + _LOGGER.debug('Successfully closed request generator.') + return True + def _blocking_consume(self): """Consume the stream indefinitely.""" while True: @@ -244,6 +316,8 @@ def _blocking_consume(self): break except Exception as exc: recover = self._policy.on_exception(exc) + if recover: + recover = self._stop_request_generator(request_generator) if not recover: self.stop_consuming() @@ -253,7 +327,7 @@ def start_consuming(self): self._exiting.clear() self.helper_threads.start( _BIDIRECTIONAL_CONSUMER_NAME, - self._request_queue, + self.send_request, self._blocking_consume, ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index fa32c1ae8605..e1a5a1d15968 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -29,7 +29,7 @@ _HelperThread = collections.namedtuple( 'HelperThreads', - ['name', 'thread', 'queue'], + ['name', 'thread', 'queue_put'], ) @@ -60,12 +60,13 @@ def __init__(self): def __contains__(self, needle): return needle in self._helper_threads - def start(self, name, queue, target): + def start(self, name, queue_put, target): """Create and start a helper thread. Args: name (str): The name of the helper thread. - queue (Queue): A concurrency-safe queue. + queue_put (Callable): The ``put()`` method for a + concurrency-safe queue. target (Callable): The target of the thread. 
Returns: @@ -80,7 +81,7 @@ def start(self, name, queue, target): thread.start() # Keep track of the helper thread, so we are able to stop it. - self._helper_threads[name] = _HelperThread(name, thread, queue) + self._helper_threads[name] = _HelperThread(name, thread, queue_put) _LOGGER.debug('Started helper thread %s', name) return thread @@ -101,7 +102,7 @@ def stop(self, name): # The current thread cannot ``join()`` itself but it can # still send a signal to stop. _LOGGER.debug('Cannot stop current thread %s', name) - helper_thread.queue.put(STOP) + helper_thread.queue_put(STOP) # We return and stop short of ``pop()``-ing so that the # thread that invoked the current helper can properly stop # it. @@ -110,7 +111,7 @@ def stop(self, name): # Join the thread if it is still alive. if helper_thread.thread.is_alive(): _LOGGER.debug('Stopping helper thread %s', name) - helper_thread.queue.put(STOP) + helper_thread.queue_put(STOP) helper_thread.thread.join() # Remove the thread from our tracking. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index c2fa8c34520a..6812a42eed36 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -159,7 +159,7 @@ def open(self, callback): self._callback = callback self._consumer.helper_threads.start( _CALLBACK_WORKER_NAME, - self._request_queue, + self._request_queue.put, self._callback_requests, ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index c80b436e941b..711e7e659fd5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import threading import types as base_types from google.auth import credentials @@ -78,15 +79,13 @@ def test_blocking_consume(): class OnException(object): - def __init__(self, exiting_event, acceptable=None): - self.exiting_event = exiting_event + def __init__(self, acceptable=None): self.acceptable = acceptable def __call__(self, exception): if exception is self.acceptable: return True else: - self.exiting_event.set() return False @@ -97,7 +96,7 @@ def test_blocking_consume_on_exception(): policy.on_response.side_effect = exc consumer = _consumer.Consumer(policy=policy) - policy.on_exception.side_effect = OnException(consumer._exiting) + policy.on_exception.side_effect = OnException() # Establish that we get responses until we are sent the exiting event. 
consumer._blocking_consume() @@ -119,8 +118,7 @@ def test_blocking_consume_two_exceptions(): policy.on_response.side_effect = (exc1, exc2) consumer = _consumer.Consumer(policy=policy) - policy.on_exception.side_effect = OnException( - consumer._exiting, acceptable=exc1) + policy.on_exception.side_effect = OnException(acceptable=exc1) # Establish that we get responses until we are sent the exiting event. consumer._blocking_consume() @@ -142,6 +140,152 @@ def test_start_consuming(): assert consumer.active is True start.assert_called_once_with( 'ConsumeBidirectionalStream', - consumer._request_queue, + consumer.send_request, consumer._blocking_consume, ) + + +def basic_queue_generator(queue, received): + while True: + value = queue.get() + received.put(value) + yield value + + +def test_stop_request_generator_not_running(): + # Model scenario tested: + # - The request generator **is not** running + # - The request queue **is not** empty + # Expected result: + # - ``_stop_request_generator()`` successfully calls ``.close()`` + consumer = create_consumer() + queue_ = consumer._request_queue + received = queue.Queue() + request_generator = basic_queue_generator(queue_, received) + + item1 = 'unblock-please' + item2 = 'still-here' + queue_.put(item1) + queue_.put(item2) + assert not queue_.empty() + assert received.empty() + thread = threading.Thread(target=next, args=(request_generator,)) + thread.start() + + # Make sure the generator is not stuck at the blocked ``.get()`` + # in the thread. + while request_generator.gi_running: + pass + assert received.get() == item1 + # Make sure it **isn't** done. + assert request_generator.gi_frame is not None + + stopped = consumer._stop_request_generator(request_generator) + assert stopped is True + + # Make sure it **is** done. 
+ assert not request_generator.gi_running + assert request_generator.gi_frame is None + assert not queue_.empty() + assert queue_.get() == item2 + assert queue_.empty() + + +def test_stop_request_generator_close_failure(): + # Model scenario tested: + # - The input isn't actually a generator + # Expected result: + # - ``_stop_request_generator()`` falls through to the ``LOGGER.error`` + # case and returns ``False`` + consumer = create_consumer() + + request_generator = mock.Mock(spec=('close',)) + request_generator.close.side_effect = TypeError('Really, not a generator') + + stopped = consumer._stop_request_generator(request_generator) + assert stopped is False + + # Make sure close() was only called once. + request_generator.close.assert_called_once_with() + + +def test_stop_request_generator_queue_non_empty(): + # Model scenario tested: + # - The request generator **is** running + # - The request queue **is not** empty + # Expected result: + # - ``_stop_request_generator()`` can't call ``.close()`` (since + # the generator is running) but then returns with ``False`` because + # the queue **is not** empty + consumer = create_consumer() + # Attach a "fake" queue to the request generator so the generator can + # block on an empty queue while the consumer's queue is not empty. + queue_ = queue.Queue() + received = queue.Queue() + request_generator = basic_queue_generator(queue_, received) + # Make sure the consumer's queue is not empty. + item1 = 'not-empty' + consumer._request_queue.put(item1) + + thread = threading.Thread(target=next, args=(request_generator,)) + thread.start() + + # Make sure the generator is stuck at the blocked ``.get()`` + # in ``thread``. + while not request_generator.gi_running: + pass + assert received.empty() + assert request_generator.gi_frame is not None + + stopped = consumer._stop_request_generator(request_generator) + assert stopped is False + + # Make sure the generator is **still** not finished. 
+ assert request_generator.gi_running + assert request_generator.gi_frame is not None + assert consumer._request_queue.get() == item1 + # Allow the generator to exit. + item2 = 'just-exit' + queue_.put(item2) + # Wait until it's actually done. + while request_generator.gi_running: + pass + assert received.get() == item2 + + +def test_stop_request_generator_running(): + # Model scenario tested: + # - The request generator **is** running + # - The request queue **is** empty + # Expected result: + # - ``_stop_request_generator()`` can't call ``.close()`` (since + # the generator is running) but then verifies that the queue is + # empty and sends ``STOP`` into the queue to successfully stop + # the generator + consumer = create_consumer() + queue_ = consumer._request_queue + received = queue.Queue() + request_generator = basic_queue_generator(queue_, received) + + thread = threading.Thread(target=next, args=(request_generator,)) + thread.start() + + # Make sure the generator is stuck at the blocked ``.get()`` + # in the thread. + while not request_generator.gi_running: + pass + assert received.empty() + assert request_generator.gi_frame is not None + + stopped = consumer._stop_request_generator(request_generator) + assert stopped is True + + # Make sure it **is** done, though we may have to wait until + # the generator finishes (it has a few instructions between the + # ``get()`` and the ``break``). 
+ while request_generator.gi_running: + pass + request_generator.close() + assert request_generator.gi_frame is None + assert received.get() == _helper_threads.STOP + assert queue_.empty() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 9d585d4c63fa..5f5ca33d8efc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -45,7 +45,7 @@ def test_stop_current_thread(_current_thread): name = 'here' registry._helper_threads[name] = _helper_threads._HelperThread( name=name, - queue=queue_, + queue_put=queue_.put, thread=_current_thread.return_value, ) assert list(registry._helper_threads.keys()) == [name] @@ -63,7 +63,7 @@ def test_stop_dead_thread(): registry = _helper_threads.HelperThreadRegistry() registry._helper_threads['foo'] = _helper_threads._HelperThread( name='foo', - queue=None, + queue_put=None, thread=threading.Thread(target=lambda: None), ) assert len(registry._helper_threads) == 1 @@ -79,9 +79,10 @@ def test_stop_alive_thread(join, is_alive, put): # Set up a registry with a helper thread in it. 
registry = _helper_threads.HelperThreadRegistry() + queue_ = queue.Queue() registry._helper_threads['foo'] = _helper_threads._HelperThread( name='foo', - queue=queue.Queue(), + queue_put=queue_.put, thread=threading.Thread(target=lambda: None), ) @@ -101,7 +102,7 @@ def test_stop_all(): registry = _helper_threads.HelperThreadRegistry() registry._helper_threads['foo'] = _helper_threads._HelperThread( name='foo', - queue=None, + queue_put=None, thread=threading.Thread(target=lambda: None), ) assert len(registry._helper_threads) == 1 From b9fdeda4545d2a51ddb2c17602c35153b45391b8 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 4 Dec 2017 09:59:11 -0800 Subject: [PATCH 0175/1197] Bugfix: Using `max` where `min` was used by mistake to ensure a value is non-negative (#4514) The original intent was to keep `bytes` at or above `0` but `min([-n, 0]) == -n` and more importantly `min([n, 0]) = 0` so that `bytes` would be artificially dropped to `0` when `drop()` is called. Fixes #4516. --- .../cloud/pubsub_v1/subscriber/policy/base.py | 5 ++++- .../unit/pubsub_v1/subscriber/test_policy_base.py | 13 +++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 087e9a1cb670..2b80f72bd496 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -212,7 +212,10 @@ def drop(self, ack_id, byte_size): if ack_id in self.managed_ack_ids: self.managed_ack_ids.remove(ack_id) self._bytes -= byte_size - self._bytes = min([self._bytes, 0]) + if self._bytes < 0: + _LOGGER.debug( + 'Bytes was unexpectedly negative: %d', self._bytes) + self._bytes = 0 # If we have been paused by flow control, check and see if we are # back within our limits. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 5dd082ec2de7..1f2a9dacaab6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import time from google.api_core import exceptions @@ -136,6 +137,18 @@ def test_drop(): assert policy._bytes == 0 +@mock.patch.object(base, '_LOGGER', spec=logging.Logger) +def test_drop_unexpected_negative(_LOGGER): + policy = create_policy() + policy.managed_ack_ids.add('ack_id_string') + policy._bytes = 0 + policy.drop('ack_id_string', 20) + assert len(policy.managed_ack_ids) == 0 + assert policy._bytes == 0 + _LOGGER.debug.assert_called_once_with( + 'Bytes was unexpectedly negative: %d', -20) + + def test_drop_below_threshold(): """Establish that we resume a paused subscription. From 0e88c30fd8c5c7edc40b24c0a39b2d7854280970 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 4 Dec 2017 10:40:41 -0800 Subject: [PATCH 0176/1197] Shutdown executor when closing Pub/Sub subscriber policy. (#4522) --- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 6812a42eed36..ffb29f531e5b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -127,6 +127,7 @@ def close(self): # Stop consuming messages. 
self._consumer.helper_threads.stop(_CALLBACK_WORKER_NAME) self._consumer.stop_consuming() + self._executor.shutdown() # The subscription is closing cleanly; resolve the future if it is not # resolved already. From c0961f44ae133d82ace34b3590230bad7d5c4fdb Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 4 Dec 2017 11:06:04 -0800 Subject: [PATCH 0177/1197] Requiring latest release of concurrent futures backport. (#4521) See: https://github.com/agronholm/pythonfutures/issues/63 --- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 2 +- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index ffb29f531e5b..3ccf454fd58a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -108,7 +108,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), # Also maintain a request queue and an executor. 
if executor is None: executor_kwargs = {} - if sys.version_info >= (3, 6): + if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): executor_kwargs['thread_name_prefix'] = ( 'ThreadPoolExecutor-SubscriberPolicy') executor = futures.ThreadPoolExecutor( diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 57f5c2182def..3ff535b13518 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-api-core[grpc] >= 0.1.1, < 0.2.0dev', + 'google-api-core[grpc] >= 0.1.2.dev1, < 0.2.0dev', 'google-auth >= 1.0.2, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', From e6805c267df73acd8228110807bd835272d00592 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 6 Dec 2017 13:21:02 -0800 Subject: [PATCH 0178/1197] Refactor Pub / Sub callback queue (#4511) * Renaming `QueueCallbackThread` -> `QueueCallbackWorker`. Also fixing a few typos nearby a mention of `QueueCallbackThread` in `pubsub/CHANGELOG.md`. * Making Policy.on_callback_request() less open-ended. --- packages/google-cloud-pubsub/CHANGELOG.md | 4 +-- .../cloud/pubsub_v1/subscriber/_consumer.py | 8 ++--- .../pubsub_v1/subscriber/_helper_threads.py | 20 +++++------ .../pubsub_v1/subscriber/policy/thread.py | 35 +++++++++++++++---- .../subscriber/test_helper_threads.py | 18 +++++----- .../subscriber/test_policy_thread.py | 27 +++++++++++--- 6 files changed, 77 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 51adc776fffc..446162ace07f 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -17,8 +17,8 @@ names (#4476). - Logging changes - Adding debug logs when lease management exits (#4484) - - Adding debug logs when hen `QueueCallbackThread` exits (#4494). 
- Instances handle theprocessing of messages in a + - Adding debug logs when `QueueCallbackThread` exits (#4494). + Instances handle the processing of messages in a subscription (e.g. to `ack`). - Using a named logger in `publisher.batch.thread` (#4473) - Adding newlines before logging protobuf payloads (#4471) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 394c6e67d39f..737bfcd1fe88 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -96,21 +96,21 @@ "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] "gRPC Python" -> "Consumer" [label="responses", color="red"] "Consumer" -> "request generator thread" [label="starts", color="gray"] - "Policy" -> "QueueCallbackThread" [label="starts", color="gray"] + "Policy" -> "QueueCallbackWorker" [label="starts", color="gray"] "request generator thread" -> "gRPC Python" [label="requests", color="blue"] "Consumer" -> "Policy" [label="responses", color="red"] "Policy" -> "futures.Executor" [label="response", color="red"] "futures.Executor" -> "callback" [label="response", color="red"] "callback" -> "callback_request_queue" [label="requests", color="blue"] - "callback_request_queue" -> "QueueCallbackThread" + "callback_request_queue" -> "QueueCallbackWorker" [label="consumed by", color="blue"] - "QueueCallbackThread" -> "Consumer" + "QueueCallbackWorker" -> "Consumer" [label="send_response", color="blue"] } This part is actually up to the Policy to enable. The consumer just provides a -thread-safe queue for requests. The :cls:`QueueCallbackThread` can be used by +thread-safe queue for requests. The :cls:`QueueCallbackWorker` can be used by the Policy implementation to spin up the worker thread to pump the concurrency-safe queue. 
See the Pub/Sub subscriber implementation for an example of this. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index e1a5a1d15968..b0f166e1a3fa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -21,7 +21,7 @@ __all__ = ( 'HelperThreadRegistry', - 'QueueCallbackThread', + 'QueueCallbackWorker', 'STOP', ) @@ -125,14 +125,9 @@ def stop_all(self): self.stop(name) -class QueueCallbackThread(object): +class QueueCallbackWorker(object): """A helper that executes a callback for every item in the queue. - .. note:: - - This is not actually a thread, but it is intended to be a target - for a thread. - Calls a blocking ``get()`` on the ``queue`` until it encounters :attr:`STOP`. @@ -141,8 +136,10 @@ class QueueCallbackThread(object): concurrency boundary implemented by ``executor``. Items will be popped off (with a blocking ``get()``) until :attr:`STOP` is encountered. - callback (Callable): A callback that can process items pulled off - of the queue. + callback (Callable[[str, Dict], Any]): A callback that can process + items pulled off of the queue. Items are assumed to be a pair + of a method name to be invoked and a dictionary of keyword + arguments for that method. """ def __init__(self, queue, callback): @@ -153,12 +150,13 @@ def __call__(self): while True: item = self.queue.get() if item == STOP: - _LOGGER.debug('Exiting the QueueCallbackThread.') + _LOGGER.debug('Exiting the QueueCallbackWorker.') return # Run the callback. If any exceptions occur, log them and # continue. 
try: - self._callback(item) + action, kwargs = item + self._callback(action, kwargs) except Exception as exc: _LOGGER.error('%s: %s', exc.__class__.__name__, exc) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 3ccf454fd58a..5fffcd09f068 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -117,9 +117,9 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), ) self._executor = executor _LOGGER.debug('Creating callback requests thread (not starting).') - self._callback_requests = _helper_threads.QueueCallbackThread( + self._callback_requests = _helper_threads.QueueCallbackWorker( self._request_queue, - self.on_callback_request, + self.dispatch_callback, ) def close(self): @@ -180,10 +180,33 @@ def open(self, callback): # Return the future. return self._future - def on_callback_request(self, callback_request): - """Map the callback request to the appropriate gRPC request.""" - action, kwargs = callback_request[0], callback_request[1] - getattr(self, action)(**kwargs) + def dispatch_callback(self, action, kwargs): + """Map the callback request to the appropriate gRPC request. + + Args: + action (str): The method to be invoked. + kwargs (Dict[str, Any]): The keyword arguments for the method + specified by ``action``. + + Raises: + ValueError: If ``action`` isn't one of the expected actions + "ack", "drop", "lease", "modify_ack_deadline" or "nack". 
+ """ + if action == 'ack': + self.ack(**kwargs) + elif action == 'drop': + self.drop(**kwargs) + elif action == 'lease': + self.lease(**kwargs) + elif action == 'modify_ack_deadline': + self.modify_ack_deadline(**kwargs) + elif action == 'nack': + self.nack(**kwargs) + else: + raise ValueError( + 'Unexpected action', action, + 'Must be one of "ack", "drop", "lease", ' + '"modify_ack_deadline" or "nack".') def on_exception(self, exception): """Handle the exception. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 5f5ca33d8efc..ec889b7fc2fd 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -117,33 +117,35 @@ def test_stop_all_noop(): assert len(registry._helper_threads) == 0 -def test_queue_callback_thread(): +def test_queue_callback_worker(): queue_ = queue.Queue() callback = mock.Mock(spec=()) - qct = _helper_threads.QueueCallbackThread(queue_, callback) + qct = _helper_threads.QueueCallbackWorker(queue_, callback) # Set up an appropriate mock for the queue, and call the queue callback # thread. with mock.patch.object(queue.Queue, 'get') as get: - get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + item1 = ('action', mock.sentinel.A) + get.side_effect = (item1, _helper_threads.STOP) qct() # Assert that we got the expected calls. 
assert get.call_count == 2 - callback.assert_called_once_with(mock.sentinel.A) + callback.assert_called_once_with('action', mock.sentinel.A) -def test_queue_callback_thread_exception(): +def test_queue_callback_worker_exception(): queue_ = queue.Queue() callback = mock.Mock(spec=(), side_effect=(Exception,)) - qct = _helper_threads.QueueCallbackThread(queue_, callback) + qct = _helper_threads.QueueCallbackWorker(queue_, callback) # Set up an appropriate mock for the queue, and call the queue callback # thread. with mock.patch.object(queue.Queue, 'get') as get: - get.side_effect = (mock.sentinel.A, _helper_threads.STOP) + item1 = ('action', mock.sentinel.A) + get.side_effect = (item1, _helper_threads.STOP) qct() # Assert that we got the expected calls. assert get.call_count == 2 - callback.assert_called_once_with(mock.sentinel.A) + callback.assert_called_once_with('action', mock.sentinel.A) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index fef7df01dea0..f73bfea21e12 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -81,11 +81,30 @@ def test_open(thread_start, htr_start): thread_start.assert_called() -def test_on_callback_request(): +def test_dispatch_callback_valid_actions(): policy = create_policy() - with mock.patch.object(policy, 'call_rpc') as call_rpc: - policy.on_callback_request(('call_rpc', {'something': 42})) - call_rpc.assert_called_once_with(something=42) + kwargs = {'foo': 10, 'bar': 13.37} + actions = ( + 'ack', + 'drop', + 'lease', + 'modify_ack_deadline', + 'nack', + ) + for action in actions: + with mock.patch.object(policy, action) as mocked: + policy.dispatch_callback(action, kwargs) + mocked.assert_called_once_with(**kwargs) + + +def test_dispatch_callback_invalid_action(): + 
policy = create_policy() + with pytest.raises(ValueError) as exc_info: + policy.dispatch_callback('gecko', {}) + + assert len(exc_info.value.args) == 3 + assert exc_info.value.args[0] == 'Unexpected action' + assert exc_info.value.args[1] == 'gecko' def test_on_exception_deadline_exceeded(): From a29b458c02e9faa7d0ae2ae6026cf9cb815fff66 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 6 Dec 2017 15:40:00 -0800 Subject: [PATCH 0179/1197] Re-factoring `Policy` constructor. (#4535) Doing so to make it clearer what is happening during construction. Also - removing the `_callback_requests` member (not needed) - modifying some docstrings in the `pubsub_v1.subscriber._constructor` module so that it is valid Sphinx/rST --- .../cloud/pubsub_v1/subscriber/_consumer.py | 11 ++- .../pubsub_v1/subscriber/policy/thread.py | 97 ++++++++++++------- 2 files changed, 69 insertions(+), 39 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 737bfcd1fe88..8d2ecf5623b6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -33,6 +33,7 @@ :class:`concurrent.futures.Executor`: .. graphviz:: + digraph responses_only { "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] "gRPC Python" -> "Consumer" [label="responses", color="red"] @@ -57,6 +58,7 @@ a queue for that: .. graphviz:: + digraph response_flow { "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] "gRPC Python" -> "Consumer" [label="responses", color="red"] @@ -71,6 +73,7 @@ queue new requests: .. graphviz:: + digraph thread_only_requests { "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] "gRPC Python" -> "Consumer" [label="responses", color="red"] @@ -92,6 +95,7 @@ all together looks like this: .. 
graphviz:: + digraph responses_only { "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] "gRPC Python" -> "Consumer" [label="responses", color="red"] @@ -110,7 +114,8 @@ } This part is actually up to the Policy to enable. The consumer just provides a -thread-safe queue for requests. The :cls:`QueueCallbackWorker` can be used by +thread-safe queue for requests. The :class:`QueueCallbackWorker` can be used by + the Policy implementation to spin up the worker thread to pump the concurrency-safe queue. See the Pub/Sub subscriber implementation for an example of this. @@ -146,7 +151,7 @@ class Consumer(object): generate requests. This thread is called the *request generator thread*. Having the request generator thread allows the consumer to hold the stream open indefinitely. Now gRPC will send responses as fast as the consumer can - ask for them. The consumer hands these off to the :cls:`Policy` via + ask for them. The consumer hands these off to the :class:`Policy` via :meth:`Policy.on_response`, which should not block. Finally, we do not want to block the main thread, so the consumer actually @@ -184,7 +189,7 @@ def __init__(self, policy): self.active = False self.helper_threads = _helper_threads.HelperThreadRegistry() - """:cls:`_helper_threads.HelperThreads`: manages the helper threads. + """:class:`_helper_threads.HelperThreads`: manages the helper threads. The policy may use this to schedule its own helper threads. """ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 5fffcd09f068..c8061aa43add 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -67,60 +67,81 @@ class Policy(base.BasePolicy): This consumer handles the connection to the Pub/Sub service and all of the concurrency needs. 
+ + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. + executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A + ThreadPoolExecutor instance, or anything duck-type compatible + with it. + queue (~queue.Queue): (Optional.) A Queue instance, appropriate + for crossing the concurrency boundary implemented by + ``executor``. """ + def __init__(self, client, subscription, flow_control=types.FlowControl(), executor=None, queue=None): - """Instantiate the policy. + super(Policy, self).__init__( + client=client, + flow_control=flow_control, + subscription=subscription, + ) + # Default the callback to a no-op; the **actual** callback is + # provided by ``.open()``. + self._callback = _do_nothing_callback + # Create a queue for keeping track of shared state. + self._request_queue = self._get_queue(queue) + # Also maintain an executor. + self._executor = self._get_executor(executor) + + @staticmethod + def _get_queue(queue): + """Gets a queue for the constructor. Args: - client (~.pubsub_v1.subscriber.client): The subscriber client used - to create this instance. - subscription (str): The name of the subscription. The canonical - format for this is - ``projects/{project}/subscriptions/{subscription}``. - flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow - control settings. - executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A - ThreadPoolExecutor instance, or anything duck-type compatible - with it. - queue (~queue.Queue): (Optional.) A Queue instance, appropriate + queue (Optional[~queue.Queue]): A Queue instance, appropriate for crossing the concurrency boundary implemented by ``executor``. 
- """ - # Default the callback to a no-op; it is provided by `.open`. - self._callback = _do_nothing_callback - # Default the future to None; it is provided by `.open`. - self._future = None - - # Create a queue for keeping track of shared state. + Returns: + ~queue.Queue: Either ``queue`` if not :data:`None` or a default + queue. + """ if queue is None: - queue = queue_mod.Queue() - self._request_queue = queue + return queue_mod.Queue() + else: + return queue - # Call the superclass constructor. - super(Policy, self).__init__( - client=client, - flow_control=flow_control, - subscription=subscription, - ) + @staticmethod + def _get_executor(executor): + """Gets an executor for the constructor. + + Args: + executor (Optional[~concurrent.futures.ThreadPoolExecutor]): A + ThreadPoolExecutor instance, or anything duck-type compatible + with it. - # Also maintain a request queue and an executor. + Returns: + ~concurrent.futures.ThreadPoolExecutor: Either ``executor`` if not + :data:`None` or a default thread pool executor with 10 workers + and a prefix (if supported). + """ if executor is None: executor_kwargs = {} if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): executor_kwargs['thread_name_prefix'] = ( 'ThreadPoolExecutor-SubscriberPolicy') - executor = futures.ThreadPoolExecutor( + return futures.ThreadPoolExecutor( max_workers=10, **executor_kwargs ) - self._executor = executor - _LOGGER.debug('Creating callback requests thread (not starting).') - self._callback_requests = _helper_threads.QueueCallbackWorker( - self._request_queue, - self.dispatch_callback, - ) + else: + return executor def close(self): """Close the existing connection.""" @@ -158,10 +179,14 @@ def open(self, callback): # Start the thread to pass the requests. 
_LOGGER.debug('Starting callback requests worker.') self._callback = callback + dispatch_worker = _helper_threads.QueueCallbackWorker( + self._request_queue, + self.dispatch_callback, + ) self._consumer.helper_threads.start( _CALLBACK_WORKER_NAME, self._request_queue.put, - self._callback_requests, + dispatch_worker, ) # Actually start consuming messages. From cac474894b46ccd32f8abf18fed6c70b5780055c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 6 Dec 2017 15:50:09 -0800 Subject: [PATCH 0180/1197] Dropping usage of `HelperThreadRegistry` in Pub / Sub policy. (#4536) --- .../pubsub_v1/subscriber/_helper_threads.py | 3 - .../pubsub_v1/subscriber/policy/thread.py | 81 +++++++++++++------ .../pubsub_v1/subscriber/test_policy_base.py | 6 +- .../subscriber/test_policy_thread.py | 52 +++++++++--- 4 files changed, 105 insertions(+), 37 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index b0f166e1a3fa..6b72a4cc6ad6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -57,9 +57,6 @@ class HelperThreadRegistry(object): def __init__(self): self._helper_threads = {} - def __contains__(self, needle): - return needle in self._helper_threads - def start(self, name, queue_put, target): """Create and start a helper thread. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index c8061aa43add..48712ac454bf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -30,7 +30,7 @@ _LOGGER = logging.getLogger(__name__) -_CALLBACK_WORKER_NAME = 'CallbackRequestsWorker' +_CALLBACK_WORKER_NAME = 'Thread-Consumer-CallbackRequestsWorker' def _callback_completed(future): @@ -98,6 +98,9 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), self._request_queue = self._get_queue(queue) # Also maintain an executor. self._executor = self._get_executor(executor) + # The threads created in ``.open()``. + self._dispatch_thread = None + self._leases_thread = None @staticmethod def _get_queue(queue): @@ -146,8 +149,12 @@ def _get_executor(executor): def close(self): """Close the existing connection.""" # Stop consuming messages. - self._consumer.helper_threads.stop(_CALLBACK_WORKER_NAME) + self._request_queue.put(_helper_threads.STOP) + self._dispatch_thread.join() # Wait until stopped. + self._dispatch_thread = None self._consumer.stop_consuming() + self._leases_thread.join() + self._leases_thread = None self._executor.shutdown() # The subscription is closing cleanly; resolve the future if it is not @@ -156,6 +163,53 @@ def close(self): self._future.set_result(None) self._future = None + def _start_dispatch(self): + """Start a thread to dispatch requests queued up by callbacks. + + .. note:: + + This assumes, but does not check, that ``_dispatch_thread`` + is :data:`None`. + + Spawns a thread to run :meth:`dispatch_callback` and sets the + "dispatch thread" member on the current policy. 
+ """ + _LOGGER.debug('Starting callback requests worker.') + dispatch_worker = _helper_threads.QueueCallbackWorker( + self._request_queue, + self.dispatch_callback, + ) + # Create and start the helper thread. + thread = threading.Thread( + name=_CALLBACK_WORKER_NAME, + target=dispatch_worker, + ) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._dispatch_thread = thread + + def _start_lease_worker(self): + """Spawn a helper thread that maintains all of leases for this policy. + + .. note:: + + This assumes, but does not check, that ``_leases_thread`` is + :data:`None`. + + Spawns a thread to run :meth:`maintain_leases` and sets the + "leases thread" member on the current policy. + """ + _LOGGER.debug('Starting lease maintenance worker.') + thread = threading.Thread( + name='Thread-LeaseMaintenance', + target=self.maintain_leases, + ) + thread.daemon = True + thread.start() + + self._leases_thread = thread + def open(self, callback): """Open a streaming pull connection and begin receiving messages. @@ -177,30 +231,11 @@ def open(self, callback): self._future = Future(policy=self) # Start the thread to pass the requests. - _LOGGER.debug('Starting callback requests worker.') self._callback = callback - dispatch_worker = _helper_threads.QueueCallbackWorker( - self._request_queue, - self.dispatch_callback, - ) - self._consumer.helper_threads.start( - _CALLBACK_WORKER_NAME, - self._request_queue.put, - dispatch_worker, - ) - + self._start_dispatch() # Actually start consuming messages. self._consumer.start_consuming() - - # Spawn a helper thread that maintains all of the leases for - # this policy. - _LOGGER.debug('Starting lease maintenance worker.') - self._leaser = threading.Thread( - name='Thread-LeaseMaintenance', - target=self.maintain_leases, - ) - self._leaser.daemon = True - self._leaser.start() + self._start_lease_worker() # Return the future. 
return self._future diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 1f2a9dacaab6..56ec8a83b979 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -180,8 +180,10 @@ def test_load(): assert policy._load == 0.2 # Returning a number above 100% is fine. - policy.lease(ack_id='three', byte_size=1000) - assert policy._load == 1.16 + with mock.patch.object(policy, 'close') as close: + policy.lease(ack_id='three', byte_size=1000) + assert policy._load == 1.16 + close.assert_called_once_with() def test_modify_ack_deadline(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index f73bfea21e12..6bdcfcf994d9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -21,6 +21,7 @@ from google.auth import credentials import mock import pytest +import six from six.moves import queue from google.cloud.pubsub_v1 import subscriber @@ -49,36 +50,69 @@ def test_init_with_executor(): def test_close(): + dispatch_thread = mock.Mock(spec=threading.Thread) + leases_thread = mock.Mock(spec=threading.Thread) + policy = create_policy() + policy._dispatch_thread = dispatch_thread + policy._leases_thread = leases_thread consumer = policy._consumer with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: policy.close() stop_consuming.assert_called_once_with() - assert 'callback request worker' not in policy._consumer.helper_threads + + assert policy._dispatch_thread is None + dispatch_thread.join.assert_called_once_with() + assert policy._leases_thread is None + 
leases_thread.join.assert_called_once_with() def test_close_with_future(): + dispatch_thread = mock.Mock(spec=threading.Thread) + leases_thread = mock.Mock(spec=threading.Thread) + policy = create_policy() + policy._dispatch_thread = dispatch_thread + policy._leases_thread = leases_thread policy._future = Future(policy=policy) consumer = policy._consumer with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: future = policy.future policy.close() stop_consuming.assert_called_once_with() + + assert policy._dispatch_thread is None + dispatch_thread.join.assert_called_once_with() + assert policy._leases_thread is None + leases_thread.join.assert_called_once_with() assert policy.future != future assert future.result() is None -@mock.patch.object(_helper_threads.HelperThreadRegistry, 'start') -@mock.patch.object(threading.Thread, 'start') -def test_open(thread_start, htr_start): +def test_open(): policy = create_policy() - with mock.patch.object(policy._consumer, 'start_consuming') as consuming: + consumer = policy._consumer + threads = ( + mock.Mock(spec=('name', 'start')), + mock.Mock(spec=('name', 'start')), + mock.Mock(spec=('name', 'start')), + ) + with mock.patch.object(threading, 'Thread', side_effect=threads): policy.open(mock.sentinel.CALLBACK) - assert policy._callback is mock.sentinel.CALLBACK - consuming.assert_called_once_with() - htr_start.assert_called() - thread_start.assert_called() + + assert policy._callback is mock.sentinel.CALLBACK + + assert policy._dispatch_thread is threads[0] + threads[0].start.assert_called_once_with() + + threads_dict = consumer.helper_threads._helper_threads + assert len(threads_dict) == 1 + helper_thread = next(six.itervalues(threads_dict)) + assert helper_thread.thread is threads[1] + threads[1].start.assert_called_once_with() + + assert policy._leases_thread is threads[2] + threads[2].start.assert_called_once_with() def test_dispatch_callback_valid_actions(): From 5d68506bb54af9848fca1c22a5e46a67ff7955e7 Mon 
Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 6 Dec 2017 16:08:55 -0800 Subject: [PATCH 0181/1197] Removing Pub / Sub HelperThreadRegistry. (#4537) I'll likely put back in **some** of the features (especially the concept of the namedtuple that knows about the thread **AND** the queue / threading.Event). --- .../cloud/pubsub_v1/subscriber/_consumer.py | 23 +++-- .../pubsub_v1/subscriber/_helper_threads.py | 91 ----------------- .../pubsub_v1/subscriber/test_consumer.py | 22 +++-- .../subscriber/test_helper_threads.py | 99 ------------------- .../subscriber/test_policy_thread.py | 6 +- 5 files changed, 27 insertions(+), 214 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 8d2ecf5623b6..2e915c54f367 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -130,7 +130,7 @@ _LOGGER = logging.getLogger(__name__) -_BIDIRECTIONAL_CONSUMER_NAME = 'ConsumeBidirectionalStream' +_BIDIRECTIONAL_CONSUMER_NAME = 'Thread-ConsumeBidirectionalStream' class Consumer(object): @@ -188,10 +188,7 @@ def __init__(self, policy): self._put_lock = threading.Lock() self.active = False - self.helper_threads = _helper_threads.HelperThreadRegistry() - """:class:`_helper_threads.HelperThreads`: manages the helper threads. - The policy may use this to schedule its own helper threads. - """ + self._consumer_thread = None def send_request(self, request): """Queue a request to be sent to gRPC. 
@@ -330,14 +327,20 @@ def start_consuming(self): """Start consuming the stream.""" self.active = True self._exiting.clear() - self.helper_threads.start( - _BIDIRECTIONAL_CONSUMER_NAME, - self.send_request, - self._blocking_consume, + thread = threading.Thread( + name=_BIDIRECTIONAL_CONSUMER_NAME, + target=self._blocking_consume, ) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._consumer_thread = thread def stop_consuming(self): """Signal the stream to stop and block until it completes.""" self.active = False self._exiting.set() - self.helper_threads.stop_all() + _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) + self.send_request(_helper_threads.STOP) + self._consumer_thread.join() + self._consumer_thread = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index 6b72a4cc6ad6..339ba2d3a11f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -12,26 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -import collections import logging -import threading import uuid -import six __all__ = ( - 'HelperThreadRegistry', 'QueueCallbackWorker', 'STOP', ) _LOGGER = logging.getLogger(__name__) -_HelperThread = collections.namedtuple( - 'HelperThreads', - ['name', 'thread', 'queue_put'], -) - # Helper thread stop indicator. This could be a sentinel object or None, # but the sentinel object's ID can change if the process is forked, and @@ -40,88 +31,6 @@ STOP = uuid.uuid4() -def _current_thread(): - """Get the currently active thread. - - This is provided as a test helper so that it can be mocked easily. 
- Mocking ``threading.current_thread()`` directly may have unintended - consequences on code that relies on it. - - Returns: - threading.Thread: The current thread. - """ - return threading.current_thread() - - -class HelperThreadRegistry(object): - def __init__(self): - self._helper_threads = {} - - def start(self, name, queue_put, target): - """Create and start a helper thread. - - Args: - name (str): The name of the helper thread. - queue_put (Callable): The ``put()`` method for a - concurrency-safe queue. - target (Callable): The target of the thread. - - Returns: - threading.Thread: The created thread. - """ - # Create and start the helper thread. - thread = threading.Thread( - name='Thread-ConsumerHelper-{}'.format(name), - target=target, - ) - thread.daemon = True - thread.start() - - # Keep track of the helper thread, so we are able to stop it. - self._helper_threads[name] = _HelperThread(name, thread, queue_put) - _LOGGER.debug('Started helper thread %s', name) - return thread - - def stop(self, name): - """Stops a helper thread. - - Sends the stop message and blocks until the thread joins. - - Args: - name (str): The name of the thread. - """ - # Attempt to retrieve the thread; if it is gone already, no-op. - helper_thread = self._helper_threads.get(name) - if helper_thread is None: - return - - if helper_thread.thread is _current_thread(): - # The current thread cannot ``join()`` itself but it can - # still send a signal to stop. - _LOGGER.debug('Cannot stop current thread %s', name) - helper_thread.queue_put(STOP) - # We return and stop short of ``pop()``-ing so that the - # thread that invoked the current helper can properly stop - # it. - return - - # Join the thread if it is still alive. - if helper_thread.thread.is_alive(): - _LOGGER.debug('Stopping helper thread %s', name) - helper_thread.queue_put(STOP) - helper_thread.thread.join() - - # Remove the thread from our tracking. 
- self._helper_threads.pop(name, None) - - def stop_all(self): - """Stop all helper threads.""" - # This could be more efficient by sending the stop signal to all - # threads before joining any of them. - for name in list(six.iterkeys(self._helper_threads)): - self.stop(name) - - class QueueCallbackWorker(object): """A helper that executes a callback for every item in the queue. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 711e7e659fd5..80884bf8f3af 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -96,10 +96,12 @@ def test_blocking_consume_on_exception(): policy.on_response.side_effect = exc consumer = _consumer.Consumer(policy=policy) + consumer._consumer_thread = mock.Mock(spec=threading.Thread) policy.on_exception.side_effect = OnException() # Establish that we get responses until we are sent the exiting event. consumer._blocking_consume() + assert consumer._consumer_thread is None # Check mocks. policy.call_rpc.assert_called_once() @@ -118,10 +120,12 @@ def test_blocking_consume_two_exceptions(): policy.on_response.side_effect = (exc1, exc2) consumer = _consumer.Consumer(policy=policy) + consumer._consumer_thread = mock.Mock(spec=threading.Thread) policy.on_exception.side_effect = OnException(acceptable=exc1) # Establish that we get responses until we are sent the exiting event. consumer._blocking_consume() + assert consumer._consumer_thread is None # Check mocks. 
assert policy.call_rpc.call_count == 2 @@ -133,16 +137,16 @@ def test_blocking_consume_two_exceptions(): def test_start_consuming(): consumer = create_consumer() - helper_threads = consumer.helper_threads - with mock.patch.object(helper_threads, 'start', autospec=True) as start: + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: consumer.start_consuming() - assert consumer._exiting.is_set() is False - assert consumer.active is True - start.assert_called_once_with( - 'ConsumeBidirectionalStream', - consumer.send_request, - consumer._blocking_consume, - ) + + assert consumer._exiting.is_set() is False + assert consumer.active is True + Thread.assert_called_once_with( + name=_consumer._BIDIRECTIONAL_CONSUMER_NAME, + target=consumer._blocking_consume, + ) + assert consumer._consumer_thread is Thread.return_value def basic_queue_generator(queue, received): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index ec889b7fc2fd..fdef24409837 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -12,111 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import threading - import mock from six.moves import queue from google.cloud.pubsub_v1.subscriber import _helper_threads -def test_start(): - registry = _helper_threads.HelperThreadRegistry() - queue_ = queue.Queue() - target = mock.Mock(spec=()) - with mock.patch.object(threading.Thread, 'start', autospec=True) as start: - registry.start('foo', queue_, target) - assert start.called - - -def test_stop_noop(): - registry = _helper_threads.HelperThreadRegistry() - assert len(registry._helper_threads) == 0 - registry.stop('foo') - assert len(registry._helper_threads) == 0 - - -@mock.patch.object( - _helper_threads, '_current_thread', return_value=mock.sentinel.thread) -def test_stop_current_thread(_current_thread): - registry = _helper_threads.HelperThreadRegistry() - queue_ = mock.Mock(spec=('put',)) - - name = 'here' - registry._helper_threads[name] = _helper_threads._HelperThread( - name=name, - queue_put=queue_.put, - thread=_current_thread.return_value, - ) - assert list(registry._helper_threads.keys()) == [name] - registry.stop(name) - # Make sure it hasn't been removed from the registry ... - assert list(registry._helper_threads.keys()) == [name] - # ... but it did receive the STOP signal. - queue_.put.assert_called_once_with(_helper_threads.STOP) - - # Verify that our mock was only called once. - _current_thread.assert_called_once_with() - - -def test_stop_dead_thread(): - registry = _helper_threads.HelperThreadRegistry() - registry._helper_threads['foo'] = _helper_threads._HelperThread( - name='foo', - queue_put=None, - thread=threading.Thread(target=lambda: None), - ) - assert len(registry._helper_threads) == 1 - registry.stop('foo') - assert len(registry._helper_threads) == 0 - - -@mock.patch.object(queue.Queue, 'put') -@mock.patch.object(threading.Thread, 'is_alive') -@mock.patch.object(threading.Thread, 'join') -def test_stop_alive_thread(join, is_alive, put): - is_alive.return_value = True - - # Set up a registry with a helper thread in it. 
- registry = _helper_threads.HelperThreadRegistry() - queue_ = queue.Queue() - registry._helper_threads['foo'] = _helper_threads._HelperThread( - name='foo', - queue_put=queue_.put, - thread=threading.Thread(target=lambda: None), - ) - - # Assert that the helper thread is present, and removed correctly - # on stop. - assert len(registry._helper_threads) == 1 - registry.stop('foo') - assert len(registry._helper_threads) == 0 - - # Assert that all of our mocks were called in the expected manner. - is_alive.assert_called_once_with() - join.assert_called_once_with() - put.assert_called_once_with(_helper_threads.STOP) - - -def test_stop_all(): - registry = _helper_threads.HelperThreadRegistry() - registry._helper_threads['foo'] = _helper_threads._HelperThread( - name='foo', - queue_put=None, - thread=threading.Thread(target=lambda: None), - ) - assert len(registry._helper_threads) == 1 - registry.stop_all() - assert len(registry._helper_threads) == 0 - - -def test_stop_all_noop(): - registry = _helper_threads.HelperThreadRegistry() - assert len(registry._helper_threads) == 0 - registry.stop_all() - assert len(registry._helper_threads) == 0 - - def test_queue_callback_worker(): queue_ = queue.Queue() callback = mock.Mock(spec=()) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 6bdcfcf994d9..ebb274905be6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -21,7 +21,6 @@ from google.auth import credentials import mock import pytest -import six from six.moves import queue from google.cloud.pubsub_v1 import subscriber @@ -105,10 +104,7 @@ def test_open(): assert policy._dispatch_thread is threads[0] threads[0].start.assert_called_once_with() - threads_dict = consumer.helper_threads._helper_threads - 
assert len(threads_dict) == 1 - helper_thread = next(six.itervalues(threads_dict)) - assert helper_thread.thread is threads[1] + assert consumer._consumer_thread is threads[1] threads[1].start.assert_called_once_with() assert policy._leases_thread is threads[2] From 587594996b6af20373186130dfb9dd70c9fd4564 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 7 Dec 2017 10:18:02 -0800 Subject: [PATCH 0182/1197] Make sure Pub / Sub BIDI consumer doesn't try to join itself. (#4540) This functionality was removed in #4537, so this puts it back. --- .../cloud/pubsub_v1/subscriber/_consumer.py | 33 ++++++++++++++++--- .../pubsub_v1/subscriber/test_consumer.py | 29 ++++++++++++++++ 2 files changed, 58 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 2e915c54f367..38363ad00b78 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -321,7 +321,8 @@ def _blocking_consume(self): if recover: recover = self._stop_request_generator(request_generator) if not recover: - self.stop_consuming() + self._stop_no_join() + return def start_consuming(self): """Start consuming the stream.""" @@ -336,11 +337,35 @@ def start_consuming(self): _LOGGER.debug('Started helper thread %s', thread.name) self._consumer_thread = thread - def stop_consuming(self): - """Signal the stream to stop and block until it completes.""" + def _stop_no_join(self): + """Signal the request stream to stop. + + To actually stop the worker ("consumer thread"), a ``STOP`` is + sent to the request queue. + + The ``_consumer_thread`` member is removed from the current instance + and returned. + + Returns: + threading.Thread: The worker ("consumer thread") that is being + stopped. 
+ """ self.active = False self._exiting.set() _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) self.send_request(_helper_threads.STOP) - self._consumer_thread.join() + thread = self._consumer_thread self._consumer_thread = None + return thread + + def stop_consuming(self): + """Signal the stream to stop and block until it completes. + + To actually stop the worker ("consumer thread"), a ``STOP`` is + sent to the request queue. + + This **assumes** that the caller is not in the same thread + (since a thread cannot ``join()`` itself). + """ + thread = self._stop_no_join() + thread.join() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 80884bf8f3af..7f212d18d9bf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -77,6 +77,18 @@ def test_blocking_consume(): assert on_res.mock_calls[1][1][1] == mock.sentinel.B +@mock.patch.object(_consumer, '_LOGGER') +def test_blocking_consume_when_exiting(_LOGGER): + consumer = create_consumer() + assert consumer._exiting.is_set() is False + consumer._exiting.set() + + # Make sure method cleanly exits. + assert consumer._blocking_consume() is None + + _LOGGER.debug.assert_called_once_with('Event signalled consumer exit.') + + class OnException(object): def __init__(self, acceptable=None): @@ -149,6 +161,23 @@ def test_start_consuming(): assert consumer._consumer_thread is Thread.return_value +def test_stop_consuming(): + consumer = create_consumer() + consumer.active = True + assert consumer._exiting.is_set() is False + thread = mock.Mock(spec=threading.Thread) + consumer._consumer_thread = thread + + assert consumer.stop_consuming() is None + + # Make sure state was updated. 
+ assert consumer.active is False + assert consumer._exiting.is_set() is True + assert consumer._consumer_thread is None + # Check mocks. + thread.join.assert_called_once_with() + + def basic_queue_generator(queue, received): while True: value = queue.get() From d9af40869f46fc651c8589bea4fbd949ec82c81d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 7 Dec 2017 10:46:27 -0800 Subject: [PATCH 0183/1197] Removing redundant `Consumer.active` member. (#4549) Also renaming `Consumer._exiting` as `Consumer.stopped` (and making it a public member). --- .../cloud/pubsub_v1/subscriber/_consumer.py | 12 ++++-------- .../cloud/pubsub_v1/subscriber/policy/base.py | 6 +++--- .../unit/pubsub_v1/subscriber/test_consumer.py | 13 +++++-------- .../pubsub_v1/subscriber/test_policy_base.py | 16 ++++++++-------- 4 files changed, 20 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 38363ad00b78..44349ca3c90f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -184,10 +184,8 @@ def __init__(self, policy): """ self._policy = policy self._request_queue = queue.Queue() - self._exiting = threading.Event() + self.stopped = threading.Event() self._put_lock = threading.Lock() - - self.active = False self._consumer_thread = None def send_request(self, request): @@ -299,7 +297,7 @@ def _blocking_consume(self): # exit cleanly when the user has called stop_consuming(). This # checks to make sure we're not exiting before opening a new # stream. 
- if self._exiting.is_set(): + if self.stopped.is_set(): _LOGGER.debug('Event signalled consumer exit.') break @@ -326,8 +324,7 @@ def _blocking_consume(self): def start_consuming(self): """Start consuming the stream.""" - self.active = True - self._exiting.clear() + self.stopped.clear() thread = threading.Thread( name=_BIDIRECTIONAL_CONSUMER_NAME, target=self._blocking_consume, @@ -350,8 +347,7 @@ def _stop_no_join(self): threading.Thread: The worker ("consumer thread") that is being stopped. """ - self.active = False - self._exiting.set() + self.stopped.set() _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) self.send_request(_helper_threads.STOP) thread = self._consumer_thread diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 2b80f72bd496..f16653838b1e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -177,7 +177,7 @@ def ack(self, ack_id, time_to_ack=None, byte_size=None): # However, if the consumer is inactive, then queue the ack_id here # instead; it will be acked as part of the initial request when the # consumer is started again. - if self._consumer.active: + if self._consumer.stopped.is_set(): request = types.StreamingPullRequest(ack_ids=[ack_id]) self._consumer.send_request(request) else: @@ -311,7 +311,7 @@ def maintain_leases(self): """ while True: # Sanity check: Should this infinite loop quit? - if not self._consumer.active: + if not self._consumer.stopped.is_set(): _LOGGER.debug('Consumer inactive, ending lease maintenance.') return @@ -326,7 +326,7 @@ def maintain_leases(self): # because it is more efficient to make a single request. 
ack_ids = list(self.managed_ack_ids) _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) - if ack_ids and self._consumer.active: + if ack_ids and self._consumer.stopped.is_set(): request = types.StreamingPullRequest( modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=[p99] * len(ack_ids), diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 7f212d18d9bf..d11be9f0dcf2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -80,8 +80,8 @@ def test_blocking_consume(): @mock.patch.object(_consumer, '_LOGGER') def test_blocking_consume_when_exiting(_LOGGER): consumer = create_consumer() - assert consumer._exiting.is_set() is False - consumer._exiting.set() + assert consumer.stopped.is_set() is False + consumer.stopped.set() # Make sure method cleanly exits. assert consumer._blocking_consume() is None @@ -152,8 +152,7 @@ def test_start_consuming(): with mock.patch.object(threading, 'Thread', autospec=True) as Thread: consumer.start_consuming() - assert consumer._exiting.is_set() is False - assert consumer.active is True + assert consumer.stopped.is_set() is False Thread.assert_called_once_with( name=_consumer._BIDIRECTIONAL_CONSUMER_NAME, target=consumer._blocking_consume, @@ -163,16 +162,14 @@ def test_start_consuming(): def test_stop_consuming(): consumer = create_consumer() - consumer.active = True - assert consumer._exiting.is_set() is False + assert consumer.stopped.is_set() is False thread = mock.Mock(spec=threading.Thread) consumer._consumer_thread = thread assert consumer.stop_consuming() is None # Make sure state was updated. - assert consumer.active is False - assert consumer._exiting.is_set() is True + assert consumer.stopped.is_set() is True assert consumer._consumer_thread is None # Check mocks. 
thread.join.assert_called_once_with() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 56ec8a83b979..360f9c8a573d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -85,7 +85,7 @@ def test_subscription(): def test_ack(): policy = create_policy() - policy._consumer.active = True + policy._consumer.stopped.set() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string', 20) send_request.assert_called_once_with(types.StreamingPullRequest( @@ -97,7 +97,7 @@ def test_ack(): def test_ack_no_time(): policy = create_policy() - policy._consumer.active = True + policy._consumer.stopped.set() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string') send_request.assert_called_once_with(types.StreamingPullRequest( @@ -109,7 +109,7 @@ def test_ack_no_time(): def test_ack_paused(): policy = create_policy() policy._paused = True - policy._consumer.active = False + policy._consumer.stopped.clear() with mock.patch.object(policy, 'open') as open_: policy.ack('ack_id_string') open_.assert_called() @@ -198,20 +198,20 @@ def test_modify_ack_deadline(): def test_maintain_leases_inactive_consumer(): policy = create_policy() - policy._consumer.active = False + policy._consumer.stopped.clear() assert policy.maintain_leases() is None def test_maintain_leases_ack_ids(): policy = create_policy() - policy._consumer.active = True + policy._consumer.stopped.set() policy.lease('my ack id', 50) # Mock the sleep object. 
with mock.patch.object(time, 'sleep', autospec=True) as sleep: def trigger_inactive(seconds): assert 0 < seconds < 10 - policy._consumer.active = False + policy._consumer.stopped.clear() sleep.side_effect = trigger_inactive # Also mock the consumer, which sends the request. @@ -226,11 +226,11 @@ def trigger_inactive(seconds): def test_maintain_leases_no_ack_ids(): policy = create_policy() - policy._consumer.active = True + policy._consumer.stopped.set() with mock.patch.object(time, 'sleep', autospec=True) as sleep: def trigger_inactive(seconds): assert 0 < seconds < 10 - policy._consumer.active = False + policy._consumer.stopped.clear() sleep.side_effect = trigger_inactive policy.maintain_leases() sleep.assert_called() From 196747c78e5612fa775c9171f749118d6b4b7c57 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 7 Dec 2017 13:48:59 -0800 Subject: [PATCH 0184/1197] Break Policy<->Consumer reference cycle. (#4552) Make `Policy` the parent of `Consumer` and explicitly require passing a `policy` into `Consumer.start_consuming()` (and its helpers). --- .../cloud/pubsub_v1/subscriber/_consumer.py | 49 ++++++++++----- .../cloud/pubsub_v1/subscriber/_histogram.py | 2 +- .../cloud/pubsub_v1/subscriber/client.py | 4 +- .../cloud/pubsub_v1/subscriber/message.py | 4 +- .../cloud/pubsub_v1/subscriber/policy/base.py | 2 +- .../pubsub_v1/subscriber/policy/thread.py | 2 +- .../pubsub_v1/subscriber/test_consumer.py | 63 +++++++++---------- 7 files changed, 69 insertions(+), 57 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 44349ca3c90f..88d1b19ab468 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -176,13 +176,7 @@ class Consumer(object): low. 
The Consumer and end-user can configure any sort of executor they want for the actual processing of the responses, which may be CPU intensive. """ - def __init__(self, policy): - """ - Args: - policy (Consumer): The consumer policy, which defines how - requests and responses are handled. - """ - self._policy = policy + def __init__(self): self._request_queue = queue.Queue() self.stopped = threading.Event() self._put_lock = threading.Lock() @@ -197,18 +191,24 @@ def send_request(self, request): with self._put_lock: self._request_queue.put(request) - def _request_generator_thread(self): + def _request_generator_thread(self, policy): """Generate requests for the stream. This blocks for new requests on the request queue and yields them to gRPC. + Args: + policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy + that owns this consumer. A policy is used to create the + initial request used to open the streaming pull bidirectional + stream. + Yields: google.cloud.pubsub_v1.types.StreamingPullRequest: Requests """ # First, yield the initial request. This occurs on every new # connection, fundamentally including a resumed connection. - initial_request = self._policy.get_initial_request(ack_queue=True) + initial_request = policy.get_initial_request(ack_queue=True) _LOGGER.debug('Sending initial request:\n%r', initial_request) yield initial_request @@ -290,8 +290,13 @@ def _stop_request_generator(self, request_generator): _LOGGER.debug('Successfully closed request generator.') return True - def _blocking_consume(self): - """Consume the stream indefinitely.""" + def _blocking_consume(self, policy): + """Consume the stream indefinitely. + + Args: + policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy, + which defines how requests and responses are handled. + """ while True: # It is possible that a timeout can cause the stream to not # exit cleanly when the user has called stop_consuming(). 
This @@ -301,12 +306,12 @@ def _blocking_consume(self): _LOGGER.debug('Event signalled consumer exit.') break - request_generator = self._request_generator_thread() - response_generator = self._policy.call_rpc(request_generator) + request_generator = self._request_generator_thread(policy) + response_generator = policy.call_rpc(request_generator) try: for response in response_generator: _LOGGER.debug('Received response:\n%r', response) - self._policy.on_response(response) + policy.on_response(response) # If the loop above exits without an exception, then the # request stream terminated cleanly, which should only happen @@ -315,19 +320,29 @@ def _blocking_consume(self): _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') break except Exception as exc: - recover = self._policy.on_exception(exc) + recover = policy.on_exception(exc) if recover: recover = self._stop_request_generator(request_generator) if not recover: self._stop_no_join() return - def start_consuming(self): - """Start consuming the stream.""" + def start_consuming(self, policy): + """Start consuming the stream. + + Sets the ``_consumer_thread`` member on the current consumer with + a newly started thread. + + Args: + policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy + that owns this consumer. A policy defines how requests and + responses are handled. 
+ """ self.stopped.clear() thread = threading.Thread( name=_BIDIRECTIONAL_CONSUMER_NAME, target=self._blocking_consume, + args=(policy,), ) thread.daemon = True thread.start() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py index ddf468457931..46a8c3f1a2c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py @@ -23,7 +23,7 @@ class Histogram(object): The default implementation uses the 99th percentile of previous ack times to implicitly lease messages; however, custom - :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer` subclasses + :class:`~.pubsub_v1.subscriber._consumer.Consumer` subclasses are free to use a different formula. The precision of data stored is to the nearest integer. Additionally, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 912490893d89..439a843cb3e9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -97,7 +97,7 @@ def subscribe(self, subscription, callback=None, flow_control=()): """Return a representation of an individual subscription. This method creates and returns a ``Consumer`` object (that is, a - :class:`~.pubsub_v1.subscriber.consumer.base.BaseConsumer`) + :class:`~.pubsub_v1.subscriber._consumer.Consumer`) subclass) bound to the topic. It does `not` create the subcription on the backend (or do any API call at all); it simply returns an object capable of doing these things. @@ -122,7 +122,7 @@ def subscribe(self, subscription, callback=None, flow_control=()): inundated with too many messages at once. 
Returns: - ~.pubsub_v1.subscriber.consumer.base.BaseConsumer: An instance + ~.pubsub_v1.subscriber._consumer.Consumer: An instance of the defined ``consumer_class`` on the client. Raises: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 33bf04502225..44dc9136ddda 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -26,7 +26,7 @@ class Message(object): them in callbacks on subscriptions; most users should never have a need to instantiate them by hand. (The exception to this is if you are implementing a custom subclass to - :class:`~.pubsub_v1.subscriber.consumer.BaseConsumer`.) + :class:`~.pubsub_v1.subscriber._consumer.Consumer`.) Attributes: message_id (str): The message ID. In general, you should not need @@ -186,7 +186,7 @@ def modify_ack_deadline(self, seconds): The default implementation handles this for you; you should not need to manually deal with setting ack deadlines. The exception case is if you are implementing your own custom subclass of - :class:`~.pubsub_v1.subcriber.consumer.BaseConsumer`. + :class:`~.pubsub_v1.subcriber._consumer.Consumer`. .. 
note:: This is not an extension; it *sets* the deadline to the given diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index f16653838b1e..fc520e734277 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -75,7 +75,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), histogram_data=None): self._client = client self._subscription = subscription - self._consumer = _consumer.Consumer(self) + self._consumer = _consumer.Consumer() self._ack_deadline = 10 self._last_histogram_size = 0 self._future = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 48712ac454bf..283719fce593 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -234,7 +234,7 @@ def open(self, callback): self._callback = callback self._start_dispatch() # Actually start consuming messages. - self._consumer.start_consuming() + self._consumer.start_consuming(self) self._start_lease_worker() # Return the future. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index d11be9f0dcf2..047607e7f562 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -27,15 +27,8 @@ from google.cloud.pubsub_v1.subscriber.policy import thread -def create_consumer(): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - subscription = client.subscribe('sub_name_e') - return _consumer.Consumer(policy=subscription) - - def test_send_request(): - consumer = create_consumer() + consumer = _consumer.Consumer() request = types.StreamingPullRequest(subscription='foo') with mock.patch.object(queue.Queue, 'put') as put: consumer.send_request(request) @@ -43,8 +36,11 @@ def test_send_request(): def test_request_generator_thread(): - consumer = create_consumer() - generator = consumer._request_generator_thread() + consumer = _consumer.Consumer() + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + policy = client.subscribe('sub_name_e') + generator = consumer._request_generator_thread(policy) # The first request that comes from the request generator thread # should always be the initial request. @@ -64,27 +60,24 @@ def test_request_generator_thread(): def test_blocking_consume(): - consumer = create_consumer() - Policy = type(consumer._policy) + policy = mock.Mock(spec=('call_rpc', 'on_response')) + policy.call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) - # Establish that we get responses until we run out of them. 
- with mock.patch.object(Policy, 'call_rpc', autospec=True) as call_rpc: - call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) - with mock.patch.object(Policy, 'on_response', autospec=True) as on_res: - consumer._blocking_consume() - assert on_res.call_count == 2 - assert on_res.mock_calls[0][1][1] == mock.sentinel.A - assert on_res.mock_calls[1][1][1] == mock.sentinel.B + consumer = _consumer.Consumer() + assert consumer._blocking_consume(policy) is None + policy.call_rpc.assert_called_once() + policy.on_response.assert_has_calls( + [mock.call(mock.sentinel.A), mock.call(mock.sentinel.B)]) @mock.patch.object(_consumer, '_LOGGER') def test_blocking_consume_when_exiting(_LOGGER): - consumer = create_consumer() + consumer = _consumer.Consumer() assert consumer.stopped.is_set() is False consumer.stopped.set() # Make sure method cleanly exits. - assert consumer._blocking_consume() is None + assert consumer._blocking_consume(None) is None _LOGGER.debug.assert_called_once_with('Event signalled consumer exit.') @@ -107,12 +100,12 @@ def test_blocking_consume_on_exception(): exc = TypeError('Bad things!') policy.on_response.side_effect = exc - consumer = _consumer.Consumer(policy=policy) + consumer = _consumer.Consumer() consumer._consumer_thread = mock.Mock(spec=threading.Thread) policy.on_exception.side_effect = OnException() # Establish that we get responses until we are sent the exiting event. - consumer._blocking_consume() + consumer._blocking_consume(policy) assert consumer._consumer_thread is None # Check mocks. @@ -131,12 +124,12 @@ def test_blocking_consume_two_exceptions(): exc2 = ValueError('Something grumble.') policy.on_response.side_effect = (exc1, exc2) - consumer = _consumer.Consumer(policy=policy) + consumer = _consumer.Consumer() consumer._consumer_thread = mock.Mock(spec=threading.Thread) policy.on_exception.side_effect = OnException(acceptable=exc1) # Establish that we get responses until we are sent the exiting event. 
- consumer._blocking_consume() + consumer._blocking_consume(policy) assert consumer._consumer_thread is None # Check mocks. @@ -148,20 +141,24 @@ def test_blocking_consume_two_exceptions(): def test_start_consuming(): - consumer = create_consumer() + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + policy = client.subscribe('sub_name_e') + consumer = _consumer.Consumer() with mock.patch.object(threading, 'Thread', autospec=True) as Thread: - consumer.start_consuming() + consumer.start_consuming(policy) assert consumer.stopped.is_set() is False Thread.assert_called_once_with( name=_consumer._BIDIRECTIONAL_CONSUMER_NAME, target=consumer._blocking_consume, + args=(policy,), ) assert consumer._consumer_thread is Thread.return_value def test_stop_consuming(): - consumer = create_consumer() + consumer = _consumer.Consumer() assert consumer.stopped.is_set() is False thread = mock.Mock(spec=threading.Thread) consumer._consumer_thread = thread @@ -188,7 +185,7 @@ def test_stop_request_generator_not_running(): # - The request queue **is not** empty # Expected result: # - ``_stop_request_generator()`` successfully calls ``.close()`` - consumer = create_consumer() + consumer = _consumer.Consumer() queue_ = consumer._request_queue received = queue.Queue() request_generator = basic_queue_generator(queue_, received) @@ -227,7 +224,7 @@ def test_stop_request_generator_close_failure(): # Expected result: # - ``_stop_request_generator()`` falls through to the ``LOGGER.error`` # case and returns ``False`` - consumer = create_consumer() + consumer = _consumer.Consumer() request_generator = mock.Mock(spec=('close',)) request_generator.close.side_effect = TypeError('Really, not a generator') @@ -247,7 +244,7 @@ def test_stop_request_generator_queue_non_empty(): # - ``_stop_request_generator()`` can't call ``.close()`` (since # the generator is running) but then returns with ``False`` because # the queue **is not** empty - consumer = 
create_consumer() + consumer = _consumer.Consumer() # Attach a "fake" queue to the request generator so the generator can # block on an empty queue while the consumer's queue is not empty. queue_ = queue.Queue() @@ -292,7 +289,7 @@ def test_stop_request_generator_running(): # the generator is running) but then verifies that the queue is # empty and sends ``STOP`` into the queue to successfully stop # the generator - consumer = create_consumer() + consumer = _consumer.Consumer() queue_ = consumer._request_queue received = queue.Queue() request_generator = basic_queue_generator(queue_, received) From 8d5d23c130ae8530567aa472129bb0084a634691 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 7 Dec 2017 14:49:57 -0800 Subject: [PATCH 0185/1197] Checking _Rendezvous.done() when stopping Pub / Sub request generator. (#4554) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 16 ++++- .../pubsub_v1/subscriber/test_consumer.py | 65 +++++++++++++++---- 2 files changed, 66 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 88d1b19ab468..a0db4c547070 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -223,7 +223,7 @@ def _request_generator_thread(self, policy): _LOGGER.debug('Sending request:\n%r', request) yield request - def _stop_request_generator(self, request_generator): + def _stop_request_generator(self, request_generator, response_generator): """Ensure a request generator is closed. This **must** be done when recovering from a retry-able exception. @@ -237,12 +237,23 @@ def _stop_request_generator(self, request_generator): Args: request_generator (Generator): A streaming pull request generator returned from :meth:`_request_generator_thread`. 
+ response_generator (grpc.Future): The gRPC bidirectional stream + object that **was** consuming the ``request_generator``. (It + will actually spawn a thread to consume the requests, but + that thread will stop once the rendezvous has a status code + set.) Returns: bool: Indicates if the generator was successfully stopped. Will be :data:`True` unless the queue is not empty and the generator is running. """ + if not response_generator.done(): + _LOGGER.debug( + 'Response generator must be done before stopping ' + 'request generator.') + return False + with self._put_lock: try: request_generator.close() @@ -322,7 +333,8 @@ def _blocking_consume(self, policy): except Exception as exc: recover = policy.on_exception(exc) if recover: - recover = self._stop_request_generator(request_generator) + recover = self._stop_request_generator( + request_generator, response_generator) if not recover: self._stop_no_join() return diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 047607e7f562..a05f50b412ca 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -115,18 +115,21 @@ def test_blocking_consume_on_exception(): def test_blocking_consume_two_exceptions(): - policy = mock.Mock(spec=('call_rpc', 'on_response', 'on_exception')) - policy.call_rpc.side_effect = ( - (mock.sentinel.A,), - (mock.sentinel.B,), - ) + policy = mock.Mock(spec=('call_rpc', 'on_exception')) + exc1 = NameError('Oh noes.') exc2 = ValueError('Something grumble.') - policy.on_response.side_effect = (exc1, exc2) + policy.on_exception.side_effect = OnException(acceptable=exc1) + + response_generator1 = mock.MagicMock(spec=('__iter__', 'done')) + response_generator1.__iter__.side_effect = exc1 + response_generator1.done.return_value = True + response_generator2 = 
mock.MagicMock(spec=('__iter__', 'done')) + response_generator2.__iter__.side_effect = exc2 + policy.call_rpc.side_effect = (response_generator1, response_generator2) consumer = _consumer.Consumer() consumer._consumer_thread = mock.Mock(spec=threading.Thread) - policy.on_exception.side_effect = OnException(acceptable=exc1) # Establish that we get responses until we are sent the exiting event. consumer._blocking_consume(policy) @@ -134,8 +137,10 @@ def test_blocking_consume_two_exceptions(): # Check mocks. assert policy.call_rpc.call_count == 2 - policy.on_response.assert_has_calls( - [mock.call(mock.sentinel.A), mock.call(mock.sentinel.B)]) + response_generator1.__iter__.assert_called_once_with() + response_generator1.done.assert_called_once_with() + response_generator2.__iter__.assert_called_once_with() + response_generator2.done.assert_not_called() policy.on_exception.assert_has_calls( [mock.call(exc1), mock.call(exc2)]) @@ -179,6 +184,18 @@ def basic_queue_generator(queue, received): yield value +def test_stop_request_generator_response_not_done(): + consumer = _consumer.Consumer() + + response_generator = mock.Mock(spec=('done',)) + response_generator.done.return_value = False + stopped = consumer._stop_request_generator(None, response_generator) + assert stopped is False + + # Check mocks. + response_generator.done.assert_called_once_with() + + def test_stop_request_generator_not_running(): # Model scenario tested: # - The request generator **is not** running @@ -207,7 +224,10 @@ def test_stop_request_generator_not_running(): # Make sure it **isn't** done. assert request_generator.gi_frame is not None - stopped = consumer._stop_request_generator(request_generator) + response_generator = mock.Mock(spec=('done',)) + response_generator.done.return_value = True + stopped = consumer._stop_request_generator( + request_generator, response_generator) assert stopped is True # Make sure it **is** done. 
@@ -217,6 +237,9 @@ def test_stop_request_generator_not_running(): assert queue_.get() == item2 assert queue_.empty() + # Check mocks. + response_generator.done.assert_called_once_with() + def test_stop_request_generator_close_failure(): # Model scenario tested: @@ -229,11 +252,15 @@ def test_stop_request_generator_close_failure(): request_generator = mock.Mock(spec=('close',)) request_generator.close.side_effect = TypeError('Really, not a generator') - stopped = consumer._stop_request_generator(request_generator) + response_generator = mock.Mock(spec=('done',)) + response_generator.done.return_value = True + stopped = consumer._stop_request_generator( + request_generator, response_generator) assert stopped is False # Make sure close() was only called once. request_generator.close.assert_called_once_with() + response_generator.done.assert_called_once_with() def test_stop_request_generator_queue_non_empty(): @@ -264,7 +291,10 @@ def test_stop_request_generator_queue_non_empty(): assert received.empty() assert request_generator.gi_frame is not None - stopped = consumer._stop_request_generator(request_generator) + response_generator = mock.Mock(spec=('done',)) + response_generator.done.return_value = True + stopped = consumer._stop_request_generator( + request_generator, response_generator) assert stopped is False # Make sure the generator is **still** not finished. @@ -279,6 +309,9 @@ def test_stop_request_generator_queue_non_empty(): pass assert received.get() == item2 + # Check mocks. 
+ response_generator.done.assert_called_once_with() + def test_stop_request_generator_running(): # Model scenario tested: @@ -304,7 +337,10 @@ def test_stop_request_generator_running(): assert received.empty() assert request_generator.gi_frame is not None - stopped = consumer._stop_request_generator(request_generator) + response_generator = mock.Mock(spec=('done',)) + response_generator.done.return_value = True + stopped = consumer._stop_request_generator( + request_generator, response_generator) assert stopped is True # Make sure it **is** done, though we may have to wait until @@ -316,3 +352,6 @@ def test_stop_request_generator_running(): assert request_generator.gi_frame is None assert received.get() == _helper_threads.STOP assert queue_.empty() + + # Check mocks. + response_generator.done.assert_called_once_with() From 350112378a4df6c3a12d06c95da2619945dc6fa9 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 8 Dec 2017 14:49:55 -0800 Subject: [PATCH 0186/1197] Adding pause/resume to Pub / Sub consumer. (#4558) Using these (rather then open/close on the subscription Policy) when the flow control signals the message load is too great. 
--- .../cloud/pubsub_v1/subscriber/_consumer.py | 55 ++++++++++++++- .../cloud/pubsub_v1/subscriber/policy/base.py | 4 +- .../pubsub_v1/subscriber/test_consumer.py | 67 +++++++++++++++---- .../pubsub_v1/subscriber/test_policy_base.py | 58 +++++++++------- 4 files changed, 145 insertions(+), 39 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index a0db4c547070..2392f17b58af 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -179,6 +179,7 @@ class Consumer(object): def __init__(self): self._request_queue = queue.Queue() self.stopped = threading.Event() + self._can_consume = threading.Event() self._put_lock = threading.Lock() self._consumer_thread = None @@ -319,8 +320,10 @@ def _blocking_consume(self, policy): request_generator = self._request_generator_thread(policy) response_generator = policy.call_rpc(request_generator) + responses = _pausable_iterator( + response_generator, self._can_consume) try: - for response in response_generator: + for response in responses: _LOGGER.debug('Received response:\n%r', response) policy.on_response(response) @@ -339,6 +342,34 @@ def _blocking_consume(self, policy): self._stop_no_join() return + def pause(self): + """Pause the current consumer. + + This method is idempotent by design. + + This will clear the ``_can_consume`` event which is checked + every time :meth:`_blocking_consume` consumes a response from the + bidirectional streaming pull. + + Complement to :meth:`resume`. + """ + _LOGGER.debug('Pausing consumer') + self._can_consume.clear() + + def resume(self): + """Resume the current consumer. + + This method is idempotent by design. 
+ + This will set the ``_can_consume`` event which is checked + every time :meth:`_blocking_consume` consumes a response from the + bidirectional streaming pull. + + Complement to :meth:`pause`. + """ + _LOGGER.debug('Resuming consumer') + self._can_consume.set() + def start_consuming(self, policy): """Start consuming the stream. @@ -351,6 +382,7 @@ def start_consuming(self, policy): responses are handled. """ self.stopped.clear() + self.resume() # Make sure we aren't paused. thread = threading.Thread( name=_BIDIRECTIONAL_CONSUMER_NAME, target=self._blocking_consume, @@ -374,6 +406,7 @@ def _stop_no_join(self): threading.Thread: The worker ("consumer thread") that is being stopped. """ + self.resume() # Make sure we aren't paused. self.stopped.set() _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) self.send_request(_helper_threads.STOP) @@ -392,3 +425,23 @@ def stop_consuming(self): """ thread = self._stop_no_join() thread.join() + + +def _pausable_iterator(iterator, can_continue): + """Converts a standard iterator into one that can be paused. + + The ``can_continue`` event can be used by an independent, concurrent + worker to pause and resume the iteration over ``iterator``. + + Args: + iterator (Iterator): Any iterator to be iterated over. + can_continue (threading.Event): An event which determines if we + can advance to the next iteration. Will be ``wait()``-ed on + before + + Yields: + Any: The items from ``iterator``. + """ + while True: + can_continue.wait() + yield next(iterator) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index fc520e734277..220197fb94c4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -225,7 +225,7 @@ def drop(self, ack_id, byte_size): # before restarting. 
if self._paused and self._load < self.flow_control.resume_threshold: self._paused = False - self.open(self._callback) + self._consumer.resume() def get_initial_request(self, ack_queue=False): """Return the initial request. @@ -291,7 +291,7 @@ def lease(self, ack_id, byte_size): # If we do, we need to stop the stream. if self._load >= 1.0: self._paused = True - self.close() + self._consumer.pause() def maintain_leases(self): """Maintain all of the leases being managed by the policy. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index a05f50b412ca..d56274fc70c8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -61,9 +61,11 @@ def test_request_generator_thread(): def test_blocking_consume(): policy = mock.Mock(spec=('call_rpc', 'on_response')) - policy.call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + policy.call_rpc.return_value = iter((mock.sentinel.A, mock.sentinel.B)) consumer = _consumer.Consumer() + consumer.resume() + assert consumer._blocking_consume(policy) is None policy.call_rpc.assert_called_once() policy.on_response.assert_has_calls( @@ -96,11 +98,12 @@ def __call__(self, exception): def test_blocking_consume_on_exception(): policy = mock.Mock(spec=('call_rpc', 'on_response', 'on_exception')) - policy.call_rpc.return_value = (mock.sentinel.A, mock.sentinel.B) + policy.call_rpc.return_value = iter((mock.sentinel.A, mock.sentinel.B)) exc = TypeError('Bad things!') policy.on_response.side_effect = exc consumer = _consumer.Consumer() + consumer.resume() consumer._consumer_thread = mock.Mock(spec=threading.Thread) policy.on_exception.side_effect = OnException() @@ -114,6 +117,28 @@ def test_blocking_consume_on_exception(): policy.on_exception.assert_called_once_with(exc) +class 
RaisingResponseGenerator(object): + # NOTE: This is needed because defining `.next` on an **instance** + # rather than the **class** will not be iterable in Python 2. + # This is problematic since a `Mock` just sets members. + + def __init__(self, exception): + self.exception = exception + self.done_calls = 0 + self.next_calls = 0 + + def done(self): + self.done_calls += 1 + return True + + def __next__(self): + self.next_calls += 1 + raise self.exception + + def next(self): + return self.__next__() # Python 2 + + def test_blocking_consume_two_exceptions(): policy = mock.Mock(spec=('call_rpc', 'on_exception')) @@ -121,30 +146,48 @@ def test_blocking_consume_two_exceptions(): exc2 = ValueError('Something grumble.') policy.on_exception.side_effect = OnException(acceptable=exc1) - response_generator1 = mock.MagicMock(spec=('__iter__', 'done')) - response_generator1.__iter__.side_effect = exc1 - response_generator1.done.return_value = True - response_generator2 = mock.MagicMock(spec=('__iter__', 'done')) - response_generator2.__iter__.side_effect = exc2 + response_generator1 = RaisingResponseGenerator(exc1) + response_generator2 = RaisingResponseGenerator(exc2) policy.call_rpc.side_effect = (response_generator1, response_generator2) consumer = _consumer.Consumer() + consumer.resume() consumer._consumer_thread = mock.Mock(spec=threading.Thread) # Establish that we get responses until we are sent the exiting event. - consumer._blocking_consume(policy) + assert consumer._blocking_consume(policy) is None assert consumer._consumer_thread is None # Check mocks. 
assert policy.call_rpc.call_count == 2 - response_generator1.__iter__.assert_called_once_with() - response_generator1.done.assert_called_once_with() - response_generator2.__iter__.assert_called_once_with() - response_generator2.done.assert_not_called() + assert response_generator1.next_calls == 1 + assert response_generator1.done_calls == 1 + assert response_generator2.next_calls == 1 + assert response_generator2.done_calls == 0 policy.on_exception.assert_has_calls( [mock.call(exc1), mock.call(exc2)]) +@mock.patch.object(_consumer, '_LOGGER') +def test_pause(_LOGGER): + consumer = _consumer.Consumer() + consumer._can_consume.set() + + assert consumer.pause() is None + assert not consumer._can_consume.is_set() + _LOGGER.debug.assert_called_once_with('Pausing consumer') + + +@mock.patch.object(_consumer, '_LOGGER') +def test_resume(_LOGGER): + consumer = _consumer.Consumer() + consumer._can_consume.clear() + + assert consumer.resume() is None + assert consumer._can_consume.is_set() + _LOGGER.debug.assert_called_once_with('Resuming consumer') + + def test_start_consuming(): creds = mock.Mock(spec=credentials.Credentials) client = subscriber.Client(credentials=creds) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 360f9c8a573d..60c372ec75e9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -109,10 +109,13 @@ def test_ack_no_time(): def test_ack_paused(): policy = create_policy() policy._paused = True - policy._consumer.stopped.clear() - with mock.patch.object(policy, 'open') as open_: + consumer = policy._consumer + + with mock.patch.object(consumer, 'resume') as resume: policy.ack('ack_id_string') - open_.assert_called() + resume.assert_called_once_with() + + assert policy._paused is False assert 
'ack_id_string' in policy._ack_on_resume @@ -157,33 +160,38 @@ def test_drop_below_threshold(): """ policy = create_policy() policy.managed_ack_ids.add('ack_id_string') - policy._bytes = 20 + num_bytes = 20 + policy._bytes = num_bytes policy._paused = True - with mock.patch.object(policy, 'open') as open_: - policy.drop(ack_id='ack_id_string', byte_size=20) - open_.assert_called_once_with(policy._callback) + consumer = policy._consumer + + with mock.patch.object(consumer, 'resume') as resume: + policy.drop(ack_id='ack_id_string', byte_size=num_bytes) + resume.assert_called_once_with() + assert policy._paused is False def test_load(): flow_control = types.FlowControl(max_messages=10, max_bytes=1000) policy = create_policy(flow_control=flow_control) - - # This should mean that our messages count is at 10%, and our bytes - # are at 15%; the ._load property should return the higher (0.15). - policy.lease(ack_id='one', byte_size=150) - assert policy._load == 0.15 - - # After this message is added, the messages should be higher at 20% - # (versus 16% for bytes). - policy.lease(ack_id='two', byte_size=10) - assert policy._load == 0.2 - - # Returning a number above 100% is fine. - with mock.patch.object(policy, 'close') as close: + consumer = policy._consumer + + with mock.patch.object(consumer, 'pause') as pause: + # This should mean that our messages count is at 10%, and our bytes + # are at 15%; the ._load property should return the higher (0.15). + policy.lease(ack_id='one', byte_size=150) + assert policy._load == 0.15 + pause.assert_not_called() + # After this message is added, the messages should be higher at 20% + # (versus 16% for bytes). + policy.lease(ack_id='two', byte_size=10) + assert policy._load == 0.2 + pause.assert_not_called() + # Returning a number above 100% is fine. 
policy.lease(ack_id='three', byte_size=1000) assert policy._load == 1.16 - close.assert_called_once_with() + pause.assert_called_once_with() def test_modify_ack_deadline(): @@ -251,11 +259,13 @@ def test_lease(): def test_lease_above_threshold(): flow_control = types.FlowControl(max_messages=2) policy = create_policy(flow_control=flow_control) - with mock.patch.object(policy, 'close') as close: + consumer = policy._consumer + + with mock.patch.object(consumer, 'pause') as pause: policy.lease(ack_id='first_ack_id', byte_size=20) - assert close.call_count == 0 + pause.assert_not_called() policy.lease(ack_id='second_ack_id', byte_size=25) - close.assert_called_once_with() + pause.assert_called_once_with() def test_nack(): From bea44886bb4e77c48cd76c9bfe0f424dfeeb52d6 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 8 Dec 2017 15:31:56 -0800 Subject: [PATCH 0187/1197] Making new Pub / Sub and API core releases. (#4559) --- packages/google-cloud-pubsub/CHANGELOG.md | 37 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 4 +-- 2 files changed, 39 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 446162ace07f..c8de4cf1653a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,43 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.29.3 + +### Notable Implementation Changes + +- In subscription consumer thread: Making sure the request generator + attached to an inactive bidirectional streaming pull is stopped before + spawning a new request generator. This way we have a (fairly strong) + guarantee that requests in the queue don't get sent into an inactive + stream (#4503, #4554). +- Adding `pause` / `resume` to subscription consumer thread and using these + methods during flow control. 
The previous implementation tried to close the + subscription (which involved 3 worker threads and 10 executors in a thread + pool) and then re-open a new subscription. But, this was not entirely + possible to shut down correctly from **within** one of the worker threads. + Instead, we only pause the worker (of the 3) that is pulling new responses + from the bidirectional streaming pull (#4558). +- **Bug fix** (#4516): Using `max` where `min` was used by mistake to + ensure the number of bytes tracked for subscription flow control + remained non-negative (#4514). +- Raising `TypeError` if `SubscriberClient.subscribe` receives a + non-callable callback (#4497). +- Shutting down thread pool executor when closing a subscriber + policy (#4522). +- Renaming `Policy.on_callback_request` to `Policy.dispatch_callback` + and making the behavior much less dynamic (#4511). +- Make sure subscription consumer thread doesn't try to join itself + when exiting in error (#4540). + +### Dependencies + +- Upgrading `google-api-core` dependency to latest revision (`0.1.2`) + since we rely on the latest version of the `concurrent.futures` backport + to provide the `thread_name_prefix` argument for thread pool + executor (#4521, #XYZ). 
+ +PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.3/ + ## 0.29.2 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3ff535b13518..6e1d5f361d73 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,7 +51,7 @@ REQUIREMENTS = [ - 'google-api-core[grpc] >= 0.1.2.dev1, < 0.2.0dev', + 'google-api-core[grpc] >= 0.1.2, < 0.2.0dev', 'google-auth >= 1.0.2, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.3.dev1', + version='0.29.3', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 8b656b0f58a2f1b8ea6c2dd348a84f22b063dacc Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 11 Dec 2017 09:14:01 -0800 Subject: [PATCH 0188/1197] Un-confuse the "active"<->"stopped" relationship in Pub/Sub consumer. (#4562) I messed this up during #4549, and as a result, there is no lease maintenance in the `0.29.3` release. Yay Friday releases! --- .../cloud/pubsub_v1/subscriber/policy/base.py | 16 ++++++++-------- .../pubsub_v1/subscriber/test_policy_base.py | 17 ++++++++++------- 2 files changed, 18 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 220197fb94c4..c48fdec29fc5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -173,15 +173,15 @@ def ack(self, ack_id, time_to_ack=None, byte_size=None): if time_to_ack is not None: self.histogram.add(int(time_to_ack)) - # Send the request to ack the message. 
- # However, if the consumer is inactive, then queue the ack_id here - # instead; it will be acked as part of the initial request when the - # consumer is started again. if self._consumer.stopped.is_set(): + # If the consumer is inactive, then queue the ack_id here; it + # will be acked as part of the initial request when the consumer + # is started again. + self._ack_on_resume.add(ack_id) + else: + # Send the request to ack the message. request = types.StreamingPullRequest(ack_ids=[ack_id]) self._consumer.send_request(request) - else: - self._ack_on_resume.add(ack_id) # Remove the message from lease management. self.drop(ack_id=ack_id, byte_size=byte_size) @@ -311,7 +311,7 @@ def maintain_leases(self): """ while True: # Sanity check: Should this infinite loop quit? - if not self._consumer.stopped.is_set(): + if self._consumer.stopped.is_set(): _LOGGER.debug('Consumer inactive, ending lease maintenance.') return @@ -326,7 +326,7 @@ def maintain_leases(self): # because it is more efficient to make a single request. 
ack_ids = list(self.managed_ack_ids) _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) - if ack_ids and self._consumer.stopped.is_set(): + if ack_ids and not self._consumer.stopped.is_set(): request = types.StreamingPullRequest( modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=[p99] * len(ack_ids), diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 60c372ec75e9..3eba4501bc5f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -85,7 +85,7 @@ def test_subscription(): def test_ack(): policy = create_policy() - policy._consumer.stopped.set() + policy._consumer.stopped.clear() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string', 20) send_request.assert_called_once_with(types.StreamingPullRequest( @@ -97,7 +97,7 @@ def test_ack(): def test_ack_no_time(): policy = create_policy() - policy._consumer.stopped.set() + policy._consumer.stopped.clear() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string') send_request.assert_called_once_with(types.StreamingPullRequest( @@ -110,6 +110,7 @@ def test_ack_paused(): policy = create_policy() policy._paused = True consumer = policy._consumer + consumer.stopped.set() with mock.patch.object(consumer, 'resume') as resume: policy.ack('ack_id_string') @@ -206,20 +207,21 @@ def test_modify_ack_deadline(): def test_maintain_leases_inactive_consumer(): policy = create_policy() - policy._consumer.stopped.clear() + policy._consumer.stopped.set() assert policy.maintain_leases() is None def test_maintain_leases_ack_ids(): policy = create_policy() - policy._consumer.stopped.set() + policy._consumer.stopped.clear() policy.lease('my ack id', 50) # Mock the sleep object. 
with mock.patch.object(time, 'sleep', autospec=True) as sleep: def trigger_inactive(seconds): assert 0 < seconds < 10 - policy._consumer.stopped.clear() + policy._consumer.stopped.set() + sleep.side_effect = trigger_inactive # Also mock the consumer, which sends the request. @@ -234,11 +236,12 @@ def trigger_inactive(seconds): def test_maintain_leases_no_ack_ids(): policy = create_policy() - policy._consumer.stopped.set() + policy._consumer.stopped.clear() with mock.patch.object(time, 'sleep', autospec=True) as sleep: def trigger_inactive(seconds): assert 0 < seconds < 10 - policy._consumer.stopped.clear() + policy._consumer.stopped.set() + sleep.side_effect = trigger_inactive policy.maintain_leases() sleep.assert_called() From 7083b326231610167ca85fa48097933dfe48388e Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 11 Dec 2017 09:49:44 -0800 Subject: [PATCH 0189/1197] Pub / sub release 0.29.4. (#4566) --- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index c8de4cf1653a..9fcf5d40dedf 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.29.4 + +### Notable Implementation Changes + +- **Bug fix**: Restore previous behavior of the subscription lease + maintenance worker. This was accidentally "stopped" in `0.29.3` + due to a change in implementation that went from an `active` + boolean to an "inactive" / `stopped` boolean, so `True` became + `False` and vice-versa (#4564). 
+ +PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.4/ + ## 0.29.3 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6e1d5f361d73..88aa0486bd82 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.3', + version='0.29.4', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 8e6e47c1d8510900beee78a75e60473b43d1011f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 11 Dec 2017 10:30:58 -0800 Subject: [PATCH 0190/1197] Post PR #4559: add back `.dev1` to version numbers. (#4561) --- packages/google-cloud-pubsub/CHANGELOG.md | 2 +- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 9fcf5d40dedf..2b6e52b9bce4 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -49,7 +49,7 @@ PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.4/ - Upgrading `google-api-core` dependency to latest revision (`0.1.2`) since we rely on the latest version of the `concurrent.futures` backport to provide the `thread_name_prefix` argument for thread pool - executor (#4521, #XYZ). + executor (#4521, #4559). 
PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.3/ diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 88aa0486bd82..48dfe0f1fc7e 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.4', + version='0.29.5.dev1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 286c4c261e2385abf62298deb016382db48b0977 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 11 Dec 2017 10:59:24 -0800 Subject: [PATCH 0191/1197] Dropping redundant Pub / Sub `Policy._paused` data member. (#4568) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 5 +++++ .../cloud/pubsub_v1/subscriber/policy/base.py | 6 ++---- .../unit/pubsub_v1/subscriber/test_consumer.py | 11 +++++++++++ .../pubsub_v1/subscriber/test_policy_base.py | 16 ++++++---------- 4 files changed, 24 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 2392f17b58af..f0bada8b1140 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -342,6 +342,11 @@ def _blocking_consume(self, policy): self._stop_no_join() return + @property + def paused(self): + """bool: Check if the current consumer is paused.""" + return not self._can_consume.is_set() + def pause(self): """Pause the current consumer. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index c48fdec29fc5..e10bc8e8bbe8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -86,7 +86,6 @@ def __init__(self, client, subscription, # They should not need to be used by subclasses. self._bytes = 0 self._ack_on_resume = set() - self._paused = False @property def ack_deadline(self): @@ -223,8 +222,8 @@ def drop(self, ack_id, byte_size): # In order to not thrash too much, require us to have passed below # the resume threshold (80% by default) of each flow control setting # before restarting. - if self._paused and self._load < self.flow_control.resume_threshold: - self._paused = False + if (self._consumer.paused and + self._load < self.flow_control.resume_threshold): self._consumer.resume() def get_initial_request(self, ack_queue=False): @@ -290,7 +289,6 @@ def lease(self, ack_id, byte_size): # Sanity check: Do we have too many things in our inventory? # If we do, we need to stop the stream. 
if self._load >= 1.0: - self._paused = True self._consumer.pause() def maintain_leases(self): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index d56274fc70c8..fb042c27f5b2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -168,6 +168,17 @@ def test_blocking_consume_two_exceptions(): [mock.call(exc1), mock.call(exc2)]) +def test_paused(): + consumer = _consumer.Consumer() + assert consumer.paused is True + + consumer._can_consume.set() + assert consumer.paused is False + + consumer._can_consume.clear() + assert consumer.paused is True + + @mock.patch.object(_consumer, '_LOGGER') def test_pause(_LOGGER): consumer = _consumer.Consumer() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 3eba4501bc5f..59dbd13db4f6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -108,15 +108,13 @@ def test_ack_no_time(): def test_ack_paused(): policy = create_policy() - policy._paused = True consumer = policy._consumer consumer.stopped.set() + assert consumer.paused is True - with mock.patch.object(consumer, 'resume') as resume: - policy.ack('ack_id_string') - resume.assert_called_once_with() + policy.ack('ack_id_string') - assert policy._paused is False + assert consumer.paused is False assert 'ack_id_string' in policy._ack_on_resume @@ -163,14 +161,12 @@ def test_drop_below_threshold(): policy.managed_ack_ids.add('ack_id_string') num_bytes = 20 policy._bytes = num_bytes - policy._paused = True consumer = policy._consumer + assert consumer.paused is True - with 
mock.patch.object(consumer, 'resume') as resume: - policy.drop(ack_id='ack_id_string', byte_size=num_bytes) - resume.assert_called_once_with() + policy.drop(ack_id='ack_id_string', byte_size=num_bytes) - assert policy._paused is False + assert consumer.paused is False def test_load(): From 0c8375291bdbafbbf9c81f69030f5a58c4a52891 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 11 Dec 2017 15:17:22 -0800 Subject: [PATCH 0192/1197] Combine two logging statements into one. (#4574) --- .../google/cloud/pubsub_v1/subscriber/policy/thread.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 283719fce593..d7267a79c9e4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -296,8 +296,9 @@ def on_response(self, response): For each message, schedule a callback with the executor. """ for msg in response.received_messages: - _LOGGER.debug('New message received from Pub/Sub:\n%r', msg) - _LOGGER.debug(self._callback) + _LOGGER.debug( + 'Using %s to process new message received:\n%r', + self._callback, msg) message = Message(msg.message, msg.ack_id, self._request_queue) future = self._executor.submit(self._callback, message) future.add_done_callback(_callback_completed) From c5e748b77c1caca40401c82b0f16c05ae7e0e0c4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 15 Dec 2017 22:46:10 -0800 Subject: [PATCH 0193/1197] Removing redunant "active" check in Pub / Sub policy. (#4603) Also adding a note explaining a bit why they extra check was there to begin with. Fixes #4496. 
--- .../google/cloud/pubsub_v1/subscriber/policy/base.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index e10bc8e8bbe8..b1a7583df36d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -324,11 +324,16 @@ def maintain_leases(self): # because it is more efficient to make a single request. ack_ids = list(self.managed_ack_ids) _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) - if ack_ids and not self._consumer.stopped.is_set(): + if ack_ids: request = types.StreamingPullRequest( modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=[p99] * len(ack_ids), ) + # NOTE: This may not work as expected if ``consumer.stopped`` + # has been set since we checked it. An implementation + # without any sort of race condition would require a + # way for ``send_request`` to fail when the consumer + # is stopped. self._consumer.send_request(request) # Now wait an appropriate period of time and do this again. From a48ecfabb8650ec24c2856e544791f54b0261be4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 15 Dec 2017 22:47:08 -0800 Subject: [PATCH 0194/1197] Add more explicit documentation for Pub / Sub Message.attributes. (#4601) Fixes #4390. 
--- .../google/cloud/pubsub_v1/subscriber/message.py | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 44dc9136ddda..aa3cde540db8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -33,7 +33,8 @@ class Message(object): to use this directly. data (bytes): The data in the message. Note that this will be a :class:`bytes`, not a text string. - attributes (dict): The attributes sent along with the message. + attributes (.ScalarMapContainer): The attributes sent along with the + message. See :attr:`attributes` for more information on this type. publish_time (datetime): The time that this message was originally published. """ @@ -85,8 +86,18 @@ def __repr__(self): def attributes(self): """Return the attributes of the underlying Pub/Sub Message. + .. warning:: + + A ``ScalarMapContainer`` behaves slightly differently than a + ``dict``. For a Pub / Sub message this is a ``string->string`` map. + When trying to access a value via ``map['key']``, if the key is + not in the map, then the default value for the string type will + be returned, which is an empty string. It may be more intuitive + to just cast the map to a ``dict`` or to one use ``map.get``. + Returns: - dict: The message's attributes. + .ScalarMapContainer: The message's attributes. This is a + ``dict``-like object provided by ``google.protobuf``. """ return self._message.attributes From ebd8c30acb8be07746d81965016ec3da1cfaa72f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 15 Dec 2017 22:47:29 -0800 Subject: [PATCH 0195/1197] Make Pub / Sub `Message.__repr__` a bit prettier / more useful. (#4602) Fixes #4529. 
--- .../cloud/pubsub_v1/subscriber/message.py | 47 +++++++++++++++---- .../unit/pubsub_v1/subscriber/test_message.py | 16 +++++++ 2 files changed, 55 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index aa3cde540db8..cb27876942a0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -14,10 +14,37 @@ from __future__ import absolute_import +import json import math import time +_MESSAGE_REPR = """\ +Message {{ + data: {!r} + attributes: {} +}}""" + + +def _indent(lines, prefix=' '): + """Indent some text. + + Note that this is present as ``textwrap.indent``, but not in Python 2. + + Args: + lines (str): The newline delimited string to be indented. + prefix (Optional[str]): The prefix to indent each line with. Default + to two spaces. + + Returns: + str: The newly indented content. + """ + indented = [] + for line in lines.split('\n'): + indented.append(prefix + line) + return '\n'.join(indented) + + class Message(object): """A representation of a single Pub/Sub message. @@ -73,14 +100,18 @@ def __repr__(self): # Get an abbreviated version of the data. abbv_data = self._message.data if len(abbv_data) > 50: - abbv_data = abbv_data[0:50] + b'...' - - # Return a useful representation. - answer = 'Message {\n' - answer += ' data: {0!r}\n'.format(abbv_data) - answer += ' attributes: {0!r}\n'.format(self.attributes) - answer += '}' - return answer + abbv_data = abbv_data[:50] + b'...' + + pretty_attrs = json.dumps( + dict(self.attributes), + indent=2, + separators=(',', ': '), + sort_keys=True, + ) + pretty_attrs = _indent(pretty_attrs) + # We don't actually want the first line indented. 
+ pretty_attrs = pretty_attrs.lstrip() + return _MESSAGE_REPR.format(abbv_data, pretty_attrs) @property def attributes(self): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 5a564a52eee3..df82fec59725 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -100,3 +100,19 @@ def test_nack(): 'ack_id': 'bogus_id', 'byte_size': 25, })) + + +def test_repr(): + data = b'foo' + msg = create_message(data, snow='cones', orange='juice') + data_line = ' data: {!r}'.format(data) + expected_repr = '\n'.join(( + 'Message {', + data_line, + ' attributes: {', + ' "orange": "juice",', + ' "snow": "cones"', + ' }', + '}', + )) + assert repr(msg) == expected_repr From be4ebb00a130c28318f0766bcc0c2c4e848262cd Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Fri, 15 Dec 2017 23:18:55 -0800 Subject: [PATCH 0196/1197] Adding a `Consumer.active` property to Pub / Sub. 
(#4604) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 18 ++++++++++++++---- .../cloud/pubsub_v1/subscriber/policy/base.py | 18 +++++++++--------- .../unit/pubsub_v1/subscriber/test_consumer.py | 10 +++++----- .../pubsub_v1/subscriber/test_policy_base.py | 16 ++++++++-------- 4 files changed, 36 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index f0bada8b1140..ae1270fdb3c6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -178,11 +178,21 @@ class Consumer(object): """ def __init__(self): self._request_queue = queue.Queue() - self.stopped = threading.Event() + self._stopped = threading.Event() self._can_consume = threading.Event() self._put_lock = threading.Lock() self._consumer_thread = None + @property + def active(self): + """bool: Indicates if the consumer is active. + + This is intended to be an implementation independent way of indicating + that the consumer is stopped. (E.g. so a policy that owns a consumer + doesn't need to know what a ``threading.Event`` is.) + """ + return not self._stopped.is_set() + def send_request(self, request): """Queue a request to be sent to gRPC. @@ -314,7 +324,7 @@ def _blocking_consume(self, policy): # exit cleanly when the user has called stop_consuming(). This # checks to make sure we're not exiting before opening a new # stream. - if self.stopped.is_set(): + if self._stopped.is_set(): _LOGGER.debug('Event signalled consumer exit.') break @@ -386,7 +396,7 @@ def start_consuming(self, policy): that owns this consumer. A policy defines how requests and responses are handled. """ - self.stopped.clear() + self._stopped.clear() self.resume() # Make sure we aren't paused. 
thread = threading.Thread( name=_BIDIRECTIONAL_CONSUMER_NAME, @@ -412,7 +422,7 @@ def _stop_no_join(self): stopped. """ self.resume() # Make sure we aren't paused. - self.stopped.set() + self._stopped.set() _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) self.send_request(_helper_threads.STOP) thread = self._consumer_thread diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index b1a7583df36d..15a57ad555f7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -172,15 +172,15 @@ def ack(self, ack_id, time_to_ack=None, byte_size=None): if time_to_ack is not None: self.histogram.add(int(time_to_ack)) - if self._consumer.stopped.is_set(): + if self._consumer.active: + # Send the request to ack the message. + request = types.StreamingPullRequest(ack_ids=[ack_id]) + self._consumer.send_request(request) + else: # If the consumer is inactive, then queue the ack_id here; it # will be acked as part of the initial request when the consumer # is started again. self._ack_on_resume.add(ack_id) - else: - # Send the request to ack the message. - request = types.StreamingPullRequest(ack_ids=[ack_id]) - self._consumer.send_request(request) # Remove the message from lease management. self.drop(ack_id=ack_id, byte_size=byte_size) @@ -309,7 +309,7 @@ def maintain_leases(self): """ while True: # Sanity check: Should this infinite loop quit? - if self._consumer.stopped.is_set(): + if not self._consumer.active: _LOGGER.debug('Consumer inactive, ending lease maintenance.') return @@ -329,11 +329,11 @@ def maintain_leases(self): modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=[p99] * len(ack_ids), ) - # NOTE: This may not work as expected if ``consumer.stopped`` - # has been set since we checked it. 
An implementation + # NOTE: This may not work as expected if ``consumer.active`` + # has changed since we checked it. An implementation # without any sort of race condition would require a # way for ``send_request`` to fail when the consumer - # is stopped. + # is inactive. self._consumer.send_request(request) # Now wait an appropriate period of time and do this again. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index fb042c27f5b2..b503a6a8895b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -75,8 +75,8 @@ def test_blocking_consume(): @mock.patch.object(_consumer, '_LOGGER') def test_blocking_consume_when_exiting(_LOGGER): consumer = _consumer.Consumer() - assert consumer.stopped.is_set() is False - consumer.stopped.set() + assert consumer._stopped.is_set() is False + consumer._stopped.set() # Make sure method cleanly exits. assert consumer._blocking_consume(None) is None @@ -207,7 +207,7 @@ def test_start_consuming(): with mock.patch.object(threading, 'Thread', autospec=True) as Thread: consumer.start_consuming(policy) - assert consumer.stopped.is_set() is False + assert consumer._stopped.is_set() is False Thread.assert_called_once_with( name=_consumer._BIDIRECTIONAL_CONSUMER_NAME, target=consumer._blocking_consume, @@ -218,14 +218,14 @@ def test_start_consuming(): def test_stop_consuming(): consumer = _consumer.Consumer() - assert consumer.stopped.is_set() is False + assert consumer._stopped.is_set() is False thread = mock.Mock(spec=threading.Thread) consumer._consumer_thread = thread assert consumer.stop_consuming() is None # Make sure state was updated. - assert consumer.stopped.is_set() is True + assert consumer._stopped.is_set() is True assert consumer._consumer_thread is None # Check mocks. 
thread.join.assert_called_once_with() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 59dbd13db4f6..85dbc527c062 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -85,7 +85,7 @@ def test_subscription(): def test_ack(): policy = create_policy() - policy._consumer.stopped.clear() + policy._consumer._stopped.clear() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string', 20) send_request.assert_called_once_with(types.StreamingPullRequest( @@ -97,7 +97,7 @@ def test_ack(): def test_ack_no_time(): policy = create_policy() - policy._consumer.stopped.clear() + policy._consumer._stopped.clear() with mock.patch.object(policy._consumer, 'send_request') as send_request: policy.ack('ack_id_string') send_request.assert_called_once_with(types.StreamingPullRequest( @@ -109,7 +109,7 @@ def test_ack_no_time(): def test_ack_paused(): policy = create_policy() consumer = policy._consumer - consumer.stopped.set() + consumer._stopped.set() assert consumer.paused is True policy.ack('ack_id_string') @@ -203,20 +203,20 @@ def test_modify_ack_deadline(): def test_maintain_leases_inactive_consumer(): policy = create_policy() - policy._consumer.stopped.set() + policy._consumer._stopped.set() assert policy.maintain_leases() is None def test_maintain_leases_ack_ids(): policy = create_policy() - policy._consumer.stopped.clear() + policy._consumer._stopped.clear() policy.lease('my ack id', 50) # Mock the sleep object. 
with mock.patch.object(time, 'sleep', autospec=True) as sleep: def trigger_inactive(seconds): assert 0 < seconds < 10 - policy._consumer.stopped.set() + policy._consumer._stopped.set() sleep.side_effect = trigger_inactive @@ -232,11 +232,11 @@ def trigger_inactive(seconds): def test_maintain_leases_no_ack_ids(): policy = create_policy() - policy._consumer.stopped.clear() + policy._consumer._stopped.clear() with mock.patch.object(time, 'sleep', autospec=True) as sleep: def trigger_inactive(seconds): assert 0 < seconds < 10 - policy._consumer.stopped.set() + policy._consumer._stopped.set() sleep.side_effect = trigger_inactive policy.maintain_leases() From d220cfde273aeffe975b0230e4565270d2e455a4 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 18 Dec 2017 09:15:07 -0800 Subject: [PATCH 0197/1197] Making it impossible to call `Policy.open()` on an already opened policy. (#4606) Similar with `Policy.close()`. Fixes #4488. Also returning the future from `Policy.close()`. --- .../pubsub_v1/subscriber/policy/thread.py | 43 +++++++++++++++++-- .../subscriber/test_policy_thread.py | 36 ++++++++++++++-- 2 files changed, 71 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index d7267a79c9e4..39f161a3b93e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -147,7 +147,26 @@ def _get_executor(executor): return executor def close(self): - """Close the existing connection.""" + """Close the existing connection. + + .. warning:: + + This method is not thread-safe. For example, if this method is + called while another thread is executing :meth:`open`, then the + policy could end up in an undefined state. 
The **same** policy + instance is not intended to be used by multiple workers (though + each policy instance **does** have a thread-safe private queue). + + Returns: + ~google.api_core.future.Future: The future that **was** attached + to the subscription. + + Raises: + ValueError: If the policy has not been opened yet. + """ + if self._future is None: + raise ValueError('This policy has not been opened yet.') + # Stop consuming messages. self._request_queue.put(_helper_threads.STOP) self._dispatch_thread.join() # Wait until stopped. @@ -159,9 +178,11 @@ def close(self): # The subscription is closing cleanly; resolve the future if it is not # resolved already. - if self._future is not None and not self._future.done(): + if not self._future.done(): self._future.set_result(None) + future = self._future self._future = None + return future def _start_dispatch(self): """Start a thread to dispatch requests queued up by callbacks. @@ -213,6 +234,14 @@ def _start_lease_worker(self): def open(self, callback): """Open a streaming pull connection and begin receiving messages. + .. warning:: + + This method is not thread-safe. For example, if this method is + called while another thread is executing :meth:`close`, then the + policy could end up in an undefined state. The **same** policy + instance is not intended to be used by multiple workers (though + each policy instance **does** have a thread-safe private queue). + For each message received, the ``callback`` function is fired with a :class:`~.pubsub_v1.subscriber.message.Message` as its only argument. @@ -222,9 +251,15 @@ def open(self, callback): Returns: ~google.api_core.future.Future: A future that provides - an interface to block on the subscription if desired, and - handle errors. + an interface to block on the subscription if desired, and + handle errors. + + Raises: + ValueError: If the policy has already been opened. 
""" + if self._future is not None: + raise ValueError('This policy has already been opened.') + # Create the Future that this method will return. # This future is the main thread's interface to handle exceptions, # block on the subscription, etc. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index ebb274905be6..a1b254fb2eee 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -55,18 +55,35 @@ def test_close(): policy = create_policy() policy._dispatch_thread = dispatch_thread policy._leases_thread = leases_thread + future = mock.Mock(spec=('done',)) + future.done.return_value = True + policy._future = future + consumer = policy._consumer with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: - policy.close() + closed_fut = policy.close() stop_consuming.assert_called_once_with() assert policy._dispatch_thread is None dispatch_thread.join.assert_called_once_with() assert policy._leases_thread is None leases_thread.join.assert_called_once_with() + assert closed_fut is future + assert policy._future is None + future.done.assert_called_once_with() + + +def test_close_without_future(): + policy = create_policy() + assert policy._future is None + + with pytest.raises(ValueError) as exc_info: + policy.close() + assert exc_info.value.args == ('This policy has not been opened yet.',) -def test_close_with_future(): + +def test_close_with_unfinished_future(): dispatch_thread = mock.Mock(spec=threading.Thread) leases_thread = mock.Mock(spec=threading.Thread) @@ -77,14 +94,15 @@ def test_close_with_future(): consumer = policy._consumer with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: future = policy.future - policy.close() + closed_fut = policy.close() 
stop_consuming.assert_called_once_with() assert policy._dispatch_thread is None dispatch_thread.join.assert_called_once_with() assert policy._leases_thread is None leases_thread.join.assert_called_once_with() - assert policy.future != future + assert policy._future is None + assert closed_fut is future assert future.result() is None @@ -111,6 +129,16 @@ def test_open(): threads[2].start.assert_called_once_with() +def test_open_already_open(): + policy = create_policy() + policy._future = mock.sentinel.future + + with pytest.raises(ValueError) as exc_info: + policy.open(None) + + assert exc_info.value.args == ('This policy has already been opened.',) + + def test_dispatch_callback_valid_actions(): policy = create_policy() kwargs = {'foo': 10, 'bar': 13.37} From e2be4c04760f9218a8251cc238cf19583854ed4f Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 18 Dec 2017 11:31:56 -0800 Subject: [PATCH 0198/1197] Adding extra check in `Batch.will_accept` for the number of messages. (#4612) Also adding a note about the fleeting nature of the "will accept?" check. Pinning `pylint` to avoid `1.8.x` breakage of `gcp-devrel-py-tools` breakage. --- .../google/cloud/pubsub_v1/publisher/batch/base.py | 13 ++++++++++++- packages/google-cloud-pubsub/nox.py | 3 ++- .../unit/pubsub_v1/publisher/batch/test_base.py | 9 +++++++++ 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 9e08ea132e00..366dc8ca7738 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -97,6 +97,11 @@ def status(self): def will_accept(self, message): """Return True if the batch is able to accept the message. + In concurrent implementations, the attributes on the current batch + may be modified by other workers. 
With this in mind, the caller will + likely want to hold a lock that will make sure the state remains + the same after the "will accept?" question is answered. + Args: message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. @@ -107,10 +112,16 @@ def will_accept(self, message): if self.status != BatchStatus.ACCEPTING_MESSAGES: return False - # If this batch can not hold the message in question, return False. + # If this message will make the batch exceed the ``max_bytes`` + # setting, return False. if self.size + message.ByteSize() > self.settings.max_bytes: return False + # If this message will make the batch exceed the ``max_messages`` + # setting, return False. + if len(self.messages) >= self.settings.max_messages: + return False + # Okay, everything is good. return True diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 00945bdccbc7..93c5b3344a7a 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -99,7 +99,8 @@ def lint(session): serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8', 'pylint', 'gcp-devrel-py-tools', *LOCAL_DEPS) + session.install( + 'flake8', 'pylint==1.7.5', 'gcp-devrel-py-tools', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google/cloud/pubsub') session.run( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 49cd31f82714..d4177e2f7d55 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -67,3 +67,12 @@ def test_will_not_accept_size(): ) message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') assert batch.will_accept(message) is False + + +def test_will_not_accept_number(): + batch = create_batch( + settings=types.BatchSettings(max_messages=-1), + status=BatchStatus.ACCEPTING_MESSAGES, + ) + message = types.PubsubMessage(data=b'abc') + assert batch.will_accept(message) is False From 24b6a6a2ff6de39c758e98ed6920b7bbfea33cdb Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 18 Dec 2017 13:03:55 -0800 Subject: [PATCH 0199/1197] Moving `will_accept()` check out of `PublisherClient.batch()` factory. (#4613) This is the first stage of moving that check directly into `Batch.publish()`. This check **must** occur when the message is being published because concurrent access to the messages and futures on a `Batch` make it impossible to reliably do an LBYL "will accept?" check. 
--- .../cloud/pubsub_v1/publisher/client.py | 39 +++-- .../publisher/test_publisher_client.py | 144 ++++++++++++------ 2 files changed, 121 insertions(+), 62 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 4d00abe22504..96b9e5f6d806 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -98,19 +98,21 @@ def target(self): """ return publisher_client.PublisherClient.SERVICE_ADDRESS - def batch(self, topic, message, create=True, autocommit=True): + def batch(self, topic, create=False, autocommit=True): """Return the current batch for the provided topic. - This will create a new batch only if no batch currently exists. + This will create a new batch if ``create=True`` or if no batch + currently exists. Args: topic (str): A string representing the topic. - message (~google.cloud.pubsub_v1.types.PubsubMessage): The message - that will be committed. - create (bool): Whether to create a new batch if no batch is - found. Defaults to True. - autocommit (bool): Whether to autocommit this batch. - This is primarily useful for debugging. + create (bool): Whether to create a new batch. Defaults to + :data:`False`. If :data:`True`, this will create a new batch + even if one already exists. + autocommit (bool): Whether to autocommit this batch. This is + primarily useful for debugging and testing, since it allows + the caller to avoid some side effects that batch creation + might have (e.g. spawning a worker to publish a batch). Returns: ~.pubsub_v1.batch.Batch: The batch object. @@ -118,10 +120,12 @@ def batch(self, topic, message, create=True, autocommit=True): # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. 
with self._batch_lock: - batch = self._batches.get(topic, None) - if batch is None or not batch.will_accept(message): - if not create: - return None + if not create: + batch = self._batches.get(topic) + if batch is None: + create = True + + if create: batch = self._batch_class( autocommit=autocommit, client=self, @@ -130,7 +134,6 @@ def batch(self, topic, message, create=True, autocommit=True): ) self._batches[topic] = batch - # Simply return the appropriate batch. return batch def publish(self, topic, data, **attrs): @@ -190,4 +193,12 @@ def publish(self, topic, data, **attrs): message = types.PubsubMessage(data=data, attributes=attrs) # Delegate the publishing to the batch. - return self.batch(topic, message=message).publish(message) + batch = self.batch(topic) + future = None + while future is None: + if batch.will_accept(message): + future = batch.publish(message) + else: + batch = self.batch(topic, create=True) + + return future diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 4d8b1643adab..441342b11665 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -51,100 +51,148 @@ def test_init_emulator(monkeypatch): assert channel.target().decode('utf8') == '/foo/bar/' -def test_batch_accepting(): - """Establish that an existing batch is returned if it accepts messages.""" +def test_batch_create(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - message = types.PubsubMessage(data=b'foo') - # At first, there are no batches, so this should return a new batch - # which is also saved to the object. 
- ante = len(client._batches) - batch = client.batch('topic_name', message, autocommit=False) - assert len(client._batches) == ante + 1 - assert batch is client._batches['topic_name'] + assert len(client._batches) == 0 + topic = 'topic/path' + batch = client.batch(topic, autocommit=False) + assert client._batches == {topic: batch} + + +def test_batch_exists(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + + topic = 'topic/path' + client._batches[topic] = mock.sentinel.batch # A subsequent request should return the same batch. - batch2 = client.batch('topic_name', message, autocommit=False) - assert batch is batch2 - assert batch2 is client._batches['topic_name'] + batch = client.batch(topic, autocommit=False) + assert batch is mock.sentinel.batch + assert client._batches == {topic: batch} -def test_batch_without_autocreate(): +def test_batch_create_and_exists(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - message = types.PubsubMessage(data=b'foo') - # If `create=False` is sent, then when the batch is not found, None - # is returned instead. - ante = len(client._batches) - batch = client.batch('topic_name', message, create=False) - assert batch is None - assert len(client._batches) == ante + topic = 'topic/path' + client._batches[topic] = mock.sentinel.batch + + # A subsequent request should return the same batch. + batch = client.batch(topic, create=True, autocommit=False) + assert batch is not mock.sentinel.batch + assert client._batches == {topic: batch} def test_publish(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - # Use a mock in lieu of the actual batch class; set the mock up to claim - # indiscriminately that it accepts all messages. + # Use a mock in lieu of the actual batch class. batch = mock.Mock(spec=client._batch_class) + # Set the mock up to claim indiscriminately that it accepts all messages. 
batch.will_accept.return_value = True - client._batches['topic_name'] = batch - - # Begin publishing. - client.publish('topic_name', b'spam') - client.publish('topic_name', b'foo', bar='baz') - # The batch's publish method should have been called twice. - assert batch.publish.call_count == 2 + topic = 'topic/path' + client._batches[topic] = batch - # In both cases - # The first call should correspond to the first message. - _, args, _ = batch.publish.mock_calls[0] - assert args[0].data == b'spam' - assert not args[0].attributes + # Begin publishing. + client.publish(topic, b'spam') + client.publish(topic, b'foo', bar='baz') - # The second call should correspond to the second message. - _, args, _ = batch.publish.mock_calls[1] - assert args[0].data == b'foo' - assert args[0].attributes == {u'bar': u'baz'} + # Check mock. + batch.publish.assert_has_calls( + [ + mock.call(types.PubsubMessage(data=b'spam')), + mock.call(types.PubsubMessage( + data=b'foo', + attributes={'bar': 'baz'}, + )), + ], + ) def test_publish_data_not_bytestring_error(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) + topic = 'topic/path' with pytest.raises(TypeError): - client.publish('topic_name', u'This is a text string.') + client.publish(topic, u'This is a text string.') with pytest.raises(TypeError): - client.publish('topic_name', 42) + client.publish(topic, 42) def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - # Use a mock in lieu of the actual batch class; set the mock up to claim - # indiscriminately that it accepts all messages. + # Use a mock in lieu of the actual batch class. batch = mock.Mock(spec=client._batch_class) + # Set the mock up to claim indiscriminately that it accepts all messages. batch.will_accept.return_value = True - client._batches['topic_name'] = batch + + topic = 'topic/path' + client._batches[topic] = batch + + # Begin publishing. 
+ client.publish(topic, b'foo', bar=b'baz') + + # The attributes should have been sent as text. + batch.publish.assert_called_once_with( + types.PubsubMessage( + data=b'foo', + attributes={'bar': u'baz'}, + ), + ) + + +def test_publish_new_batch_needed(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + + # Use mocks in lieu of the actual batch class. + batch1 = mock.Mock(spec=client._batch_class) + batch2 = mock.Mock(spec=client._batch_class) + # Set the first mock up to claim indiscriminately that it rejects all + # messages and the second accepts all. + batch1.will_accept.return_value = False + batch2.will_accept.return_value = True + + topic = 'topic/path' + client._batches[topic] = batch1 + + # Actually mock the batch class now. + batch_class = mock.Mock(spec=(), return_value=batch2) + client._batch_class = batch_class # Begin publishing. - client.publish('topic_name', b'foo', bar=b'baz') + client.publish(topic, b'foo', bar=b'baz') + batch_class.assert_called_once_with( + autocommit=True, + client=client, + settings=client.batch_settings, + topic=topic, + ) # The attributes should have been sent as text. - _, args, _ = batch.publish.mock_calls[0] - assert args[0].data == b'foo' - assert args[0].attributes == {u'bar': u'baz'} + batch1.publish.assert_not_called() + batch2.publish.assert_called_once_with( + types.PubsubMessage( + data=b'foo', + attributes={'bar': u'baz'}, + ), + ) def test_publish_attrs_type_error(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) + topic = 'topic/path' with pytest.raises(TypeError): - client.publish('topic_name', b'foo', answer=42) + client.publish(topic, b'foo', answer=42) def test_gapic_instance_method(): From ecea18f810df71bcef6928554fc1f01eef162a0c Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Mon, 18 Dec 2017 13:06:27 -0800 Subject: [PATCH 0200/1197] Adding a "STARTING" status for Pub / Sub `Batch.commit()`. 
(#4614) Towards #4575. --- .../cloud/pubsub_v1/publisher/batch/base.py | 2 + .../cloud/pubsub_v1/publisher/batch/thread.py | 78 ++++++++--------- .../pubsub_v1/publisher/batch/test_thread.py | 85 +++++++++++++------ 3 files changed, 102 insertions(+), 63 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 366dc8ca7738..03705dce9c14 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -153,5 +153,7 @@ class BatchStatus(object): library hooks in functionality. """ ACCEPTING_MESSAGES = 'accepting messages' + STARTING = 'starting' + IN_PROGRESS = 'in progress' ERROR = 'error' SUCCESS = 'success' diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index c898d30bc724..095fb6494ff6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -18,6 +18,8 @@ import threading import time +import six + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures @@ -25,6 +27,10 @@ _LOGGER = logging.getLogger(__name__) +_CAN_COMMIT = ( + base.BatchStatus.ACCEPTING_MESSAGES, + base.BatchStatus.STARTING, +) class Batch(base.Batch): @@ -63,20 +69,20 @@ class Batch(base.Batch): """ def __init__(self, client, topic, settings, autocommit=True): self._client = client + self._topic = topic + self._settings = settings + self._commit_lock = threading.Lock() # These objects are all communicated between threads; ensure that # any writes to them are atomic. 
self._futures = [] self._messages = [] self._size = 0 - self._settings = settings self._status = base.BatchStatus.ACCEPTING_MESSAGES - self._topic = topic # If max latency is specified, start a thread to monitor the batch and # commit when the max latency is reached. self._thread = None - self._commit_lock = threading.Lock() if autocommit and self._settings.max_latency < float('inf'): self._thread = threading.Thread( name='Thread-MonitorBatchPublisher', @@ -135,12 +141,9 @@ def commit(self): This method is non-blocking. It opens a new thread, which calls :meth:`_commit`, which does block. """ - # Set the status to in-flight synchronously, to ensure that + # Set the status to "starting" synchronously, to ensure that # this batch will necessarily not accept new messages. - # - # Yes, this is repeated in `_commit`, because that method is called - # directly by `monitor`. - self._status = 'in-flight' + self._status = base.BatchStatus.STARTING # Start a new thread to actually handle the commit. commit_thread = threading.Thread( @@ -162,48 +165,47 @@ def _commit(self): version, which calls this one. """ with self._commit_lock: - # If, in the intervening period, the batch started to be committed, - # or completed a commit, then no-op at this point. - if self._status != base.BatchStatus.ACCEPTING_MESSAGES: + if self._status in _CAN_COMMIT: + self._status = base.BatchStatus.IN_PROGRESS + else: + # If, in the intervening period between when this method was + # called and now, the batch started to be committed, or + # completed a commit, then no-op at this point. + _LOGGER.debug('Batch is already in progress, exiting commit') return - # Update the status. - self._status = 'in-flight' - # Sanity check: If there are no messages, no-op. if not self._messages: + _LOGGER.debug('No messages to publish, exiting commit') + self._status = base.BatchStatus.SUCCESS return # Begin the request to publish these messages. # Log how long the underlying request takes. 
start = time.time() - response = self.client.api.publish( + response = self._client.api.publish( self._topic, - self.messages, + self._messages, ) end = time.time() - _LOGGER.debug('gRPC Publish took {s} seconds.'.format( - s=end - start, - )) - - # We got a response from Pub/Sub; denote that we are processing. - self._status = 'processing results' - - # Sanity check: If the number of message IDs is not equal to the - # number of futures I have, then something went wrong. - if len(response.message_ids) != len(self._futures): + _LOGGER.debug('gRPC Publish took %s seconds.', end - start) + + if len(response.message_ids) == len(self._futures): + # Iterate over the futures on the queue and return the response + # IDs. We are trusting that there is a 1:1 mapping, and raise + # an exception if not. + self._status = base.BatchStatus.SUCCESS + zip_iter = six.moves.zip(response.message_ids, self._futures) + for message_id, future in zip_iter: + future.set_result(message_id) + else: + # Sanity check: If the number of message IDs is not equal to + # the number of futures I have, then something went wrong. + self._status = base.BatchStatus.ERROR + exception = exceptions.PublishError( + 'Some messages were not successfully published.') for future in self._futures: - future.set_exception(exceptions.PublishError( - 'Some messages were not successfully published.', - )) - return - - # Iterate over the futures on the queue and return the response - # IDs. We are trusting that there is a 1:1 mapping, and raise an - # exception if not. - self._status = base.BatchStatus.SUCCESS - for message_id, future in zip(response.message_ids, self._futures): - future.set_result(message_id) + future.set_exception(exception) def monitor(self): """Commit this batch after sufficient time has elapsed. @@ -246,7 +248,7 @@ def publish(self, message): # Store the actual message in the batch's message queue. 
self._messages.append(message) - if len(self._messages) >= self.settings.max_messages: + if len(self._messages) >= self._settings.max_messages: self.commit() # Return a Future. That future needs to be aware of the status diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index ca931e97df5b..c5f014d43c22 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -22,6 +22,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher.batch import thread from google.cloud.pubsub_v1.publisher.batch.thread import Batch @@ -93,9 +94,10 @@ def test_commit(): # The batch's status needs to be something other than "accepting messages", # since the commit started. assert batch.status != BatchStatus.ACCEPTING_MESSAGES + assert batch.status == BatchStatus.STARTING -def test_blocking_commit(): +def test_blocking__commit(): batch = create_batch() futures = ( batch.publish({'data': b'This is my message.'}), @@ -103,34 +105,63 @@ def test_blocking_commit(): ) # Set up the underlying API publish method to return a PublishResponse. - with mock.patch.object(type(batch.client.api), 'publish') as publish: - publish.return_value = types.PublishResponse(message_ids=['a', 'b']) - - # Actually commit the batch. + publish_response = types.PublishResponse(message_ids=['a', 'b']) + patch = mock.patch.object( + type(batch.client.api), 'publish', return_value=publish_response) + with patch as publish: batch._commit() - # Establish that the underlying API call was made with expected - # arguments. 
- publish.assert_called_once_with('topic_name', [ + # Establish that the underlying API call was made with expected + # arguments. + publish.assert_called_once_with( + 'topic_name', + [ types.PubsubMessage(data=b'This is my message.'), types.PubsubMessage(data=b'This is another message.'), - ]) + ], + ) # Establish that all of the futures are done, and that they have the # expected values. - assert all([f.done() for f in futures]) + assert futures[0].done() assert futures[0].result() == 'a' + assert futures[1].done() assert futures[1].result() == 'b' -def test_blocking_commit_no_messages(): +@mock.patch.object(thread, '_LOGGER') +def test_blocking__commit_starting(_LOGGER): + batch = create_batch() + batch._status = BatchStatus.STARTING + + batch._commit() + assert batch._status == BatchStatus.SUCCESS + + _LOGGER.debug.assert_called_once_with( + 'No messages to publish, exiting commit') + + +@mock.patch.object(thread, '_LOGGER') +def test_blocking__commit_already_started(_LOGGER): + batch = create_batch() + batch._status = BatchStatus.IN_PROGRESS + + batch._commit() + assert batch._status == BatchStatus.IN_PROGRESS + + _LOGGER.debug.assert_called_once_with( + 'Batch is already in progress, exiting commit') + + +def test_blocking__commit_no_messages(): batch = create_batch() with mock.patch.object(type(batch.client.api), 'publish') as publish: batch._commit() - assert publish.call_count == 0 + + assert publish.call_count == 0 -def test_blocking_commit_wrong_messageid_length(): +def test_blocking__commit_wrong_messageid_length(): batch = create_batch() futures = ( batch.publish({'data': b'blah blah blah'}), @@ -138,9 +169,12 @@ def test_blocking_commit_wrong_messageid_length(): ) # Set up a PublishResponse that only returns one message ID. 
- with mock.patch.object(type(batch.client.api), 'publish') as publish: - publish.return_value = types.PublishResponse(message_ids=['a']) + publish_response = types.PublishResponse(message_ids=['a']) + patch = mock.patch.object( + type(batch.client.api), 'publish', return_value=publish_response) + with patch as publish: batch._commit() + for future in futures: assert future.done() assert isinstance(future.exception(), exceptions.PublishError) @@ -152,25 +186,26 @@ def test_monitor(): with mock.patch.object(type(batch), '_commit') as _commit: batch.monitor() - # The monitor should have waited the given latency. - sleep.assert_called_once_with(5.0) + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) - # Since `monitor` runs in its own thread, it should call - # the blocking commit implementation. - _commit.assert_called_once_with() + # Since `monitor` runs in its own thread, it should call + # the blocking commit implementation. + _commit.assert_called_once_with() def test_monitor_already_committed(): batch = create_batch(max_latency=5.0) - batch._status = 'something else' + status = 'something else' + batch._status = status with mock.patch.object(time, 'sleep') as sleep: batch.monitor() - # The monitor should have waited the given latency. - sleep.assert_called_once_with(5.0) + # The monitor should have waited the given latency. + sleep.assert_called_once_with(5.0) - # The status should not have changed. - assert batch._status == 'something else' + # The status should not have changed. + assert batch._status == status def test_publish(): From da7f6a887993b23c4504b96d7110a681c45a8e85 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 19 Dec 2017 08:16:11 -0800 Subject: [PATCH 0201/1197] Checking Pub / Sub `Batch.will_accept` in thread-safe way. (#4616) Fixes #4575. 
--- .../cloud/pubsub_v1/publisher/batch/thread.py | 73 ++++++++++++------- .../cloud/pubsub_v1/publisher/client.py | 5 +- .../pubsub_v1/publisher/batch/test_thread.py | 65 ++++++++++++----- .../publisher/test_publisher_client.py | 39 ++++++---- 4 files changed, 118 insertions(+), 64 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 095fb6494ff6..b339865220b9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -37,7 +37,7 @@ class Batch(base.Batch): """A batch of messages. The batch is the internal group of messages which are either awaiting - publication or currently in-flight. + publication or currently in progress. A batch is automatically created by the PublisherClient when the first message to be published is received; subsequent messages are added to @@ -72,9 +72,9 @@ def __init__(self, client, topic, settings, autocommit=True): self._topic = topic self._settings = settings - self._commit_lock = threading.Lock() - # These objects are all communicated between threads; ensure that - # any writes to them are atomic. + self._state_lock = threading.Lock() + # These members are all communicated between threads; ensure that + # any writes to them use the "state lock" to remain atomic. self._futures = [] self._messages = [] self._size = 0 @@ -133,17 +133,24 @@ def status(self): def commit(self): """Actually publish all of the messages on the active batch. - This synchronously sets the batch status to in-flight, and then opens - a new thread, which handles actually sending the messages to Pub/Sub. - .. note:: This method is non-blocking. It opens a new thread, which calls :meth:`_commit`, which does block. 
+ + This synchronously sets the batch status to "starting", and then opens + a new thread, which handles actually sending the messages to Pub/Sub. + + If the current batch is **not** accepting messages, this method + does nothing. """ # Set the status to "starting" synchronously, to ensure that # this batch will necessarily not accept new messages. - self._status = base.BatchStatus.STARTING + with self._state_lock: + if self._status == base.BatchStatus.ACCEPTING_MESSAGES: + self._status = base.BatchStatus.STARTING + else: + return # Start a new thread to actually handle the commit. commit_thread = threading.Thread( @@ -155,7 +162,7 @@ def commit(self): def _commit(self): """Actually publish all of the messages on the active batch. - This moves the batch out from being the active batch to an in-flight + This moves the batch out from being the active batch to an in progress batch on the publisher, and then the batch is discarded upon completion. @@ -164,7 +171,7 @@ def _commit(self): This method blocks. The :meth:`commit` method is the non-blocking version, which calls this one. """ - with self._commit_lock: + with self._state_lock: if self._status in _CAN_COMMIT: self._status = base.BatchStatus.IN_PROGRESS else: @@ -213,13 +220,13 @@ def monitor(self): This simply sleeps for ``self._settings.max_latency`` seconds, and then calls commit unless the batch has already been committed. """ - # Note: This thread blocks; it is up to the calling code to call it - # in a separate thread. - # + # NOTE: This blocks; it is up to the calling code to call it + # in a separate thread. + # Sleep for however long we should be waiting. time.sleep(self._settings.max_latency) - # Commit. + _LOGGER.debug('Monitor is waking up') return self._commit() def publish(self, message): @@ -235,24 +242,34 @@ def publish(self, message): message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. 
Returns: - ~google.api_core.future.Future: An object conforming to - the :class:`concurrent.futures.Future` interface. + Optional[~google.api_core.future.Future]: An object conforming to + the :class:`~concurrent.futures.Future` interface or :data:`None`. + If :data:`None` is returned, that signals that the batch cannot + accept a message. """ # Coerce the type, just in case. if not isinstance(message, types.PubsubMessage): message = types.PubsubMessage(**message) - # Add the size to the running total of the size, so we know - # if future messages need to be rejected. - self._size += message.ByteSize() - - # Store the actual message in the batch's message queue. - self._messages.append(message) - if len(self._messages) >= self._settings.max_messages: + with self._state_lock: + if not self.will_accept(message): + return None + + # Add the size to the running total of the size, so we know + # if future messages need to be rejected. + self._size += message.ByteSize() + # Store the actual message in the batch's message queue. + self._messages.append(message) + # Track the future on this batch (so that the result of the + # future can be set). + future = futures.Future() + self._futures.append(future) + # Determine the number of messages before releasing the lock. + num_messages = len(self._messages) + + # Try to commit, but it must be **without** the lock held, since + # ``commit()`` will try to obtain the lock. + if num_messages >= self._settings.max_messages: self.commit() - # Return a Future. That future needs to be aware of the status - # of this batch. 
- f = futures.Future() - self._futures.append(f) - return f + return future diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 96b9e5f6d806..3b1a7a2a7d2d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -196,9 +196,8 @@ def publish(self, topic, data, **attrs): batch = self.batch(topic) future = None while future is None: - if batch.will_accept(message): - future = batch.publish(message) - else: + future = batch.publish(message) + if future is None: batch = self.batch(topic, create=True) return future diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index c5f014d43c22..903ae90794a4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -97,6 +97,19 @@ def test_commit(): assert batch.status == BatchStatus.STARTING +def test_commit_no_op(): + batch = create_batch() + batch._status = BatchStatus.IN_PROGRESS + with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + batch.commit() + + # Make sure a thread was not created. + Thread.assert_not_called() + + # Check that batch status is unchanged. + assert batch.status == BatchStatus.IN_PROGRESS + + def test_blocking__commit(): batch = create_batch() futures = ( @@ -217,21 +230,35 @@ def test_publish(): ) # Publish each of the messages, which should save them to the batch. - for message in messages: - batch.publish(message) + futures = [batch.publish(message) for message in messages] # There should be three messages on the batch, and three futures. 
assert len(batch.messages) == 3 - assert len(batch._futures) == 3 + assert batch._futures == futures # The size should have been incremented by the sum of the size of the # messages. - assert batch.size == sum([m.ByteSize() for m in messages]) + expected_size = sum([message_pb.ByteSize() for message_pb in messages]) + assert batch.size == expected_size assert batch.size > 0 # I do not always trust protobuf. -def test_publish_max_messages(): - batch = create_batch(max_messages=4) +def test_publish_not_will_accept(): + batch = create_batch(max_messages=0) + + # Publish the message. + message = types.PubsubMessage(data=b'foobarbaz') + future = batch.publish(message) + + assert future is None + assert batch.size == 0 + assert batch.messages == [] + assert batch._futures == [] + + +def test_publish_exceed_max_messages(): + max_messages = 4 + batch = create_batch(max_messages=max_messages) messages = ( types.PubsubMessage(data=b'foobarbaz'), types.PubsubMessage(data=b'spameggs'), @@ -240,27 +267,29 @@ def test_publish_max_messages(): # Publish each of the messages, which should save them to the batch. with mock.patch.object(batch, 'commit') as commit: - for message in messages: - batch.publish(message) + futures = [batch.publish(message) for message in messages] + assert batch._futures == futures + assert len(futures) == max_messages - 1 # Commit should not yet have been called. assert commit.call_count == 0 # When a fourth message is published, commit should be called. - batch.publish(types.PubsubMessage(data=b'last one')) + future = batch.publish(types.PubsubMessage(data=b'last one')) commit.assert_called_once_with() + futures.append(future) + assert batch._futures == futures + assert len(futures) == max_messages + def test_publish_dict(): batch = create_batch() - batch.publish({'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) + future = batch.publish( + {'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) # There should be one message on the batch. 
- assert len(batch.messages) == 1 - - # It should be an actual protobuf Message at this point, with the - # expected values. - message = batch.messages[0] - assert isinstance(message, types.PubsubMessage) - assert message.data == b'foobarbaz' - assert message.attributes == {'spam': 'eggs'} + expected_message = types.PubsubMessage( + data=b'foobarbaz', attributes={'spam': 'eggs'}) + assert batch.messages == [expected_message] + assert batch._futures == [future] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 441342b11665..a519ddc645fd 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -95,13 +95,20 @@ def test_publish(): batch = mock.Mock(spec=client._batch_class) # Set the mock up to claim indiscriminately that it accepts all messages. batch.will_accept.return_value = True + batch.publish.side_effect = ( + mock.sentinel.future1, + mock.sentinel.future2, + ) topic = 'topic/path' client._batches[topic] = batch # Begin publishing. - client.publish(topic, b'spam') - client.publish(topic, b'foo', bar='baz') + future1 = client.publish(topic, b'spam') + future2 = client.publish(topic, b'foo', bar='baz') + + assert future1 is mock.sentinel.future1 + assert future2 is mock.sentinel.future2 # Check mock. batch.publish.assert_has_calls( @@ -138,7 +145,9 @@ def test_publish_attrs_bytestring(): client._batches[topic] = batch # Begin publishing. - client.publish(topic, b'foo', bar=b'baz') + future = client.publish(topic, b'foo', bar=b'baz') + + assert future is batch.publish.return_value # The attributes should have been sent as text. 
batch.publish.assert_called_once_with( @@ -158,8 +167,8 @@ def test_publish_new_batch_needed(): batch2 = mock.Mock(spec=client._batch_class) # Set the first mock up to claim indiscriminately that it rejects all # messages and the second accepts all. - batch1.will_accept.return_value = False - batch2.will_accept.return_value = True + batch1.publish.return_value = None + batch2.publish.return_value = mock.sentinel.future topic = 'topic/path' client._batches[topic] = batch1 @@ -168,23 +177,23 @@ def test_publish_new_batch_needed(): batch_class = mock.Mock(spec=(), return_value=batch2) client._batch_class = batch_class - # Begin publishing. - client.publish(topic, b'foo', bar=b'baz') + # Publish a message. + future = client.publish(topic, b'foo', bar=b'baz') + assert future is mock.sentinel.future + + # Check the mocks. batch_class.assert_called_once_with( autocommit=True, client=client, settings=client.batch_settings, topic=topic, ) - - # The attributes should have been sent as text. - batch1.publish.assert_not_called() - batch2.publish.assert_called_once_with( - types.PubsubMessage( - data=b'foo', - attributes={'bar': u'baz'}, - ), + message_pb = types.PubsubMessage( + data=b'foo', + attributes={'bar': u'baz'}, ) + batch1.publish.assert_called_once_with(message_pb) + batch2.publish.assert_called_once_with(message_pb) def test_publish_attrs_type_error(): From 178f4f94b7367bfb013e2c2294c8c8d7781bd818 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 19 Dec 2017 08:43:05 -0800 Subject: [PATCH 0202/1197] Creating `google-cloud-pubsub==0.30.0` release. 
(#4619) Also: - Making a release of the umbrella package (since `google-cloud-pubsub` has transition from `0.29.x` to `0.30.x`) - Updating version with `.dev1` suffix in `bigquery_datatransfer`, `container` and `trace` packages - Adding `google-cloud-bigquery-datatransfer` and `google-cloud-container` to umbrella package --- packages/google-cloud-pubsub/CHANGELOG.md | 35 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 2b6e52b9bce4..bcce254077de 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,41 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.30.0 + +### Notable Implementation Changes + +- Dropping redundant Pub / Sub `Policy._paused` data member (#4568). +- Removing redundant "active" check in policy (#4603). +- Adding a `Consumer.active` property (#4604). +- Making it impossible to call `Policy.open()` on an already opened + policy (#4606). +- **Bug fix** (#4575): Fix bug with async publish for batches. There + were two related bugs. The first: if a batch exceeds the `max_messages` + from the batch settings, then the `commit()` will fail. The second: + when a "monitor" worker that after `max_latency` seconds, a failure + can occur if a new message is added to the batch during the publish. + To fix, the following changes were implemented: + - Adding a "STARTING" status for `Batch.commit()` (#4614). This + fixes the issue when the batch exceeds `max_messages`. + - Adding extra check in `Batch.will_accept` for the number of + messages (#4612). + - Moving `will_accept()` check out of `PublisherClient.batch()` + factory (#4613). + - Checking `Batch.will_accept` in thread-safe way (#4616). 
+- **Breaking API change**: As part of #4613, changing `PublisherClient.batch()` + to no longer accept a `message` (since the `will_accept` check needs to + happen in a more concurrency friendly way). In addition, changing the + `create` argument so that it means "create even if batch already exists" + rather than "create if missing". + +### Documentation + +- Add more explicit documentation for Pub / Sub `Message.attributes` (#4601). +- Make `Message.__repr__` a bit prettier / more useful (#4602). + +PyPI: https://pypi.org/project/google-cloud-pubsub/0.30.0/ + ## 0.29.4 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 48dfe0f1fc7e..e5e17bc04169 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.29.5.dev1', + version='0.30.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From c9a2e676588ec9b90b7a08ae4e96fa0cfb3a29ca Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 19 Dec 2017 09:34:04 -0800 Subject: [PATCH 0203/1197] Adding back `.dev1` suffixes after #4619. (#4624) * Adding back `.dev1` suffixes after #4619. Also removing "Pub / Sub" mention from the Pub / Sub changelog since it is redundant. * Fixing vision URL. * Fixing "nonsense" sentence in Pub / Sub changelog. --- packages/google-cloud-pubsub/CHANGELOG.md | 10 +++++----- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bcce254077de..971db1e15ee3 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -8,7 +8,7 @@ ### Notable Implementation Changes -- Dropping redundant Pub / Sub `Policy._paused` data member (#4568). 
+- Dropping redundant `Policy._paused` data member (#4568). - Removing redundant "active" check in policy (#4603). - Adding a `Consumer.active` property (#4604). - Making it impossible to call `Policy.open()` on an already opened @@ -16,9 +16,9 @@ - **Bug fix** (#4575): Fix bug with async publish for batches. There were two related bugs. The first: if a batch exceeds the `max_messages` from the batch settings, then the `commit()` will fail. The second: - when a "monitor" worker that after `max_latency` seconds, a failure - can occur if a new message is added to the batch during the publish. - To fix, the following changes were implemented: + when a "monitor" worker calls `commit()` after `max_latency` seconds, + a failure can occur if a new message is added to the batch **during** + the commit. To fix, the following changes were implemented: - Adding a "STARTING" status for `Batch.commit()` (#4614). This fixes the issue when the batch exceeds `max_messages`. - Adding extra check in `Batch.will_accept` for the number of @@ -34,7 +34,7 @@ ### Documentation -- Add more explicit documentation for Pub / Sub `Message.attributes` (#4601). +- Add more explicit documentation for `Message.attributes` (#4601). - Make `Message.__repr__` a bit prettier / more useful (#4602). PyPI: https://pypi.org/project/google-cloud-pubsub/0.30.0/ diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e5e17bc04169..9202e44d6af1 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.30.0', + version='0.30.1.dev1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 2041a28c75890278f41e7350a48f462991d5e5ea Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 19 Dec 2017 15:38:33 -0800 Subject: [PATCH 0204/1197] Moving lock factory used in publisher client to the Batch implementation. 
(#4628) --- .../google/cloud/pubsub_v1/publisher/batch/base.py | 10 ++++++++++ .../google/cloud/pubsub_v1/publisher/batch/thread.py | 9 +++++++++ .../google/cloud/pubsub_v1/publisher/client.py | 12 ++++++++---- .../unit/pubsub_v1/publisher/batch/test_thread.py | 7 +++++++ .../pubsub_v1/publisher/test_publisher_client.py | 2 +- 5 files changed, 35 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index 03705dce9c14..dae0dafb9fd5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -49,6 +49,16 @@ def __len__(self): """Return the number of messages currently in the batch.""" return len(self.messages) + @staticmethod + @abc.abstractmethod + def make_lock(): + """Return a lock in the chosen concurrency model. + + Returns: + ContextManager: A newly created lock. + """ + raise NotImplementedError + @property @abc.abstractmethod def messages(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index b339865220b9..12b9790c6b80 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -90,6 +90,15 @@ def __init__(self, client, topic, settings, autocommit=True): ) self._thread.start() + @staticmethod + def make_lock(): + """Return a threading lock. + + Returns: + _thread.Lock: A newly created lock. 
+ """ + return threading.Lock() + @property def client(self): """~.pubsub_v1.client.PublisherClient: A publisher client.""" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 3b1a7a2a7d2d..d2faedad1d8a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -17,7 +17,6 @@ import copy import os import pkg_resources -import threading import grpc import six @@ -44,16 +43,21 @@ class Client(object): Args: batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The settings for batch publishing. - batch_class (class): A class that describes how to handle + batch_class (Optional[type]): A class that describes how to handle batches. You may subclass the :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in order to define your own batcher. This is primarily provided to allow use of different concurrency models; the default - is based on :class:`threading.Thread`. + is based on :class:`threading.Thread`. This class should also have + a class method (or static method) that takes no arguments and + produces a lock that can be used as a context manager. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. Generally, you should not need to set additional keyword arguments. + Before being passed along to the GAPIC constructor, a channel may + be added if ``credentials`` are passed explicitly or if the + Pub / Sub emulator is detected as running. """ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # Sanity check: Is our goal to use the emulator? @@ -86,7 +90,7 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # The batches on the publisher client are responsible for holding # messages. 
One batch exists for each topic. self._batch_class = batch_class - self._batch_lock = threading.Lock() + self._batch_lock = batch_class.make_lock() self._batches = {} @property diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 903ae90794a4..2c8852576308 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -72,6 +72,13 @@ def test_init_infinite_latency(): assert batch._thread is None +@mock.patch.object(threading, 'Lock') +def test_make_lock(Lock): + lock = Batch.make_lock() + assert lock is Lock.return_value + Lock.assert_called_once_with() + + def test_client(): client = create_client() settings = types.BatchSettings() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index a519ddc645fd..55a4990761d4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -16,8 +16,8 @@ import os from google.auth import credentials -import mock +import mock import pytest from google.cloud.pubsub_v1.gapic import publisher_client From 5f7d27afd1aec231b648397f3b666113d7bfff61 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Tue, 19 Dec 2017 16:33:15 -0800 Subject: [PATCH 0205/1197] Fix build issue with datastore docs. 
(#4633) Also: - Fix an ambiguous name (`type`) in Pub / Sub (was introduced because I merged with the docs build failing) - Marking the `dataproc` version as `.dev1` --- .../google/cloud/pubsub_v1/publisher/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index d2faedad1d8a..b38393c198ae 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -43,7 +43,7 @@ class Client(object): Args: batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The settings for batch publishing. - batch_class (Optional[type]): A class that describes how to handle + batch_class (Optional[Type]): A class that describes how to handle batches. You may subclass the :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in order to define your own batcher. This is primarily provided to From b98e51c4428e0c1d16dc024e6de5540674e1fbe2 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 20 Dec 2017 09:04:13 -0800 Subject: [PATCH 0206/1197] Use a UUID (rather than a sentinel object) on Pub / Sub `Future`. (#4634) Also changing identity check to equality check for Future._SENTINEL. 
--- .../google/cloud/pubsub_v1/futures.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index f73893503301..067fc7429ab9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import threading +import uuid import google.api_core.future from google.cloud.pubsub_v1.publisher import exceptions @@ -29,7 +30,11 @@ class Future(google.api_core.future.Future): This object should not be created directly, but is returned by other methods in this library. """ - _SENTINEL = object() + + # This could be a sentinel object or None, but the sentinel object's ID + # can change if the process is forked, and None has the possibility of + # actually being a result. + _SENTINEL = uuid.uuid4() def __init__(self): self._result = self._SENTINEL @@ -68,8 +73,8 @@ def done(self): This still returns True in failure cases; checking :meth:`result` or :meth:`exception` is the canonical way to assess success or failure. """ - return (self._exception is not self._SENTINEL or - self._result is not self._SENTINEL) + return (self._exception != self._SENTINEL or + self._result != self._SENTINEL) def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -118,7 +123,7 @@ def exception(self, timeout=None): raise exceptions.TimeoutError('Timed out waiting for result.') # If the batch completed successfully, this should return None. - if self._result is not self._SENTINEL: + if self._result != self._SENTINEL: return None # Okay, this batch had an error; this should return it. From 9a3121105a6474c983594c2a6232ac0109c59153 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 20 Dec 2017 09:33:03 -0800 Subject: [PATCH 0207/1197] Fixing race condition in Pub / Sub system tests. 
(#4632) This was a programming error on my part (using a brand new lock every time it was held). --- packages/google-cloud-pubsub/tests/system.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 2a90e7802c58..8b0d568e4705 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -187,11 +187,12 @@ class AckCallback(object): def __init__(self): self.calls = 0 + self.lock = threading.Lock() def __call__(self, message): message.ack() # Only increment the number of calls **after** finishing. - with threading.Lock(): + with self.lock: self.calls += 1 @@ -201,13 +202,14 @@ def __init__(self, sleep_time): self.sleep_time = sleep_time self.calls = 0 self.call_times = [] + self.lock = threading.Lock() def __call__(self, message): now = datetime.datetime.now() time.sleep(self.sleep_time) message.ack() # Only increment the number of calls **after** finishing. - with threading.Lock(): + with self.lock: # list.append() is thread-safe, but we still wait until # ``calls`` is incremented to do it. self.call_times.append(now) From 4ad5526845f93a80f5d4e72825a7d48c2bdc0222 Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Wed, 20 Dec 2017 15:32:13 -0800 Subject: [PATCH 0208/1197] Requiring 'grpcio >= 1.8.2'. (#4642) This is due to a nasty spinlock bug [1] that has been partially fixed [2] in `1.8.2`. 
[1]: https://github.com/grpc/grpc/issues/9688 [2]: https://github.com/grpc/grpc/pull/13665 --- packages/google-cloud-pubsub/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9202e44d6af1..2e38d2c84f32 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -52,6 +52,7 @@ REQUIREMENTS = [ 'google-api-core[grpc] >= 0.1.2, < 0.2.0dev', + 'grpcio >= 1.8.2', 'google-auth >= 1.0.2, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', From 8db619876be1202910928366ea4739a9252adf6d Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 21 Dec 2017 09:14:13 -0800 Subject: [PATCH 0209/1197] Making `google-cloud-pubsub==0.30.1` release. (#4644) * Making `google-cloud-pubsub==0.30.1` release. * Also making `google-api-core==0.1.3` release. * Dropping the Pub / Sub dependency on `grpcio`. --- packages/google-cloud-pubsub/CHANGELOG.md | 17 +++++++++++++++++ packages/google-cloud-pubsub/setup.py | 5 ++--- 2 files changed, 19 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 971db1e15ee3..0912251259f2 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.30.1 + +### Notable Implementation Changes + +- Moving lock factory used in publisher client to the Batch + implementation (#4628). +- Use a UUID (rather than a sentinel object) on `Future` (#4634). + +### Dependencies + +- Upgrading to `google-api-core==0.1.3` which depends on the latest + `grpcio==1.8.2` (#4642). This fixes #4600. For details, see related + gRPC [bug](https://github.com/grpc/grpc/issues/9688) and + [fix](https://github.com/grpc/grpc/pull/13665). 
+ +PyPI: https://pypi.org/project/google-cloud-pubsub/0.30.1/ + ## 0.30.0 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 2e38d2c84f32..5c50623dbea3 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,8 +51,7 @@ REQUIREMENTS = [ - 'google-api-core[grpc] >= 0.1.2, < 0.2.0dev', - 'grpcio >= 1.8.2', + 'google-api-core[grpc] >= 0.1.3, < 0.2.0dev', 'google-auth >= 1.0.2, < 2.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'psutil >= 5.2.2, < 6.0dev', @@ -60,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.30.1.dev1', + version='0.30.1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 274e7377d63a946de9f03aa0b90ccf92fad1f59a Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 21 Dec 2017 10:06:14 -0800 Subject: [PATCH 0210/1197] Allowed a custom `Event` type in Pub / Sub futures. (#4643) --- .../google/cloud/pubsub_v1/futures.py | 15 ++++- .../cloud/pubsub_v1/publisher/batch/thread.py | 2 +- .../cloud/pubsub_v1/publisher/futures.py | 9 +++ .../cloud/pubsub_v1/subscriber/futures.py | 11 +++- .../pubsub_v1/subscriber/policy/thread.py | 2 +- .../pubsub_v1/{publisher => }/test_futures.py | 61 +++++++++++++------ 6 files changed, 77 insertions(+), 23 deletions(-) rename packages/google-cloud-pubsub/tests/unit/pubsub_v1/{publisher => }/test_futures.py (69%) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 067fc7429ab9..0940a47709a3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -29,6 +29,15 @@ class Future(google.api_core.future.Future): This object should not be created directly, but is returned by other methods in this library. 
+ + Args: + completed (Optional[Any]): An event, with the same interface as + :class:`threading.Event`. This is provided so that callers + with different concurrency models (e.g. ``threading`` or + ``multiprocessing``) can supply an event that is compatible + with that model. The ``wait()`` and ``set()`` methods will be + used. If this argument is not provided, then a new + :class:`threading.Event` will be created and used. """ # This could be a sentinel object or None, but the sentinel object's ID @@ -36,11 +45,13 @@ class Future(google.api_core.future.Future): # actually being a result. _SENTINEL = uuid.uuid4() - def __init__(self): + def __init__(self, completed=None): self._result = self._SENTINEL self._exception = self._SENTINEL self._callbacks = [] - self._completed = threading.Event() + if completed is None: + completed = threading.Event() + self._completed = completed def cancel(self): """Actions in Pub/Sub generally may not be canceled. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 12b9790c6b80..73cafb9cde13 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -271,7 +271,7 @@ def publish(self, message): self._messages.append(message) # Track the future on this batch (so that the result of the # future can be set). - future = futures.Future() + future = futures.Future(completed=threading.Event()) self._futures.append(future) # Determine the number of messages before releasing the lock. 
num_messages = len(self._messages) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index cca1c97f5f2f..9c0e93120bc5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -25,6 +25,15 @@ class Future(futures.Future): This object should not be created directly, but is returned by other methods in this library. + + Args: + completed (Optional[Any]): An event, with the same interface as + :class:`threading.Event`. This is provided so that callers + with different concurrency models (e.g. ``threading`` or + ``multiprocessing``) can supply an event that is compatible + with that model. The ``wait()`` and ``set()`` methods will be + used. If this argument is not provided, then a new + :class:`threading.Event` will be created and used. """ # The publishing-side subclass does not need any special behavior # at this time. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index fa1f457a2602..7114a32c9600 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -29,10 +29,17 @@ class Future(futures.Future): Args: policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy that creates this Future. + completed (Optional[Any]): An event, with the same interface as + :class:`threading.Event`. This is provided so that callers + with different concurrency models (e.g. ``threading`` or + ``multiprocessing``) can supply an event that is compatible + with that model. The ``wait()`` and ``set()`` methods will be + used. If this argument is not provided, then a new + :class:`threading.Event` will be created and used. 
""" - def __init__(self, policy): + def __init__(self, policy, completed=None): self._policy = policy - super(Future, self).__init__() + super(Future, self).__init__(completed=completed) def running(self): """Return whether this subscription is opened with this Future. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 39f161a3b93e..37d8fdc63519 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -263,7 +263,7 @@ def open(self, callback): # Create the Future that this method will return. # This future is the main thread's interface to handle exceptions, # block on the subscription, etc. - self._future = Future(policy=self) + self._future = Future(policy=self, completed=threading.Event()) # Start the thread to pass the requests. self._callback = callback diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py similarity index 69% rename from packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py rename to packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py index f179afa7012b..9dd77b506267 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py @@ -12,44 +12,71 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock +import threading +import mock import pytest -from google.cloud.pubsub_v1.publisher import exceptions -from google.cloud.pubsub_v1.publisher.futures import Future +from google.cloud.pubsub_v1 import exceptions +from google.cloud.pubsub_v1 import futures + + +def _future(*args, **kwargs): + return futures.Future(*args, **kwargs) + + +def test_constructor_defaults(): + with mock.patch.object(threading, 'Event', autospec=True) as Event: + future = _future() + + assert future._result == futures.Future._SENTINEL + assert future._exception == futures.Future._SENTINEL + assert future._callbacks == [] + assert future._completed is Event.return_value + + Event.assert_called_once_with() + + +def test_constructor_explicit_completed(): + completed = mock.sentinel.completed + future = _future(completed=completed) + + assert future._result == futures.Future._SENTINEL + assert future._exception == futures.Future._SENTINEL + assert future._callbacks == [] + assert future._completed is completed def test_cancel(): - assert Future().cancel() is False + assert _future().cancel() is False def test_cancelled(): - assert Future().cancelled() is False + assert _future().cancelled() is False def test_running(): - future = Future() + future = _future() assert future.running() is True future.set_result('foobar') assert future.running() is False def test_done(): - future = Future() + future = _future() assert future.done() is False future.set_result('12345') assert future.done() is True def test_exception_no_error(): - future = Future() + future = _future() future.set_result('12345') assert future.exception() is None def test_exception_with_error(): - future = Future() + future = _future() error = RuntimeError('Something really bad happened.') future.set_exception(error) @@ -63,26 +90,26 @@ def test_exception_with_error(): def test_exception_timeout(): - future = Future() + future = _future() with pytest.raises(exceptions.TimeoutError): future.exception(timeout=0.01) def 
test_result_no_error(): - future = Future() + future = _future() future.set_result('42') assert future.result() == '42' def test_result_with_error(): - future = Future() + future = _future() future.set_exception(RuntimeError('Something really bad happened.')) with pytest.raises(RuntimeError): future.result() def test_add_done_callback_pending_batch(): - future = Future() + future = _future() callback = mock.Mock() future.add_done_callback(callback) assert len(future._callbacks) == 1 @@ -91,7 +118,7 @@ def test_add_done_callback_pending_batch(): def test_add_done_callback_completed_batch(): - future = Future() + future = _future() future.set_result('12345') callback = mock.Mock(spec=()) future.add_done_callback(callback) @@ -99,7 +126,7 @@ def test_add_done_callback_completed_batch(): def test_trigger(): - future = Future() + future = _future() callback = mock.Mock(spec=()) future.add_done_callback(callback) assert callback.call_count == 0 @@ -108,14 +135,14 @@ def test_trigger(): def test_set_result_once_only(): - future = Future() + future = _future() future.set_result('12345') with pytest.raises(RuntimeError): future.set_result('67890') def test_set_exception_once_only(): - future = Future() + future = _future() future.set_exception(ValueError('wah wah')) with pytest.raises(RuntimeError): future.set_exception(TypeError('other wah wah')) From b3fcfecd4c532ad49808d2b66d8ac0ff2a57cd2b Mon Sep 17 00:00:00 2001 From: Danny Hermes Date: Thu, 21 Dec 2017 10:47:51 -0800 Subject: [PATCH 0211/1197] Adding back `.dev1` suffix after #4644. (#4651) Also adding a newly added note to Pub / Sub release notes (I already added this in the official release). 
--- packages/google-cloud-pubsub/CHANGELOG.md | 2 ++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0912251259f2..35de9206567a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -11,6 +11,8 @@ - Moving lock factory used in publisher client to the Batch implementation (#4628). - Use a UUID (rather than a sentinel object) on `Future` (#4634). +- Apply scopes to explicitly provided credentials if needed (#4594). + Fixes #4479. This feature comes as part of `google-api-core==0.1.3`. ### Dependencies diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 5c50623dbea3..bf24dc2c03cf 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.30.1', + version='0.30.2.dev1', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From 35cd9668ba5fd8d139cc919df7c3d0eef9f8a9f4 Mon Sep 17 00:00:00 2001 From: Juan M Uys Date: Mon, 5 Feb 2018 21:39:09 +0000 Subject: [PATCH 0212/1197] Clarify that `modify_ack_deadline` resets the deadline (#4822) --- .../google/cloud/pubsub_v1/subscriber/message.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index cb27876942a0..27ad9f0e7f32 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -223,18 +223,15 @@ def lease(self): ) def modify_ack_deadline(self, seconds): - """Set the deadline for acknowledgement to the given value. 
+ """Resets the deadline for acknowledgement. + + New deadline will be the given value of seconds from now. The default implementation handles this for you; you should not need to manually deal with setting ack deadlines. The exception case is if you are implementing your own custom subclass of :class:`~.pubsub_v1.subcriber._consumer.Consumer`. - .. note:: - This is not an extension; it *sets* the deadline to the given - number of seconds from right now. It is even possible to use this - method to make a deadline shorter. - Args: seconds (int): The number of seconds to set the lease deadline to. This should be between 0 and 600. Due to network latency, From f5f0af79d6c2af604e23a6876067fedf8a2a3b78 Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 8 Feb 2018 09:55:05 -0800 Subject: [PATCH 0213/1197] Pub/Sub: Update default batch size to 10 MB (#4857) The Pub/Sub default is 10 MB. This allows sending the max batch size unless the user intervenes. --- .../google-cloud-pubsub/google/cloud/pubsub_v1/types.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index f601915d0a60..3d285ec2252a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -41,9 +41,9 @@ ['max_bytes', 'max_latency', 'max_messages'], ) BatchSettings.__new__.__defaults__ = ( - 1024 * 1024 * 5, # max_bytes: 5 MB - 0.05, # max_latency: 0.05 seconds - 1000, # max_messages: 1,000 + 1024 * 1024 * 10, # max_bytes: 10 MB + 0.05, # max_latency: 0.05 seconds + 1000, # max_messages: 1,000 ) # Define the type class and default values for flow control settings. 
From 430f68b2135093bc75053d697c46f28ad8ce3784 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Fri, 9 Feb 2018 12:39:56 -0800 Subject: [PATCH 0214/1197] Fix unit test for default `max_bytes` value (#4860) --- .../tests/unit/pubsub_v1/publisher/test_publisher_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 55a4990761d4..7ce32ca07396 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -32,7 +32,7 @@ def test_init(): # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. assert isinstance(client.api, publisher_client.PublisherClient) - assert client.batch_settings.max_bytes == 5 * (2 ** 20) + assert client.batch_settings.max_bytes == 10 * (2 ** 20) assert client.batch_settings.max_latency == 0.05 assert client.batch_settings.max_messages == 1000 From 4ce1c69bba87cf10c22e0d19cdf28ba22014735a Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Mon, 12 Feb 2018 16:00:52 -0800 Subject: [PATCH 0215/1197] Raise ValueError when a message is too large for a batch (#4872) --- .../google/cloud/pubsub_v1/publisher/client.py | 18 ++++++++++++++---- packages/google-cloud-pubsub/tests/system.py | 1 - .../publisher/test_publisher_client.py | 13 +++++++++++++ 3 files changed, 27 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index b38393c198ae..8fd91bcc9153 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -180,8 +180,10 @@ 
def publish(self, topic, data, **attrs): # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. if not isinstance(data, six.binary_type): - raise TypeError('Data being published to Pub/Sub must be sent ' - 'as a bytestring.') + raise TypeError( + 'Data being published to Pub/Sub must be sent ' + 'as a bytestring.' + ) # Coerce all attributes to text strings. for k, v in copy.copy(attrs).items(): @@ -190,11 +192,19 @@ def publish(self, topic, data, **attrs): if isinstance(v, six.binary_type): attrs[k] = v.decode('utf-8') continue - raise TypeError('All attributes being published to Pub/Sub must ' - 'be sent as text strings.') + raise TypeError( + 'All attributes being published to Pub/Sub must ' + 'be sent as text strings.' + ) # Create the Pub/Sub message object. message = types.PubsubMessage(data=data, attributes=attrs) + if message.ByteSize() > self.batch_settings.max_bytes: + raise ValueError( + 'Message being published is too large for the ' + 'batch settings with max bytes {}.'. + format(self.batch_settings.max_bytes) + ) # Delegate the publishing to the batch. 
batch = self.batch(topic) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 8b0d568e4705..e02c42bc7e2a 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -80,7 +80,6 @@ def test_publish_messages(publisher, topic_path, cleanup): num=str(index), ), ) - for future in futures: result = future.result() assert isinstance(result, six.string_types) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 7ce32ca07396..311c46a00a56 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -132,6 +132,19 @@ def test_publish_data_not_bytestring_error(): client.publish(topic, 42) +def test_publish_data_too_large(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + topic = 'topic/path' + client.batch_settings = types.BatchSettings( + 0, + client.batch_settings.max_latency, + client.batch_settings.max_messages + ) + with pytest.raises(ValueError): + client.publish(topic, b'This is a text string.') + + def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) From 5eede48db7a0e3e5bb1d7ac1b234ed789be6e01d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 16 Feb 2018 11:29:33 -0800 Subject: [PATCH 0216/1197] Add pending request backpressure for subscriber. (#4892) This change includes the consumer's pending request backlog in the policy's load calculations. This allows the policy to pause (and resume) the response stream if there are a large number of outstanding requests to be send on the stream. 
This additionally adds `max_requests` to `FlowControl` to allow tweaking this number. Resolves: #4792 Related: #4841 --- .../cloud/pubsub_v1/subscriber/_consumer.py | 24 +++++++--- .../cloud/pubsub_v1/subscriber/policy/base.py | 35 +++++++++++---- .../pubsub_v1/subscriber/policy/thread.py | 21 ++------- .../google/cloud/pubsub_v1/types.py | 3 +- .../pubsub_v1/subscriber/test_consumer.py | 2 +- .../pubsub_v1/subscriber/test_policy_base.py | 45 ++++++++++++++++++- .../subscriber/test_policy_thread.py | 14 ------ 7 files changed, 95 insertions(+), 49 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index ae1270fdb3c6..a95efadf4d72 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -187,6 +187,10 @@ def __init__(self): def active(self): """bool: Indicates if the consumer is active. + *Active* means that the stream is open and that it is possible to + send and receive messages. This is distinct from *pausing* which just + pauses *response* consumption. + This is intended to be an implementation independent way of indicating that the consumer is stopped. (E.g. so a policy that owns a consumer doesn't need to know what a ``threading.Event`` is.) @@ -202,6 +206,14 @@ def send_request(self, request): with self._put_lock: self._request_queue.put(request) + @property + def pending_requests(self): + """int: An approximate count of the outstanding requests. + + This can be used to determine if the consumer should be paused if there + are too many outstanding requests.""" + return self._request_queue.qsize() + def _request_generator_thread(self, policy): """Generate requests for the stream. 
@@ -231,8 +243,9 @@ def _request_generator_thread(self, policy): _LOGGER.debug('Request generator signaled to stop.') break - _LOGGER.debug('Sending request:\n%r', request) + _LOGGER.debug('Sending request on stream') yield request + policy.on_request(request) def _stop_request_generator(self, request_generator, response_generator): """Ensure a request generator is closed. @@ -325,7 +338,7 @@ def _blocking_consume(self, policy): # checks to make sure we're not exiting before opening a new # stream. if self._stopped.is_set(): - _LOGGER.debug('Event signalled consumer exit.') + _LOGGER.debug('Event signaled consumer exit.') break request_generator = self._request_generator_thread(policy) @@ -334,14 +347,14 @@ def _blocking_consume(self, policy): response_generator, self._can_consume) try: for response in responses: - _LOGGER.debug('Received response:\n%r', response) + _LOGGER.debug('Received response on stream') policy.on_response(response) # If the loop above exits without an exception, then the # request stream terminated cleanly, which should only happen # when it was signaled to do so by stop_consuming. In this # case, break out of the while loop and exit this thread. - _LOGGER.debug('Clean RPC loop exit signalled consumer exit.') + _LOGGER.debug('Clean RPC loop exit signaled consumer exit.') break except Exception as exc: recover = policy.on_exception(exc) @@ -364,7 +377,8 @@ def pause(self): This will clear the ``_can_consume`` event which is checked every time :meth:`_blocking_consume` consumes a response from the - bidirectional streaming pull. + bidirectional streaming pull. *requests* can still be sent along + the stream. Complement to :meth:`resume`. 
""" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 15a57ad555f7..87cc9ec66884 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -156,8 +156,21 @@ def _load(self): return max([ len(self.managed_ack_ids) / self.flow_control.max_messages, self._bytes / self.flow_control.max_bytes, + self._consumer.pending_requests / self.flow_control.max_requests ]) + def _maybe_resume_consumer(self): + """Check the current load and resume the consumer if needed.""" + # If we have been paused by flow control, check and see if we are + # back within our limits. + # + # In order to not thrash too much, require us to have passed below + # the resume threshold (80% by default) of each flow control setting + # before restarting. + if (self._consumer.paused and + self._load < self.flow_control.resume_threshold): + self._consumer.resume() + def ack(self, ack_id, time_to_ack=None, byte_size=None): """Acknowledge the message corresponding to the given ack_id. @@ -216,15 +229,7 @@ def drop(self, ack_id, byte_size): 'Bytes was unexpectedly negative: %d', self._bytes) self._bytes = 0 - # If we have been paused by flow control, check and see if we are - # back within our limits. - # - # In order to not thrash too much, require us to have passed below - # the resume threshold (80% by default) of each flow control setting - # before restarting. - if (self._consumer.paused and - self._load < self.flow_control.resume_threshold): - self._consumer.resume() + self._maybe_resume_consumer() def get_initial_request(self, ack_queue=False): """Return the initial request. @@ -397,6 +402,18 @@ def on_exception(self, exception): """ raise NotImplementedError + def on_request(self, request): + """Called whenever a request has been sent to gRPC. 
+ + This allows the policy to measure the rate of requests sent along the + stream and apply backpressure by pausing or resuming the consumer + if needed. + + Args: + request (Any): The protobuf request that was sent to gRPC. + """ + self._maybe_resume_consumer() + @abc.abstractmethod def on_response(self, response): """Process a response from gRPC. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 37d8fdc63519..e3f6428c1b09 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -19,7 +19,6 @@ import sys import threading -import grpc from six.moves import queue as queue_mod from google.cloud.pubsub_v1 import types @@ -33,19 +32,6 @@ _CALLBACK_WORKER_NAME = 'Thread-Consumer-CallbackRequestsWorker' -def _callback_completed(future): - """Simple callback that just logs a future's result. - - Used on completion of processing a message received by a - subscriber. - - Args: - future (concurrent.futures.Future): A future returned - from :meth:`~concurrent.futures.Executor.submit`. - """ - _LOGGER.debug('Result: %s', future.result()) - - def _do_nothing_callback(message): """Default callback for messages received by subscriber. 
@@ -332,8 +318,7 @@ def on_response(self, response): """ for msg in response.received_messages: _LOGGER.debug( - 'Using %s to process new message received:\n%r', - self._callback, msg) + 'Using %s to process message with ack_id %s.', + self._callback, msg.ack_id) message = Message(msg.message, msg.ack_id, self._request_queue) - future = self._executor.submit(self._callback, message) - future.add_done_callback(_callback_completed) + self._executor.submit(self._callback, message) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 3d285ec2252a..0740cdc4df00 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -53,12 +53,13 @@ # The defaults should be fine for most use cases. FlowControl = collections.namedtuple( 'FlowControl', - ['max_bytes', 'max_messages', 'resume_threshold'], + ['max_bytes', 'max_messages', 'resume_threshold', 'max_requests'], ) FlowControl.__new__.__defaults__ = ( psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM float('inf'), # max_messages: no limit 0.8, # resume_threshold: 80% + 100, # max_requests: 100 ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index b503a6a8895b..88f46d2b50b7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -81,7 +81,7 @@ def test_blocking_consume_when_exiting(_LOGGER): # Make sure method cleanly exits. 
assert consumer._blocking_consume(None) is None - _LOGGER.debug.assert_called_once_with('Event signalled consumer exit.') + _LOGGER.debug.assert_called_once_with('Event signaled consumer exit.') class OnException(object): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 85dbc527c062..a3d0f55caee4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -169,7 +169,30 @@ def test_drop_below_threshold(): assert consumer.paused is False -def test_load(): +def test_on_request_below_threshold(): + """Establish that we resume a paused subscription when the pending + requests count is below threshold.""" + flow_control = types.FlowControl(max_requests=100) + policy = create_policy(flow_control=flow_control) + consumer = policy._consumer + + assert consumer.paused is True + + pending_requests_patch = mock.patch.object( + consumer.__class__, 'pending_requests', new_callable=mock.PropertyMock) + with pending_requests_patch as pending_requests: + # should still be paused, not under the threshold. 
+ pending_requests.return_value = 90 + policy.on_request(None) + assert consumer.paused is True + + # should unpause, we're under the resume threshold + pending_requests.return_value = 50 + policy.on_request(None) + assert consumer.paused is False + + +def test_load_w_lease(): flow_control = types.FlowControl(max_messages=10, max_bytes=1000) policy = create_policy(flow_control=flow_control) consumer = policy._consumer @@ -191,6 +214,26 @@ def test_load(): pause.assert_called_once_with() +def test_load_w_requests(): + flow_control = types.FlowControl(max_bytes=100, max_requests=100) + policy = create_policy(flow_control=flow_control) + consumer = policy._consumer + + pending_requests_patch = mock.patch.object( + consumer.__class__, 'pending_requests', new_callable=mock.PropertyMock) + with pending_requests_patch as pending_requests: + pending_requests.return_value = 0 + assert policy._load == 0 + + pending_requests.return_value = 100 + print(consumer.pending_requests) + assert policy._load == 1 + + # If bytes count is higher, it should return that. 
+ policy._bytes = 110 + assert policy._load == 1.1 + + def test_modify_ack_deadline(): policy = create_policy() with mock.patch.object(policy._consumer, 'send_request') as send_request: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index a1b254fb2eee..2f9286c9d5ec 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -25,7 +25,6 @@ from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import _helper_threads from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber.futures import Future from google.cloud.pubsub_v1.subscriber.policy import thread @@ -229,16 +228,3 @@ def test_on_response(): for call in submit_calls: assert call[1][0] == callback assert isinstance(call[1][1], message.Message) - - add_done_callback_calls = [ - m for m in future.method_calls if m[0] == 'add_done_callback'] - assert len(add_done_callback_calls) == 2 - for call in add_done_callback_calls: - assert call[1][0] == thread._callback_completed - - -def test__callback_completed(): - future = mock.Mock() - thread._callback_completed(future) - result_calls = [m for m in future.method_calls if m[0] == 'result'] - assert len(result_calls) == 1 From c8d29752340cca81a9713c4a4be08aa1677fdbc3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Feb 2018 11:15:26 -0800 Subject: [PATCH 0217/1197] Add ability for subscriber to batch requests. (#4895) The size of the batches and the rate at which they are submitted is controlled by two new flow control settings, `max_request_batch_size` and `max_request_batch_latency`. 
--- .../pubsub_v1/subscriber/_helper_threads.py | 72 ++++++++-- .../cloud/pubsub_v1/subscriber/message.py | 63 ++++---- .../cloud/pubsub_v1/subscriber/policy/base.py | 135 ++++++++++++------ .../pubsub_v1/subscriber/policy/thread.py | 47 +++--- .../google/cloud/pubsub_v1/types.py | 5 +- .../subscriber/test_helper_threads.py | 85 +++++++++-- .../unit/pubsub_v1/subscriber/test_message.py | 78 ++++++---- .../pubsub_v1/subscriber/test_policy_base.py | 47 +++--- .../subscriber/test_policy_thread.py | 36 ++--- 9 files changed, 378 insertions(+), 190 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py index 339ba2d3a11f..b191eec90256 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py @@ -13,8 +13,11 @@ # limitations under the License. import logging +import time import uuid +from six.moves import queue + __all__ = ( 'QueueCallbackWorker', @@ -31,6 +34,36 @@ STOP = uuid.uuid4() +def _get_many(queue_, max_items=None, max_latency=0): + """Get multiple items from a Queue. + + Gets at least one (blocking) and at most ``max_items`` items + (non-blocking) from a given Queue. Does not mark the items as done. + + Args: + queue_ (~queue.Queue`): The Queue to get items from. + max_items (int): The maximum number of items to get. If ``None``, then + all available items in the queue are returned. + max_latency (float): The maximum number of seconds to wait for more + than one item from a queue. This number includes the time required + to retrieve the first item. + + Returns: + Sequence[Any]: A sequence of items retrieved from the queue. + """ + start = time.time() + # Always return at least one item. 
+ items = [queue_.get()] + while max_items is None or len(items) < max_items: + try: + elapsed = time.time() - start + timeout = max(0, max_latency - elapsed) + items.append(queue_.get(timeout=timeout)) + except queue.Empty: + break + return items + + class QueueCallbackWorker(object): """A helper that executes a callback for every item in the queue. @@ -42,27 +75,42 @@ class QueueCallbackWorker(object): concurrency boundary implemented by ``executor``. Items will be popped off (with a blocking ``get()``) until :attr:`STOP` is encountered. - callback (Callable[[str, Dict], Any]): A callback that can process - items pulled off of the queue. Items are assumed to be a pair - of a method name to be invoked and a dictionary of keyword - arguments for that method. + callback (Callable[Sequence[Any], Any]): A callback that can process + items pulled off of the queue. Multiple items will be passed to + the callback in batches. + max_items (int): The maximum amount of items that will be passed to the + callback at a time. + max_latency (float): The maximum amount of time in seconds to wait for + additional items before executing the callback. """ - def __init__(self, queue, callback): + def __init__(self, queue, callback, max_items=100, max_latency=0): self.queue = queue self._callback = callback + self.max_items = max_items + self.max_latency = max_latency def __call__(self): - while True: - item = self.queue.get() - if item == STOP: - _LOGGER.debug('Exiting the QueueCallbackWorker.') - return + continue_ = True + while continue_: + items = _get_many( + self.queue, + max_items=self.max_items, + max_latency=self.max_latency) + + # If stop is in the items, process all items up to STOP and then + # exit. + try: + items = items[:items.index(STOP)] + continue_ = False + except ValueError: + pass # Run the callback. If any exceptions occur, log them and # continue. 
try: - action, kwargs = item - self._callback(action, kwargs) + self._callback(items) except Exception as exc: _LOGGER.error('%s: %s', exc.__class__.__name__, exc) + + _LOGGER.debug('Exiting the QueueCallbackWorker.') diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 27ad9f0e7f32..4af03976f27e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -18,6 +18,8 @@ import math import time +from google.cloud.pubsub_v1.subscriber.policy import base as base_policy + _MESSAGE_REPR = """\ Message {{ @@ -172,14 +174,11 @@ def ack(self): """ time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( - ( - 'ack', - { - 'ack_id': self._ack_id, - 'byte_size': self.size, - 'time_to_ack': time_to_ack, - }, - ), + base_policy.AckRequest( + ack_id=self._ack_id, + byte_size=self.size, + time_to_ack=time_to_ack + ) ) def drop(self): @@ -196,13 +195,10 @@ def drop(self): directly. """ self._request_queue.put( - ( - 'drop', - { - 'ack_id': self._ack_id, - 'byte_size': self.size, - }, - ), + base_policy.DropRequest( + ack_id=self._ack_id, + byte_size=self.size + ) ) def lease(self): @@ -213,19 +209,16 @@ def lease(self): need to call it manually. """ self._request_queue.put( - ( - 'lease', - { - 'ack_id': self._ack_id, - 'byte_size': self.size, - }, - ), + base_policy.LeaseRequest( + ack_id=self._ack_id, + byte_size=self.size + ) ) def modify_ack_deadline(self, seconds): """Resets the deadline for acknowledgement. - - New deadline will be the given value of seconds from now. + + New deadline will be the given value of seconds from now. The default implementation handles this for you; you should not need to manually deal with setting ack deadlines. 
The exception case is @@ -238,13 +231,10 @@ def modify_ack_deadline(self, seconds): values below 10 are advised against. """ self._request_queue.put( - ( - 'modify_ack_deadline', - { - 'ack_id': self._ack_id, - 'seconds': seconds, - }, - ), + base_policy.ModAckRequest( + ack_id=self._ack_id, + seconds=seconds + ) ) def nack(self): @@ -253,11 +243,8 @@ def nack(self): This will cause the message to be re-delivered to the subscription. """ self._request_queue.put( - ( - 'nack', - { - 'ack_id': self._ack_id, - 'byte_size': self.size, - }, - ), + base_policy.NackRequest( + ack_id=self._ack_id, + byte_size=self.size + ) ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 87cc9ec66884..be098c5eeea8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -17,6 +17,7 @@ from __future__ import absolute_import, division import abc +import collections import logging import random import time @@ -31,6 +32,33 @@ _LOGGER = logging.getLogger(__name__) +# Namedtuples for management requests. Used by the Message class to communicate +# items of work back to the policy. 
+AckRequest = collections.namedtuple( + 'AckRequest', + ['ack_id', 'byte_size', 'time_to_ack'], +) + +DropRequest = collections.namedtuple( + 'DropRequest', + ['ack_id', 'byte_size'], +) + +LeaseRequest = collections.namedtuple( + 'LeaseRequest', + ['ack_id', 'byte_size'], +) + +ModAckRequest = collections.namedtuple( + 'ModAckRequest', + ['ack_id', 'seconds'], +) + +NackRequest = collections.namedtuple( + 'NackRequest', + ['ack_id', 'byte_size'], +) + @six.add_metaclass(abc.ABCMeta) class BasePolicy(object): @@ -167,36 +195,39 @@ def _maybe_resume_consumer(self): # In order to not thrash too much, require us to have passed below # the resume threshold (80% by default) of each flow control setting # before restarting. - if (self._consumer.paused and - self._load < self.flow_control.resume_threshold): + if not self._consumer.paused: + return + + if self._load < self.flow_control.resume_threshold: self._consumer.resume() + else: + _LOGGER.debug('Did not resume, current load is %s', self._load) - def ack(self, ack_id, time_to_ack=None, byte_size=None): - """Acknowledge the message corresponding to the given ack_id. + def ack(self, items): + """Acknowledge the given messages. Args: - ack_id (str): The ack ID. - time_to_ack (int): The time it took to ack the message, measured - from when it was received from the subscription. This is used - to improve the automatic ack timing. - byte_size (int): The size of the PubSub message, in bytes. + items(Sequence[AckRequest]): The items to acknowledge. """ # If we got timing information, add it to the histogram. - if time_to_ack is not None: - self.histogram.add(int(time_to_ack)) + for item in items: + time_to_ack = item.time_to_ack + if time_to_ack is not None: + self.histogram.add(int(time_to_ack)) + ack_ids = [item.ack_id for item in items] if self._consumer.active: # Send the request to ack the message. 
- request = types.StreamingPullRequest(ack_ids=[ack_id]) + request = types.StreamingPullRequest(ack_ids=ack_ids) self._consumer.send_request(request) else: - # If the consumer is inactive, then queue the ack_id here; it + # If the consumer is inactive, then queue the ack_ids here; it # will be acked as part of the initial request when the consumer # is started again. - self._ack_on_resume.add(ack_id) + self._ack_on_resume.update(ack_ids) # Remove the message from lease management. - self.drop(ack_id=ack_id, byte_size=byte_size) + self.drop(items) def call_rpc(self, request_generator): """Invoke the Pub/Sub streaming pull RPC. @@ -212,22 +243,25 @@ def call_rpc(self, request_generator): """ return self._client.api.streaming_pull(request_generator) - def drop(self, ack_id, byte_size): - """Remove the given ack ID from lease management. + def drop(self, items): + """Remove the given messages from lease management. Args: - ack_id (str): The ack ID. - byte_size (int): The size of the PubSub message, in bytes. + items(Sequence[DropRequest]): The items to drop. """ # Remove the ack ID from lease management, and decrement the # byte counter. - if ack_id in self.managed_ack_ids: - self.managed_ack_ids.remove(ack_id) - self._bytes -= byte_size - if self._bytes < 0: - _LOGGER.debug( - 'Bytes was unexpectedly negative: %d', self._bytes) - self._bytes = 0 + for item in items: + if item.ack_id in self.managed_ack_ids: + self.managed_ack_ids.remove(item.ack_id) + self._bytes -= item.byte_size + else: + _LOGGER.debug('Item %s wasn\'t managed', item.ack_id) + + if self._bytes < 0: + _LOGGER.debug( + 'Bytes was unexpectedly negative: %d', self._bytes) + self._bytes = 0 self._maybe_resume_consumer() @@ -278,18 +312,21 @@ def get_initial_request(self, ack_queue=False): # Return the initial request. return request - def lease(self, ack_id, byte_size): - """Add the given ack ID to lease management. + def lease(self, items): + """Add the given messages to lease management. 
Args: - ack_id (str): The ack ID. - byte_size (int): The size of the PubSub message, in bytes. + items(Sequence[LeaseRequest]): The items to lease. """ - # Add the ack ID to the set of managed ack IDs, and increment - # the size counter. - if ack_id not in self.managed_ack_ids: - self.managed_ack_ids.add(ack_id) - self._bytes += byte_size + for item in items: + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. + if item.ack_id not in self.managed_ack_ids: + self.managed_ack_ids.add(item.ack_id) + self._bytes += item.byte_size + else: + _LOGGER.debug( + 'Message %s is already lease managed', item.ack_id) # Sanity check: Do we have too many things in our inventory? # If we do, we need to stop the stream. @@ -351,28 +388,32 @@ def maintain_leases(self): _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) time.sleep(snooze) - def modify_ack_deadline(self, ack_id, seconds): - """Modify the ack deadline for the given ack_id. + def modify_ack_deadline(self, items): + """Modify the ack deadline for the given messages. Args: - ack_id (str): The ack ID - seconds (int): The number of seconds to set the new deadline to. + items(Sequence[ModAckRequest]): The items to modify. """ + ack_ids = [item.ack_id for item in items] + seconds = [item.seconds for item in items] + request = types.StreamingPullRequest( - modify_deadline_ack_ids=[ack_id], - modify_deadline_seconds=[seconds], + modify_deadline_ack_ids=ack_ids, + modify_deadline_seconds=seconds, ) self._consumer.send_request(request) - def nack(self, ack_id, byte_size=None): - """Explicitly deny receipt of a message. + def nack(self, items): + """Explicitly deny receipt of messages. Args: - ack_id (str): The ack ID. - byte_size (int): The size of the PubSub message, in bytes. + items(Sequence[NackRequest]): The items to deny. 
""" - self.modify_ack_deadline(ack_id=ack_id, seconds=0) - self.drop(ack_id=ack_id, byte_size=byte_size) + self.modify_ack_deadline([ + ModAckRequest(ack_id=item.ack_id, seconds=0) + for item in items]) + self.drop( + [DropRequest(*item) for item in items]) @abc.abstractmethod def close(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index e3f6428c1b09..b5141ac4d260 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import collections from concurrent import futures import logging import sys @@ -30,6 +31,13 @@ _LOGGER = logging.getLogger(__name__) _CALLBACK_WORKER_NAME = 'Thread-Consumer-CallbackRequestsWorker' +_VALID_ACTIONS = frozenset([ + 'ack', + 'drop', + 'lease', + 'modify_ack_deadline', + 'nack', +]) def _do_nothing_callback(message): @@ -185,6 +193,8 @@ def _start_dispatch(self): dispatch_worker = _helper_threads.QueueCallbackWorker( self._request_queue, self.dispatch_callback, + max_items=self.flow_control.max_request_batch_size, + max_latency=self.flow_control.max_request_batch_latency ) # Create and start the helper thread. thread = threading.Thread( @@ -261,7 +271,7 @@ def open(self, callback): # Return the future. return self._future - def dispatch_callback(self, action, kwargs): + def dispatch_callback(self, items): """Map the callback request to the appropriate gRPC request. Args: @@ -273,21 +283,26 @@ def dispatch_callback(self, action, kwargs): ValueError: If ``action`` isn't one of the expected actions "ack", "drop", "lease", "modify_ack_deadline" or "nack". 
""" - if action == 'ack': - self.ack(**kwargs) - elif action == 'drop': - self.drop(**kwargs) - elif action == 'lease': - self.lease(**kwargs) - elif action == 'modify_ack_deadline': - self.modify_ack_deadline(**kwargs) - elif action == 'nack': - self.nack(**kwargs) - else: - raise ValueError( - 'Unexpected action', action, - 'Must be one of "ack", "drop", "lease", ' - '"modify_ack_deadline" or "nack".') + batched_commands = collections.defaultdict(list) + + for item in items: + batched_commands[item.__class__].append(item) + + _LOGGER.debug('Handling %d batched requests', len(items)) + + if batched_commands[base.LeaseRequest]: + self.lease(batched_commands.pop(base.LeaseRequest)) + if batched_commands[base.ModAckRequest]: + self.modify_ack_deadline( + batched_commands.pop(base.ModAckRequest)) + # Note: Drop and ack *must* be after lease. It's possible to get both + # the lease the and ack/drop request in the same batch. + if batched_commands[base.AckRequest]: + self.ack(batched_commands.pop(base.AckRequest)) + if batched_commands[base.NackRequest]: + self.nack(batched_commands.pop(base.NackRequest)) + if batched_commands[base.DropRequest]: + self.drop(batched_commands.pop(base.DropRequest)) def on_exception(self, exception): """Handle the exception. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 0740cdc4df00..29ac93f9e58d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -53,13 +53,16 @@ # The defaults should be fine for most use cases. 
FlowControl = collections.namedtuple( 'FlowControl', - ['max_bytes', 'max_messages', 'resume_threshold', 'max_requests'], + ['max_bytes', 'max_messages', 'resume_threshold', 'max_requests', + 'max_request_batch_size', 'max_request_batch_latency'], ) FlowControl.__new__.__defaults__ = ( psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM float('inf'), # max_messages: no limit 0.8, # resume_threshold: 80% 100, # max_requests: 100 + 100, # max_request_batch_size: 100 + 0.01, # max_request_batch_latency: 0.01s ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index fdef24409837..8bdafeed5b6d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -26,13 +26,80 @@ def test_queue_callback_worker(): # Set up an appropriate mock for the queue, and call the queue callback # thread. with mock.patch.object(queue.Queue, 'get') as get: - item1 = ('action', mock.sentinel.A) - get.side_effect = (item1, _helper_threads.STOP) + get.side_effect = ( + mock.sentinel.A, + _helper_threads.STOP, + queue.Empty()) qct() # Assert that we got the expected calls. - assert get.call_count == 2 - callback.assert_called_once_with('action', mock.sentinel.A) + assert get.call_count == 3 + callback.assert_called_once_with([mock.sentinel.A]) + + +def test_queue_callback_worker_stop_with_extra_items(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = _helper_threads.QueueCallbackWorker(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = ( + mock.sentinel.A, + _helper_threads.STOP, + mock.sentinel.B, + queue.Empty()) + qct() + + # Assert that we got the expected calls. 
+ assert get.call_count == 4 + callback.assert_called_once_with([mock.sentinel.A]) + + +def test_queue_callback_worker_get_many(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = _helper_threads.QueueCallbackWorker(queue_, callback) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = ( + mock.sentinel.A, + queue.Empty(), + mock.sentinel.B, + _helper_threads.STOP, + queue.Empty()) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 5 + callback.assert_has_calls([ + mock.call([(mock.sentinel.A)]), + mock.call([(mock.sentinel.B)])]) + + +def test_queue_callback_worker_max_items(): + queue_ = queue.Queue() + callback = mock.Mock(spec=()) + qct = _helper_threads.QueueCallbackWorker(queue_, callback, max_items=1) + + # Set up an appropriate mock for the queue, and call the queue callback + # thread. + with mock.patch.object(queue.Queue, 'get') as get: + get.side_effect = ( + mock.sentinel.A, + mock.sentinel.B, + _helper_threads.STOP, + queue.Empty()) + qct() + + # Assert that we got the expected calls. + assert get.call_count == 3 + callback.assert_has_calls([ + mock.call([(mock.sentinel.A)]), + mock.call([(mock.sentinel.B)])]) def test_queue_callback_worker_exception(): @@ -43,10 +110,12 @@ def test_queue_callback_worker_exception(): # Set up an appropriate mock for the queue, and call the queue callback # thread. with mock.patch.object(queue.Queue, 'get') as get: - item1 = ('action', mock.sentinel.A) - get.side_effect = (item1, _helper_threads.STOP) + get.side_effect = ( + mock.sentinel.A, + _helper_threads.STOP, + queue.Empty()) qct() # Assert that we got the expected calls. 
- assert get.call_count == 2 - callback.assert_called_once_with('action', mock.sentinel.A) + assert get.call_count == 3 + callback.assert_called_once_with([mock.sentinel.A]) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index df82fec59725..1587c0c1866a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -19,6 +19,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber.policy import base def create_message(data, ack_id='ACKID', **attrs): @@ -50,56 +51,83 @@ def test_publish_time(): assert msg.publish_time == types.Timestamp(seconds=1335020400 - 86400) +def check_call_types(mock, *args, **kwargs): + """Checks a mock's call types. + + Args: + mock: The mock to check. + args: The types of the positional arguments. + kwargs: The names of the keyword args to check and their respective + types. + + Raises: + AssertionError: if any of the types don't match, or if the number of + arguments does not match. 
+ """ + for call in mock.mock_calls: + _, call_args, call_kwargs = call + assert len(call_args) == len(args) + for n, argtype in enumerate(args): + assert isinstance(call_args[n], argtype) + for argname, argtype in kwargs: + assert argname in call_kwargs + assert isinstance(call_kwargs[argname], argtype) + + def test_ack(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: - with mock.patch.object(message.Message, 'drop') as drop: - msg.ack() - put.assert_called_once_with(('ack', { - 'ack_id': 'bogus_ack_id', - 'byte_size': 25, - 'time_to_ack': mock.ANY, - })) + msg.ack() + put.assert_called_once_with(base.AckRequest( + ack_id='bogus_ack_id', + byte_size=25, + time_to_ack=mock.ANY, + )) + check_call_types(put, base.AckRequest) def test_drop(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.drop() - put.assert_called_once_with(('drop', { - 'ack_id': 'bogus_ack_id', - 'byte_size': 25, - })) + put.assert_called_once_with(base.DropRequest( + ack_id='bogus_ack_id', + byte_size=25, + )) + check_call_types(put, base.DropRequest) def test_lease(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.lease() - put.assert_called_once_with(('lease', { - 'ack_id': 'bogus_ack_id', - 'byte_size': 25, - })) + put.assert_called_once_with(base.LeaseRequest( + ack_id='bogus_ack_id', + byte_size=25, + )) + check_call_types(put, base.LeaseRequest) def test_modify_ack_deadline(): - msg = create_message(b'foo', ack_id='bogus_id') + msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.modify_ack_deadline(60) - put.assert_called_once_with(('modify_ack_deadline', { - 'ack_id': 'bogus_id', - 'seconds': 60, - })) + put.assert_called_once_with(base.ModAckRequest( + ack_id='bogus_ack_id', + seconds=60, + )) + check_call_types(put, 
base.ModAckRequest) def test_nack(): - msg = create_message(b'foo', ack_id='bogus_id') + msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.nack() - put.assert_called_once_with(('nack', { - 'ack_id': 'bogus_id', - 'byte_size': 25, - })) + put.assert_called_once_with(base.NackRequest( + ack_id='bogus_ack_id', + byte_size=25, + )) + check_call_types(put, base.NackRequest) def test_repr(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index a3d0f55caee4..8b8d9d87d9b6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -87,7 +87,9 @@ def test_ack(): policy = create_policy() policy._consumer._stopped.clear() with mock.patch.object(policy._consumer, 'send_request') as send_request: - policy.ack('ack_id_string', 20) + policy.ack([ + base.AckRequest( + ack_id='ack_id_string', time_to_ack=20, byte_size=0)]) send_request.assert_called_once_with(types.StreamingPullRequest( ack_ids=['ack_id_string'], )) @@ -99,7 +101,8 @@ def test_ack_no_time(): policy = create_policy() policy._consumer._stopped.clear() with mock.patch.object(policy._consumer, 'send_request') as send_request: - policy.ack('ack_id_string') + policy.ack([base.AckRequest( + 'ack_id_string', time_to_ack=None, byte_size=0)]) send_request.assert_called_once_with(types.StreamingPullRequest( ack_ids=['ack_id_string'], )) @@ -112,7 +115,7 @@ def test_ack_paused(): consumer._stopped.set() assert consumer.paused is True - policy.ack('ack_id_string') + policy.ack([base.AckRequest('ack_id_string', 0, 0)]) assert consumer.paused is False assert 'ack_id_string' in policy._ack_on_resume @@ -129,12 +132,12 @@ def test_drop(): policy = create_policy() policy.managed_ack_ids.add('ack_id_string') policy._bytes 
= 20 - policy.drop('ack_id_string', 20) + policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) assert len(policy.managed_ack_ids) == 0 assert policy._bytes == 0 # Do this again to establish idempotency. - policy.drop('ack_id_string', 20) + policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) assert len(policy.managed_ack_ids) == 0 assert policy._bytes == 0 @@ -144,7 +147,7 @@ def test_drop_unexpected_negative(_LOGGER): policy = create_policy() policy.managed_ack_ids.add('ack_id_string') policy._bytes = 0 - policy.drop('ack_id_string', 20) + policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) assert len(policy.managed_ack_ids) == 0 assert policy._bytes == 0 _LOGGER.debug.assert_called_once_with( @@ -164,7 +167,8 @@ def test_drop_below_threshold(): consumer = policy._consumer assert consumer.paused is True - policy.drop(ack_id='ack_id_string', byte_size=num_bytes) + policy.drop([ + base.DropRequest(ack_id='ack_id_string', byte_size=num_bytes)]) assert consumer.paused is False @@ -200,16 +204,16 @@ def test_load_w_lease(): with mock.patch.object(consumer, 'pause') as pause: # This should mean that our messages count is at 10%, and our bytes # are at 15%; the ._load property should return the higher (0.15). - policy.lease(ack_id='one', byte_size=150) + policy.lease([base.LeaseRequest(ack_id='one', byte_size=150)]) assert policy._load == 0.15 pause.assert_not_called() # After this message is added, the messages should be higher at 20% # (versus 16% for bytes). - policy.lease(ack_id='two', byte_size=10) + policy.lease([base.LeaseRequest(ack_id='two', byte_size=10)]) assert policy._load == 0.2 pause.assert_not_called() # Returning a number above 100% is fine. 
- policy.lease(ack_id='three', byte_size=1000) + policy.lease([base.LeaseRequest(ack_id='three', byte_size=1000)]) assert policy._load == 1.16 pause.assert_called_once_with() @@ -226,7 +230,6 @@ def test_load_w_requests(): assert policy._load == 0 pending_requests.return_value = 100 - print(consumer.pending_requests) assert policy._load == 1 # If bytes count is higher, it should return that. @@ -237,7 +240,8 @@ def test_load_w_requests(): def test_modify_ack_deadline(): policy = create_policy() with mock.patch.object(policy._consumer, 'send_request') as send_request: - policy.modify_ack_deadline('ack_id_string', 60) + policy.modify_ack_deadline([ + base.ModAckRequest(ack_id='ack_id_string', seconds=60)]) send_request.assert_called_once_with(types.StreamingPullRequest( modify_deadline_ack_ids=['ack_id_string'], modify_deadline_seconds=[60], @@ -253,7 +257,7 @@ def test_maintain_leases_inactive_consumer(): def test_maintain_leases_ack_ids(): policy = create_policy() policy._consumer._stopped.clear() - policy.lease('my ack id', 50) + policy.lease([base.LeaseRequest(ack_id='my ack id', byte_size=50)]) # Mock the sleep object. with mock.patch.object(time, 'sleep', autospec=True) as sleep: @@ -288,12 +292,12 @@ def trigger_inactive(seconds): def test_lease(): policy = create_policy() - policy.lease(ack_id='ack_id_string', byte_size=20) + policy.lease([base.LeaseRequest(ack_id='ack_id_string', byte_size=20)]) assert len(policy.managed_ack_ids) == 1 assert policy._bytes == 20 # Do this again to prove idempotency. 
- policy.lease(ack_id='ack_id_string', byte_size=20) + policy.lease([base.LeaseRequest(ack_id='ack_id_string', byte_size=20)]) assert len(policy.managed_ack_ids) == 1 assert policy._bytes == 20 @@ -304,9 +308,9 @@ def test_lease_above_threshold(): consumer = policy._consumer with mock.patch.object(consumer, 'pause') as pause: - policy.lease(ack_id='first_ack_id', byte_size=20) + policy.lease([base.LeaseRequest(ack_id='first_ack_id', byte_size=20)]) pause.assert_not_called() - policy.lease(ack_id='second_ack_id', byte_size=25) + policy.lease([base.LeaseRequest(ack_id='second_ack_id', byte_size=25)]) pause.assert_called_once_with() @@ -314,6 +318,9 @@ def test_nack(): policy = create_policy() with mock.patch.object(policy, 'modify_ack_deadline') as mad: with mock.patch.object(policy, 'drop') as drop: - policy.nack(ack_id='ack_id_string', byte_size=10) - drop.assert_called_once_with(ack_id='ack_id_string', byte_size=10) - mad.assert_called_once_with(ack_id='ack_id_string', seconds=0) + items = [base.NackRequest(ack_id='ack_id_string', byte_size=10)] + policy.nack(items) + drop.assert_called_once_with( + [base.DropRequest(ack_id='ack_id_string', byte_size=10)]) + mad.assert_called_once_with( + [base.ModAckRequest(ack_id='ack_id_string', seconds=0)]) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 2f9286c9d5ec..0cca27397ff2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -27,6 +27,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber.futures import Future +from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.policy import thread @@ -138,30 +139,19 @@ def 
test_open_already_open(): assert exc_info.value.args == ('This policy has already been opened.',) -def test_dispatch_callback_valid_actions(): +@pytest.mark.parametrize('item,method', [ + (base.AckRequest(0, 0, 0), 'ack'), + (base.DropRequest(0, 0), 'drop'), + (base.LeaseRequest(0, 0), 'lease'), + (base.ModAckRequest(0, 0), 'modify_ack_deadline'), + (base.NackRequest(0, 0), 'nack') +]) +def test_dispatch_callback_valid(item, method): policy = create_policy() - kwargs = {'foo': 10, 'bar': 13.37} - actions = ( - 'ack', - 'drop', - 'lease', - 'modify_ack_deadline', - 'nack', - ) - for action in actions: - with mock.patch.object(policy, action) as mocked: - policy.dispatch_callback(action, kwargs) - mocked.assert_called_once_with(**kwargs) - - -def test_dispatch_callback_invalid_action(): - policy = create_policy() - with pytest.raises(ValueError) as exc_info: - policy.dispatch_callback('gecko', {}) - - assert len(exc_info.value.args) == 3 - assert exc_info.value.args[0] == 'Unexpected action' - assert exc_info.value.args[1] == 'gecko' + with mock.patch.object(policy, method) as mocked: + items = [item] + policy.dispatch_callback(items) + mocked.assert_called_once_with([item]) def test_on_exception_deadline_exceeded(): From 40dc241ae931f1ef7fc40675b892c6b94356335a Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Feb 2018 13:57:43 -0800 Subject: [PATCH 0218/1197] Release 0.31.0 (#4903) --- packages/google-cloud-pubsub/CHANGELOG.md | 21 +++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 35de9206567a..a38d9dae71ca 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,27 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.31.0 + +### New features + +- Added the ability for subscriber to batch requests. 
(#4895) +- Added pending request backpressure for subscriber. (#4892) + +### Implementation changes + +- Raise `ValueError` when a message is too large for a batch. (#4872) +- Updated the default batch size to 10 MB. (#4857) +- Allow a custom `Event` type in Pub / Sub futures. (#4643) + +### Documentation + +- Clarify that `modify_ack_deadline` resets the deadline. (#4822) + +### Testing + +- Fix unit test for default `max_bytes` value. (#4860) + ## 0.30.1 ### Notable Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index bf24dc2c03cf..f2efa840857b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -59,7 +59,7 @@ setup( name='google-cloud-pubsub', - version='0.30.2.dev1', + version='0.31.0', description='Python Client for Google Cloud Pub/Sub', long_description=README, namespace_packages=[ From f827fb3075a00c93b91274728875fbe2b5749378 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Wed, 21 Feb 2018 13:31:35 -0800 Subject: [PATCH 0219/1197] Add support for streaming pull receipts. (#4878) * Closes (#4325) - PubSub: Implement StreamingPullRequest modify acknowledgement as message receipt * Review Changes * Minor fixups * Minor fixups --- .../cloud/pubsub_v1/subscriber/policy/thread.py | 14 +++++++++++++- .../pubsub_v1/subscriber/test_policy_thread.py | 14 +++++++++++--- 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index b5141ac4d260..3d7df2236d9c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -329,8 +329,20 @@ def on_exception(self, exception): def on_response(self, response): """Process all received Pub/Sub messages. 
- For each message, schedule a callback with the executor. + For each message, send a modified acknowledgement request to the + server. This prevents expiration of the message due to buffering by + gRPC or proxy/firewall. This makes the server and client expiration + timer closer to each other thus preventing the message being + redelivered multiple times. + + After the messages have all had their ack deadline updated, execute + the callback for each message using the executor. """ + items = [ + base.ModAckRequest(message.ack_id, self.histogram.percentile(99)) + for message in response.received_messages + ] + self.modify_ack_deadline(items) for msg in response.received_messages: _LOGGER.debug( 'Using %s to process message with ack_id %s.', diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 0cca27397ff2..43d55600b966 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -209,9 +209,17 @@ def test_on_response(): ], ) - # Actually run the method and prove that executor.submit and - # future.add_done_callback were called in the expected way. - policy.on_response(response) + # Actually run the method and prove that modack and executor.submit + # are called in the expected way. 
+ modack_patch = mock.patch.object( + policy, 'modify_ack_deadline', autospec=True) + with modack_patch as modack: + policy.on_response(response) + + modack.assert_called_once_with( + [base.ModAckRequest('fack', 10), + base.ModAckRequest('back', 10)] + ) submit_calls = [m for m in executor.method_calls if m[0] == 'submit'] assert len(submit_calls) == 2 From 3b0df3e74df06a5f76e9f805b0a3c67f8d7e82e1 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 22 Feb 2018 10:28:50 -0800 Subject: [PATCH 0220/1197] Normalize all setup.py files (#4909) --- packages/google-cloud-pubsub/setup.py | 102 +++++++++++++++----------- 1 file changed, 60 insertions(+), 42 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index f2efa840857b..6062accf2f89 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google LLC +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,61 +12,79 @@ # See the License for the specific language governing permissions and # limitations under the License. +import io import os -from setuptools import find_packages -from setuptools import setup +import setuptools -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) +# Package metadata. + +name = 'google-cloud-pubsub' +description = 'Google Cloud Pub/Sub API client library' +version = '0.31.0' +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Stable' +release_status = 'Development Status :: 4 - Beta' +dependencies = [ + 'google-api-core[grpc]<0.2.0dev,>=0.1.3', + 'grpc-google-iam-v1<0.12dev,>=0.11.1', + 'psutil<6.0dev,>=5.2.2', +] +extras = { +} + + +# Setup boilerplate below this line. 
+ +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. +packages = [ + package for package in setuptools.find_packages() + if package.startswith('google')] + +# Determine which namespaces are needed. +namespaces = ['google'] +if 'google.cloud' in packages: + namespaces.append('google.cloud') -with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: - README = file_obj.read() -# NOTE: This is duplicated throughout and we should try to -# consolidate. -SETUP_BASE = { - 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-packages@google.com', - 'scripts': [], - 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', - 'license': 'Apache 2.0', - 'platforms': 'Posix; MacOS X; Windows', - 'include_package_data': True, - 'zip_safe': False, - 'classifiers': [ - 'Development Status :: 4 - Beta', +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author='Google LLC', + author_email='googleapis-packages@google.com', + license='Apache 2.0', + url='https://github.com/GoogleCloudPlatform/google-cloud-python', + classifiers=[ + release_status, 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', + 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Operating System :: OS Independent', 'Topic :: Internet', ], -} - - -REQUIREMENTS = [ - 'google-api-core[grpc] >= 0.1.3, < 0.2.0dev', - 'google-auth >= 1.0.2, < 2.0dev', - 'grpc-google-iam-v1 
>= 0.11.1, < 0.12dev', - 'psutil >= 5.2.2, < 6.0dev', -] - -setup( - name='google-cloud-pubsub', - version='0.31.0', - description='Python Client for Google Cloud Pub/Sub', - long_description=README, - namespace_packages=[ - 'google', - 'google.cloud', - ], - packages=find_packages(exclude=('tests*',)), - install_requires=REQUIREMENTS, - **SETUP_BASE + platforms='Posix; MacOS X; Windows', + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, ) From 4c9fe58b0ed93b2e1dcbb89388c923f14f24ad49 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 22 Feb 2018 16:06:49 -0800 Subject: [PATCH 0221/1197] Release 0.32.0 (#4917) --- packages/google-cloud-pubsub/CHANGELOG.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a38d9dae71ca..19ec55e5c3cb 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.32.0 + +### Implementation changes + +- Added support for streaming pull receipts. 
(#4878) + ## 0.31.0 ### New features From befc8e8f7ed5e0ebc8ef3596bf29fb2a284d20f8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Feb 2018 10:18:50 -0800 Subject: [PATCH 0222/1197] Fix version in pubsub setup.py --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6062accf2f89..05248499e715 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.31.0' +version = '0.32.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 59638a0da841946bb9291ee94383356784a13b74 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 23 Feb 2018 16:34:37 -0800 Subject: [PATCH 0223/1197] Re-enable lint for tests, remove usage of pylint (#4921) --- packages/google-cloud-pubsub/.flake8 | 6 ++++++ .../google/cloud/pubsub_v1/_gapic.py | 4 ++-- packages/google-cloud-pubsub/nox.py | 13 ++----------- .../unit/pubsub_v1/publisher/batch/test_thread.py | 3 ++- .../pubsub_v1/publisher/test_publisher_client.py | 1 - .../unit/pubsub_v1/subscriber/test_consumer.py | 2 -- .../pubsub_v1/subscriber/test_subscriber_client.py | 2 +- 7 files changed, 13 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 25168dc87605..1f44a90f8195 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,5 +1,11 @@ [flake8] exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. 
__pycache__, .git, *.pyc, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index a4a0edea955d..3e24ad757a57 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -39,9 +39,9 @@ def wrap(wrapped_fx): # Okay, we have figured out what kind of method this is; send # down the correct wrapper function. if instance_method: - fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) + fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) - fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) + fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) def actual_decorator(cls): diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 93c5b3344a7a..fa45bb6eb1d8 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -99,18 +99,9 @@ def lint(session): serious code quality issues. """ session.interpreter = 'python3.6' - session.install( - 'flake8', 'pylint==1.7.5', 'gcp-devrel-py-tools', *LOCAL_DEPS) + session.install('flake8') session.install('.') - session.run('flake8', 'google/cloud/pubsub') - session.run( - 'gcp-devrel-py-tools', 'run-pylint', - '--config', 'pylint.config.py', - '--library-filesets', 'google', - '--test-filesets', 'tests', - # Temporarily allow this to fail. 
- success_codes=range(0, 100), - ) + session.run('flake8', 'google', 'tests') @nox.session diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 2c8852576308..1c08e1b3843a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -192,7 +192,8 @@ def test_blocking__commit_wrong_messageid_length(): publish_response = types.PublishResponse(message_ids=['a']) patch = mock.patch.object( type(batch.client.api), 'publish', return_value=publish_response) - with patch as publish: + + with patch: batch._commit() for future in futures: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 311c46a00a56..4fa144aa54b5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -13,7 +13,6 @@ # limitations under the License. from __future__ import absolute_import -import os from google.auth import credentials diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 88f46d2b50b7..7ae9699dd8b4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -13,7 +13,6 @@ # limitations under the License. 
import threading -import types as base_types from google.auth import credentials import mock @@ -24,7 +23,6 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _consumer from google.cloud.pubsub_v1.subscriber import _helper_threads -from google.cloud.pubsub_v1.subscriber.policy import thread def test_send_request(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 1836a44682de..3d4169f7ab28 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -62,5 +62,5 @@ def test_subscribe_with_failed_callback(): client = subscriber.Client(credentials=creds) callback = 'abcdefg' with pytest.raises(TypeError) as exc_info: - subscription = client.subscribe('sub_name_b', callback) + client.subscribe('sub_name_b', callback) assert callback in str(exc_info.value) From b3ead672a12b32c8b3da54127bf7a74a344c6973 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 26 Feb 2018 14:24:04 -0800 Subject: [PATCH 0224/1197] Install local dependencies when running lint (#4936) --- packages/google-cloud-pubsub/nox.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index fa45bb6eb1d8..043431c43256 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -99,7 +99,7 @@ def lint(session): serious code quality issues. 
""" session.interpreter = 'python3.6' - session.install('flake8') + session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google', 'tests') From 74562003121752cb5ce64b20ef8e5b172e625bc5 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 27 Feb 2018 11:17:14 -0800 Subject: [PATCH 0225/1197] Update dependency range for api-core to include v1.0.0 releases (#4944) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 05248499e715..a7fb08f7d5c8 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Stable' release_status = 'Development Status :: 4 - Beta' dependencies = [ - 'google-api-core[grpc]<0.2.0dev,>=0.1.3', + 'google-api-core[grpc]<2.0.0dev,>=0.1.3', 'grpc-google-iam-v1<0.12dev,>=0.11.1', 'psutil<6.0dev,>=5.2.2', ] From 2ccf222fb1858569fe531ba77eca3dfa7f4953b3 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 28 Feb 2018 08:59:09 -0800 Subject: [PATCH 0226/1197] Release pubsub 0.32.1 (#4964) --- packages/google-cloud-pubsub/CHANGELOG.md | 11 +++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 19ec55e5c3cb..6ea8399e2386 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,17 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.32.1 + +### Dependencies + +- Update dependency range for api-core to include v1.0.0 releases (#4944) + +### Testing and internal changes + +- Install local dependencies when running lint (#4936) +- Re-enable lint for tests, remove usage of pylint (#4921) + ## 0.32.0 ### Implementation changes diff --git 
a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index a7fb08f7d5c8..34401b573fd5 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.32.0' +version = '0.32.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From bd0e254b876b5faad63d222ad78695bf13387155 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 12 Mar 2018 14:59:12 -0700 Subject: [PATCH 0227/1197] Nack messages when the subscriber callback errors (#5019) --- .../pubsub_v1/subscriber/policy/thread.py | 40 ++++--- .../subscriber/test_policy_thread.py | 101 +++++++++++++----- 2 files changed, 91 insertions(+), 50 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 3d7df2236d9c..9a16dbe4efc6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -16,6 +16,7 @@ import collections from concurrent import futures +import functools import logging import sys import threading @@ -31,29 +32,25 @@ _LOGGER = logging.getLogger(__name__) _CALLBACK_WORKER_NAME = 'Thread-Consumer-CallbackRequestsWorker' -_VALID_ACTIONS = frozenset([ - 'ack', - 'drop', - 'lease', - 'modify_ack_deadline', - 'nack', -]) -def _do_nothing_callback(message): - """Default callback for messages received by subscriber. - - Does nothing with the message and returns :data:`None`. +def _wrap_callback_errors(callback, message): + """Wraps a user callback so that if an exception occurs the message is + nacked. 
Args: - message (~google.cloud.pubsub_v1.subscriber.message.Message): A - protobuf message returned by the backend and parsed into - our high level message type. - - Returns: - NoneType: Always. + callback (Callable[None, Message]): The user callback. + message (~Message): The Pub/Sub message. """ - return None + try: + callback(message) + except Exception: + # Note: the likelihood of this failing is extremely low. This just adds + # a message to a queue, so if this doesn't work the world is in an + # unrecoverable state and this thread should just bail. + message.nack() + # Re-raise the exception so that the executor can deal with it. + raise class Policy(base.BasePolicy): @@ -85,9 +82,8 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), flow_control=flow_control, subscription=subscription, ) - # Default the callback to a no-op; the **actual** callback is - # provided by ``.open()``. - self._callback = _do_nothing_callback + # The **actual** callback is provided by ``.open()``. + self._callback = None # Create a queue for keeping track of shared state. self._request_queue = self._get_queue(queue) # Also maintain an executor. @@ -262,7 +258,7 @@ def open(self, callback): self._future = Future(policy=self, completed=threading.Event()) # Start the thread to pass the requests. - self._callback = callback + self._callback = functools.partial(_wrap_callback_errors, callback) self._start_dispatch() # Actually start consuming messages. 
self._consumer.start_consuming(self) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py index 43d55600b966..2ee0c3e7ed12 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py @@ -37,11 +37,6 @@ def create_policy(**kwargs): return thread.Policy(client, 'sub_name_c', **kwargs) -def test_init(): - policy = create_policy() - policy._callback(None) - - def test_init_with_executor(): executor = futures.ThreadPoolExecutor(max_workers=25) policy = create_policy(executor=executor, queue=queue.Queue()) @@ -114,10 +109,13 @@ def test_open(): mock.Mock(spec=('name', 'start')), mock.Mock(spec=('name', 'start')), ) + callback = mock.Mock() + with mock.patch.object(threading, 'Thread', side_effect=threads): - policy.open(mock.sentinel.CALLBACK) + policy.open(callback) - assert policy._callback is mock.sentinel.CALLBACK + policy._callback(mock.sentinel.MESSAGE) + callback.assert_called_once_with(mock.sentinel.MESSAGE) assert policy._dispatch_thread is threads[0] threads[0].start.assert_called_once_with() @@ -181,31 +179,35 @@ def test_on_exception_other(): policy.future.result() +def create_and_open_policy(callback, **kwargs): + creds = mock.create_autospec(credentials.Credentials, instance=True) + client = subscriber.Client(credentials=creds) + policy = thread.Policy(client, 'sub_name_c', **kwargs) + + with mock.patch('threading.Thread', autospec=True): + policy.open(callback) + + return policy + + def test_on_response(): - callback = mock.Mock(spec=()) + # Create mock Executor so we can verify calls to executor.submit(). 
+ executor = mock.create_autospec(futures.Executor, instance=True) - # Create mock ThreadPoolExecutor, pass into create_policy(), and verify - # that both executor.submit() and future.add_done_callback are called - # twice. - future = mock.Mock() - attrs = {'submit.return_value': future} - executor = mock.Mock(**attrs) - - # Set up the policy. - policy = create_policy(executor=executor) - policy._callback = callback - - # Set up the messages to send. - messages = ( - types.PubsubMessage(data=b'foo', message_id='1'), - types.PubsubMessage(data=b'bar', message_id='2'), - ) + callback = mock.Mock(spec=()) + policy = create_and_open_policy(callback, executor=executor) - # Set up a valid response. + # Set up the messages. response = types.StreamingPullResponse( received_messages=[ - {'ack_id': 'fack', 'message': messages[0]}, - {'ack_id': 'back', 'message': messages[1]}, + types.ReceivedMessage( + ack_id='fack', + message=types.PubsubMessage(data=b'foo', message_id='1') + ), + types.ReceivedMessage( + ack_id='back', + message=types.PubsubMessage(data=b'bar', message_id='2') + ), ], ) @@ -224,5 +226,48 @@ def test_on_response(): submit_calls = [m for m in executor.method_calls if m[0] == 'submit'] assert len(submit_calls) == 2 for call in submit_calls: - assert call[1][0] == callback + assert call[1][0] == policy._callback assert isinstance(call[1][1], message.Message) + + +def _callback_side_effect(callback, *args, **kwargs): + try: + return callback(*args, **kwargs) + except Exception: + pass + + +def test_on_response_nacks_on_error(): + # Create a callback that always errors. + callback = mock.Mock(spec=(), side_effect=ValueError) + executor = mock.create_autospec(futures.Executor, instance=True) + executor.submit.side_effect = _callback_side_effect + policy = create_and_open_policy(callback, executor=executor) + + # Set up the messages. 
+ message = types.PubsubMessage(data=b'foo', message_id='1') + response = types.StreamingPullResponse( + received_messages=[ + types.ReceivedMessage( + ack_id='fack', + message=message + ), + ], + ) + + # Actually run the method and prove that nack is called because the + # callback errored. + policy.on_response(response) + + # Make sure the callback was executed. + callback.assert_called_once_with(mock.ANY) + + # Process outstanding requests, the callback should've queued a nack + # request. + nack_patch = mock.patch.object( + policy, 'nack', autospec=True) + with nack_patch as nack: + policy.dispatch_callback(policy._request_queue.queue) + + nack.assert_called_once_with([ + base.NackRequest('fack', message.ByteSize())]) From f052e9c814575682a8a7d79c2cd5a9a85cb8077d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 13 Mar 2018 09:14:52 -0700 Subject: [PATCH 0228/1197] Retry subscription stream on InternalServerError, Unknown, and GatewayTimeout (#5021) --- .../google/cloud/pubsub_v1/subscriber/policy/base.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index be098c5eeea8..8844016a35df 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -97,6 +97,9 @@ class BasePolicy(object): _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + exceptions.InternalServerError, + exceptions.Unknown, + exceptions.GatewayTimeout, ) def __init__(self, client, subscription, From 2d2580a8ca01372acf994821aae08755c8eb33dd Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 13 Mar 2018 09:31:32 -0700 Subject: [PATCH 0229/1197] Drop leased messages after flow_control.max_lease_duration has passed. 
(#5020) --- .../cloud/pubsub_v1/subscriber/policy/base.py | 64 +++++++++++-------- .../google/cloud/pubsub_v1/types.py | 4 +- .../pubsub_v1/subscriber/test_policy_base.py | 64 +++++++++++++++---- 3 files changed, 92 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 8844016a35df..f71a72525088 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -59,6 +59,10 @@ ['ack_id', 'byte_size'], ) +_LeasedMessage = collections.namedtuple( + '_LeasedMessage', + ['added_time', 'size']) + @six.add_metaclass(abc.ABCMeta) class BasePolicy(object): @@ -92,8 +96,6 @@ class BasePolicy(object): your own dictionary class, ensure this assumption holds or you will get strange behavior. """ - - _managed_ack_ids = None _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, @@ -112,6 +114,10 @@ def __init__(self, client, subscription, self._future = None self.flow_control = flow_control self.histogram = _histogram.Histogram(data=histogram_data) + """.Histogram: the histogram tracking ack latency.""" + self.leased_messages = {} + """dict[str, float]: A mapping of ack IDs to the local time when the + ack ID was initially leased in seconds since the epoch.""" # These are for internal flow control tracking. # They should not need to be used by subclasses. @@ -147,17 +153,6 @@ def future(self): """ return self._future - @property - def managed_ack_ids(self): - """Return the ack IDs currently being managed by the policy. - - Returns: - set: The set of ack IDs being managed. - """ - if self._managed_ack_ids is None: - self._managed_ack_ids = set() - return self._managed_ack_ids - @property def subscription(self): """Return the subscription. 
@@ -185,7 +180,7 @@ def _load(self): float: The load value. """ return max([ - len(self.managed_ack_ids) / self.flow_control.max_messages, + len(self.leased_messages) / self.flow_control.max_messages, self._bytes / self.flow_control.max_bytes, self._consumer.pending_requests / self.flow_control.max_requests ]) @@ -255,11 +250,10 @@ def drop(self, items): # Remove the ack ID from lease management, and decrement the # byte counter. for item in items: - if item.ack_id in self.managed_ack_ids: - self.managed_ack_ids.remove(item.ack_id) + if self.leased_messages.pop(item.ack_id, None) is not None: self._bytes -= item.byte_size else: - _LOGGER.debug('Item %s wasn\'t managed', item.ack_id) + _LOGGER.debug('Item %s was not managed.', item.ack_id) if self._bytes < 0: _LOGGER.debug( @@ -293,7 +287,7 @@ def get_initial_request(self, ack_queue=False): # Any ack IDs that are under lease management and not being acked # need to have their deadline extended immediately. ack_ids = set() - lease_ids = self.managed_ack_ids + lease_ids = set(self.leased_messages.keys()) if ack_queue: ack_ids = self._ack_on_resume lease_ids = lease_ids.difference(ack_ids) @@ -324,8 +318,10 @@ def lease(self, items): for item in items: # Add the ack ID to the set of managed ack IDs, and increment # the size counter. - if item.ack_id not in self.managed_ack_ids: - self.managed_ack_ids.add(item.ack_id) + if item.ack_id not in self.leased_messages: + self.leased_messages[item.ack_id] = _LeasedMessage( + added_time=time.time(), + size=item.byte_size) self._bytes += item.byte_size else: _LOGGER.debug( @@ -364,22 +360,36 @@ def maintain_leases(self): p99 = self.histogram.percentile(99) _LOGGER.debug('The current p99 value is %d seconds.', p99) + # Drop any leases that are well beyond max lease time. This + # ensures that in the event of a badly behaving actor, we can + # drop messages and allow Pub/Sub to resend them. 
+ cutoff = time.time() - self.flow_control.max_lease_duration + to_drop = [ + DropRequest(ack_id, item.size) + for ack_id, item + in six.iteritems(self.leased_messages) + if item.added_time < cutoff] + + if to_drop: + _LOGGER.warning( + 'Dropping %s items because they were leased too long.', + len(to_drop)) + self.drop(to_drop) + # Create a streaming pull request. # We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. - ack_ids = list(self.managed_ack_ids) - _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) + ack_ids = list(self.leased_messages.keys()) if ack_ids: - request = types.StreamingPullRequest( - modify_deadline_ack_ids=ack_ids, - modify_deadline_seconds=[p99] * len(ack_ids), - ) + _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) + # NOTE: This may not work as expected if ``consumer.active`` # has changed since we checked it. An implementation # without any sort of race condition would require a # way for ``send_request`` to fail when the consumer # is inactive. - self._consumer.send_request(request) + self.modify_ack_deadline([ + ModAckRequest(ack_id, p99) for ack_id in ack_ids]) # Now wait an appropriate period of time and do this again. 
# diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 29ac93f9e58d..8a8de3852b3c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -54,7 +54,8 @@ FlowControl = collections.namedtuple( 'FlowControl', ['max_bytes', 'max_messages', 'resume_threshold', 'max_requests', - 'max_request_batch_size', 'max_request_batch_latency'], + 'max_request_batch_size', 'max_request_batch_latency', + 'max_lease_duration'], ) FlowControl.__new__.__defaults__ = ( psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM @@ -63,6 +64,7 @@ 100, # max_requests: 100 100, # max_request_batch_size: 100 0.01, # max_request_batch_latency: 0.01s + 2 * 60 * 60, # max_lease_duration: 2 hours. ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index 8b8d9d87d9b6..c43940bf0f92 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -67,15 +67,15 @@ def test_get_initial_request(): assert initial_request.stream_ack_deadline_seconds == 10 -def test_managed_ack_ids(): +def test_leased_messagess(): policy = create_policy() # Ensure we always get a set back, even if the property is not yet set. - managed_ack_ids = policy.managed_ack_ids - assert isinstance(managed_ack_ids, set) + leased_messages = policy.leased_messages + assert isinstance(leased_messages, dict) # Ensure that multiple calls give the same actual object back. 
- assert managed_ack_ids is policy.managed_ack_ids + assert leased_messages is policy.leased_messages def test_subscription(): @@ -130,25 +130,25 @@ def test_call_rpc(): def test_drop(): policy = create_policy() - policy.managed_ack_ids.add('ack_id_string') + policy.leased_messages['ack_id_string'] = 0 policy._bytes = 20 policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.managed_ack_ids) == 0 + assert len(policy.leased_messages) == 0 assert policy._bytes == 0 # Do this again to establish idempotency. policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.managed_ack_ids) == 0 + assert len(policy.leased_messages) == 0 assert policy._bytes == 0 @mock.patch.object(base, '_LOGGER', spec=logging.Logger) def test_drop_unexpected_negative(_LOGGER): policy = create_policy() - policy.managed_ack_ids.add('ack_id_string') + policy.leased_messages['ack_id_string'] = 0 policy._bytes = 0 policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.managed_ack_ids) == 0 + assert len(policy.leased_messages) == 0 assert policy._bytes == 0 _LOGGER.debug.assert_called_once_with( 'Bytes was unexpectedly negative: %d', -20) @@ -161,7 +161,7 @@ def test_drop_below_threshold(): the flow control thresholds, it should resume. 
""" policy = create_policy() - policy.managed_ack_ids.add('ack_id_string') + policy.leased_messages['ack_id_string'] = 0 num_bytes = 20 policy._bytes = num_bytes consumer = policy._consumer @@ -290,15 +290,55 @@ def trigger_inactive(seconds): sleep.assert_called() +@mock.patch.object(time, 'time', autospec=True) +@mock.patch.object(time, 'sleep', autospec=True) +def test_maintain_leases_outdated_items(sleep, time): + policy = create_policy() + policy._consumer._stopped.clear() + + # Add these items at the beginning of the timeline + time.return_value = 0 + policy.lease([ + base.LeaseRequest(ack_id='ack1', byte_size=50)]) + + # Add another item at towards end of the timeline + time.return_value = policy.flow_control.max_lease_duration - 1 + policy.lease([ + base.LeaseRequest(ack_id='ack2', byte_size=50)]) + + # Now make sure time reports that we are at the end of our timeline. + time.return_value = policy.flow_control.max_lease_duration + 1 + + # Mock the sleep object. + def trigger_inactive(seconds): + assert 0 < seconds < 10 + policy._consumer._stopped.set() + + sleep.side_effect = trigger_inactive + + # Also mock the consumer, which sends the request. + with mock.patch.object(policy._consumer, 'send_request') as send: + policy.maintain_leases() + + # Only ack2 should be renewed. ack1 should've been dropped + send.assert_called_once_with(types.StreamingPullRequest( + modify_deadline_ack_ids=['ack2'], + modify_deadline_seconds=[10], + )) + assert len(policy.leased_messages) == 1 + + sleep.assert_called() + + def test_lease(): policy = create_policy() policy.lease([base.LeaseRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.managed_ack_ids) == 1 + assert len(policy.leased_messages) == 1 assert policy._bytes == 20 # Do this again to prove idempotency. 
policy.lease([base.LeaseRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.managed_ack_ids) == 1 + assert len(policy.leased_messages) == 1 assert policy._bytes == 20 From c701c0f3841c67e62fe355f3742b795be4e6bf04 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 14 Mar 2018 11:01:24 -0700 Subject: [PATCH 0230/1197] Fix test that checks for retryable exceptions (#5034) --- .../tests/unit/pubsub_v1/subscriber/test_policy_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py index c43940bf0f92..42a062eae168 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py @@ -47,7 +47,8 @@ def test_idempotent_retry_codes(): exceptions.exception_class_for_grpc_status(status_code) for status_code in status_codes ) - assert base.BasePolicy._RETRYABLE_STREAM_ERRORS == expected + assert set(expected).issubset( + set(base.BasePolicy._RETRYABLE_STREAM_ERRORS)) def test_ack_deadline(): From 5b20a66988eb2ebadd77016fbce2e79cf6b40ae8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 14 Mar 2018 11:08:10 -0700 Subject: [PATCH 0231/1197] Avoid race condition in maintain_leases by copying leased_messages (#5035) --- .../google/cloud/pubsub_v1/subscriber/policy/base.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index f71a72525088..2e9ad1a03545 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -18,6 +18,7 @@ import abc import collections 
+import copy import logging import random import time @@ -360,6 +361,11 @@ def maintain_leases(self): p99 = self.histogram.percentile(99) _LOGGER.debug('The current p99 value is %d seconds.', p99) + # Make a copy of the leased messages. This is needed because it's + # possible for another thread to modify the dictionary while + # we're iterating over it. + leased_messages = copy.copy(self.leased_messages) + # Drop any leases that are well beyond max lease time. This # ensures that in the event of a badly behaving actor, we can # drop messages and allow Pub/Sub to resend them. @@ -367,7 +373,7 @@ def maintain_leases(self): to_drop = [ DropRequest(ack_id, item.size) for ack_id, item - in six.iteritems(self.leased_messages) + in six.iteritems(leased_messages) if item.added_time < cutoff] if to_drop: @@ -379,7 +385,7 @@ def maintain_leases(self): # Create a streaming pull request. # We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. 
- ack_ids = list(self.leased_messages.keys()) + ack_ids = list(leased_messages.keys()) if ack_ids: _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) From defd7a1b53383868b6845f2f61e6a6b3f4c993de Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 15 Mar 2018 08:52:22 -0700 Subject: [PATCH 0232/1197] Fix bad trove classifier --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 34401b573fd5..b6787b309a60 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -26,7 +26,7 @@ # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Stable' +# 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 4 - Beta' dependencies = [ 'google-api-core[grpc]<2.0.0dev,>=0.1.3', From 2afbf9cb228db5f8def620ad5c29f732cef9a154 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 15 Mar 2018 11:31:09 -0700 Subject: [PATCH 0233/1197] Fix mantain leases to not modack messages it just dropped (#5045) --- .../google/cloud/pubsub_v1/subscriber/policy/base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 2e9ad1a03545..3bd75356bb3c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -382,6 +382,11 @@ def maintain_leases(self): len(to_drop)) self.drop(to_drop) + # Remove dropped items from our copy of the leased messages (they + # have already been removed from the real one by self.drop). + for item in to_drop: + leased_messages.pop(item.ack_id) + # Create a streaming pull request. 
# We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. From c49ebe95e232eaa4de9a656daaeccd5521222d13 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Fri, 16 Mar 2018 12:04:27 -0700 Subject: [PATCH 0234/1197] pubsub nox.py cleanup (#5056) --- packages/google-cloud-pubsub/nox.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 043431c43256..d544f2eef081 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -88,7 +88,12 @@ def system(session, py): session.install('.') # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/system.py') + session.run( + 'py.test', + '--quiet', + 'tests/system.py', + *session.posargs + ) @nox.session From 5f02058d8068d900126a5659e1fd6bbe55cd7dbc Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Mar 2018 10:21:54 -0700 Subject: [PATCH 0235/1197] Use the rpc's status to determine when to exit the request generator thread (#5054) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 220 ++++++------- .../cloud/pubsub_v1/subscriber/policy/base.py | 25 +- packages/google-cloud-pubsub/tests/system.py | 4 + .../pubsub_v1/subscriber/test_consumer.py | 288 +++++------------- 4 files changed, 181 insertions(+), 356 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index a95efadf4d72..7662546588b6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -133,6 +133,95 @@ _BIDIRECTIONAL_CONSUMER_NAME = 'Thread-ConsumeBidirectionalStream' +class _RequestQueueGenerator(object): + """A helper for sending requests to a gRPC stream from a Queue. 
+ + This generator takes requests off a given queue and yields them to gRPC. + + This helper is useful when you have an indeterminate, indefinite, or + otherwise open-ended set of requests to send through a request-streaming + (or bidirectional) RPC. + + The reason this is necessary is because gRPC takes an iterator as the + request for request-streaming RPCs. gRPC consumes this iterator in another + thread to allow it to block while generating requests for the stream. + However, if the generator blocks indefinitely gRPC will not be able to + clean up the thread as it'll be blocked on `next(iterator)` and not be able + to check the channel status to stop iterating. This helper mitigates that + by waiting on the queue with a timeout and checking the RPC state before + yielding. + + Finally, it allows for retrying without swapping queues because if it does + pull an item off the queue, it'll immediately put it back and then exit. + This is necessary because yielding the item in this case will cause gRPC + to discard it. In practice, this means that the order of messages is not + guaranteed. If such a thing is necessary it would be easy to use a priority + queue. + + Example:: + + requests = request_queue_generator(q) + rpc = stub.StreamingRequest(iter(requests)) + requests.rpc = rpc + + for response in rpc: + print(response) + q.put(...) + + Args: + queue (queue.Queue): The request queue. + period (float): The number of seconds to wait for items from the queue + before checking if the RPC is cancelled. In practice, this + determines the maximum amount of time the request consumption + thread will live after the RPC is cancelled. + initial_request (protobuf.Message): The initial request to yield. This + is done independently of the request queue to allow for easily + restarting streams that require some initial configuration request. 
+ """ + def __init__(self, queue, period=1, initial_request=None): + self._queue = queue + self._period = period + self._initial_request = initial_request + self.rpc = None + + def _should_exit(self): + # Note: there is a possibility that this starts *before* the rpc + # property is set. So we have to check if self.rpc is set before seeing + # if it's active. + if self.rpc is not None and not self.rpc.is_active(): + return True + else: + return False + + def __iter__(self): + if self._initial_request is not None: + yield self._initial_request + + while True: + try: + item = self._queue.get(timeout=self._period) + except queue.Empty: + if self._should_exit(): + _LOGGER.debug( + 'Empty queue and inactive RPC, exiting request ' + 'generator.') + return + else: + # RPC is still active, keep waiting for queue items. + continue + + if self._should_exit(): + # We have an item, but the RPC is closed. We should put the + # item back on the queue so that the next RPC can consume it. + self._queue.put(item) + _LOGGER.debug( + 'Inactive RPC, replacing item on queue and exiting ' + 'request generator.') + return + + yield item + + class Consumer(object): """Bi-directional streaming RPC consumer. @@ -180,7 +269,6 @@ def __init__(self): self._request_queue = queue.Queue() self._stopped = threading.Event() self._can_consume = threading.Event() - self._put_lock = threading.Lock() self._consumer_thread = None @property @@ -203,8 +291,7 @@ def send_request(self, request): Args: request (Any): The request protobuf. """ - with self._put_lock: - self._request_queue.put(request) + self._request_queue.put(request) @property def pending_requests(self): @@ -214,117 +301,6 @@ def pending_requests(self): are too many outstanding requests.""" return self._request_queue.qsize() - def _request_generator_thread(self, policy): - """Generate requests for the stream. - - This blocks for new requests on the request queue and yields them to - gRPC. 
- - Args: - policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy - that owns this consumer. A policy is used to create the - initial request used to open the streaming pull bidirectional - stream. - - Yields: - google.cloud.pubsub_v1.types.StreamingPullRequest: Requests - """ - # First, yield the initial request. This occurs on every new - # connection, fundamentally including a resumed connection. - initial_request = policy.get_initial_request(ack_queue=True) - _LOGGER.debug('Sending initial request:\n%r', initial_request) - yield initial_request - - # Now yield each of the items on the request queue, and block if there - # are none. This can and must block to keep the stream open. - while True: - request = self._request_queue.get() - if request == _helper_threads.STOP: - _LOGGER.debug('Request generator signaled to stop.') - break - - _LOGGER.debug('Sending request on stream') - yield request - policy.on_request(request) - - def _stop_request_generator(self, request_generator, response_generator): - """Ensure a request generator is closed. - - This **must** be done when recovering from a retry-able exception. - If not, then an inactive request generator (i.e. not attached to any - actual RPC) will be trying to access the same request queue as the - active request generator. - - In addition, we want the gRPC thread consuming to cleanly exit so - that system resources are not wasted. - - Args: - request_generator (Generator): A streaming pull request generator - returned from :meth:`_request_generator_thread`. - response_generator (grpc.Future): The gRPC bidirectional stream - object that **was** consuming the ``request_generator``. (It - will actually spawn a thread to consume the requests, but - that thread will stop once the rendezvous has a status code - set.) - - Returns: - bool: Indicates if the generator was successfully stopped. Will - be :data:`True` unless the queue is not empty and the generator - is running. 
- """ - if not response_generator.done(): - _LOGGER.debug( - 'Response generator must be done before stopping ' - 'request generator.') - return False - - with self._put_lock: - try: - request_generator.close() - except ValueError: - # Should be ``ValueError('generator already executing')`` - if not self._request_queue.empty(): - # This case may be a false negative in **very** rare - # cases. We **assume** that the generator can't be - # ``close()``-ed because it is blocking on ``get()``. - # It's **very unlikely** that the generator was not - # blocking, but instead **in between** the blocking - # ``get()`` and the next ``yield`` / ``break``. However, - # for practical purposes, we only need to stop the request - # generator if the connection has timed out due to - # inactivity, which indicates an empty queue. - _LOGGER.debug( - 'Request generator could not be closed but ' - 'request queue is not empty.') - return False - # At this point we know: - # 1. The queue is empty and we hold the ``put()`` lock - # 2. We **cannot** ``close()`` the request generator. - # This means that the request generator is blocking at - # ``get()`` from the queue and will continue to block since - # we have locked ``.put()``. - self._request_queue.put(_helper_threads.STOP) - # Wait for the request generator to ``.get()`` the ``STOP``. - _LOGGER.debug( - 'Waiting for active request generator to receive STOP') - while not self._request_queue.empty(): - pass - # We would **like** to call ``request_generator.close()`` here - # but can't guarantee that the generator is paused, since it - # has a few instructions to complete between the ``get()`` - # and the ``break``. However, we are confident that - # 1. The queue was empty and we hold the ``put()`` lock - # 2. We added ``STOP`` - # 3. We waited until the request generator consumed ``STOP`` - # so we know the request generator **will** stop within a - # few cycles. 
- except Exception as exc: - _LOGGER.error('Failed to close request generator: %r', exc) - return False - - _LOGGER.debug('Successfully closed request generator.') - return True - def _blocking_consume(self, policy): """Consume the stream indefinitely. @@ -341,10 +317,12 @@ def _blocking_consume(self, policy): _LOGGER.debug('Event signaled consumer exit.') break - request_generator = self._request_generator_thread(policy) - response_generator = policy.call_rpc(request_generator) - responses = _pausable_iterator( - response_generator, self._can_consume) + initial_request = policy.get_initial_request() + request_generator = _RequestQueueGenerator( + self._request_queue, initial_request=initial_request) + rpc = policy.call_rpc(request_generator) + request_generator.rpc = rpc + responses = _pausable_iterator(rpc, self._can_consume) try: for response in responses: _LOGGER.debug('Received response on stream') @@ -358,11 +336,11 @@ def _blocking_consume(self, policy): break except Exception as exc: recover = policy.on_exception(exc) - if recover: - recover = self._stop_request_generator( - request_generator, response_generator) if not recover: self._stop_no_join() + # No need to raise this exception. The policy should handle + # passing the exception to the code that started the + # consumer via a future. return @property diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 3bd75356bb3c..48ef35e7e113 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -263,39 +263,26 @@ def drop(self, items): self._maybe_resume_consumer() - def get_initial_request(self, ack_queue=False): + def get_initial_request(self): """Return the initial request. 
This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. - Args: - ack_queue (bool): Whether to include any acks that were sent - while the connection was paused. - Returns: google.cloud.pubsub_v1.types.StreamingPullRequest: A request suitable for being the first request on the stream (and not suitable for any other purpose). - - .. note:: - If ``ack_queue`` is set to True, this includes the ack_ids, but - also clears the internal set. - - This means that calls to :meth:`get_initial_request` with - ``ack_queue`` set to True are not idempotent. """ # Any ack IDs that are under lease management and not being acked # need to have their deadline extended immediately. - ack_ids = set() lease_ids = set(self.leased_messages.keys()) - if ack_queue: - ack_ids = self._ack_on_resume - lease_ids = lease_ids.difference(ack_ids) + # Exclude any IDs that we're about to ack. + lease_ids = lease_ids.difference(self._ack_on_resume) # Put the request together. request = types.StreamingPullRequest( - ack_ids=list(ack_ids), + ack_ids=list(self._ack_on_resume), modify_deadline_ack_ids=list(lease_ids), modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), stream_ack_deadline_seconds=self.histogram.percentile(99), @@ -303,9 +290,7 @@ def get_initial_request(self, ack_queue=False): ) # Clear the ack_ids set. - # Note: If `ack_queue` is False, this just ends up being a no-op, - # since the set is just an empty set. - ack_ids.clear() + self._ack_on_resume.clear() # Return the initial request. return request diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index e02c42bc7e2a..9f5c77477995 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -129,6 +129,8 @@ def test_subscribe_to_messages( # Okay, we took too long; fail out. 
assert callback.calls >= 50 + subscription.close() + def test_subscribe_to_messages_async_callbacks( publisher, topic_path, subscriber, subscription_path, cleanup): @@ -181,6 +183,8 @@ def test_subscribe_to_messages_async_callbacks( # Okay, we took too long; fail out. assert callback.calls >= 2 + subscription.close() + class AckCallback(object): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 7ae9699dd8b4..1f817c4fa1f9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -15,6 +15,7 @@ import threading from google.auth import credentials +import grpc import mock import pytest from six.moves import queue @@ -22,9 +23,76 @@ from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber import _helper_threads +class Test_RequestQueueGenerator(object): + + def test_bounded_consume(self): + rpc = mock.create_autospec(grpc.RpcContext, instance=True) + rpc.is_active.return_value = True + + def queue_generator(rpc): + yield mock.sentinel.A + yield queue.Empty() + yield mock.sentinel.B + rpc.is_active.return_value = False + yield mock.sentinel.C + + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue_generator(rpc) + + generator = _consumer._RequestQueueGenerator(q) + generator.rpc = rpc + + items = list(generator) + + assert items == [mock.sentinel.A, mock.sentinel.B] + + def test_yield_initial_and_exit(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue.Empty() + rpc = mock.create_autospec(grpc.RpcContext, instance=True) + rpc.is_active.return_value = False + + generator = 
_consumer._RequestQueueGenerator( + q, initial_request=mock.sentinel.A) + generator.rpc = rpc + + items = list(generator) + + assert items == [mock.sentinel.A] + + def test_exit_when_inactive_with_item(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = [mock.sentinel.A, queue.Empty()] + rpc = mock.create_autospec(grpc.RpcContext, instance=True) + rpc.is_active.return_value = False + + generator = _consumer._RequestQueueGenerator(q) + generator.rpc = rpc + + items = list(generator) + + assert items == [] + # Make sure it put the item back. + q.put.assert_called_once_with(mock.sentinel.A) + + def test_exit_when_inactive_empty(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue.Empty() + rpc = mock.create_autospec(grpc.RpcContext, instance=True) + rpc.is_active.return_value = False + + generator = _consumer._RequestQueueGenerator(q) + generator.rpc = rpc + + items = list(generator) + + assert items == [] + + def test_send_request(): consumer = _consumer.Consumer() request = types.StreamingPullRequest(subscription='foo') @@ -33,32 +101,8 @@ def test_send_request(): put.assert_called_once_with(request) -def test_request_generator_thread(): - consumer = _consumer.Consumer() - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - policy = client.subscribe('sub_name_e') - generator = consumer._request_generator_thread(policy) - - # The first request that comes from the request generator thread - # should always be the initial request. - initial_request = next(generator) - assert initial_request.subscription == 'sub_name_e' - assert initial_request.stream_ack_deadline_seconds == 10 - - # Subsequent requests correspond to items placed in the request queue. - consumer.send_request(types.StreamingPullRequest(ack_ids=['i'])) - request = next(generator) - assert request.ack_ids == ['i'] - - # The poison pill should stop the loop. 
- consumer.send_request(_helper_threads.STOP) - with pytest.raises(StopIteration): - next(generator) - - def test_blocking_consume(): - policy = mock.Mock(spec=('call_rpc', 'on_response')) + policy = mock.create_autospec(base.BasePolicy, instance=True) policy.call_rpc.return_value = iter((mock.sentinel.A, mock.sentinel.B)) consumer = _consumer.Consumer() @@ -95,7 +139,7 @@ def __call__(self, exception): def test_blocking_consume_on_exception(): - policy = mock.Mock(spec=('call_rpc', 'on_response', 'on_exception')) + policy = mock.create_autospec(base.BasePolicy, instance=True) policy.call_rpc.return_value = iter((mock.sentinel.A, mock.sentinel.B)) exc = TypeError('Bad things!') policy.on_response.side_effect = exc @@ -122,13 +166,8 @@ class RaisingResponseGenerator(object): def __init__(self, exception): self.exception = exception - self.done_calls = 0 self.next_calls = 0 - def done(self): - self.done_calls += 1 - return True - def __next__(self): self.next_calls += 1 raise self.exception @@ -138,7 +177,7 @@ def next(self): def test_blocking_consume_two_exceptions(): - policy = mock.Mock(spec=('call_rpc', 'on_exception')) + policy = mock.create_autospec(base.BasePolicy, instance=True) exc1 = NameError('Oh noes.') exc2 = ValueError('Something grumble.') @@ -150,7 +189,8 @@ def test_blocking_consume_two_exceptions(): consumer = _consumer.Consumer() consumer.resume() - consumer._consumer_thread = mock.Mock(spec=threading.Thread) + consumer._consumer_thread = mock.create_autospec( + threading.Thread, instance=True) # Establish that we get responses until we are sent the exiting event. assert consumer._blocking_consume(policy) is None @@ -159,9 +199,7 @@ def test_blocking_consume_two_exceptions(): # Check mocks. 
assert policy.call_rpc.call_count == 2 assert response_generator1.next_calls == 1 - assert response_generator1.done_calls == 1 assert response_generator2.next_calls == 1 - assert response_generator2.done_calls == 0 policy.on_exception.assert_has_calls( [mock.call(exc1), mock.call(exc2)]) @@ -227,183 +265,3 @@ def test_stop_consuming(): assert consumer._consumer_thread is None # Check mocks. thread.join.assert_called_once_with() - - -def basic_queue_generator(queue, received): - while True: - value = queue.get() - received.put(value) - yield value - - -def test_stop_request_generator_response_not_done(): - consumer = _consumer.Consumer() - - response_generator = mock.Mock(spec=('done',)) - response_generator.done.return_value = False - stopped = consumer._stop_request_generator(None, response_generator) - assert stopped is False - - # Check mocks. - response_generator.done.assert_called_once_with() - - -def test_stop_request_generator_not_running(): - # Model scenario tested: - # - The request generator **is not** running - # - The request queue **is not** empty - # Expected result: - # - ``_stop_request_generator()`` successfully calls ``.close()`` - consumer = _consumer.Consumer() - queue_ = consumer._request_queue - received = queue.Queue() - request_generator = basic_queue_generator(queue_, received) - - item1 = 'unblock-please' - item2 = 'still-here' - queue_.put(item1) - queue_.put(item2) - assert not queue_.empty() - assert received.empty() - thread = threading.Thread(target=next, args=(request_generator,)) - thread.start() - - # Make sure the generator is not stuck at the blocked ``.get()`` - # in the thread. - while request_generator.gi_running: - pass - assert received.get() == item1 - # Make sure it **isn't** done. 
- assert request_generator.gi_frame is not None - - response_generator = mock.Mock(spec=('done',)) - response_generator.done.return_value = True - stopped = consumer._stop_request_generator( - request_generator, response_generator) - assert stopped is True - - # Make sure it **is** done. - assert not request_generator.gi_running - assert request_generator.gi_frame is None - assert not queue_.empty() - assert queue_.get() == item2 - assert queue_.empty() - - # Check mocks. - response_generator.done.assert_called_once_with() - - -def test_stop_request_generator_close_failure(): - # Model scenario tested: - # - The input isn't actually a generator - # Expected result: - # - ``_stop_request_generator()`` falls through to the ``LOGGER.error`` - # case and returns ``False`` - consumer = _consumer.Consumer() - - request_generator = mock.Mock(spec=('close',)) - request_generator.close.side_effect = TypeError('Really, not a generator') - - response_generator = mock.Mock(spec=('done',)) - response_generator.done.return_value = True - stopped = consumer._stop_request_generator( - request_generator, response_generator) - assert stopped is False - - # Make sure close() was only called once. - request_generator.close.assert_called_once_with() - response_generator.done.assert_called_once_with() - - -def test_stop_request_generator_queue_non_empty(): - # Model scenario tested: - # - The request generator **is** running - # - The request queue **is not** empty - # Expected result: - # - ``_stop_request_generator()`` can't call ``.close()`` (since - # the generator is running) but then returns with ``False`` because - # the queue **is not** empty - consumer = _consumer.Consumer() - # Attach a "fake" queue to the request generator so the generator can - # block on an empty queue while the consumer's queue is not empty. - queue_ = queue.Queue() - received = queue.Queue() - request_generator = basic_queue_generator(queue_, received) - # Make sure the consumer's queue is not empty. 
- item1 = 'not-empty' - consumer._request_queue.put(item1) - - thread = threading.Thread(target=next, args=(request_generator,)) - thread.start() - - # Make sure the generator is stuck at the blocked ``.get()`` - # in ``thread``. - while not request_generator.gi_running: - pass - assert received.empty() - assert request_generator.gi_frame is not None - - response_generator = mock.Mock(spec=('done',)) - response_generator.done.return_value = True - stopped = consumer._stop_request_generator( - request_generator, response_generator) - assert stopped is False - - # Make sure the generator is **still** not finished. - assert request_generator.gi_running - assert request_generator.gi_frame is not None - assert consumer._request_queue.get() == item1 - # Allow the generator to exit. - item2 = 'just-exit' - queue_.put(item2) - # Wait until it's actually done. - while request_generator.gi_running: - pass - assert received.get() == item2 - - # Check mocks. - response_generator.done.assert_called_once_with() - - -def test_stop_request_generator_running(): - # Model scenario tested: - # - The request generator **is** running - # - The request queue **is** empty - # Expected result: - # - ``_stop_request_generator()`` can't call ``.close()`` (since - # the generator is running) but then verifies that the queue is - # empty and sends ``STOP`` into the queue to successfully stop - # the generator - consumer = _consumer.Consumer() - queue_ = consumer._request_queue - received = queue.Queue() - request_generator = basic_queue_generator(queue_, received) - - thread = threading.Thread(target=next, args=(request_generator,)) - thread.start() - - # Make sure the generator is stuck at the blocked ``.get()`` - # in the thread. 
- while not request_generator.gi_running: - pass - assert received.empty() - assert request_generator.gi_frame is not None - - response_generator = mock.Mock(spec=('done',)) - response_generator.done.return_value = True - stopped = consumer._stop_request_generator( - request_generator, response_generator) - assert stopped is True - - # Make sure it **is** done, though we may have to wait until - # the generator finishes (it has a few instructions between the - # ``get()`` and the ``break``). - while request_generator.gi_running: - pass - request_generator.close() - assert request_generator.gi_frame is None - assert received.get() == _helper_threads.STOP - assert queue_.empty() - - # Check mocks. - response_generator.done.assert_called_once_with() From b2471b52d7851871d8e49a08f91ca3af34c0c929 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Mar 2018 13:17:07 -0700 Subject: [PATCH 0236/1197] Release 0.33.0 (#5074) --- packages/google-cloud-pubsub/CHANGELOG.md | 16 ++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 6ea8399e2386..6fc76367dd39 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.33.0 + +### Implementation changes + +- Use the rpc's status to determine when to exit the request generator thread (#5054) +- Drop leased messages after flow_control.max_lease_duration has passed. 
(#5020) +- Fix mantain leases to not modack messages it just dropped (#5045) +- Avoid race condition in maintain_leases by copying leased_messages (#5035) +- Retry subscription stream on InternalServerError, Unknown, and GatewayTimeout (#5021) +- Nack messages when the subscriber callback errors (#5019) + +### Testing + +- pubsub nox.py cleanup (#5056) +- Fix test that checks for retryable exceptions (#5034) + ## 0.32.1 ### Dependencies diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b6787b309a60..55b2f40b6a9f 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.32.1' +version = '0.33.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 9245e55ac2be5b3f623d447571d8f48b486a9eb8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Mar 2018 13:31:50 -0700 Subject: [PATCH 0237/1197] Revert "Release 0.33.0 (#5074)" (#5077) This reverts commit 52ba3b7ef395ae7c408cddd22bb70d72b69cc72a. --- packages/google-cloud-pubsub/CHANGELOG.md | 16 ---------------- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 6fc76367dd39..6ea8399e2386 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,22 +4,6 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history -## 0.33.0 - -### Implementation changes - -- Use the rpc's status to determine when to exit the request generator thread (#5054) -- Drop leased messages after flow_control.max_lease_duration has passed. 
(#5020) -- Fix mantain leases to not modack messages it just dropped (#5045) -- Avoid race condition in maintain_leases by copying leased_messages (#5035) -- Retry subscription stream on InternalServerError, Unknown, and GatewayTimeout (#5021) -- Nack messages when the subscriber callback errors (#5019) - -### Testing - -- pubsub nox.py cleanup (#5056) -- Fix test that checks for retryable exceptions (#5034) - ## 0.32.1 ### Dependencies diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 55b2f40b6a9f..b6787b309a60 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.33.0' +version = '0.32.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 147cf42bc61c3f02a84239617bb9ce5d6582b001 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Mar 2018 13:37:12 -0700 Subject: [PATCH 0238/1197] Fix missing iter on request stream (#5078) --- .../google/cloud/pubsub_v1/subscriber/_consumer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 7662546588b6..ae00abea42d9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -320,7 +320,7 @@ def _blocking_consume(self, policy): initial_request = policy.get_initial_request() request_generator = _RequestQueueGenerator( self._request_queue, initial_request=initial_request) - rpc = policy.call_rpc(request_generator) + rpc = policy.call_rpc(iter(request_generator)) request_generator.rpc = rpc responses = _pausable_iterator(rpc, self._can_consume) try: From 
5b177142a1b22166141f2eaededaaafd800c6f27 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 20 Mar 2018 13:47:23 -0700 Subject: [PATCH 0239/1197] Release pubsub 0.33.0 (#5079) --- packages/google-cloud-pubsub/CHANGELOG.md | 17 +++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 6ea8399e2386..995c043d4aba 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,23 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.33.0 + +### Implementation changes + +- Drop leased messages after flow_control.max_lease_duration has passed. (#5020) +- Fix mantain leases to not modack messages it just dropped (#5045) +- Avoid race condition in maintain_leases by copying leased_messages (#5035) +- Retry subscription stream on InternalServerError, Unknown, and GatewayTimeout (#5021) +- Use the rpc's status to determine when to exit the request generator thread (#5054) +- Fix missing iter on request stream (#5078) +- Nack messages when the subscriber callback errors (#5019) + +### Testing + +- pubsub nox.py cleanup (#5056) +- Fix test that checks for retryable exceptions (#5034) + ## 0.32.1 ### Dependencies diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b6787b309a60..55b2f40b6a9f 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.32.1' +version = '0.33.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From e079e5d2afb0f2beb0246d5a40fd5359f226f7dd Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 22 Mar 2018 12:54:18 -0700 Subject: [PATCH 0240/1197] Properly handle graceful stop in 
request generator (#5097) --- .../google/cloud/pubsub_v1/subscriber/_consumer.py | 7 +++++++ .../unit/pubsub_v1/subscriber/test_consumer.py | 14 +++++++++++++- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index ae00abea42d9..1cf757c52bd0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -210,6 +210,12 @@ def __iter__(self): # RPC is still active, keep waiting for queue items. continue + # A call to consumer.close() signaled us to stop generating + # requests. + if item == _helper_threads.STOP: + _LOGGER.debug('Cleanly exiting request generator.') + return + if self._should_exit(): # We have an item, but the RPC is closed. We should put the # item back on the queue so that the next RPC can consume it. @@ -416,6 +422,7 @@ def _stop_no_join(self): self.resume() # Make sure we aren't paused. self._stopped.set() _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) + # Signal the request generator RPC to exit cleanly. 
self.send_request(_helper_threads.STOP) thread = self._consumer_thread self._consumer_thread = None diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 1f817c4fa1f9..f54de7484d22 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -17,7 +17,6 @@ from google.auth import credentials import grpc import mock -import pytest from six.moves import queue from google.cloud.pubsub_v1 import subscriber @@ -92,6 +91,19 @@ def test_exit_when_inactive_empty(self): assert items == [] + def test_exit_with_stop(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = [_helper_threads.STOP, queue.Empty()] + rpc = mock.create_autospec(grpc.RpcContext, instance=True) + rpc.is_active.return_value = True + + generator = _consumer._RequestQueueGenerator(q) + generator.rpc = rpc + + items = list(generator) + + assert items == [] + def test_send_request(): consumer = _consumer.Consumer() From 9b5f633adc76531bb098ba796c9053440ab15214 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 26 Mar 2018 12:46:10 -0700 Subject: [PATCH 0241/1197] Make the pausable response iterator aware of the RPC state to prevent deadlock (#5108) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 52 +++++++++------- .../pubsub_v1/subscriber/test_consumer.py | 61 ++++++++++++++++++- 2 files changed, 91 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 1cf757c52bd0..63e41dbe8ef0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -228,6 +228,36 @@ def 
__iter__(self): yield item +def _pausable_response_iterator(iterator, can_continue, period=1): + """Converts a gRPC response iterator into one that can be paused. + + The ``can_continue`` event can be used by an independent, concurrent + worker to pause and resume the iteration over ``iterator``. + + Args: + iterator (grpc.RpcContext, Iterator[protobuf.Message]): A + ``grpc.RpcContext`` instance that is also an iterator of responses. + This is a typically returned from grpc's streaming response call + types. + can_continue (threading.Event): An event which determines if we + can advance to the next iteration. Will be ``wait()``-ed on + before consuming more items from the iterator. + period (float): The number of seconds to wait to be able to consume + before checking if the RPC is cancelled. In practice, this + determines the maximum amount of time that ``next()`` on this + iterator will block after the RPC is cancelled. + + Yields: + Any: The items yielded from ``iterator``. + """ + while True: + can_yield = can_continue.wait(timeout=period) + # Calling next() on a cancelled RPC will cause it to raise the + # grpc.RpcError associated with the cancellation. + if can_yield or not iterator.is_active(): + yield next(iterator) + + class Consumer(object): """Bi-directional streaming RPC consumer. @@ -328,7 +358,7 @@ def _blocking_consume(self, policy): self._request_queue, initial_request=initial_request) rpc = policy.call_rpc(iter(request_generator)) request_generator.rpc = rpc - responses = _pausable_iterator(rpc, self._can_consume) + responses = _pausable_response_iterator(rpc, self._can_consume) try: for response in responses: _LOGGER.debug('Received response on stream') @@ -439,23 +469,3 @@ def stop_consuming(self): """ thread = self._stop_no_join() thread.join() - - -def _pausable_iterator(iterator, can_continue): - """Converts a standard iterator into one that can be paused. 
- - The ``can_continue`` event can be used by an independent, concurrent - worker to pause and resume the iteration over ``iterator``. - - Args: - iterator (Iterator): Any iterator to be iterated over. - can_continue (threading.Event): An event which determines if we - can advance to the next iteration. Will be ``wait()``-ed on - before - - Yields: - Any: The items from ``iterator``. - """ - while True: - can_continue.wait() - yield next(iterator) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index f54de7484d22..31cd5ec66d04 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -105,6 +105,38 @@ def test_exit_with_stop(self): assert items == [] +class _ResponseIterator(object): + def __init__(self, items, active=True): + self._items = iter(items) + self._active = active + + def is_active(self): + return self._active + + def __next__(self): + return next(self._items) + + next = __next__ + + +def test__pausable_response_iterator_active_but_cant_consume(): + # Note: we can't autospec threading.Event because it's goofy on Python 2. + can_consume = mock.Mock(spec=['wait']) + # First call will return false, indicating the loop should try again. + # second call will allow it to consume the first (and only) item. + can_consume.wait.side_effect = [False, True] + iterator = _ResponseIterator([1]) + + print(can_consume) + + pausable_iter = _consumer._pausable_response_iterator( + iterator, can_consume) + + items = list(pausable_iter) + + assert items == [1] + + def test_send_request(): consumer = _consumer.Consumer() request = types.StreamingPullRequest(subscription='foo') @@ -176,9 +208,10 @@ class RaisingResponseGenerator(object): # rather than the **class** will not be iterable in Python 2. 
# This is problematic since a `Mock` just sets members. - def __init__(self, exception): + def __init__(self, exception, active=True): self.exception = exception self.next_calls = 0 + self._active = active def __next__(self): self.next_calls += 1 @@ -187,6 +220,32 @@ def __next__(self): def next(self): return self.__next__() # Python 2 + def is_active(self): + return self._active + + +def test_blocking_consume_iter_exception_while_paused(): + policy = mock.create_autospec(base.BasePolicy, instance=True) + exc = TypeError('Bad things!') + policy.call_rpc.return_value = RaisingResponseGenerator( + exc, active=False) + + consumer = _consumer.Consumer() + # Ensure the consume is paused. + consumer.pause() + consumer._consumer_thread = mock.Mock(spec=threading.Thread) + policy.on_exception.side_effect = OnException() + + # Start the thread. It should not block forever but should notice the rpc + # is inactive and raise the exception from the stream and then exit + # because on_exception returns false. + consumer._blocking_consume(policy) + assert consumer._consumer_thread is None + + # Check mocks. 
+ policy.call_rpc.assert_called_once() + policy.on_exception.assert_called_once_with(exc) + def test_blocking_consume_two_exceptions(): policy = mock.create_autospec(base.BasePolicy, instance=True) From 9cdeb5fab4cff1fef5a45a5f08384c4ca1d81fc1 Mon Sep 17 00:00:00 2001 From: chemelnucfin Date: Mon, 26 Mar 2018 13:28:52 -0700 Subject: [PATCH 0242/1197] Pubsub: remove extra print statement (#5118) --- .../tests/unit/pubsub_v1/subscriber/test_consumer.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index 31cd5ec66d04..daac8352e5d9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -127,8 +127,6 @@ def test__pausable_response_iterator_active_but_cant_consume(): can_consume.wait.side_effect = [False, True] iterator = _ResponseIterator([1]) - print(can_consume) - pausable_iter = _consumer._pausable_response_iterator( iterator, can_consume) From 591c4b358ca80607f57039461e0b1e96a1cc1477 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 28 Mar 2018 13:19:45 -0700 Subject: [PATCH 0243/1197] Surface publish RPC errors back to the publish futures (#5124) --- .../cloud/pubsub_v1/publisher/batch/thread.py | 28 ++++++++++++++++--- .../pubsub_v1/publisher/batch/test_thread.py | 21 ++++++++++++++ 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 73cafb9cde13..64186b130e94 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -20,6 +20,7 @@ import six +import google.api_core.exceptions from 
google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures @@ -199,10 +200,24 @@ def _commit(self): # Begin the request to publish these messages. # Log how long the underlying request takes. start = time.time() - response = self._client.api.publish( - self._topic, - self._messages, - ) + + try: + response = self._client.api.publish( + self._topic, + self._messages, + ) + except google.api_core.exceptions.GoogleAPICallError as exc: + # We failed to publish, set the exception on all futures and + # exit. + self._status = base.BatchStatus.ERROR + + for future in self._futures: + future.set_exception(exc) + + _LOGGER.exception( + 'Failed to publish %s messages.', len(self._futures)) + return + end = time.time() _LOGGER.debug('gRPC Publish took %s seconds.', end - start) @@ -220,9 +235,14 @@ def _commit(self): self._status = base.BatchStatus.ERROR exception = exceptions.PublishError( 'Some messages were not successfully published.') + for future in self._futures: future.set_exception(exception) + _LOGGER.error( + 'Only %s of %s messages were published.', + len(response.message_ids), len(self._futures)) + def monitor(self): """Commit this batch after sufficient time has elapsed. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 1c08e1b3843a..fb62dbc6e550 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -17,6 +17,7 @@ import mock +import google.api_core.exceptions from google.auth import credentials from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types @@ -201,6 +202,26 @@ def test_blocking__commit_wrong_messageid_length(): assert isinstance(future.exception(), exceptions.PublishError) +def test_block__commmit_api_error(): + batch = create_batch() + futures = ( + batch.publish({'data': b'blah blah blah'}), + batch.publish({'data': b'blah blah blah blah'}), + ) + + # Make the API throw an error when publishing. + error = google.api_core.exceptions.InternalServerError('uh oh') + patch = mock.patch.object( + type(batch.client.api), 'publish', side_effect=error) + + with patch: + batch._commit() + + for future in futures: + assert future.done() + assert future.exception() == error + + def test_monitor(): batch = create_batch(max_latency=5.0) with mock.patch.object(time, 'sleep') as sleep: From 9e297b153b5f6275c274cabc2f0fac269faebb99 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 3 Apr 2018 09:17:03 -0700 Subject: [PATCH 0244/1197] Release 0.33.1 (#5136) --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 995c043d4aba..1e233db75b7c 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.33.1 + +### 
Implementation changes + +- Surface publish RPC errors back to the publish futures (#5124) +- Make the pausable response iterator aware of the RPC state to prevent deadlock (#5108) +- Properly handle graceful stop in request generator (#5097) + ## 0.33.0 ### Implementation changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 55b2f40b6a9f..71734d727efa 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.33.0' +version = '0.33.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From a9fecaa69a8f30d6757c4d6b033590bf927faef2 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 17 Apr 2018 12:05:19 -0700 Subject: [PATCH 0245/1197] Add bi-directional stream consumption helpers. (#5189) This is the foundation of the Pub/Sub subscriber refactor. It exposes a socket-like interface for gRPC streams and builds a robust, resumable stream on top of that. Finally, it adds a class for consuming the stream in the background using callbacks. 
--- .../subscriber/_protocol/__init__.py | 0 .../pubsub_v1/subscriber/_protocol/bidi.py | 489 ++++++++++++++ .../unit/pubsub_v1/subscriber/test_bidi.py | 610 ++++++++++++++++++ 3 files changed, 1099 insertions(+) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py new file mode 100644 index 000000000000..a40f039152a0 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -0,0 +1,489 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Bi-directional streaming RPC helpers.""" + +import logging +import threading + +from six.moves import queue + +from google.api_core import exceptions + +_LOGGER = logging.getLogger(__name__) +_BIDIRECTIONAL_CONSUMER_NAME = 'Thread-ConsumeBidirectionalStream' + + +class _RequestQueueGenerator(object): + """A helper for sending requests to a gRPC stream from a Queue. + + This generator takes requests off a given queue and yields them to gRPC. + + This helper is useful when you have an indeterminate, indefinite, or + otherwise open-ended set of requests to send through a request-streaming + (or bidirectional) RPC. + + The reason this is necessary is because gRPC takes an iterator as the + request for request-streaming RPCs. gRPC consumes this iterator in another + thread to allow it to block while generating requests for the stream. + However, if the generator blocks indefinitely gRPC will not be able to + clean up the thread as it'll be blocked on `next(iterator)` and not be able + to check the channel status to stop iterating. This helper mitigates that + by waiting on the queue with a timeout and checking the RPC state before + yielding. + + Finally, it allows for retrying without swapping queues because if it does + pull an item off the queue when the RPC is inactive, it'll immediately put + it back and then exit. This is necessary because yielding the item in this + case will cause gRPC to discard it. In practice, this means that the order + of messages is not guaranteed. If such a thing is necessary it would be + easy to use a priority queue. + + Example:: + + requests = request_queue_generator(q) + call = stub.StreamingRequest(iter(requests)) + requests.call = call + + for response in call: + print(response) + q.put(...) + + Note that it is possible to accomplish this behavior without "spinning" + (using a queue timeout). 
One possible way would be to use more threads to + multiplex the grpc end event with the queue, another possible way is to + use selectors and a custom event/queue object. Both of these approaches + are significant from an engineering perspective for small benefit - the + CPU consumed by spinning is pretty minuscule. + + Args: + queue (queue.Queue): The request queue. + period (float): The number of seconds to wait for items from the queue + before checking if the RPC is cancelled. In practice, this + determines the maximum amount of time the request consumption + thread will live after the RPC is cancelled. + initial_request (Union[protobuf.Message, + Callable[None, protobuf.Message]]): The initial request to + yield. This is done independently of the request queue to allow fo + easily restarting streams that require some initial configuration + request. + """ + def __init__(self, queue, period=1, initial_request=None): + self._queue = queue + self._period = period + self._initial_request = initial_request + self.call = None + + def _is_active(self): + # Note: there is a possibility that this starts *before* the call + # property is set. So we have to check if self.call is set before + # seeing if it's active. + if self.call is not None and not self.call.is_active(): + return False + else: + return True + + def __iter__(self): + if self._initial_request is not None: + if callable(self._initial_request): + yield self._initial_request() + else: + yield self._initial_request + + while True: + try: + item = self._queue.get(timeout=self._period) + except queue.Empty: + if not self._is_active(): + _LOGGER.debug( + 'Empty queue and inactive call, exiting request ' + 'generator.') + return + else: + # call is still active, keep waiting for queue items. + continue + + # The consumer explicitly sent "None", indicating that the request + # should end. 
+ if item is None: + _LOGGER.debug('Cleanly exiting request generator.') + return + + if not self._is_active(): + # We have an item, but the call is closed. We should put the + # item back on the queue so that the next call can consume it. + self._queue.put(item) + _LOGGER.debug( + 'Inactive call, replacing item on queue and exiting ' + 'request generator.') + return + + yield item + + +class BidiRpc(object): + """A helper for consuming a bi-directional streaming RPC. + + This maps gRPC's built-in interface which uses a request iterator and a + response iterator into a socket-like :func:`send` and :func:`recv`. This + is a more useful pattern for long-running or asymmetric streams (streams + where there is not a direct correlation between the requests and + responses). + + Example:: + + initial_request = example_pb2.StreamingRpcRequest( + setting='example') + rpc = BidiRpc(stub.StreamingRpc, initial_request=initial_request) + + rpc.open() + + while rpc.is_active(): + print(rpc.recv()) + rpc.send(example_pb2.StreamingRpcRequest( + data='example')) + + This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`. + + Args: + start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to + start the RPC. + initial_request (Union[protobuf.Message, + Callable[None, protobuf.Message]]): The initial request to + yield. This is useful if an initial request is needed to start the + stream. + """ + def __init__(self, start_rpc, initial_request=None): + self._start_rpc = start_rpc + self._initial_request = initial_request + self._request_queue = queue.Queue() + self._request_generator = None + self._is_active = False + self.call = None + self._callbacks = [] + + def add_done_callback(self, callback): + """Adds a callback that will be called when the RPC terminates. + + This occurs when the RPC errors or is successfully terminated. + + Args: + callback (Callable[[grpc.Future], None]): The callback to execute. 
+ It will be provided with the same gRPC future as the underlying + stream which will also be a :class:`grpc.Call`. + """ + self._callbacks.append(callback) + + def _on_call_done(self, future): + for callback in self._callbacks: + callback(future) + + def open(self): + """Opens the stream.""" + if self.is_active: + raise ValueError('Can not open an already open stream.') + + request_generator = _RequestQueueGenerator( + self._request_queue, initial_request=self._initial_request) + call = self._start_rpc(iter(request_generator)) + + request_generator.call = call + + # TODO: api_core should expose the future interface for wrapped + # callables as well. + if hasattr(call, '_wrapped'): # pragma: NO COVER + call._wrapped.add_done_callback(self._on_call_done) + else: + call.add_done_callback(self._on_call_done) + + self._request_generator = request_generator + self.call = call + + def close(self): + """Closes the stream.""" + if self.call is None: + return + + self._request_queue.put(None) + self.call.cancel() + self._request_generator = None + # Don't set self.call to None. Keep it around so that send/recv can + # raise the error. + + def send(self, request): + """Queue a message to be sent on the stream. + + Send is non-blocking. + + If the underlying RPC has been closed, this will raise. + + Args: + request (protobuf.Message): The request to send. + """ + if self.call is None: + raise ValueError( + 'Can not send() on an RPC that has never been open()ed.') + + if self.is_active: + self._request_queue.put(request) + else: + # calling next should cause the call to raise. + next(self.call) + + def recv(self): + """Wait for a message to be returned from the stream. + + Recv is blocking. + + If the underlying RPC has been closed, this will raise. + + Returns: + protobuf.Message: The received message. 
+ """ + if self.call is None: + raise ValueError( + 'Can not recv() on an RPC that has never been open()ed.') + + return next(self.call) + + @property + def is_active(self): + """bool: True if this stream is currently open and active.""" + return self.call is not None and self.call.is_active() + + @property + def pending_requests(self): + """int: Returns an estimate of the number of queued requests.""" + return self._request_queue.qsize() + + +class ResumableBidiRpc(BidiRpc): + """A :class:`BidiRpc` that can automatically resume the stream on errors. + + It uses the ``should_recover`` arg to determine if it should re-establish + the stream on error. + + Example:: + + def should_recover(exc): + return ( + isinstance(exc, grpc.RpcError) and + exc.code() == grpc.StatusCode.UNVAILABLE) + + initial_request = example_pb2.StreamingRpcRequest( + setting='example') + + rpc = ResumeableBidiRpc( + stub.StreamingRpc, + initial_request=initial_request, + should_recover=should_recover) + + rpc.open() + + while rpc.is_active(): + print(rpc.recv()) + rpc.send(example_pb2.StreamingRpcRequest( + data='example')) + + Args: + start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to + start the RPC. + initial_request (Union[protobuf.Message, + Callable[None, protobuf.Message]]): The initial request to + yield. This is useful if an initial request is needed to start the + stream. + should_recover (Callable[[Exception], bool]): A function that returns + True if the stream should be recovered. This will be called + whenever an error is encountered on the stream. + """ + def __init__(self, start_rpc, should_recover, initial_request=None): + super(ResumableBidiRpc, self).__init__(start_rpc, initial_request) + self._should_recover = should_recover + self._operational_lock = threading.Lock() + + def _on_call_done(self, future): + # Unlike the base class, we only execute the callbacks on a terminal + # error, not for errors that we can recover from. 
Note that grpc's + # "future" here is also a grpc.RpcError. + if not self._should_recover(future): + for callback in self._callbacks: + callback(future) + + def _reopen(self): + with self._operational_lock: + # Another thread already managed to re-open this stream. + if self.is_active: + return + + self.call = None + # Request generator should exit cleanly since the RPC its bound to + # has exited. + self.request_generator = None + self.open() + + def _recoverable(self, method, *args, **kwargs): + """Wraps a method to recover the stream and retry on error. + + If a recoverable error occurs, this will retry the RPC and retry the + method. If a second error occurs while retrying the method, it will + bubble up. + + Args: + method (Callable[..., Any]): The method to call. + args: The args to pass to the method. + kwargs: The kwargs to pass to the method. + """ + try: + return method(*args, **kwargs) + + except Exception as exc: + if not self._should_recover(exc): + self.close() + raise exc + + self._reopen() + + return method(*args, **kwargs) + + def send(self, request): + return self._recoverable( + super(ResumableBidiRpc, self).send, request) + + def recv(self): + return self._recoverable( + super(ResumableBidiRpc, self).recv) + + +class BackgroundConsumer(object): + """A bi-directional stream consumer that runs in a separate thread. + + This maps the consumption of a stream into a callback-based model. It also + provides :func:`pause` and :func:`resume` to allow for flow-control. 
+ + Example:: + + def should_recover(exc): + return ( + isinstance(exc, grpc.RpcError) and + exc.code() == grpc.StatusCode.UNVAILABLE) + + initial_request = example_pb2.StreamingRpcRequest( + setting='example') + + rpc = ResumeableBidiRpc( + stub.StreamingRpc, + initial_request=initial_request, + should_recover=should_recover) + + def on_response(response): + print(response) + + consumer = BackgroundConsumer(rpc, on_response) + consume.start() + + Note that error handling *must* be done by using the provided + ``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit + whenever the RPC itself exits and will not provide any error details. + + Args: + bidi_rpc (BidiRpc): The RPC to consume. Should not have been + ``open()``ed yet. + on_response (Callable[[protobuf.Message], None]): The callback to + be called for every response on the stream. + """ + def __init__(self, bidi_rpc, on_response): + self._bidi_rpc = bidi_rpc + self._on_response = on_response + self._paused = False + self._wake = threading.Condition() + self._thread = None + + def _on_call_done(self, future): + # Resume the thread if it's paused, this prevents blocking forever + # when the RPC has terminated. + self.resume() + + def _thread_main(self): + try: + self._bidi_rpc.add_done_callback(self._on_call_done) + self._bidi_rpc.open() + + while self._bidi_rpc.is_active: + if not self.is_paused: + _LOGGER.debug('waiting for recv.') + response = self._bidi_rpc.recv() + _LOGGER.debug('recved response.') + self._on_response(response) + else: + _LOGGER.debug('paused, waiting for waking.') + with self._wake: + self._wake.wait() + _LOGGER.debug('woken.') + + except exceptions.GoogleAPICallError as exc: + _LOGGER.debug( + '%s caught error %s and will exit. 
Generally this is due to ' + 'the RPC itself being cancelled and the error will be ' + 'surfaced to the calling code.', + _BIDIRECTIONAL_CONSUMER_NAME, exc, exc_info=True) + + except Exception as exc: + _LOGGER.exception( + '%s caught unexpected exception %s and will exit.', + _BIDIRECTIONAL_CONSUMER_NAME, exc) + + _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) + + def start(self): + """Start the background thread and begin consuming the thread.""" + thread = threading.Thread( + name=_BIDIRECTIONAL_CONSUMER_NAME, + target=self._thread_main) + thread.daemon = True + thread.start() + self._thread = thread + _LOGGER.debug('Started helper thread %s', thread.name) + + def stop(self): + """Stop consuming the stream and shutdown the background thread.""" + self._bidi_rpc.close() + + if self._thread is not None: + self._thread.join() + + self._thread = None + + @property + def is_active(self): + """bool: True if the background thread is active.""" + return self._thread is not None and self._thread.is_alive() + + def pause(self): + """Pauses the response stream. + + This does *not* pause the request stream. + """ + with self._wake: + self._paused = True + + def resume(self): + """Resumes the response stream.""" + with self._wake: + self._paused = False + self._wake.notifyAll() + + @property + def is_paused(self): + """bool: True if the response stream is paused.""" + return self._paused diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py new file mode 100644 index 000000000000..6d51472d55c2 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -0,0 +1,610 @@ +# Copyright 2018, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import threading + +import grpc +import mock +import pytest +from six.moves import queue + +from google.api_core import exceptions +from google.cloud.pubsub_v1.subscriber._protocol import bidi + + +class Test_RequestQueueGenerator(object): + + def test_bounded_consume(self): + call = mock.create_autospec(grpc.Call, instance=True) + call.is_active.return_value = True + + def queue_generator(rpc): + yield mock.sentinel.A + yield queue.Empty() + yield mock.sentinel.B + rpc.is_active.return_value = False + yield mock.sentinel.C + + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue_generator(call) + + generator = bidi._RequestQueueGenerator(q) + generator.call = call + + items = list(generator) + + assert items == [mock.sentinel.A, mock.sentinel.B] + + def test_yield_initial_and_exit(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue.Empty() + call = mock.create_autospec(grpc.Call, instance=True) + call.is_active.return_value = False + + generator = bidi._RequestQueueGenerator( + q, initial_request=mock.sentinel.A) + generator.call = call + + items = list(generator) + + assert items == [mock.sentinel.A] + + def test_yield_initial_callable_and_exit(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue.Empty() + call = mock.create_autospec(grpc.Call, instance=True) + call.is_active.return_value = False + + generator = bidi._RequestQueueGenerator( + q, initial_request=lambda: mock.sentinel.A) + generator.call = call + + items = 
list(generator) + + assert items == [mock.sentinel.A] + + def test_exit_when_inactive_with_item(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = [mock.sentinel.A, queue.Empty()] + call = mock.create_autospec(grpc.Call, instance=True) + call.is_active.return_value = False + + generator = bidi._RequestQueueGenerator(q) + generator.call = call + + items = list(generator) + + assert items == [] + # Make sure it put the item back. + q.put.assert_called_once_with(mock.sentinel.A) + + def test_exit_when_inactive_empty(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = queue.Empty() + call = mock.create_autospec(grpc.Call, instance=True) + call.is_active.return_value = False + + generator = bidi._RequestQueueGenerator(q) + generator.call = call + + items = list(generator) + + assert items == [] + + def test_exit_with_stop(self): + q = mock.create_autospec(queue.Queue, instance=True) + q.get.side_effect = [None, queue.Empty()] + call = mock.create_autospec(grpc.Call, instance=True) + call.is_active.return_value = True + + generator = bidi._RequestQueueGenerator(q) + generator.call = call + + items = list(generator) + + assert items == [] + + +class _CallAndFuture(grpc.Call, grpc.Future): + pass + + +def make_rpc(): + """Makes a mock RPC used to test Bidi classes.""" + call = mock.create_autospec(_CallAndFuture, instance=True) + rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True) + + def rpc_side_effect(request): + call.is_active.return_value = True + call.request = request + return call + + rpc.side_effect = rpc_side_effect + + def cancel_side_effect(): + call.is_active.return_value = False + + call.cancel.side_effect = cancel_side_effect + + return rpc, call + + +class ClosedCall(object): + # NOTE: This is needed because defining `.next` on an **instance** + # rather than the **class** will not be iterable in Python 2. + # This is problematic since a `Mock` just sets members. 
+ + def __init__(self, exception): + self.exception = exception + + def __next__(self): + raise self.exception + + next = __next__ # Python 2 + + def is_active(self): + return False + + +class TestBidiRpc(object): + def test_initial_state(self): + bidi_rpc = bidi.BidiRpc(None) + + assert bidi_rpc.is_active is False + + def test_done_callbacks(self): + bidi_rpc = bidi.BidiRpc(None) + callback = mock.Mock(spec=['__call__']) + + bidi_rpc.add_done_callback(callback) + bidi_rpc._on_call_done(mock.sentinel.future) + + callback.assert_called_once_with(mock.sentinel.future) + + def test_open(self): + rpc, call = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + + bidi_rpc.open() + + assert bidi_rpc.call == call + assert bidi_rpc.is_active + call.add_done_callback.assert_called_once_with(bidi_rpc._on_call_done) + + def test_open_error_already_open(self): + rpc, _ = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + + bidi_rpc.open() + + with pytest.raises(ValueError): + bidi_rpc.open() + + def test_close(self): + rpc, call = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + bidi_rpc.open() + + bidi_rpc.close() + + call.cancel.assert_called_once() + assert bidi_rpc.call == call + assert bidi_rpc.is_active is False + # ensure the request queue was signaled to stop. 
+ assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is None + + def test_close_no_rpc(self): + bidi_rpc = bidi.BidiRpc(None) + bidi_rpc.close() + + def test_send(self): + rpc, call = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + bidi_rpc.open() + + bidi_rpc.send(mock.sentinel.request) + + assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is mock.sentinel.request + + def test_send_not_open(self): + rpc, call = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + + with pytest.raises(ValueError): + bidi_rpc.send(mock.sentinel.request) + + def test_send_dead_rpc(self): + error = ValueError() + bidi_rpc = bidi.BidiRpc(None) + bidi_rpc.call = ClosedCall(error) + + with pytest.raises(ValueError) as exc_info: + bidi_rpc.send(mock.sentinel.request) + + assert exc_info.value == error + + def test_recv(self): + bidi_rpc = bidi.BidiRpc(None) + bidi_rpc.call = iter([mock.sentinel.response]) + + response = bidi_rpc.recv() + + assert response == mock.sentinel.response + + def test_recv_not_open(self): + rpc, call = make_rpc() + bidi_rpc = bidi.BidiRpc(rpc) + + with pytest.raises(ValueError): + bidi_rpc.recv() + + +class CallStub(object): + def __init__(self, values, active=True): + self.values = iter(values) + self._is_active = active + self.cancelled = False + + def __next__(self): + item = next(self.values) + if isinstance(item, Exception): + self._is_active = False + raise item + return item + + next = __next__ # Python 2 + + def is_active(self): + return self._is_active + + def add_done_callback(self, callback): + pass + + def cancel(self): + self.cancelled = True + + +class TestResumableBidiRpc(object): + def test_initial_state(self): + bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: True) + + assert bidi_rpc.is_active is False + + def test_done_callbacks_recoverable(self): + bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: True) + callback = mock.Mock(spec=['__call__']) + + bidi_rpc.add_done_callback(callback) + 
bidi_rpc._on_call_done(mock.sentinel.future) + + callback.assert_not_called() + + def test_done_callbacks_non_recoverable(self): + bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) + callback = mock.Mock(spec=['__call__']) + + bidi_rpc.add_done_callback(callback) + bidi_rpc._on_call_done(mock.sentinel.future) + + callback.assert_called_once_with(mock.sentinel.future) + + def test_send_recover(self): + error = ValueError() + call_1 = CallStub([error], active=False) + call_2 = CallStub([]) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + side_effect=[call_1, call_2]) + should_recover = mock.Mock(autospec=['__call__'], return_value=True) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + + bidi_rpc.open() + + bidi_rpc.send(mock.sentinel.request) + + assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is mock.sentinel.request + + should_recover.assert_called_once_with(error) + assert bidi_rpc.call == call_2 + assert bidi_rpc.is_active is True + + def test_send_failure(self): + error = ValueError() + call = CallStub([error], active=False) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + return_value=call) + should_recover = mock.Mock(autospec=['__call__'], return_value=False) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + + bidi_rpc.open() + + with pytest.raises(ValueError) as exc_info: + bidi_rpc.send(mock.sentinel.request) + + assert exc_info.value == error + should_recover.assert_called_once_with(error) + assert bidi_rpc.call == call + assert bidi_rpc.is_active is False + assert call.cancelled is True + assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is None + + def test_recv_recover(self): + error = ValueError() + call_1 = CallStub([1, error]) + call_2 = CallStub([2, 3]) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + side_effect=[call_1, call_2]) + 
should_recover = mock.Mock(autospec=['__call__'], return_value=True) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + + bidi_rpc.open() + + values = [] + for n in range(3): + values.append(bidi_rpc.recv()) + + assert values == [1, 2, 3] + should_recover.assert_called_once_with(error) + assert bidi_rpc.call == call_2 + assert bidi_rpc.is_active is True + + def test_recv_recover_race_condition(self): + # This test checks the race condition where two threads recv() and + # encounter an error and must re-open the stream. Only one thread + # should succeed in doing so. + error = ValueError() + call_1 = CallStub([error, error]) + call_2 = CallStub([1, 2]) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + side_effect=[call_1, call_2]) + recovered_event = threading.Event() + + def second_thread_main(): + assert bidi_rpc.recv() == 2 + + second_thread = threading.Thread(target=second_thread_main) + + def should_recover(exception): + assert exception == error + if threading.current_thread() == second_thread: + recovered_event.wait() + return True + + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + + bidi_rpc.open() + second_thread.start() + + assert bidi_rpc.recv() == 1 + recovered_event.set() + + assert bidi_rpc.call == call_2 + assert bidi_rpc.is_active is True + second_thread.join() + + def test_recv_failure(self): + error = ValueError() + call = CallStub([error]) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + return_value=call) + should_recover = mock.Mock(autospec=['__call__'], return_value=False) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + + bidi_rpc.open() + + with pytest.raises(ValueError) as exc_info: + bidi_rpc.recv() + + assert exc_info.value == error + should_recover.assert_called_once_with(error) + assert bidi_rpc.call == call + assert bidi_rpc.is_active is False + assert call.cancelled is True + + +class 
TestBackgroundConsumer(object): + def test_consume_once_then_exit(self): + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + bidi_rpc.recv.side_effect = [mock.sentinel.response_1] + recved = threading.Event() + + def on_response(response): + assert response == mock.sentinel.response_1 + bidi_rpc.is_active = False + recved.set() + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + + recved.wait() + + bidi_rpc.recv.assert_called_once() + assert bidi_rpc.is_active is False + + consumer.stop() + + bidi_rpc.close.assert_called_once() + assert consumer.is_active is False + + def test_pause_resume_and_close(self): + # This test is relatively complex. It attempts to start the consumer, + # consume one item, pause the consumer, check the state of the world, + # then resume the consumer. Doing this in a deterministic fashion + # requires a bit more mocking and patching than usual. + + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + + def close_side_effect(): + bidi_rpc.is_active = False + + bidi_rpc.close.side_effect = close_side_effect + + # These are used to coordinate the two threads to ensure deterministic + # execution. + should_continue = threading.Event() + responses_and_events = { + mock.sentinel.response_1: threading.Event(), + mock.sentinel.response_2: threading.Event() + } + bidi_rpc.recv.side_effect = [ + mock.sentinel.response_1, mock.sentinel.response_2] + + recved_responses = [] + consumer = None + + def on_response(response): + if response == mock.sentinel.response_1: + consumer.pause() + + recved_responses.append(response) + responses_and_events[response].set() + should_continue.wait() + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + + # Wait for the first response to be recved. + responses_and_events[mock.sentinel.response_1].wait() + + # Ensure only one item has been recved and that the consumer is paused. 
+ assert recved_responses == [mock.sentinel.response_1] + assert consumer.is_paused is True + assert consumer.is_active is True + + # Unpause the consumer, wait for the second item, then close the + # consumer. + should_continue.set() + consumer.resume() + responses_and_events[mock.sentinel.response_2].wait() + assert recved_responses == [ + mock.sentinel.response_1, mock.sentinel.response_2] + + consumer.stop() + + assert consumer.is_active is False + + def test_wake_on_error(self): + should_continue = threading.Event() + + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + bidi_rpc.add_done_callback.side_effect = ( + lambda _: should_continue.set()) + + consumer = bidi.BackgroundConsumer(bidi_rpc, mock.sentinel.on_response) + + # Start the consumer paused, which should immediately put it into wait + # state. + consumer.pause() + consumer.start() + + # Wait for add_done_callback to be called + should_continue.wait() + bidi_rpc.add_done_callback.assert_called_once_with( + consumer._on_call_done) + + # The consumer should now be blocked on waiting to be unpaused. + assert consumer.is_active + assert consumer.is_paused + + # Trigger the done callback, it should unpause the consumer and cause + # it to exit. + bidi_rpc.is_active = False + consumer._on_call_done(bidi_rpc) + + # It may take a few cycles for the thread to exit. + while consumer.is_active: + pass + + def test_consumer_expected_error(self, caplog): + caplog.set_level(logging.DEBUG) + + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + bidi_rpc.recv.side_effect = exceptions.ServiceUnavailable('Gone away') + + on_response = mock.Mock(spec=['__call__']) + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + + # Wait for the consumer's thread to exit. 
+ while consumer.is_active: + pass + + on_response.assert_not_called() + bidi_rpc.recv.assert_called_once() + assert 'caught error' in caplog.text + + def test_consumer_unexpected_error(self, caplog): + caplog.set_level(logging.DEBUG) + + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + bidi_rpc.recv.side_effect = ValueError() + + on_response = mock.Mock(spec=['__call__']) + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + + # Wait for the consumer's thread to exit. + while consumer.is_active: + pass + + on_response.assert_not_called() + bidi_rpc.recv.assert_called_once() + assert 'caught unexpected exception' in caplog.text + + def test_double_stop(self, caplog): + caplog.set_level(logging.DEBUG) + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + on_response = mock.Mock(spec=['__call__']) + + def close_side_effect(): + bidi_rpc.is_active = False + + bidi_rpc.close.side_effect = close_side_effect + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + assert consumer.is_active is True + + consumer.stop() + assert consumer.is_active is False + + # calling stop twice should not result in an error. 
+ consumer.stop() From 28eda1403ecf13931a978c48e29d0d8d9e237085 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 17 Apr 2018 14:28:15 -0700 Subject: [PATCH 0246/1197] Add subscriber dispatcher (#5201) --- .../cloud/pubsub_v1/subscriber/_consumer.py | 6 +- .../subscriber/_protocol/dispatcher.py | 100 ++++++++++++++++++ .../helper_threads.py} | 2 +- .../subscriber/_protocol/requests.py | 46 ++++++++ .../pubsub_v1/subscriber/policy/thread.py | 6 +- .../cloud/pubsub_v1/subscriber/subscriber.py | 83 +++++++++++++++ .../pubsub_v1/subscriber/test_consumer.py | 4 +- .../pubsub_v1/subscriber/test_dispatcher.py | 98 +++++++++++++++++ .../subscriber/test_helper_threads.py | 22 ++-- 9 files changed, 347 insertions(+), 20 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py rename packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/{_helper_threads.py => _protocol/helper_threads.py} (98%) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py index 63e41dbe8ef0..8abaedfe04ac 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py @@ -126,7 +126,7 @@ from six.moves import queue -from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber._protocol import helper_threads _LOGGER = logging.getLogger(__name__) @@ -212,7 +212,7 @@ def __iter__(self): # A call to consumer.close() signaled us to stop generating # requests. 
- if item == _helper_threads.STOP: + if item == helper_threads.STOP: _LOGGER.debug('Cleanly exiting request generator.') return @@ -453,7 +453,7 @@ def _stop_no_join(self): self._stopped.set() _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) # Signal the request generator RPC to exit cleanly. - self.send_request(_helper_threads.STOP) + self.send_request(helper_threads.STOP) thread = self._consumer_thread self._consumer_thread = None return thread diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py new file mode 100644 index 000000000000..3cd9500864b9 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -0,0 +1,100 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import collections +import logging +import threading + +from google.cloud.pubsub_v1.subscriber._protocol import helper_threads +from google.cloud.pubsub_v1.subscriber._protocol import requests + + +_LOGGER = logging.getLogger(__name__) +_CALLBACK_WORKER_NAME = 'Thread-CallbackRequestDispatcher' + + +class Dispatcher(object): + def __init__(self, queue, subscriber): + self._queue = queue + self._subscriber = subscriber + self._thread = None + + def dispatch_callback(self, items): + """Map the callback requests to the appropriate gRPC requests. + + Args: + items (Sequence[Any]): Mixed list of request namedtuples (ack, + drop, lease, modify_ack_deadline, nack); they are batched by + type and forwarded to the subscriber. + + Note: + Does nothing when the subscriber is no longer active. Lease + requests are deliberately handled before ack and drop. + """ + if not self._subscriber.is_active: + return + + batched_commands = collections.defaultdict(list) + + for item in items: + batched_commands[item.__class__].append(item) + + _LOGGER.debug('Handling %d batched requests', len(items)) + + if batched_commands[requests.LeaseRequest]: + self._subscriber.lease(batched_commands.pop(requests.LeaseRequest)) + if batched_commands[requests.ModAckRequest]: + self._subscriber.modify_ack_deadline( + batched_commands.pop(requests.ModAckRequest)) + # Note: Drop and ack *must* be after lease. It's possible to get both + # the lease and the ack/drop request in the same batch. + if batched_commands[requests.AckRequest]: + self._subscriber.ack(batched_commands.pop(requests.AckRequest)) + if batched_commands[requests.NackRequest]: + self._subscriber.nack(batched_commands.pop(requests.NackRequest)) + if batched_commands[requests.DropRequest]: + self._subscriber.drop(batched_commands.pop(requests.DropRequest)) + + def start(self): + """Start a thread to dispatch requests queued up by callbacks. + Spawns a thread to run :meth:`dispatch_callback`.
+ """ + if self._thread is not None: + raise ValueError('Dispatcher is already running.') + + worker = helper_threads.QueueCallbackWorker( + self._queue, + self.dispatch_callback, + max_items=self._subscriber.flow_control.max_request_batch_size, + max_latency=self._subscriber.flow_control.max_request_batch_latency + ) + # Create and start the helper thread. + thread = threading.Thread( + name=_CALLBACK_WORKER_NAME, + target=worker, + ) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._thread = thread + + def stop(self): + if self._thread is not None: + # Signal the worker to stop by queueing a "poison pill" + self._queue.put(helper_threads.STOP) + self._thread.join() + + self._thread = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py similarity index 98% rename from packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index b191eec90256..ac38101bd96f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -65,7 +65,7 @@ def _get_many(queue_, max_items=None, max_latency=0): class QueueCallbackWorker(object): - """A helper that executes a callback for every item in the queue. + """A helper that executes a callback for items sent in a queue. Calls a blocking ``get()`` on the ``queue`` until it encounters :attr:`STOP`. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py new file mode 100644 index 000000000000..6e042e080648 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -0,0 +1,46 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Request namedtuples used by the subscriber protocol.""" + +from __future__ import absolute_import, division + +import collections + +# Namedtuples for management requests. Used by the Message class to communicate +# items of work back to the policy.
+AckRequest = collections.namedtuple( + 'AckRequest', + ['ack_id', 'byte_size', 'time_to_ack'], +) + +DropRequest = collections.namedtuple( + 'DropRequest', + ['ack_id', 'byte_size'], +) + +LeaseRequest = collections.namedtuple( + 'LeaseRequest', + ['ack_id', 'byte_size'], +) + +ModAckRequest = collections.namedtuple( + 'ModAckRequest', + ['ack_id', 'seconds'], +) + +NackRequest = collections.namedtuple( + 'NackRequest', + ['ack_id', 'byte_size'], +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py index 9a16dbe4efc6..78874f32aaf2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py @@ -24,7 +24,7 @@ from six.moves import queue as queue_mod from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber.futures import Future from google.cloud.pubsub_v1.subscriber.policy import base from google.cloud.pubsub_v1.subscriber.message import Message @@ -158,7 +158,7 @@ def close(self): raise ValueError('This policy has not been opened yet.') # Stop consuming messages. - self._request_queue.put(_helper_threads.STOP) + self._request_queue.put(helper_threads.STOP) self._dispatch_thread.join() # Wait until stopped. self._dispatch_thread = None self._consumer.stop_consuming() @@ -186,7 +186,7 @@ def _start_dispatch(self): "dispatch thread" member on the current policy. 
""" _LOGGER.debug('Starting callback requests worker.') - dispatch_worker = _helper_threads.QueueCallbackWorker( + dispatch_worker = helper_threads.QueueCallbackWorker( self._request_queue, self.dispatch_callback, max_items=self.flow_control.max_request_batch_size, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py new file mode 100644 index 000000000000..c3177f71d9e7 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py @@ -0,0 +1,83 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.pubsub_v1 import types + + +class Subscriber(object): + """A consumer class based on :class:`threading.Thread`. + + This consumer handles the connection to the Pub/Sub service and all of + the concurrency needs. + + Args: + client (~.pubsub_v1.subscriber.client): The subscriber client used + to create this instance. + subscription (str): The name of the subscription. The canonical + format for this is + ``projects/{project}/subscriptions/{subscription}``. + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow + control settings. + executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A + ThreadPoolExecutor instance, or anything duck-type compatible + with it. + queue (~queue.Queue): (Optional.) 
A Queue instance, appropriate + for crossing the concurrency boundary implemented by + ``executor``. + """ + + def __init__(self, client, subscription, flow_control=types.FlowControl(), + scheduler_cls=None): + raise NotImplementedError + + @property + def is_active(self): + raise NotImplementedError + + @property + def flow_control(self): + raise NotImplementedError + + @property + def future(self): + raise NotImplementedError + + # + # User-facing subscriber management methods. + # + + def open(self, callback): + raise NotImplementedError + + def close(self): + raise NotImplementedError + + # + # Message management methods + # + + def ack(self, items): + raise NotImplementedError + + def drop(self, items): + raise NotImplementedError + + def lease(self, items): + raise NotImplementedError + + def modify_ack_deadline(self, items): + raise NotImplementedError + + def nack(self, items): + raise NotImplementedError diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py index daac8352e5d9..572014dbfc4f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py @@ -22,8 +22,8 @@ from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber.policy import base -from google.cloud.pubsub_v1.subscriber import _helper_threads class Test_RequestQueueGenerator(object): @@ -93,7 +93,7 @@ def test_exit_when_inactive_empty(self): def test_exit_with_stop(self): q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = [_helper_threads.STOP, queue.Empty()] + q.get.side_effect = [helper_threads.STOP, queue.Empty()] rpc = 
mock.create_autospec(grpc.RpcContext, instance=True) rpc.is_active.return_value = True diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py new file mode 100644 index 000000000000..9367a6b1f0e7 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -0,0 +1,98 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import threading + +from google.cloud.pubsub_v1.subscriber._protocol import dispatcher +from google.cloud.pubsub_v1.subscriber._protocol import helper_threads +from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber import subscriber + +import mock +from six.moves import queue +import pytest + + +@pytest.mark.parametrize('item,method', [ + (requests.AckRequest(0, 0, 0), 'ack'), + (requests.DropRequest(0, 0), 'drop'), + (requests.LeaseRequest(0, 0), 'lease'), + (requests.ModAckRequest(0, 0), 'modify_ack_deadline'), + (requests.NackRequest(0, 0), 'nack') +]) +def test_dispatch_callback(item, method): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + dispatcher_ = dispatcher.Dispatcher(mock.sentinel.queue, subscriber_) + + items = [item] + dispatcher_.dispatch_callback(items) + + getattr(subscriber_, method).assert_called_once_with([item]) + + +def test_dispatch_callback_inactive(): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + subscriber_.is_active = False + dispatcher_ = dispatcher.Dispatcher(mock.sentinel.queue, subscriber_) + + dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0)]) + + subscriber_.ack.assert_not_called() + + +@mock.patch('threading.Thread', autospec=True) +def test_start(thread): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + dispatcher_ = dispatcher.Dispatcher(mock.sentinel.queue, subscriber_) + + dispatcher_.start() + + thread.assert_called_once_with( + name=dispatcher._CALLBACK_WORKER_NAME, target=mock.ANY) + + thread.return_value.start.assert_called_once() + + assert dispatcher_._thread is not None + + +@mock.patch('threading.Thread', autospec=True) +def test_start_already_started(thread): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + dispatcher_ = dispatcher.Dispatcher(mock.sentinel.queue, subscriber_) + dispatcher_._thread = mock.sentinel.thread + + with 
pytest.raises(ValueError): + dispatcher_.start() + + thread.assert_not_called() + + +def test_stop(): + queue_ = queue.Queue() + dispatcher_ = dispatcher.Dispatcher(queue_, mock.sentinel.subscriber) + thread = mock.create_autospec(threading.Thread, instance=True) + dispatcher_._thread = thread + + dispatcher_.stop() + + assert queue_.get() is helper_threads.STOP + thread.join.assert_called_once() + assert dispatcher_._thread is None + + +def test_stop_no_join(): + dispatcher_ = dispatcher.Dispatcher( + mock.sentinel.queue, mock.sentinel.subscriber) + + dispatcher_.stop() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 8bdafeed5b6d..507e8292f7c8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -15,20 +15,20 @@ import mock from six.moves import queue -from google.cloud.pubsub_v1.subscriber import _helper_threads +from google.cloud.pubsub_v1.subscriber._protocol import helper_threads def test_queue_callback_worker(): queue_ = queue.Queue() callback = mock.Mock(spec=()) - qct = _helper_threads.QueueCallbackWorker(queue_, callback) + qct = helper_threads.QueueCallbackWorker(queue_, callback) # Set up an appropriate mock for the queue, and call the queue callback # thread. with mock.patch.object(queue.Queue, 'get') as get: get.side_effect = ( mock.sentinel.A, - _helper_threads.STOP, + helper_threads.STOP, queue.Empty()) qct() @@ -40,14 +40,14 @@ def test_queue_callback_worker(): def test_queue_callback_worker_stop_with_extra_items(): queue_ = queue.Queue() callback = mock.Mock(spec=()) - qct = _helper_threads.QueueCallbackWorker(queue_, callback) + qct = helper_threads.QueueCallbackWorker(queue_, callback) # Set up an appropriate mock for the queue, and call the queue callback # thread. 
with mock.patch.object(queue.Queue, 'get') as get: get.side_effect = ( mock.sentinel.A, - _helper_threads.STOP, + helper_threads.STOP, mock.sentinel.B, queue.Empty()) qct() @@ -60,7 +60,7 @@ def test_queue_callback_worker_stop_with_extra_items(): def test_queue_callback_worker_get_many(): queue_ = queue.Queue() callback = mock.Mock(spec=()) - qct = _helper_threads.QueueCallbackWorker(queue_, callback) + qct = helper_threads.QueueCallbackWorker(queue_, callback) # Set up an appropriate mock for the queue, and call the queue callback # thread. @@ -69,7 +69,7 @@ def test_queue_callback_worker_get_many(): mock.sentinel.A, queue.Empty(), mock.sentinel.B, - _helper_threads.STOP, + helper_threads.STOP, queue.Empty()) qct() @@ -83,7 +83,7 @@ def test_queue_callback_worker_get_many(): def test_queue_callback_worker_max_items(): queue_ = queue.Queue() callback = mock.Mock(spec=()) - qct = _helper_threads.QueueCallbackWorker(queue_, callback, max_items=1) + qct = helper_threads.QueueCallbackWorker(queue_, callback, max_items=1) # Set up an appropriate mock for the queue, and call the queue callback # thread. @@ -91,7 +91,7 @@ def test_queue_callback_worker_max_items(): get.side_effect = ( mock.sentinel.A, mock.sentinel.B, - _helper_threads.STOP, + helper_threads.STOP, queue.Empty()) qct() @@ -105,14 +105,14 @@ def test_queue_callback_worker_max_items(): def test_queue_callback_worker_exception(): queue_ = queue.Queue() callback = mock.Mock(spec=(), side_effect=(Exception,)) - qct = _helper_threads.QueueCallbackWorker(queue_, callback) + qct = helper_threads.QueueCallbackWorker(queue_, callback) # Set up an appropriate mock for the queue, and call the queue callback # thread. 
with mock.patch.object(queue.Queue, 'get') as get: get.side_effect = ( mock.sentinel.A, - _helper_threads.STOP, + helper_threads.STOP, queue.Empty()) qct() From 0cf69ff15e66379ff7a290612be22d7595449381 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 19 Apr 2018 14:22:25 -0700 Subject: [PATCH 0247/1197] Add pubsub subscriber leaser (#5210) This is another helper thread that maintains the leases for any messages that the current subscriber is working on. The majority of this code (notably, `maintain_leases`) is copied from `pubsub_v1.subscriber.policy.base.BasePolicy.maintain_leases`. --- .../pubsub_v1/subscriber/_protocol/leaser.py | 187 ++++++++++++++ .../cloud/pubsub_v1/subscriber/subscriber.py | 4 + .../unit/pubsub_v1/subscriber/test_leaser.py | 235 ++++++++++++++++++ 3 files changed, 426 insertions(+) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py new file mode 100644 index 000000000000..05157b5e8db1 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -0,0 +1,187 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +import collections +import copy +import logging +import random +import threading +import time + +import six + +from google.cloud.pubsub_v1.subscriber._protocol import requests + + +_LOGGER = logging.getLogger(__name__) +_LEASE_WORKER_NAME = 'Thread-LeaseMaintainer' + + +_LeasedMessage = collections.namedtuple( + '_LeasedMessage', + ['added_time', 'size']) + + +class Leaser(object): + def __init__(self, subscriber): + self._thread = None + self._subscriber = subscriber + + self._leased_messages = {} + """dict[str, float]: A mapping of ack IDs to the local time when the + ack ID was initially leased in seconds since the epoch.""" + self._bytes = 0 + """int: The total number of bytes consumed by leased messages.""" + + self._stop_event = threading.Event() + + @property + def message_count(self): + """int: The number of leased messages.""" + return len(self._leased_messages) + + @property + def ack_ids(self): + """Sequence[str]: The ack IDs of all leased messages.""" + return self._leased_messages.keys() + + @property + def bytes(self): + """int: The total size, in bytes, of all leased messages.""" + return self._bytes + + def add(self, items): + """Add messages to be managed by the leaser.""" + for item in items: + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. + if item.ack_id not in self._leased_messages: + self._leased_messages[item.ack_id] = _LeasedMessage( + added_time=time.time(), + size=item.byte_size) + self._bytes += item.byte_size + else: + _LOGGER.debug( + 'Message %s is already lease managed', item.ack_id) + + def remove(self, items): + """Remove messages from lease management.""" + # Remove the ack ID from lease management, and decrement the + # byte counter. 
+ for item in items: + if self._leased_messages.pop(item.ack_id, None) is not None: + self._bytes -= item.byte_size + else: + _LOGGER.debug('Item %s was not managed.', item.ack_id) + + if self._bytes < 0: + _LOGGER.debug( + 'Bytes was unexpectedly negative: %d', self._bytes) + self._bytes = 0 + + def maintain_leases(self): + """Maintain all of the leases being managed by the subscriber. + + This method modifies the ack deadline for all of the managed + ack IDs, then waits for most of that time (but with jitter), and + repeats. + """ + while self._subscriber.is_active and not self._stop_event.is_set(): + # Determine the appropriate duration for the lease. This is + # based off of how long previous messages have taken to ack, with + # a sensible default and within the ranges allowed by Pub/Sub. + p99 = self._subscriber.ack_histogram.percentile(99) + _LOGGER.debug('The current p99 value is %d seconds.', p99) + + # Make a copy of the leased messages. This is needed because it's + # possible for another thread to modify the dictionary while + # we're iterating over it. + leased_messages = copy.copy(self._leased_messages) + + # Drop any leases that are well beyond max lease time. This + # ensures that in the event of a badly behaving actor, we can + # drop messages and allow Pub/Sub to resend them. + cutoff = ( + time.time() - + self._subscriber.flow_control.max_lease_duration) + to_drop = [ + requests.DropRequest(ack_id, item.size) + for ack_id, item + in six.iteritems(leased_messages) + if item.added_time < cutoff] + + if to_drop: + _LOGGER.warning( + 'Dropping %s items because they were leased too long.', + len(to_drop)) + self._subscriber.drop(to_drop) + + # Remove dropped items from our copy of the leased messages (they + # have already been removed from the real one by + # self._subscriber.drop(), which calls self.remove()). + for item in to_drop: + leased_messages.pop(item.ack_id) + + # Create a streaming pull request. 
+ # We do not actually call `modify_ack_deadline` over and over + # because it is more efficient to make a single request. + ack_ids = leased_messages.keys() + if ack_ids: + _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) + + # NOTE: This may not work as expected if ``consumer.active`` + # has changed since we checked it. An implementation + # without any sort of race condition would require a + # way for ``send_request`` to fail when the consumer + # is inactive. + self._subscriber.modify_ack_deadline([ + requests.ModAckRequest(ack_id, p99) for ack_id in ack_ids]) + + # Now wait an appropriate period of time and do this again. + # + # We determine the appropriate period of time based on a random + # period between 0 seconds and 90% of the lease. This use of + # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # where there are many clients. + snooze = random.uniform(0.0, p99 * 0.9) + _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) + time.sleep(snooze) + + _LOGGER.info('%s exiting.', _LEASE_WORKER_NAME) + + def start(self): + if self._thread is not None: + raise ValueError('Leaser is already running.') + + # Create and start the helper thread. + self._stop_event.clear() + thread = threading.Thread( + name=_LEASE_WORKER_NAME, + target=self.maintain_leases) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._thread = thread + + def stop(self): + self._stop_event.set() + + if self._thread is not None: + # The thread should automatically exit when the consumer is + # inactive. 
+ self._thread.join() + + self._thread = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py index c3177f71d9e7..f1bc96808e60 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py @@ -49,6 +49,10 @@ def is_active(self): def flow_control(self): raise NotImplementedError + @property + def ack_histogram(self): + raise NotImplementedError + @property def future(self): raise NotImplementedError diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py new file mode 100644 index 000000000000..571e56f6b61a --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -0,0 +1,235 @@ +# Copyright 2017, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import threading + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import _histogram +from google.cloud.pubsub_v1.subscriber import subscriber +from google.cloud.pubsub_v1.subscriber._protocol import leaser +from google.cloud.pubsub_v1.subscriber._protocol import requests + +import mock +import pytest + + +def test_add_and_remove(): + leaser_ = leaser.Leaser(mock.sentinel.subscriber) + + leaser_.add([ + requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + leaser_.add([ + requests.LeaseRequest(ack_id='ack2', byte_size=25)]) + + assert leaser_.message_count == 2 + assert set(leaser_.ack_ids) == set(['ack1', 'ack2']) + assert leaser_.bytes == 75 + + leaser_.remove([ + requests.DropRequest(ack_id='ack1', byte_size=50)]) + + assert leaser_.message_count == 1 + assert set(leaser_.ack_ids) == set(['ack2']) + assert leaser_.bytes == 25 + + +def test_add_already_managed(caplog): + caplog.set_level(logging.DEBUG) + + leaser_ = leaser.Leaser(mock.sentinel.subscriber) + + leaser_.add([ + requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + leaser_.add([ + requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + + assert 'already lease managed' in caplog.text + + +def test_remove_not_managed(caplog): + caplog.set_level(logging.DEBUG) + + leaser_ = leaser.Leaser(mock.sentinel.subscriber) + + leaser_.remove([ + requests.DropRequest(ack_id='ack1', byte_size=50)]) + + assert 'not managed' in caplog.text + + +def test_remove_negative_bytes(caplog): + caplog.set_level(logging.DEBUG) + + leaser_ = leaser.Leaser(mock.sentinel.subscriber) + + leaser_.add([ + requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + leaser_.remove([ + requests.DropRequest(ack_id='ack1', byte_size=75)]) + + assert leaser_.bytes == 0 + assert 'unexpectedly negative' in caplog.text + + +def create_subscriber(flow_control=types.FlowControl()): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + subscriber_.is_active = True + 
subscriber_.flow_control = flow_control + subscriber_.ack_histogram = _histogram.Histogram() + return subscriber_ + + +def test_maintain_leases_inactive(caplog): + caplog.set_level(logging.INFO) + subscriber_ = create_subscriber() + subscriber_.is_active = False + + leaser_ = leaser.Leaser(subscriber_) + + leaser_.maintain_leases() + + assert 'exiting' in caplog.text + + +def test_maintain_leases_stopped(caplog): + caplog.set_level(logging.INFO) + subscriber_ = create_subscriber() + + leaser_ = leaser.Leaser(subscriber_) + leaser_.stop() + + leaser_.maintain_leases() + + assert 'exiting' in caplog.text + + +def make_sleep_mark_subscriber_as_inactive(sleep, subscriber): + # Make sleep mark the subscriber as inactive so that maintain_leases + # exits at the end of the first run. + def trigger_inactive(seconds): + assert 0 < seconds < 10 + subscriber.is_active = False + sleep.side_effect = trigger_inactive + + +@mock.patch('time.sleep', autospec=True) +def test_maintain_leases_ack_ids(sleep): + subscriber_ = create_subscriber() + make_sleep_mark_subscriber_as_inactive(sleep, subscriber_) + leaser_ = leaser.Leaser(subscriber_) + leaser_.add([requests.LeaseRequest(ack_id='my ack id', byte_size=50)]) + + leaser_.maintain_leases() + + subscriber_.modify_ack_deadline.assert_called_once_with([ + requests.ModAckRequest( + ack_id='my ack id', + seconds=10, + ) + ]) + sleep.assert_called() + + +@mock.patch('time.sleep', autospec=True) +def test_maintain_leases_no_ack_ids(sleep): + subscriber_ = create_subscriber() + make_sleep_mark_subscriber_as_inactive(sleep, subscriber_) + leaser_ = leaser.Leaser(subscriber_) + + leaser_.maintain_leases() + + subscriber_.modify_ack_deadline.assert_not_called() + sleep.assert_called() + + +@mock.patch('time.time', autospec=True) +@mock.patch('time.sleep', autospec=True) +def test_maintain_leases_outdated_items(sleep, time): + subscriber_ = create_subscriber() + make_sleep_mark_subscriber_as_inactive(sleep, subscriber_) + leaser_ = 
leaser.Leaser(subscriber_) + + # Add these items at the beginning of the timeline + time.return_value = 0 + leaser_.add([ + requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + + # Add another item at towards end of the timeline + time.return_value = subscriber_.flow_control.max_lease_duration - 1 + leaser_.add([ + requests.LeaseRequest(ack_id='ack2', byte_size=50)]) + + # Now make sure time reports that we are at the end of our timeline. + time.return_value = subscriber_.flow_control.max_lease_duration + 1 + + leaser_.maintain_leases() + + # Only ack2 should be renewed. ack1 should've been dropped + subscriber_.modify_ack_deadline.assert_called_once_with([ + requests.ModAckRequest( + ack_id='ack2', + seconds=10, + ) + ]) + subscriber_.drop.assert_called_once_with([ + requests.DropRequest(ack_id='ack1', byte_size=50) + ]) + sleep.assert_called() + + +@mock.patch('threading.Thread', autospec=True) +def test_start(thread): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + leaser_ = leaser.Leaser(subscriber_) + + leaser_.start() + + thread.assert_called_once_with( + name=leaser._LEASE_WORKER_NAME, target=leaser_.maintain_leases) + + thread.return_value.start.assert_called_once() + + assert leaser_._thread is not None + + +@mock.patch('threading.Thread', autospec=True) +def test_start_already_started(thread): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + leaser_ = leaser.Leaser(subscriber_) + leaser_._thread = mock.sentinel.thread + + with pytest.raises(ValueError): + leaser_.start() + + thread.assert_not_called() + + +def test_stop(): + subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) + leaser_ = leaser.Leaser(subscriber_) + thread = mock.create_autospec(threading.Thread, instance=True) + leaser_._thread = thread + + leaser_.stop() + + assert leaser_._stop_event.is_set() + thread.join.assert_called_once() + assert leaser_._thread is None + + +def test_stop_no_join(): + leaser_ = 
leaser.Leaser(mock.sentinel.subscriber) + + leaser_.stop() From 3904366b5bba76cd126c4bb7da6e1c98bfc7033a Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 24 Apr 2018 09:25:04 -0700 Subject: [PATCH 0248/1197] Rename histogram module (#5229) --- .../{_histogram.py => _protocol/histogram.py} | 0 .../cloud/pubsub_v1/subscriber/policy/base.py | 4 ++-- .../pubsub_v1/subscriber/test_histogram.py | 18 +++++++++--------- .../unit/pubsub_v1/subscriber/test_leaser.py | 4 ++-- 4 files changed, 13 insertions(+), 13 deletions(-) rename packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/{_histogram.py => _protocol/histogram.py} (100%) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_histogram.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 48ef35e7e113..82122cea83d8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -28,7 +28,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _consumer -from google.cloud.pubsub_v1.subscriber import _histogram +from google.cloud.pubsub_v1.subscriber._protocol import histogram _LOGGER = logging.getLogger(__name__) @@ -114,7 +114,7 @@ def __init__(self, client, subscription, self._last_histogram_size = 0 self._future = None self.flow_control = flow_control - self.histogram = _histogram.Histogram(data=histogram_data) + self.histogram = histogram.Histogram(data=histogram_data) """.Histogram: the histogram tracking ack 
latency.""" self.leased_messages = {} """dict[str, float]: A mapping of ack IDs to the local time when the diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py index e3c0e55dbaaf..d3c15cdcee9c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -12,18 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud.pubsub_v1.subscriber import _histogram +from google.cloud.pubsub_v1.subscriber._protocol import histogram def test_init(): data = {} - histo = _histogram.Histogram(data=data) + histo = histogram.Histogram(data=data) assert histo._data is data assert len(histo) == 0 def test_contains(): - histo = _histogram.Histogram() + histo = histogram.Histogram() histo.add(10) histo.add(20) assert 10 in histo @@ -32,7 +32,7 @@ def test_contains(): def test_max(): - histo = _histogram.Histogram() + histo = histogram.Histogram() assert histo.max == 600 histo.add(120) assert histo.max == 120 @@ -43,7 +43,7 @@ def test_max(): def test_min(): - histo = _histogram.Histogram() + histo = histogram.Histogram() assert histo.min == 10 histo.add(60) assert histo.min == 60 @@ -54,7 +54,7 @@ def test_min(): def test_add(): - histo = _histogram.Histogram() + histo = histogram.Histogram() histo.add(60) assert histo._data[60] == 1 histo.add(60) @@ -62,21 +62,21 @@ def test_add(): def test_add_lower_limit(): - histo = _histogram.Histogram() + histo = histogram.Histogram() histo.add(5) assert 5 not in histo assert 10 in histo def test_add_upper_limit(): - histo = _histogram.Histogram() + histo = histogram.Histogram() histo.add(12000) assert 12000 not in histo assert 600 in histo def test_percentile(): - histo = _histogram.Histogram() + histo = histogram.Histogram() 
[histo.add(i) for i in range(101, 201)] assert histo.percentile(100) == 200 assert histo.percentile(101) == 200 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 571e56f6b61a..c2cdde4bafd9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -16,8 +16,8 @@ import threading from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import _histogram from google.cloud.pubsub_v1.subscriber import subscriber +from google.cloud.pubsub_v1.subscriber._protocol import histogram from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -87,7 +87,7 @@ def create_subscriber(flow_control=types.FlowControl()): subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) subscriber_.is_active = True subscriber_.flow_control = flow_control - subscriber_.ack_histogram = _histogram.Histogram() + subscriber_.ack_histogram = histogram.Histogram() return subscriber_ From 2b07543ed9fa5cc4399746fd0d1ea7a7da439368 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 24 Apr 2018 12:26:33 -0700 Subject: [PATCH 0249/1197] Add scheduler (#5230) --- .../cloud/pubsub_v1/subscriber/scheduler.py | 122 ++++++++++++++++++ .../pubsub_v1/subscriber/test_scheduler.py | 53 ++++++++ 2 files changed, 175 insertions(+) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py new file mode 100644 index 000000000000..17e2c2f967ce --- /dev/null +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -0,0 +1,122 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Schedulers provide means to *schedule* callbacks asynchronously. + +These are used by the subscriber to call the user-provided callback to process +each message. +""" + +import abc +import concurrent.futures +import sys + +import six +from six.moves import queue + + +@six.add_metaclass(abc.ABCMeta) +class Scheduler(object): + """Abstract base class for schedulers. + + Schedulers are used to schedule callbacks asynchronously. + """ + + @property + @abc.abstractmethod + def queue(self): + """Queue: A concurrency-safe queue specific to the underlying + concurrency implementation. + + This queue is used to send messages *back* to the scheduling actor. + """ + raise NotImplementedError + + @abc.abstractmethod + def schedule(self, callback, *args, **kwargs): + """Schedule the callback to be called asynchronously. + + Args: + callback (Callable): The function to call. + args: Positional arguments passed to the function. + kwargs: Key-word arguments passed to the function. + + Returns: + None + """ + raise NotImplementedError + + @abc.abstractmethod + def shutdown(self): + """Shuts down the scheduler and immediately end all pending callbacks. 
+ """ + raise NotImplementedError + + +def _make_default_thread_pool_executor(): + # Python 2.7 and 3.6+ have the thread_name_prefix argument, which is useful + # for debugging. + executor_kwargs = {} + if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): + executor_kwargs['thread_name_prefix'] = ( + 'ThreadPoolExecutor-ThreadScheduler') + return concurrent.futures.ThreadPoolExecutor( + max_workers=10, + **executor_kwargs + ) + + +class ThreadScheduler(object): + """A thread pool-based scheduler. + + This scheduler is useful in typical I/O-bound message processing. + + Args: + executor(concurrent.futures.ThreadPoolExecutor): An optional executor + to use. If not specified, a default one will be created. + """ + def __init__(self, executor=None): + self._queue = queue.Queue() + if executor is None: + self._executor = _make_default_thread_pool_executor() + else: + self._executor = executor + + @property + def queue(self): + """Queue: A thread-safe queue used for communication between callbacks + and the scheduling thread.""" + return self._queue + + def schedule(self, callback, *args, **kwargs): + """Schedule the callback to be called asynchronously in a thread pool. + + Args: + callback (Callable): The function to call. + args: Positional arguments passed to the function. + kwargs: Key-word arguments passed to the function. + + Returns: + None + """ + self._executor.submit(callback, *args, **kwargs) + + def shutdown(self): + """Shuts down the scheduler and immediately end all pending callbacks. + """ + # Drop all pending item from the executor. Without this, the executor + # will block until all pending items are complete, which is + # undesirable. 
+ self._executor._work_queue.queue.clear() + self._executor.shutdown() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py new file mode 100644 index 000000000000..ddd40637e15e --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -0,0 +1,53 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import concurrent.futures +import threading + +import mock +from six.moves import queue + +from google.cloud.pubsub_v1.subscriber import scheduler + + +def test_constructor_defaults(): + scheduler_ = scheduler.ThreadScheduler() + + assert isinstance(scheduler_.queue, queue.Queue) + assert isinstance(scheduler_._executor, concurrent.futures.Executor) + + +def test_constructor_options(): + scheduler_ = scheduler.ThreadScheduler( + executor=mock.sentinel.executor) + + assert scheduler_._executor == mock.sentinel.executor + + +def test_schedule(): + called_with = [] + called = threading.Event() + + def callback(*args, **kwargs): + called_with.append((args, kwargs)) + called.set() + + scheduler_ = scheduler.ThreadScheduler() + + scheduler_.schedule(callback, 'arg1', kwarg1='meep') + + called.wait() + scheduler_.shutdown() + + assert called_with == [(('arg1',), {'kwarg1': 'meep'})] From c0bcb9d5238400180fc07591aa33d59679926d80 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 26 
Apr 2018 15:32:41 -0700 Subject: [PATCH 0250/1197] Add streaming pull manager and integrate new subscriber logic (#5237) Adds a new method, `subscribe_experimental`, to the pubsub subscriber client to use this new functionality. Leaves the older subscription implementation in place for now (will be removed by another PR after this gets more testing.) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 27 +- .../subscriber/_protocol/dispatcher.py | 137 ++++-- .../pubsub_v1/subscriber/_protocol/leaser.py | 57 +-- .../_protocol/streaming_pull_manager.py | 382 +++++++++++++++++ .../cloud/pubsub_v1/subscriber/client.py | 85 +++- .../cloud/pubsub_v1/subscriber/futures.py | 33 ++ .../cloud/pubsub_v1/subscriber/message.py | 12 +- .../cloud/pubsub_v1/subscriber/policy/base.py | 32 +- .../cloud/pubsub_v1/subscriber/subscriber.py | 87 ---- packages/google-cloud-pubsub/tests/system.py | 53 +++ .../pubsub_v1/subscriber/test_dispatcher.py | 127 +++++- .../subscriber/test_futures_subscriber.py | 44 ++ .../unit/pubsub_v1/subscriber/test_leaser.py | 89 ++-- .../unit/pubsub_v1/subscriber/test_message.py | 22 +- .../subscriber/test_streaming_pull_manager.py | 396 ++++++++++++++++++ .../subscriber/test_subscriber_client.py | 15 + 16 files changed, 1338 insertions(+), 260 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index a40f039152a0..13e0c06cce80 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py 
@@ -408,6 +408,7 @@ def __init__(self, bidi_rpc, on_response): self._paused = False self._wake = threading.Condition() self._thread = None + self._operational_lock = threading.Lock() def _on_call_done(self, future): # Resume the thread if it's paused, this prevents blocking forever @@ -447,22 +448,26 @@ def _thread_main(self): def start(self): """Start the background thread and begin consuming the thread.""" - thread = threading.Thread( - name=_BIDIRECTIONAL_CONSUMER_NAME, - target=self._thread_main) - thread.daemon = True - thread.start() - self._thread = thread - _LOGGER.debug('Started helper thread %s', thread.name) + with self._operational_lock: + thread = threading.Thread( + name=_BIDIRECTIONAL_CONSUMER_NAME, + target=self._thread_main) + thread.daemon = True + thread.start() + self._thread = thread + _LOGGER.debug('Started helper thread %s', thread.name) def stop(self): """Stop consuming the stream and shutdown the background thread.""" - self._bidi_rpc.close() + with self._operational_lock: + self._bidi_rpc.close() - if self._thread is not None: - self._thread.join() + if self._thread is not None: + # Resume the thread to wake it up in case it is sleeping. 
+ self.resume() + self._thread.join() - self._thread = None + self._thread = None @property def is_active(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 3cd9500864b9..c70f8531a817 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -18,6 +18,7 @@ import logging import threading +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -27,10 +28,45 @@ class Dispatcher(object): - def __init__(self, queue, subscriber): + def __init__(self, manager, queue): + self._manager = manager self._queue = queue - self._subscriber = subscriber self._thread = None + self._operational_lock = threading.Lock() + + def start(self): + """Start a thread to dispatch requests queued up by callbacks. + Spawns a thread to run :meth:`dispatch_callback`. + """ + with self._operational_lock: + if self._thread is not None: + raise ValueError('Dispatcher is already running.') + + flow_control = self._manager.flow_control + worker = helper_threads.QueueCallbackWorker( + self._queue, + self.dispatch_callback, + max_items=flow_control.max_request_batch_size, + max_latency=flow_control.max_request_batch_latency + ) + # Create and start the helper thread. 
+ thread = threading.Thread( + name=_CALLBACK_WORKER_NAME, + target=worker, + ) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._thread = thread + + def stop(self): + with self._operational_lock: + if self._thread is not None: + # Signal the worker to stop by queueing a "poison pill" + self._queue.put(helper_threads.STOP) + self._thread.join() + + self._thread = None def dispatch_callback(self, items): """Map the callback request to the appropriate gRPC request. @@ -44,7 +80,7 @@ def dispatch_callback(self, items): ValueError: If ``action`` isn't one of the expected actions "ack", "drop", "lease", "modify_ack_deadline" or "nack". """ - if not self._subscriber.is_active: + if not self._manager.is_active: return batched_commands = collections.defaultdict(list) @@ -55,46 +91,79 @@ def dispatch_callback(self, items): _LOGGER.debug('Handling %d batched requests', len(items)) if batched_commands[requests.LeaseRequest]: - self._subscriber.lease(batched_commands.pop(requests.LeaseRequest)) + self.lease(batched_commands.pop(requests.LeaseRequest)) if batched_commands[requests.ModAckRequest]: - self._subscriber.modify_ack_deadline( + self.modify_ack_deadline( batched_commands.pop(requests.ModAckRequest)) # Note: Drop and ack *must* be after lease. It's possible to get both # the lease the and ack/drop request in the same batch. if batched_commands[requests.AckRequest]: - self._subscriber.ack(batched_commands.pop(requests.AckRequest)) + self.ack(batched_commands.pop(requests.AckRequest)) if batched_commands[requests.NackRequest]: - self._subscriber.nack(batched_commands.pop(requests.NackRequest)) + self.nack(batched_commands.pop(requests.NackRequest)) if batched_commands[requests.DropRequest]: - self._subscriber.drop(batched_commands.pop(requests.DropRequest)) + self.drop(batched_commands.pop(requests.DropRequest)) - def start(self): - """Start a thread to dispatch requests queued up by callbacks. 
- Spawns a thread to run :meth:`dispatch_callback`. + def ack(self, items): + """Acknowledge the given messages. + + Args: + items(Sequence[AckRequest]): The items to acknowledge. """ - if self._thread is not None: - raise ValueError('Dispatcher is already running.') - - worker = helper_threads.QueueCallbackWorker( - self._queue, - self.dispatch_callback, - max_items=self._subscriber.flow_control.max_request_batch_size, - max_latency=self._subscriber.flow_control.max_request_batch_latency - ) - # Create and start the helper thread. - thread = threading.Thread( - name=_CALLBACK_WORKER_NAME, - target=worker, + # If we got timing information, add it to the histogram. + for item in items: + time_to_ack = item.time_to_ack + if time_to_ack is not None: + self._manager.ack_histogram.add(time_to_ack) + + ack_ids = [item.ack_id for item in items] + request = types.StreamingPullRequest(ack_ids=ack_ids) + self._manager.send(request) + + # Remove the message from lease management. + self.drop(items) + + def drop(self, items): + """Remove the given messages from lease management. + + Args: + items(Sequence[DropRequest]): The items to drop. + """ + self._manager.leaser.remove(items) + self._manager.maybe_resume_consumer() + + def lease(self, items): + """Add the given messages to lease management. + + Args: + items(Sequence[LeaseRequest]): The items to lease. + """ + self._manager.leaser.add(items) + self._manager.maybe_pause_consumer() + + def modify_ack_deadline(self, items): + """Modify the ack deadline for the given messages. + + Args: + items(Sequence[ModAckRequest]): The items to modify. 
+ """ + ack_ids = [item.ack_id for item in items] + seconds = [item.seconds for item in items] + + request = types.StreamingPullRequest( + modify_deadline_ack_ids=ack_ids, + modify_deadline_seconds=seconds, ) - thread.daemon = True - thread.start() - _LOGGER.debug('Started helper thread %s', thread.name) - self._thread = thread + self._manager.send(request) - def stop(self): - if self._thread is not None: - # Signal the worker to stop by queueing a "poison pill" - self._queue.put(helper_threads.STOP) - self._thread.join() + def nack(self, items): + """Explicitly deny receipt of messages. - self._thread = None + Args: + items(Sequence[NackRequest]): The items to deny. + """ + self.modify_ack_deadline([ + requests.ModAckRequest(ack_id=item.ack_id, seconds=0) + for item in items]) + self.drop( + [requests.DropRequest(*item) for item in items]) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 05157b5e8db1..02e78577ff70 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -36,9 +36,10 @@ class Leaser(object): - def __init__(self, subscriber): + def __init__(self, manager): self._thread = None - self._subscriber = subscriber + self._operational_lock = threading.Lock() + self._manager = manager self._leased_messages = {} """dict[str, float]: A mapping of ack IDs to the local time when the @@ -93,17 +94,17 @@ def remove(self, items): self._bytes = 0 def maintain_leases(self): - """Maintain all of the leases being managed by the subscriber. + """Maintain all of the leases being managed. This method modifies the ack deadline for all of the managed ack IDs, then waits for most of that time (but with jitter), and repeats. 
""" - while self._subscriber.is_active and not self._stop_event.is_set(): + while self._manager.is_active and not self._stop_event.is_set(): # Determine the appropriate duration for the lease. This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. - p99 = self._subscriber.ack_histogram.percentile(99) + p99 = self._manager.ack_histogram.percentile(99) _LOGGER.debug('The current p99 value is %d seconds.', p99) # Make a copy of the leased messages. This is needed because it's @@ -116,7 +117,7 @@ def maintain_leases(self): # drop messages and allow Pub/Sub to resend them. cutoff = ( time.time() - - self._subscriber.flow_control.max_lease_duration) + self._manager.flow_control.max_lease_duration) to_drop = [ requests.DropRequest(ack_id, item.size) for ack_id, item @@ -127,11 +128,11 @@ def maintain_leases(self): _LOGGER.warning( 'Dropping %s items because they were leased too long.', len(to_drop)) - self._subscriber.drop(to_drop) + self._manager.dispatcher.drop(to_drop) # Remove dropped items from our copy of the leased messages (they # have already been removed from the real one by - # self._subscriber.drop(), which calls self.remove()). + # self._manager.drop(), which calls self.remove()). for item in to_drop: leased_messages.pop(item.ack_id) @@ -147,7 +148,7 @@ def maintain_leases(self): # without any sort of race condition would require a # way for ``send_request`` to fail when the consumer # is inactive. - self._subscriber.modify_ack_deadline([ + self._manager.dispatcher.modify_ack_deadline([ requests.ModAckRequest(ack_id, p99) for ack_id in ack_ids]) # Now wait an appropriate period of time and do this again. @@ -163,25 +164,27 @@ def maintain_leases(self): _LOGGER.info('%s exiting.', _LEASE_WORKER_NAME) def start(self): - if self._thread is not None: - raise ValueError('Leaser is already running.') - - # Create and start the helper thread. 
    def start(self):
        """Start the lease-maintenance helper thread.

        The operational lock serializes start/stop so concurrent callers
        cannot race to create two worker threads.

        Raises:
            ValueError: If the leaser is already running.
        """
        with self._operational_lock:
            if self._thread is not None:
                raise ValueError('Leaser is already running.')

            # Create and start the helper thread.
            self._stop_event.clear()
            thread = threading.Thread(
                name=_LEASE_WORKER_NAME,
                target=self.maintain_leases)
            # Daemonize so an abandoned leaser cannot keep the process alive.
            thread.daemon = True
            thread.start()
            _LOGGER.debug('Started helper thread %s', thread.name)
            self._thread = thread

    def stop(self):
        """Signal the helper thread to stop and wait for it to exit.

        Safe to call when the leaser was never started; idempotent under
        the operational lock.
        """
        with self._operational_lock:
            self._stop_event.set()

            if self._thread is not None:
                # The thread should automatically exit when the consumer is
                # inactive.
                self._thread.join()

            self._thread = None
# Copyright 2017, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Coordinator for a single streaming pull: owns the bidi RPC plus the
dispatcher, leaser, and consumer helper threads."""

from __future__ import division

import functools
import logging
import threading

from google.api_core import exceptions
import grpc

from google.cloud.pubsub_v1 import types
from google.cloud.pubsub_v1.subscriber._protocol import bidi
from google.cloud.pubsub_v1.subscriber._protocol import dispatcher
from google.cloud.pubsub_v1.subscriber._protocol import histogram
from google.cloud.pubsub_v1.subscriber._protocol import leaser
from google.cloud.pubsub_v1.subscriber._protocol import requests
import google.cloud.pubsub_v1.subscriber.message
import google.cloud.pubsub_v1.subscriber.scheduler

_LOGGER = logging.getLogger(__name__)
# Stream errors considered transient; the bidi RPC is transparently
# restarted when one of these occurs (see _should_recover).
_RETRYABLE_STREAM_ERRORS = (
    exceptions.DeadlineExceeded,
    exceptions.ServiceUnavailable,
    exceptions.InternalServerError,
    exceptions.Unknown,
    exceptions.GatewayTimeout
)


def _maybe_wrap_exception(exception):
    """Wraps a gRPC exception class, if needed."""
    if isinstance(exception, grpc.RpcError):
        return exceptions.from_grpc_error(exception)
    return exception


def _wrap_callback_errors(callback, message):
    """Wraps a user callback so that if an exception occurs the message is
    nacked.

    Args:
        callback (Callable[None, Message]): The user callback.
        message (~Message): The Pub/Sub message.
    """
    try:
        callback(message)
    except Exception:
        # Note: the likelihood of this failing is extremely low. This just adds
        # a message to a queue, so if this doesn't work the world is in an
        # unrecoverable state and this thread should just bail.
        _LOGGER.exception(
            'Top-level exception occurred in callback while processing a '
            'message')
        message.nack()


class StreamingPullManager(object):
    """The streaming pull manager coordinates pulling messages from Pub/Sub,
    leasing them, and scheduling them to be processed.

    Args:
        client (~.pubsub_v1.subscriber.client): The subscriber client used
            to create this instance.
        subscription (str): The name of the subscription. The canonical
            format for this is
            ``projects/{project}/subscriptions/{subscription}``.
        flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow
            control settings.
        scheduler (~google.cloud.pubsub_v1.scheduler.Scheduler): The scheduler
            to use to process messages. If not provided, a thread pool-based
            scheduler will be used.
    """

    def __init__(self, client, subscription, flow_control=types.FlowControl(),
                 scheduler=None):
        self._client = client
        self._subscription = subscription
        self._flow_control = flow_control
        self._ack_histogram = histogram.Histogram()
        # Histogram size at the last ack-deadline recomputation; used to make
        # the ``ack_deadline`` property "sticky" (see below).
        self._last_histogram_size = 0
        # Default ack deadline (seconds) until the histogram has enough data.
        self._ack_deadline = 10
        self._rpc = None
        self._callback = None
        # ``_closing`` serializes close(); ``_closed`` makes it idempotent.
        self._closing = threading.Lock()
        self._closed = False
        self._close_callbacks = []

        if scheduler is None:
            self._scheduler = (
                google.cloud.pubsub_v1.subscriber.scheduler.ThreadScheduler())
        else:
            self._scheduler = scheduler

        # The threads created in ``.open()``.
        self._dispatcher = None
        self._leaser = None
        self._consumer = None

    @property
    def is_active(self):
        """bool: True if this manager is actively streaming.

        Note that ``False`` does not indicate this is complete shut down,
        just that it stopped getting new messages.
        """
        return self._consumer is not None and self._consumer.is_active

    @property
    def flow_control(self):
        """google.cloud.pubsub_v1.types.FlowControl: The active flow control
        settings."""
        return self._flow_control

    @property
    def dispatcher(self):
        """google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher:
        The dispatcher helper.
        """
        return self._dispatcher

    @property
    def leaser(self):
        """google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser:
        The leaser helper.
        """
        return self._leaser

    @property
    def ack_histogram(self):
        """google.cloud.pubsub_v1.subscriber._protocol.histogram.Histogram:
        The histogram tracking time-to-acknowledge.
        """
        return self._ack_histogram

    @property
    def ack_deadline(self):
        """Return the current ack deadline based on historical time-to-ack.

        This method is "sticky". It will only perform the computations to
        check on the right ack deadline if the histogram has gained a
        significant amount of new information.

        Returns:
            int: The ack deadline.
        """
        # Recompute only once the histogram has grown by the smaller of
        # "doubled" or "+100 entries" since the last recomputation.
        target = min([
            self._last_histogram_size * 2,
            self._last_histogram_size + 100,
        ])
        if len(self.ack_histogram) > target:
            self._ack_deadline = self.ack_histogram.percentile(percent=99)
        return self._ack_deadline

    @property
    def load(self):
        """Return the current load.

        The load is represented as a float, where 1.0 represents having
        hit one of the flow control limits, and values between 0.0 and 1.0
        represent how close we are to them. (0.5 means we have exactly half
        of what the flow control setting allows, for example.)

        There are (currently) two flow control settings; this property
        computes how close the manager is to each of them, and returns
        whichever value is higher. (It does not matter that we have lots of
        running room on setting A if setting B is over.)

        Returns:
            float: The load value.
        """
        if self._leaser is None:
            return 0

        return max([
            self._leaser.message_count / self._flow_control.max_messages,
            self._leaser.bytes / self._flow_control.max_bytes,
        ])

    def add_close_callback(self, callback):
        """Schedules a callable when the manager closes.

        Args:
            callback (Callable): The method to call.
        """
        self._close_callbacks.append(callback)

    def maybe_pause_consumer(self):
        """Check the current load and pause the consumer if needed."""
        if self.load >= 1.0 and not self._consumer.is_paused:
            _LOGGER.debug(
                'Message backlog over load at %.2f, pausing.', self.load)
            self._consumer.pause()

    def maybe_resume_consumer(self):
        """Check the current load and resume the consumer if needed."""
        # If we have been paused by flow control, check and see if we are
        # back within our limits.
        #
        # In order to not thrash too much, require us to have passed below
        # the resume threshold (80% by default) of each flow control setting
        # before restarting.
        if not self._consumer.is_paused:
            return

        if self.load < self.flow_control.resume_threshold:
            self._consumer.resume()
        else:
            _LOGGER.debug('Did not resume, current load is %s', self.load)

    def send(self, request):
        """Queue a request to be sent to the RPC."""
        self._rpc.send(request)

    def open(self, callback):
        """Begin consuming messages.

        Args:
            callback (Callable[None, google.cloud.pubsub_v1.message.Messages]):
                A callback that will be called for each message received on the
                stream.

        Raises:
            ValueError: If the manager is already open, or was closed.
        """
        if self.is_active:
            raise ValueError('This manager is already open.')

        if self._closed:
            raise ValueError(
                'This manager has been closed and can not be re-used.')

        self._callback = functools.partial(_wrap_callback_errors, callback)

        # Start the thread to pass the requests.
        self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue)
        self._dispatcher.start()

        # Start consuming messages.
        self._rpc = bidi.ResumableBidiRpc(
            start_rpc=self._client.api.streaming_pull,
            initial_request=self._get_initial_request,
            should_recover=self._should_recover)
        self._rpc.add_done_callback(self._on_rpc_done)
        self._consumer = bidi.BackgroundConsumer(
            self._rpc, self._on_response)
        self._consumer.start()

        # Start the lease maintainer thread.
        self._leaser = leaser.Leaser(self)
        self._leaser.start()

    def close(self, reason=None):
        """Stop consuming messages and shutdown all helper threads.

        This method is idempotent. Additional calls will have no effect.

        Args:
            reason (Any): The reason to close this. If None, this is considered
                an "intentional" shutdown. This is passed to the callbacks
                specified via :meth:`add_close_callback`.
        """
        with self._closing:
            if self._closed:
                return

            # Stop consuming messages.
            if self.is_active:
                _LOGGER.debug('Stopping consumer.')
                self._consumer.stop()
            self._consumer = None

            # Shutdown all helper threads
            _LOGGER.debug('Stopping scheduler.')
            self._scheduler.shutdown()
            self._scheduler = None
            _LOGGER.debug('Stopping leaser.')
            self._leaser.stop()
            self._leaser = None
            _LOGGER.debug('Stopping dispatcher.')
            self._dispatcher.stop()
            self._dispatcher = None

            self._rpc = None
            self._closed = True
            _LOGGER.debug('Finished stopping manager.')

            for callback in self._close_callbacks:
                callback(self, reason)

    def _get_initial_request(self):
        """Return the initial request for the RPC.

        This defines the initial request that must always be sent to Pub/Sub
        immediately upon opening the subscription.

        Returns:
            google.cloud.pubsub_v1.types.StreamingPullRequest: A request
                suitable for being the first request on the stream (and not
                suitable for any other purpose).
        """
        # Any ack IDs that are under lease management need to have their
        # deadline extended immediately.
        lease_ids = self._leaser.ack_ids

        # Put the request together.
        request = types.StreamingPullRequest(
            modify_deadline_ack_ids=list(lease_ids),
            modify_deadline_seconds=[self.ack_deadline] * len(lease_ids),
            stream_ack_deadline_seconds=self.ack_histogram.percentile(99),
            subscription=self._subscription,
        )

        # Return the initial request.
        return request

    def _on_response(self, response):
        """Process all received Pub/Sub messages.

        For each message, send a modified acknowledgment request to the
        server. This prevents expiration of the message due to buffering by
        gRPC or proxy/firewall. This makes the server and client expiration
        timer closer to each other thus preventing the message being
        redelivered multiple times.

        After the messages have all had their ack deadline updated, execute
        the callback for each message using the executor.
        """
        _LOGGER.debug(
            'Scheduling callbacks for %s messages.',
            len(response.received_messages))

        # Immediately modack the messages we received, as this tells the server
        # that we've received them.
        items = [
            requests.ModAckRequest(
                message.ack_id, self._ack_histogram.percentile(99))
            for message in response.received_messages
        ]
        self._dispatcher.modify_ack_deadline(items)
        for received_message in response.received_messages:
            message = google.cloud.pubsub_v1.subscriber.message.Message(
                received_message.message,
                received_message.ack_id,
                self._scheduler.queue)
            # TODO: Immediately lease instead of using the callback queue.
            self._scheduler.schedule(self._callback, message)

    def _should_recover(self, exception):
        """Determine if an error on the RPC stream should be recovered.

        If the exception is one of the retryable exceptions, this will signal
        to the consumer thread that it should "recover" from the failure.

        This will cause the stream to exit when it returns :data:`False`.

        Returns:
            bool: Indicates if the caller should recover or shut down.
                Will be :data:`True` if the ``exception`` is "acceptable", i.e.
                in a list of retryable / idempotent exceptions.
        """
        exception = _maybe_wrap_exception(exception)
        # If this is in the list of idempotent exceptions, then we want to
        # recover.
        if isinstance(exception, _RETRYABLE_STREAM_ERRORS):
            return True
        return False

    def _on_rpc_done(self, future):
        # The RPC terminated without recovery: shut everything down, passing
        # the (wrapped) terminating error to the close callbacks so the
        # user-facing future resolves with it.
        _LOGGER.info(
            'RPC termination has signaled streaming pull manager shutdown.')
        future = _maybe_wrap_exception(future)
        self.close(reason=future)
@@ -136,3 +143,79 @@ def subscribe(self, subscription, callback=None, flow_control=()): error = '{!r} is not callable, please check input'.format(callback) raise TypeError(error) return subscr + + def subscribe_experimental( + self, subscription, callback, flow_control=(), + scheduler_=None): + """Asynchronously start receiving messages on a given subscription. + + This method starts a background thread to begin pulling messages from + a Pub/Sub subscription and scheduling them to be processed using the + provided ``callback``. + + The ``callback`` will be called with an individual + :class:`google.cloud.pubsub_v1.subscriber.message.Message`. It is the + responsibility of the callback to either call ``ack()`` or ``nack()`` + on the message when it finished processing. If an exception occurs in + the callback during processing, the exception is logged and the message + is ``nack()`` ed. + + The ``flow_control`` argument can be used to control the rate of + message processing. + + This method starts the receiver in the background and returns a + *Future* representing its execution. Waiting on the future (calling + ``result()``) will block forever or until a non-recoverable error + is encountered (such as loss of network connectivity). Cancelling the + future will signal the process to shutdown gracefully and exit. + + Example + + .. code-block:: python + + from google.cloud.pubsub_v1 import subscriber + + subscriber_client = pubsub.SubscriberClient() + + # existing subscription + subscription = subscriber_client.subscription_path( + 'my-project-id', 'my-subscription') + + def callback(message): + print(message) + message.ack() + + future = subscriber.subscribe_experimental( + subscription, callback) + + try: + future.result() + except KeyboardInterrupt: + future.cancel() + + Args: + subscription (str): The name of the subscription. The + subscription should have already been created (for example, + by using :meth:`create_subscription`). 
+ callback (Callable[~.pubsub_v1.subscriber.message.Message]): + The callback function. This function receives the message as + its only argument and will be called from a different thread/ + process depending on the scheduling strategy. + flow_control (~.pubsub_v1.types.FlowControl): The flow control + settings. Use this to prevent situations where you are + inundated with too many messages at once. + + Returns: + google.cloud.pubsub_v1.futures.StreamingPullFuture: A Future object + that can be used to manage the background stream. + """ + flow_control = types.FlowControl(*flow_control) + + manager = streaming_pull_manager.StreamingPullManager( + self, subscription, flow_control) + + future = futures.StreamingPullFuture(manager) + + manager.open(callback) + + return future diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 7114a32c9600..e6e55439a2c5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -62,3 +62,36 @@ def running(self): return False return super(Future, self).running() + + +class StreamingPullFuture(futures.Future): + """Represents a process that asynchronously performs streaming pull and + schedules messages to be processed. + + This future is resolved when the process is stopped (via :meth:`cancel`) or + if it encounters an unrecoverable error. Calling `.result()` will cause + the calling thread to block indefinitely. 
+ """ + + def __init__(self, manager): + super(StreamingPullFuture, self).__init__() + self._manager = manager + self._manager.add_close_callback(self._on_close_callback) + self._cancelled = True + + def _on_close_callback(self, manager, result): + if result is None: + self.set_result(True) + else: + self.set_exception(result) + + def cancel(self): + """Stops pulling messages and shutdowns the background thread consuming + messages. + """ + self._cancelled = True + return self._manager.close() + + def cancelled(self): + """bool: True if the subscription has been cancelled.""" + return self._cancelled diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 4af03976f27e..d24161e853f4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -18,7 +18,7 @@ import math import time -from google.cloud.pubsub_v1.subscriber.policy import base as base_policy +from google.cloud.pubsub_v1.subscriber._protocol import requests _MESSAGE_REPR = """\ @@ -174,7 +174,7 @@ def ack(self): """ time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( - base_policy.AckRequest( + requests.AckRequest( ack_id=self._ack_id, byte_size=self.size, time_to_ack=time_to_ack @@ -195,7 +195,7 @@ def drop(self): directly. """ self._request_queue.put( - base_policy.DropRequest( + requests.DropRequest( ack_id=self._ack_id, byte_size=self.size ) @@ -209,7 +209,7 @@ def lease(self): need to call it manually. """ self._request_queue.put( - base_policy.LeaseRequest( + requests.LeaseRequest( ack_id=self._ack_id, byte_size=self.size ) @@ -231,7 +231,7 @@ def modify_ack_deadline(self, seconds): values below 10 are advised against. 
""" self._request_queue.put( - base_policy.ModAckRequest( + requests.ModAckRequest( ack_id=self._ack_id, seconds=seconds ) @@ -243,7 +243,7 @@ def nack(self): This will cause the message to be re-delivered to the subscription. """ self._request_queue.put( - base_policy.NackRequest( + requests.NackRequest( ack_id=self._ack_id, byte_size=self.size ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py index 82122cea83d8..a1dca5208c94 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py @@ -28,6 +28,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import _consumer +from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import histogram @@ -35,30 +36,11 @@ # Namedtuples for management requests. Used by the Message class to communicate # items of work back to the policy. 
-AckRequest = collections.namedtuple( - 'AckRequest', - ['ack_id', 'byte_size', 'time_to_ack'], -) - -DropRequest = collections.namedtuple( - 'DropRequest', - ['ack_id', 'byte_size'], -) - -LeaseRequest = collections.namedtuple( - 'LeaseRequest', - ['ack_id', 'byte_size'], -) - -ModAckRequest = collections.namedtuple( - 'ModAckRequest', - ['ack_id', 'seconds'], -) - -NackRequest = collections.namedtuple( - 'NackRequest', - ['ack_id', 'byte_size'], -) +AckRequest = requests.AckRequest +DropRequest = requests.DropRequest +LeaseRequest = requests.LeaseRequest +ModAckRequest = requests.ModAckRequest +NackRequest = requests.NackRequest _LeasedMessage = collections.namedtuple( '_LeasedMessage', @@ -474,7 +456,7 @@ def on_response(self, response): For example, if a the Policy implementation takes a callback in its constructor, you can schedule the callback using a - :cls:`concurrent.futures.ThreadPoolExecutor`:: + :class:`concurrent.futures.ThreadPoolExecutor`:: self._pool.submit(self._callback, response) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py deleted file mode 100644 index f1bc96808e60..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/subscriber.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2017, Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from google.cloud.pubsub_v1 import types - - -class Subscriber(object): - """A consumer class based on :class:`threading.Thread`. - - This consumer handles the connection to the Pub/Sub service and all of - the concurrency needs. - - Args: - client (~.pubsub_v1.subscriber.client): The subscriber client used - to create this instance. - subscription (str): The name of the subscription. The canonical - format for this is - ``projects/{project}/subscriptions/{subscription}``. - flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow - control settings. - executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A - ThreadPoolExecutor instance, or anything duck-type compatible - with it. - queue (~queue.Queue): (Optional.) A Queue instance, appropriate - for crossing the concurrency boundary implemented by - ``executor``. - """ - - def __init__(self, client, subscription, flow_control=types.FlowControl(), - scheduler_cls=None): - raise NotImplementedError - - @property - def is_active(self): - raise NotImplementedError - - @property - def flow_control(self): - raise NotImplementedError - - @property - def ack_histogram(self): - raise NotImplementedError - - @property - def future(self): - raise NotImplementedError - - # - # User-facing subscriber management methods. 
def test_subscribe_to_messages_async_callbacks_experimental(
        publisher, topic_path, subscriber, subscription_path, cleanup):
    """System test: the experimental subscribe path delivers messages to the
    user callback concurrently (not sequentially)."""
    # Make sure the topic and subscription get deleted.
    cleanup.append((publisher.delete_topic, topic_path))
    cleanup.append((subscriber.delete_subscription, subscription_path))

    # Create a topic.
    publisher.create_topic(topic_path)

    # Subscribe to the topic. This must happen before the messages
    # are published.
    subscriber.create_subscription(subscription_path, topic_path)

    # Publish some messages.
    futures = [
        publisher.publish(
            topic_path,
            b'Wooooo! The claaaaaw!',
            num=str(index),
        )
        for index in six.moves.range(2)
    ]

    # Make sure the publish completes.
    for future in futures:
        future.result()

    # We want to make sure that the callback was called asynchronously. So
    # track when each call happened and make sure below.
    callback = TimesCallback(2)

    # Actually open the subscription and hold it open for a few seconds.
    future = subscriber.subscribe_experimental(subscription_path, callback)
    # NOTE(review): this loop polls for up to ~20s but does not break once
    # the condition is met — presumably intentional to keep the stream open;
    # confirm against the sibling (non-experimental) test.
    for second in six.moves.range(5):
        time.sleep(4)

        # The callback should have fired at least two times, but it may
        # take some time.
        if callback.calls >= 2:
            first, last = sorted(callback.call_times[:2])
            diff = last - first
            # "Ensure" the first two callbacks were executed asynchronously
            # (sequentially would have resulted in a difference of 2+
            # seconds).
            assert diff.days == 0
            assert diff.seconds < callback.sleep_time

    # Okay, we took too long; fail out.
    assert callback.calls >= 2

    future.cancel()
def test_dispatch_callback_inactive():
    # When the manager is no longer active, dispatched requests must be
    # silently discarded rather than sent on the (dead) stream.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    manager.is_active = False
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0)])

    manager.send.assert_not_called()


def test_ack():
    # Acking sends the ack IDs, releases the lease, may resume a paused
    # consumer, and records time-to-ack in the histogram.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    items = [requests.AckRequest(
        ack_id='ack_id_string', byte_size=0, time_to_ack=20)]
    dispatcher_.ack(items)

    manager.send.assert_called_once_with(types.StreamingPullRequest(
        ack_ids=['ack_id_string'],
    ))

    manager.leaser.remove.assert_called_once_with(items)
    manager.maybe_resume_consumer.assert_called_once()
    manager.ack_histogram.add.assert_called_once_with(20)


def test_ack_no_time():
    # A missing time_to_ack must not be recorded in the histogram.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    items = [requests.AckRequest(
        ack_id='ack_id_string', byte_size=0, time_to_ack=None)]
    dispatcher_.ack(items)

    manager.send.assert_called_once_with(types.StreamingPullRequest(
        ack_ids=['ack_id_string'],
    ))

    manager.ack_histogram.add.assert_not_called()


def test_lease():
    # Leasing registers the items with the leaser and may pause the consumer
    # if flow control limits are reached.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    items = [requests.LeaseRequest(ack_id='ack_id_string', byte_size=10)]
    dispatcher_.lease(items)

    manager.leaser.add.assert_called_once_with(items)
    manager.maybe_pause_consumer.assert_called_once()


def test_drop():
    # Dropping releases the lease and may resume a paused consumer.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    items = [requests.DropRequest(ack_id='ack_id_string', byte_size=10)]
    dispatcher_.drop(items)

    manager.leaser.remove.assert_called_once_with(items)
    manager.maybe_resume_consumer.assert_called_once()


def test_nack():
    # Nacking is expressed on the wire as a modack with a zero deadline.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    items = [requests.NackRequest(ack_id='ack_id_string', byte_size=10)]
    dispatcher_.nack(items)

    manager.send.assert_called_once_with(types.StreamingPullRequest(
        modify_deadline_ack_ids=['ack_id_string'],
        modify_deadline_seconds=[0],
    ))


def test_modify_ack_deadline():
    # The ack ID and deadline lists must stay index-aligned in the request.
    manager = mock.create_autospec(
        streaming_pull_manager.StreamingPullManager, instance=True)
    dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue)

    items = [requests.ModAckRequest(ack_id='ack_id_string', seconds=60)]
    dispatcher_.modify_ack_deadline(items)

    manager.send.assert_called_once_with(types.StreamingPullRequest(
        modify_deadline_ack_ids=['ack_id_string'],
        modify_deadline_seconds=[60],
    ))
@mock.patch('threading.Thread', autospec=True) def test_start_already_started(thread): - subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) - dispatcher_ = dispatcher.Dispatcher(mock.sentinel.queue, subscriber_) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) dispatcher_._thread = mock.sentinel.thread with pytest.raises(ValueError): @@ -80,7 +173,7 @@ def test_start_already_started(thread): def test_stop(): queue_ = queue.Queue() - dispatcher_ = dispatcher.Dispatcher(queue_, mock.sentinel.subscriber) + dispatcher_ = dispatcher.Dispatcher(mock.sentinel.manager, queue_) thread = mock.create_autospec(threading.Thread, instance=True) dispatcher_._thread = thread @@ -93,6 +186,6 @@ def test_stop(): def test_stop_no_join(): dispatcher_ = dispatcher.Dispatcher( - mock.sentinel.queue, mock.sentinel.subscriber) + mock.sentinel.manager, mock.sentinel.queue) dispatcher_.stop() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 3e6b24501594..3ffcaff647dd 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -15,11 +15,13 @@ from __future__ import absolute_import import mock +import pytest from google.auth import credentials from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber.policy import thread +from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager def create_policy(**kwargs): @@ -42,3 +44,45 @@ def test_running(): assert future.running() is True policy._future = None assert future.running() is False + + +class TestStreamingPullFuture(object): + 
def make_future(self): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + future = futures.StreamingPullFuture(manager) + return future + + def test_default_state(self): + future = self.make_future() + + assert future.running() + assert not future.done() + future._manager.add_close_callback.assert_called_once_with( + future._on_close_callback) + + def test__on_close_callback_success(self): + future = self.make_future() + + future._on_close_callback(mock.sentinel.manager, None) + + assert future.result() is True + assert not future.running() + + def test__on_close_callback_failure(self): + future = self.make_future() + + future._on_close_callback(mock.sentinel.manager, ValueError('meep')) + + with pytest.raises(ValueError): + future.result() + + assert not future.running() + + def test_cancel(self): + future = self.make_future() + + future.cancel() + + future._manager.close.assert_called_once() + assert future.cancelled() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index c2cdde4bafd9..6c16276e8f15 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -16,17 +16,18 @@ import threading from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import subscriber +from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import histogram from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager import mock import pytest def test_add_and_remove(): - leaser_ = leaser.Leaser(mock.sentinel.subscriber) + leaser_ = leaser.Leaser(mock.sentinel.manager) leaser_.add([ 
requests.LeaseRequest(ack_id='ack1', byte_size=50)]) @@ -48,7 +49,7 @@ def test_add_and_remove(): def test_add_already_managed(caplog): caplog.set_level(logging.DEBUG) - leaser_ = leaser.Leaser(mock.sentinel.subscriber) + leaser_ = leaser.Leaser(mock.sentinel.manager) leaser_.add([ requests.LeaseRequest(ack_id='ack1', byte_size=50)]) @@ -61,7 +62,7 @@ def test_add_already_managed(caplog): def test_remove_not_managed(caplog): caplog.set_level(logging.DEBUG) - leaser_ = leaser.Leaser(mock.sentinel.subscriber) + leaser_ = leaser.Leaser(mock.sentinel.manager) leaser_.remove([ requests.DropRequest(ack_id='ack1', byte_size=50)]) @@ -72,7 +73,7 @@ def test_remove_not_managed(caplog): def test_remove_negative_bytes(caplog): caplog.set_level(logging.DEBUG) - leaser_ = leaser.Leaser(mock.sentinel.subscriber) + leaser_ = leaser.Leaser(mock.sentinel.manager) leaser_.add([ requests.LeaseRequest(ack_id='ack1', byte_size=50)]) @@ -83,20 +84,23 @@ def test_remove_negative_bytes(caplog): assert 'unexpectedly negative' in caplog.text -def create_subscriber(flow_control=types.FlowControl()): - subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) - subscriber_.is_active = True - subscriber_.flow_control = flow_control - subscriber_.ack_histogram = histogram.Histogram() - return subscriber_ +def create_manager(flow_control=types.FlowControl()): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + manager.dispatcher = mock.create_autospec( + dispatcher.Dispatcher, instance=True) + manager.is_active = True + manager.flow_control = flow_control + manager.ack_histogram = histogram.Histogram() + return manager def test_maintain_leases_inactive(caplog): caplog.set_level(logging.INFO) - subscriber_ = create_subscriber() - subscriber_.is_active = False + manager = create_manager() + manager.is_active = False - leaser_ = leaser.Leaser(subscriber_) + leaser_ = leaser.Leaser(manager) leaser_.maintain_leases() @@ -105,9 +109,9 @@ def 
test_maintain_leases_inactive(caplog): def test_maintain_leases_stopped(caplog): caplog.set_level(logging.INFO) - subscriber_ = create_subscriber() + manager = create_manager() - leaser_ = leaser.Leaser(subscriber_) + leaser_ = leaser.Leaser(manager) leaser_.stop() leaser_.maintain_leases() @@ -115,25 +119,25 @@ def test_maintain_leases_stopped(caplog): assert 'exiting' in caplog.text -def make_sleep_mark_subscriber_as_inactive(sleep, subscriber): - # Make sleep mark the subscriber as inactive so that maintain_leases +def make_sleep_mark_manager_as_inactive(sleep, manager): + # Make sleep mark the manager as inactive so that maintain_leases # exits at the end of the first run. def trigger_inactive(seconds): assert 0 < seconds < 10 - subscriber.is_active = False + manager.is_active = False sleep.side_effect = trigger_inactive @mock.patch('time.sleep', autospec=True) def test_maintain_leases_ack_ids(sleep): - subscriber_ = create_subscriber() - make_sleep_mark_subscriber_as_inactive(sleep, subscriber_) - leaser_ = leaser.Leaser(subscriber_) + manager = create_manager() + make_sleep_mark_manager_as_inactive(sleep, manager) + leaser_ = leaser.Leaser(manager) leaser_.add([requests.LeaseRequest(ack_id='my ack id', byte_size=50)]) leaser_.maintain_leases() - subscriber_.modify_ack_deadline.assert_called_once_with([ + manager.dispatcher.modify_ack_deadline.assert_called_once_with([ requests.ModAckRequest( ack_id='my ack id', seconds=10, @@ -144,22 +148,22 @@ def test_maintain_leases_ack_ids(sleep): @mock.patch('time.sleep', autospec=True) def test_maintain_leases_no_ack_ids(sleep): - subscriber_ = create_subscriber() - make_sleep_mark_subscriber_as_inactive(sleep, subscriber_) - leaser_ = leaser.Leaser(subscriber_) + manager = create_manager() + make_sleep_mark_manager_as_inactive(sleep, manager) + leaser_ = leaser.Leaser(manager) leaser_.maintain_leases() - subscriber_.modify_ack_deadline.assert_not_called() + manager.dispatcher.modify_ack_deadline.assert_not_called() 
sleep.assert_called() @mock.patch('time.time', autospec=True) @mock.patch('time.sleep', autospec=True) def test_maintain_leases_outdated_items(sleep, time): - subscriber_ = create_subscriber() - make_sleep_mark_subscriber_as_inactive(sleep, subscriber_) - leaser_ = leaser.Leaser(subscriber_) + manager = create_manager() + make_sleep_mark_manager_as_inactive(sleep, manager) + leaser_ = leaser.Leaser(manager) # Add these items at the beginning of the timeline time.return_value = 0 @@ -167,23 +171,23 @@ def test_maintain_leases_outdated_items(sleep, time): requests.LeaseRequest(ack_id='ack1', byte_size=50)]) # Add another item at towards end of the timeline - time.return_value = subscriber_.flow_control.max_lease_duration - 1 + time.return_value = manager.flow_control.max_lease_duration - 1 leaser_.add([ requests.LeaseRequest(ack_id='ack2', byte_size=50)]) # Now make sure time reports that we are at the end of our timeline. - time.return_value = subscriber_.flow_control.max_lease_duration + 1 + time.return_value = manager.flow_control.max_lease_duration + 1 leaser_.maintain_leases() # Only ack2 should be renewed. 
ack1 should've been dropped - subscriber_.modify_ack_deadline.assert_called_once_with([ + manager.dispatcher.modify_ack_deadline.assert_called_once_with([ requests.ModAckRequest( ack_id='ack2', seconds=10, ) ]) - subscriber_.drop.assert_called_once_with([ + manager.dispatcher.drop.assert_called_once_with([ requests.DropRequest(ack_id='ack1', byte_size=50) ]) sleep.assert_called() @@ -191,8 +195,9 @@ def test_maintain_leases_outdated_items(sleep, time): @mock.patch('threading.Thread', autospec=True) def test_start(thread): - subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) - leaser_ = leaser.Leaser(subscriber_) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + leaser_ = leaser.Leaser(manager) leaser_.start() @@ -206,8 +211,9 @@ def test_start(thread): @mock.patch('threading.Thread', autospec=True) def test_start_already_started(thread): - subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) - leaser_ = leaser.Leaser(subscriber_) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + leaser_ = leaser.Leaser(manager) leaser_._thread = mock.sentinel.thread with pytest.raises(ValueError): @@ -217,8 +223,9 @@ def test_start_already_started(thread): def test_stop(): - subscriber_ = mock.create_autospec(subscriber.Subscriber, instance=True) - leaser_ = leaser.Leaser(subscriber_) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + leaser_ = leaser.Leaser(manager) thread = mock.create_autospec(threading.Thread, instance=True) leaser_._thread = thread @@ -230,6 +237,6 @@ def test_stop(): def test_stop_no_join(): - leaser_ = leaser.Leaser(mock.sentinel.subscriber) + leaser_ = leaser.Leaser(mock.sentinel.manager) leaser_.stop() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 
1587c0c1866a..431d39bb6afc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -19,7 +19,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message -from google.cloud.pubsub_v1.subscriber.policy import base +from google.cloud.pubsub_v1.subscriber._protocol import requests def create_message(data, ack_id='ACKID', **attrs): @@ -78,56 +78,56 @@ def test_ack(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.ack() - put.assert_called_once_with(base.AckRequest( + put.assert_called_once_with(requests.AckRequest( ack_id='bogus_ack_id', byte_size=25, time_to_ack=mock.ANY, )) - check_call_types(put, base.AckRequest) + check_call_types(put, requests.AckRequest) def test_drop(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.drop() - put.assert_called_once_with(base.DropRequest( + put.assert_called_once_with(requests.DropRequest( ack_id='bogus_ack_id', byte_size=25, )) - check_call_types(put, base.DropRequest) + check_call_types(put, requests.DropRequest) def test_lease(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.lease() - put.assert_called_once_with(base.LeaseRequest( + put.assert_called_once_with(requests.LeaseRequest( ack_id='bogus_ack_id', byte_size=25, )) - check_call_types(put, base.LeaseRequest) + check_call_types(put, requests.LeaseRequest) def test_modify_ack_deadline(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.modify_ack_deadline(60) - put.assert_called_once_with(base.ModAckRequest( + put.assert_called_once_with(requests.ModAckRequest( ack_id='bogus_ack_id', seconds=60, )) - check_call_types(put, base.ModAckRequest) + 
check_call_types(put, requests.ModAckRequest) def test_nack(): msg = create_message(b'foo', ack_id='bogus_ack_id') with mock.patch.object(msg._request_queue, 'put') as put: msg.nack() - put.assert_called_once_with(base.NackRequest( + put.assert_called_once_with(requests.NackRequest( ack_id='bogus_ack_id', byte_size=25, )) - check_call_types(put, base.NackRequest) + check_call_types(put, requests.NackRequest) def test_repr(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py new file mode 100644 index 000000000000..a6527dc4eb3b --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -0,0 +1,396 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import mock +import pytest + +from google.api_core import exceptions +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.gapic import subscriber_client_config +from google.cloud.pubsub_v1.subscriber import client +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber import scheduler +from google.cloud.pubsub_v1.subscriber._protocol import bidi +from google.cloud.pubsub_v1.subscriber._protocol import dispatcher +from google.cloud.pubsub_v1.subscriber._protocol import leaser +from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +import grpc + + +@pytest.mark.parametrize('exception,expected_cls', [ + (ValueError('meep'), ValueError), + (mock.create_autospec(grpc.RpcError, instance=True), + exceptions.GoogleAPICallError), +]) +def test__maybe_wrap_exception(exception, expected_cls): + assert isinstance( + streaming_pull_manager._maybe_wrap_exception(exception), expected_cls) + + +def test__wrap_callback_errors_no_error(): + msg = mock.create_autospec(message.Message, instance=True) + callback = mock.Mock() + + streaming_pull_manager._wrap_callback_errors(callback, msg) + + callback.assert_called_once_with(msg) + msg.nack.assert_not_called() + + +def test__wrap_callback_errors_error(): + msg = mock.create_autospec(message.Message, instance=True) + callback = mock.Mock(side_effect=ValueError('meep')) + + streaming_pull_manager._wrap_callback_errors(callback, msg) + + msg.nack.assert_called_once() + + +def test_constructor_and_default_state(): + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription) + + # Public state + assert manager.is_active is False + assert manager.flow_control == types.FlowControl() + assert manager.dispatcher is None + assert manager.leaser is None + assert manager.ack_histogram is not None + assert manager.ack_deadline == 10 + assert 
manager.load == 0 + + # Private state + assert manager._client == mock.sentinel.client + assert manager._subscription == mock.sentinel.subscription + assert manager._scheduler is not None + + +def test_constructor_with_options(): + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=mock.sentinel.flow_control, + scheduler=mock.sentinel.scheduler) + + assert manager.flow_control == mock.sentinel.flow_control + assert manager._scheduler == mock.sentinel.scheduler + + +def make_manager(**kwargs): + client_ = mock.create_autospec(client.Client, instance=True) + scheduler_ = mock.create_autospec(scheduler.Scheduler, instance=True) + return streaming_pull_manager.StreamingPullManager( + client_, + 'subscription-name', + scheduler=scheduler_, + **kwargs) + + +def test_ack_deadline(): + manager = make_manager() + assert manager.ack_deadline == 10 + manager.ack_histogram.add(20) + assert manager.ack_deadline == 20 + manager.ack_histogram.add(10) + assert manager.ack_deadline == 20 + + +def test_lease_load_and_pause(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + manager._leaser = leaser.Leaser(manager) + manager._consumer = mock.create_autospec( + bidi.BackgroundConsumer, instance=True) + manager._consumer.is_paused = False + + # This should mean that our messages count is at 10%, and our bytes + # are at 15%; load should return the higher (0.15), and shouldn't cause + # the consumer to pause. + manager.leaser.add([requests.LeaseRequest(ack_id='one', byte_size=150)]) + assert manager.load == 0.15 + manager.maybe_pause_consumer() + manager._consumer.pause.assert_not_called() + + # After this message is added, the messages should be higher at 20% + # (versus 16% for bytes). + manager.leaser.add([requests.LeaseRequest(ack_id='two', byte_size=10)]) + assert manager.load == 0.2 + + # Returning a number above 100% is fine, and it should cause this to pause. 
+ manager.leaser.add([requests.LeaseRequest(ack_id='three', byte_size=1000)]) + assert manager.load == 1.16 + manager.maybe_pause_consumer() + manager._consumer.pause.assert_called_once() + + +def test_drop_and_resume(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + manager._leaser = leaser.Leaser(manager) + manager._consumer = mock.create_autospec( + bidi.BackgroundConsumer, instance=True) + manager._consumer.is_paused = True + + # Add several messages until we're over the load threshold. + manager.leaser.add([ + requests.LeaseRequest(ack_id='one', byte_size=750), + requests.LeaseRequest(ack_id='two', byte_size=250)]) + + assert manager.load == 1.0 + + # Trying to resume now should have no effect as we're over the threshold. + manager.maybe_resume_consumer() + manager._consumer.resume.assert_not_called() + + # Drop the 200 byte message, which should put us under the resume + # threshold. + manager.leaser.remove([ + requests.DropRequest(ack_id='two', byte_size=250)]) + manager.maybe_resume_consumer() + manager._consumer.resume.assert_called_once() + + +def test_resume_not_paused(): + manager = make_manager() + manager._consumer = mock.create_autospec( + bidi.BackgroundConsumer, instance=True) + manager._consumer.is_paused = False + + # Resuming should have no effect is the consumer is not actually paused. 
+ manager.maybe_resume_consumer() + manager._consumer.resume.assert_not_called() + + +def test_send(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + + manager.send(mock.sentinel.request) + + manager._rpc.send.assert_called_once_with(mock.sentinel.request) + + +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.bidi.ResumableBidiRpc', + autospec=True) +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.bidi.BackgroundConsumer', + autospec=True) +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser', + autospec=True) +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher', + autospec=True) +def test_open(dispatcher, leaser, background_consumer, resumable_bidi_rpc): + manager = make_manager() + + manager.open(mock.sentinel.callback) + + dispatcher.assert_called_once_with(manager, manager._scheduler.queue) + dispatcher.return_value.start.assert_called_once() + assert manager._dispatcher == dispatcher.return_value + + leaser.assert_called_once_with(manager) + leaser.return_value.start.assert_called_once() + assert manager.leaser == leaser.return_value + + background_consumer.assert_called_once_with( + manager._rpc, manager._on_response) + background_consumer.return_value.start.assert_called_once() + assert manager._consumer == background_consumer.return_value + + resumable_bidi_rpc.assert_called_once_with( + start_rpc=manager._client.api.streaming_pull, + initial_request=manager._get_initial_request, + should_recover=manager._should_recover) + resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( + manager._on_rpc_done) + assert manager._rpc == resumable_bidi_rpc.return_value + + manager._consumer.is_active = True + assert manager.is_active is True + + +def test_open_already_active(): + manager = make_manager() + manager._consumer = mock.create_autospec( + bidi.BackgroundConsumer, instance=True) + manager._consumer.is_active = True + 
+ with pytest.raises(ValueError, match='already open'): + manager.open(mock.sentinel.callback) + + +def test_open_has_been_closed(): + manager = make_manager() + manager._closed = True + + with pytest.raises(ValueError, match='closed'): + manager.open(mock.sentinel.callback) + + +def make_running_manager(): + manager = make_manager() + manager._consumer = mock.create_autospec( + bidi.BackgroundConsumer, instance=True) + manager._consumer.is_active = True + manager._dispatcher = mock.create_autospec( + dispatcher.Dispatcher, instance=True) + manager._leaser = mock.create_autospec( + leaser.Leaser, instance=True) + + return ( + manager, manager._consumer, manager._dispatcher, manager._leaser, + manager._scheduler) + + +def test_close(): + manager, consumer, dispatcher, leaser, scheduler = make_running_manager() + + manager.close() + + consumer.stop.assert_called_once() + leaser.stop.assert_called_once() + dispatcher.stop.assert_called_once() + scheduler.shutdown.assert_called_once() + + assert manager.is_active is False + + +def test_close_inactive_consumer(): + manager, consumer, dispatcher, leaser, scheduler = make_running_manager() + consumer.is_active = False + + manager.close() + + consumer.stop.assert_not_called() + leaser.stop.assert_called_once() + dispatcher.stop.assert_called_once() + scheduler.shutdown.assert_called_once() + + +def test_close_idempotent(): + manager, _, _, _, scheduler = make_running_manager() + + manager.close() + manager.close() + + assert scheduler.shutdown.call_count == 1 + + +def test_close_callbacks(): + manager, _, _, _, _ = make_running_manager() + + callback = mock.Mock() + + manager.add_close_callback(callback) + manager.close(reason='meep') + + callback.assert_called_once_with(manager, 'meep') + + +def test__get_initial_request(): + manager = make_manager() + manager._leaser = mock.create_autospec( + leaser.Leaser, instance=True) + manager._leaser.ack_ids = ['1', '2'] + + initial_request = manager._get_initial_request() + + 
assert isinstance(initial_request, types.StreamingPullRequest) + assert initial_request.subscription == 'subscription-name' + assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.modify_deadline_ack_ids == ['1', '2'] + assert initial_request.modify_deadline_seconds == [10, 10] + + +def test_on_response(): + manager, _, dispatcher, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + # Set up the messages. + response = types.StreamingPullResponse( + received_messages=[ + types.ReceivedMessage( + ack_id='fack', + message=types.PubsubMessage(data=b'foo', message_id='1') + ), + types.ReceivedMessage( + ack_id='back', + message=types.PubsubMessage(data=b'bar', message_id='2') + ), + ], + ) + + # Actually run the method and prove that modack and schedule + # are called in the expected way. + manager._on_response(response) + + dispatcher.modify_ack_deadline.assert_called_once_with( + [requests.ModAckRequest('fack', 10), + requests.ModAckRequest('back', 10)] + ) + + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 2 + for call in schedule_calls: + assert call[1][0] == mock.sentinel.callback + assert isinstance(call[1][1], message.Message) + + +def test_retryable_stream_errors(): + # Make sure the config matches our hard-coded tuple of exceptions. + interfaces = subscriber_client_config.config['interfaces'] + retry_codes = interfaces['google.pubsub.v1.Subscriber']['retry_codes'] + idempotent = retry_codes['idempotent'] + + status_codes = tuple( + getattr(grpc.StatusCode, name, None) + for name in idempotent + ) + expected = tuple( + exceptions.exception_class_for_grpc_status(status_code) + for status_code in status_codes + ) + assert set(expected).issubset( + set(streaming_pull_manager._RETRYABLE_STREAM_ERRORS)) + + +def test__should_recover_true(): + manager = make_manager() + + details = 'UNAVAILABLE. Service taking nap.' 
+ exc = exceptions.ServiceUnavailable(details) + + assert manager._should_recover(exc) is True + + +def test__should_recover_false(): + manager = make_manager() + + exc = TypeError('wahhhhhh') + + assert manager._should_recover(exc) is False + + +def test__on_rpc_done(): + manager = make_manager() + + with mock.patch.object(manager, 'close') as close: + manager._on_rpc_done(mock.sentinel.error) + + close.assert_called_once_with(reason=mock.sentinel.error) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 3d4169f7ab28..c5d29ecfafdc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -17,6 +17,7 @@ import pytest from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber.policy import thread @@ -64,3 +65,17 @@ def test_subscribe_with_failed_callback(): with pytest.raises(TypeError) as exc_info: client.subscribe('sub_name_b', callback) assert callback in str(exc_info.value) + + +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager.' 
+ 'StreamingPullManager.open', autospec=True) +def test_subscribe_experimental(manager_open): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + + future = client.subscribe_experimental( + 'sub_name_a', callback=mock.sentinel.callback) + assert isinstance(future, futures.StreamingPullFuture) + + manager_open.assert_called_once_with(mock.ANY, mock.sentinel.callback) From 64a2ded68097b3d1e68660aca4ca761360f92ba4 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 27 Apr 2018 09:02:06 -0700 Subject: [PATCH 0251/1197] Lower the flow control defaults, remove psutil dependency (#5248) --- .../google/cloud/pubsub_v1/subscriber/client.py | 9 +++++++-- .../google/cloud/pubsub_v1/types.py | 15 +++++++-------- packages/google-cloud-pubsub/setup.py | 1 - 3 files changed, 14 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 4e104be8bf4c..e948d74521b7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -160,8 +160,13 @@ def subscribe_experimental( the callback during processing, the exception is logged and the message is ``nack()`` ed. - The ``flow_control`` argument can be used to control the rate of - message processing. + The ``flow_control`` argument can be used to control the rate of at + which messages are pulled. The settings are relatively conservative by + default to prevent "message hoarding" - a situation where the client + pulls a large number of messages but can not process them fast enough + leading it to "starve" other clients of messages. Increasing these + settings may lead to faster throughput for messages that do not take + a long time to process. 
This method starts the receiver in the background and returns a *Future* representing its execution. Waiting on the future (calling diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 8a8de3852b3c..fbb21af02e33 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -14,7 +14,6 @@ from __future__ import absolute_import import collections -import psutil import sys from google.api_core.protobuf_helpers import get_messages @@ -58,13 +57,13 @@ 'max_lease_duration'], ) FlowControl.__new__.__defaults__ = ( - psutil.virtual_memory().total * 0.2, # max_bytes: 20% of total RAM - float('inf'), # max_messages: no limit - 0.8, # resume_threshold: 80% - 100, # max_requests: 100 - 100, # max_request_batch_size: 100 - 0.01, # max_request_batch_latency: 0.01s - 2 * 60 * 60, # max_lease_duration: 2 hours. + 100 * 1024 * 1024, # max_bytes: 100mb + 100, # max_messages: 100 + 0.8, # resume_threshold: 80% + 100, # max_requests: 100 + 100, # max_request_batch_size: 100 + 0.01, # max_request_batch_latency: 0.01s + 2 * 60 * 60, # max_lease_duration: 2 hours. 
) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 71734d727efa..2acb15301061 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -31,7 +31,6 @@ dependencies = [ 'google-api-core[grpc]<2.0.0dev,>=0.1.3', 'grpc-google-iam-v1<0.12dev,>=0.11.1', - 'psutil<6.0dev,>=5.2.2', ] extras = { } From 35b5380fcf296d05465e435993a1010b1a5c4f99 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 27 Apr 2018 13:50:16 -0700 Subject: [PATCH 0252/1197] Fix race condition in bidi.BackgroundConsumer (#5256) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 26 ++++++++++++------- .../unit/pubsub_v1/subscriber/test_bidi.py | 2 ++ 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 13e0c06cce80..215e38e9ac68 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -421,16 +421,24 @@ def _thread_main(self): self._bidi_rpc.open() while self._bidi_rpc.is_active: - if not self.is_paused: - _LOGGER.debug('waiting for recv.') - response = self._bidi_rpc.recv() - _LOGGER.debug('recved response.') - self._on_response(response) - else: - _LOGGER.debug('paused, waiting for waking.') - with self._wake: + # Do not allow the paused status to change at all during this + # section. There is a condition where we could be resumed + # between checking if we are paused and calling wake.wait(), + # which means that we will miss the notification to wake up + # (oops!) and wait for a notification that will never come. + # Keeping the lock throughout avoids that. + # In the future, we could use `Condition.wait_for` if we drop + # Python 2.7. 
+ with self._wake: + if self._paused: + _LOGGER.debug('paused, waiting for waking.') self._wake.wait() - _LOGGER.debug('woken.') + _LOGGER.debug('woken.') + + _LOGGER.debug('waiting for recv.') + response = self._bidi_rpc.recv() + _LOGGER.debug('recved response.') + self._on_response(response) except exceptions.GoogleAPICallError as exc: _LOGGER.debug( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py index 6d51472d55c2..4e79ee3e6ce4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -504,7 +504,9 @@ def on_response(response): # consumer. should_continue.set() consumer.resume() + responses_and_events[mock.sentinel.response_2].wait() + assert recved_responses == [ mock.sentinel.response_1, mock.sentinel.response_2] From ad0ee3a7f997a4751f61d15f4a108b20682773c9 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Fri, 27 Apr 2018 15:36:16 -0700 Subject: [PATCH 0253/1197] Release 0.34.0 (#5258) --- packages/google-cloud-pubsub/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 1e233db75b7c..8c9f1422f625 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.34.0 + +### Implementation Changes + +- Lower the flow control defaults. (#5248) + +### New Features + +- A new implementation of the subscriber has been added. This is available as `SubscriberClient.subscribe_experimental`. In the next release, this will be replace the current `subscribe` method. If you use this, please report your +findings to us on GitHub. 
(#5189, #5201, #5210, #5229, #5230, #5237, #5256) + +### Dependencies + +- Remove psutil dependency. (#5248) + ## 0.33.1 ### Implementation changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 2acb15301061..e1237ac8d11e 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.33.1' +version = '0.34.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 1616f48605c94d4bec42a3f9c482972c2046fdb9 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 30 Apr 2018 20:17:26 -0700 Subject: [PATCH 0254/1197] Promote subscribe_experimental() to subscribe(), remove old subscriber implementation. (#5274) Promote subscribe_experimental() to subscribe(), remove old subscriber implementation. This removes the following public interfaces: * pubsub_v1.subscriber.policy.base * pubsub_v1.subscriber.policy.thread * pubsub_v1.subscriber.futures.Future * pubsub_v1.subscriber.client.Client.subscribe_experimental --- .../cloud/pubsub_v1/subscriber/_consumer.py | 471 ----------------- .../cloud/pubsub_v1/subscriber/client.py | 59 +-- .../cloud/pubsub_v1/subscriber/futures.py | 47 -- .../pubsub_v1/subscriber/policy/__init__.py | 0 .../cloud/pubsub_v1/subscriber/policy/base.py | 492 ------------------ .../pubsub_v1/subscriber/policy/thread.py | 347 ------------ packages/google-cloud-pubsub/tests/system.py | 61 +-- .../pubsub_v1/subscriber/test_consumer.py | 336 ------------ .../subscriber/test_futures_subscriber.py | 25 - .../pubsub_v1/subscriber/test_policy_base.py | 367 ------------- .../subscriber/test_policy_thread.py | 273 ---------- .../subscriber/test_subscriber_client.py | 34 +- 12 files changed, 8 insertions(+), 2504 deletions(-) delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py delete 
mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py deleted file mode 100644 index 8abaedfe04ac..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_consumer.py +++ /dev/null @@ -1,471 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Bidirectional Streaming Consumer. - -The goal here is to consume a bidirectional streaming RPC by fanning out the -responses received from the server to be processed and fanning in requests from -the response processors to be sent to the server through the request stream. -This module is a framework to deal with this pattern in a consistent way: - - * A :class:`Consumer` manages scheduling requests to a stream and consuming - responses from a stream. 
The Consumer takes the responses and schedules - them to be processed in callbacks using any - :class:`~concurrent.futures.Executor`. - * A :class:`Policy` which determines how the consumer calls the RPC and - processes responses, errors, and messages. - -The :class:`Policy` is the only class that's intended to be sub-classed here. -This would be implemented for every bidirectional streaming method. -How does this work? The first part of the implementation, fanning out -responses, its actually quite straightforward and can be done with just a -:class:`concurrent.futures.Executor`: - -.. graphviz:: - - digraph responses_only { - "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] - "gRPC Python" -> "Consumer" [label="responses", color="red"] - "Consumer" -> "Policy" [label="responses", color="red"] - "Policy" -> "futures.Executor" [label="response", color="red"] - "futures.Executor" -> "callback" [label="response", color="red"] - } - -The challenge comes from the fact that in bidirectional streaming two more -things have to be done: - - 1. The consumer must maintain a long-running request generator. - 2. The consumer must provide some way for the response processor to queue - new requests. - -These are especially important because in the case of Pub/Sub you are -essentially streaming requests indefinitely and receiving responses -indefinitely. - -For the first challenge, we take advantage of the fact that gRPC runs the -request generator in its own thread. That thread can block, so we can use -a queue for that: - -.. graphviz:: - - digraph response_flow { - "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] - "gRPC Python" -> "Consumer" [label="responses", color="red"] - "Consumer" -> "request generator thread" [label="starts", color="gray"] - "request generator thread" -> "gRPC Python" - [label="requests", color="blue"] - } - -The final piece of the puzzle, allowing things from anywhere to queue new -requests, it a bit more complex. 
If we were only dealing with threads, then the -response workers could just directly interact with the policy/consumer to -queue new requests: - -.. graphviz:: - - digraph thread_only_requests { - "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] - "gRPC Python" -> "Consumer" [label="responses", color="red"] - "Consumer" -> "request generator thread" [label="starts", color="gray"] - "request generator thread" -> "gRPC Python" - [label="requests", color="blue"] - "Consumer" -> "Policy" [label="responses", color="red"] - "Policy" -> "futures.Executor" [label="response", color="red"] - "futures.Executor" -> "callback" [label="response", color="red"] - "callback" -> "Consumer" [label="send_request", color="blue"] - } - -But, because this does not dictate any particular concurrent strategy for -dealing with the responses, it's possible that a response could be processed -in a different thread, process, or even on a different machine. Because of -this, we need an intermediary queue between the callbacks and the gRPC request -queue to bridge the "concurrecy gap". To pump items from the concurrecy-safe -queue into the gRPC request queue, we need another worker thread. Putting this -all together looks like this: - -.. 
graphviz:: - - digraph responses_only { - "gRPC C Core" -> "gRPC Python" [label="queue", dir="both"] - "gRPC Python" -> "Consumer" [label="responses", color="red"] - "Consumer" -> "request generator thread" [label="starts", color="gray"] - "Policy" -> "QueueCallbackWorker" [label="starts", color="gray"] - "request generator thread" -> "gRPC Python" - [label="requests", color="blue"] - "Consumer" -> "Policy" [label="responses", color="red"] - "Policy" -> "futures.Executor" [label="response", color="red"] - "futures.Executor" -> "callback" [label="response", color="red"] - "callback" -> "callback_request_queue" [label="requests", color="blue"] - "callback_request_queue" -> "QueueCallbackWorker" - [label="consumed by", color="blue"] - "QueueCallbackWorker" -> "Consumer" - [label="send_response", color="blue"] - } - -This part is actually up to the Policy to enable. The consumer just provides a -thread-safe queue for requests. The :class:`QueueCallbackWorker` can be used by - -the Policy implementation to spin up the worker thread to pump the -concurrency-safe queue. See the Pub/Sub subscriber implementation for an -example of this. -""" - -import logging -import threading - -from six.moves import queue - -from google.cloud.pubsub_v1.subscriber._protocol import helper_threads - - -_LOGGER = logging.getLogger(__name__) -_BIDIRECTIONAL_CONSUMER_NAME = 'Thread-ConsumeBidirectionalStream' - - -class _RequestQueueGenerator(object): - """A helper for sending requests to a gRPC stream from a Queue. - - This generator takes requests off a given queue and yields them to gRPC. - - This helper is useful when you have an indeterminate, indefinite, or - otherwise open-ended set of requests to send through a request-streaming - (or bidirectional) RPC. - - The reason this is necessary is because gRPC takes an iterator as the - request for request-streaming RPCs. gRPC consumes this iterator in another - thread to allow it to block while generating requests for the stream. 
- However, if the generator blocks indefinitely gRPC will not be able to - clean up the thread as it'll be blocked on `next(iterator)` and not be able - to check the channel status to stop iterating. This helper mitigates that - by waiting on the queue with a timeout and checking the RPC state before - yielding. - - Finally, it allows for retrying without swapping queues because if it does - pull an item off the queue, it'll immediately put it back and then exit. - This is necessary because yielding the item in this case will cause gRPC - to discard it. In practice, this means that the order of messages is not - guaranteed. If such a thing is necessary it would be easy to use a priority - queue. - - Example:: - - requests = request_queue_generator(q) - rpc = stub.StreamingRequest(iter(requests)) - requests.rpc = rpc - - for response in rpc: - print(response) - q.put(...) - - Args: - queue (queue.Queue): The request queue. - period (float): The number of seconds to wait for items from the queue - before checking if the RPC is cancelled. In practice, this - determines the maximum amount of time the request consumption - thread will live after the RPC is cancelled. - initial_request (protobuf.Message): The initial request to yield. This - is done independently of the request queue to allow for easily - restarting streams that require some initial configuration request. - """ - def __init__(self, queue, period=1, initial_request=None): - self._queue = queue - self._period = period - self._initial_request = initial_request - self.rpc = None - - def _should_exit(self): - # Note: there is a possibility that this starts *before* the rpc - # property is set. So we have to check if self.rpc is set before seeing - # if it's active. 
- if self.rpc is not None and not self.rpc.is_active(): - return True - else: - return False - - def __iter__(self): - if self._initial_request is not None: - yield self._initial_request - - while True: - try: - item = self._queue.get(timeout=self._period) - except queue.Empty: - if self._should_exit(): - _LOGGER.debug( - 'Empty queue and inactive RPC, exiting request ' - 'generator.') - return - else: - # RPC is still active, keep waiting for queue items. - continue - - # A call to consumer.close() signaled us to stop generating - # requests. - if item == helper_threads.STOP: - _LOGGER.debug('Cleanly exiting request generator.') - return - - if self._should_exit(): - # We have an item, but the RPC is closed. We should put the - # item back on the queue so that the next RPC can consume it. - self._queue.put(item) - _LOGGER.debug( - 'Inactive RPC, replacing item on queue and exiting ' - 'request generator.') - return - - yield item - - -def _pausable_response_iterator(iterator, can_continue, period=1): - """Converts a gRPC response iterator into one that can be paused. - - The ``can_continue`` event can be used by an independent, concurrent - worker to pause and resume the iteration over ``iterator``. - - Args: - iterator (grpc.RpcContext, Iterator[protobuf.Message]): A - ``grpc.RpcContext`` instance that is also an iterator of responses. - This is a typically returned from grpc's streaming response call - types. - can_continue (threading.Event): An event which determines if we - can advance to the next iteration. Will be ``wait()``-ed on - before consuming more items from the iterator. - period (float): The number of seconds to wait to be able to consume - before checking if the RPC is cancelled. In practice, this - determines the maximum amount of time that ``next()`` on this - iterator will block after the RPC is cancelled. - - Yields: - Any: The items yielded from ``iterator``. 
- """ - while True: - can_yield = can_continue.wait(timeout=period) - # Calling next() on a cancelled RPC will cause it to raise the - # grpc.RpcError associated with the cancellation. - if can_yield or not iterator.is_active(): - yield next(iterator) - - -class Consumer(object): - """Bi-directional streaming RPC consumer. - - This class coordinates the consumption of a bi-directional streaming RPC. - There is a bit of background information to know before understanding how - this class operates: - - 1. gRPC has its own background thread for dealing with I/O. - 2. gRPC consumes a streaming call's request generator in another - thread. - 3. If the request generator thread exits, gRPC will close the - connection. - - Because of (2) and (3), the consumer must always at least use threading - for some bookkeeping. No matter what, a thread will be created by gRPC to - generate requests. This thread is called the *request generator thread*. - Having the request generator thread allows the consumer to hold the stream - open indefinitely. Now gRPC will send responses as fast as the consumer can - ask for them. The consumer hands these off to the :class:`Policy` via - :meth:`Policy.on_response`, which should not block. - - Finally, we do not want to block the main thread, so the consumer actually - invokes the RPC itself in a separate thread. This thread is called the - *response consumer helper thread*. - - So all in all there are three threads: - - 1. gRPC's internal I/O thread. - 2. The request generator thread, created by gRPC. - 3. The response consumer helper thread, created by the Consumer. - - In addition, the Consumer likely uses some sort of concurreny to prevent - blocking on processing responses. The Policy may also use another thread to - deal with pumping messages from an external queue into the request queue - here. - - It may seem strange to use threads for something "high performance" - considering the GIL. However, the threads here are not CPU bound. 
They are - simple threads that are blocked by I/O and generally just move around some - simple objects between queues. The overhead for these helper threads is - low. The Consumer and end-user can configure any sort of executor they want - for the actual processing of the responses, which may be CPU intensive. - """ - def __init__(self): - self._request_queue = queue.Queue() - self._stopped = threading.Event() - self._can_consume = threading.Event() - self._consumer_thread = None - - @property - def active(self): - """bool: Indicates if the consumer is active. - - *Active* means that the stream is open and that it is possible to - send and receive messages. This is distinct from *pausing* which just - pauses *response* consumption. - - This is intended to be an implementation independent way of indicating - that the consumer is stopped. (E.g. so a policy that owns a consumer - doesn't need to know what a ``threading.Event`` is.) - """ - return not self._stopped.is_set() - - def send_request(self, request): - """Queue a request to be sent to gRPC. - - Args: - request (Any): The request protobuf. - """ - self._request_queue.put(request) - - @property - def pending_requests(self): - """int: An approximate count of the outstanding requests. - - This can be used to determine if the consumer should be paused if there - are too many outstanding requests.""" - return self._request_queue.qsize() - - def _blocking_consume(self, policy): - """Consume the stream indefinitely. - - Args: - policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy, - which defines how requests and responses are handled. - """ - while True: - # It is possible that a timeout can cause the stream to not - # exit cleanly when the user has called stop_consuming(). This - # checks to make sure we're not exiting before opening a new - # stream. 
- if self._stopped.is_set(): - _LOGGER.debug('Event signaled consumer exit.') - break - - initial_request = policy.get_initial_request() - request_generator = _RequestQueueGenerator( - self._request_queue, initial_request=initial_request) - rpc = policy.call_rpc(iter(request_generator)) - request_generator.rpc = rpc - responses = _pausable_response_iterator(rpc, self._can_consume) - try: - for response in responses: - _LOGGER.debug('Received response on stream') - policy.on_response(response) - - # If the loop above exits without an exception, then the - # request stream terminated cleanly, which should only happen - # when it was signaled to do so by stop_consuming. In this - # case, break out of the while loop and exit this thread. - _LOGGER.debug('Clean RPC loop exit signaled consumer exit.') - break - except Exception as exc: - recover = policy.on_exception(exc) - if not recover: - self._stop_no_join() - # No need to raise this exception. The policy should handle - # passing the exception to the code that started the - # consumer via a future. - return - - @property - def paused(self): - """bool: Check if the current consumer is paused.""" - return not self._can_consume.is_set() - - def pause(self): - """Pause the current consumer. - - This method is idempotent by design. - - This will clear the ``_can_consume`` event which is checked - every time :meth:`_blocking_consume` consumes a response from the - bidirectional streaming pull. *requests* can still be sent along - the stream. - - Complement to :meth:`resume`. - """ - _LOGGER.debug('Pausing consumer') - self._can_consume.clear() - - def resume(self): - """Resume the current consumer. - - This method is idempotent by design. - - This will set the ``_can_consume`` event which is checked - every time :meth:`_blocking_consume` consumes a response from the - bidirectional streaming pull. - - Complement to :meth:`pause`. 
- """ - _LOGGER.debug('Resuming consumer') - self._can_consume.set() - - def start_consuming(self, policy): - """Start consuming the stream. - - Sets the ``_consumer_thread`` member on the current consumer with - a newly started thread. - - Args: - policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy - that owns this consumer. A policy defines how requests and - responses are handled. - """ - self._stopped.clear() - self.resume() # Make sure we aren't paused. - thread = threading.Thread( - name=_BIDIRECTIONAL_CONSUMER_NAME, - target=self._blocking_consume, - args=(policy,), - ) - thread.daemon = True - thread.start() - _LOGGER.debug('Started helper thread %s', thread.name) - self._consumer_thread = thread - - def _stop_no_join(self): - """Signal the request stream to stop. - - To actually stop the worker ("consumer thread"), a ``STOP`` is - sent to the request queue. - - The ``_consumer_thread`` member is removed from the current instance - and returned. - - Returns: - threading.Thread: The worker ("consumer thread") that is being - stopped. - """ - self.resume() # Make sure we aren't paused. - self._stopped.set() - _LOGGER.debug('Stopping helper thread %s', self._consumer_thread.name) - # Signal the request generator RPC to exit cleanly. - self.send_request(helper_threads.STOP) - thread = self._consumer_thread - self._consumer_thread = None - return thread - - def stop_consuming(self): - """Signal the stream to stop and block until it completes. - - To actually stop the worker ("consumer thread"), a ``STOP`` is - sent to the request queue. - - This **assumes** that the caller is not in the same thread - (since a thread cannot ``join()`` itself). 
- """ - thread = self._stop_no_join() - thread.join() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index e948d74521b7..c1e60bbfea8f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -26,7 +26,6 @@ from google.cloud.pubsub_v1.gapic import subscriber_client from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -from google.cloud.pubsub_v1.subscriber.policy import thread __version__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -42,19 +41,13 @@ class Client(object): get sensible defaults. Args: - policy_class (class): A class that describes how to handle - subscriptions. You may subclass the - :class:`.pubsub_v1.subscriber.policy.base.BasePolicy` - class in order to define your own consumer. This is primarily - provided to allow use of different concurrency models; the default - is based on :class:`threading.Thread`. kwargs (dict): Any additional arguments provided are sent as keyword keyword arguments to the underlying :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. Generally, you should not need to set additional keyword arguments. """ - def __init__(self, policy_class=thread.Policy, **kwargs): + def __init__(self, **kwargs): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. @@ -82,10 +75,6 @@ def __init__(self, policy_class=thread.Policy, **kwargs): # client. self._api = subscriber_client.SubscriberClient(**kwargs) - # The subcription class is responsible to retrieving and dispatching - # messages. - self._policy_class = policy_class - @property def target(self): """Return the target (where the API is). 
@@ -100,51 +89,7 @@ def api(self): """The underlying gapic API client.""" return self._api - def subscribe(self, subscription, callback=None, flow_control=()): - """Return a representation of an individual subscription. - - This method creates and returns a ``Consumer`` object (that is, a - :class:`~.pubsub_v1.subscriber._consumer.Consumer`) - subclass) bound to the topic. It does `not` create the subcription - on the backend (or do any API call at all); it simply returns an - object capable of doing these things. - - If the ``callback`` argument is provided, then the :meth:`open` method - is automatically called on the returned object. If ``callback`` is - not provided, the subscription is returned unopened. - - .. note:: - It only makes sense to provide ``callback`` here if you have - already created the subscription manually in the API. - - Args: - subscription (str): The name of the subscription. The - subscription should have already been created (for example, - by using :meth:`create_subscription`). - callback (function): The callback function. This function receives - the :class:`~.pubsub_v1.types.PubsubMessage` as its only - argument. - flow_control (~.pubsub_v1.types.FlowControl): The flow control - settings. Use this to prevent situations where you are - inundated with too many messages at once. - - Returns: - ~.pubsub_v1.subscriber._consumer.Consumer: An instance - of the defined ``consumer_class`` on the client. - - Raises: - TypeError: If ``callback`` is not callable. - """ - flow_control = types.FlowControl(*flow_control) - subscr = self._policy_class(self, subscription, flow_control) - if callable(callback): - subscr.open(callback) - elif callback is not None: - error = '{!r} is not callable, please check input'.format(callback) - raise TypeError(error) - return subscr - - def subscribe_experimental( + def subscribe( self, subscription, callback, flow_control=(), scheduler_=None): """Asynchronously start receiving messages on a given subscription. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index e6e55439a2c5..11fddf24abd0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -17,53 +17,6 @@ from google.cloud.pubsub_v1 import futures -class Future(futures.Future): - """Encapsulation of the asynchronous execution of an action. - - This object is returned from opening a Pub/Sub subscription, and is the - interface to block on the subscription or query its status. - - This object should not be created directly, but is returned by other - methods in this library. - - Args: - policy (~.pubsub_v1.subscriber.policy.base.BasePolicy): The policy - that creates this Future. - completed (Optional[Any]): An event, with the same interface as - :class:`threading.Event`. This is provided so that callers - with different concurrency models (e.g. ``threading`` or - ``multiprocessing``) can supply an event that is compatible - with that model. The ``wait()`` and ``set()`` methods will be - used. If this argument is not provided, then a new - :class:`threading.Event` will be created and used. - """ - def __init__(self, policy, completed=None): - self._policy = policy - super(Future, self).__init__(completed=completed) - - def running(self): - """Return whether this subscription is opened with this Future. - - .. note:: - - A :data:`False` value here does not necessarily mean that the - subscription is closed; it merely means that _this_ future is - not the future applicable to it. - - Since futures have a single result (or exception) and there is - not a concept of resetting them, a closing / re-opening of a - subscription will therefore return a new future. - - Returns: - bool: :data:`True` if this subscription is opened with this - future, :data:`False` otherwise. 
- """ - if self._policy.future is not self: - return False - - return super(Future, self).running() - - class StreamingPullFuture(futures.Future): """Represents a process that asynchronously performs streaming pull and schedules messages to be processed. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py deleted file mode 100644 index a1dca5208c94..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/base.py +++ /dev/null @@ -1,492 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Base class for concurrency policy.""" - -from __future__ import absolute_import, division - -import abc -import collections -import copy -import logging -import random -import time - -from google.api_core import exceptions -import six - -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import _consumer -from google.cloud.pubsub_v1.subscriber._protocol import requests -from google.cloud.pubsub_v1.subscriber._protocol import histogram - - -_LOGGER = logging.getLogger(__name__) - -# Namedtuples for management requests. Used by the Message class to communicate -# items of work back to the policy. -AckRequest = requests.AckRequest -DropRequest = requests.DropRequest -LeaseRequest = requests.LeaseRequest -ModAckRequest = requests.ModAckRequest -NackRequest = requests.NackRequest - -_LeasedMessage = collections.namedtuple( - '_LeasedMessage', - ['added_time', 'size']) - - -@six.add_metaclass(abc.ABCMeta) -class BasePolicy(object): - """Abstract class defining a subscription policy. - - Although the :class:`~.pubsub_v1.subscriber.policy.thread.Policy` class, - based on :class:`threading.Thread`, is fine for most cases, - advanced users may need to implement something based on a different - concurrency model. - - This class defines the interface for the policy implementation; - subclasses may be passed as the ``policy_class`` argument to - :class:`~.pubsub_v1.client.SubscriberClient`. - - Args: - client (google.cloud.pubsub_v1.subscriber.client.Client): The - subscriber client used to create this instance. - subscription (str): The name of the subscription. The canonical - format for this is - ``projects/{project}/subscriptions/{subscription}``. - flow_control (google.cloud.pubsub_v1.types.FlowControl): The flow - control settings. - histogram_data (dict): Optional: A structure to store the histogram - data for predicting appropriate ack times. If set, this should - be a dictionary-like object. - - .. 
note:: - Additionally, the histogram relies on the assumption - that the dictionary will properly sort keys provided - that all keys are positive integers. If you are sending - your own dictionary class, ensure this assumption holds - or you will get strange behavior. - """ - _RETRYABLE_STREAM_ERRORS = ( - exceptions.DeadlineExceeded, - exceptions.ServiceUnavailable, - exceptions.InternalServerError, - exceptions.Unknown, - exceptions.GatewayTimeout, - ) - - def __init__(self, client, subscription, - flow_control=types.FlowControl(), histogram_data=None): - self._client = client - self._subscription = subscription - self._consumer = _consumer.Consumer() - self._ack_deadline = 10 - self._last_histogram_size = 0 - self._future = None - self.flow_control = flow_control - self.histogram = histogram.Histogram(data=histogram_data) - """.Histogram: the histogram tracking ack latency.""" - self.leased_messages = {} - """dict[str, float]: A mapping of ack IDs to the local time when the - ack ID was initially leased in seconds since the epoch.""" - - # These are for internal flow control tracking. - # They should not need to be used by subclasses. - self._bytes = 0 - self._ack_on_resume = set() - - @property - def ack_deadline(self): - """Return the appropriate ack deadline. - - This method is "sticky". It will only perform the computations to - check on the right ack deadline if the histogram has gained a - significant amount of new information. - - Returns: - int: The correct ack deadline. - """ - target = min([ - self._last_histogram_size * 2, - self._last_histogram_size + 100, - ]) - if len(self.histogram) > target: - self._ack_deadline = self.histogram.percentile(percent=99) - return self._ack_deadline - - @property - def future(self): - """Return the Future in use, if any. - - Returns: - google.cloud.pubsub_v1.subscriber.futures.Future: A Future - conforming to the :class:`~concurrent.futures.Future` interface. 
- """ - return self._future - - @property - def subscription(self): - """Return the subscription. - - Returns: - str: The subscription - """ - return self._subscription - - @property - def _load(self): - """Return the current load. - - The load is represented as a float, where 1.0 represents having - hit one of the flow control limits, and values between 0.0 and 1.0 - represent how close we are to them. (0.5 means we have exactly half - of what the flow control setting allows, for example.) - - There are (currently) two flow control settings; this property - computes how close the subscriber is to each of them, and returns - whichever value is higher. (It does not matter that we have lots of - running room on setting A if setting B is over.) - - Returns: - float: The load value. - """ - return max([ - len(self.leased_messages) / self.flow_control.max_messages, - self._bytes / self.flow_control.max_bytes, - self._consumer.pending_requests / self.flow_control.max_requests - ]) - - def _maybe_resume_consumer(self): - """Check the current load and resume the consumer if needed.""" - # If we have been paused by flow control, check and see if we are - # back within our limits. - # - # In order to not thrash too much, require us to have passed below - # the resume threshold (80% by default) of each flow control setting - # before restarting. - if not self._consumer.paused: - return - - if self._load < self.flow_control.resume_threshold: - self._consumer.resume() - else: - _LOGGER.debug('Did not resume, current load is %s', self._load) - - def ack(self, items): - """Acknowledge the given messages. - - Args: - items(Sequence[AckRequest]): The items to acknowledge. - """ - # If we got timing information, add it to the histogram. - for item in items: - time_to_ack = item.time_to_ack - if time_to_ack is not None: - self.histogram.add(int(time_to_ack)) - - ack_ids = [item.ack_id for item in items] - if self._consumer.active: - # Send the request to ack the message. 
- request = types.StreamingPullRequest(ack_ids=ack_ids) - self._consumer.send_request(request) - else: - # If the consumer is inactive, then queue the ack_ids here; it - # will be acked as part of the initial request when the consumer - # is started again. - self._ack_on_resume.update(ack_ids) - - # Remove the message from lease management. - self.drop(items) - - def call_rpc(self, request_generator): - """Invoke the Pub/Sub streaming pull RPC. - - Args: - request_generator (Generator): A generator that yields requests, - and blocks if there are no outstanding requests (until such - time as there are). - - Returns: - Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]: An - iterable of pull responses. - """ - return self._client.api.streaming_pull(request_generator) - - def drop(self, items): - """Remove the given messages from lease management. - - Args: - items(Sequence[DropRequest]): The items to drop. - """ - # Remove the ack ID from lease management, and decrement the - # byte counter. - for item in items: - if self.leased_messages.pop(item.ack_id, None) is not None: - self._bytes -= item.byte_size - else: - _LOGGER.debug('Item %s was not managed.', item.ack_id) - - if self._bytes < 0: - _LOGGER.debug( - 'Bytes was unexpectedly negative: %d', self._bytes) - self._bytes = 0 - - self._maybe_resume_consumer() - - def get_initial_request(self): - """Return the initial request. - - This defines the initial request that must always be sent to Pub/Sub - immediately upon opening the subscription. - - Returns: - google.cloud.pubsub_v1.types.StreamingPullRequest: A request - suitable for being the first request on the stream (and not - suitable for any other purpose). - """ - # Any ack IDs that are under lease management and not being acked - # need to have their deadline extended immediately. - lease_ids = set(self.leased_messages.keys()) - # Exclude any IDs that we're about to ack. 
- lease_ids = lease_ids.difference(self._ack_on_resume) - - # Put the request together. - request = types.StreamingPullRequest( - ack_ids=list(self._ack_on_resume), - modify_deadline_ack_ids=list(lease_ids), - modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), - stream_ack_deadline_seconds=self.histogram.percentile(99), - subscription=self.subscription, - ) - - # Clear the ack_ids set. - self._ack_on_resume.clear() - - # Return the initial request. - return request - - def lease(self, items): - """Add the given messages to lease management. - - Args: - items(Sequence[LeaseRequest]): The items to lease. - """ - for item in items: - # Add the ack ID to the set of managed ack IDs, and increment - # the size counter. - if item.ack_id not in self.leased_messages: - self.leased_messages[item.ack_id] = _LeasedMessage( - added_time=time.time(), - size=item.byte_size) - self._bytes += item.byte_size - else: - _LOGGER.debug( - 'Message %s is already lease managed', item.ack_id) - - # Sanity check: Do we have too many things in our inventory? - # If we do, we need to stop the stream. - if self._load >= 1.0: - self._consumer.pause() - - def maintain_leases(self): - """Maintain all of the leases being managed by the policy. - - This method modifies the ack deadline for all of the managed - ack IDs, then waits for most of that time (but with jitter), and - then calls itself. - - .. warning:: - This method blocks, and generally should be run in a separate - thread or process. - - Additionally, you should not have to call this method yourself, - unless you are implementing your own policy. If you are - implementing your own policy, you _should_ call this method - in an appropriate form of subprocess. - """ - while True: - # Sanity check: Should this infinite loop quit? - if not self._consumer.active: - _LOGGER.debug('Consumer inactive, ending lease maintenance.') - return - - # Determine the appropriate duration for the lease. 
This is - # based off of how long previous messages have taken to ack, with - # a sensible default and within the ranges allowed by Pub/Sub. - p99 = self.histogram.percentile(99) - _LOGGER.debug('The current p99 value is %d seconds.', p99) - - # Make a copy of the leased messages. This is needed because it's - # possible for another thread to modify the dictionary while - # we're iterating over it. - leased_messages = copy.copy(self.leased_messages) - - # Drop any leases that are well beyond max lease time. This - # ensures that in the event of a badly behaving actor, we can - # drop messages and allow Pub/Sub to resend them. - cutoff = time.time() - self.flow_control.max_lease_duration - to_drop = [ - DropRequest(ack_id, item.size) - for ack_id, item - in six.iteritems(leased_messages) - if item.added_time < cutoff] - - if to_drop: - _LOGGER.warning( - 'Dropping %s items because they were leased too long.', - len(to_drop)) - self.drop(to_drop) - - # Remove dropped items from our copy of the leased messages (they - # have already been removed from the real one by self.drop). - for item in to_drop: - leased_messages.pop(item.ack_id) - - # Create a streaming pull request. - # We do not actually call `modify_ack_deadline` over and over - # because it is more efficient to make a single request. - ack_ids = list(leased_messages.keys()) - if ack_ids: - _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) - - # NOTE: This may not work as expected if ``consumer.active`` - # has changed since we checked it. An implementation - # without any sort of race condition would require a - # way for ``send_request`` to fail when the consumer - # is inactive. - self.modify_ack_deadline([ - ModAckRequest(ack_id, p99) for ack_id in ack_ids]) - - # Now wait an appropriate period of time and do this again. - # - # We determine the appropriate period of time based on a random - # period between 0 seconds and 90% of the lease. 
This use of - # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases - # where there are many clients. - snooze = random.uniform(0.0, p99 * 0.9) - _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) - time.sleep(snooze) - - def modify_ack_deadline(self, items): - """Modify the ack deadline for the given messages. - - Args: - items(Sequence[ModAckRequest]): The items to modify. - """ - ack_ids = [item.ack_id for item in items] - seconds = [item.seconds for item in items] - - request = types.StreamingPullRequest( - modify_deadline_ack_ids=ack_ids, - modify_deadline_seconds=seconds, - ) - self._consumer.send_request(request) - - def nack(self, items): - """Explicitly deny receipt of messages. - - Args: - items(Sequence[NackRequest]): The items to deny. - """ - self.modify_ack_deadline([ - ModAckRequest(ack_id=item.ack_id, seconds=0) - for item in items]) - self.drop( - [DropRequest(*item) for item in items]) - - @abc.abstractmethod - def close(self): - """Close the existing connection. - - Raises: - NotImplementedError: Always - """ - raise NotImplementedError - - @abc.abstractmethod - def on_exception(self, exception): - """Called when a gRPC exception occurs. - - If this method does nothing, then the stream is re-started. If this - raises an exception, it will stop the consumer thread. This is - executed on the response consumer helper thread. - - Implementations should return :data:`True` if they want the consumer - thread to remain active, otherwise they should return :data:`False`. - - Args: - exception (Exception): The exception raised by the RPC. - - Raises: - NotImplementedError: Always - """ - raise NotImplementedError - - def on_request(self, request): - """Called whenever a request has been sent to gRPC. - - This allows the policy to measure the rate of requests sent along the - stream and apply backpressure by pausing or resuming the consumer - if needed. - - Args: - request (Any): The protobuf request that was sent to gRPC. 
- """ - self._maybe_resume_consumer() - - @abc.abstractmethod - def on_response(self, response): - """Process a response from gRPC. - - This gives the consumer control over how responses are scheduled to - be processed. This method is expected to not block and instead - schedule the response to be consumed by some sort of concurrency. - - For example, if a the Policy implementation takes a callback in its - constructor, you can schedule the callback using a - :class:`concurrent.futures.ThreadPoolExecutor`:: - - self._pool.submit(self._callback, response) - - This is called from the response consumer helper thread. - - Args: - response (Any): The protobuf response from the RPC. - - Raises: - NotImplementedError: Always - """ - raise NotImplementedError - - @abc.abstractmethod - def open(self, callback): - """Open a streaming pull connection and begin receiving messages. - - For each message received, the ``callback`` function is fired with - a :class:`~.pubsub_v1.subscriber.message.Message` as its only - argument. - - This method is virtual, but concrete implementations should return - a :class:`~google.api_core.future.Future` that provides an interface - to block on the subscription if desired, and handle errors. - - Args: - callback (Callable[Message]): A callable that receives a - Pub/Sub Message. - - Raises: - NotImplementedError: Always - """ - raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py deleted file mode 100644 index 78874f32aaf2..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/policy/thread.py +++ /dev/null @@ -1,347 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import collections -from concurrent import futures -import functools -import logging -import sys -import threading - -from six.moves import queue as queue_mod - -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber._protocol import helper_threads -from google.cloud.pubsub_v1.subscriber.futures import Future -from google.cloud.pubsub_v1.subscriber.policy import base -from google.cloud.pubsub_v1.subscriber.message import Message - - -_LOGGER = logging.getLogger(__name__) -_CALLBACK_WORKER_NAME = 'Thread-Consumer-CallbackRequestsWorker' - - -def _wrap_callback_errors(callback, message): - """Wraps a user callback so that if an exception occurs the message is - nacked. - - Args: - callback (Callable[None, Message]): The user callback. - message (~Message): The Pub/Sub message. - """ - try: - callback(message) - except Exception: - # Note: the likelihood of this failing is extremely low. This just adds - # a message to a queue, so if this doesn't work the world is in an - # unrecoverable state and this thread should just bail. - message.nack() - # Re-raise the exception so that the executor can deal with it. - raise - - -class Policy(base.BasePolicy): - """A consumer class based on :class:`threading.Thread`. - - This consumer handles the connection to the Pub/Sub service and all of - the concurrency needs. - - Args: - client (~.pubsub_v1.subscriber.client): The subscriber client used - to create this instance. - subscription (str): The name of the subscription. 
The canonical - format for this is - ``projects/{project}/subscriptions/{subscription}``. - flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow - control settings. - executor (~concurrent.futures.ThreadPoolExecutor): (Optional.) A - ThreadPoolExecutor instance, or anything duck-type compatible - with it. - queue (~queue.Queue): (Optional.) A Queue instance, appropriate - for crossing the concurrency boundary implemented by - ``executor``. - """ - - def __init__(self, client, subscription, flow_control=types.FlowControl(), - executor=None, queue=None): - super(Policy, self).__init__( - client=client, - flow_control=flow_control, - subscription=subscription, - ) - # The **actual** callback is provided by ``.open()``. - self._callback = None - # Create a queue for keeping track of shared state. - self._request_queue = self._get_queue(queue) - # Also maintain an executor. - self._executor = self._get_executor(executor) - # The threads created in ``.open()``. - self._dispatch_thread = None - self._leases_thread = None - - @staticmethod - def _get_queue(queue): - """Gets a queue for the constructor. - - Args: - queue (Optional[~queue.Queue]): A Queue instance, appropriate - for crossing the concurrency boundary implemented by - ``executor``. - - Returns: - ~queue.Queue: Either ``queue`` if not :data:`None` or a default - queue. - """ - if queue is None: - return queue_mod.Queue() - else: - return queue - - @staticmethod - def _get_executor(executor): - """Gets an executor for the constructor. - - Args: - executor (Optional[~concurrent.futures.ThreadPoolExecutor]): A - ThreadPoolExecutor instance, or anything duck-type compatible - with it. - - Returns: - ~concurrent.futures.ThreadPoolExecutor: Either ``executor`` if not - :data:`None` or a default thread pool executor with 10 workers - and a prefix (if supported). 
- """ - if executor is None: - executor_kwargs = {} - if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): - executor_kwargs['thread_name_prefix'] = ( - 'ThreadPoolExecutor-SubscriberPolicy') - return futures.ThreadPoolExecutor( - max_workers=10, - **executor_kwargs - ) - else: - return executor - - def close(self): - """Close the existing connection. - - .. warning:: - - This method is not thread-safe. For example, if this method is - called while another thread is executing :meth:`open`, then the - policy could end up in an undefined state. The **same** policy - instance is not intended to be used by multiple workers (though - each policy instance **does** have a thread-safe private queue). - - Returns: - ~google.api_core.future.Future: The future that **was** attached - to the subscription. - - Raises: - ValueError: If the policy has not been opened yet. - """ - if self._future is None: - raise ValueError('This policy has not been opened yet.') - - # Stop consuming messages. - self._request_queue.put(helper_threads.STOP) - self._dispatch_thread.join() # Wait until stopped. - self._dispatch_thread = None - self._consumer.stop_consuming() - self._leases_thread.join() - self._leases_thread = None - self._executor.shutdown() - - # The subscription is closing cleanly; resolve the future if it is not - # resolved already. - if not self._future.done(): - self._future.set_result(None) - future = self._future - self._future = None - return future - - def _start_dispatch(self): - """Start a thread to dispatch requests queued up by callbacks. - - .. note:: - - This assumes, but does not check, that ``_dispatch_thread`` - is :data:`None`. - - Spawns a thread to run :meth:`dispatch_callback` and sets the - "dispatch thread" member on the current policy. 
- """ - _LOGGER.debug('Starting callback requests worker.') - dispatch_worker = helper_threads.QueueCallbackWorker( - self._request_queue, - self.dispatch_callback, - max_items=self.flow_control.max_request_batch_size, - max_latency=self.flow_control.max_request_batch_latency - ) - # Create and start the helper thread. - thread = threading.Thread( - name=_CALLBACK_WORKER_NAME, - target=dispatch_worker, - ) - thread.daemon = True - thread.start() - _LOGGER.debug('Started helper thread %s', thread.name) - self._dispatch_thread = thread - - def _start_lease_worker(self): - """Spawn a helper thread that maintains all of leases for this policy. - - .. note:: - - This assumes, but does not check, that ``_leases_thread`` is - :data:`None`. - - Spawns a thread to run :meth:`maintain_leases` and sets the - "leases thread" member on the current policy. - """ - _LOGGER.debug('Starting lease maintenance worker.') - thread = threading.Thread( - name='Thread-LeaseMaintenance', - target=self.maintain_leases, - ) - thread.daemon = True - thread.start() - - self._leases_thread = thread - - def open(self, callback): - """Open a streaming pull connection and begin receiving messages. - - .. warning:: - - This method is not thread-safe. For example, if this method is - called while another thread is executing :meth:`close`, then the - policy could end up in an undefined state. The **same** policy - instance is not intended to be used by multiple workers (though - each policy instance **does** have a thread-safe private queue). - - For each message received, the ``callback`` function is fired with - a :class:`~.pubsub_v1.subscriber.message.Message` as its only - argument. - - Args: - callback (Callable): The callback function. - - Returns: - ~google.api_core.future.Future: A future that provides - an interface to block on the subscription if desired, and - handle errors. - - Raises: - ValueError: If the policy has already been opened. 
- """ - if self._future is not None: - raise ValueError('This policy has already been opened.') - - # Create the Future that this method will return. - # This future is the main thread's interface to handle exceptions, - # block on the subscription, etc. - self._future = Future(policy=self, completed=threading.Event()) - - # Start the thread to pass the requests. - self._callback = functools.partial(_wrap_callback_errors, callback) - self._start_dispatch() - # Actually start consuming messages. - self._consumer.start_consuming(self) - self._start_lease_worker() - - # Return the future. - return self._future - - def dispatch_callback(self, items): - """Map the callback request to the appropriate gRPC request. - - Args: - action (str): The method to be invoked. - kwargs (Dict[str, Any]): The keyword arguments for the method - specified by ``action``. - - Raises: - ValueError: If ``action`` isn't one of the expected actions - "ack", "drop", "lease", "modify_ack_deadline" or "nack". - """ - batched_commands = collections.defaultdict(list) - - for item in items: - batched_commands[item.__class__].append(item) - - _LOGGER.debug('Handling %d batched requests', len(items)) - - if batched_commands[base.LeaseRequest]: - self.lease(batched_commands.pop(base.LeaseRequest)) - if batched_commands[base.ModAckRequest]: - self.modify_ack_deadline( - batched_commands.pop(base.ModAckRequest)) - # Note: Drop and ack *must* be after lease. It's possible to get both - # the lease the and ack/drop request in the same batch. - if batched_commands[base.AckRequest]: - self.ack(batched_commands.pop(base.AckRequest)) - if batched_commands[base.NackRequest]: - self.nack(batched_commands.pop(base.NackRequest)) - if batched_commands[base.DropRequest]: - self.drop(batched_commands.pop(base.DropRequest)) - - def on_exception(self, exception): - """Handle the exception. 
- - If the exception is one of the retryable exceptions, this will signal - to the consumer thread that it should "recover" from the failure. - - This will cause the stream to exit when it returns :data:`False`. - - Returns: - bool: Indicates if the caller should recover or shut down. - Will be :data:`True` if the ``exception`` is "acceptable", i.e. - in a list of retryable / idempotent exceptions. - """ - # If this is in the list of idempotent exceptions, then we want to - # retry. That entails just returning None. - if isinstance(exception, self._RETRYABLE_STREAM_ERRORS): - return True - - # Set any other exception on the future. - self._future.set_exception(exception) - return False - - def on_response(self, response): - """Process all received Pub/Sub messages. - - For each message, send a modified acknowledgement request to the - server. This prevents expiration of the message due to buffering by - gRPC or proxy/firewall. This makes the server and client expiration - timer closer to each other thus preventing the message being - redelivered multiple times. - - After the messages have all had their ack deadline updated, execute - the callback for each message using the executor. - """ - items = [ - base.ModAckRequest(message.ack_id, self.histogram.percentile(99)) - for message in response.received_messages - ] - self.modify_ack_deadline(items) - for msg in response.received_messages: - _LOGGER.debug( - 'Using %s to process message with ack_id %s.', - self._callback, msg.ack_id) - message = Message(msg.message, msg.ack_id, self._request_queue) - self._executor.submit(self._callback, message) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index a33a042f07fa..3c77f0629aef 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -97,7 +97,6 @@ def test_subscribe_to_messages( # Subscribe to the topic. This must happen before the messages # are published. 
subscriber.create_subscription(subscription_path, topic_path) - subscription = subscriber.subscribe(subscription_path) # Publish some messages. futures = [ @@ -117,7 +116,7 @@ def test_subscribe_to_messages( # The callback should process the message numbers to prove # that we got everything at least once. callback = AckCallback() - subscription.open(callback) + future = subscriber.subscribe(subscription_path, callback) for second in six.moves.range(10): time.sleep(1) @@ -129,7 +128,7 @@ def test_subscribe_to_messages( # Okay, we took too long; fail out. assert callback.calls >= 50 - subscription.close() + future.cancel() def test_subscribe_to_messages_async_callbacks( @@ -141,60 +140,6 @@ def test_subscribe_to_messages_async_callbacks( # Create a topic. publisher.create_topic(topic_path) - # Subscribe to the topic. This must happen before the messages - # are published. - subscriber.create_subscription(subscription_path, topic_path) - subscription = subscriber.subscribe(subscription_path) - - # Publish some messages. - futures = [ - publisher.publish( - topic_path, - b'Wooooo! The claaaaaw!', - num=str(index), - ) - for index in six.moves.range(2) - ] - - # Make sure the publish completes. - for future in futures: - future.result() - - # We want to make sure that the callback was called asynchronously. So - # track when each call happened and make sure below. - callback = TimesCallback(2) - - # Actually open the subscription and hold it open for a few seconds. - subscription.open(callback) - for second in six.moves.range(5): - time.sleep(4) - - # The callback should have fired at least two times, but it may - # take some time. - if callback.calls >= 2: - first, last = sorted(callback.call_times[:2]) - diff = last - first - # "Ensure" the first two callbacks were executed asynchronously - # (sequentially would have resulted in a difference of 2+ - # seconds). - assert diff.days == 0 - assert diff.seconds < callback.sleep_time - - # Okay, we took too long; fail out. 
- assert callback.calls >= 2 - - subscription.close() - - -def test_subscribe_to_messages_async_callbacks_experimental( - publisher, topic_path, subscriber, subscription_path, cleanup): - # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - - # Create a topic. - publisher.create_topic(topic_path) - # Subscribe to the topic. This must happen before the messages # are published. subscriber.create_subscription(subscription_path, topic_path) @@ -218,7 +163,7 @@ def test_subscribe_to_messages_async_callbacks_experimental( callback = TimesCallback(2) # Actually open the subscription and hold it open for a few seconds. - future = subscriber.subscribe_experimental(subscription_path, callback) + future = subscriber.subscribe(subscription_path, callback) for second in six.moves.range(5): time.sleep(4) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py deleted file mode 100644 index 572014dbfc4f..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_consumer.py +++ /dev/null @@ -1,336 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import threading - -from google.auth import credentials -import grpc -import mock -from six.moves import queue - -from google.cloud.pubsub_v1 import subscriber -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import _consumer -from google.cloud.pubsub_v1.subscriber._protocol import helper_threads -from google.cloud.pubsub_v1.subscriber.policy import base - - -class Test_RequestQueueGenerator(object): - - def test_bounded_consume(self): - rpc = mock.create_autospec(grpc.RpcContext, instance=True) - rpc.is_active.return_value = True - - def queue_generator(rpc): - yield mock.sentinel.A - yield queue.Empty() - yield mock.sentinel.B - rpc.is_active.return_value = False - yield mock.sentinel.C - - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue_generator(rpc) - - generator = _consumer._RequestQueueGenerator(q) - generator.rpc = rpc - - items = list(generator) - - assert items == [mock.sentinel.A, mock.sentinel.B] - - def test_yield_initial_and_exit(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue.Empty() - rpc = mock.create_autospec(grpc.RpcContext, instance=True) - rpc.is_active.return_value = False - - generator = _consumer._RequestQueueGenerator( - q, initial_request=mock.sentinel.A) - generator.rpc = rpc - - items = list(generator) - - assert items == [mock.sentinel.A] - - def test_exit_when_inactive_with_item(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = [mock.sentinel.A, queue.Empty()] - rpc = mock.create_autospec(grpc.RpcContext, instance=True) - rpc.is_active.return_value = False - - generator = _consumer._RequestQueueGenerator(q) - generator.rpc = rpc - - items = list(generator) - - assert items == [] - # Make sure it put the item back. 
- q.put.assert_called_once_with(mock.sentinel.A) - - def test_exit_when_inactive_empty(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue.Empty() - rpc = mock.create_autospec(grpc.RpcContext, instance=True) - rpc.is_active.return_value = False - - generator = _consumer._RequestQueueGenerator(q) - generator.rpc = rpc - - items = list(generator) - - assert items == [] - - def test_exit_with_stop(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = [helper_threads.STOP, queue.Empty()] - rpc = mock.create_autospec(grpc.RpcContext, instance=True) - rpc.is_active.return_value = True - - generator = _consumer._RequestQueueGenerator(q) - generator.rpc = rpc - - items = list(generator) - - assert items == [] - - -class _ResponseIterator(object): - def __init__(self, items, active=True): - self._items = iter(items) - self._active = active - - def is_active(self): - return self._active - - def __next__(self): - return next(self._items) - - next = __next__ - - -def test__pausable_response_iterator_active_but_cant_consume(): - # Note: we can't autospec threading.Event because it's goofy on Python 2. - can_consume = mock.Mock(spec=['wait']) - # First call will return false, indicating the loop should try again. - # second call will allow it to consume the first (and only) item. 
- can_consume.wait.side_effect = [False, True] - iterator = _ResponseIterator([1]) - - pausable_iter = _consumer._pausable_response_iterator( - iterator, can_consume) - - items = list(pausable_iter) - - assert items == [1] - - -def test_send_request(): - consumer = _consumer.Consumer() - request = types.StreamingPullRequest(subscription='foo') - with mock.patch.object(queue.Queue, 'put') as put: - consumer.send_request(request) - put.assert_called_once_with(request) - - -def test_blocking_consume(): - policy = mock.create_autospec(base.BasePolicy, instance=True) - policy.call_rpc.return_value = iter((mock.sentinel.A, mock.sentinel.B)) - - consumer = _consumer.Consumer() - consumer.resume() - - assert consumer._blocking_consume(policy) is None - policy.call_rpc.assert_called_once() - policy.on_response.assert_has_calls( - [mock.call(mock.sentinel.A), mock.call(mock.sentinel.B)]) - - -@mock.patch.object(_consumer, '_LOGGER') -def test_blocking_consume_when_exiting(_LOGGER): - consumer = _consumer.Consumer() - assert consumer._stopped.is_set() is False - consumer._stopped.set() - - # Make sure method cleanly exits. - assert consumer._blocking_consume(None) is None - - _LOGGER.debug.assert_called_once_with('Event signaled consumer exit.') - - -class OnException(object): - - def __init__(self, acceptable=None): - self.acceptable = acceptable - - def __call__(self, exception): - if exception is self.acceptable: - return True - else: - return False - - -def test_blocking_consume_on_exception(): - policy = mock.create_autospec(base.BasePolicy, instance=True) - policy.call_rpc.return_value = iter((mock.sentinel.A, mock.sentinel.B)) - exc = TypeError('Bad things!') - policy.on_response.side_effect = exc - - consumer = _consumer.Consumer() - consumer.resume() - consumer._consumer_thread = mock.Mock(spec=threading.Thread) - policy.on_exception.side_effect = OnException() - - # Establish that we get responses until we are sent the exiting event. 
- consumer._blocking_consume(policy) - assert consumer._consumer_thread is None - - # Check mocks. - policy.call_rpc.assert_called_once() - policy.on_response.assert_called_once_with(mock.sentinel.A) - policy.on_exception.assert_called_once_with(exc) - - -class RaisingResponseGenerator(object): - # NOTE: This is needed because defining `.next` on an **instance** - # rather than the **class** will not be iterable in Python 2. - # This is problematic since a `Mock` just sets members. - - def __init__(self, exception, active=True): - self.exception = exception - self.next_calls = 0 - self._active = active - - def __next__(self): - self.next_calls += 1 - raise self.exception - - def next(self): - return self.__next__() # Python 2 - - def is_active(self): - return self._active - - -def test_blocking_consume_iter_exception_while_paused(): - policy = mock.create_autospec(base.BasePolicy, instance=True) - exc = TypeError('Bad things!') - policy.call_rpc.return_value = RaisingResponseGenerator( - exc, active=False) - - consumer = _consumer.Consumer() - # Ensure the consume is paused. - consumer.pause() - consumer._consumer_thread = mock.Mock(spec=threading.Thread) - policy.on_exception.side_effect = OnException() - - # Start the thread. It should not block forever but should notice the rpc - # is inactive and raise the exception from the stream and then exit - # because on_exception returns false. - consumer._blocking_consume(policy) - assert consumer._consumer_thread is None - - # Check mocks. 
- policy.call_rpc.assert_called_once() - policy.on_exception.assert_called_once_with(exc) - - -def test_blocking_consume_two_exceptions(): - policy = mock.create_autospec(base.BasePolicy, instance=True) - - exc1 = NameError('Oh noes.') - exc2 = ValueError('Something grumble.') - policy.on_exception.side_effect = OnException(acceptable=exc1) - - response_generator1 = RaisingResponseGenerator(exc1) - response_generator2 = RaisingResponseGenerator(exc2) - policy.call_rpc.side_effect = (response_generator1, response_generator2) - - consumer = _consumer.Consumer() - consumer.resume() - consumer._consumer_thread = mock.create_autospec( - threading.Thread, instance=True) - - # Establish that we get responses until we are sent the exiting event. - assert consumer._blocking_consume(policy) is None - assert consumer._consumer_thread is None - - # Check mocks. - assert policy.call_rpc.call_count == 2 - assert response_generator1.next_calls == 1 - assert response_generator2.next_calls == 1 - policy.on_exception.assert_has_calls( - [mock.call(exc1), mock.call(exc2)]) - - -def test_paused(): - consumer = _consumer.Consumer() - assert consumer.paused is True - - consumer._can_consume.set() - assert consumer.paused is False - - consumer._can_consume.clear() - assert consumer.paused is True - - -@mock.patch.object(_consumer, '_LOGGER') -def test_pause(_LOGGER): - consumer = _consumer.Consumer() - consumer._can_consume.set() - - assert consumer.pause() is None - assert not consumer._can_consume.is_set() - _LOGGER.debug.assert_called_once_with('Pausing consumer') - - -@mock.patch.object(_consumer, '_LOGGER') -def test_resume(_LOGGER): - consumer = _consumer.Consumer() - consumer._can_consume.clear() - - assert consumer.resume() is None - assert consumer._can_consume.is_set() - _LOGGER.debug.assert_called_once_with('Resuming consumer') - - -def test_start_consuming(): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - policy = 
client.subscribe('sub_name_e') - consumer = _consumer.Consumer() - with mock.patch.object(threading, 'Thread', autospec=True) as Thread: - consumer.start_consuming(policy) - - assert consumer._stopped.is_set() is False - Thread.assert_called_once_with( - name=_consumer._BIDIRECTIONAL_CONSUMER_NAME, - target=consumer._blocking_consume, - args=(policy,), - ) - assert consumer._consumer_thread is Thread.return_value - - -def test_stop_consuming(): - consumer = _consumer.Consumer() - assert consumer._stopped.is_set() is False - thread = mock.Mock(spec=threading.Thread) - consumer._consumer_thread = thread - - assert consumer.stop_consuming() is None - - # Make sure state was updated. - assert consumer._stopped.is_set() is True - assert consumer._consumer_thread is None - # Check mocks. - thread.join.assert_called_once_with() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 3ffcaff647dd..7c695047c86f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -17,35 +17,10 @@ import mock import pytest -from google.auth import credentials -from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1.subscriber import futures -from google.cloud.pubsub_v1.subscriber.policy import thread from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -def create_policy(**kwargs): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - return thread.Policy(client, 'sub_name_c', **kwargs) - - -def create_future(policy=None): - if policy is None: - policy = create_policy() - future = futures.Future(policy=policy) - policy._future = future - return future - - -def test_running(): - policy = create_policy() - future = 
create_future(policy=policy) - assert future.running() is True - policy._future = None - assert future.running() is False - - class TestStreamingPullFuture(object): def make_future(self): manager = mock.create_autospec( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py deleted file mode 100644 index 42a062eae168..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_base.py +++ /dev/null @@ -1,367 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import time - -from google.api_core import exceptions -from google.auth import credentials -import grpc -import mock - -from google.cloud.pubsub_v1 import subscriber -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client_config -from google.cloud.pubsub_v1.subscriber.policy import base -from google.cloud.pubsub_v1.subscriber.policy import thread - - -def create_policy(flow_control=types.FlowControl()): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - return thread.Policy(client, 'sub_name_d', flow_control=flow_control) - - -def test_idempotent_retry_codes(): - # Make sure the config matches our hard-coded tuple of exceptions. 
- interfaces = subscriber_client_config.config['interfaces'] - retry_codes = interfaces['google.pubsub.v1.Subscriber']['retry_codes'] - idempotent = retry_codes['idempotent'] - - status_codes = tuple( - getattr(grpc.StatusCode, name, None) - for name in idempotent - ) - expected = tuple( - exceptions.exception_class_for_grpc_status(status_code) - for status_code in status_codes - ) - assert set(expected).issubset( - set(base.BasePolicy._RETRYABLE_STREAM_ERRORS)) - - -def test_ack_deadline(): - policy = create_policy() - assert policy.ack_deadline == 10 - policy.histogram.add(20) - assert policy.ack_deadline == 20 - policy.histogram.add(10) - assert policy.ack_deadline == 20 - - -def test_get_initial_request(): - policy = create_policy() - initial_request = policy.get_initial_request() - assert isinstance(initial_request, types.StreamingPullRequest) - assert initial_request.subscription == 'sub_name_d' - assert initial_request.stream_ack_deadline_seconds == 10 - - -def test_leased_messagess(): - policy = create_policy() - - # Ensure we always get a set back, even if the property is not yet set. - leased_messages = policy.leased_messages - assert isinstance(leased_messages, dict) - - # Ensure that multiple calls give the same actual object back. 
- assert leased_messages is policy.leased_messages - - -def test_subscription(): - policy = create_policy() - assert policy.subscription == 'sub_name_d' - - -def test_ack(): - policy = create_policy() - policy._consumer._stopped.clear() - with mock.patch.object(policy._consumer, 'send_request') as send_request: - policy.ack([ - base.AckRequest( - ack_id='ack_id_string', time_to_ack=20, byte_size=0)]) - send_request.assert_called_once_with(types.StreamingPullRequest( - ack_ids=['ack_id_string'], - )) - assert len(policy.histogram) == 1 - assert 20 in policy.histogram - - -def test_ack_no_time(): - policy = create_policy() - policy._consumer._stopped.clear() - with mock.patch.object(policy._consumer, 'send_request') as send_request: - policy.ack([base.AckRequest( - 'ack_id_string', time_to_ack=None, byte_size=0)]) - send_request.assert_called_once_with(types.StreamingPullRequest( - ack_ids=['ack_id_string'], - )) - assert len(policy.histogram) == 0 - - -def test_ack_paused(): - policy = create_policy() - consumer = policy._consumer - consumer._stopped.set() - assert consumer.paused is True - - policy.ack([base.AckRequest('ack_id_string', 0, 0)]) - - assert consumer.paused is False - assert 'ack_id_string' in policy._ack_on_resume - - -def test_call_rpc(): - policy = create_policy() - with mock.patch.object(policy._client.api, 'streaming_pull') as pull: - policy.call_rpc(mock.sentinel.GENERATOR) - pull.assert_called_once_with(mock.sentinel.GENERATOR) - - -def test_drop(): - policy = create_policy() - policy.leased_messages['ack_id_string'] = 0 - policy._bytes = 20 - policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.leased_messages) == 0 - assert policy._bytes == 0 - - # Do this again to establish idempotency. 
- policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.leased_messages) == 0 - assert policy._bytes == 0 - - -@mock.patch.object(base, '_LOGGER', spec=logging.Logger) -def test_drop_unexpected_negative(_LOGGER): - policy = create_policy() - policy.leased_messages['ack_id_string'] = 0 - policy._bytes = 0 - policy.drop([base.DropRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.leased_messages) == 0 - assert policy._bytes == 0 - _LOGGER.debug.assert_called_once_with( - 'Bytes was unexpectedly negative: %d', -20) - - -def test_drop_below_threshold(): - """Establish that we resume a paused subscription. - - If the subscription is paused, and we drop sufficiently below - the flow control thresholds, it should resume. - """ - policy = create_policy() - policy.leased_messages['ack_id_string'] = 0 - num_bytes = 20 - policy._bytes = num_bytes - consumer = policy._consumer - assert consumer.paused is True - - policy.drop([ - base.DropRequest(ack_id='ack_id_string', byte_size=num_bytes)]) - - assert consumer.paused is False - - -def test_on_request_below_threshold(): - """Establish that we resume a paused subscription when the pending - requests count is below threshold.""" - flow_control = types.FlowControl(max_requests=100) - policy = create_policy(flow_control=flow_control) - consumer = policy._consumer - - assert consumer.paused is True - - pending_requests_patch = mock.patch.object( - consumer.__class__, 'pending_requests', new_callable=mock.PropertyMock) - with pending_requests_patch as pending_requests: - # should still be paused, not under the threshold. 
- pending_requests.return_value = 90 - policy.on_request(None) - assert consumer.paused is True - - # should unpause, we're under the resume threshold - pending_requests.return_value = 50 - policy.on_request(None) - assert consumer.paused is False - - -def test_load_w_lease(): - flow_control = types.FlowControl(max_messages=10, max_bytes=1000) - policy = create_policy(flow_control=flow_control) - consumer = policy._consumer - - with mock.patch.object(consumer, 'pause') as pause: - # This should mean that our messages count is at 10%, and our bytes - # are at 15%; the ._load property should return the higher (0.15). - policy.lease([base.LeaseRequest(ack_id='one', byte_size=150)]) - assert policy._load == 0.15 - pause.assert_not_called() - # After this message is added, the messages should be higher at 20% - # (versus 16% for bytes). - policy.lease([base.LeaseRequest(ack_id='two', byte_size=10)]) - assert policy._load == 0.2 - pause.assert_not_called() - # Returning a number above 100% is fine. - policy.lease([base.LeaseRequest(ack_id='three', byte_size=1000)]) - assert policy._load == 1.16 - pause.assert_called_once_with() - - -def test_load_w_requests(): - flow_control = types.FlowControl(max_bytes=100, max_requests=100) - policy = create_policy(flow_control=flow_control) - consumer = policy._consumer - - pending_requests_patch = mock.patch.object( - consumer.__class__, 'pending_requests', new_callable=mock.PropertyMock) - with pending_requests_patch as pending_requests: - pending_requests.return_value = 0 - assert policy._load == 0 - - pending_requests.return_value = 100 - assert policy._load == 1 - - # If bytes count is higher, it should return that. 
- policy._bytes = 110 - assert policy._load == 1.1 - - -def test_modify_ack_deadline(): - policy = create_policy() - with mock.patch.object(policy._consumer, 'send_request') as send_request: - policy.modify_ack_deadline([ - base.ModAckRequest(ack_id='ack_id_string', seconds=60)]) - send_request.assert_called_once_with(types.StreamingPullRequest( - modify_deadline_ack_ids=['ack_id_string'], - modify_deadline_seconds=[60], - )) - - -def test_maintain_leases_inactive_consumer(): - policy = create_policy() - policy._consumer._stopped.set() - assert policy.maintain_leases() is None - - -def test_maintain_leases_ack_ids(): - policy = create_policy() - policy._consumer._stopped.clear() - policy.lease([base.LeaseRequest(ack_id='my ack id', byte_size=50)]) - - # Mock the sleep object. - with mock.patch.object(time, 'sleep', autospec=True) as sleep: - def trigger_inactive(seconds): - assert 0 < seconds < 10 - policy._consumer._stopped.set() - - sleep.side_effect = trigger_inactive - - # Also mock the consumer, which sends the request. 
- with mock.patch.object(policy._consumer, 'send_request') as send: - policy.maintain_leases() - send.assert_called_once_with(types.StreamingPullRequest( - modify_deadline_ack_ids=['my ack id'], - modify_deadline_seconds=[10], - )) - sleep.assert_called() - - -def test_maintain_leases_no_ack_ids(): - policy = create_policy() - policy._consumer._stopped.clear() - with mock.patch.object(time, 'sleep', autospec=True) as sleep: - def trigger_inactive(seconds): - assert 0 < seconds < 10 - policy._consumer._stopped.set() - - sleep.side_effect = trigger_inactive - policy.maintain_leases() - sleep.assert_called() - - -@mock.patch.object(time, 'time', autospec=True) -@mock.patch.object(time, 'sleep', autospec=True) -def test_maintain_leases_outdated_items(sleep, time): - policy = create_policy() - policy._consumer._stopped.clear() - - # Add these items at the beginning of the timeline - time.return_value = 0 - policy.lease([ - base.LeaseRequest(ack_id='ack1', byte_size=50)]) - - # Add another item at towards end of the timeline - time.return_value = policy.flow_control.max_lease_duration - 1 - policy.lease([ - base.LeaseRequest(ack_id='ack2', byte_size=50)]) - - # Now make sure time reports that we are at the end of our timeline. - time.return_value = policy.flow_control.max_lease_duration + 1 - - # Mock the sleep object. - def trigger_inactive(seconds): - assert 0 < seconds < 10 - policy._consumer._stopped.set() - - sleep.side_effect = trigger_inactive - - # Also mock the consumer, which sends the request. - with mock.patch.object(policy._consumer, 'send_request') as send: - policy.maintain_leases() - - # Only ack2 should be renewed. 
ack1 should've been dropped - send.assert_called_once_with(types.StreamingPullRequest( - modify_deadline_ack_ids=['ack2'], - modify_deadline_seconds=[10], - )) - assert len(policy.leased_messages) == 1 - - sleep.assert_called() - - -def test_lease(): - policy = create_policy() - policy.lease([base.LeaseRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.leased_messages) == 1 - assert policy._bytes == 20 - - # Do this again to prove idempotency. - policy.lease([base.LeaseRequest(ack_id='ack_id_string', byte_size=20)]) - assert len(policy.leased_messages) == 1 - assert policy._bytes == 20 - - -def test_lease_above_threshold(): - flow_control = types.FlowControl(max_messages=2) - policy = create_policy(flow_control=flow_control) - consumer = policy._consumer - - with mock.patch.object(consumer, 'pause') as pause: - policy.lease([base.LeaseRequest(ack_id='first_ack_id', byte_size=20)]) - pause.assert_not_called() - policy.lease([base.LeaseRequest(ack_id='second_ack_id', byte_size=25)]) - pause.assert_called_once_with() - - -def test_nack(): - policy = create_policy() - with mock.patch.object(policy, 'modify_ack_deadline') as mad: - with mock.patch.object(policy, 'drop') as drop: - items = [base.NackRequest(ack_id='ack_id_string', byte_size=10)] - policy.nack(items) - drop.assert_called_once_with( - [base.DropRequest(ack_id='ack_id_string', byte_size=10)]) - mad.assert_called_once_with( - [base.ModAckRequest(ack_id='ack_id_string', seconds=0)]) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py deleted file mode 100644 index 2ee0c3e7ed12..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_policy_thread.py +++ /dev/null @@ -1,273 +0,0 @@ -# Copyright 2017, Google LLC All rights reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -from concurrent import futures -import threading - -from google.api_core import exceptions -from google.auth import credentials -import mock -import pytest -from six.moves import queue - -from google.cloud.pubsub_v1 import subscriber -from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber import message -from google.cloud.pubsub_v1.subscriber.futures import Future -from google.cloud.pubsub_v1.subscriber.policy import base -from google.cloud.pubsub_v1.subscriber.policy import thread - - -def create_policy(**kwargs): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - return thread.Policy(client, 'sub_name_c', **kwargs) - - -def test_init_with_executor(): - executor = futures.ThreadPoolExecutor(max_workers=25) - policy = create_policy(executor=executor, queue=queue.Queue()) - assert policy._executor is executor - - -def test_close(): - dispatch_thread = mock.Mock(spec=threading.Thread) - leases_thread = mock.Mock(spec=threading.Thread) - - policy = create_policy() - policy._dispatch_thread = dispatch_thread - policy._leases_thread = leases_thread - future = mock.Mock(spec=('done',)) - future.done.return_value = True - policy._future = future - - consumer = policy._consumer - with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: - closed_fut = policy.close() - 
stop_consuming.assert_called_once_with() - - assert policy._dispatch_thread is None - dispatch_thread.join.assert_called_once_with() - assert policy._leases_thread is None - leases_thread.join.assert_called_once_with() - assert closed_fut is future - assert policy._future is None - future.done.assert_called_once_with() - - -def test_close_without_future(): - policy = create_policy() - assert policy._future is None - - with pytest.raises(ValueError) as exc_info: - policy.close() - - assert exc_info.value.args == ('This policy has not been opened yet.',) - - -def test_close_with_unfinished_future(): - dispatch_thread = mock.Mock(spec=threading.Thread) - leases_thread = mock.Mock(spec=threading.Thread) - - policy = create_policy() - policy._dispatch_thread = dispatch_thread - policy._leases_thread = leases_thread - policy._future = Future(policy=policy) - consumer = policy._consumer - with mock.patch.object(consumer, 'stop_consuming') as stop_consuming: - future = policy.future - closed_fut = policy.close() - stop_consuming.assert_called_once_with() - - assert policy._dispatch_thread is None - dispatch_thread.join.assert_called_once_with() - assert policy._leases_thread is None - leases_thread.join.assert_called_once_with() - assert policy._future is None - assert closed_fut is future - assert future.result() is None - - -def test_open(): - policy = create_policy() - consumer = policy._consumer - threads = ( - mock.Mock(spec=('name', 'start')), - mock.Mock(spec=('name', 'start')), - mock.Mock(spec=('name', 'start')), - ) - callback = mock.Mock() - - with mock.patch.object(threading, 'Thread', side_effect=threads): - policy.open(callback) - - policy._callback(mock.sentinel.MESSAGE) - callback.assert_called_once_with(mock.sentinel.MESSAGE) - - assert policy._dispatch_thread is threads[0] - threads[0].start.assert_called_once_with() - - assert consumer._consumer_thread is threads[1] - threads[1].start.assert_called_once_with() - - assert policy._leases_thread is 
threads[2] - threads[2].start.assert_called_once_with() - - -def test_open_already_open(): - policy = create_policy() - policy._future = mock.sentinel.future - - with pytest.raises(ValueError) as exc_info: - policy.open(None) - - assert exc_info.value.args == ('This policy has already been opened.',) - - -@pytest.mark.parametrize('item,method', [ - (base.AckRequest(0, 0, 0), 'ack'), - (base.DropRequest(0, 0), 'drop'), - (base.LeaseRequest(0, 0), 'lease'), - (base.ModAckRequest(0, 0), 'modify_ack_deadline'), - (base.NackRequest(0, 0), 'nack') -]) -def test_dispatch_callback_valid(item, method): - policy = create_policy() - with mock.patch.object(policy, method) as mocked: - items = [item] - policy.dispatch_callback(items) - mocked.assert_called_once_with([item]) - - -def test_on_exception_deadline_exceeded(): - policy = create_policy() - - details = 'Bad thing happened. Time out, go sit in the corner.' - exc = exceptions.DeadlineExceeded(details) - - assert policy.on_exception(exc) is True - - -def test_on_exception_unavailable(): - policy = create_policy() - - details = 'UNAVAILABLE. Service taking nap.' - exc = exceptions.ServiceUnavailable(details) - - assert policy.on_exception(exc) is True - - -def test_on_exception_other(): - policy = create_policy() - policy._future = Future(policy=policy) - exc = TypeError('wahhhhhh') - assert policy.on_exception(exc) is False - with pytest.raises(TypeError): - policy.future.result() - - -def create_and_open_policy(callback, **kwargs): - creds = mock.create_autospec(credentials.Credentials, instance=True) - client = subscriber.Client(credentials=creds) - policy = thread.Policy(client, 'sub_name_c', **kwargs) - - with mock.patch('threading.Thread', autospec=True): - policy.open(callback) - - return policy - - -def test_on_response(): - # Create mock Executor so we can verify calls to executor.submit(). 
- executor = mock.create_autospec(futures.Executor, instance=True) - - callback = mock.Mock(spec=()) - policy = create_and_open_policy(callback, executor=executor) - - # Set up the messages. - response = types.StreamingPullResponse( - received_messages=[ - types.ReceivedMessage( - ack_id='fack', - message=types.PubsubMessage(data=b'foo', message_id='1') - ), - types.ReceivedMessage( - ack_id='back', - message=types.PubsubMessage(data=b'bar', message_id='2') - ), - ], - ) - - # Actually run the method and prove that modack and executor.submit - # are called in the expected way. - modack_patch = mock.patch.object( - policy, 'modify_ack_deadline', autospec=True) - with modack_patch as modack: - policy.on_response(response) - - modack.assert_called_once_with( - [base.ModAckRequest('fack', 10), - base.ModAckRequest('back', 10)] - ) - - submit_calls = [m for m in executor.method_calls if m[0] == 'submit'] - assert len(submit_calls) == 2 - for call in submit_calls: - assert call[1][0] == policy._callback - assert isinstance(call[1][1], message.Message) - - -def _callback_side_effect(callback, *args, **kwargs): - try: - return callback(*args, **kwargs) - except Exception: - pass - - -def test_on_response_nacks_on_error(): - # Create a callback that always errors. - callback = mock.Mock(spec=(), side_effect=ValueError) - executor = mock.create_autospec(futures.Executor, instance=True) - executor.submit.side_effect = _callback_side_effect - policy = create_and_open_policy(callback, executor=executor) - - # Set up the messages. - message = types.PubsubMessage(data=b'foo', message_id='1') - response = types.StreamingPullResponse( - received_messages=[ - types.ReceivedMessage( - ack_id='fack', - message=message - ), - ], - ) - - # Actually run the method and prove that nack is called because the - # callback errored. - policy.on_response(response) - - # Make sure the callback was executed. 
- callback.assert_called_once_with(mock.ANY) - - # Process outstanding requests, the callback should've queued a nack - # request. - nack_patch = mock.patch.object( - policy, 'nack', autospec=True) - with nack_patch as nack: - policy.dispatch_callback(policy._request_queue.queue) - - nack.assert_called_once_with([ - base.NackRequest('fack', message.ByteSize())]) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index c5d29ecfafdc..6b694572b112 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -14,17 +14,15 @@ from google.auth import credentials import mock -import pytest from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1.subscriber import futures -from google.cloud.pubsub_v1.subscriber.policy import thread def test_init(): creds = mock.Mock(spec=credentials.Credentials) client = subscriber.Client(credentials=creds) - assert client._policy_class is thread.Policy + assert client.api is not None def test_init_emulator(monkeypatch): @@ -41,40 +39,14 @@ def test_init_emulator(monkeypatch): assert channel.target().decode('utf8') == '/baz/bacon/' -def test_subscribe(): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - subscription = client.subscribe('sub_name_a') - assert isinstance(subscription, thread.Policy) - - -def test_subscribe_with_callback(): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - callback = mock.Mock() - with mock.patch.object(thread.Policy, 'open') as open_: - subscription = client.subscribe('sub_name_b', callback) - open_.assert_called_once_with(callback) - assert isinstance(subscription, thread.Policy) - - -def 
test_subscribe_with_failed_callback(): - creds = mock.Mock(spec=credentials.Credentials) - client = subscriber.Client(credentials=creds) - callback = 'abcdefg' - with pytest.raises(TypeError) as exc_info: - client.subscribe('sub_name_b', callback) - assert callback in str(exc_info.value) - - @mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager.' 'StreamingPullManager.open', autospec=True) -def test_subscribe_experimental(manager_open): +def test_subscribe(manager_open): creds = mock.Mock(spec=credentials.Credentials) client = subscriber.Client(credentials=creds) - future = client.subscribe_experimental( + future = client.subscribe( 'sub_name_a', callback=mock.sentinel.callback) assert isinstance(future, futures.StreamingPullFuture) From 7124d8b3764d264575613a93c77d363ed54cde88 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 1 May 2018 11:00:43 -0700 Subject: [PATCH 0255/1197] Wire up scheduler argument for subscribe() (#5279) --- .../cloud/pubsub_v1/subscriber/client.py | 7 +++-- .../subscriber/test_subscriber_client.py | 28 ++++++++++++++++++- 2 files changed, 32 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index c1e60bbfea8f..b567ed6cb9f2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -91,7 +91,7 @@ def api(self): def subscribe( self, subscription, callback, flow_control=(), - scheduler_=None): + scheduler=None): """Asynchronously start receiving messages on a given subscription. This method starts a background thread to begin pulling messages from @@ -154,6 +154,9 @@ def callback(message): flow_control (~.pubsub_v1.types.FlowControl): The flow control settings. Use this to prevent situations where you are inundated with too many messages at once. 
+ scheduler (~.pubsub_v1.subscriber.scheduler.Scheduler): An optional + *scheduler* to use when executing the callback. This controls + how callbacks are executed concurrently. Returns: google.cloud.pubsub_v1.futures.StreamingPullFuture: A Future object @@ -162,7 +165,7 @@ def callback(message): flow_control = types.FlowControl(*flow_control) manager = streaming_pull_manager.StreamingPullManager( - self, subscription, flow_control) + self, subscription, flow_control=flow_control, scheduler=scheduler) future = futures.StreamingPullFuture(manager) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 6b694572b112..000040cb243e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -16,6 +16,7 @@ import mock from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures @@ -50,4 +51,29 @@ def test_subscribe(manager_open): 'sub_name_a', callback=mock.sentinel.callback) assert isinstance(future, futures.StreamingPullFuture) - manager_open.assert_called_once_with(mock.ANY, mock.sentinel.callback) + assert future._manager._subscription == 'sub_name_a' + manager_open.assert_called_once_with( + mock.ANY, mock.sentinel.callback) + + +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager.' 
+ 'StreamingPullManager.open', autospec=True) +def test_subscribe_options(manager_open): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + flow_control = types.FlowControl(max_bytes=42) + scheduler = mock.sentinel.scheduler + + future = client.subscribe( + 'sub_name_a', + callback=mock.sentinel.callback, + flow_control=flow_control, + scheduler=scheduler) + assert isinstance(future, futures.StreamingPullFuture) + + assert future._manager._subscription == 'sub_name_a' + assert future._manager.flow_control == flow_control + assert future._manager._scheduler == scheduler + manager_open.assert_called_once_with( + mock.ANY, mock.sentinel.callback) From 065d34b6497eec3d2c5830988fbdd3faa6abcd5d Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 4 May 2018 09:01:24 -0700 Subject: [PATCH 0256/1197] Add Test runs for Python 3.7 and remove 3.4 (#5295) * remove 3.4 from unit test runs * add 3.7 to most packages. PubSub, Monitoring, BigQuery not enabled * Fix #5292 by draining queue in a way compatible with SimpleQueue and Queue --- .../google/cloud/pubsub_v1/subscriber/scheduler.py | 6 +++++- packages/google-cloud-pubsub/nox.py | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 17e2c2f967ce..70c3721249f9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -118,5 +118,9 @@ def shutdown(self): # Drop all pending item from the executor. Without this, the executor # will block until all pending items are complete, which is # undesirable. 
- self._executor._work_queue.queue.clear() + try: + while True: + self._executor._work_queue.get(block=False) + except queue.Empty: + pass self._executor.shutdown() diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index d544f2eef081..7deabf7dbfd0 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -53,7 +53,7 @@ def default(session): @nox.session -@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) def unit(session, py): """Run the unit test suite.""" From a11471e85077799426e4be4c92c39d44076d5a8d Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 16 May 2018 10:14:30 -0700 Subject: [PATCH 0257/1197] Modify system tests to use prerelease versions of grpcio (#5304) --- packages/google-cloud-pubsub/nox.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index 7deabf7dbfd0..f772b2b33d57 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -81,6 +81,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) From 2cf2c364ad5209f1da7e122d56b3d70e9267c149 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 21 May 2018 12:27:03 -0400 Subject: [PATCH 0258/1197] Normalize overflow handling for max count and bytes (#5343) * Remove exceptions for oversize messages. Toward #4608. * Normalize overflow handling for max count and bytes. Closes #4608. 
--- .../cloud/pubsub_v1/publisher/batch/base.py | 5 --- .../cloud/pubsub_v1/publisher/batch/thread.py | 36 +++++++++++-------- .../cloud/pubsub_v1/publisher/client.py | 6 ---- .../pubsub_v1/publisher/batch/test_base.py | 14 ++++---- .../pubsub_v1/publisher/batch/test_thread.py | 4 +-- .../publisher/test_publisher_client.py | 13 ------- 6 files changed, 31 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index dae0dafb9fd5..ac1f7ef7fe0e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -122,11 +122,6 @@ def will_accept(self, message): if self.status != BatchStatus.ACCEPTING_MESSAGES: return False - # If this message will make the batch exceed the ``max_bytes`` - # setting, return False. - if self.size + message.ByteSize() > self.settings.max_bytes: - return False - # If this message will make the batch exceed the ``max_messages`` # setting, return False. if len(self.messages) >= self.settings.max_messages: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 64186b130e94..3b11bf32ef25 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -280,25 +280,33 @@ def publish(self, message): if not isinstance(message, types.PubsubMessage): message = types.PubsubMessage(**message) + future = None + with self._state_lock: if not self.will_accept(message): - return None - - # Add the size to the running total of the size, so we know - # if future messages need to be rejected. 
- self._size += message.ByteSize() - # Store the actual message in the batch's message queue. - self._messages.append(message) - # Track the future on this batch (so that the result of the - # future can be set). - future = futures.Future(completed=threading.Event()) - self._futures.append(future) - # Determine the number of messages before releasing the lock. - num_messages = len(self._messages) + return future + + new_size = self._size + message.ByteSize() + new_count = len(self._messages) + 1 + overflow = ( + new_size > self.settings.max_bytes or + new_count >= self._settings.max_messages + ) + + if not self._messages or not overflow: + + # Store the actual message in the batch's message queue. + self._messages.append(message) + self._size = new_size + + # Track the future on this batch (so that the result of the + # future can be set). + future = futures.Future(completed=threading.Event()) + self._futures.append(future) # Try to commit, but it must be **without** the lock held, since # ``commit()`` will try to obtain the lock. - if num_messages >= self._settings.max_messages: + if overflow: self.commit() return future diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 8fd91bcc9153..1aa1a279d393 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -199,12 +199,6 @@ def publish(self, topic, data, **attrs): # Create the Pub/Sub message object. message = types.PubsubMessage(data=data, attributes=attrs) - if message.ByteSize() > self.batch_settings.max_bytes: - raise ValueError( - 'Message being published is too large for the ' - 'batch settings with max bytes {}.'. - format(self.batch_settings.max_bytes) - ) # Delegate the publishing to the batch. 
batch = self.batch(topic) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index d4177e2f7d55..1c5dd7cfdaa3 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -54,18 +54,18 @@ def test_will_accept(): assert batch.will_accept(message) is True -def test_will_not_accept_status(): - batch = create_batch(status='talk to the hand') - message = types.PubsubMessage() - assert batch.will_accept(message) is False - - -def test_will_not_accept_size(): +def test_will_accept_oversize(): batch = create_batch( settings=types.BatchSettings(max_bytes=10), status=BatchStatus.ACCEPTING_MESSAGES, ) message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + assert batch.will_accept(message) is True + + +def test_will_not_accept_status(): + batch = create_batch(status='talk to the hand') + message = types.PubsubMessage() assert batch.will_accept(message) is False diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index fb62dbc6e550..bb50de5e0ec2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -304,12 +304,12 @@ def test_publish_exceed_max_messages(): assert commit.call_count == 0 # When a fourth message is published, commit should be called. + # No future will be returned in this case. 
future = batch.publish(types.PubsubMessage(data=b'last one')) commit.assert_called_once_with() - futures.append(future) + assert future is None assert batch._futures == futures - assert len(futures) == max_messages def test_publish_dict(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 4fa144aa54b5..188d1c09950d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -131,19 +131,6 @@ def test_publish_data_not_bytestring_error(): client.publish(topic, 42) -def test_publish_data_too_large(): - creds = mock.Mock(spec=credentials.Credentials) - client = publisher.Client(credentials=creds) - topic = 'topic/path' - client.batch_settings = types.BatchSettings( - 0, - client.batch_settings.max_latency, - client.batch_settings.max_messages - ) - with pytest.raises(ValueError): - client.publish(topic, b'This is a text string.') - - def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) From a641c0340b85a076b92cbc96b3859571cddf0756 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 22 May 2018 16:03:07 -0400 Subject: [PATCH 0259/1197] Avoid overwriting '__module__' of messages from shared modules. (#5364) Note that we *are* still overwriting it for messages from modules defined within the current package. See #4715. 
--- .../google/cloud/pubsub_v1/types.py | 48 +++++++++++-------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index fbb21af02e33..1ac99a96534b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -16,10 +16,7 @@ import collections import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2 from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.iam.v1.logging import audit_data_pb2 @@ -29,6 +26,9 @@ from google.protobuf import field_mask_pb2 from google.protobuf import timestamp_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.pubsub_v1.proto import pubsub_pb2 + # Define the default values for batching. 
# @@ -67,24 +67,32 @@ ) +_shared_modules = [ + http_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, +] + +_local_modules = [ + pubsub_pb2, +] + + names = ['BatchSettings', 'FlowControl'] -for name, message in get_messages(pubsub_pb2).items(): - message.__module__ = 'google.cloud.pubsub_v1.types' - setattr(sys.modules[__name__], name, message) - names.append(name) - - -for module in ( - http_pb2, - pubsub_pb2, - iam_policy_pb2, - policy_pb2, - audit_data_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, ): + + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.pubsub_v1.types' setattr(sys.modules[__name__], name, message) From 69ce094fe849821965b5b35879758a31ac0d1883 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 10:30:01 -0700 Subject: [PATCH 0260/1197] Make re-open failures bubble to callbacks (#5372) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 26 ++++++++-- .../unit/pubsub_v1/subscriber/test_bidi.py | 50 +++++++++++++++++-- 2 files changed, 68 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 215e38e9ac68..518c7c91275e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -170,8 +170,8 @@ def __init__(self, start_rpc, initial_request=None): self._request_queue = queue.Queue() self._request_generator = None self._is_active = False - self.call = None self._callbacks = [] + self.call = None def 
add_done_callback(self, callback): """Adds a callback that will be called when the RPC terminates. @@ -311,14 +311,25 @@ def __init__(self, start_rpc, should_recover, initial_request=None): super(ResumableBidiRpc, self).__init__(start_rpc, initial_request) self._should_recover = should_recover self._operational_lock = threading.Lock() + self._finalized = False + self._finalize_lock = threading.Lock() + + def _finalize(self, result): + with self._finalize_lock: + if self._finalized: + return + + for callback in self._callbacks: + callback(result) + + self._finalized = True def _on_call_done(self, future): # Unlike the base class, we only execute the callbacks on a terminal # error, not for errors that we can recover from. Note that grpc's # "future" here is also a grpc.RpcError. if not self._should_recover(future): - for callback in self._callbacks: - callback(future) + self._finalize(future) def _reopen(self): with self._operational_lock: @@ -330,7 +341,14 @@ def _reopen(self): # Request generator should exit cleanly since the RPC its bound to # has exited. self.request_generator = None - self.open() + + try: + self.open() + # If re-opening fails, consider this a terminal error and finalize + # the object. + except Exception as exc: + self._finalize(exc) + raise def _recoverable(self, method, *args, **kwargs): """Wraps a method to recover the stream and retry on error. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py index 4e79ee3e6ce4..b040e7f97887 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -310,7 +310,7 @@ def test_send_recover(self): grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]) - should_recover = mock.Mock(autospec=['__call__'], return_value=True) + should_recover = mock.Mock(spec=['__call__'], return_value=True) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -331,7 +331,7 @@ def test_send_failure(self): grpc.StreamStreamMultiCallable, instance=True, return_value=call) - should_recover = mock.Mock(autospec=['__call__'], return_value=False) + should_recover = mock.Mock(spec=['__call__'], return_value=False) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -355,7 +355,7 @@ def test_recv_recover(self): grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]) - should_recover = mock.Mock(autospec=['__call__'], return_value=True) + should_recover = mock.Mock(spec=['__call__'], return_value=True) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -412,7 +412,7 @@ def test_recv_failure(self): grpc.StreamStreamMultiCallable, instance=True, return_value=call) - should_recover = mock.Mock(autospec=['__call__'], return_value=False) + should_recover = mock.Mock(spec=['__call__'], return_value=False) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -426,6 +426,48 @@ def test_recv_failure(self): assert bidi_rpc.is_active is False assert call.cancelled is True + def test_reopen_failure_on_rpc_restart(self): + error1 = ValueError('1') + error2 = ValueError('2') + call = CallStub([error1]) + # Invoking start RPC a second time will trigger 
an error. + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + side_effect=[call, error2]) + should_recover = mock.Mock(spec=['__call__'], return_value=True) + callback = mock.Mock(spec=['__call__']) + + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + bidi_rpc.add_done_callback(callback) + + bidi_rpc.open() + + with pytest.raises(ValueError) as exc_info: + bidi_rpc.recv() + + assert exc_info.value == error2 + should_recover.assert_called_once_with(error1) + assert bidi_rpc.call is None + assert bidi_rpc.is_active is False + callback.assert_called_once_with(error2) + + def test_finalize_idempotent(self): + error1 = ValueError('1') + error2 = ValueError('2') + callback = mock.Mock(spec=['__call__']) + should_recover = mock.Mock(spec=['__call__'], return_value=False) + + bidi_rpc = bidi.ResumableBidiRpc( + mock.sentinel.start_rpc, should_recover) + + bidi_rpc.add_done_callback(callback) + + bidi_rpc._on_call_done(error1) + bidi_rpc._on_call_done(error2) + + callback.assert_called_once_with(error1) + class TestBackgroundConsumer(object): def test_consume_once_then_exit(self): From 549865dba1813801877b3512c4196cff7533e5ee Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 11:10:58 -0700 Subject: [PATCH 0261/1197] Fix example in subscribe's documentation (#5375) --- .../google/cloud/pubsub_v1/subscriber/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index b567ed6cb9f2..c4906bbbeb21 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -135,7 +135,7 @@ def callback(message): print(message) message.ack() - future = subscriber.subscribe_experimental( + future = subscriber.subscribe( subscription, callback) try: From 
996159362e1e5dd559c1403cd3d760f96097cd7c Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 14:00:10 -0700 Subject: [PATCH 0262/1197] Make leaser exit more quickly (#5373) --- .../pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../unit/pubsub_v1/subscriber/test_leaser.py | 29 ++++++++----------- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 02e78577ff70..c3ef6565587a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -159,7 +159,7 @@ def maintain_leases(self): # where there are many clients. snooze = random.uniform(0.0, p99 * 0.9) _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) - time.sleep(snooze) + self._stop_event.wait(timeout=snooze) _LOGGER.info('%s exiting.', _LEASE_WORKER_NAME) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 6c16276e8f15..447fa79f5036 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -119,20 +119,20 @@ def test_maintain_leases_stopped(caplog): assert 'exiting' in caplog.text -def make_sleep_mark_manager_as_inactive(sleep, manager): +def make_sleep_mark_manager_as_inactive(leaser): # Make sleep mark the manager as inactive so that maintain_leases # exits at the end of the first run. 
- def trigger_inactive(seconds): - assert 0 < seconds < 10 - manager.is_active = False - sleep.side_effect = trigger_inactive + def trigger_inactive(timeout): + assert 0 < timeout < 10 + leaser._manager.is_active = False + leaser._stop_event.wait = trigger_inactive -@mock.patch('time.sleep', autospec=True) -def test_maintain_leases_ack_ids(sleep): + +def test_maintain_leases_ack_ids(): manager = create_manager() - make_sleep_mark_manager_as_inactive(sleep, manager) leaser_ = leaser.Leaser(manager) + make_sleep_mark_manager_as_inactive(leaser_) leaser_.add([requests.LeaseRequest(ack_id='my ack id', byte_size=50)]) leaser_.maintain_leases() @@ -143,27 +143,23 @@ def test_maintain_leases_ack_ids(sleep): seconds=10, ) ]) - sleep.assert_called() -@mock.patch('time.sleep', autospec=True) -def test_maintain_leases_no_ack_ids(sleep): +def test_maintain_leases_no_ack_ids(): manager = create_manager() - make_sleep_mark_manager_as_inactive(sleep, manager) leaser_ = leaser.Leaser(manager) + make_sleep_mark_manager_as_inactive(leaser_) leaser_.maintain_leases() manager.dispatcher.modify_ack_deadline.assert_not_called() - sleep.assert_called() @mock.patch('time.time', autospec=True) -@mock.patch('time.sleep', autospec=True) -def test_maintain_leases_outdated_items(sleep, time): +def test_maintain_leases_outdated_items(time): manager = create_manager() - make_sleep_mark_manager_as_inactive(sleep, manager) leaser_ = leaser.Leaser(manager) + make_sleep_mark_manager_as_inactive(leaser_) # Add these items at the beginning of the timeline time.return_value = 0 @@ -190,7 +186,6 @@ def test_maintain_leases_outdated_items(sleep, time): manager.dispatcher.drop.assert_called_once_with([ requests.DropRequest(ack_id='ack1', byte_size=50) ]) - sleep.assert_called() @mock.patch('threading.Thread', autospec=True) From c65d56602ca180467d308b1bb2f391c421d1ca9d Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 14:00:26 -0700 Subject: [PATCH 0263/1197] Initialize references to 
helper threads before starting them (#5374) --- .../_protocol/streaming_pull_manager.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 2fb93e7cfda7..e5219fc4eecd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -241,22 +241,26 @@ def open(self, callback): self._callback = functools.partial(_wrap_callback_errors, callback) - # Start the thread to pass the requests. - self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) - self._dispatcher.start() - - # Start consuming messages. + # Create the RPC self._rpc = bidi.ResumableBidiRpc( start_rpc=self._client.api.streaming_pull, initial_request=self._get_initial_request, should_recover=self._should_recover) self._rpc.add_done_callback(self._on_rpc_done) + + # Create references to threads + self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) self._consumer = bidi.BackgroundConsumer( self._rpc, self._on_response) + self._leaser = leaser.Leaser(self) + + # Start the thread to pass the requests. + self._dispatcher.start() + + # Start consuming messages. self._consumer.start() # Start the lease maintainer thread. 
- self._leaser = leaser.Leaser(self) self._leaser.start() def close(self, reason=None): From 09e96ac67a9073e89b6504163ba7e4ed194c0bc0 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 15:20:43 -0700 Subject: [PATCH 0264/1197] Send requests during streaming pull over a separate unary RPC (#5377) --- .../subscriber/_protocol/helper_threads.py | 2 +- .../_protocol/streaming_pull_manager.py | 42 ++++++++++++++++++- .../subscriber/test_streaming_pull_manager.py | 35 +++++++++++++++- 3 files changed, 75 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index ac38101bd96f..edb22d14fea5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -111,6 +111,6 @@ def __call__(self): try: self._callback(items) except Exception as exc: - _LOGGER.error('%s: %s', exc.__class__.__name__, exc) + _LOGGER.exception('Error in queue callback worker: %s', exc) _LOGGER.debug('Exiting the QueueCallbackWorker.') diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e5219fc4eecd..aa0876798ad9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -14,13 +14,15 @@ from __future__ import division +import collections import functools import logging import threading -from google.api_core import exceptions import grpc +import six +from google.api_core import exceptions from google.cloud.pubsub_v1 import types from 
google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher @@ -84,6 +86,10 @@ class StreamingPullManager(object): scheduler will be used. """ + _UNARY_REQUESTS = True + """If set to True, this class will make requests over a separate unary + RPC instead of over the streaming RPC.""" + def __init__(self, client, subscription, flow_control=types.FlowControl(), scheduler=None): self._client = client @@ -220,9 +226,41 @@ def maybe_resume_consumer(self): else: _LOGGER.debug('Did not resume, current load is %s', self.load) + def _send_unary_request(self, request): + """Send a request using a separate unary request instead of over the + stream. + + Args: + request (types.StreamingPullRequest): The stream request to be + mapped into unary requests. + """ + if request.ack_ids: + self._client.acknowledge( + subscription=self._subscription, + ack_ids=list(request.ack_ids)) + + if request.modify_deadline_ack_ids: + # Send ack_ids with the same deadline seconds together. + deadline_to_ack_ids = collections.defaultdict(list) + + for n, ack_id in enumerate(request.modify_deadline_ack_ids): + deadline = request.modify_deadline_seconds[n] + deadline_to_ack_ids[deadline].append(ack_id) + + for deadline, ack_ids in six.iteritems(deadline_to_ack_ids): + self._client.modify_ack_deadline( + subscription=self._subscription, + ack_ids=ack_ids, + ack_deadline_seconds=deadline) + + _LOGGER.debug('Sent request(s) over unary RPC.') + def send(self, request): """Queue a request to be sent to the RPC.""" - self._rpc.send(request) + if self._UNARY_REQUESTS: + self._send_unary_request(request) + else: + self._rpc.send(request) def open(self, callback): """Begin consuming messages. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index a6527dc4eb3b..61d040a26fc1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -174,8 +174,41 @@ def test_resume_not_paused(): manager._consumer.resume.assert_not_called() -def test_send(): +def test_send_unary(): manager = make_manager() + manager._UNARY_REQUESTS = True + + manager.send(types.StreamingPullRequest( + ack_ids=['ack_id1', 'ack_id2'], + modify_deadline_ack_ids=['ack_id3', 'ack_id4', 'ack_id5'], + modify_deadline_seconds=[10, 20, 20])) + + manager._client.acknowledge.assert_called_once_with( + subscription=manager._subscription, ack_ids=['ack_id1', 'ack_id2']) + + manager._client.modify_ack_deadline.assert_has_calls([ + mock.call( + subscription=manager._subscription, + ack_ids=['ack_id3'], ack_deadline_seconds=10), + mock.call( + subscription=manager._subscription, + ack_ids=['ack_id4', 'ack_id5'], ack_deadline_seconds=20), + ], any_order=True) + + +def test_send_unary_empty(): + manager = make_manager() + manager._UNARY_REQUESTS = True + + manager.send(types.StreamingPullRequest()) + + manager._client.acknowledge.assert_not_called() + manager._client.modify_ack_deadline.assert_not_called() + + +def test_send_streaming(): + manager = make_manager() + manager._UNARY_REQUESTS = False manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager.send(mock.sentinel.request) From 2461a82b9339aebbfcf068c6b84023a7be723afa Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 15:29:35 -0700 Subject: [PATCH 0265/1197] Add link to streaming pull behavior documentation (#5378) --- .../google/cloud/pubsub_v1/subscriber/client.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index c4906bbbeb21..ff1a76955a40 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -119,7 +119,13 @@ def subscribe( is encountered (such as loss of network connectivity). Cancelling the future will signal the process to shutdown gracefully and exit. - Example + .. note:: This uses Pub/Sub's *streaming pull* feature. This feature + properties that may be surprising. Please take a look at + https://cloud.google.com/pubsub/docs/pull#streamingpull for + more details on how streaming pull behaves compared to the + synchronous pull method. + + Example: .. code-block:: python From 79da2949f5fd49e63eb73b160d91df1af01adac0 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 15:32:56 -0700 Subject: [PATCH 0266/1197] Restore the synchronous pull method (#5379) --- .../google/cloud/pubsub_v1/subscriber/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index ff1a76955a40..4a9cf0d3a32b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -32,7 +32,7 @@ @_gapic.add_methods(subscriber_client.SubscriberClient, - blacklist=('pull', 'streaming_pull')) + blacklist=('streaming_pull',)) class Client(object): """A subscriber client for Google Cloud Pub/Sub. 
From 37d241a07754122c2ad08f644e9ac282105488cf Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 16:38:48 -0700 Subject: [PATCH 0267/1197] Release 0.35.0 (#5380) --- packages/google-cloud-pubsub/CHANGELOG.md | 27 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 8c9f1422f625..82524c20d447 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,33 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.0 + +### Implementation Changes + +- Send requests during streaming pull over a separate unary RPC (#5377) +- Initialize references to helper threads before starting them (#5374) +- Make leaser exit more quickly (#5373) +- Make re-open failures bubble to callbacks (#5372) +- Avoid overwriting '__module__' of messages from shared modules. (#5364) +- Normalize overflow handling for max count and bytes (#5343) + +### New Features + +- Restore the synchronous pull method (#5379) +- Promote subscribe_experimental() to subscribe(), remove old subscriber implementation. 
(#5274) +- Wire up scheduler argument for subscribe() (#5279) + +### Documentation + +- Add link to streaming pull behavior documentation (#5378) +- Fix example in subscribe's documentation (#5375) + +### Internal / Testing Changes + +- Add Test runs for Python 3.7 and remove 3.4 (#5295) +- Modify system tests to use prerelease versions of grpcio (#5304) + ## 0.34.0 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e1237ac8d11e..a7ef56b86deb 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.34.0' +version = '0.35.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From ffa538b87af655f8e8826e28149ec26e61e523eb Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 29 May 2018 12:27:39 -0700 Subject: [PATCH 0268/1197] Catch errors when re-retying send() or recv() in addition to open() (#5402) --- .../cloud/pubsub_v1/subscriber/_protocol/bidi.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 518c7c91275e..6e361a1e1ff8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -342,13 +342,7 @@ def _reopen(self): # has exited. self.request_generator = None - try: - self.open() - # If re-opening fails, consider this a terminal error and finalize - # the object. - except Exception as exc: - self._finalize(exc) - raise + self.open() def _recoverable(self, method, *args, **kwargs): """Wraps a method to recover the stream and retry on error. 
@@ -370,9 +364,14 @@ def _recoverable(self, method, *args, **kwargs): self.close() raise exc + try: self._reopen() - return method(*args, **kwargs) + # If re-opening or re-calling the method fails for any reason, consider + # it a terminal error and finalize the object. + except Exception as exc: + self._finalize(exc) + raise def send(self, request): return self._recoverable( From f3a82174bc6008d0695dc9982af1c4fb5d7080b2 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 29 May 2018 14:01:52 -0700 Subject: [PATCH 0269/1197] Release pubsub 0.35.1 (#5404) * Release 0.35.1 --- packages/google-cloud-pubsub/CHANGELOG.md | 5 +++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 82524c20d447..3fe9694dae35 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,11 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.1 + +### Implementation Changes +- Catch errors when re-retying send() or recv() in addition to open() (#5402) + ## 0.35.0 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index a7ef56b86deb..8d2bd4a24661 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.0' +version = '0.35.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 848d3ee5744fb9dcbbbabf533c95db7cdc7670fc Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 30 May 2018 10:50:20 -0700 Subject: [PATCH 0270/1197] Fix retrying of bidirectional RPCs and closing the streaming pull manager (#5412) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 46 ++++++++++++------- 
.../_protocol/streaming_pull_manager.py | 20 +++++++- .../subscriber/test_streaming_pull_manager.py | 10 ++-- 3 files changed, 54 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 6e361a1e1ff8..80824c55022b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -342,36 +342,48 @@ def _reopen(self): # has exited. self.request_generator = None - self.open() + # Note: we do not currently do any sort of backoff here. The + # assumption is that re-establishing the stream under normal + # circumstances will happen in intervals greater than 60s. + # However, it is possible in a degenerative case that the server + # closes the stream rapidly which would lead to thrashing here, + # but hopefully in those cases the server would return a non- + # retryable error. + + try: + self.open() + # If re-opening or re-calling the method fails for any reason, + # consider it a terminal error and finalize the stream. + except Exception as exc: + self._finalize(exc) + raise + + _LOGGER.info('Re-established stream') def _recoverable(self, method, *args, **kwargs): """Wraps a method to recover the stream and retry on error. - If a recoverable error occurs, this will retry the RPC and retry the - method. If a second error occurs while retrying the method, it will - bubble up. + If a retryable error occurs while making the call, then the stream will + be re-opened and the method will be retried. This happens indefinitely + so long as the error is a retryable one. If an error occurs while + re-opening the stream, then this method will raise immediately and + trigger finalization of this object. Args: method (Callable[..., Any]): The method to call. args: The args to pass to the method. 
kwargs: The kwargs to pass to the method. """ - try: - return method(*args, **kwargs) + while True: + try: + return method(*args, **kwargs) - except Exception as exc: - if not self._should_recover(exc): - self.close() - raise exc + except Exception as exc: + if not self._should_recover(exc): + self.close() + raise exc - try: self._reopen() - return method(*args, **kwargs) - # If re-opening or re-calling the method fails for any reason, consider - # it a terminal error and finalize the object. - except Exception as exc: - self._finalize(exc) - raise def send(self, request): return self._recoverable( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index aa0876798ad9..57661729a83f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -33,6 +33,7 @@ import google.cloud.pubsub_v1.subscriber.scheduler _LOGGER = logging.getLogger(__name__) +_RPC_ERROR_THREAD_NAME = 'Thread-OnRpcTerminated' _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, @@ -414,11 +415,28 @@ def _should_recover(self, exception): # If this is in the list of idempotent exceptions, then we want to # recover. if isinstance(exception, _RETRYABLE_STREAM_ERRORS): + logging.info('Observed recoverable stream error %s', exception) return True + logging.info('Observed non-recoverable stream error %s', exception) return False def _on_rpc_done(self, future): + """Triggered whenever the underlying RPC terminates without recovery. + + This is typically triggered from one of two threads: the background + consumer thread (when calling ``recv()`` produces a non-recoverable + error) or the grpc management thread (when cancelling the RPC). 
+ + This method is *non-blocking*. It will start another thread to deal + with shutting everything down. This is to prevent blocking in the + background consumer and preventing it from being ``joined()``. + """ _LOGGER.info( 'RPC termination has signaled streaming pull manager shutdown.') future = _maybe_wrap_exception(future) - self.close(reason=future) + thread = threading.Thread( + name=_RPC_ERROR_THREAD_NAME, + target=self.close, + kwargs={'reason': future}) + thread.daemon = True + thread.start() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 61d040a26fc1..1aa979a504c9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -420,10 +420,12 @@ def test__should_recover_false(): assert manager._should_recover(exc) is False -def test__on_rpc_done(): +@mock.patch('threading.Thread', autospec=True) +def test__on_rpc_done(thread): manager = make_manager() - with mock.patch.object(manager, 'close') as close: - manager._on_rpc_done(mock.sentinel.error) + manager._on_rpc_done(mock.sentinel.error) - close.assert_called_once_with(reason=mock.sentinel.error) + thread.assert_called_once_with( + name=mock.ANY, target=manager.close, + kwargs={'reason': mock.sentinel.error}) From 5ea9caa77effc69751e318652f494fd4cd5bc2c0 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 30 May 2018 12:03:02 -0700 Subject: [PATCH 0271/1197] Add heartbeating to the streaming pull manager (#5413) --- .../subscriber/_protocol/heartbeater.py | 70 +++++++++++ .../_protocol/streaming_pull_manager.py | 18 +++ .../pubsub_v1/subscriber/test_heartbeater.py | 119 ++++++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 50 ++++++-- 4 files changed, 250 insertions(+), 7 deletions(-) create 
mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py new file mode 100644 index 000000000000..38d2ae8dc505 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -0,0 +1,70 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import logging +import threading + + +_LOGGER = logging.getLogger(__name__) +_HEARTBEAT_WORKER_NAME = 'Thread-Heartbeater' +# How often to send heartbeats in seconds. Determined as half the period of +# time where the Pub/Sub server will close the stream as inactive, which is +# 60 seconds. 
+_DEFAULT_PERIOD = 30 + + +class Heartbeater(object): + def __init__(self, manager, period=_DEFAULT_PERIOD): + self._thread = None + self._operational_lock = threading.Lock() + self._manager = manager + self._stop_event = threading.Event() + self._period = period + + def heartbeat(self): + """Periodically send heartbeats.""" + while self._manager.is_active and not self._stop_event.is_set(): + self._manager.heartbeat() + _LOGGER.debug('Sent heartbeat.') + self._stop_event.wait(timeout=self._period) + + _LOGGER.info('%s exiting.', _HEARTBEAT_WORKER_NAME) + + def start(self): + with self._operational_lock: + if self._thread is not None: + raise ValueError('Heartbeater is already running.') + + # Create and start the helper thread. + self._stop_event.clear() + thread = threading.Thread( + name=_HEARTBEAT_WORKER_NAME, + target=self.heartbeat) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._thread = thread + + def stop(self): + with self._operational_lock: + self._stop_event.set() + + if self._thread is not None: + # The thread should automatically exit when the consumer is + # inactive. 
+ self._thread.join() + + self._thread = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 57661729a83f..24d57e1ee90b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -26,6 +26,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher +from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import histogram from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -115,6 +116,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), self._dispatcher = None self._leaser = None self._consumer = None + self._heartbeater = None @property def is_active(self): @@ -263,6 +265,15 @@ def send(self, request): else: self._rpc.send(request) + def heartbeat(self): + """Sends an empty request over the streaming pull RPC. + + This always sends over the stream, regardless of if + ``self._UNARY_REQUESTS`` is set or not. + """ + if self._rpc is not None and self._rpc.is_active: + self._rpc.send(types.StreamingPullRequest()) + def open(self, callback): """Begin consuming messages. @@ -292,6 +303,7 @@ def open(self, callback): self._consumer = bidi.BackgroundConsumer( self._rpc, self._on_response) self._leaser = leaser.Leaser(self) + self._heartbeater = heartbeater.Heartbeater(self) # Start the thread to pass the requests. self._dispatcher.start() @@ -302,6 +314,9 @@ def open(self, callback): # Start the lease maintainer thread. 
self._leaser.start() + # Start the stream heartbeater thread. + self._heartbeater.start() + def close(self, reason=None): """Stop consuming messages and shutdown all helper threads. @@ -332,6 +347,9 @@ def close(self, reason=None): _LOGGER.debug('Stopping dispatcher.') self._dispatcher.stop() self._dispatcher = None + _LOGGER.debug('Stopping heartbeater.') + self._heartbeater.stop() + self._heartbeater = None self._rpc = None self._closed = True diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py new file mode 100644 index 000000000000..f9147a4d7e39 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -0,0 +1,119 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import logging +import threading + +from google.cloud.pubsub_v1.subscriber._protocol import heartbeater +from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager + +import mock +import pytest + + +def test_heartbeat_inactive(caplog): + caplog.set_level(logging.INFO) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + manager.is_active = False + + heartbeater_ = heartbeater.Heartbeater(manager) + + heartbeater_.heartbeat() + + assert 'exiting' in caplog.text + + +def test_heartbeat_stopped(caplog): + caplog.set_level(logging.INFO) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + + heartbeater_ = heartbeater.Heartbeater(manager) + heartbeater_.stop() + + heartbeater_.heartbeat() + + assert 'exiting' in caplog.text + + +def make_sleep_mark_manager_as_inactive(heartbeater): + # Make sleep mark the manager as inactive so that heartbeat() + # exits at the end of the first run. 
+ def trigger_inactive(timeout): + assert timeout + heartbeater._manager.is_active = False + + heartbeater._stop_event.wait = trigger_inactive + + +def test_heartbeat_once(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_manager_as_inactive(heartbeater_) + + heartbeater_.heartbeat() + + manager.heartbeat.assert_called_once() + + +@mock.patch('threading.Thread', autospec=True) +def test_start(thread): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + + heartbeater_.start() + + thread.assert_called_once_with( + name=heartbeater._HEARTBEAT_WORKER_NAME, + target=heartbeater_.heartbeat) + + thread.return_value.start.assert_called_once() + + assert heartbeater_._thread is not None + + +@mock.patch('threading.Thread', autospec=True) +def test_start_already_started(thread): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + heartbeater_._thread = mock.sentinel.thread + + with pytest.raises(ValueError): + heartbeater_.start() + + thread.assert_not_called() + + +def test_stop(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + thread = mock.create_autospec(threading.Thread, instance=True) + heartbeater_._thread = thread + + heartbeater_.stop() + + assert heartbeater_._stop_event.is_set() + thread.join.assert_called_once() + assert heartbeater_._thread is None + + +def test_stop_no_join(): + heartbeater_ = heartbeater.Heartbeater(mock.sentinel.manager) + + heartbeater_.stop() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py 
index 1aa979a504c9..53b23dd7049f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1.subscriber import scheduler from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher +from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager @@ -216,6 +217,26 @@ def test_send_streaming(): manager._rpc.send.assert_called_once_with(mock.sentinel.request) +def test_heartbeat(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = True + + manager.heartbeat() + + manager._rpc.send.assert_called_once_with(types.StreamingPullRequest()) + + +def test_heartbeat_inactive(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = False + + manager.heartbeat() + + manager._rpc.send.assert_not_called() + + @mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.bidi.ResumableBidiRpc', autospec=True) @@ -228,11 +249,20 @@ def test_send_streaming(): @mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher', autospec=True) -def test_open(dispatcher, leaser, background_consumer, resumable_bidi_rpc): +@mock.patch( + 'google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater', + autospec=True) +def test_open( + heartbeater, dispatcher, leaser, background_consumer, + resumable_bidi_rpc): manager = make_manager() manager.open(mock.sentinel.callback) + heartbeater.assert_called_once_with(manager) + heartbeater.return_value.start.assert_called_once() + assert 
manager._heartbeater == heartbeater.return_value + dispatcher.assert_called_once_with(manager, manager._scheduler.queue) dispatcher.return_value.start.assert_called_once() assert manager._dispatcher == dispatcher.return_value @@ -285,27 +315,32 @@ def make_running_manager(): dispatcher.Dispatcher, instance=True) manager._leaser = mock.create_autospec( leaser.Leaser, instance=True) + manager._heartbeater = mock.create_autospec( + heartbeater.Heartbeater, instance=True) return ( manager, manager._consumer, manager._dispatcher, manager._leaser, - manager._scheduler) + manager._heartbeater, manager._scheduler) def test_close(): - manager, consumer, dispatcher, leaser, scheduler = make_running_manager() + manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( + make_running_manager()) manager.close() consumer.stop.assert_called_once() leaser.stop.assert_called_once() dispatcher.stop.assert_called_once() + heartbeater.stop.assert_called_once() scheduler.shutdown.assert_called_once() assert manager.is_active is False def test_close_inactive_consumer(): - manager, consumer, dispatcher, leaser, scheduler = make_running_manager() + manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( + make_running_manager()) consumer.is_active = False manager.close() @@ -313,11 +348,12 @@ def test_close_inactive_consumer(): consumer.stop.assert_not_called() leaser.stop.assert_called_once() dispatcher.stop.assert_called_once() + heartbeater.stop.assert_called_once() scheduler.shutdown.assert_called_once() def test_close_idempotent(): - manager, _, _, _, scheduler = make_running_manager() + manager, _, _, _, _, scheduler = make_running_manager() manager.close() manager.close() @@ -326,7 +362,7 @@ def test_close_idempotent(): def test_close_callbacks(): - manager, _, _, _, _ = make_running_manager() + manager, _, _, _, _, _ = make_running_manager() callback = mock.Mock() @@ -352,7 +388,7 @@ def test__get_initial_request(): def test_on_response(): - manager, _, 
dispatcher, _, scheduler = make_running_manager() + manager, _, dispatcher, _, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback # Set up the messages. From 3b5d808417591f9e53aa5a9d022bc4dc8424e47a Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 30 May 2018 12:47:13 -0700 Subject: [PATCH 0272/1197] Release 0.35.2 (#5414) --- packages/google-cloud-pubsub/CHANGELOG.md | 6 ++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 3fe9694dae35..85af38e15069 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.2 + +### Implementation Changes +- Add heartbeating to the streaming pull manager (#5413) +- Fix retrying of bidirectional RPCs and closing the streaming pull manager (#5412) + ## 0.35.1 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8d2bd4a24661..fd97318fee06 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.1' +version = '0.35.2' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 61343aa6046282eced62353fc3110c353d90dda1 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 5 Jun 2018 13:15:55 -0700 Subject: [PATCH 0273/1197] Add additional error handling to unary RPCs (#5438) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 5 +++++ .../_protocol/streaming_pull_manager.py | 12 +++++++++--- .../subscriber/test_streaming_pull_manager.py | 17 +++++++++++++++++ 3 files changed, 31 insertions(+), 3 deletions(-) diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 80824c55022b..a0bedf7e3d8e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -481,6 +481,11 @@ def _thread_main(self): '%s caught unexpected exception %s and will exit.', _BIDIRECTIONAL_CONSUMER_NAME, exc) + else: + _LOGGER.error( + 'The bidirectional RPC unexpectedly exited. %s', + self._bidi_rpc.call) + _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) def start(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 24d57e1ee90b..edc7cfbf8802 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -261,7 +261,13 @@ def _send_unary_request(self, request): def send(self, request): """Queue a request to be sent to the RPC.""" if self._UNARY_REQUESTS: - self._send_unary_request(request) + try: + self._send_unary_request(request) + except exceptions.GoogleAPICallError as exc: + _LOGGER.debug( + 'Exception while sending unary RPC. This is typically ' + 'non-fatal as stream requests are best-effort.', + exc_info=True) else: self._rpc.send(request) @@ -433,9 +439,9 @@ def _should_recover(self, exception): # If this is in the list of idempotent exceptions, then we want to # recover. 
if isinstance(exception, _RETRYABLE_STREAM_ERRORS): - logging.info('Observed recoverable stream error %s', exception) + _LOGGER.info('Observed recoverable stream error %s', exception) return True - logging.info('Observed non-recoverable stream error %s', exception) + _LOGGER.info('Observed non-recoverable stream error %s', exception) return False def _on_rpc_done(self, future): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 53b23dd7049f..5f2a8f53fb9f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging + import mock import pytest @@ -207,6 +209,21 @@ def test_send_unary_empty(): manager._client.modify_ack_deadline.assert_not_called() +def test_send_unary_error(caplog): + caplog.set_level(logging.DEBUG) + + manager = make_manager() + manager._UNARY_REQUESTS = True + + error = exceptions.GoogleAPICallError('The front fell off') + manager._client.acknowledge.side_effect = error + + manager.send(types.StreamingPullRequest( + ack_ids=['ack_id1', 'ack_id2'])) + + assert 'The front fell off' in caplog.text + + def test_send_streaming(): manager = make_manager() manager._UNARY_REQUESTS = False From e49639457c5120ab9ae4455b8e91e7bf25754d3e Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 5 Jun 2018 13:49:24 -0700 Subject: [PATCH 0274/1197] Release pubsub 0.35.3 (#5439) --- packages/google-cloud-pubsub/CHANGELOG.md | 6 ++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 
85af38e15069..4c525ab2a09b 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.3 + +### Implementation Changes + +- Add additional error handling to unary RPCs (#5438) + ## 0.35.2 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index fd97318fee06..d755d3e99b2c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.2' +version = '0.35.3' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 093231b6cf986c4433b2dc4f5cfd968163f3baa2 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 6 Jun 2018 12:07:54 -0700 Subject: [PATCH 0275/1197] Use operational lock when checking for activity on streams (#5445) --- .../cloud/pubsub_v1/subscriber/_protocol/bidi.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index a0bedf7e3d8e..fd16879e50b7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -310,7 +310,7 @@ def should_recover(exc): def __init__(self, start_rpc, should_recover, initial_request=None): super(ResumableBidiRpc, self).__init__(start_rpc, initial_request) self._should_recover = should_recover - self._operational_lock = threading.Lock() + self._operational_lock = threading.RLock() self._finalized = False self._finalize_lock = threading.Lock() @@ -393,6 +393,14 @@ def recv(self): return self._recoverable( super(ResumableBidiRpc, self).recv) + 
@property + def is_active(self): + """bool: True if this stream is currently open and active.""" + # Use the operational lock. It's entirely possible for something + # to check the active state *while* the RPC is being retried. + with self._operational_lock: + return self.call is not None and self.call.is_active() + class BackgroundConsumer(object): """A bi-directional stream consumer that runs in a separate thread. From 5dcb0baf3a43eb1570dd537dd7c44b1c624f2195 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 6 Jun 2018 14:17:10 -0700 Subject: [PATCH 0276/1197] Recover streams during the gRPC error callback (#5446) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 25 +++++++++++++++---- .../_protocol/streaming_pull_manager.py | 1 + .../unit/pubsub_v1/subscriber/test_bidi.py | 6 ++++- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index fd16879e50b7..00877e70058e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -235,7 +235,9 @@ def send(self, request): raise ValueError( 'Can not send() on an RPC that has never been open()ed.') - if self.is_active: + # Don't use self.is_active(), as ResumableBidiRpc will overload it + # to mean something semantically different. + if self.call.is_active(): self._request_queue.put(request) else: # calling next should cause the call to raise. @@ -330,11 +332,15 @@ def _on_call_done(self, future): # "future" here is also a grpc.RpcError. if not self._should_recover(future): self._finalize(future) + else: + _LOGGER.debug('Re-opening stream from gRPC callback.') + self._reopen() def _reopen(self): with self._operational_lock: # Another thread already managed to re-open this stream. 
- if self.is_active: + if self.call is not None and self.call.is_active(): + _LOGGER.debug('Stream was already re-established.') return self.call = None @@ -379,10 +385,14 @@ def _recoverable(self, method, *args, **kwargs): return method(*args, **kwargs) except Exception as exc: + _LOGGER.debug('Call to retryable %r caused %s.', method, exc) if not self._should_recover(exc): self.close() + _LOGGER.debug('Not retrying %r due to %s.', method, exc) + self._finalize(exc) raise exc + _LOGGER.debug('Re-opening stream from retryable %r.', method) self._reopen() def send(self, request): @@ -398,8 +408,13 @@ def is_active(self): """bool: True if this stream is currently open and active.""" # Use the operational lock. It's entirely possible for something # to check the active state *while* the RPC is being retried. + # Also, use finalized to track the actual terminal state here. + # This is because if the stream is re-established by the gRPC thread + # it's technically possible to check this between when gRPC marks the + # RPC as inactive and when gRPC executes our callback that re-opens + # the stream. with self._operational_lock: - return self.call is not None and self.call.is_active() + return self.call is not None and not self._finalized class BackgroundConsumer(object): @@ -491,8 +506,8 @@ def _thread_main(self): else: _LOGGER.error( - 'The bidirectional RPC unexpectedly exited. %s', - self._bidi_rpc.call) + 'The bidirectional RPC unexpectedly exited. This is a truly ' + 'exceptional case. 
Please file a bug with your logs.') _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index edc7cfbf8802..b7b9002e2844 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -402,6 +402,7 @@ def _on_response(self, response): After the messages have all had their ack deadline updated, execute the callback for each message using the executor. """ + _LOGGER.debug( 'Scheduling callbacks for %s messages.', len(response.received_messages)) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py index b040e7f97887..2e72a757600a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -285,13 +285,17 @@ def test_initial_state(self): assert bidi_rpc.is_active is False def test_done_callbacks_recoverable(self): - bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: True) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, instance=True) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, lambda _: True) callback = mock.Mock(spec=['__call__']) bidi_rpc.add_done_callback(callback) bidi_rpc._on_call_done(mock.sentinel.future) callback.assert_not_called() + start_rpc.assert_called_once() + assert bidi_rpc.is_active def test_done_callbacks_non_recoverable(self): bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) From bde74de74c7c96966beeb30539214ac019528c7e Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 6 Jun 2018 14:51:26 -0700 Subject: [PATCH 0277/1197] Release 
0.35.4 (#5447) --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 4c525ab2a09b..8a4d16a53033 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.4 + +### Implementation Changes + +- Recover streams during the gRPC error callback. (#5446) +- Use operational lock when checking for activity on streams. (#5445) + ## 0.35.3 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index d755d3e99b2c..48f6ed882024 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.3' +version = '0.35.4' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 4c06cd1a327966f4dc989d29a3696060af546027 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 17 Jul 2018 16:14:00 -0400 Subject: [PATCH 0278/1197] Pubsub: Make 'Message.publish_time' return datetime (#5633) Rather than a 'google.protobuf.timestamp_pb2.Timestamp' instance. Closes #5598. 
--- .../cloud/pubsub_v1/subscriber/message.py | 8 ++++- .../unit/pubsub_v1/subscriber/test_message.py | 29 ++++++++++++++----- 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index d24161e853f4..091826007ae3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -14,10 +14,12 @@ from __future__ import absolute_import +import datetime import json import math import time +from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -151,7 +153,11 @@ def publish_time(self): Returns: datetime: The date and time that the message was published. """ - return self._message.publish_time + timestamp = self._message.publish_time + delta = datetime.timedelta( + seconds=timestamp.seconds, + microseconds=timestamp.nanos // 1000) + return datetime_helpers._UTC_EPOCH + delta @property def size(self): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 431d39bb6afc..4089d4d5109d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -12,25 +12,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime import time import mock +import pytz from six.moves import queue +from google.protobuf import timestamp_pb2 +from google.api_core import datetime_helpers from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import requests +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc) +RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 +PUBLISHED_MICROS = 123456 +PUBLISHED = RECEIVED + datetime.timedelta( + days=1, microseconds=PUBLISHED_MICROS) +PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 + + def create_message(data, ack_id='ACKID', **attrs): with mock.patch.object(message.Message, 'lease') as lease: with mock.patch.object(time, 'time') as time_: - time_.return_value = 1335020400 + time_.return_value = RECEIVED_SECONDS msg = message.Message(types.PubsubMessage( attributes=attrs, data=data, message_id='message_id', - publish_time=types.Timestamp(seconds=1335020400 - 86400), + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, + nanos=PUBLISHED_MICROS * 1000, + ), ), ack_id, queue.Queue()) lease.assert_called_once_with() return msg @@ -48,7 +63,7 @@ def test_data(): def test_publish_time(): msg = create_message(b'foo') - assert msg.publish_time == types.Timestamp(seconds=1335020400 - 86400) + assert msg.publish_time == PUBLISHED def check_call_types(mock, *args, **kwargs): @@ -80,7 +95,7 @@ def test_ack(): msg.ack() put.assert_called_once_with(requests.AckRequest( ack_id='bogus_ack_id', - byte_size=25, + byte_size=30, time_to_ack=mock.ANY, )) check_call_types(put, requests.AckRequest) @@ -92,7 +107,7 @@ def test_drop(): msg.drop() put.assert_called_once_with(requests.DropRequest( ack_id='bogus_ack_id', - byte_size=25, + byte_size=30, )) check_call_types(put, requests.DropRequest) @@ -103,7 +118,7 @@ def test_lease(): msg.lease() put.assert_called_once_with(requests.LeaseRequest( 
ack_id='bogus_ack_id', - byte_size=25, + byte_size=30, )) check_call_types(put, requests.LeaseRequest) @@ -125,7 +140,7 @@ def test_nack(): msg.nack() put.assert_called_once_with(requests.NackRequest( ack_id='bogus_ack_id', - byte_size=25, + byte_size=30, )) check_call_types(put, requests.NackRequest) From 655d23cde78bde7cf63797ef61f6dfcf6d436141 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 26 Jul 2018 14:09:53 -0400 Subject: [PATCH 0279/1197] Add 'Message.ack_id' property. (#5693) Also, add explicit test for 'Message.size' property. Closes #5691. --- .../google/cloud/pubsub_v1/subscriber/message.py | 5 +++++ .../tests/unit/pubsub_v1/subscriber/test_message.py | 11 +++++++++++ 2 files changed, 16 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 091826007ae3..6772f196bf60 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -164,6 +164,11 @@ def size(self): """Return the size of the underlying message, in bytes.""" return self._message.ByteSize() + @property + def ack_id(self): + """str: the ID used to ack the message.""" + return self._ack_id + def ack(self): """Acknowledge the given message. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 4089d4d5109d..bb87dec3518c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -61,6 +61,17 @@ def test_data(): assert msg.data == b'foo' +def test_size(): + msg = create_message(b'foo') + assert msg.size == 30 # payload + protobuf overhead + + +def test_ack_id(): + ack_id = 'MY-ACK-ID' + msg = create_message(b'foo', ack_id=ack_id) + assert msg.ack_id == ack_id + + def test_publish_time(): msg = create_message(b'foo') assert msg.publish_time == PUBLISHED From 3ee5a1a85d3c05fbe60aa1292ab6188ad719313b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 6 Aug 2018 16:16:16 -0400 Subject: [PATCH 0280/1197] Ensure SPM methods check that 'self._consumer' is not None before use. (#5758) Closes #5751. 
--- .../_protocol/streaming_pull_manager.py | 11 ++++++----- .../subscriber/test_streaming_pull_manager.py | 17 +++++++++++++++++ 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index b7b9002e2844..6c1d90192477 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -208,10 +208,11 @@ def add_close_callback(self, callback): def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" - if self.load >= 1.0 and not self._consumer.is_paused: - _LOGGER.debug( - 'Message backlog over load at %.2f, pausing.', self.load) - self._consumer.pause() + if self.load >= 1.0: + if self._consumer is not None and not self._consumer.is_paused: + _LOGGER.debug( + 'Message backlog over load at %.2f, pausing.', self.load) + self._consumer.pause() def maybe_resume_consumer(self): """Check the current load and resume the consumer if needed.""" @@ -221,7 +222,7 @@ def maybe_resume_consumer(self): # In order to not thrash too much, require us to have passed below # the resume threshold (80% by default) of each flow control setting # before restarting. 
- if not self._consumer.is_paused: + if self._consumer is None or not self._consumer.is_paused: return if self.load < self.flow_control.resume_threshold: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 5f2a8f53fb9f..47638070478a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -111,6 +111,17 @@ def test_ack_deadline(): assert manager.ack_deadline == 20 +def test_maybe_pause_consumer_wo_consumer_set(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + manager.maybe_pause_consumer() # no raise + # Ensure load > 1 + _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + _leaser.message_count = 100 + _leaser.bytes = 10000 + manager.maybe_pause_consumer() # no raise + + def test_lease_load_and_pause(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) @@ -177,6 +188,12 @@ def test_resume_not_paused(): manager._consumer.resume.assert_not_called() +def test_maybe_resume_consumer_wo_consumer_set(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + manager.maybe_resume_consumer() # no raise + + def test_send_unary(): manager = make_manager() manager._UNARY_REQUESTS = True From e7eb5beab56be5eef6d302974301d58ed721018c Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 10 Aug 2018 12:37:58 -0700 Subject: [PATCH 0281/1197] PubSub: add geo-fencing support (#5769) * Add support for storage location policy (geo-fencing) to Pub/Sub by regenerating v1 endpoint --- packages/google-cloud-pubsub/LICENSE | 7 +- packages/google-cloud-pubsub/MANIFEST.in | 3 +- .../google-cloud-pubsub/google/__init__.py | 4 +- 
.../google/cloud/__init__.py | 4 +- .../google/cloud/pubsub.py | 4 +- .../cloud/pubsub_v1/gapic/publisher_client.py | 487 ++-- .../pubsub_v1/gapic/subscriber_client.py | 826 +++++-- .../gapic/subscriber_client_config.py | 2 +- .../pubsub_v1/gapic/transports/__init__.py | 0 .../transports/publisher_grpc_transport.py | 238 ++ .../transports/subscriber_grpc_transport.py | 405 ++++ .../cloud/pubsub_v1/proto/pubsub_pb2.py | 2077 +++++++---------- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 425 ++-- packages/google-cloud-pubsub/setup.py | 5 +- packages/google-cloud-pubsub/synth.py | 141 ++ .../gapic/v1/test_system_publisher_v1.py | 28 + .../unit/gapic/v1/test_publisher_client_v1.py | 4 +- .../gapic/v1/test_subscriber_client_v1.py | 39 +- .../publisher/test_publisher_client.py | 13 +- 19 files changed, 2799 insertions(+), 1913 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py create mode 100644 packages/google-cloud-pubsub/synth.py create mode 100644 packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py diff --git a/packages/google-cloud-pubsub/LICENSE b/packages/google-cloud-pubsub/LICENSE index d64569567334..a8ee855de2aa 100644 --- a/packages/google-cloud-pubsub/LICENSE +++ b/packages/google-cloud-pubsub/LICENSE @@ -1,7 +1,6 @@ - - Apache License + Apache License Version 2.0, January 2004 - http://www.apache.org/licenses/ + https://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -193,7 +192,7 @@ you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - http://www.apache.org/licenses/LICENSE-2.0 + https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index fc77f8c82ff0..9cbf175afe6b 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,4 +1,5 @@ include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * -global-exclude *.pyc __pycache__ +global-exclude *.py[co] +global-exclude __pycache__ diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py index 9ee9bf4342ab..7a9e5a0ef198 100644 --- a/packages/google-cloud-pubsub/google/__init__.py +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -1,10 +1,10 @@ -# Copyright 2016 Google LLC +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index 9ee9bf4342ab..7a9e5a0ef198 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -1,10 +1,10 @@ -# Copyright 2016 Google LLC +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index dba2ad09a3fd..a0872f96ac0c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -1,10 +1,10 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 0e61814166dd..a5f82ec5eaff 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -1,46 +1,42 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, -# and updates to that file get reflected here through a refresh process. -# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. """Accesses the google.pubsub.v1 Publisher API.""" import functools import pkg_resources +import warnings +from google.oauth2 import service_account import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.path_template import google.api_core.grpc_helpers import google.api_core.page_iterator -import google.api_core.path_template +import grpc from google.cloud.pubsub_v1.gapic import publisher_client_config +from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 +from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 - _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-pubsub').version + 'google-cloud-pubsub', ).version class PublisherClient(object): @@ -58,26 +54,50 @@ class PublisherClient(object): 
'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub', ) - # The name of the interface for this client. This is the key used to find - # method configuration in the client_config dictionary - _INTERFACE_NAME = ('google.pubsub.v1.Publisher') + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.pubsub.v1.Publisher' @classmethod - def project_path(cls, project): - """Returns a fully-qualified project resource name string.""" - return google.api_core.path_template.expand( - 'projects/{project}', - project=project, ) + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file @classmethod def topic_path(cls, project, topic): - """Returns a fully-qualified topic resource name string.""" + """Return a fully-qualified topic string.""" return google.api_core.path_template.expand( 'projects/{project}/topics/{topic}', project=project, - topic=topic, ) + topic=topic, + ) + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) def __init__(self, + transport=None, channel=None, credentials=None, client_config=publisher_client_config.config, @@ -85,107 +105,98 @@ def __init__(self, """Constructor. 
Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. If specified, then the ``credentials`` - argument is ignored. + transport (Union[~.PublisherGrpcTransport, + Callable[[~.Credentials, type], ~.PublisherGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): - A dictionary of call options for each method. If not specified - the default configuration is used. Generally, you only need - to set this if you're developing your own client library. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. 
""" - if channel is not None and credentials is not None: - raise ValueError( - 'channel and credentials arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__)) - - if channel is None: - channel = google.api_core.grpc_helpers.create_channel( - self.SERVICE_ADDRESS, + # Raise deprecation warnings for things we want to go away. + if client_config: + warnings.warn('The `client_config` argument is deprecated.', + PendingDeprecationWarning) + if channel: + warnings.warn( + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning) + + # Instantiate the transport. + # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=publisher_grpc_transport. + PublisherGrpcTransport, + ) + else: + if credentials: + raise ValueError( + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') + self.transport = transport + else: + self.transport = publisher_grpc_transport.PublisherGrpcTransport( + address=self.SERVICE_ADDRESS, + channel=channel, credentials=credentials, - scopes=self._DEFAULT_SCOPES) + ) self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel)) - self.publisher_stub = (pubsub_pb2.PublisherStub(channel)) + self.publisher_stub = (pubsub_pb2_grpc.PublisherStub(channel)) if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) 
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) - interface_config = client_config['interfaces'][self._INTERFACE_NAME] - method_configs = google.api_core.gapic_v1.config.parse_method_configs( - interface_config) - - self._create_topic = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.CreateTopic, - default_retry=method_configs['CreateTopic'].retry, - default_timeout=method_configs['CreateTopic'].timeout, - client_info=client_info) - self._update_topic = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.UpdateTopic, - default_retry=method_configs['UpdateTopic'].retry, - default_timeout=method_configs['UpdateTopic'].timeout, - client_info=client_info) - self._publish = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.Publish, - default_retry=method_configs['Publish'].retry, - default_timeout=method_configs['Publish'].timeout, - client_info=client_info) - self._get_topic = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.GetTopic, - default_retry=method_configs['GetTopic'].retry, - default_timeout=method_configs['GetTopic'].timeout, - client_info=client_info) - self._list_topics = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.ListTopics, - default_retry=method_configs['ListTopics'].retry, - default_timeout=method_configs['ListTopics'].timeout, - client_info=client_info) - self._list_topic_subscriptions = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.ListTopicSubscriptions, - default_retry=method_configs['ListTopicSubscriptions'].retry, - default_timeout=method_configs['ListTopicSubscriptions'].timeout, - client_info=client_info) - self._delete_topic = google.api_core.gapic_v1.method.wrap_method( - self.publisher_stub.DeleteTopic, - default_retry=method_configs['DeleteTopic'].retry, - default_timeout=method_configs['DeleteTopic'].timeout, - client_info=client_info) - 
self._set_iam_policy = google.api_core.gapic_v1.method.wrap_method( - self.iam_policy_stub.SetIamPolicy, - default_retry=method_configs['SetIamPolicy'].retry, - default_timeout=method_configs['SetIamPolicy'].timeout, - client_info=client_info) - self._get_iam_policy = google.api_core.gapic_v1.method.wrap_method( - self.iam_policy_stub.GetIamPolicy, - default_retry=method_configs['GetIamPolicy'].retry, - default_timeout=method_configs['GetIamPolicy'].timeout, - client_info=client_info) - self._test_iam_permissions = google.api_core.gapic_v1.method.wrap_method( - self.iam_policy_stub.TestIamPermissions, - default_retry=method_configs['TestIamPermissions'].retry, - default_timeout=method_configs['TestIamPermissions'].timeout, - client_info=client_info) + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} # Service calls def create_topic(self, name, labels=None, + message_storage_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Creates the given topic with the given name. + Creates the given topic with the given name. See the + resource name rules. Example: >>> from google.cloud import pubsub_v1 @@ -204,12 +215,22 @@ def create_topic(self, signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``\"goog\"``. labels (dict[str -> str]): User labels. + message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining how messages published to the topic may be stored. It + is determined when the topic is created based on the policy configured at + the project level. It must not be set by the caller in the request to + CreateTopic or to UpdateTopic. 
This field will be populated in the + responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the + response, then no constraints are in effect. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Topic` instance. @@ -221,38 +242,57 @@ def create_topic(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = pubsub_pb2.Topic(name=name, labels=labels) - return self._create_topic(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. + if 'create_topic' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_topic'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_topic, + default_retry=self._method_configs['CreateTopic'].retry, + default_timeout=self._method_configs['CreateTopic'] + .timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.Topic( + name=name, + labels=labels, + message_storage_policy=message_storage_policy, + ) + return self._inner_api_calls['create_topic']( + request, retry=retry, timeout=timeout, metadata=metadata) def update_topic(self, topic, update_mask, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Updates an existing topic. Note that certain properties of a topic are not - modifiable. 
Options settings follow the style guide: - NOTE: The style guide requires body: \"topic\" instead of body: \"*\". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. + Updates an existing topic. Note that certain properties of a + topic are not modifiable. Example: >>> from google.cloud import pubsub_v1 >>> >>> client = pubsub_v1.PublisherClient() >>> + >>> # TODO: Initialize ``topic``: >>> topic = {} + >>> + >>> # TODO: Initialize ``update_mask``: >>> update_mask = {} >>> >>> response = client.update_topic(topic, update_mask) Args: - topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): The topic to update. + topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): The updated topic object. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. - Must be specified and non-empty. + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. Must be specified + and non-empty. Note that if ``update_mask`` contains + \"message_storage_policy\" then the new value will be determined based on the + policy configured at the project or organization level. The + ``message_storage_policy`` must not be set in the ``topic`` provided above. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -261,6 +301,8 @@ def update_topic(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Topic` instance. @@ -272,15 +314,30 @@ def update_topic(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'update_topic' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_topic'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_topic, + default_retry=self._method_configs['UpdateTopic'].retry, + default_timeout=self._method_configs['UpdateTopic'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.UpdateTopicRequest( - topic=topic, update_mask=update_mask) - return self._update_topic(request, retry=retry, timeout=timeout) + topic=topic, + update_mask=update_mask, + ) + return self._inner_api_calls['update_topic']( + request, retry=retry, timeout=timeout, metadata=metadata) def publish(self, topic, messages, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. The message payload must not be empty; it must contain @@ -310,6 +367,8 @@ def publish(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.PublishResponse` instance. @@ -321,13 +380,28 @@ def publish(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" - request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) - return self._publish(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. + if 'publish' not in self._inner_api_calls: + self._inner_api_calls[ + 'publish'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.publish, + default_retry=self._method_configs['Publish'].retry, + default_timeout=self._method_configs['Publish'].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.PublishRequest( + topic=topic, + messages=messages, + ) + return self._inner_api_calls['publish']( + request, retry=retry, timeout=timeout, metadata=metadata) def get_topic(self, topic, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Gets the configuration of a topic. @@ -349,6 +423,8 @@ def get_topic(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Topic` instance. @@ -360,14 +436,26 @@ def get_topic(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = pubsub_pb2.GetTopicRequest(topic=topic) - return self._get_topic(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. 
+ if 'get_topic' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_topic'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_topic, + default_retry=self._method_configs['GetTopic'].retry, + default_timeout=self._method_configs['GetTopic'].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.GetTopicRequest(topic=topic, ) + return self._inner_api_calls['get_topic']( + request, retry=retry, timeout=timeout, metadata=metadata) def list_topics(self, project, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Lists matching topics. @@ -378,13 +466,15 @@ def list_topics(self, >>> >>> project = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_topics(project): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -404,6 +494,8 @@ def list_topics(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this @@ -418,16 +510,32 @@ def list_topics(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'list_topics' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_topics'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_topics, + default_retry=self._method_configs['ListTopics'].retry, + default_timeout=self._method_configs['ListTopics'].timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.ListTopicsRequest( - project=project, page_size=page_size) + project=project, + page_size=page_size, + ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_topics, retry=retry, timeout=timeout), + self._inner_api_calls['list_topics'], + retry=retry, + timeout=timeout, + metadata=metadata), request=request, items_field='topics', request_token_field='page_token', - response_token_field='next_page_token') + response_token_field='next_page_token', + ) return iterator def list_topic_subscriptions( @@ -435,9 +543,10 @@ def list_topic_subscriptions( topic, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Lists the name of the subscriptions for this topic. + Lists the names of the subscriptions on this topic. Example: >>> from google.cloud import pubsub_v1 @@ -446,13 +555,15 @@ def list_topic_subscriptions( >>> >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_topic_subscriptions(topic): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -472,6 +583,8 @@ def list_topic_subscriptions( timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this @@ -486,22 +599,41 @@ def list_topic_subscriptions( to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'list_topic_subscriptions' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_topic_subscriptions'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_topic_subscriptions, + default_retry=self._method_configs[ + 'ListTopicSubscriptions'].retry, + default_timeout=self._method_configs[ + 'ListTopicSubscriptions'].timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, page_size=page_size) + topic=topic, + page_size=page_size, + ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_topic_subscriptions, retry=retry, timeout=timeout), + self._inner_api_calls['list_topic_subscriptions'], + retry=retry, + timeout=timeout, + metadata=metadata), request=request, items_field='subscriptions', request_token_field='page_token', - response_token_field='next_page_token') + response_token_field='next_page_token', + ) return iterator def delete_topic(self, topic, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic does not exist. After a topic is deleted, a new topic may be created with @@ -527,6 +659,8 @@ def delete_topic(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. 
Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -535,14 +669,27 @@ def delete_topic(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = pubsub_pb2.DeleteTopicRequest(topic=topic) - self._delete_topic(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. + if 'delete_topic' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_topic'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_topic, + default_retry=self._method_configs['DeleteTopic'].retry, + default_timeout=self._method_configs['DeleteTopic'] + .timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.DeleteTopicRequest(topic=topic, ) + self._inner_api_calls['delete_topic']( + request, retry=retry, timeout=timeout, metadata=metadata) def set_iam_policy(self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Sets the access control policy on the specified resource. Replaces any existing policy. @@ -553,6 +700,8 @@ def set_iam_policy(self, >>> client = pubsub_v1.PublisherClient() >>> >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # TODO: Initialize ``policy``: >>> policy = {} >>> >>> response = client.set_iam_policy(resource, policy) @@ -573,6 +722,8 @@ def set_iam_policy(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
Returns: A :class:`~google.cloud.pubsub_v1.types.Policy` instance. @@ -584,14 +735,29 @@ def set_iam_policy(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'set_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.set_iam_policy, + default_retry=self._method_configs['SetIamPolicy'].retry, + default_timeout=self._method_configs['SetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy) - return self._set_iam_policy(request, retry=retry, timeout=timeout) + resource=resource, + policy=policy, + ) + return self._inner_api_calls['set_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) def get_iam_policy(self, resource, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy @@ -616,6 +782,8 @@ def get_iam_policy(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Policy` instance. @@ -627,14 +795,27 @@ def get_iam_policy(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - return self._get_iam_policy(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. 
+ if 'get_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_iam_policy, + default_retry=self._method_configs['GetIamPolicy'].retry, + default_timeout=self._method_configs['GetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + return self._inner_api_calls['get_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) def test_iam_permissions(self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of @@ -646,6 +827,8 @@ def test_iam_permissions(self, >>> client = pubsub_v1.PublisherClient() >>> >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # TODO: Initialize ``permissions``: >>> permissions = [] >>> >>> response = client.test_iam_permissions(resource, permissions) @@ -664,6 +847,8 @@ def test_iam_permissions(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. @@ -675,7 +860,21 @@ def test_iam_permissions(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'test_iam_permissions' not in self._inner_api_calls: + self._inner_api_calls[ + 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.test_iam_permissions, + default_retry=self._method_configs[ + 'TestIamPermissions'].retry, + default_timeout=self._method_configs['TestIamPermissions'] + .timeout, + client_info=self._client_info, + ) + request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions) - return self._test_iam_permissions( - request, retry=retry, timeout=timeout) + resource=resource, + permissions=permissions, + ) + return self._inner_api_calls['test_iam_permissions']( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index c019bd9e295f..8fdbff7168be 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -1,43 +1,39 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-# -# EDITING INSTRUCTIONS -# This file was generated from the file -# https://github.com/google/googleapis/blob/master/google/pubsub/v1/pubsub.proto, -# and updates to that file get reflected here through a refresh process. -# For the short term, the refresh process will only be runnable by Google engineers. -# -# The only allowed edits are to method and file documentation. A 3-way -# merge preserves those additions if the generated source changes. """Accesses the google.pubsub.v1 Subscriber API.""" import functools import pkg_resources +import warnings +from google.oauth2 import service_account import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.grpc_helpers import google.api_core.page_iterator -import google.api_core.path_template import google.api_core.protobuf_helpers +import grpc from google.cloud.pubsub_v1.gapic import subscriber_client_config +from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport from google.cloud.pubsub_v1.proto import pubsub_pb2 +from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 from google.protobuf import timestamp_pb2 @@ -48,7 +44,8 @@ class SubscriberClient(object): """ The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the ``Pull`` method. + consume messages from a subscription via the ``Pull`` method or by + establishing a bi-directional stream using the ``StreamingPull`` method. """ SERVICE_ADDRESS = 'pubsub.googleapis.com:443' @@ -60,42 +57,68 @@ class SubscriberClient(object): 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub', ) - # The name of the interface for this client. 
This is the key used to find - # method configuration in the client_config dictionary - _INTERFACE_NAME = ('google.pubsub.v1.Subscriber') + # The name of the interface for this client. This is the key used to + # find the method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.pubsub.v1.Subscriber' @classmethod - def project_path(cls, project): - """Returns a fully-qualified project resource name string.""" - return google.api_core.path_template.expand( - 'projects/{project}', - project=project, ) + def from_service_account_file(cls, filename, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. - @classmethod - def snapshot_path(cls, project, snapshot): - """Returns a fully-qualified snapshot resource name string.""" - return google.api_core.path_template.expand( - 'projects/{project}/snapshots/{snapshot}', - project=project, - snapshot=snapshot, ) + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file @classmethod def subscription_path(cls, project, subscription): - """Returns a fully-qualified subscription resource name string.""" + """Return a fully-qualified subscription string.""" return google.api_core.path_template.expand( 'projects/{project}/subscriptions/{subscription}', project=project, - subscription=subscription, ) + subscription=subscription, + ) @classmethod def topic_path(cls, project, topic): - """Returns a fully-qualified topic resource name string.""" + """Return a fully-qualified topic string.""" return google.api_core.path_template.expand( 'projects/{project}/topics/{topic}', project=project, - topic=topic, ) + topic=topic, + ) + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + 'projects/{project}', + project=project, + ) + + @classmethod + def snapshot_path(cls, project, snapshot): + """Return a fully-qualified snapshot string.""" + return google.api_core.path_template.expand( + 'projects/{project}/snapshots/{snapshot}', + project=project, + snapshot=snapshot, + ) def __init__(self, + transport=None, channel=None, credentials=None, client_config=subscriber_client_config.config, @@ -103,138 +126,86 @@ def __init__(self, """Constructor. Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. If specified, then the ``credentials`` - argument is ignored. + transport (Union[~.SubscriberGrpcTransport, + Callable[[~.Credentials, type], ~.SubscriberGrpcTransport]): A transport + instance, responsible for actually making the API calls. + The default transport uses the gRPC protocol. + This argument may also be a callable which returns a + transport instance. 
Callables will be sent the credentials + as the first argument and the default transport class as + the second argument. + channel (grpc.Channel): DEPRECATED. A ``Channel`` instance + through which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. credentials (google.auth.credentials.Credentials): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - client_config (dict): - A dictionary of call options for each method. If not specified - the default configuration is used. Generally, you only need - to set this if you're developing your own client library. + This argument is mutually exclusive with providing a + transport instance to ``transport``; doing so will raise + an exception. + client_config (dict): DEPRECATED. A dictionary of call options for + each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. """ - if channel is not None and credentials is not None: - raise ValueError( - 'channel and credentials arguments to {} are mutually ' - 'exclusive.'.format(self.__class__.__name__)) - - if channel is None: - channel = google.api_core.grpc_helpers.create_channel( - self.SERVICE_ADDRESS, + # Raise deprecation warnings for things we want to go away. + if client_config: + warnings.warn('The `client_config` argument is deprecated.', + PendingDeprecationWarning) + if channel: + warnings.warn( + 'The `channel` argument is deprecated; use ' + '`transport` instead.', PendingDeprecationWarning) + + # Instantiate the transport. 
+ # The transport is responsible for handling serialization and + # deserialization and actually sending data to the service. + if transport: + if callable(transport): + self.transport = transport( + credentials=credentials, + default_class=subscriber_grpc_transport. + SubscriberGrpcTransport, + ) + else: + if credentials: + raise ValueError( + 'Received both a transport instance and ' + 'credentials; these are mutually exclusive.') + self.transport = transport + else: + self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( + address=self.SERVICE_ADDRESS, + channel=channel, credentials=credentials, - scopes=self._DEFAULT_SCOPES) + ) self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel)) - self.subscriber_stub = (pubsub_pb2.SubscriberStub(channel)) + self.subscriber_stub = (pubsub_pb2_grpc.SubscriberStub(channel)) if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info - interface_config = client_config['interfaces'][self._INTERFACE_NAME] - method_configs = google.api_core.gapic_v1.config.parse_method_configs( - interface_config) - - self._create_subscription = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.CreateSubscription, - default_retry=method_configs['CreateSubscription'].retry, - default_timeout=method_configs['CreateSubscription'].timeout, - client_info=client_info) - self._get_subscription = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.GetSubscription, - default_retry=method_configs['GetSubscription'].retry, - default_timeout=method_configs['GetSubscription'].timeout, - client_info=client_info) - self._update_subscription = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.UpdateSubscription, - default_retry=method_configs['UpdateSubscription'].retry, - default_timeout=method_configs['UpdateSubscription'].timeout, - client_info=client_info) - 
self._list_subscriptions = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.ListSubscriptions, - default_retry=method_configs['ListSubscriptions'].retry, - default_timeout=method_configs['ListSubscriptions'].timeout, - client_info=client_info) - self._delete_subscription = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.DeleteSubscription, - default_retry=method_configs['DeleteSubscription'].retry, - default_timeout=method_configs['DeleteSubscription'].timeout, - client_info=client_info) - self._modify_ack_deadline = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.ModifyAckDeadline, - default_retry=method_configs['ModifyAckDeadline'].retry, - default_timeout=method_configs['ModifyAckDeadline'].timeout, - client_info=client_info) - self._acknowledge = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.Acknowledge, - default_retry=method_configs['Acknowledge'].retry, - default_timeout=method_configs['Acknowledge'].timeout, - client_info=client_info) - self._pull = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.Pull, - default_retry=method_configs['Pull'].retry, - default_timeout=method_configs['Pull'].timeout, - client_info=client_info) - self._streaming_pull = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.StreamingPull, - default_retry=method_configs['StreamingPull'].retry, - default_timeout=method_configs['StreamingPull'].timeout, - client_info=client_info) - self._modify_push_config = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.ModifyPushConfig, - default_retry=method_configs['ModifyPushConfig'].retry, - default_timeout=method_configs['ModifyPushConfig'].timeout, - client_info=client_info) - self._list_snapshots = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.ListSnapshots, - default_retry=method_configs['ListSnapshots'].retry, - default_timeout=method_configs['ListSnapshots'].timeout, - 
client_info=client_info) - self._create_snapshot = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.CreateSnapshot, - default_retry=method_configs['CreateSnapshot'].retry, - default_timeout=method_configs['CreateSnapshot'].timeout, - client_info=client_info) - self._update_snapshot = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.UpdateSnapshot, - default_retry=method_configs['UpdateSnapshot'].retry, - default_timeout=method_configs['UpdateSnapshot'].timeout, - client_info=client_info) - self._delete_snapshot = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.DeleteSnapshot, - default_retry=method_configs['DeleteSnapshot'].retry, - default_timeout=method_configs['DeleteSnapshot'].timeout, - client_info=client_info) - self._seek = google.api_core.gapic_v1.method.wrap_method( - self.subscriber_stub.Seek, - default_retry=method_configs['Seek'].retry, - default_timeout=method_configs['Seek'].timeout, - client_info=client_info) - self._set_iam_policy = google.api_core.gapic_v1.method.wrap_method( - self.iam_policy_stub.SetIamPolicy, - default_retry=method_configs['SetIamPolicy'].retry, - default_timeout=method_configs['SetIamPolicy'].timeout, - client_info=client_info) - self._get_iam_policy = google.api_core.gapic_v1.method.wrap_method( - self.iam_policy_stub.GetIamPolicy, - default_retry=method_configs['GetIamPolicy'].retry, - default_timeout=method_configs['GetIamPolicy'].timeout, - client_info=client_info) - self._test_iam_permissions = google.api_core.gapic_v1.method.wrap_method( - self.iam_policy_stub.TestIamPermissions, - default_retry=method_configs['TestIamPermissions'].retry, - default_timeout=method_configs['TestIamPermissions'].timeout, - client_info=client_info) + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) 
+ self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Save a dictionary of cached API call functions. + # These are the actual callables which invoke the proper + # transport methods, wrapped with `wrap_method` to add retry, + # timeout, and the like. + self._inner_api_calls = {} # Service calls def create_subscription(self, @@ -246,9 +217,11 @@ def create_subscription(self, message_retention_duration=None, labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Creates a subscription to a given topic. + Creates a subscription to a given topic. See the + resource name rules. If the subscription already exists, returns ``ALREADY_EXISTS``. If the corresponding topic doesn't exist, returns ``NOT_FOUND``. @@ -275,7 +248,7 @@ def create_subscription(self, start with a letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters - in length, and it must not start with ``\"goog\"``. + in length, and it must not start with ``\"goog\"`` topic (str): The name of the topic from which this subscription is receiving messages. Format is ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been @@ -294,7 +267,8 @@ def create_subscription(self, For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using - pull. + non-streaming pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. 
The maximum custom deadline you can specify is 600 seconds (10 minutes). If this parameter is 0, a default value of 10 seconds is used. @@ -307,13 +281,19 @@ def create_subscription(self, retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the ``message_retention_duration`` - window. + window.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. + minutes.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Duration` labels (dict[str -> str]): User labels. @@ -323,6 +303,8 @@ def create_subscription(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. @@ -334,6 +316,18 @@ def create_subscription(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'create_subscription' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_subscription'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_subscription, + default_retry=self._method_configs[ + 'CreateSubscription'].retry, + default_timeout=self._method_configs['CreateSubscription'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.Subscription( name=name, topic=topic, @@ -341,13 +335,16 @@ def create_subscription(self, ack_deadline_seconds=ack_deadline_seconds, retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration, - labels=labels) - return self._create_subscription(request, retry=retry, timeout=timeout) + labels=labels, + ) + return self._inner_api_calls['create_subscription']( + request, retry=retry, timeout=timeout, metadata=metadata) def get_subscription(self, subscription, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + 
timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Gets the configuration details of a subscription. @@ -369,6 +366,8 @@ def get_subscription(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. @@ -380,29 +379,43 @@ def get_subscription(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) - return self._get_subscription(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. + if 'get_subscription' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_subscription'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_subscription, + default_retry=self._method_configs[ + 'GetSubscription'].retry, + default_timeout=self._method_configs['GetSubscription'] + .timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.GetSubscriptionRequest( + subscription=subscription, ) + return self._inner_api_calls['get_subscription']( + request, retry=retry, timeout=timeout, metadata=metadata) def update_subscription(self, subscription, update_mask, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. - NOTE: The style guide requires body: \"subscription\" instead of body: \"*\". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. 
See - https://cloud.google.com/apis/design/standard_methods#update for details. Example: >>> from google.cloud import pubsub_v1 >>> >>> client = pubsub_v1.SubscriberClient() >>> - >>> subscription = {} - >>> update_mask = {} + >>> ack_deadline_seconds = 42 + >>> subscription = {'ack_deadline_seconds': ack_deadline_seconds} + >>> paths_element = 'ack_deadline_seconds' + >>> paths = [paths_element] + >>> update_mask = {'paths': paths} >>> >>> response = client.update_subscription(subscription, update_mask) @@ -420,6 +433,8 @@ def update_subscription(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. @@ -431,15 +446,31 @@ def update_subscription(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'update_subscription' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_subscription'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_subscription, + default_retry=self._method_configs[ + 'UpdateSubscription'].retry, + default_timeout=self._method_configs['UpdateSubscription'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask) - return self._update_subscription(request, retry=retry, timeout=timeout) + subscription=subscription, + update_mask=update_mask, + ) + return self._inner_api_calls['update_subscription']( + request, retry=retry, timeout=timeout, metadata=metadata) def list_subscriptions(self, project, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Lists matching subscriptions. @@ -450,13 +481,15 @@ def list_subscriptions(self, >>> >>> project = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_subscriptions(project): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -476,6 +509,8 @@ def list_subscriptions(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this @@ -490,22 +525,41 @@ def list_subscriptions(self, to a retryable error and retry attempts failed. 
ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'list_subscriptions' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_subscriptions'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_subscriptions, + default_retry=self._method_configs[ + 'ListSubscriptions'].retry, + default_timeout=self._method_configs['ListSubscriptions'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.ListSubscriptionsRequest( - project=project, page_size=page_size) + project=project, + page_size=page_size, + ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_subscriptions, retry=retry, timeout=timeout), + self._inner_api_calls['list_subscriptions'], + retry=retry, + timeout=timeout, + metadata=metadata), request=request, items_field='subscriptions', request_token_field='page_token', - response_token_field='next_page_token') + response_token_field='next_page_token', + ) return iterator def delete_subscription(self, subscription, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Deletes an existing subscription. All messages retained in the subscription are immediately dropped. Calls to ``Pull`` after deletion will return @@ -531,6 +585,8 @@ def delete_subscription(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -539,16 +595,30 @@ def delete_subscription(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. 
""" + # Wrap the transport method to add retry and timeout logic. + if 'delete_subscription' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_subscription'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_subscription, + default_retry=self._method_configs[ + 'DeleteSubscription'].retry, + default_timeout=self._method_configs['DeleteSubscription'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription) - self._delete_subscription(request, retry=retry, timeout=timeout) + subscription=subscription, ) + self._inner_api_calls['delete_subscription']( + request, retry=retry, timeout=timeout, metadata=metadata) def modify_ack_deadline(self, subscription, ack_ids, ack_deadline_seconds, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message by the @@ -562,7 +632,11 @@ def modify_ack_deadline(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``ack_ids``: >>> ack_ids = [] + >>> + >>> # TODO: Initialize ``ack_deadline_seconds``: >>> ack_deadline_seconds = 0 >>> >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) @@ -584,6 +658,8 @@ def modify_ack_deadline(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -592,17 +668,32 @@ def modify_ack_deadline(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'modify_ack_deadline' not in self._inner_api_calls: + self._inner_api_calls[ + 'modify_ack_deadline'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.modify_ack_deadline, + default_retry=self._method_configs[ + 'ModifyAckDeadline'].retry, + default_timeout=self._method_configs['ModifyAckDeadline'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.ModifyAckDeadlineRequest( subscription=subscription, ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds) - self._modify_ack_deadline(request, retry=retry, timeout=timeout) + ack_deadline_seconds=ack_deadline_seconds, + ) + self._inner_api_calls['modify_ack_deadline']( + request, retry=retry, timeout=timeout, metadata=metadata) def acknowledge(self, subscription, ack_ids, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Acknowledges the messages associated with the ``ack_ids`` in the ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages @@ -618,6 +709,8 @@ def acknowledge(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``ack_ids``: >>> ack_ids = [] >>> >>> client.acknowledge(subscription, ack_ids) @@ -633,6 +726,8 @@ def acknowledge(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -641,16 +736,31 @@ def acknowledge(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'acknowledge' not in self._inner_api_calls: + self._inner_api_calls[ + 'acknowledge'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.acknowledge, + default_retry=self._method_configs['Acknowledge'].retry, + default_timeout=self._method_configs['Acknowledge'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids) - self._acknowledge(request, retry=retry, timeout=timeout) + subscription=subscription, + ack_ids=ack_ids, + ) + self._inner_api_calls['acknowledge']( + request, retry=retry, timeout=timeout, metadata=metadata) def pull(self, subscription, max_messages, return_immediately=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Pulls messages from the server. Returns an empty list if there are no messages available in the backlog. The server may return ``UNAVAILABLE`` if @@ -663,6 +773,8 @@ def pull(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``max_messages``: >>> max_messages = 0 >>> >>> response = client.pull(subscription, max_messages) @@ -684,6 +796,8 @@ def pull(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.PullResponse` instance. 
@@ -695,29 +809,37 @@ def pull(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'pull' not in self._inner_api_calls: + self._inner_api_calls[ + 'pull'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.pull, + default_retry=self._method_configs['Pull'].retry, + default_timeout=self._method_configs['Pull'].timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.PullRequest( subscription=subscription, max_messages=max_messages, - return_immediately=return_immediately) - return self._pull(request, retry=retry, timeout=timeout) + return_immediately=return_immediately, + ) + return self._inner_api_calls['pull']( + request, retry=retry, timeout=timeout, metadata=metadata) def streaming_pull(self, requests, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - (EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will - respond with UNIMPLEMENTED errors unless you have been invited to test - this feature. Contact cloud-pubsub@google.com with any questions. - Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack deadline modifications back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status ``OK`` to reassign - server-side resources, in which case, the client should re-establish the - stream. ``UNAVAILABLE`` may also be returned in the case of a transient error - (e.g., a server restart). These should also be retried by the client. Flow - control can be achieved by configuring the underlying RPC channel. + on any error. 
The server may close the stream with status ``UNAVAILABLE`` to + reassign server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by configuring the + underlying RPC channel. EXPERIMENTAL: This method interface might change in the future. @@ -727,6 +849,8 @@ def streaming_pull(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``stream_ack_deadline_seconds``: >>> stream_ack_deadline_seconds = 0 >>> request = {'subscription': subscription, 'stream_ack_deadline_seconds': stream_ack_deadline_seconds} >>> @@ -744,6 +868,8 @@ def streaming_pull(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]. @@ -755,13 +881,26 @@ def streaming_pull(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - return self._streaming_pull(requests, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. 
+ if 'streaming_pull' not in self._inner_api_calls: + self._inner_api_calls[ + 'streaming_pull'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.streaming_pull, + default_retry=self._method_configs['StreamingPull'].retry, + default_timeout=self._method_configs['StreamingPull'] + .timeout, + client_info=self._client_info, + ) + + return self._inner_api_calls['streaming_pull']( + requests, retry=retry, timeout=timeout, metadata=metadata) def modify_push_config(self, subscription, push_config, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Modifies the ``PushConfig`` for a specified subscription. @@ -776,6 +915,8 @@ def modify_push_config(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``push_config``: >>> push_config = {} >>> >>> client.modify_push_config(subscription, push_config) @@ -788,7 +929,7 @@ def modify_push_config(self, An empty ``pushConfig`` indicates that the Pub/Sub system should stop pushing messages from the given subscription and allow messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` is not called. + the subscription if ``Pull`` or ``StreamingPull`` is not called. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -797,6 +938,8 @@ def modify_push_config(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -805,17 +948,36 @@ def modify_push_config(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'modify_push_config' not in self._inner_api_calls: + self._inner_api_calls[ + 'modify_push_config'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.modify_push_config, + default_retry=self._method_configs[ + 'ModifyPushConfig'].retry, + default_timeout=self._method_configs['ModifyPushConfig'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config) - self._modify_push_config(request, retry=retry, timeout=timeout) + subscription=subscription, + push_config=push_config, + ) + self._inner_api_calls['modify_push_config']( + request, retry=retry, timeout=timeout, metadata=metadata) def list_snapshots(self, project, page_size=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Lists the existing snapshots. + Lists the existing snapshots.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import pubsub_v1 @@ -824,13 +986,15 @@ def list_snapshots(self, >>> >>> project = client.project_path('[PROJECT]') >>> - >>> >>> # Iterate over all results >>> for element in client.list_snapshots(project): ... # process element ... pass >>> - >>> # Or iterate over results one page at a time + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): ... for element in page: ... # process element @@ -850,6 +1014,8 @@ def list_snapshots(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.gax.PageIterator` instance. By default, this @@ -864,34 +1030,58 @@ def list_snapshots(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'list_snapshots' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_snapshots'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_snapshots, + default_retry=self._method_configs['ListSnapshots'].retry, + default_timeout=self._method_configs['ListSnapshots'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.ListSnapshotsRequest( - project=project, page_size=page_size) + project=project, + page_size=page_size, + ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._list_snapshots, retry=retry, timeout=timeout), + self._inner_api_calls['list_snapshots'], + retry=retry, + timeout=timeout, + metadata=metadata), request=request, items_field='snapshots', request_token_field='page_token', - response_token_field='next_page_token') + response_token_field='next_page_token', + ) return iterator def create_snapshot(self, name, subscription, + labels=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Creates a snapshot from the requested subscription. + Creates a snapshot from the requested subscription.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. If the snapshot already exists, returns ``ALREADY_EXISTS``. If the requested subscription doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a random + If the backlog in the subscription is too old -- and the resulting snapshot + would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. + See also the ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming - to the - `resource name format `_. - The generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the request. + to the `resource name format `_. + The generated + name is populated in the returned Snapshot object. Note that for REST API + requests, you must specify a name in the request. Example: >>> from google.cloud import pubsub_v1 @@ -910,22 +1100,24 @@ def create_snapshot(self, Note that for REST API requests, you must specify a name. Format is ``projects/{project}/snapshots/{snap}``. subscription (str): The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: - - * The existing backlog on the subscription. More precisely, this is - defined as the messages in the subscription's backlog that are - unacknowledged upon the successful completion of the - `CreateSnapshot` request; as well as: - * Any messages published to the subscription's topic following the - successful completion of the CreateSnapshot request. + Specifically, the created snapshot is guaranteed to retain: \ + (a) The existing backlog on the subscription. 
More precisely, this is \ + defined as the messages in the subscription's backlog that are \ + unacknowledged upon the successful completion of the \ + `CreateSnapshot` request; as well as: \ + (b) Any messages published to the subscription's topic following the \ + successful completion of the CreateSnapshot request. \ Format is ``projects/{project}/subscriptions/{sub}``. + labels (dict[str -> str]): User labels. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. @@ -937,35 +1129,54 @@ def create_snapshot(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'create_snapshot' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_snapshot'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_snapshot, + default_retry=self._method_configs['CreateSnapshot'].retry, + default_timeout=self._method_configs['CreateSnapshot'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription) - return self._create_snapshot(request, retry=retry, timeout=timeout) + name=name, + subscription=subscription, + labels=labels, + ) + return self._inner_api_calls['create_snapshot']( + request, retry=retry, timeout=timeout, metadata=metadata) def update_snapshot(self, snapshot, update_mask, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Updates an existing snapshot. Note that certain properties of a snapshot - are not modifiable. - NOTE: The style guide requires body: \"snapshot\" instead of body: \"*\". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. + Updates an existing snapshot.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. + Note that certain properties of a snapshot are not modifiable. Example: >>> from google.cloud import pubsub_v1 >>> >>> client = pubsub_v1.SubscriberClient() >>> - >>> snapshot = {} - >>> update_mask = {} + >>> seconds = 123456 + >>> expire_time = {'seconds': seconds} + >>> snapshot = {'expire_time': expire_time} + >>> paths_element = 'expire_time' + >>> paths = [paths_element] + >>> update_mask = {'paths': paths} >>> >>> response = client.update_snapshot(snapshot, update_mask) Args: - snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): The updated snpashot object. + snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): The updated snapshot object. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Snapshot` update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided snapshot to update. @@ -978,6 +1189,8 @@ def update_snapshot(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. @@ -989,16 +1202,35 @@ def update_snapshot(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'update_snapshot' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_snapshot'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_snapshot, + default_retry=self._method_configs['UpdateSnapshot'].retry, + default_timeout=self._method_configs['UpdateSnapshot'] + .timeout, + client_info=self._client_info, + ) + request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask) - return self._update_snapshot(request, retry=retry, timeout=timeout) + snapshot=snapshot, + update_mask=update_mask, + ) + return self._inner_api_calls['update_snapshot']( + request, retry=retry, timeout=timeout, metadata=metadata) def delete_snapshot(self, snapshot, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ - Removes an existing snapshot. All messages retained in the snapshot + Removes an existing snapshot.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. + When the snapshot is deleted, all messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be created with the same name, but the new one has no association with the old snapshot or its subscription, unless the same subscription is specified. @@ -1021,6 +1253,8 @@ def delete_snapshot(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1029,18 +1263,34 @@ def delete_snapshot(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) - self._delete_snapshot(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. 
+ if 'delete_snapshot' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_snapshot'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_snapshot, + default_retry=self._method_configs['DeleteSnapshot'].retry, + default_timeout=self._method_configs['DeleteSnapshot'] + .timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot, ) + self._inner_api_calls['delete_snapshot']( + request, retry=retry, timeout=timeout, metadata=metadata) def seek(self, subscription, time=None, snapshot=None, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. + whichever is provided in the request.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. Example: >>> from google.cloud import pubsub_v1 @@ -1075,6 +1325,8 @@ def seek(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.SeekResponse` instance. @@ -1086,21 +1338,37 @@ def seek(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'seek' not in self._inner_api_calls: + self._inner_api_calls[ + 'seek'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.seek, + default_retry=self._method_configs['Seek'].retry, + default_timeout=self._method_configs['Seek'].timeout, + client_info=self._client_info, + ) + # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. google.api_core.protobuf_helpers.check_oneof( time=time, - snapshot=snapshot, ) + snapshot=snapshot, + ) request = pubsub_pb2.SeekRequest( - subscription=subscription, time=time, snapshot=snapshot) - return self._seek(request, retry=retry, timeout=timeout) + subscription=subscription, + time=time, + snapshot=snapshot, + ) + return self._inner_api_calls['seek']( + request, retry=retry, timeout=timeout, metadata=metadata) def set_iam_policy(self, resource, policy, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Sets the access control policy on the specified resource. Replaces any existing policy. 
@@ -1111,6 +1379,8 @@ def set_iam_policy(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``policy``: >>> policy = {} >>> >>> response = client.set_iam_policy(resource, policy) @@ -1131,6 +1401,8 @@ def set_iam_policy(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Policy` instance. @@ -1142,14 +1414,29 @@ def set_iam_policy(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. + if 'set_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.set_iam_policy, + default_retry=self._method_configs['SetIamPolicy'].retry, + default_timeout=self._method_configs['SetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy) - return self._set_iam_policy(request, retry=retry, timeout=timeout) + resource=resource, + policy=policy, + ) + return self._inner_api_calls['set_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) def get_iam_policy(self, resource, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Gets the access control policy for a resource. 
Returns an empty policy if the resource exists and does not have a policy @@ -1174,6 +1461,8 @@ def get_iam_policy(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.Policy` instance. @@ -1185,14 +1474,27 @@ def get_iam_policy(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - return self._get_iam_policy(request, retry=retry, timeout=timeout) + # Wrap the transport method to add retry and timeout logic. + if 'get_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_iam_policy, + default_retry=self._method_configs['GetIamPolicy'].retry, + default_timeout=self._method_configs['GetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + return self._inner_api_calls['get_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) def test_iam_permissions(self, resource, permissions, retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT): + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): """ Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of @@ -1204,6 +1506,8 @@ def test_iam_permissions(self, >>> client = pubsub_v1.SubscriberClient() >>> >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> # TODO: Initialize ``permissions``: >>> permissions = [] >>> >>> response = client.test_iam_permissions(resource, permissions) @@ -1222,6 +1526,8 @@ def test_iam_permissions(self, timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. Returns: A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. @@ -1233,7 +1539,21 @@ def test_iam_permissions(self, to a retryable error and retry attempts failed. ValueError: If the parameters are invalid. """ + # Wrap the transport method to add retry and timeout logic. 
+ if 'test_iam_permissions' not in self._inner_api_calls: + self._inner_api_calls[ + 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( + self.transport.test_iam_permissions, + default_retry=self._method_configs[ + 'TestIamPermissions'].retry, + default_timeout=self._method_configs['TestIamPermissions'] + .timeout, + client_info=self._client_info, + ) + request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions) - return self._test_iam_permissions( - request, retry=retry, timeout=timeout) + resource=resource, + permissions=permissions, + ) + return self._inner_api_calls['test_iam_permissions']( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 896d2002cb12..7857fbdab8e1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -80,7 +80,7 @@ "retry_params_name": "messaging" }, "StreamingPull": { - "timeout_millis": 60000, + "timeout_millis": 900000, "retry_codes_name": "pull", "retry_params_name": "streaming_messaging" }, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py new file mode 100644 index 000000000000..c6bb9a648d5b --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -0,0 +1,238 @@ +# Copyright 2018 Google LLC +# +# 
class PublisherGrpcTransport(object):
    """gRPC transport class providing stubs for
    google.pubsub.v1 Publisher API.

    The transport provides access to the raw gRPC stubs,
    which can be used to take advantage of advanced
    features of gRPC.
    """
    # The scopes needed to make gRPC calls to all of the methods defined
    # in this service.
    _OAUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/pubsub',
    )

    def __init__(self,
                 channel=None,
                 credentials=None,
                 address='pubsub.googleapis.com:443'):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.

        Raises:
            ValueError: If both ``channel`` and ``credentials`` are given.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                'The `channel` and `credentials` arguments are mutually '
                'exclusive.', )

        # Create the channel.
        if channel is None:
            channel = self.create_channel(
                address=address,
                credentials=credentials,
            )

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            'iam_policy_stub': iam_policy_pb2.IAMPolicyStub(channel),
            'publisher_stub': pubsub_pb2_grpc.PublisherStub(channel),
        }

    @classmethod
    def create_channel(cls,
                       address='pubsub.googleapis.com:443',
                       credentials=None):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return google.api_core.grpc_helpers.create_channel(
            address,
            credentials=credentials,
            scopes=cls._OAUTH_SCOPES,
        )

    @property
    def create_topic(self):
        """Return the gRPC stub for the ``CreateTopic`` RPC.

        Creates the given topic with the given name. See the
        resource name rules.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].CreateTopic

    @property
    def update_topic(self):
        """Return the gRPC stub for the ``UpdateTopic`` RPC.

        Updates an existing topic. Note that certain properties of a
        topic are not modifiable.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].UpdateTopic

    @property
    def publish(self):
        """Return the gRPC stub for the ``Publish`` RPC.

        Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the
        topic does not exist. The message payload must not be empty; it must
        contain either a non-empty data field, or at least one attribute.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].Publish

    @property
    def get_topic(self):
        """Return the gRPC stub for the ``GetTopic`` RPC.

        Gets the configuration of a topic.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].GetTopic

    @property
    def list_topics(self):
        """Return the gRPC stub for the ``ListTopics`` RPC.

        Lists matching topics.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].ListTopics

    @property
    def list_topic_subscriptions(self):
        """Return the gRPC stub for the ``ListTopicSubscriptions`` RPC.

        Lists the names of the subscriptions on this topic.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].ListTopicSubscriptions

    @property
    def delete_topic(self):
        """Return the gRPC stub for the ``DeleteTopic`` RPC.

        Deletes the topic with the given name. Returns ``NOT_FOUND`` if the
        topic does not exist. After a topic is deleted, a new topic may be
        created with the same name; this is an entirely new topic with none of
        the old configuration or subscriptions. Existing subscriptions to this
        topic are not deleted, but their ``topic`` field is set to
        ``_deleted-topic_``.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['publisher_stub'].DeleteTopic

    @property
    def set_iam_policy(self):
        """Return the gRPC stub for the ``SetIamPolicy`` RPC.

        Sets the access control policy on the specified resource. Replaces any
        existing policy.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['iam_policy_stub'].SetIamPolicy

    @property
    def get_iam_policy(self):
        """Return the gRPC stub for the ``GetIamPolicy`` RPC.

        Gets the access control policy for a resource.
        Returns an empty policy if the resource exists and does not have a
        policy set.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['iam_policy_stub'].GetIamPolicy

    @property
    def test_iam_permissions(self):
        """Return the gRPC stub for the ``TestIamPermissions`` RPC.

        Returns permissions that a caller has on the specified resource.
        If the resource does not exist, this will return an empty set of
        permissions, not a NOT_FOUND error.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['iam_policy_stub'].TestIamPermissions
class SubscriberGrpcTransport(object):
    """gRPC transport class providing stubs for
    google.pubsub.v1 Subscriber API.

    The transport provides access to the raw gRPC stubs,
    which can be used to take advantage of advanced
    features of gRPC.
    """
    # The scopes needed to make gRPC calls to all of the methods defined
    # in this service.
    _OAUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/pubsub',
    )

    def __init__(self,
                 channel=None,
                 credentials=None,
                 address='pubsub.googleapis.com:443'):
        """Instantiate the transport class.

        Args:
            channel (grpc.Channel): A ``Channel`` instance through
                which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            address (str): The address where the service is hosted.

        Raises:
            ValueError: If both ``channel`` and ``credentials`` are given.
        """
        # If both `channel` and `credentials` are specified, raise an
        # exception (channels come with credentials baked in already).
        if channel is not None and credentials is not None:
            raise ValueError(
                'The `channel` and `credentials` arguments are mutually '
                'exclusive.', )

        # Create the channel.
        if channel is None:
            channel = self.create_channel(
                address=address,
                credentials=credentials,
            )

        # gRPC uses objects called "stubs" that are bound to the
        # channel and provide a basic method for each RPC.
        self._stubs = {
            'iam_policy_stub': iam_policy_pb2.IAMPolicyStub(channel),
            'subscriber_stub': pubsub_pb2_grpc.SubscriberStub(channel),
        }

    @classmethod
    def create_channel(cls,
                       address='pubsub.googleapis.com:443',
                       credentials=None):
        """Create and return a gRPC channel object.

        Args:
            address (str): The host for the channel to use.
            credentials (~.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.

        Returns:
            grpc.Channel: A gRPC channel object.
        """
        return google.api_core.grpc_helpers.create_channel(
            address,
            credentials=credentials,
            scopes=cls._OAUTH_SCOPES,
        )

    @property
    def create_subscription(self):
        """Return the gRPC stub for the ``CreateSubscription`` RPC.

        Creates a subscription to a given topic. See the
        resource name rules.
        If the subscription already exists, returns ``ALREADY_EXISTS``.
        If the corresponding topic doesn't exist, returns ``NOT_FOUND``.

        If the name is not provided in the request, the server will assign a
        random name for this subscription on the same project as the topic,
        conforming to the resource name format (see
        https://cloud.google.com/pubsub/docs/admin#resource_names).
        The generated name is populated in the returned Subscription object.
        Note that for REST API requests, you must specify a name in the
        request.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].CreateSubscription

    @property
    def get_subscription(self):
        """Return the gRPC stub for the ``GetSubscription`` RPC.

        Gets the configuration details of a subscription.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].GetSubscription

    @property
    def update_subscription(self):
        """Return the gRPC stub for the ``UpdateSubscription`` RPC.

        Updates an existing subscription. Note that certain properties of a
        subscription, such as its topic, are not modifiable.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].UpdateSubscription

    @property
    def list_subscriptions(self):
        """Return the gRPC stub for the ``ListSubscriptions`` RPC.

        Lists matching subscriptions.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].ListSubscriptions

    @property
    def delete_subscription(self):
        """Return the gRPC stub for the ``DeleteSubscription`` RPC.

        Deletes an existing subscription. All messages retained in the
        subscription are immediately dropped. Calls to ``Pull`` after deletion
        will return ``NOT_FOUND``. After a subscription is deleted, a new one
        may be created with the same name, but the new one has no association
        with the old subscription or its topic unless the same topic is
        specified.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].DeleteSubscription

    @property
    def modify_ack_deadline(self):
        """Return the gRPC stub for the ``ModifyAckDeadline`` RPC.

        Modifies the ack deadline for a specific message. This method is useful
        to indicate that more time is needed to process a message by the
        subscriber, or to make the message available for redelivery if the
        processing was interrupted. Note that this does not modify the
        subscription-level ``ackDeadlineSeconds`` used for subsequent messages.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].ModifyAckDeadline

    @property
    def acknowledge(self):
        """Return the gRPC stub for the ``Acknowledge`` RPC.

        Acknowledges the messages associated with the ``ack_ids`` in the
        ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant
        messages from the subscription.

        Acknowledging a message whose ack deadline has expired may succeed,
        but such a message may be redelivered later. Acknowledging a message
        more than once will not result in an error.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].Acknowledge

    @property
    def pull(self):
        """Return the gRPC stub for the ``Pull`` RPC.

        Pulls messages from the server. Returns an empty list if there are no
        messages available in the backlog. The server may return
        ``UNAVAILABLE`` if there are too many concurrent pull requests pending
        for the given subscription.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].Pull

    @property
    def streaming_pull(self):
        """Return the gRPC stub for the ``StreamingPull`` RPC.

        Establishes a stream with the server, which sends messages down to the
        client. The client streams acknowledgements and ack deadline
        modifications back to the server. The server will close the stream and
        return the status on any error. The server may close the stream with
        status ``UNAVAILABLE`` to reassign server-side resources, in which
        case, the client should re-establish the stream. Flow control can be
        achieved by configuring the underlying RPC channel.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].StreamingPull

    @property
    def modify_push_config(self):
        """Return the gRPC stub for the ``ModifyPushConfig`` RPC.

        Modifies the ``PushConfig`` for a specified subscription.

        This may be used to change a push subscription to a pull one
        (signified by an empty ``PushConfig``) or vice versa, or change the
        endpoint URL and other attributes of a push subscription. Messages
        will accumulate for delivery continuously through the call regardless
        of changes to the ``PushConfig``.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].ModifyPushConfig

    @property
    def list_snapshots(self):
        """Return the gRPC stub for the ``ListSnapshots`` RPC.

        Lists the existing snapshots.

        ALPHA: This feature is part of an alpha release. This API might be
        changed in backward-incompatible ways and is not recommended for
        production use. It is not subject to any SLA or deprecation policy.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].ListSnapshots

    @property
    def create_snapshot(self):
        """Return the gRPC stub for the ``CreateSnapshot`` RPC.

        Creates a snapshot from the requested subscription.

        ALPHA: This feature is part of an alpha release. This API might be
        changed in backward-incompatible ways and is not recommended for
        production use. It is not subject to any SLA or deprecation policy.
        If the snapshot already exists, returns ``ALREADY_EXISTS``.
        If the requested subscription doesn't exist, returns ``NOT_FOUND``.
        If the backlog in the subscription is too old -- and the resulting
        snapshot would expire in less than 1 hour -- then
        ``FAILED_PRECONDITION`` is returned.
        See also the ``Snapshot.expire_time`` field. If the name is not
        provided in the request, the server will assign a random name for this
        snapshot on the same project as the subscription, conforming to the
        resource name format (see
        https://cloud.google.com/pubsub/docs/admin#resource_names).
        The generated name is populated in the returned Snapshot object. Note
        that for REST API requests, you must specify a name in the request.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].CreateSnapshot

    @property
    def update_snapshot(self):
        """Return the gRPC stub for the ``UpdateSnapshot`` RPC.

        Updates an existing snapshot.

        ALPHA: This feature is part of an alpha release. This API might be
        changed in backward-incompatible ways and is not recommended for
        production use. It is not subject to any SLA or deprecation policy.
        Note that certain properties of a snapshot are not modifiable.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].UpdateSnapshot

    @property
    def delete_snapshot(self):
        """Return the gRPC stub for the ``DeleteSnapshot`` RPC.

        Removes an existing snapshot.

        ALPHA: This feature is part of an alpha release. This API might be
        changed in backward-incompatible ways and is not recommended for
        production use. It is not subject to any SLA or deprecation policy.
        When the snapshot is deleted, all messages retained in the snapshot
        are immediately dropped. After a snapshot is deleted, a new one may be
        created with the same name, but the new one has no association with
        the old snapshot or its subscription, unless the same subscription is
        specified.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].DeleteSnapshot

    @property
    def seek(self):
        """Return the gRPC stub for the ``Seek`` RPC.

        Seeks an existing subscription to a point in time or to a given
        snapshot, whichever is provided in the request.

        ALPHA: This feature is part of an alpha release. This API might be
        changed in backward-incompatible ways and is not recommended for
        production use. It is not subject to any SLA or deprecation policy.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['subscriber_stub'].Seek

    @property
    def set_iam_policy(self):
        """Return the gRPC stub for the ``SetIamPolicy`` RPC.

        Sets the access control policy on the specified resource. Replaces any
        existing policy.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['iam_policy_stub'].SetIamPolicy

    @property
    def get_iam_policy(self):
        """Return the gRPC stub for the ``GetIamPolicy`` RPC.

        Gets the access control policy for a resource.
        Returns an empty policy if the resource exists and does not have a
        policy set.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['iam_policy_stub'].GetIamPolicy

    @property
    def test_iam_permissions(self):
        """Return the gRPC stub for the ``TestIamPermissions`` RPC.

        Returns permissions that a caller has on the specified resource.
        If the resource does not exist, this will return an empty set of
        permissions, not a NOT_FOUND error.

        Returns:
            Callable: A callable which accepts the appropriate
            deserialized request object and returns a
            deserialized response object.
        """
        return self._stubs['iam_policy_stub'].TestIamPermissions
\x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 
\x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\";\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 
\x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xf7\x10\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.
google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*2\x9a\x07\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}By\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1b\x06proto3') + serialized_pb=_b('\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t\"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 
\x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse\"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf7\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"
=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRe
quest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\x92\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) +_MESSAGESTORAGEPOLICY = _descriptor.Descriptor( + name='MessageStoragePolicy', + full_name='google.pubsub.v1.MessageStoragePolicy', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='allowed_persistence_regions', full_name='google.pubsub.v1.MessageStoragePolicy.allowed_persistence_regions', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=221, + serialized_end=280, +) + + _TOPIC_LABELSENTRY = _descriptor.Descriptor( name='LabelsEntry', full_name='google.pubsub.v1.Topic.LabelsEntry', @@ -44,14 +75,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.pubsub.v1.Topic.LabelsEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -64,8 +95,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=297, - serialized_end=342, + serialized_start=431, + serialized_end=476, ) _TOPIC = _descriptor.Descriptor( @@ -81,14 +112,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='labels', full_name='google.pubsub.v1.Topic.labels', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='message_storage_policy', full_name='google.pubsub.v1.Topic.message_storage_policy', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -101,8 +139,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=221, - serialized_end=342, + serialized_start=283, + serialized_end=476, ) @@ -119,14 +157,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='value', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -139,8 +177,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=515, - serialized_end=564, + serialized_start=649, + serialized_end=698, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -156,28 +194,28 @@ has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='attributes', full_name='google.pubsub.v1.PubsubMessage.attributes', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='message_id', full_name='google.pubsub.v1.PubsubMessage.message_id', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='publish_time', full_name='google.pubsub.v1.PubsubMessage.publish_time', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -190,8 +228,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=345, - serialized_end=564, + serialized_start=479, 
+ serialized_end=698, ) @@ -208,7 +246,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -221,8 +259,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=566, - serialized_end=598, + serialized_start=700, + serialized_end=732, ) @@ -239,14 +277,14 @@ has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update_mask', full_name='google.pubsub.v1.UpdateTopicRequest.update_mask', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -259,8 +297,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=600, - serialized_end=709, + serialized_start=734, + serialized_end=843, ) @@ -277,14 +315,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='messages', full_name='google.pubsub.v1.PublishRequest.messages', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -297,8 +335,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=711, - serialized_end=793, + serialized_start=845, + serialized_end=927, ) @@ -315,7 +353,7 @@ has_default_value=False, default_value=[], message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -328,8 +366,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=795, - serialized_end=833, + serialized_start=929, + serialized_end=967, ) @@ -346,21 +384,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.pubsub.v1.ListTopicsRequest.page_size', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.pubsub.v1.ListTopicsRequest.page_token', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -373,8 +411,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=835, - serialized_end=910, + serialized_start=969, + serialized_end=1044, ) @@ -391,14 +429,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.pubsub.v1.ListTopicsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, 
file=DESCRIPTOR), ], extensions=[ ], @@ -411,8 +449,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=912, - serialized_end=998, + serialized_start=1046, + serialized_end=1132, ) @@ -429,21 +467,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_size', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_token', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -456,8 +494,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1000, - serialized_end=1085, + serialized_start=1134, + serialized_end=1219, ) @@ -474,14 +512,97 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1221, + serialized_end=1301, +) + + +_LISTTOPICSNAPSHOTSREQUEST = _descriptor.Descriptor( + name='ListTopicSnapshotsRequest', + full_name='google.pubsub.v1.ListTopicSnapshotsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='topic', full_name='google.pubsub.v1.ListTopicSnapshotsRequest.topic', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.pubsub.v1.ListTopicSnapshotsRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.pubsub.v1.ListTopicSnapshotsRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1303, + serialized_end=1384, +) + + +_LISTTOPICSNAPSHOTSRESPONSE = _descriptor.Descriptor( + name='ListTopicSnapshotsResponse', + full_name='google.pubsub.v1.ListTopicSnapshotsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshots', 
full_name='google.pubsub.v1.ListTopicSnapshotsResponse.snapshots', index=0, + number=1, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.pubsub.v1.ListTopicSnapshotsResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -494,8 +615,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1087, - serialized_end=1167, + serialized_start=1386, + serialized_end=1458, ) @@ -512,7 +633,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -525,8 +646,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1169, - serialized_end=1204, + serialized_start=1460, + serialized_end=1495, ) @@ -543,14 +664,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.pubsub.v1.Subscription.LabelsEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -563,8 +684,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=297, - serialized_end=342, + serialized_start=431, + 
serialized_end=476, ) _SUBSCRIPTION = _descriptor.Descriptor( @@ -580,49 +701,49 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='topic', full_name='google.pubsub.v1.Subscription.topic', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='push_config', full_name='google.pubsub.v1.Subscription.push_config', index=2, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ack_deadline_seconds', full_name='google.pubsub.v1.Subscription.ack_deadline_seconds', index=3, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='retain_acked_messages', full_name='google.pubsub.v1.Subscription.retain_acked_messages', index=4, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='message_retention_duration', full_name='google.pubsub.v1.Subscription.message_retention_duration', index=5, number=8, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='labels', full_name='google.pubsub.v1.Subscription.labels', index=6, number=9, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -635,8 +756,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1207, - serialized_end=1532, + serialized_start=1498, + serialized_end=1823, ) @@ -653,14 +774,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.pubsub.v1.PushConfig.AttributesEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -673,8 +794,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=515, - serialized_end=564, + serialized_start=649, + serialized_end=698, ) _PUSHCONFIG = _descriptor.Descriptor( @@ -690,14 +811,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='attributes', full_name='google.pubsub.v1.PushConfig.attributes', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, 
file=DESCRIPTOR), ], extensions=[ ], @@ -710,8 +831,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1535, - serialized_end=1687, + serialized_start=1826, + serialized_end=1978, ) @@ -728,14 +849,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='message', full_name='google.pubsub.v1.ReceivedMessage.message', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -748,8 +869,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1689, - serialized_end=1772, + serialized_start=1980, + serialized_end=2063, ) @@ -766,7 +887,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -779,8 +900,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1774, - serialized_end=1820, + serialized_start=2065, + serialized_end=2111, ) @@ -797,14 +918,14 @@ has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update_mask', full_name='google.pubsub.v1.UpdateSubscriptionRequest.update_mask', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -817,8 +938,8 @@ extension_ranges=[], oneofs=[ ], - 
serialized_start=1823, - serialized_end=1953, + serialized_start=2114, + serialized_end=2244, ) @@ -835,21 +956,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_size', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_token', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -862,8 +983,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1955, - serialized_end=2037, + serialized_start=2246, + serialized_end=2328, ) @@ -880,14 +1001,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.pubsub.v1.ListSubscriptionsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -900,8 +1021,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2039, - serialized_end=2146, + serialized_start=2330, + serialized_end=2437, 
) @@ -918,7 +1039,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -931,8 +1052,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2148, - serialized_end=2197, + serialized_start=2439, + serialized_end=2488, ) @@ -949,14 +1070,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='push_config', full_name='google.pubsub.v1.ModifyPushConfigRequest.push_config', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -969,8 +1090,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2199, - serialized_end=2297, + serialized_start=2490, + serialized_end=2588, ) @@ -987,21 +1108,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='return_immediately', full_name='google.pubsub.v1.PullRequest.return_immediately', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='max_messages', full_name='google.pubsub.v1.PullRequest.max_messages', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1014,8 +1135,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2299, - serialized_end=2384, + serialized_start=2590, + serialized_end=2675, ) @@ -1032,7 +1153,7 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1045,8 +1166,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2386, - serialized_end=2462, + serialized_start=2677, + serialized_end=2753, ) @@ -1063,21 +1184,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ack_ids', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids', index=1, number=4, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ack_deadline_seconds', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds', index=2, number=3, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1090,8 +1211,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2464, - serialized_end=2559, + serialized_start=2755, + serialized_end=2850, ) @@ -1108,14 +1229,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ack_ids', full_name='google.pubsub.v1.AcknowledgeRequest.ack_ids', index=1, number=2, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1128,8 +1249,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2561, - serialized_end=2620, + serialized_start=2852, + serialized_end=2911, ) @@ -1146,35 +1267,35 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.ack_ids', index=1, number=2, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='modify_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds', index=2, number=3, type=5, cpp_type=1, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='modify_deadline_ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids', index=3, number=4, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='stream_ack_deadline_seconds', 
full_name='google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds', index=4, number=5, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1187,8 +1308,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2623, - serialized_end=2787, + serialized_start=2914, + serialized_end=3078, ) @@ -1205,7 +1326,7 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1218,11 +1339,48 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2789, - serialized_end=2874, + serialized_start=3080, + serialized_end=3165, ) +_CREATESNAPSHOTREQUEST_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.pubsub.v1.CreateSnapshotRequest.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=431, + serialized_end=476, +) + _CREATESNAPSHOTREQUEST = _descriptor.Descriptor( name='CreateSnapshotRequest', full_name='google.pubsub.v1.CreateSnapshotRequest', @@ -1236,18 +1394,25 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='subscription', full_name='google.pubsub.v1.CreateSnapshotRequest.subscription', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='labels', full_name='google.pubsub.v1.CreateSnapshotRequest.labels', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], - nested_types=[], + nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY, ], enum_types=[ ], options=None, @@ -1256,8 +1421,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2876, - serialized_end=2935, + serialized_start=3168, + serialized_end=3343, ) @@ -1274,14 +1439,14 @@ has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update_mask', full_name='google.pubsub.v1.UpdateSnapshotRequest.update_mask', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1294,8 +1459,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2937, - serialized_end=3055, + serialized_start=3345, + serialized_end=3463, ) @@ -1312,14 +1477,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='value', full_name='google.pubsub.v1.Snapshot.LabelsEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1332,8 +1497,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=297, - serialized_end=342, + serialized_start=431, + serialized_end=476, ) _SNAPSHOT = _descriptor.Descriptor( @@ -1349,28 +1514,28 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='topic', full_name='google.pubsub.v1.Snapshot.topic', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='expire_time', full_name='google.pubsub.v1.Snapshot.expire_time', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='labels', full_name='google.pubsub.v1.Snapshot.labels', index=3, number=4, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1383,8 +1548,39 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3058, - serialized_end=3249, + serialized_start=3466, + serialized_end=3657, +) + + +_GETSNAPSHOTREQUEST = _descriptor.Descriptor( + name='GetSnapshotRequest', + full_name='google.pubsub.v1.GetSnapshotRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='snapshot', full_name='google.pubsub.v1.GetSnapshotRequest.snapshot', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=3659, + serialized_end=3697, ) @@ -1401,21 +1597,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_size', full_name='google.pubsub.v1.ListSnapshotsRequest.page_size', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='page_token', full_name='google.pubsub.v1.ListSnapshotsRequest.page_token', index=2, number=3, 
type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1428,8 +1624,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3251, - serialized_end=3329, + serialized_start=3699, + serialized_end=3777, ) @@ -1446,14 +1642,14 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='next_page_token', full_name='google.pubsub.v1.ListSnapshotsResponse.next_page_token', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1466,8 +1662,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3331, - serialized_end=3426, + serialized_start=3779, + serialized_end=3874, ) @@ -1484,7 +1680,7 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1497,8 +1693,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3428, - serialized_end=3469, + serialized_start=3876, + serialized_end=3917, ) @@ -1515,21 +1711,21 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='time', full_name='google.pubsub.v1.SeekRequest.time', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, 
default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='snapshot', full_name='google.pubsub.v1.SeekRequest.snapshot', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -1545,8 +1741,8 @@ name='target', full_name='google.pubsub.v1.SeekRequest.target', index=0, containing_type=None, fields=[]), ], - serialized_start=3471, - serialized_end=3580, + serialized_start=3919, + serialized_end=4028, ) @@ -1569,12 +1765,13 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3582, - serialized_end=3596, + serialized_start=4030, + serialized_end=4044, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC _TOPIC.fields_by_name['labels'].message_type = _TOPIC_LABELSENTRY +_TOPIC.fields_by_name['message_storage_policy'].message_type = _MESSAGESTORAGEPOLICY _PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE _PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY _PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP @@ -1595,6 +1792,8 @@ _MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG _PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE _STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_CREATESNAPSHOTREQUEST_LABELSENTRY.containing_type = _CREATESNAPSHOTREQUEST +_CREATESNAPSHOTREQUEST.fields_by_name['labels'].message_type = _CREATESNAPSHOTREQUEST_LABELSENTRY _UPDATESNAPSHOTREQUEST.fields_by_name['snapshot'].message_type = _SNAPSHOT _UPDATESNAPSHOTREQUEST.fields_by_name['update_mask'].message_type = 
google_dot_protobuf_dot_field__mask__pb2._FIELDMASK _SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT @@ -1608,6 +1807,7 @@ _SEEKREQUEST.oneofs_by_name['target'].fields.append( _SEEKREQUEST.fields_by_name['snapshot']) _SEEKREQUEST.fields_by_name['snapshot'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] +DESCRIPTOR.message_types_by_name['MessageStoragePolicy'] = _MESSAGESTORAGEPOLICY DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST @@ -1618,6 +1818,8 @@ DESCRIPTOR.message_types_by_name['ListTopicsResponse'] = _LISTTOPICSRESPONSE DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsRequest'] = _LISTTOPICSUBSCRIPTIONSREQUEST DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsResponse'] = _LISTTOPICSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name['ListTopicSnapshotsRequest'] = _LISTTOPICSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name['ListTopicSnapshotsResponse'] = _LISTTOPICSNAPSHOTSRESPONSE DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG @@ -1637,6 +1839,7 @@ DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST DESCRIPTOR.message_types_by_name['UpdateSnapshotRequest'] = _UPDATESNAPSHOTREQUEST DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT +DESCRIPTOR.message_types_by_name['GetSnapshotRequest'] = _GETSNAPSHOTREQUEST DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST @@ -1644,6 +1847,27 @@ DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) +MessageStoragePolicy = 
_reflection.GeneratedProtocolMessageType('MessageStoragePolicy', (_message.Message,), dict( + DESCRIPTOR = _MESSAGESTORAGEPOLICY, + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + , + __doc__ = """A message storage policy. + + + Attributes: + allowed_persistence_regions: + The list of GCP regions where messages that are published to + the topic may be persisted in storage. Messages published by + publishers running in non-allowed GCP regions (or running + outside of GCP altogether) will be routed for storage in one + of the allowed regions. An empty list indicates a + misconfiguration at the project or organization level, which + will result in all Publish operations failing. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) + )) +_sym_db.RegisterMessage(MessageStoragePolicy) + Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( @@ -1669,6 +1893,14 @@ and it must not start with ``"goog"``. labels: User labels. + message_storage_policy: + Policy constraining how messages published to the topic may be + stored. It is determined when the topic is created based on + the policy configured at the project level. It must not be set + by the caller in the request to CreateTopic or to UpdateTopic. + This field will be populated in the responses for GetTopic, + CreateTopic, and UpdateTopic: if not present in the response, + then no constraints are in effect. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) )) @@ -1737,10 +1969,14 @@ Attributes: topic: - The topic to update. + The updated topic object. update_mask: Indicates which fields in the provided topic to update. Must - be specified and non-empty. + be specified and non-empty. 
Note that if ``update_mask`` + contains "message\_storage\_policy" then the new value will be + determined based on the policy configured at the project or + organization level. The ``message_storage_policy`` must not be + set in the ``topic`` provided above. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) )) @@ -1865,6 +2101,54 @@ )) _sym_db.RegisterMessage(ListTopicSubscriptionsResponse) +ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSnapshotsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSNAPSHOTSREQUEST, + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + , + __doc__ = """Request for the ``ListTopicSnapshots`` method. ALPHA: This feature is + part of an alpha release. This API might be changed in + backward-incompatible ways and is not recommended for production use. It + is not subject to any SLA or deprecation policy. + + + Attributes: + topic: + The name of the topic that snapshots are attached to. Format + is ``projects/{project}/topics/{topic}``. + page_size: + Maximum number of snapshot names to return. + page_token: + The value returned by the last ``ListTopicSnapshotsResponse``; + indicates that this is a continuation of a prior + ``ListTopicSnapshots`` call, and that the system should return + the next page of data. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) + )) +_sym_db.RegisterMessage(ListTopicSnapshotsRequest) + +ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSnapshotsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTTOPICSNAPSHOTSRESPONSE, + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + , + __doc__ = """Response for the ``ListTopicSnapshots`` method. ALPHA: This feature is + part of an alpha release. This API might be changed in + backward-incompatible ways and is not recommended for production use. It + is not subject to any SLA or deprecation policy. 
+ + + Attributes: + snapshots: + The names of the snapshots that match the request. + next_page_token: + If not empty, indicates that there may be more snapshots that + match the request; this value should be passed in a new + ``ListTopicSnapshotsRequest`` to get more snapshots. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) + )) +_sym_db.RegisterMessage(ListTopicSnapshotsResponse) + DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( DESCRIPTOR = _DELETETOPICREQUEST, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' @@ -1922,19 +2206,25 @@ best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call ``ModifyAckDeadline`` - with the corresponding ``ack_id`` if using pull. The minimum - custom deadline you can specify is 10 seconds. The maximum - custom deadline you can specify is 600 seconds (10 minutes). - If this parameter is 0, a default value of 10 seconds is used. - For push delivery, this value is also used to set the request - timeout for the call to the push endpoint. If the subscriber - never acknowledges the message, the Pub/Sub system will - eventually redeliver the message. + with the corresponding ``ack_id`` if using non-streaming pull + or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming pull. + The minimum custom deadline you can specify is 10 seconds. The + maximum custom deadline you can specify is 600 seconds (10 + minutes). If this parameter is 0, a default value of 10 + seconds is used. For push delivery, this value is also used + to set the request timeout for the call to the push endpoint. + If the subscriber never acknowledges the message, the Pub/Sub + system will eventually redeliver the message. retain_acked_messages: Indicates whether to retain acknowledged messages. 
If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of - the ``message_retention_duration`` window. + the ``message_retention_duration`` window. ALPHA: This feature + is part of an alpha release. This API might be changed in + backward-incompatible ways and is not recommended for + production use. It is not subject to any SLA or deprecation + policy. message_retention_duration: How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is @@ -1942,7 +2232,10 @@ also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. + minutes. ALPHA: This feature is part of an alpha release. This + API might be changed in backward-incompatible ways and is not + recommended for production use. It is not subject to any SLA + or deprecation policy. labels: User labels. """, @@ -2120,7 +2413,8 @@ ``pushConfig`` indicates that the Pub/Sub system should stop pushing messages from the given subscription and allow messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` is not called. + the subscription if ``Pull`` or ``StreamingPull`` is not + called. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) )) @@ -2292,10 +2586,20 @@ _sym_db.RegisterMessage(StreamingPullResponse) CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _CREATESNAPSHOTREQUEST_LABELSENTRY, + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) + )) + , DESCRIPTOR = _CREATESNAPSHOTREQUEST, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """Request for the ``CreateSnapshot`` method. + __doc__ = """Request for the ``CreateSnapshot`` method. ALPHA: This feature is part + of an alpha release. This API might be changed in backward-incompatible + ways and is not recommended for production use. It is not subject to any + SLA or deprecation policy. Attributes: @@ -2316,21 +2620,27 @@ published to the subscription's topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. + labels: + User labels. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) )) _sym_db.RegisterMessage(CreateSnapshotRequest) +_sym_db.RegisterMessage(CreateSnapshotRequest.LabelsEntry) UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict( DESCRIPTOR = _UPDATESNAPSHOTREQUEST, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """Request for the UpdateSnapshot method. + __doc__ = """Request for the UpdateSnapshot method. ALPHA: This feature is part of an + alpha release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. Attributes: snapshot: - The updated snpashot object. 
+ The updated snapshot object. update_mask: Indicates which fields in the provided snapshot to update. Must be specified and non-empty. @@ -2350,7 +2660,10 @@ DESCRIPTOR = _SNAPSHOT, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """A snapshot resource. + __doc__ = """A snapshot resource. ALPHA: This feature is part of an alpha release. + This API might be changed in backward-incompatible ways and is not + recommended for production use. It is not subject to any SLA or + deprecation policy. Attributes: @@ -2369,7 +2682,9 @@ consider a subscription whose oldest unacked message is 3 days old. If a snapshot is created from this subscription, the snapshot -- which will always capture this 3-day-old backlog - as long as the snapshot exists -- will expire in 4 days. + as long as the snapshot exists -- will expire in 4 days. The + service will refuse to create a snapshot that would expire in + less than 1 hour after creation. labels: User labels. """, @@ -2378,11 +2693,33 @@ _sym_db.RegisterMessage(Snapshot) _sym_db.RegisterMessage(Snapshot.LabelsEntry) +GetSnapshotRequest = _reflection.GeneratedProtocolMessageType('GetSnapshotRequest', (_message.Message,), dict( + DESCRIPTOR = _GETSNAPSHOTREQUEST, + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + , + __doc__ = """Request for the GetSnapshot method. ALPHA: This feature is part of an + alpha release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. + + + Attributes: + snapshot: + The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. 
+ """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) + )) +_sym_db.RegisterMessage(GetSnapshotRequest) + ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( DESCRIPTOR = _LISTSNAPSHOTSREQUEST, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """Request for the ``ListSnapshots`` method. + __doc__ = """Request for the ``ListSnapshots`` method. ALPHA: This feature is part of + an alpha release. This API might be changed in backward-incompatible + ways and is not recommended for production use. It is not subject to any + SLA or deprecation policy. Attributes: @@ -2405,7 +2742,10 @@ DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """Response for the ``ListSnapshots`` method. + __doc__ = """Response for the ``ListSnapshots`` method. ALPHA: This feature is part + of an alpha release. This API might be changed in backward-incompatible + ways and is not recommended for production use. It is not subject to any + SLA or deprecation policy. Attributes: @@ -2424,7 +2764,10 @@ DESCRIPTOR = _DELETESNAPSHOTREQUEST, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """Request for the ``DeleteSnapshot`` method. + __doc__ = """Request for the ``DeleteSnapshot`` method. ALPHA: This feature is part + of an alpha release. This API might be changed in backward-incompatible + ways and is not recommended for production use. It is not subject to any + SLA or deprecation policy. Attributes: @@ -2440,7 +2783,10 @@ DESCRIPTOR = _SEEKREQUEST, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """Request for the ``Seek`` method. + __doc__ = """Request for the ``Seek`` method. ALPHA: This feature is part of an alpha + release. This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any SLA or + deprecation policy. 
Attributes: @@ -2477,7 +2823,7 @@ DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1')) _TOPIC_LABELSENTRY.has_options = True _TOPIC_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True @@ -2486,1109 +2832,254 @@ _SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _PUSHCONFIG_ATTRIBUTESENTRY.has_options = True _PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_CREATESNAPSHOTREQUEST_LABELSENTRY.has_options = True +_CREATESNAPSHOTREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) _SNAPSHOT_LABELSENTRY.has_options = True _SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) -try: - # THESE ELEMENTS WILL BE DEPRECATED. - # Please use the generated *_pb2_grpc.py files instead. - import grpc - from grpc.beta import implementations as beta_implementations - from grpc.beta import interfaces as beta_interfaces - from grpc.framework.common import cardinality - from grpc.framework.interfaces.face import utilities as face_utilities - - - class SubscriberStub(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method. - """ - - def __init__(self, channel): - """Constructor. 
- - Args: - channel: A grpc.Channel. - """ - self.CreateSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/CreateSubscription', - request_serializer=Subscription.SerializeToString, - response_deserializer=Subscription.FromString, - ) - self.GetSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/GetSubscription', - request_serializer=GetSubscriptionRequest.SerializeToString, - response_deserializer=Subscription.FromString, - ) - self.UpdateSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/UpdateSubscription', - request_serializer=UpdateSubscriptionRequest.SerializeToString, - response_deserializer=Subscription.FromString, - ) - self.ListSubscriptions = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ListSubscriptions', - request_serializer=ListSubscriptionsRequest.SerializeToString, - response_deserializer=ListSubscriptionsResponse.FromString, - ) - self.DeleteSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/DeleteSubscription', - request_serializer=DeleteSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ModifyAckDeadline = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ModifyAckDeadline', - request_serializer=ModifyAckDeadlineRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Acknowledge = channel.unary_unary( - '/google.pubsub.v1.Subscriber/Acknowledge', - request_serializer=AcknowledgeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Pull = channel.unary_unary( - '/google.pubsub.v1.Subscriber/Pull', - request_serializer=PullRequest.SerializeToString, - response_deserializer=PullResponse.FromString, - ) - self.StreamingPull = channel.stream_stream( - '/google.pubsub.v1.Subscriber/StreamingPull', - request_serializer=StreamingPullRequest.SerializeToString, - 
response_deserializer=StreamingPullResponse.FromString, - ) - self.ModifyPushConfig = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ModifyPushConfig', - request_serializer=ModifyPushConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ListSnapshots = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ListSnapshots', - request_serializer=ListSnapshotsRequest.SerializeToString, - response_deserializer=ListSnapshotsResponse.FromString, - ) - self.CreateSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/CreateSnapshot', - request_serializer=CreateSnapshotRequest.SerializeToString, - response_deserializer=Snapshot.FromString, - ) - self.UpdateSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/UpdateSnapshot', - request_serializer=UpdateSnapshotRequest.SerializeToString, - response_deserializer=Snapshot.FromString, - ) - self.DeleteSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/DeleteSnapshot', - request_serializer=DeleteSnapshotRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Seek = channel.unary_unary( - '/google.pubsub.v1.Subscriber/Seek', - request_serializer=SeekRequest.SerializeToString, - response_deserializer=SeekResponse.FromString, - ) - - - class SubscriberServicer(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method. - """ - - def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). 
- The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetSubscription(self, request, context): - """Gets the configuration details of a subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateSubscription(self, request, context): - """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - NOTE: The style guide requires body: "subscription" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListSubscriptions(self, request, context): - """Lists matching subscriptions. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteSubscription(self, request, context): - """Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ModifyAckDeadline(self, request, context): - """Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Acknowledge(self, request, context): - """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Pull(self, request, context): - """Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def StreamingPull(self, request_iterator, context): - """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will - respond with UNIMPLEMENTED errors unless you have been invited to test - this feature. Contact cloud-pubsub@google.com with any questions. 
- - Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `OK` to reassign - server-side resources, in which case, the client should re-establish the - stream. `UNAVAILABLE` may also be returned in the case of a transient error - (e.g., a server restart). These should also be retried by the client. Flow - control can be achieved by configuring the underlying RPC channel. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ModifyPushConfig(self, request, context): - """Modifies the `PushConfig` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListSnapshots(self, request, context): - """Lists the existing snapshots. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription. - If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. 
- - If the name is not provided in the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). - The generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateSnapshot(self, request, context): - """Updates an existing snapshot. Note that certain properties of a snapshot - are not modifiable. - NOTE: The style guide requires body: "snapshot" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteSnapshot(self, request, context): - """Removes an existing snapshot. All messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Seek(self, request, context): - """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_SubscriberServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateSubscription': grpc.unary_unary_rpc_method_handler( - servicer.CreateSubscription, - request_deserializer=Subscription.FromString, - response_serializer=Subscription.SerializeToString, - ), - 'GetSubscription': grpc.unary_unary_rpc_method_handler( - servicer.GetSubscription, - request_deserializer=GetSubscriptionRequest.FromString, - response_serializer=Subscription.SerializeToString, - ), - 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( - servicer.UpdateSubscription, - request_deserializer=UpdateSubscriptionRequest.FromString, - response_serializer=Subscription.SerializeToString, - ), - 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( - servicer.ListSubscriptions, - request_deserializer=ListSubscriptionsRequest.FromString, - response_serializer=ListSubscriptionsResponse.SerializeToString, - ), - 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( - servicer.DeleteSubscription, - request_deserializer=DeleteSubscriptionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( - servicer.ModifyAckDeadline, - request_deserializer=ModifyAckDeadlineRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'Acknowledge': grpc.unary_unary_rpc_method_handler( - servicer.Acknowledge, - request_deserializer=AcknowledgeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'Pull': grpc.unary_unary_rpc_method_handler( - servicer.Pull, - request_deserializer=PullRequest.FromString, - response_serializer=PullResponse.SerializeToString, - ), - 'StreamingPull': 
grpc.stream_stream_rpc_method_handler( - servicer.StreamingPull, - request_deserializer=StreamingPullRequest.FromString, - response_serializer=StreamingPullResponse.SerializeToString, - ), - 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( - servicer.ModifyPushConfig, - request_deserializer=ModifyPushConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ListSnapshots': grpc.unary_unary_rpc_method_handler( - servicer.ListSnapshots, - request_deserializer=ListSnapshotsRequest.FromString, - response_serializer=ListSnapshotsResponse.SerializeToString, - ), - 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.CreateSnapshot, - request_deserializer=CreateSnapshotRequest.FromString, - response_serializer=Snapshot.SerializeToString, - ), - 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.UpdateSnapshot, - request_deserializer=UpdateSnapshotRequest.FromString, - response_serializer=Snapshot.SerializeToString, - ), - 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.DeleteSnapshot, - request_deserializer=DeleteSnapshotRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'Seek': grpc.unary_unary_rpc_method_handler( - servicer.Seek, - request_deserializer=SeekRequest.FromString, - response_serializer=SeekResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.pubsub.v1.Subscriber', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class PublisherStub(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/CreateTopic', - request_serializer=Topic.SerializeToString, - response_deserializer=Topic.FromString, - ) - self.UpdateTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/UpdateTopic', - request_serializer=UpdateTopicRequest.SerializeToString, - response_deserializer=Topic.FromString, - ) - self.Publish = channel.unary_unary( - '/google.pubsub.v1.Publisher/Publish', - request_serializer=PublishRequest.SerializeToString, - response_deserializer=PublishResponse.FromString, - ) - self.GetTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/GetTopic', - request_serializer=GetTopicRequest.SerializeToString, - response_deserializer=Topic.FromString, - ) - self.ListTopics = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopics', - request_serializer=ListTopicsRequest.SerializeToString, - response_deserializer=ListTopicsResponse.FromString, - ) - self.ListTopicSubscriptions = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSubscriptions', - request_serializer=ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=ListTopicSubscriptionsResponse.FromString, - ) - self.DeleteTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/DeleteTopic', - request_serializer=DeleteTopicRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - - class PublisherServicer(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def CreateTopic(self, request, context): - """Creates the given topic with the given name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateTopic(self, request, context): - """Updates an existing topic. Note that certain properties of a topic are not - modifiable. 
Options settings follow the style guide: - NOTE: The style guide requires body: "topic" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetTopic(self, request, context): - """Gets the configuration of a topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListTopics(self, request, context): - """Lists matching topics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListTopicSubscriptions(self, request, context): - """Lists the name of the subscriptions for this topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. 
Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - - def add_PublisherServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateTopic': grpc.unary_unary_rpc_method_handler( - servicer.CreateTopic, - request_deserializer=Topic.FromString, - response_serializer=Topic.SerializeToString, - ), - 'UpdateTopic': grpc.unary_unary_rpc_method_handler( - servicer.UpdateTopic, - request_deserializer=UpdateTopicRequest.FromString, - response_serializer=Topic.SerializeToString, - ), - 'Publish': grpc.unary_unary_rpc_method_handler( - servicer.Publish, - request_deserializer=PublishRequest.FromString, - response_serializer=PublishResponse.SerializeToString, - ), - 'GetTopic': grpc.unary_unary_rpc_method_handler( - servicer.GetTopic, - request_deserializer=GetTopicRequest.FromString, - response_serializer=Topic.SerializeToString, - ), - 'ListTopics': grpc.unary_unary_rpc_method_handler( - servicer.ListTopics, - request_deserializer=ListTopicsRequest.FromString, - response_serializer=ListTopicsResponse.SerializeToString, - ), - 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSubscriptions, - request_deserializer=ListTopicSubscriptionsRequest.FromString, - response_serializer=ListTopicSubscriptionsResponse.SerializeToString, - ), - 'DeleteTopic': grpc.unary_unary_rpc_method_handler( - servicer.DeleteTopic, - request_deserializer=DeleteTopicRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.pubsub.v1.Publisher', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) - - - class BetaSubscriberServicer(object): - """The Beta API is deprecated for 0.15.0 and later. 
- - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method. - """ - def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def GetSubscription(self, request, context): - """Gets the configuration details of a subscription. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def UpdateSubscription(self, request, context): - """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - NOTE: The style guide requires body: "subscription" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListSubscriptions(self, request, context): - """Lists matching subscriptions. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteSubscription(self, request, context): - """Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. 
Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ModifyAckDeadline(self, request, context): - """Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Acknowledge(self, request, context): - """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Pull(self, request, context): - """Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def StreamingPull(self, request_iterator, context): - """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will - respond with UNIMPLEMENTED errors unless you have been invited to test - this feature. Contact cloud-pubsub@google.com with any questions. - - Establishes a stream with the server, which sends messages down to the - client. 
The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `OK` to reassign - server-side resources, in which case, the client should re-establish the - stream. `UNAVAILABLE` may also be returned in the case of a transient error - (e.g., a server restart). These should also be retried by the client. Flow - control can be achieved by configuring the underlying RPC channel. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ModifyPushConfig(self, request, context): - """Modifies the `PushConfig` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListSnapshots(self, request, context): - """Lists the existing snapshots. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription. - If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). - The generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the request. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def UpdateSnapshot(self, request, context): - """Updates an existing snapshot. 
Note that certain properties of a snapshot - are not modifiable. - NOTE: The style guide requires body: "snapshot" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteSnapshot(self, request, context): - """Removes an existing snapshot. All messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Seek(self, request, context): - """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaSubscriberStub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method. - """ - def CreateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates a subscription to a given topic. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). 
- The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - """ - raise NotImplementedError() - CreateSubscription.future = None - def GetSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Gets the configuration details of a subscription. - """ - raise NotImplementedError() - GetSubscription.future = None - def UpdateSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - NOTE: The style guide requires body: "subscription" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - raise NotImplementedError() - UpdateSubscription.future = None - def ListSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists matching subscriptions. - """ - raise NotImplementedError() - ListSubscriptions.future = None - def DeleteSubscription(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - """ - raise NotImplementedError() - DeleteSubscription.future = None - def ModifyAckDeadline(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Modifies the ack deadline for a specific message. 
This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ - raise NotImplementedError() - ModifyAckDeadline.future = None - def Acknowledge(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ - raise NotImplementedError() - Acknowledge.future = None - def Pull(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ - raise NotImplementedError() - Pull.future = None - def StreamingPull(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): - """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will - respond with UNIMPLEMENTED errors unless you have been invited to test - this feature. Contact cloud-pubsub@google.com with any questions. - - Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `OK` to reassign - server-side resources, in which case, the client should re-establish the - stream. 
`UNAVAILABLE` may also be returned in the case of a transient error - (e.g., a server restart). These should also be retried by the client. Flow - control can be achieved by configuring the underlying RPC channel. - """ - raise NotImplementedError() - def ModifyPushConfig(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Modifies the `PushConfig` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ - raise NotImplementedError() - ModifyPushConfig.future = None - def ListSnapshots(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists the existing snapshots. - """ - raise NotImplementedError() - ListSnapshots.future = None - def CreateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates a snapshot from the requested subscription. - If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). - The generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the request. - """ - raise NotImplementedError() - CreateSnapshot.future = None - def UpdateSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Updates an existing snapshot. Note that certain properties of a snapshot - are not modifiable. 
- NOTE: The style guide requires body: "snapshot" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - raise NotImplementedError() - UpdateSnapshot.future = None - def DeleteSnapshot(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Removes an existing snapshot. All messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ - raise NotImplementedError() - DeleteSnapshot.future = None - def Seek(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. - """ - raise NotImplementedError() - Seek.future = None - - - def beta_create_Subscriber_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.FromString, - ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.FromString, - ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, - ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.FromString, - ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.FromString, - ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.FromString, - ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.FromString, - ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.FromString, - ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.FromString, - ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.FromString, - ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.FromString, - ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.FromString, - ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.FromString, - ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.FromString, - ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.FromString, - } - response_serializers = { - ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.SerializeToString, - ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, - ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.pubsub.v1.Subscriber', 
'GetSubscription'): Subscription.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.SerializeToString, - ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.SerializeToString, - ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.SerializeToString, - ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.SerializeToString, - ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.SerializeToString, - } - method_implementations = { - ('google.pubsub.v1.Subscriber', 'Acknowledge'): face_utilities.unary_unary_inline(servicer.Acknowledge), - ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): face_utilities.unary_unary_inline(servicer.CreateSnapshot), - ('google.pubsub.v1.Subscriber', 'CreateSubscription'): face_utilities.unary_unary_inline(servicer.CreateSubscription), - ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): face_utilities.unary_unary_inline(servicer.DeleteSnapshot), - ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): face_utilities.unary_unary_inline(servicer.DeleteSubscription), - ('google.pubsub.v1.Subscriber', 'GetSubscription'): face_utilities.unary_unary_inline(servicer.GetSubscription), - ('google.pubsub.v1.Subscriber', 'ListSnapshots'): face_utilities.unary_unary_inline(servicer.ListSnapshots), - ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): face_utilities.unary_unary_inline(servicer.ListSubscriptions), - ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): face_utilities.unary_unary_inline(servicer.ModifyAckDeadline), - ('google.pubsub.v1.Subscriber', 
'ModifyPushConfig'): face_utilities.unary_unary_inline(servicer.ModifyPushConfig), - ('google.pubsub.v1.Subscriber', 'Pull'): face_utilities.unary_unary_inline(servicer.Pull), - ('google.pubsub.v1.Subscriber', 'Seek'): face_utilities.unary_unary_inline(servicer.Seek), - ('google.pubsub.v1.Subscriber', 'StreamingPull'): face_utilities.stream_stream_inline(servicer.StreamingPull), - ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): face_utilities.unary_unary_inline(servicer.UpdateSnapshot), - ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): face_utilities.unary_unary_inline(servicer.UpdateSubscription), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_Subscriber_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.pubsub.v1.Subscriber', 'Acknowledge'): AcknowledgeRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): CreateSnapshotRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.SerializeToString, - ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): DeleteSnapshotRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): DeleteSubscriptionRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'GetSubscription'): GetSubscriptionRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): ModifyAckDeadlineRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): ModifyPushConfigRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'Pull'): PullRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'Seek'): SeekRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): UpdateSnapshotRequest.SerializeToString, - ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): UpdateSubscriptionRequest.SerializeToString, - } - response_deserializers = { - ('google.pubsub.v1.Subscriber', 'Acknowledge'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.pubsub.v1.Subscriber', 'CreateSnapshot'): Snapshot.FromString, - ('google.pubsub.v1.Subscriber', 'CreateSubscription'): Subscription.FromString, - ('google.pubsub.v1.Subscriber', 'DeleteSnapshot'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.pubsub.v1.Subscriber', 'DeleteSubscription'): 
google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.pubsub.v1.Subscriber', 'GetSubscription'): Subscription.FromString, - ('google.pubsub.v1.Subscriber', 'ListSnapshots'): ListSnapshotsResponse.FromString, - ('google.pubsub.v1.Subscriber', 'ListSubscriptions'): ListSubscriptionsResponse.FromString, - ('google.pubsub.v1.Subscriber', 'ModifyAckDeadline'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.pubsub.v1.Subscriber', 'ModifyPushConfig'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.pubsub.v1.Subscriber', 'Pull'): PullResponse.FromString, - ('google.pubsub.v1.Subscriber', 'Seek'): SeekResponse.FromString, - ('google.pubsub.v1.Subscriber', 'StreamingPull'): StreamingPullResponse.FromString, - ('google.pubsub.v1.Subscriber', 'UpdateSnapshot'): Snapshot.FromString, - ('google.pubsub.v1.Subscriber', 'UpdateSubscription'): Subscription.FromString, - } - cardinalities = { - 'Acknowledge': cardinality.Cardinality.UNARY_UNARY, - 'CreateSnapshot': cardinality.Cardinality.UNARY_UNARY, - 'CreateSubscription': cardinality.Cardinality.UNARY_UNARY, - 'DeleteSnapshot': cardinality.Cardinality.UNARY_UNARY, - 'DeleteSubscription': cardinality.Cardinality.UNARY_UNARY, - 'GetSubscription': cardinality.Cardinality.UNARY_UNARY, - 'ListSnapshots': cardinality.Cardinality.UNARY_UNARY, - 'ListSubscriptions': cardinality.Cardinality.UNARY_UNARY, - 'ModifyAckDeadline': cardinality.Cardinality.UNARY_UNARY, - 'ModifyPushConfig': cardinality.Cardinality.UNARY_UNARY, - 'Pull': cardinality.Cardinality.UNARY_UNARY, - 'Seek': cardinality.Cardinality.UNARY_UNARY, - 'StreamingPull': cardinality.Cardinality.STREAM_STREAM, - 'UpdateSnapshot': cardinality.Cardinality.UNARY_UNARY, - 'UpdateSubscription': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, 
thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Subscriber', cardinalities, options=stub_options) - - - class BetaPublisherServicer(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - def CreateTopic(self, request, context): - """Creates the given topic with the given name. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def UpdateTopic(self, request, context): - """Updates an existing topic. Note that certain properties of a topic are not - modifiable. Options settings follow the style guide: - NOTE: The style guide requires body: "topic" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def GetTopic(self, request, context): - """Gets the configuration of a topic. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListTopics(self, request, context): - """Lists matching topics. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def ListTopicSubscriptions(self, request, context): - """Lists the name of the subscriptions for this topic. 
- """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ - context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) - - - class BetaPublisherStub(object): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This class was generated - only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.""" - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - def CreateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Creates the given topic with the given name. - """ - raise NotImplementedError() - CreateTopic.future = None - def UpdateTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Updates an existing topic. Note that certain properties of a topic are not - modifiable. Options settings follow the style guide: - NOTE: The style guide requires body: "topic" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - raise NotImplementedError() - UpdateTopic.future = None - def Publish(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. 
- """ - raise NotImplementedError() - Publish.future = None - def GetTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Gets the configuration of a topic. - """ - raise NotImplementedError() - GetTopic.future = None - def ListTopics(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists matching topics. - """ - raise NotImplementedError() - ListTopics.future = None - def ListTopicSubscriptions(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Lists the name of the subscriptions for this topic. - """ - raise NotImplementedError() - ListTopicSubscriptions.future = None - def DeleteTopic(self, request, timeout, metadata=None, with_call=False, protocol_options=None): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ - raise NotImplementedError() - DeleteTopic.future = None - - - def beta_create_Publisher_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. 
This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_deserializers = { - ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, - ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.FromString, - ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.FromString, - ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.FromString, - ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.FromString, - ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.FromString, - ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.FromString, - } - response_serializers = { - ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, - ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.SerializeToString, - ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.SerializeToString, - ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.SerializeToString, - ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.SerializeToString, - ('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.SerializeToString, - } - method_implementations = { - ('google.pubsub.v1.Publisher', 'CreateTopic'): face_utilities.unary_unary_inline(servicer.CreateTopic), - ('google.pubsub.v1.Publisher', 'DeleteTopic'): face_utilities.unary_unary_inline(servicer.DeleteTopic), - ('google.pubsub.v1.Publisher', 'GetTopic'): face_utilities.unary_unary_inline(servicer.GetTopic), - ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): face_utilities.unary_unary_inline(servicer.ListTopicSubscriptions), - ('google.pubsub.v1.Publisher', 'ListTopics'): face_utilities.unary_unary_inline(servicer.ListTopics), - ('google.pubsub.v1.Publisher', 'Publish'): face_utilities.unary_unary_inline(servicer.Publish), - 
('google.pubsub.v1.Publisher', 'UpdateTopic'): face_utilities.unary_unary_inline(servicer.UpdateTopic), - } - server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout) - return beta_implementations.server(method_implementations, options=server_options) - - - def beta_create_Publisher_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None): - """The Beta API is deprecated for 0.15.0 and later. - - It is recommended to use the GA API (classes and functions in this - file not marked beta) for all further purposes. This function was - generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0""" - request_serializers = { - ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.SerializeToString, - ('google.pubsub.v1.Publisher', 'DeleteTopic'): DeleteTopicRequest.SerializeToString, - ('google.pubsub.v1.Publisher', 'GetTopic'): GetTopicRequest.SerializeToString, - ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsRequest.SerializeToString, - ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsRequest.SerializeToString, - ('google.pubsub.v1.Publisher', 'Publish'): PublishRequest.SerializeToString, - ('google.pubsub.v1.Publisher', 'UpdateTopic'): UpdateTopicRequest.SerializeToString, - } - response_deserializers = { - ('google.pubsub.v1.Publisher', 'CreateTopic'): Topic.FromString, - ('google.pubsub.v1.Publisher', 'DeleteTopic'): google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ('google.pubsub.v1.Publisher', 'GetTopic'): Topic.FromString, - ('google.pubsub.v1.Publisher', 'ListTopicSubscriptions'): ListTopicSubscriptionsResponse.FromString, - ('google.pubsub.v1.Publisher', 'ListTopics'): ListTopicsResponse.FromString, - ('google.pubsub.v1.Publisher', 'Publish'): PublishResponse.FromString, - 
('google.pubsub.v1.Publisher', 'UpdateTopic'): Topic.FromString, - } - cardinalities = { - 'CreateTopic': cardinality.Cardinality.UNARY_UNARY, - 'DeleteTopic': cardinality.Cardinality.UNARY_UNARY, - 'GetTopic': cardinality.Cardinality.UNARY_UNARY, - 'ListTopicSubscriptions': cardinality.Cardinality.UNARY_UNARY, - 'ListTopics': cardinality.Cardinality.UNARY_UNARY, - 'Publish': cardinality.Cardinality.UNARY_UNARY, - 'UpdateTopic': cardinality.Cardinality.UNARY_UNARY, - } - stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size) - return beta_implementations.dynamic_stub(channel, 'google.pubsub.v1.Publisher', cardinalities, options=stub_options) -except ImportError: - pass + +_PUBLISHER = _descriptor.ServiceDescriptor( + name='Publisher', + full_name='google.pubsub.v1.Publisher', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=4047, + serialized_end=5134, + methods=[ + _descriptor.MethodDescriptor( + name='CreateTopic', + full_name='google.pubsub.v1.Publisher.CreateTopic', + index=0, + containing_service=None, + input_type=_TOPIC, + output_type=_TOPIC, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*')), + ), + _descriptor.MethodDescriptor( + name='UpdateTopic', + full_name='google.pubsub.v1.Publisher.UpdateTopic', + index=1, + containing_service=None, + input_type=_UPDATETOPICREQUEST, + output_type=_TOPIC, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*')), + ), + _descriptor.MethodDescriptor( + name='Publish', + full_name='google.pubsub.v1.Publisher.Publish', + index=2, + containing_service=None, + input_type=_PUBLISHREQUEST, + output_type=_PUBLISHRESPONSE, + 
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002,\"\'/v1/{topic=projects/*/topics/*}:publish:\001*')), + ), + _descriptor.MethodDescriptor( + name='GetTopic', + full_name='google.pubsub.v1.Publisher.GetTopic', + index=3, + containing_service=None, + input_type=_GETTOPICREQUEST, + output_type=_TOPIC, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}')), + ), + _descriptor.MethodDescriptor( + name='ListTopics', + full_name='google.pubsub.v1.Publisher.ListTopics', + index=4, + containing_service=None, + input_type=_LISTTOPICSREQUEST, + output_type=_LISTTOPICSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics')), + ), + _descriptor.MethodDescriptor( + name='ListTopicSubscriptions', + full_name='google.pubsub.v1.Publisher.ListTopicSubscriptions', + index=5, + containing_service=None, + input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, + output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions')), + ), + _descriptor.MethodDescriptor( + name='ListTopicSnapshots', + full_name='google.pubsub.v1.Publisher.ListTopicSnapshots', + index=6, + containing_service=None, + input_type=_LISTTOPICSNAPSHOTSREQUEST, + output_type=_LISTTOPICSNAPSHOTSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots')), + ), + _descriptor.MethodDescriptor( + name='DeleteTopic', + full_name='google.pubsub.v1.Publisher.DeleteTopic', + index=7, + containing_service=None, + input_type=_DELETETOPICREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), 
_b('\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}')), + ), +]) +_sym_db.RegisterServiceDescriptor(_PUBLISHER) + +DESCRIPTOR.services_by_name['Publisher'] = _PUBLISHER + + +_SUBSCRIBER = _descriptor.ServiceDescriptor( + name='Subscriber', + full_name='google.pubsub.v1.Subscriber', + file=DESCRIPTOR, + index=1, + options=None, + serialized_start=5137, + serialized_end=7432, + methods=[ + _descriptor.MethodDescriptor( + name='CreateSubscription', + full_name='google.pubsub.v1.Subscriber.CreateSubscription', + index=0, + containing_service=None, + input_type=_SUBSCRIPTION, + output_type=_SUBSCRIPTION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*')), + ), + _descriptor.MethodDescriptor( + name='GetSubscription', + full_name='google.pubsub.v1.Subscriber.GetSubscription', + index=1, + containing_service=None, + input_type=_GETSUBSCRIPTIONREQUEST, + output_type=_SUBSCRIPTION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}')), + ), + _descriptor.MethodDescriptor( + name='UpdateSubscription', + full_name='google.pubsub.v1.Subscriber.UpdateSubscription', + index=2, + containing_service=None, + input_type=_UPDATESUBSCRIPTIONREQUEST, + output_type=_SUBSCRIPTION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*')), + ), + _descriptor.MethodDescriptor( + name='ListSubscriptions', + full_name='google.pubsub.v1.Subscriber.ListSubscriptions', + index=3, + containing_service=None, + input_type=_LISTSUBSCRIPTIONSREQUEST, + output_type=_LISTSUBSCRIPTIONSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions')), + ), + _descriptor.MethodDescriptor( + name='DeleteSubscription', + 
full_name='google.pubsub.v1.Subscriber.DeleteSubscription', + index=4, + containing_service=None, + input_type=_DELETESUBSCRIPTIONREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}')), + ), + _descriptor.MethodDescriptor( + name='ModifyAckDeadline', + full_name='google.pubsub.v1.Subscriber.ModifyAckDeadline', + index=5, + containing_service=None, + input_type=_MODIFYACKDEADLINEREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002D\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*')), + ), + _descriptor.MethodDescriptor( + name='Acknowledge', + full_name='google.pubsub.v1.Subscriber.Acknowledge', + index=6, + containing_service=None, + input_type=_ACKNOWLEDGEREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*')), + ), + _descriptor.MethodDescriptor( + name='Pull', + full_name='google.pubsub.v1.Subscriber.Pull', + index=7, + containing_service=None, + input_type=_PULLREQUEST, + output_type=_PULLRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*')), + ), + _descriptor.MethodDescriptor( + name='StreamingPull', + full_name='google.pubsub.v1.Subscriber.StreamingPull', + index=8, + containing_service=None, + input_type=_STREAMINGPULLREQUEST, + output_type=_STREAMINGPULLRESPONSE, + options=None, + ), + _descriptor.MethodDescriptor( + name='ModifyPushConfig', + full_name='google.pubsub.v1.Subscriber.ModifyPushConfig', + index=9, + containing_service=None, + input_type=_MODIFYPUSHCONFIGREQUEST, + 
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002C\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*')), + ), + _descriptor.MethodDescriptor( + name='GetSnapshot', + full_name='google.pubsub.v1.Subscriber.GetSnapshot', + index=10, + containing_service=None, + input_type=_GETSNAPSHOTREQUEST, + output_type=_SNAPSHOT, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\'\022%/v1/{snapshot=projects/*/snapshots/*}')), + ), + _descriptor.MethodDescriptor( + name='ListSnapshots', + full_name='google.pubsub.v1.Subscriber.ListSnapshots', + index=11, + containing_service=None, + input_type=_LISTSNAPSHOTSREQUEST, + output_type=_LISTSNAPSHOTSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002$\022\"/v1/{project=projects/*}/snapshots')), + ), + _descriptor.MethodDescriptor( + name='CreateSnapshot', + full_name='google.pubsub.v1.Subscriber.CreateSnapshot', + index=12, + containing_service=None, + input_type=_CREATESNAPSHOTREQUEST, + output_type=_SNAPSHOT, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*')), + ), + _descriptor.MethodDescriptor( + name='UpdateSnapshot', + full_name='google.pubsub.v1.Subscriber.UpdateSnapshot', + index=13, + containing_service=None, + input_type=_UPDATESNAPSHOTREQUEST, + output_type=_SNAPSHOT, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*')), + ), + _descriptor.MethodDescriptor( + name='DeleteSnapshot', + full_name='google.pubsub.v1.Subscriber.DeleteSnapshot', + index=14, + containing_service=None, + input_type=_DELETESNAPSHOTREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), 
_b('\202\323\344\223\002\'*%/v1/{snapshot=projects/*/snapshots/*}')), + ), + _descriptor.MethodDescriptor( + name='Seek', + full_name='google.pubsub.v1.Subscriber.Seek', + index=15, + containing_service=None, + input_type=_SEEKREQUEST, + output_type=_SEEKRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*')), + ), +]) +_sym_db.RegisterServiceDescriptor(_SUBSCRIBER) + +DESCRIPTOR.services_by_name['Subscriber'] = _SUBSCRIBER + # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index b0e76ca0fa44..cbc898ec32db 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -5,9 +5,184 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +class PublisherStub(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. 
+ """ + self.CreateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + ) + self.UpdateTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + ) + self.Publish = channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, + ) + self.GetTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + ) + self.ListTopics = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, + ) + self.ListTopicSubscriptions = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + ) + self.ListTopicSnapshots = channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSnapshots', 
+ request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, + ) + self.DeleteTopic = channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + + +class PublisherServicer(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. + """ + + def CreateTopic(self, request, context): + """Creates the given topic with the given name. See the + resource name rules. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateTopic(self, request, context): + """Updates an existing topic. Note that certain properties of a + topic are not modifiable. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + does not exist. The message payload must not be empty; it must contain + either a non-empty data field, or at least one attribute. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTopic(self, request, context): + """Gets the configuration of a topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopics(self, request, context): + """Lists matching topics. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSubscriptions(self, request, context): + """Lists the names of the subscriptions on this topic. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTopicSnapshots(self, request, context): + """Lists the names of the snapshots on this topic.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_PublisherServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateTopic': grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'UpdateTopic': grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'Publish': grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, + ), + 'GetTopic': grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + ), + 'ListTopics': grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + ), + 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + ), + 'ListTopicSnapshots': grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSnapshots, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.SerializeToString, + ), + 'DeleteTopic': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.pubsub.v1.Publisher', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + class SubscriberStub(object): """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method. + consume messages from a subscription via the `Pull` method or by + establishing a bi-directional stream using the `StreamingPull` method. 
""" def __init__(self, channel): @@ -66,6 +241,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) + self.GetSnapshot = channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSnapshot', + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + ) self.ListSnapshots = channel.unary_unary( '/google.pubsub.v1.Subscriber/ListSnapshots', request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, @@ -95,11 +275,13 @@ def __init__(self, channel): class SubscriberServicer(object): """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method. + consume messages from a subscription via the `Pull` method or by + establishing a bi-directional stream using the `StreamingPull` method. """ def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. + """Creates a subscription to a given topic. See the + resource name rules. If the subscription already exists, returns `ALREADY_EXISTS`. If the corresponding topic doesn't exist, returns `NOT_FOUND`. @@ -124,10 +306,6 @@ def GetSubscription(self, request, context): def UpdateSubscription(self, request, context): """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. - NOTE: The style guide requires body: "subscription" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -186,18 +364,13 @@ def Pull(self, request, context): raise NotImplementedError('Method not implemented!') def StreamingPull(self, request_iterator, context): - """(EXPERIMENTAL) StreamingPull is an experimental feature. This RPC will - respond with UNIMPLEMENTED errors unless you have been invited to test - this feature. Contact cloud-pubsub@google.com with any questions. - - Establishes a stream with the server, which sends messages down to the + """Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack deadline modifications back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `OK` to reassign - server-side resources, in which case, the client should re-establish the - stream. `UNAVAILABLE` may also be returned in the case of a transient error - (e.g., a server restart). These should also be retried by the client. Flow - control can be achieved by configuring the underlying RPC channel. + on any error. The server may close the stream with status `UNAVAILABLE` to + reassign server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by configuring the + underlying RPC channel. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -215,43 +388,64 @@ def ModifyPushConfig(self, request, context): context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') + def GetSnapshot(self, request, context): + """Gets the configuration details of a snapshot.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + def ListSnapshots(self, request, context): - """Lists the existing snapshots. + """Lists the existing snapshots.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription. + """Creates a snapshot from the requested subscription.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. If the snapshot already exists, returns `ALREADY_EXISTS`. If the requested subscription doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random + If the backlog in the subscription is too old -- and the resulting snapshot + would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. + See also the `Snapshot.expire_time` field. If the name is not provided in + the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming - to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). - The generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the request. + to the [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + The generated + name is populated in the returned Snapshot object. Note that for REST API + requests, you must specify a name in the request. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def UpdateSnapshot(self, request, context): - """Updates an existing snapshot. Note that certain properties of a snapshot - are not modifiable. - NOTE: The style guide requires body: "snapshot" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. + """Updates an existing snapshot.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. + Note that certain properties of a snapshot are not modifiable. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') raise NotImplementedError('Method not implemented!') def DeleteSnapshot(self, request, context): - """Removes an existing snapshot. All messages retained in the snapshot + """Removes an existing snapshot.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. + When the snapshot is deleted, all messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be created with the same name, but the new one has no association with the old snapshot or its subscription, unless the same subscription is specified. @@ -262,7 +456,10 @@ def DeleteSnapshot(self, request, context): def Seek(self, request, context): """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. + whichever is provided in the request.

+ ALPHA: This feature is part of an alpha release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -321,6 +518,11 @@ def add_SubscriberServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), + 'GetSnapshot': grpc.unary_unary_rpc_method_handler( + servicer.GetSnapshot, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, + ), 'ListSnapshots': grpc.unary_unary_rpc_method_handler( servicer.ListSnapshots, request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, @@ -350,160 +552,3 @@ def add_SubscriberServicer_to_server(servicer, server): generic_handler = grpc.method_handlers_generic_handler( 'google.pubsub.v1.Subscriber', rpc_method_handlers) server.add_generic_rpc_handlers((generic_handler,)) - - -class PublisherStub(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/CreateTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.UpdateTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/UpdateTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.Publish = channel.unary_unary( - '/google.pubsub.v1.Publisher/Publish', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - ) - self.GetTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/GetTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.ListTopics = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopics', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - ) - self.ListTopicSubscriptions = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSubscriptions', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - ) - self.DeleteTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/DeleteTopic', - 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class PublisherServicer(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def CreateTopic(self, request, context): - """Creates the given topic with the given name. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def UpdateTopic(self, request, context): - """Updates an existing topic. Note that certain properties of a topic are not - modifiable. Options settings follow the style guide: - NOTE: The style guide requires body: "topic" instead of body: "*". - Keeping the latter for internal consistency in V1, however it should be - corrected in V2. See - https://cloud.google.com/apis/design/standard_methods#update for details. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetTopic(self, request, context): - """Gets the configuration of a topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListTopics(self, request, context): - """Lists matching topics. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def ListTopicSubscriptions(self, request, context): - """Lists the name of the subscriptions for this topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_PublisherServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateTopic': grpc.unary_unary_rpc_method_handler( - servicer.CreateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - 'UpdateTopic': grpc.unary_unary_rpc_method_handler( - servicer.UpdateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - 'Publish': grpc.unary_unary_rpc_method_handler( - servicer.Publish, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, - ), - 'GetTopic': 
grpc.unary_unary_rpc_method_handler( - servicer.GetTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - 'ListTopics': grpc.unary_unary_rpc_method_handler( - servicer.ListTopics, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, - ), - 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, - ), - 'DeleteTopic': grpc.unary_unary_rpc_method_handler( - servicer.DeleteTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.pubsub.v1.Publisher', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 48f6ed882024..eb6be71a25c3 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,8 +29,9 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 4 - Beta' dependencies = [ - 'google-api-core[grpc]<2.0.0dev,>=0.1.3', - 'grpc-google-iam-v1<0.12dev,>=0.11.1', + 'google-api-core[grpc] >= 1.1.0, < 2.0.0dev', + 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', + 'enum34; python_version < "3.4"', ] extras = { } diff --git 
a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py new file mode 100644 index 000000000000..d834fa387b41 --- /dev/null +++ b/packages/google-cloud-pubsub/synth.py @@ -0,0 +1,141 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script is used to synthesize generated parts of this library.""" + +import re + +import synthtool as s +from synthtool import gcp + +gapic = gcp.GAPICGenerator() + +version = 'v1' + + +library = gapic.py_library( + 'pubsub', version, config_path='/google/pubsub/artman_pubsub.yaml') +s.move( + library, + excludes=[ + 'docs/**/*', 'nox.py', 'README.rst', 'setup.py', + 'google/cloud/pubsub_v1/__init__.py', 'google/cloud/pubsub_v1/types.py']) + +# Adjust tests to import the clients directly. +s.replace( + 'tests/unit/gapic/v1/test_publisher_client_v1.py', + 'from google.cloud import pubsub_v1', + 'from google.cloud.pubsub_v1.gapic import publisher_client') + +s.replace( + 'tests/unit/gapic/v1/test_publisher_client_v1.py', + ' pubsub_v1', + ' publisher_client') + +s.replace( + 'tests/unit/gapic/v1/test_subscriber_client_v1.py', + 'from google.cloud import pubsub_v1', + 'from google.cloud.pubsub_v1.gapic import subscriber_client') + +s.replace( + 'tests/unit/gapic/v1/test_subscriber_client_v1.py', + ' pubsub_v1', + ' subscriber_client') + +# iam_policy_pb2_grpc doesn't exist. 
+s.replace( + ['google/cloud/pubsub_v1/gapic/publisher_client.py', + 'google/cloud/pubsub_v1/gapic/subscriber_client.py'], + 'from google.iam.v1 import iam_policy_pb2_grpc\n', + '') +s.replace( + ['google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py', + 'google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py'], + 'from google.iam.v1 import iam_policy_pb2_grpc\n', + 'from google.iam.v1 import iam_policy_pb2\n') +s.replace( + 'google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py', + 'iam_policy_pb2_grpc', + 'iam_policy_pb2') +s.replace( + 'google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py', + 'iam_policy_pb2_grpc', + 'iam_policy_pb2') + +# DEFAULT SCOPES are being used. so let's force them in. +s.replace( + 'google/cloud/pubsub_v1/gapic/*er_client.py', + '# The name of the interface for this client. This is the key used to', + '''# The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', ) + + \g<0>''' +) + + +# Stubs are missing +s.replace( + 'google/cloud/pubsub_v1/gapic/subscriber_client.py', + '^(\s+)if client_info is None:\n', + '\g<1>self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel))' + '\g<1>self.subscriber_stub = (pubsub_pb2_grpc.SubscriberStub(channel))\n' + '\g<0>' +) + +s.replace( + 'google/cloud/pubsub_v1/gapic/publisher_client.py', + '^(\s+)if client_info is None:\n', + '\g<1>self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel))' + '\g<1>self.publisher_stub = (pubsub_pb2_grpc.PublisherStub(channel))\n' + '\g<0>' +) + +s.replace( + 'google/cloud/pubsub_v1/gapic/publisher_client.py', + 'import google.api_core.gapic_v1.method\n', + '\g<0>import google.api_core.path_template\n' +) + +# Generator is removing ending line of __init__.py +s.replace( + ['google/cloud/__init__.py', 'google/__init__.py'], + '__path__ = 
pkgutil.extend_path\(__path__, __name__\)', + '\g<0>\n' +) + +# Doc strings are formatted poorly +s.replace( + 'google/cloud/pubsub_v1/proto/pubsub_pb2.py', + 'DESCRIPTOR = _MESSAGESTORAGEPOLICY,\n\s+__module__.*\n\s+,\n\s+__doc__ = """', + '\g<0>A message storage policy.\n\n\n ' +) + +s.replace( + 'google/cloud/pubsub_v1/gapic/subscriber_client.py', + 'subscription \(str\): The subscription whose backlog .*\n(.*\n)+?' + '\s+Format is .*', + '''subscription (str): The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: \\ + (a) The existing backlog on the subscription. More precisely, this is \\ + defined as the messages in the subscription's backlog that are \\ + unacknowledged upon the successful completion of the \\ + `CreateSnapshot` request; as well as: \\ + (b) Any messages published to the subscription's topic following the \\ + successful completion of the CreateSnapshot request. \\ + + Format is ``projects/{project}/subscriptions/{sub}``.''' +) diff --git a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py new file mode 100644 index 000000000000..2748494fa79a --- /dev/null +++ b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py @@ -0,0 +1,28 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import time + +from google.cloud import pubsub_v1 +from google.cloud.pubsub_v1.proto import pubsub_pb2 + + +class TestSystemPublisher(object): + def test_list_topics(self): + project_id = os.environ['PROJECT_ID'] + + client = pubsub_v1.PublisherClient() + project = client.project_path(project_id) + response = client.list_topics(project) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index 13b3bc6fbc1c..a4c0aa91c60b 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -1,10 +1,10 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index fd3e04c044fd..89d753396b77 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -1,10 +1,10 @@ -# Copyright 2017, Google Inc. All rights reserved. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -21,6 +21,7 @@ from google.iam.v1 import policy_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 class MultiCallableStub(object): @@ -156,12 +157,12 @@ def test_update_subscription(self): # Setup Expected Response name = 'name3373707' topic = 'topic110546223' - ack_deadline_seconds = 2135351438 + ack_deadline_seconds_2 = 921632575 retain_acked_messages = False expected_response = { 'name': name, 'topic': topic, - 'ack_deadline_seconds': ack_deadline_seconds, + 'ack_deadline_seconds': ack_deadline_seconds_2, 'retain_acked_messages': retain_acked_messages } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -171,8 +172,11 @@ def test_update_subscription(self): client = subscriber_client.SubscriberClient(channel=channel) # Setup Request - subscription = {} - update_mask = {} + ack_deadline_seconds = 42 + subscription = {'ack_deadline_seconds': ack_deadline_seconds} + paths_element = 'ack_deadline_seconds' + paths = [paths_element] + update_mask = {'paths': paths} response = client.update_subscription(subscription, update_mask) assert expected_response == response @@ -189,8 +193,11 @@ def test_update_subscription_exception(self): client = subscriber_client.SubscriberClient(channel=channel) # Setup request - subscription = {} - update_mask = {} + ack_deadline_seconds = 42 + subscription = {'ack_deadline_seconds': ack_deadline_seconds} + paths_element = 'ack_deadline_seconds' + paths = [paths_element] + update_mask = {'paths': paths} with pytest.raises(CustomException): client.update_subscription(subscription, update_mask) @@ -526,8 +533,12 @@ def test_update_snapshot(self): client = 
subscriber_client.SubscriberClient(channel=channel) # Setup Request - snapshot = {} - update_mask = {} + seconds = 123456 + expire_time = {'seconds': seconds} + snapshot = {'expire_time': expire_time} + paths_element = 'expire_time' + paths = [paths_element] + update_mask = {'paths': paths} response = client.update_snapshot(snapshot, update_mask) assert expected_response == response @@ -544,8 +555,12 @@ def test_update_snapshot_exception(self): client = subscriber_client.SubscriberClient(channel=channel) # Setup request - snapshot = {} - update_mask = {} + seconds = 123456 + expire_time = {'seconds': seconds} + snapshot = {'expire_time': expire_time} + paths_element = 'expire_time' + paths = [paths_element] + update_mask = {'paths': paths} with pytest.raises(CustomException): client.update_snapshot(snapshot, update_mask) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 188d1c09950d..7ff56a925c67 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -206,11 +206,14 @@ def test_publish_attrs_type_error(): def test_gapic_instance_method(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - with mock.patch.object(client.api, '_create_topic', autospec=True) as ct: - client.create_topic('projects/foo/topics/bar') - assert ct.call_count == 1 - _, args, _ = ct.mock_calls[0] - assert args[0] == types.Topic(name='projects/foo/topics/bar') + + ct = mock.Mock() + client.api._inner_api_calls['create_topic'] = ct + + client.create_topic('projects/foo/topics/bar') + assert ct.call_count == 1 + _, args, _ = ct.mock_calls[0] + assert args[0] == types.Topic(name='projects/foo/topics/bar') def test_gapic_class_method(): From e1a9c0243decdf5a415c05ca2e69a1ad8e3e38dd 
Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 10 Aug 2018 12:59:27 -0700 Subject: [PATCH 0282/1197] Release 0.36.0 (#5786) --- packages/google-cloud-pubsub/CHANGELOG.md | 10 ++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 8a4d16a53033..fcb926af4932 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,16 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.36.0 + +### Implementation Changes +- Pubsub: Make 'Message.publish_time' return datetime (#5633) +- Ensure SPM methods check that 'self._consumer' is not None before use. (#5758) + +### New Features +- PubSub: add geo-fencing support (#5769) +- Add 'Message.ack_id' property. (#5693) + ## 0.35.4 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index eb6be71a25c3..9f3f0ec33eca 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.4' +version = '0.36.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 032c6b01b1b59ff4265fe0d071496b007a8fdc1c Mon Sep 17 00:00:00 2001 From: Chris McDonough Date: Fri, 10 Aug 2018 18:37:27 -0400 Subject: [PATCH 0283/1197] Make Publisher batch-related interfaces private (#5784) --- .../publisher/{batch => _batch}/__init__.py | 0 .../publisher/{batch => _batch}/base.py | 0 .../publisher/{batch => _batch}/thread.py | 2 +- .../cloud/pubsub_v1/publisher/client.py | 24 +++++++------------ .../pubsub_v1/publisher/batch/test_base.py | 4 ++-- .../pubsub_v1/publisher/batch/test_thread.py | 6 ++--- .../publisher/test_publisher_client.py | 6 ++--- 7 files changed, 17 
insertions(+), 25 deletions(-) rename packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/{batch => _batch}/__init__.py (100%) rename packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/{batch => _batch}/base.py (100%) rename packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/{batch => _batch}/thread.py (99%) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/__init__.py similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/__init__.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/__init__.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/base.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py similarity index 99% rename from packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py rename to packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 3b11bf32ef25..b59b8a3cafd8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -24,7 +24,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures -from google.cloud.pubsub_v1.publisher.batch import base +from google.cloud.pubsub_v1.publisher._batch import base _LOGGER = logging.getLogger(__name__) diff 
--git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 1aa1a279d393..8e878e1df764 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -26,7 +26,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.publisher.batch import thread +from google.cloud.pubsub_v1.publisher._batch import thread __version__ = pkg_resources.get_distribution('google-cloud-pubsub').version @@ -34,6 +34,7 @@ @_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) class Client(object): + _batch_class = thread.Batch """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. @@ -43,14 +44,6 @@ class Client(object): Args: batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The settings for batch publishing. - batch_class (Optional[Type]): A class that describes how to handle - batches. You may subclass the - :class:`.pubsub_v1.publisher.batch.base.BaseBatch` class in - order to define your own batcher. This is primarily provided to - allow use of different concurrency models; the default - is based on :class:`threading.Thread`. This class should also have - a class method (or static method) that takes no arguments and - produces a lock that can be used as a context manager. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. @@ -59,7 +52,7 @@ class Client(object): be added if ``credentials`` are passed explicitly or if the Pub / Sub emulator is detected as running. 
""" - def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): + def __init__(self, batch_settings=(), **kwargs): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. @@ -89,8 +82,7 @@ def __init__(self, batch_settings=(), batch_class=thread.Batch, **kwargs): # The batches on the publisher client are responsible for holding # messages. One batch exists for each topic. - self._batch_class = batch_class - self._batch_lock = batch_class.make_lock() + self._batch_lock = self._batch_class.make_lock() self._batches = {} @property @@ -102,7 +94,7 @@ def target(self): """ return publisher_client.PublisherClient.SERVICE_ADDRESS - def batch(self, topic, create=False, autocommit=True): + def _batch(self, topic, create=False, autocommit=True): """Return the current batch for the provided topic. This will create a new batch if ``create=True`` or if no batch @@ -119,7 +111,7 @@ def batch(self, topic, create=False, autocommit=True): might have (e.g. spawning a worker to publish a batch). Returns: - ~.pubsub_v1.batch.Batch: The batch object. + ~.pubsub_v1._batch.Batch: The batch object. """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. @@ -201,11 +193,11 @@ def publish(self, topic, data, **attrs): message = types.PubsubMessage(data=data, attributes=attrs) # Delegate the publishing to the batch. 
- batch = self.batch(topic) + batch = self._batch(topic) future = None while future is None: future = batch.publish(message) if future is None: - batch = self.batch(topic, create=True) + batch = self._batch(topic, create=True) return future diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 1c5dd7cfdaa3..0ef98945b0bf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -19,8 +19,8 @@ from google.auth import credentials from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus -from google.cloud.pubsub_v1.publisher.batch.thread import Batch +from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher._batch.thread import Batch def create_batch(status=None, settings=types.BatchSettings()): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index bb50de5e0ec2..315f072bdf29 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -22,9 +22,9 @@ from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions -from google.cloud.pubsub_v1.publisher.batch.base import BatchStatus -from google.cloud.pubsub_v1.publisher.batch import thread -from google.cloud.pubsub_v1.publisher.batch.thread import Batch +from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher._batch import thread +from 
google.cloud.pubsub_v1.publisher._batch.thread import Batch def create_client(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 7ff56a925c67..24f778f73890 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -56,7 +56,7 @@ def test_batch_create(): assert len(client._batches) == 0 topic = 'topic/path' - batch = client.batch(topic, autocommit=False) + batch = client._batch(topic, autocommit=False) assert client._batches == {topic: batch} @@ -68,7 +68,7 @@ def test_batch_exists(): client._batches[topic] = mock.sentinel.batch # A subsequent request should return the same batch. - batch = client.batch(topic, autocommit=False) + batch = client._batch(topic, autocommit=False) assert batch is mock.sentinel.batch assert client._batches == {topic: batch} @@ -81,7 +81,7 @@ def test_batch_create_and_exists(): client._batches[topic] = mock.sentinel.batch # A subsequent request should return the same batch. 
- batch = client.batch(topic, create=True, autocommit=False) + batch = client._batch(topic, create=True, autocommit=False) assert batch is not mock.sentinel.batch assert client._batches == {topic: batch} From 1913d0e54b8e0f71de6926f46b4142d819a1edad Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 14 Aug 2018 12:36:59 -0500 Subject: [PATCH 0284/1197] Release Pub/Sub 0.37.0 (#5795) --- packages/google-cloud-pubsub/CHANGELOG.md | 6 ++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index fcb926af4932..42fb870ff87a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.37.0 + +### Implementation Changes + +- Make Publisher batch-related interfaces private (#5784) + ## 0.36.0 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9f3f0ec33eca..7ac97f231c45 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.36.0' +version = '0.37.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d8b06c5e50def760a25ed21cf85cd98036466b2f Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 14 Aug 2018 15:20:07 -0500 Subject: [PATCH 0285/1197] Make get_initial_request more resilient to race conditions. (#5803) There's a rare case where the stream can be restarted while the streaming pull manager is shutting down. This causes get_initial_request to be called while the manager is in a bad state, which will trigger an AttributeError when attempting to read the list of outstanding Ack IDs from the leaser. 
Closes #5792 --- .../subscriber/_protocol/streaming_pull_manager.py | 5 ++++- .../subscriber/test_streaming_pull_manager.py | 13 +++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 6c1d90192477..21a8f98851a0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -378,7 +378,10 @@ def _get_initial_request(self): """ # Any ack IDs that are under lease management need to have their # deadline extended immediately. - lease_ids = self._leaser.ack_ids + if self._leaser is not None: + lease_ids = self._leaser.ack_ids + else: + lease_ids = [] # Put the request together. request = types.StreamingPullRequest( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 47638070478a..15f3bc95db80 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -421,6 +421,19 @@ def test__get_initial_request(): assert initial_request.modify_deadline_seconds == [10, 10] +def test__get_initial_request_wo_leaser(): + manager = make_manager() + manager._leaser = None + + initial_request = manager._get_initial_request() + + assert isinstance(initial_request, types.StreamingPullRequest) + assert initial_request.subscription == 'subscription-name' + assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.modify_deadline_ack_ids == [] + assert initial_request.modify_deadline_seconds == [] + + 
def test_on_response(): manager, _, dispatcher, _, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback From 719e19ee6ed45852ca2fa24f5bdeacaeff480119 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Thu, 16 Aug 2018 11:47:41 -0500 Subject: [PATCH 0286/1197] Release 0.37.1 (#5810) --- packages/google-cloud-pubsub/CHANGELOG.md | 6 ++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 42fb870ff87a..c7f9a8c1cf00 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.37.1 + +### Implementation Changes + +- Make get_initial_request more resilient to race conditions. (#5803) + ## 0.37.0 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 7ac97f231c45..a76f79bb606b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.37.0' +version = '0.37.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d9bbfa1cf5b2a2ae9858bb77de4e01ad9f5836c3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 20 Aug 2018 15:36:52 -0400 Subject: [PATCH 0287/1197] Fix Sphinx rendering for publisher client. (#5822) Closes #5788. 
--- .../google/cloud/pubsub_v1/publisher/client.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 8e878e1df764..c37b1cb80a64 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -34,7 +34,6 @@ @_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) class Client(object): - _batch_class = thread.Batch """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. @@ -52,6 +51,8 @@ class Client(object): be added if ``credentials`` are passed explicitly or if the Pub / Sub emulator is detected as running. """ + _batch_class = thread.Batch + def __init__(self, batch_settings=(), **kwargs): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host From 4494d72742bd6051a3e20e143f0f3ae21d388cc3 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 21 Aug 2018 12:48:22 -0500 Subject: [PATCH 0288/1197] Re-generate library, removing obsolete synth modifications. 
(#5825) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 4 +- .../pubsub_v1/gapic/subscriber_client.py | 4 +- packages/google-cloud-pubsub/synth.py | 38 ------------------- .../publisher/test_publisher_client.py | 2 +- .../subscriber/test_subscriber_client.py | 2 +- 5 files changed, 4 insertions(+), 46 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index a5f82ec5eaff..9528a4a61a44 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -24,6 +24,7 @@ import google.api_core.path_template import google.api_core.grpc_helpers import google.api_core.page_iterator +import google.api_core.path_template import grpc from google.cloud.pubsub_v1.gapic import publisher_client_config @@ -164,9 +165,6 @@ def __init__(self, credentials=credentials, ) - self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel)) - self.publisher_stub = (pubsub_pb2_grpc.PublisherStub(channel)) - if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 8fdbff7168be..caa02d3bb563 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -23,6 +23,7 @@ import google.api_core.gapic_v1.method import google.api_core.grpc_helpers import google.api_core.page_iterator +import google.api_core.path_template import google.api_core.protobuf_helpers import grpc @@ -185,9 +186,6 @@ def __init__(self, credentials=credentials, ) - self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel)) - self.subscriber_stub = 
(pubsub_pb2_grpc.SubscriberStub(channel)) - if client_info is None: client_info = ( google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index d834fa387b41..376be17eb77b 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -53,26 +53,6 @@ ' pubsub_v1', ' subscriber_client') -# iam_policy_pb2_grpc doesn't exist. -s.replace( - ['google/cloud/pubsub_v1/gapic/publisher_client.py', - 'google/cloud/pubsub_v1/gapic/subscriber_client.py'], - 'from google.iam.v1 import iam_policy_pb2_grpc\n', - '') -s.replace( - ['google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py', - 'google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py'], - 'from google.iam.v1 import iam_policy_pb2_grpc\n', - 'from google.iam.v1 import iam_policy_pb2\n') -s.replace( - 'google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py', - 'iam_policy_pb2_grpc', - 'iam_policy_pb2') -s.replace( - 'google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py', - 'iam_policy_pb2_grpc', - 'iam_policy_pb2') - # DEFAULT SCOPES are being used. so let's force them in. 
s.replace( 'google/cloud/pubsub_v1/gapic/*er_client.py', @@ -86,24 +66,6 @@ \g<0>''' ) - -# Stubs are missing -s.replace( - 'google/cloud/pubsub_v1/gapic/subscriber_client.py', - '^(\s+)if client_info is None:\n', - '\g<1>self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel))' - '\g<1>self.subscriber_stub = (pubsub_pb2_grpc.SubscriberStub(channel))\n' - '\g<0>' -) - -s.replace( - 'google/cloud/pubsub_v1/gapic/publisher_client.py', - '^(\s+)if client_info is None:\n', - '\g<1>self.iam_policy_stub = (iam_policy_pb2.IAMPolicyStub(channel))' - '\g<1>self.publisher_stub = (pubsub_pb2_grpc.PublisherStub(channel))\n' - '\g<0>' -) - s.replace( 'google/cloud/pubsub_v1/gapic/publisher_client.py', 'import google.api_core.gapic_v1.method\n', diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 24f778f73890..a10ae75b37b2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -46,7 +46,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. 
- channel = client.api.publisher_stub.Publish._channel + channel = client.api.transport.publish._channel assert channel.target().decode('utf8') == '/foo/bar/' diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 000040cb243e..35487c1a6a8d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -36,7 +36,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client.api.subscriber_stub.Pull._channel + channel = client.api.transport.pull._channel assert channel.target().decode('utf8') == '/baz/bacon/' From 4d49e1b64d48e350c5f9844babf4353cf26fc91c Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 21 Aug 2018 14:25:49 -0500 Subject: [PATCH 0289/1197] Fix classmethod wrapping (#5826) --- .../google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index 3e24ad757a57..0383ec85f451 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -32,6 +32,8 @@ def wrap(wrapped_fx): # Similarly, for instance methods, we need to send self.api rather # than self, since that is where the actual methods were declared. instance_method = True + + # If this is a bound method it's a classmethod. 
self = getattr(wrapped_fx, '__self__', None) if issubclass(type(self), type): instance_method = False @@ -41,8 +43,9 @@ def wrap(wrapped_fx): if instance_method: fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) - fx = lambda self, *a, **kw: wrapped_fx(*a, **kw) # noqa - return functools.wraps(wrapped_fx)(fx) + + fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa + return staticmethod(functools.wraps(wrapped_fx)(fx)) def actual_decorator(cls): # Reflectively iterate over most of the methods on the source class From 91ecb75410ea308ab380b42296b86b4830ca3b7d Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 21 Aug 2018 15:10:29 -0500 Subject: [PATCH 0290/1197] Add test for testing invoking a wrapped class method on the class itself (#5828) --- .../unit/pubsub_v1/publisher/test_publisher_client.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index a10ae75b37b2..1c0ab91a03d4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -216,7 +216,12 @@ def test_gapic_instance_method(): assert args[0] == types.Topic(name='projects/foo/topics/bar') -def test_gapic_class_method(): +def test_gapic_class_method_on_class(): + answer = publisher.Client.topic_path('foo', 'bar') + assert answer == 'projects/foo/topics/bar' + + +def test_gapic_class_method_on_instance(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) answer = client.topic_path('foo', 'bar') From 7d3d142b4bb2fd261377a4f2daf007f248bf98de Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 21 Aug 2018 16:04:10 -0500 Subject: [PATCH 0291/1197] Release 0.37.2 (#5829) --- 
packages/google-cloud-pubsub/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index c7f9a8c1cf00..6c4b78258ee6 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.37.2 + +### Implementation Changes + +- Fix classmethod wrapping (#5826) + +### Documentation + +- Fix Sphinx rendering for publisher client. (#5822) + +### Internal / Testing Changes + +- Re-generate library, removing obsolete synth modifications. (#5825) +- Add test for testing invoking a wrapped class method on the class itself (#5828) + ## 0.37.1 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index a76f79bb606b..c812b7981842 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.37.1' +version = '0.37.2' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From cd2e9ac6b50ee2b834c70fad8a65abcd6a86ae03 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 30 Aug 2018 11:50:23 -0400 Subject: [PATCH 0292/1197] Nox: use inplace installs (#5865) --- packages/google-cloud-pubsub/nox.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/nox.py index f772b2b33d57..fc887cadfdc8 100644 --- a/packages/google-cloud-pubsub/nox.py +++ b/packages/google-cloud-pubsub/nox.py @@ -34,8 +34,10 @@ def default(session): Python corresponding to the ``nox`` binary the ``PATH`` can run the tests. 
""" - # Install all test dependencies, then install this package in-place. - session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) + # Install all test dependencies, then install local packages in-place. + session.install('mock', 'pytest', 'pytest-cov') + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) session.install('-e', '.') # Run py.test against the unit tests. @@ -84,11 +86,12 @@ def system(session, py): # Use pre-release gRPC for system tests. session.install('--pre', 'grpcio') - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install('mock', 'pytest', *LOCAL_DEPS) - session.install('../test_utils/') - session.install('.') + # Install all test dependencies, then install local packages in-place. + session.install('mock', 'pytest') + for local_dep in LOCAL_DEPS: + session.install('-e', local_dep) + session.install('-e', '../test_utils/') + session.install('-e', '.') # Run py.test against the system tests. session.run( From fecfd42bf909a9ec97951c9be86f54abd7f1682e Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 7 Sep 2018 12:09:57 -0400 Subject: [PATCH 0293/1197] Change 'BatchSettings.max_bytes' default. (#5899) It is documented as '10MB', but enforced as 10000000 bytes. Closes #5898. 
--- packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py | 2 +- .../tests/unit/pubsub_v1/publisher/test_publisher_client.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 1ac99a96534b..1f5ca92174e2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -40,7 +40,7 @@ ['max_bytes', 'max_latency', 'max_messages'], ) BatchSettings.__new__.__defaults__ = ( - 1024 * 1024 * 10, # max_bytes: 10 MB + 1000 * 1000 * 10, # max_bytes: documented "10 MB", enforced 10000000 0.05, # max_latency: 0.05 seconds 1000, # max_messages: 1,000 ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 1c0ab91a03d4..9b15e6d4777d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -31,7 +31,7 @@ def test_init(): # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. 
assert isinstance(client.api, publisher_client.PublisherClient) - assert client.batch_settings.max_bytes == 10 * (2 ** 20) + assert client.batch_settings.max_bytes == 10 * 1000 * 1000 assert client.batch_settings.max_latency == 0.05 assert client.batch_settings.max_messages == 1000 From 4db2291bfdc68725b1576c019a8037bf4cc5457a Mon Sep 17 00:00:00 2001 From: DPE bot Date: Wed, 12 Sep 2018 12:13:49 -0700 Subject: [PATCH 0294/1197] Re-generate library using pubsub/synth.py (#5953) --- packages/google-cloud-pubsub/google/__init__.py | 2 ++ packages/google-cloud-pubsub/google/cloud/__init__.py | 2 ++ packages/google-cloud-pubsub/google/cloud/pubsub.py | 2 ++ .../google/cloud/pubsub_v1/gapic/publisher_client.py | 2 ++ .../google/cloud/pubsub_v1/gapic/subscriber_client.py | 2 ++ .../pubsub_v1/gapic/transports/publisher_grpc_transport.py | 2 ++ .../gapic/transports/subscriber_grpc_transport.py | 2 ++ packages/google-cloud-pubsub/synth.py | 7 ------- .../tests/system/gapic/v1/test_system_publisher_v1.py | 2 ++ .../tests/unit/gapic/v1/test_publisher_client_v1.py | 2 ++ .../tests/unit/gapic/v1/test_subscriber_client_v1.py | 2 ++ 11 files changed, 20 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py index 7a9e5a0ef198..f65701dd143f 100644 --- a/packages/google-cloud-pubsub/google/__init__.py +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index 7a9e5a0ef198..f65701dd143f 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the 
"License"); diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index a0872f96ac0c..7da5d951d804 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 9528a4a61a44..c3051a3ca31c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index caa02d3bb563..a0d3750822ce 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index c6bb9a648d5b..c74dabd24891 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # 
Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 8163cd2e8ec9..50f7aa3ee266 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 376be17eb77b..1123c75e4169 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -72,13 +72,6 @@ '\g<0>import google.api_core.path_template\n' ) -# Generator is removing ending line of __init__.py -s.replace( - ['google/cloud/__init__.py', 'google/__init__.py'], - '__path__ = pkgutil.extend_path\(__path__, __name__\)', - '\g<0>\n' -) - # Doc strings are formatted poorly s.replace( 'google/cloud/pubsub_v1/proto/pubsub_pb2.py', diff --git a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py index 2748494fa79a..0d1dfa290e9a 100644 --- a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index a4c0aa91c60b..befea7775a4f 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index 89d753396b77..5e1ddc2059e3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); From 0fed5fe6105ebbebf976a4200967e2ad4f2f3279 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 12 Sep 2018 15:02:26 -0700 Subject: [PATCH 0295/1197] Fix race condition in recv()'s usage of self.call. (#5935) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 76 ++++++++++++++----- .../unit/pubsub_v1/subscriber/test_bidi.py | 44 +++++------ 2 files changed, 75 insertions(+), 45 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 00877e70058e..7c995c57652e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -330,11 +330,12 @@ def _on_call_done(self, future): # Unlike the base class, we only execute the callbacks on a terminal # error, not for errors that we can recover from. Note that grpc's # "future" here is also a grpc.RpcError. 
- if not self._should_recover(future): - self._finalize(future) - else: - _LOGGER.debug('Re-opening stream from gRPC callback.') - self._reopen() + with self._operational_lock: + if not self._should_recover(future): + self._finalize(future) + else: + _LOGGER.debug('Re-opening stream from gRPC callback.') + self._reopen() def _reopen(self): with self._operational_lock: @@ -361,6 +362,7 @@ def _reopen(self): # If re-opening or re-calling the method fails for any reason, # consider it a terminal error and finalize the stream. except Exception as exc: + _LOGGER.debug('Failed to re-open stream due to %s', exc) self._finalize(exc) raise @@ -385,23 +387,60 @@ def _recoverable(self, method, *args, **kwargs): return method(*args, **kwargs) except Exception as exc: - _LOGGER.debug('Call to retryable %r caused %s.', method, exc) - if not self._should_recover(exc): - self.close() - _LOGGER.debug('Not retrying %r due to %s.', method, exc) - self._finalize(exc) - raise exc + with self._operational_lock: + _LOGGER.debug( + 'Call to retryable %r caused %s.', method, exc) + + if not self._should_recover(exc): + self.close() + _LOGGER.debug( + 'Not retrying %r due to %s.', method, exc) + self._finalize(exc) + raise exc + + _LOGGER.debug( + 'Re-opening stream from retryable %r.', method) + self._reopen() + + def _send(self, request): + # Grab a reference to the RPC call. Because another thread (notably + # the gRPC error thread) can modify self.call (by invoking reopen), + # we should ensure our reference can not change underneath us. + # If self.call is modified (such as replaced with a new RPC call) then + # this will use the "old" RPC, which should result in the same + # exception passed into gRPC's error handler being raised here, which + # will be handled by the usual error handling in retryable. 
+ with self._operational_lock: + call = self.call + + if call is None: + raise ValueError( + 'Can not send() on an RPC that has never been open()ed.') - _LOGGER.debug('Re-opening stream from retryable %r.', method) - self._reopen() + # Don't use self.is_active(), as ResumableBidiRpc will overload it + # to mean something semantically different. + if call.is_active(): + self._request_queue.put(request) + pass + else: + # calling next should cause the call to raise. + next(call) def send(self, request): - return self._recoverable( - super(ResumableBidiRpc, self).send, request) + return self._recoverable(self._send, request) + + def _recv(self): + with self._operational_lock: + call = self.call + + if call is None: + raise ValueError( + 'Can not recv() on an RPC that has never been open()ed.') + + return next(call) def recv(self): - return self._recoverable( - super(ResumableBidiRpc, self).recv) + return self._recoverable(self._recv) @property def is_active(self): @@ -506,8 +545,7 @@ def _thread_main(self): else: _LOGGER.error( - 'The bidirectional RPC unexpectedly exited. This is a truly ' - 'exceptional case. Please file a bug with your logs.') + 'The bidirectional RPC exited.') _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py index 2e72a757600a..058cd53c29cf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -373,41 +373,21 @@ def test_recv_recover(self): assert bidi_rpc.call == call_2 assert bidi_rpc.is_active is True - def test_recv_recover_race_condition(self): - # This test checks the race condition where two threads recv() and - # encounter an error and must re-open the stream. Only one thread - # should succeed in doing so. 
- error = ValueError() - call_1 = CallStub([error, error]) - call_2 = CallStub([1, 2]) + def test_recv_recover_already_recovered(self): + call_1 = CallStub([]) + call_2 = CallStub([]) start_rpc = mock.create_autospec( grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]) - recovered_event = threading.Event() - - def second_thread_main(): - assert bidi_rpc.recv() == 2 - - second_thread = threading.Thread(target=second_thread_main) - - def should_recover(exception): - assert exception == error - if threading.current_thread() == second_thread: - recovered_event.wait() - return True - - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, lambda _: True) bidi_rpc.open() - second_thread.start() - assert bidi_rpc.recv() == 1 - recovered_event.set() + bidi_rpc._reopen() - assert bidi_rpc.call == call_2 + assert bidi_rpc.call is call_1 assert bidi_rpc.is_active is True - second_thread.join() def test_recv_failure(self): error = ValueError() @@ -456,6 +436,18 @@ def test_reopen_failure_on_rpc_restart(self): assert bidi_rpc.is_active is False callback.assert_called_once_with(error2) + def test_send_not_open(self): + bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) + + with pytest.raises(ValueError): + bidi_rpc.send(mock.sentinel.request) + + def test_recv_not_open(self): + bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) + + with pytest.raises(ValueError): + bidi_rpc.recv() + def test_finalize_idempotent(self): error1 = ValueError('1') error2 = ValueError('2') From 958a90f8f0584bf2625194bca3427b7a86f765e9 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 12 Sep 2018 15:32:34 -0700 Subject: [PATCH 0296/1197] Fix race condition where pending Ack IDs can be modified by another thread. 
(#5929) --- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 21a8f98851a0..17d1a2cad166 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -379,7 +379,9 @@ def _get_initial_request(self): # Any ack IDs that are under lease management need to have their # deadline extended immediately. if self._leaser is not None: - lease_ids = self._leaser.ack_ids + # Explicitly copy the list, as it could be modified by another + # thread. + lease_ids = list(self._leaser.ack_ids) else: lease_ids = [] From 797940600ac76cae90cb2cd5e22b5801116835ca Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 12 Sep 2018 16:11:54 -0700 Subject: [PATCH 0297/1197] Release pubsub 0.38.0 (#5962) --- packages/google-cloud-pubsub/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 6c4b78258ee6..a6ed8ca5f36c 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.38.0 + +### Implementation Changes + +- Fix race condition in recv()'s usage of self.call. ([#5935](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5935)) +- Re-generate the underlying library from protos. ([#5953](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5953)) +- Change 'BatchSettings.max_bytes' default. 
([#5899](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5899)) +- Fix race condition where pending Ack IDs can be modified by another thread. ([#5929](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5929)) + +### Internal / Testing Changes + +- Nox: use inplace installs ([#5865](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5865)) + ## 0.37.2 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index c812b7981842..ee5677565a12 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.37.2' +version = '0.38.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 36689332c0701b891ae00a5f35ad788ec1a6d6fb Mon Sep 17 00:00:00 2001 From: DPE bot Date: Fri, 14 Sep 2018 11:35:17 -0700 Subject: [PATCH 0298/1197] Re-generate library using pubsub/synth.py (#5978) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 36 +++---- .../pubsub_v1/gapic/subscriber_client.py | 96 +++++++++---------- 2 files changed, 66 insertions(+), 66 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index c3051a3ca31c..cb08721e1d05 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -248,8 +248,8 @@ def create_topic(self, 'create_topic'] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_topic, default_retry=self._method_configs['CreateTopic'].retry, - default_timeout=self._method_configs['CreateTopic'] - .timeout, + default_timeout=self._method_configs['CreateTopic']. 
+ timeout, client_info=self._client_info, ) @@ -320,8 +320,8 @@ def update_topic(self, 'update_topic'] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_topic, default_retry=self._method_configs['UpdateTopic'].retry, - default_timeout=self._method_configs['UpdateTopic'] - .timeout, + default_timeout=self._method_configs['UpdateTopic']. + timeout, client_info=self._client_info, ) @@ -604,10 +604,10 @@ def list_topic_subscriptions( self._inner_api_calls[ 'list_topic_subscriptions'] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_topic_subscriptions, - default_retry=self._method_configs[ - 'ListTopicSubscriptions'].retry, - default_timeout=self._method_configs[ - 'ListTopicSubscriptions'].timeout, + default_retry=self. + _method_configs['ListTopicSubscriptions'].retry, + default_timeout=self. + _method_configs['ListTopicSubscriptions'].timeout, client_info=self._client_info, ) @@ -675,8 +675,8 @@ def delete_topic(self, 'delete_topic'] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_topic, default_retry=self._method_configs['DeleteTopic'].retry, - default_timeout=self._method_configs['DeleteTopic'] - .timeout, + default_timeout=self._method_configs['DeleteTopic']. + timeout, client_info=self._client_info, ) @@ -741,8 +741,8 @@ def set_iam_policy(self, 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs['SetIamPolicy'].retry, - default_timeout=self._method_configs['SetIamPolicy'] - .timeout, + default_timeout=self._method_configs['SetIamPolicy']. + timeout, client_info=self._client_info, ) @@ -801,8 +801,8 @@ def get_iam_policy(self, 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs['GetIamPolicy'].retry, - default_timeout=self._method_configs['GetIamPolicy'] - .timeout, + default_timeout=self._method_configs['GetIamPolicy']. 
+ timeout, client_info=self._client_info, ) @@ -865,10 +865,10 @@ def test_iam_permissions(self, self._inner_api_calls[ 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, - default_retry=self._method_configs[ - 'TestIamPermissions'].retry, - default_timeout=self._method_configs['TestIamPermissions'] - .timeout, + default_retry=self._method_configs['TestIamPermissions']. + retry, + default_timeout=self._method_configs['TestIamPermissions']. + timeout, client_info=self._client_info, ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index a0d3750822ce..48ce0f8e7540 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -321,10 +321,10 @@ def create_subscription(self, self._inner_api_calls[ 'create_subscription'] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_subscription, - default_retry=self._method_configs[ - 'CreateSubscription'].retry, - default_timeout=self._method_configs['CreateSubscription'] - .timeout, + default_retry=self._method_configs['CreateSubscription']. + retry, + default_timeout=self._method_configs['CreateSubscription']. + timeout, client_info=self._client_info, ) @@ -384,10 +384,10 @@ def get_subscription(self, self._inner_api_calls[ 'get_subscription'] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_subscription, - default_retry=self._method_configs[ - 'GetSubscription'].retry, - default_timeout=self._method_configs['GetSubscription'] - .timeout, + default_retry=self._method_configs['GetSubscription']. + retry, + default_timeout=self._method_configs['GetSubscription']. 
+ timeout, client_info=self._client_info, ) @@ -451,10 +451,10 @@ def update_subscription(self, self._inner_api_calls[ 'update_subscription'] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_subscription, - default_retry=self._method_configs[ - 'UpdateSubscription'].retry, - default_timeout=self._method_configs['UpdateSubscription'] - .timeout, + default_retry=self._method_configs['UpdateSubscription']. + retry, + default_timeout=self._method_configs['UpdateSubscription']. + timeout, client_info=self._client_info, ) @@ -530,10 +530,10 @@ def list_subscriptions(self, self._inner_api_calls[ 'list_subscriptions'] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_subscriptions, - default_retry=self._method_configs[ - 'ListSubscriptions'].retry, - default_timeout=self._method_configs['ListSubscriptions'] - .timeout, + default_retry=self._method_configs['ListSubscriptions']. + retry, + default_timeout=self._method_configs['ListSubscriptions']. + timeout, client_info=self._client_info, ) @@ -600,10 +600,10 @@ def delete_subscription(self, self._inner_api_calls[ 'delete_subscription'] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_subscription, - default_retry=self._method_configs[ - 'DeleteSubscription'].retry, - default_timeout=self._method_configs['DeleteSubscription'] - .timeout, + default_retry=self._method_configs['DeleteSubscription']. + retry, + default_timeout=self._method_configs['DeleteSubscription']. + timeout, client_info=self._client_info, ) @@ -673,10 +673,10 @@ def modify_ack_deadline(self, self._inner_api_calls[ 'modify_ack_deadline'] = google.api_core.gapic_v1.method.wrap_method( self.transport.modify_ack_deadline, - default_retry=self._method_configs[ - 'ModifyAckDeadline'].retry, - default_timeout=self._method_configs['ModifyAckDeadline'] - .timeout, + default_retry=self._method_configs['ModifyAckDeadline']. + retry, + default_timeout=self._method_configs['ModifyAckDeadline']. 
+ timeout, client_info=self._client_info, ) @@ -742,8 +742,8 @@ def acknowledge(self, 'acknowledge'] = google.api_core.gapic_v1.method.wrap_method( self.transport.acknowledge, default_retry=self._method_configs['Acknowledge'].retry, - default_timeout=self._method_configs['Acknowledge'] - .timeout, + default_timeout=self._method_configs['Acknowledge']. + timeout, client_info=self._client_info, ) @@ -887,8 +887,8 @@ def streaming_pull(self, 'streaming_pull'] = google.api_core.gapic_v1.method.wrap_method( self.transport.streaming_pull, default_retry=self._method_configs['StreamingPull'].retry, - default_timeout=self._method_configs['StreamingPull'] - .timeout, + default_timeout=self._method_configs['StreamingPull']. + timeout, client_info=self._client_info, ) @@ -953,10 +953,10 @@ def modify_push_config(self, self._inner_api_calls[ 'modify_push_config'] = google.api_core.gapic_v1.method.wrap_method( self.transport.modify_push_config, - default_retry=self._method_configs[ - 'ModifyPushConfig'].retry, - default_timeout=self._method_configs['ModifyPushConfig'] - .timeout, + default_retry=self._method_configs['ModifyPushConfig']. + retry, + default_timeout=self._method_configs['ModifyPushConfig']. + timeout, client_info=self._client_info, ) @@ -1036,8 +1036,8 @@ def list_snapshots(self, 'list_snapshots'] = google.api_core.gapic_v1.method.wrap_method( self.transport.list_snapshots, default_retry=self._method_configs['ListSnapshots'].retry, - default_timeout=self._method_configs['ListSnapshots'] - .timeout, + default_timeout=self._method_configs['ListSnapshots']. + timeout, client_info=self._client_info, ) @@ -1135,8 +1135,8 @@ def create_snapshot(self, 'create_snapshot'] = google.api_core.gapic_v1.method.wrap_method( self.transport.create_snapshot, default_retry=self._method_configs['CreateSnapshot'].retry, - default_timeout=self._method_configs['CreateSnapshot'] - .timeout, + default_timeout=self._method_configs['CreateSnapshot']. 
+ timeout, client_info=self._client_info, ) @@ -1208,8 +1208,8 @@ def update_snapshot(self, 'update_snapshot'] = google.api_core.gapic_v1.method.wrap_method( self.transport.update_snapshot, default_retry=self._method_configs['UpdateSnapshot'].retry, - default_timeout=self._method_configs['UpdateSnapshot'] - .timeout, + default_timeout=self._method_configs['UpdateSnapshot']. + timeout, client_info=self._client_info, ) @@ -1269,8 +1269,8 @@ def delete_snapshot(self, 'delete_snapshot'] = google.api_core.gapic_v1.method.wrap_method( self.transport.delete_snapshot, default_retry=self._method_configs['DeleteSnapshot'].retry, - default_timeout=self._method_configs['DeleteSnapshot'] - .timeout, + default_timeout=self._method_configs['DeleteSnapshot']. + timeout, client_info=self._client_info, ) @@ -1420,8 +1420,8 @@ def set_iam_policy(self, 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( self.transport.set_iam_policy, default_retry=self._method_configs['SetIamPolicy'].retry, - default_timeout=self._method_configs['SetIamPolicy'] - .timeout, + default_timeout=self._method_configs['SetIamPolicy']. + timeout, client_info=self._client_info, ) @@ -1480,8 +1480,8 @@ def get_iam_policy(self, 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( self.transport.get_iam_policy, default_retry=self._method_configs['GetIamPolicy'].retry, - default_timeout=self._method_configs['GetIamPolicy'] - .timeout, + default_timeout=self._method_configs['GetIamPolicy']. + timeout, client_info=self._client_info, ) @@ -1544,10 +1544,10 @@ def test_iam_permissions(self, self._inner_api_calls[ 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( self.transport.test_iam_permissions, - default_retry=self._method_configs[ - 'TestIamPermissions'].retry, - default_timeout=self._method_configs['TestIamPermissions'] - .timeout, + default_retry=self._method_configs['TestIamPermissions']. + retry, + default_timeout=self._method_configs['TestIamPermissions']. 
+ timeout, client_info=self._client_info, ) From 0877573c4199af239fc920e4c3d7c9e7939f7765 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 17 Sep 2018 17:10:20 -0400 Subject: [PATCH 0299/1197] Prep pubsub docs for repo split. (#6001) - Move docs from 'docs/pubsub' into 'pubsub/docs' and leave symlink. - Harmonize / DRY 'pubsub/README.rst' and 'pubsub/docs/index.rst'. - Ensure that docs still build from top-level. Toward #5912. --- packages/google-cloud-pubsub/README.rst | 101 ++++++----- .../google-cloud-pubsub/docs/changelog.md | 1 + packages/google-cloud-pubsub/docs/index.rst | 22 +++ .../docs/publisher/api/client.rst | 6 + .../docs/publisher/index.rst | 130 ++++++++++++++ .../docs/subscriber/api/client.rst | 6 + .../docs/subscriber/api/futures.rst | 6 + .../docs/subscriber/api/message.rst | 5 + .../docs/subscriber/api/scheduler.rst | 6 + .../docs/subscriber/index.rst | 164 ++++++++++++++++++ packages/google-cloud-pubsub/docs/types.rst | 5 + 11 files changed, 412 insertions(+), 40 deletions(-) create mode 120000 packages/google-cloud-pubsub/docs/changelog.md create mode 100644 packages/google-cloud-pubsub/docs/index.rst create mode 100644 packages/google-cloud-pubsub/docs/publisher/api/client.rst create mode 100644 packages/google-cloud-pubsub/docs/publisher/index.rst create mode 100644 packages/google-cloud-pubsub/docs/subscriber/api/client.rst create mode 100644 packages/google-cloud-pubsub/docs/subscriber/api/futures.rst create mode 100644 packages/google-cloud-pubsub/docs/subscriber/api/message.rst create mode 100644 packages/google-cloud-pubsub/docs/subscriber/api/scheduler.rst create mode 100644 packages/google-cloud-pubsub/docs/subscriber/index.rst create mode 100644 packages/google-cloud-pubsub/docs/types.rst diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 39699fd47919..92b6fd6924bb 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -1,61 +1,88 @@ 
Python Client for Google Cloud Pub / Sub ======================================== - Python idiomatic client for `Google Cloud Pub / Sub`_ +|pypi| |versions| -.. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/docs +`Google Cloud Pub / Sub`_ is a fully-managed real-time messaging service that +allows you to send and receive messages between independent applications. You +can leverage Cloud Pub/Sub’s flexibility to decouple systems and components +hosted on Google Cloud Platform or elsewhere on the Internet. By building on +the same technology Google uses, Cloud Pub / Sub is designed to provide “at +least once” delivery at low latency with on-demand scalability to 1 million +messages per second (and beyond). -|pypi| |versions| +Publisher applications can send messages to a ``topic`` and other applications +can subscribe to that topic to receive the messages. By decoupling senders and +receivers, Google Cloud Pub/Sub allows developers to communicate between +independently written applications. -- `Documentation`_ +- `Product Documentation`_ +- `Client Library Documentation`_ -.. _Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/ +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg + :target: https://pypi.org/project/google-cloud-pubsub/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg + :target: https://pypi.org/project/google-cloud-pubsub/ +.. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ +.. _Product Documentation: https://cloud.google.com/pubsub/docs +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/ Quick Start ----------- -.. code-block:: console +In order to use this library, you first need to go through the following steps: - $ pip install --upgrade google-cloud-pubsub +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. 
`Enable the Google Cloud Pub / Sub API.`_ +4. `Setup Authentication.`_ -For more information on setting up your Python development environment, -such as installing ``pip`` and ``virtualenv`` on your system, please refer -to `Python Development Environment Setup Guide`_ for Google Cloud Platform. +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Pub / Sub API.: https://cloud.google.com/pubsub +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html -.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup +Installation +~~~~~~~~~~~~ -Authentication --------------- +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. -With ``google-cloud-python`` we try to make authentication as painless as -possible. Check out the `Authentication section`_ in our documentation to -learn more. You may also find the `authentication document`_ shared by all -the ``google-cloud-*`` libraries to be helpful. +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. -.. _Authentication section: https://google-cloud-python.readthedocs.io/en/latest/core/auth.html -.. _authentication document: https://github.com/GoogleCloudPlatform/google-cloud-common/tree/master/authentication +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ -Using the API -------------- -Google `Cloud Pub/Sub`_ (`Pub/Sub API docs`_) is designed to provide reliable, -many-to-many, asynchronous messaging between applications. 
Publisher -applications can send messages to a ``topic`` and other applications can -subscribe to that topic to receive the messages. By decoupling senders and -receivers, Google Cloud Pub/Sub allows developers to communicate between -independently written applications. +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-pubsub + -.. _Cloud Pub/Sub: https://cloud.google.com/pubsub/docs -.. _Pub/Sub API docs: https://cloud.google.com/pubsub/docs/reference/rest/ +Windows +^^^^^^^ -See the ``google-cloud-python`` API `Pub/Sub documentation`_ to learn how to connect -to Cloud Pub/Sub using this Client Library. +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-pubsub -.. _Pub/Sub documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/index.html +Example Usage +~~~~~~~~~~~~~ Publishing ----------- +^^^^^^^^^^ To publish data to Cloud Pub/Sub you must create a topic, and then publish messages to it @@ -79,7 +106,7 @@ To learn more, consult the `publishing documentation`_. Subscribing ------------ +^^^^^^^^^^^ To subscribe to data in Cloud Pub/Sub, you create a subscription based on the topic, and subscribe to that. @@ -115,9 +142,3 @@ use of a callback. To learn more, consult the `subscriber documentation`_. .. _subscriber documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/subscriber/index.html - - -.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg - :target: https://pypi.org/project/google-cloud-pubsub/ -.. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg - :target: https://pypi.org/project/google-cloud-pubsub/ diff --git a/packages/google-cloud-pubsub/docs/changelog.md b/packages/google-cloud-pubsub/docs/changelog.md new file mode 120000 index 000000000000..04c99a55caae --- /dev/null +++ b/packages/google-cloud-pubsub/docs/changelog.md @@ -0,0 +1 @@ +../CHANGELOG.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst new file mode 100644 index 000000000000..f20e6ed74414 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -0,0 +1,22 @@ +.. include:: /../pubsub/README.rst + +API Documentation +----------------- + +.. toctree:: + :maxdepth: 3 + + publisher/index + subscriber/index + types + +Changelog +--------- + +For a list of all ``google-cloud-pubsub`` releases: + +.. toctree:: + :maxdepth: 2 + + changelog + diff --git a/packages/google-cloud-pubsub/docs/publisher/api/client.rst b/packages/google-cloud-pubsub/docs/publisher/api/client.rst new file mode 100644 index 000000000000..47a3aa3d5d7a --- /dev/null +++ b/packages/google-cloud-pubsub/docs/publisher/api/client.rst @@ -0,0 +1,6 @@ +Publisher Client API +==================== + +.. automodule:: google.cloud.pubsub_v1.publisher.client + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst new file mode 100644 index 000000000000..bd1d318b4310 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -0,0 +1,130 @@ +Publishing Messages +=================== + +Publishing messages is handled through the +:class:`~.pubsub_v1.publisher.client.Client` class (aliased as +``google.cloud.pubsub.PublisherClient``). This class provides methods to +create topics, and (most importantly) a +:meth:`~.pubsub_v1.publisher.client.Client.publish` method that publishes +messages to Pub/Sub. 
+ +Instantiating a publishing client is straightforward: + +.. code-block:: python + + from google.cloud import pubsub + publish_client = pubsub.PublisherClient() + + +Publish a Message +----------------- + +To publish a message, use the +:meth:`~.pubsub_v1.publisher.client.Client.publish` method. This method accepts +two positional arguments: the topic to publish to, and the body of the message. +It also accepts arbitrary keyword arguments, which are passed along as +attributes of the message. + +The topic is passed along as a string; all topics have the canonical form of +``projects/{project_name}/topics/{topic_name}``. + +Therefore, a very basic publishing call looks like: + +.. code-block:: python + + topic = 'projects/{project}/topics/{topic}' + publish_client.publish(topic, b'This is my message.') + +.. note:: + + The message data in Pub/Sub is an opaque blob of bytes, and as such, you + *must* send a ``bytes`` object in Python 3 (``str`` object in Python 2). + If you send a text string (``str`` in Python 3, ``unicode`` in Python 2), + the method will raise :exc:`TypeError`. + + The reason it works this way is because there is no reasonable guarantee + that the same language or environment is being used by the subscriber, + and so it is the responsibility of the publisher to properly encode + the payload. + +If you want to include attributes, simply add keyword arguments: + +.. code-block:: python + + topic = 'projects/{project}/topics/{topic}' + publish_client.publish(topic, b'This is my message.', foo='bar') + + +Batching +-------- + +Whenever you publish a message, the publisher will automatically batch the +messages over a small time window to avoid making too many separate requests to +the service. This helps increase throughput. + +.. note:: + + By default, this uses ``threading``, and you will need to be in an + environment with threading enabled. It is possible to provide an + alternative batch class that uses another concurrency strategy. 
+ +The way that this works is that on the first message that you send, a new batch +is created automatically. For every subsequent message, if there is already a +valid batch that is still accepting messages, then that batch is used. When the +batch is created, it begins a countdown that publishes the batch once +sufficient time has elapsed (by default, this is 0.05 seconds). + +If you need different batching settings, simply provide a +:class:`~.pubsub_v1.types.BatchSettings` object when you instantiate the +:class:`~.pubsub_v1.publisher.client.Client`: + +.. code-block:: python + + from google.cloud import pubsub + from google.cloud.pubsub import types + + client = pubsub.PublisherClient( + batch_settings=BatchSettings(max_messages=500), + ) + +Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a +batch can not exceed 10 megabytes. + + +Futures +------- + +Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` will return +a class that conforms to the :class:`~concurrent.futures.Future` interface. +You can use this to ensure that the publish succeeded: + +.. code-block:: python + + # The .result() method will block until the future is complete. + # If there is an error, it will raise an exception. + future = client.publish(topic, b'My awesome message.') + message_id = future.result() + +You can also attach a callback to the future: + +.. code-block:: python + + # Callbacks receive the future as their only argument, as defined in + # the Future interface. + def callback(future): + message_id = future.result() + do_something_with(message_id) + + # The callback is added once you get the future. If you add a callback + # and the future is already done, it will simply be executed immediately. + future = client.publish(topic, b'My awesome message.') + future.add_done_callback(callback) + + +API Reference +------------- + +.. 
toctree:: + :maxdepth: 2 + + api/client diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/client.rst b/packages/google-cloud-pubsub/docs/subscriber/api/client.rst new file mode 100644 index 000000000000..965880c5a640 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/subscriber/api/client.rst @@ -0,0 +1,6 @@ +Subscriber Client API +===================== + +.. automodule:: google.cloud.pubsub_v1.subscriber.client + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/futures.rst b/packages/google-cloud-pubsub/docs/subscriber/api/futures.rst new file mode 100644 index 000000000000..fb0264279cf6 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/subscriber/api/futures.rst @@ -0,0 +1,6 @@ +Futures +======= + +.. automodule:: google.cloud.pubsub_v1.subscriber.futures + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/message.rst b/packages/google-cloud-pubsub/docs/subscriber/api/message.rst new file mode 100644 index 000000000000..cc5b0e37c9b4 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/subscriber/api/message.rst @@ -0,0 +1,5 @@ +Messages +======== + +.. autoclass:: google.cloud.pubsub_v1.subscriber.message.Message + :members: diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/scheduler.rst b/packages/google-cloud-pubsub/docs/subscriber/api/scheduler.rst new file mode 100644 index 000000000000..06e839f219e6 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/subscriber/api/scheduler.rst @@ -0,0 +1,6 @@ +Scheduler +========= + +.. 
automodule:: google.cloud.pubsub_v1.subscriber.scheduler + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst new file mode 100644 index 000000000000..7593f17b711f --- /dev/null +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -0,0 +1,164 @@ +Subscribing to Messages +======================= + +Subscribing to messages is handled through the +:class:`~.pubsub_v1.subscriber.client.Client` class (aliased as +``google.cloud.pubsub.SubscriberClient``). This class provides a +:meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to +attach to subscriptions on existing topics, and (most importantly) a +:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method that +consumes messages from Pub/Sub. + +Instantiating a subscriber client is straightforward: + +.. code-block:: python + + from google.cloud import pubsub + subscriber = pubsub.SubscriberClient() + + +Creating a Subscription +----------------------- + +In Pub/Sub, a **subscription** is a discrete pull of messages from a topic. +If multiple clients pull the same subscription, then messages are split +between them. If multiple clients create a subscription each, then each client +will get every message. + +.. note:: + + Remember that Pub/Sub operates under the principle of "everything at least + once". Even in the case where multiple clients pull the same subscription, + *some* redundancy is likely. + +Creating a subscription requires that you already know what topic you want +to subscribe to, and it must already exist. Once you have that, it is easy: + +.. code-block:: python + + # Substitute {project}, {topic}, and {subscription} with appropriate + # values for your application. 
+ topic_name = 'projects/{project}/topics/{topic}' + sub_name = 'projects/{project}/subscriptions/{subscription}' + subscriber.create_subscription(sub_name, topic_name) + + +Pulling a Subscription +---------------------- + +Once you have created a subscription (or if you already had one), the next +step is to pull data from it. The subscriber client uses the +:meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to start a +background thread to receive messages from Pub/Sub and calls a callback with +each message received. + +.. code-block:: python + + # As before, substitute {project} and {subscription} with appropriate + # values for your application. + future = subscriber.subscribe( + 'projects/{project}/subscriptions/{subscription}', + callback + ) + +This will return a +:class:`~.pubsub_v1.subscriber.futures.StreamingPullFuture`. This future allows +you to control the background thread that is managing the subscription. + + +Subscription Callbacks +---------------------- + +Messages received from a subscription are processed asynchronously through +**callbacks**. + +The basic idea: Define a function that takes one argument; this argument +will be a :class:`~.pubsub_v1.subscriber.message.Message` instance. This +function should do whatever processing is necessary. At the end, the +function should either :meth:`~.pubsub_v1.subscriber.message.Message.ack` +or :meth:`~.pubsub_v1.subscriber.message.Message.nack` the message. + +When you call :meth:`~.pubsub_v1.subscriber.client.Client.subscribe`, you +must pass the callback that will be used. + +Here is an example: + +.. code-block:: python + + # Define the callback. + # Note that the callback is defined *before* the subscription is opened. + def callback(message): + do_something_with(message) # Replace this with your actual logic. + message.ack() + + # Open the subscription, passing the callback. 
+ future = subscriber.subscribe( + 'projects/{project}/subscriptions/{subscription}', + callback + ) + +The :meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method returns +a :class:`~.pubsub_v1.subscriber.futures.StreamingPullFuture`, which is both +the interface to wait on messages (e.g. block the primary thread) and to +address exceptions. + +To block the thread you are in while messages are coming in the stream, +use the :meth:`~.pubsub_v1.subscriber.futures.Future.result` method: + +.. code-block:: python + + future.result() + +.. note: This will block forever assuming no errors or that ``cancel`` is never + called. + +You can also use this for error handling; any exceptions that crop up on a +thread will be set on the future. + +.. code-block:: python + + try: + future.result() + except Exception as ex: + subscription.close() + raise + +Finally, you can use +:meth:`~.pubsub_v1.subscriber.futures.StreamingPullFuture.cancel` to stop +receiving messages. + + +.. code-block:: python + + future.cancel() + + +Explaining Ack +-------------- + +In Pub/Sub, the term **ack** stands for "acknowledge". You should ack a +message when your processing of that message *has completed*. When you ack +a message, you are telling Pub/Sub that you do not need to see it again. + +It might be tempting to ack messages immediately on receipt. While there +are valid use cases for this, in general it is unwise. The reason why: If +there is some error or edge case in your processing logic, and processing +of the message fails, you will have already told Pub/Sub that you successfully +processed the message. By contrast, if you ack only upon completion, then +Pub/Sub will eventually re-deliver the unacknowledged message. + +It is also possible to **nack** a message, which is the opposite. When you +nack, it tells Pub/Sub that you are unable or unwilling to deal with the +message, and that the service should redeliver it. + + +API Reference +------------- + +.. 
toctree:: + :maxdepth: 2 + + api/client + api/message + api/futures + api/scheduler diff --git a/packages/google-cloud-pubsub/docs/types.rst b/packages/google-cloud-pubsub/docs/types.rst new file mode 100644 index 000000000000..87c987571766 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/types.rst @@ -0,0 +1,5 @@ +Pub/Sub Client Types +==================== + +.. automodule:: google.cloud.pubsub_v1.types + :members: From b5eee018b7a0fa65ce2926567f08b9d20c8b999f Mon Sep 17 00:00:00 2001 From: DPE bot Date: Sat, 22 Sep 2018 05:33:22 -0700 Subject: [PATCH 0300/1197] Re-generate library using pubsub/synth.py (#6059) --- .../google/cloud/pubsub_v1/gapic/publisher_client_config.py | 5 +++-- .../google/cloud/pubsub_v1/gapic/subscriber_client_config.py | 5 +++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 38aa0ca0ffbf..129cbeabf23c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -3,11 +3,12 @@ "google.pubsub.v1.Publisher": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "http_get": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [], "one_plus_delivery": [ "ABORTED", "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", "RESOURCE_EXHAUSTED", "UNAVAILABLE", "UNKNOWN" - ], - "non_idempotent": [] + ] }, "retry_params": { "default": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 7857fbdab8e1..faa26736e62e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -3,11 +3,12 @@ "google.pubsub.v1.Subscriber": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], "pull": [ "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", "RESOURCE_EXHAUSTED", "UNAVAILABLE" - ] + ], + "http_get": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] }, "retry_params": { "default": { From d75b8ad885a7dfcd366fa3d84d60096690c5158f Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 28 Sep 2018 13:44:06 -0400 Subject: [PATCH 0301/1197] Bump minimum 'api_core' version to '1.4.1'. (#6134) Closes #6128. --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ee5677565a12..f34758421dab 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 4 - Beta' dependencies = [ - 'google-api-core[grpc] >= 1.1.0, < 2.0.0dev', + 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', 'enum34; python_version < "3.4"', ] From 5b401029b0d7da737e6798054e09cf44bf6a4cac Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 10 Oct 2018 11:04:44 -0700 Subject: [PATCH 0302/1197] Use new Nox (#6175) --- .../{nox.py => noxfile.py} | 41 ++++--------------- 1 file changed, 7 insertions(+), 34 deletions(-) rename packages/google-cloud-pubsub/{nox.py => noxfile.py} (75%) diff --git a/packages/google-cloud-pubsub/nox.py b/packages/google-cloud-pubsub/noxfile.py similarity index 75% rename from packages/google-cloud-pubsub/nox.py rename to packages/google-cloud-pubsub/noxfile.py index fc887cadfdc8..a061653d5407 100644 --- a/packages/google-cloud-pubsub/nox.py +++ 
b/packages/google-cloud-pubsub/noxfile.py @@ -28,11 +28,6 @@ @nox.session def default(session): """Default unit test session. - - This is intended to be run **without** an interpreter set, so - that the current ``python`` (on the ``PATH``) or the version of - Python corresponding to the ``nox`` binary the ``PATH`` can - run the tests. """ # Install all test dependencies, then install local packages in-place. session.install('mock', 'pytest', 'pytest-cov') @@ -54,35 +49,20 @@ def default(session): ) -@nox.session -@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) -def unit(session, py): +@nox.session(python=['2.7', '3.5', '3.6', '3.7']) +def unit(session): """Run the unit test suite.""" - - # Run unit tests against all supported versions of Python. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'unit-' + py - default(session) -@nox.session -@nox.parametrize('py', ['2.7', '3.6']) -def system(session, py): +@nox.session(python=['2.7', '3.6']) +def system(session): """Run the system test suite.""" # Sanity check: Only run system tests if the environment variable is set. if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): session.skip('Credentials must be set via environment variable.') - # Run the system tests against latest Python 2 and Python 3 only. - session.interpreter = 'python{}'.format(py) - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'sys-' + py - # Use pre-release gRPC for system tests. session.install('--pre', 'grpcio') @@ -102,40 +82,33 @@ def system(session, py): ) -@nox.session +@nox.session(python='3.6') def lint(session): """Run linters. Returns a failure if the linters find linting errors or sufficiently serious code quality issues. 
""" - session.interpreter = 'python3.6' session.install('flake8', *LOCAL_DEPS) session.install('.') session.run('flake8', 'google', 'tests') -@nox.session +@nox.session(python='3.6') def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.interpreter = 'python3.6' - - # Set the virtualenv dirname. - session.virtualenv_dirname = 'setup' - session.install('docutils', 'Pygments') session.run( 'python', 'setup.py', 'check', '--restructuredtext', '--strict') -@nox.session +@nox.session(python='3.6') def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. """ - session.interpreter = 'python3.6' session.install('coverage', 'pytest-cov') session.run('coverage', 'report', '--show-missing', '--fail-under=100') session.run('coverage', 'erase') From f0e469618067780507729adc63bf27ae23335edb Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 12 Oct 2018 18:39:45 -0400 Subject: [PATCH 0303/1197] Update subscriber example in README to current patterns. (#6194) Closes #6189. --- packages/google-cloud-pubsub/README.rst | 29 +++++++++++++++---------- 1 file changed, 17 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 92b6fd6924bb..c2d14aba6c5a 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -90,9 +90,9 @@ messages to it .. code-block:: python import os - from google.cloud import pubsub + from google.cloud import pubsub_v1 - publisher = pubsub.PublisherClient() + publisher = pubsub_v1.PublisherClient() topic_name = 'projects/{project_id}/topics/{topic}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. 
@@ -109,14 +109,14 @@ Subscribing ^^^^^^^^^^^ To subscribe to data in Cloud Pub/Sub, you create a subscription based on -the topic, and subscribe to that. +the topic, and subscribe to that, passing a callback function. .. code-block:: python import os - from google.cloud import pubsub + from google.cloud import pubsub_v1 - subscriber = pubsub.SubscriberClient() + subscriber = pubsub_v1.SubscriberClient() topic_name = 'projects/{project_id}/topics/{topic}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. @@ -127,17 +127,22 @@ the topic, and subscribe to that. ) subscriber.create_subscription( name=subscription_name, topic=topic_name) - subscription = subscriber.subscribe(subscription_name) - -The subscription is opened asychronously, and messages are processed by -use of a callback. - -.. code-block:: python def callback(message): print(message.data) message.ack() - subscription.open(callback) + + future = subscriber.subscribe(subscription_name, callback) + +The future returned by the call to ``subscriber.subscribe`` can be used to +block the current thread until a given condition obtains: + +.. code-block:: python + + try: + future.result() + except KeyboardInterrupt: + future.cancel() To learn more, consult the `subscriber documentation`_. 
From 2473250e4180b12718eec8cbbbd1f5a1cfe94ef3 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 16 Oct 2018 09:06:47 -0700 Subject: [PATCH 0304/1197] Move bidi to api-core (#6211) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 597 ---------------- .../_protocol/streaming_pull_manager.py | 2 +- .../unit/pubsub_v1/subscriber/test_bidi.py | 650 ------------------ .../subscriber/test_streaming_pull_manager.py | 2 +- 4 files changed, 2 insertions(+), 1249 deletions(-) delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py deleted file mode 100644 index 7c995c57652e..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ /dev/null @@ -1,597 +0,0 @@ -# Copyright 2017, Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Bi-directional streaming RPC helpers.""" - -import logging -import threading - -from six.moves import queue - -from google.api_core import exceptions - -_LOGGER = logging.getLogger(__name__) -_BIDIRECTIONAL_CONSUMER_NAME = 'Thread-ConsumeBidirectionalStream' - - -class _RequestQueueGenerator(object): - """A helper for sending requests to a gRPC stream from a Queue. 
- - This generator takes requests off a given queue and yields them to gRPC. - - This helper is useful when you have an indeterminate, indefinite, or - otherwise open-ended set of requests to send through a request-streaming - (or bidirectional) RPC. - - The reason this is necessary is because gRPC takes an iterator as the - request for request-streaming RPCs. gRPC consumes this iterator in another - thread to allow it to block while generating requests for the stream. - However, if the generator blocks indefinitely gRPC will not be able to - clean up the thread as it'll be blocked on `next(iterator)` and not be able - to check the channel status to stop iterating. This helper mitigates that - by waiting on the queue with a timeout and checking the RPC state before - yielding. - - Finally, it allows for retrying without swapping queues because if it does - pull an item off the queue when the RPC is inactive, it'll immediately put - it back and then exit. This is necessary because yielding the item in this - case will cause gRPC to discard it. In practice, this means that the order - of messages is not guaranteed. If such a thing is necessary it would be - easy to use a priority queue. - - Example:: - - requests = request_queue_generator(q) - call = stub.StreamingRequest(iter(requests)) - requests.call = call - - for response in call: - print(response) - q.put(...) - - Note that it is possible to accomplish this behavior without "spinning" - (using a queue timeout). One possible way would be to use more threads to - multiplex the grpc end event with the queue, another possible way is to - use selectors and a custom event/queue object. Both of these approaches - are significant from an engineering perspective for small benefit - the - CPU consumed by spinning is pretty minuscule. - - Args: - queue (queue.Queue): The request queue. - period (float): The number of seconds to wait for items from the queue - before checking if the RPC is cancelled. 
In practice, this - determines the maximum amount of time the request consumption - thread will live after the RPC is cancelled. - initial_request (Union[protobuf.Message, - Callable[None, protobuf.Message]]): The initial request to - yield. This is done independently of the request queue to allow fo - easily restarting streams that require some initial configuration - request. - """ - def __init__(self, queue, period=1, initial_request=None): - self._queue = queue - self._period = period - self._initial_request = initial_request - self.call = None - - def _is_active(self): - # Note: there is a possibility that this starts *before* the call - # property is set. So we have to check if self.call is set before - # seeing if it's active. - if self.call is not None and not self.call.is_active(): - return False - else: - return True - - def __iter__(self): - if self._initial_request is not None: - if callable(self._initial_request): - yield self._initial_request() - else: - yield self._initial_request - - while True: - try: - item = self._queue.get(timeout=self._period) - except queue.Empty: - if not self._is_active(): - _LOGGER.debug( - 'Empty queue and inactive call, exiting request ' - 'generator.') - return - else: - # call is still active, keep waiting for queue items. - continue - - # The consumer explicitly sent "None", indicating that the request - # should end. - if item is None: - _LOGGER.debug('Cleanly exiting request generator.') - return - - if not self._is_active(): - # We have an item, but the call is closed. We should put the - # item back on the queue so that the next call can consume it. - self._queue.put(item) - _LOGGER.debug( - 'Inactive call, replacing item on queue and exiting ' - 'request generator.') - return - - yield item - - -class BidiRpc(object): - """A helper for consuming a bi-directional streaming RPC. 
- - This maps gRPC's built-in interface which uses a request iterator and a - response iterator into a socket-like :func:`send` and :func:`recv`. This - is a more useful pattern for long-running or asymmetric streams (streams - where there is not a direct correlation between the requests and - responses). - - Example:: - - initial_request = example_pb2.StreamingRpcRequest( - setting='example') - rpc = BidiRpc(stub.StreamingRpc, initial_request=initial_request) - - rpc.open() - - while rpc.is_active(): - print(rpc.recv()) - rpc.send(example_pb2.StreamingRpcRequest( - data='example')) - - This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`. - - Args: - start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to - start the RPC. - initial_request (Union[protobuf.Message, - Callable[None, protobuf.Message]]): The initial request to - yield. This is useful if an initial request is needed to start the - stream. - """ - def __init__(self, start_rpc, initial_request=None): - self._start_rpc = start_rpc - self._initial_request = initial_request - self._request_queue = queue.Queue() - self._request_generator = None - self._is_active = False - self._callbacks = [] - self.call = None - - def add_done_callback(self, callback): - """Adds a callback that will be called when the RPC terminates. - - This occurs when the RPC errors or is successfully terminated. - - Args: - callback (Callable[[grpc.Future], None]): The callback to execute. - It will be provided with the same gRPC future as the underlying - stream which will also be a :class:`grpc.Call`. 
- """ - self._callbacks.append(callback) - - def _on_call_done(self, future): - for callback in self._callbacks: - callback(future) - - def open(self): - """Opens the stream.""" - if self.is_active: - raise ValueError('Can not open an already open stream.') - - request_generator = _RequestQueueGenerator( - self._request_queue, initial_request=self._initial_request) - call = self._start_rpc(iter(request_generator)) - - request_generator.call = call - - # TODO: api_core should expose the future interface for wrapped - # callables as well. - if hasattr(call, '_wrapped'): # pragma: NO COVER - call._wrapped.add_done_callback(self._on_call_done) - else: - call.add_done_callback(self._on_call_done) - - self._request_generator = request_generator - self.call = call - - def close(self): - """Closes the stream.""" - if self.call is None: - return - - self._request_queue.put(None) - self.call.cancel() - self._request_generator = None - # Don't set self.call to None. Keep it around so that send/recv can - # raise the error. - - def send(self, request): - """Queue a message to be sent on the stream. - - Send is non-blocking. - - If the underlying RPC has been closed, this will raise. - - Args: - request (protobuf.Message): The request to send. - """ - if self.call is None: - raise ValueError( - 'Can not send() on an RPC that has never been open()ed.') - - # Don't use self.is_active(), as ResumableBidiRpc will overload it - # to mean something semantically different. - if self.call.is_active(): - self._request_queue.put(request) - else: - # calling next should cause the call to raise. - next(self.call) - - def recv(self): - """Wait for a message to be returned from the stream. - - Recv is blocking. - - If the underlying RPC has been closed, this will raise. - - Returns: - protobuf.Message: The received message. 
- """ - if self.call is None: - raise ValueError( - 'Can not recv() on an RPC that has never been open()ed.') - - return next(self.call) - - @property - def is_active(self): - """bool: True if this stream is currently open and active.""" - return self.call is not None and self.call.is_active() - - @property - def pending_requests(self): - """int: Returns an estimate of the number of queued requests.""" - return self._request_queue.qsize() - - -class ResumableBidiRpc(BidiRpc): - """A :class:`BidiRpc` that can automatically resume the stream on errors. - - It uses the ``should_recover`` arg to determine if it should re-establish - the stream on error. - - Example:: - - def should_recover(exc): - return ( - isinstance(exc, grpc.RpcError) and - exc.code() == grpc.StatusCode.UNVAILABLE) - - initial_request = example_pb2.StreamingRpcRequest( - setting='example') - - rpc = ResumeableBidiRpc( - stub.StreamingRpc, - initial_request=initial_request, - should_recover=should_recover) - - rpc.open() - - while rpc.is_active(): - print(rpc.recv()) - rpc.send(example_pb2.StreamingRpcRequest( - data='example')) - - Args: - start_rpc (grpc.StreamStreamMultiCallable): The gRPC method used to - start the RPC. - initial_request (Union[protobuf.Message, - Callable[None, protobuf.Message]]): The initial request to - yield. This is useful if an initial request is needed to start the - stream. - should_recover (Callable[[Exception], bool]): A function that returns - True if the stream should be recovered. This will be called - whenever an error is encountered on the stream. 
- """ - def __init__(self, start_rpc, should_recover, initial_request=None): - super(ResumableBidiRpc, self).__init__(start_rpc, initial_request) - self._should_recover = should_recover - self._operational_lock = threading.RLock() - self._finalized = False - self._finalize_lock = threading.Lock() - - def _finalize(self, result): - with self._finalize_lock: - if self._finalized: - return - - for callback in self._callbacks: - callback(result) - - self._finalized = True - - def _on_call_done(self, future): - # Unlike the base class, we only execute the callbacks on a terminal - # error, not for errors that we can recover from. Note that grpc's - # "future" here is also a grpc.RpcError. - with self._operational_lock: - if not self._should_recover(future): - self._finalize(future) - else: - _LOGGER.debug('Re-opening stream from gRPC callback.') - self._reopen() - - def _reopen(self): - with self._operational_lock: - # Another thread already managed to re-open this stream. - if self.call is not None and self.call.is_active(): - _LOGGER.debug('Stream was already re-established.') - return - - self.call = None - # Request generator should exit cleanly since the RPC its bound to - # has exited. - self.request_generator = None - - # Note: we do not currently do any sort of backoff here. The - # assumption is that re-establishing the stream under normal - # circumstances will happen in intervals greater than 60s. - # However, it is possible in a degenerative case that the server - # closes the stream rapidly which would lead to thrashing here, - # but hopefully in those cases the server would return a non- - # retryable error. - - try: - self.open() - # If re-opening or re-calling the method fails for any reason, - # consider it a terminal error and finalize the stream. 
- except Exception as exc: - _LOGGER.debug('Failed to re-open stream due to %s', exc) - self._finalize(exc) - raise - - _LOGGER.info('Re-established stream') - - def _recoverable(self, method, *args, **kwargs): - """Wraps a method to recover the stream and retry on error. - - If a retryable error occurs while making the call, then the stream will - be re-opened and the method will be retried. This happens indefinitely - so long as the error is a retryable one. If an error occurs while - re-opening the stream, then this method will raise immediately and - trigger finalization of this object. - - Args: - method (Callable[..., Any]): The method to call. - args: The args to pass to the method. - kwargs: The kwargs to pass to the method. - """ - while True: - try: - return method(*args, **kwargs) - - except Exception as exc: - with self._operational_lock: - _LOGGER.debug( - 'Call to retryable %r caused %s.', method, exc) - - if not self._should_recover(exc): - self.close() - _LOGGER.debug( - 'Not retrying %r due to %s.', method, exc) - self._finalize(exc) - raise exc - - _LOGGER.debug( - 'Re-opening stream from retryable %r.', method) - self._reopen() - - def _send(self, request): - # Grab a reference to the RPC call. Because another thread (notably - # the gRPC error thread) can modify self.call (by invoking reopen), - # we should ensure our reference can not change underneath us. - # If self.call is modified (such as replaced with a new RPC call) then - # this will use the "old" RPC, which should result in the same - # exception passed into gRPC's error handler being raised here, which - # will be handled by the usual error handling in retryable. - with self._operational_lock: - call = self.call - - if call is None: - raise ValueError( - 'Can not send() on an RPC that has never been open()ed.') - - # Don't use self.is_active(), as ResumableBidiRpc will overload it - # to mean something semantically different. 
- if call.is_active(): - self._request_queue.put(request) - pass - else: - # calling next should cause the call to raise. - next(call) - - def send(self, request): - return self._recoverable(self._send, request) - - def _recv(self): - with self._operational_lock: - call = self.call - - if call is None: - raise ValueError( - 'Can not recv() on an RPC that has never been open()ed.') - - return next(call) - - def recv(self): - return self._recoverable(self._recv) - - @property - def is_active(self): - """bool: True if this stream is currently open and active.""" - # Use the operational lock. It's entirely possible for something - # to check the active state *while* the RPC is being retried. - # Also, use finalized to track the actual terminal state here. - # This is because if the stream is re-established by the gRPC thread - # it's technically possible to check this between when gRPC marks the - # RPC as inactive and when gRPC executes our callback that re-opens - # the stream. - with self._operational_lock: - return self.call is not None and not self._finalized - - -class BackgroundConsumer(object): - """A bi-directional stream consumer that runs in a separate thread. - - This maps the consumption of a stream into a callback-based model. It also - provides :func:`pause` and :func:`resume` to allow for flow-control. - - Example:: - - def should_recover(exc): - return ( - isinstance(exc, grpc.RpcError) and - exc.code() == grpc.StatusCode.UNVAILABLE) - - initial_request = example_pb2.StreamingRpcRequest( - setting='example') - - rpc = ResumeableBidiRpc( - stub.StreamingRpc, - initial_request=initial_request, - should_recover=should_recover) - - def on_response(response): - print(response) - - consumer = BackgroundConsumer(rpc, on_response) - consume.start() - - Note that error handling *must* be done by using the provided - ``bidi_rpc``'s ``add_done_callback``. This helper will automatically exit - whenever the RPC itself exits and will not provide any error details. 
- - Args: - bidi_rpc (BidiRpc): The RPC to consume. Should not have been - ``open()``ed yet. - on_response (Callable[[protobuf.Message], None]): The callback to - be called for every response on the stream. - """ - def __init__(self, bidi_rpc, on_response): - self._bidi_rpc = bidi_rpc - self._on_response = on_response - self._paused = False - self._wake = threading.Condition() - self._thread = None - self._operational_lock = threading.Lock() - - def _on_call_done(self, future): - # Resume the thread if it's paused, this prevents blocking forever - # when the RPC has terminated. - self.resume() - - def _thread_main(self): - try: - self._bidi_rpc.add_done_callback(self._on_call_done) - self._bidi_rpc.open() - - while self._bidi_rpc.is_active: - # Do not allow the paused status to change at all during this - # section. There is a condition where we could be resumed - # between checking if we are paused and calling wake.wait(), - # which means that we will miss the notification to wake up - # (oops!) and wait for a notification that will never come. - # Keeping the lock throughout avoids that. - # In the future, we could use `Condition.wait_for` if we drop - # Python 2.7. - with self._wake: - if self._paused: - _LOGGER.debug('paused, waiting for waking.') - self._wake.wait() - _LOGGER.debug('woken.') - - _LOGGER.debug('waiting for recv.') - response = self._bidi_rpc.recv() - _LOGGER.debug('recved response.') - self._on_response(response) - - except exceptions.GoogleAPICallError as exc: - _LOGGER.debug( - '%s caught error %s and will exit. 
Generally this is due to ' - 'the RPC itself being cancelled and the error will be ' - 'surfaced to the calling code.', - _BIDIRECTIONAL_CONSUMER_NAME, exc, exc_info=True) - - except Exception as exc: - _LOGGER.exception( - '%s caught unexpected exception %s and will exit.', - _BIDIRECTIONAL_CONSUMER_NAME, exc) - - else: - _LOGGER.error( - 'The bidirectional RPC exited.') - - _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) - - def start(self): - """Start the background thread and begin consuming the thread.""" - with self._operational_lock: - thread = threading.Thread( - name=_BIDIRECTIONAL_CONSUMER_NAME, - target=self._thread_main) - thread.daemon = True - thread.start() - self._thread = thread - _LOGGER.debug('Started helper thread %s', thread.name) - - def stop(self): - """Stop consuming the stream and shutdown the background thread.""" - with self._operational_lock: - self._bidi_rpc.close() - - if self._thread is not None: - # Resume the thread to wake it up in case it is sleeping. - self.resume() - self._thread.join() - - self._thread = None - - @property - def is_active(self): - """bool: True if the background thread is active.""" - return self._thread is not None and self._thread.is_alive() - - def pause(self): - """Pauses the response stream. - - This does *not* pause the request stream. 
- """ - with self._wake: - self._paused = True - - def resume(self): - """Resumes the response stream.""" - with self._wake: - self._paused = False - self._wake.notifyAll() - - @property - def is_paused(self): - """bool: True if the response stream is paused.""" - return self._paused diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 17d1a2cad166..fdc868b4fa3a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -22,9 +22,9 @@ import grpc import six +from google.api_core import bidi from google.api_core import exceptions from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import histogram diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py deleted file mode 100644 index 058cd53c29cf..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ /dev/null @@ -1,650 +0,0 @@ -# Copyright 2018, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -import logging -import threading - -import grpc -import mock -import pytest -from six.moves import queue - -from google.api_core import exceptions -from google.cloud.pubsub_v1.subscriber._protocol import bidi - - -class Test_RequestQueueGenerator(object): - - def test_bounded_consume(self): - call = mock.create_autospec(grpc.Call, instance=True) - call.is_active.return_value = True - - def queue_generator(rpc): - yield mock.sentinel.A - yield queue.Empty() - yield mock.sentinel.B - rpc.is_active.return_value = False - yield mock.sentinel.C - - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue_generator(call) - - generator = bidi._RequestQueueGenerator(q) - generator.call = call - - items = list(generator) - - assert items == [mock.sentinel.A, mock.sentinel.B] - - def test_yield_initial_and_exit(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue.Empty() - call = mock.create_autospec(grpc.Call, instance=True) - call.is_active.return_value = False - - generator = bidi._RequestQueueGenerator( - q, initial_request=mock.sentinel.A) - generator.call = call - - items = list(generator) - - assert items == [mock.sentinel.A] - - def test_yield_initial_callable_and_exit(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue.Empty() - call = mock.create_autospec(grpc.Call, instance=True) - call.is_active.return_value = False - - generator = bidi._RequestQueueGenerator( - q, initial_request=lambda: mock.sentinel.A) - generator.call = call - - items = list(generator) - - assert items == [mock.sentinel.A] - - def test_exit_when_inactive_with_item(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = [mock.sentinel.A, queue.Empty()] - call = mock.create_autospec(grpc.Call, instance=True) - call.is_active.return_value = False - - generator = 
bidi._RequestQueueGenerator(q) - generator.call = call - - items = list(generator) - - assert items == [] - # Make sure it put the item back. - q.put.assert_called_once_with(mock.sentinel.A) - - def test_exit_when_inactive_empty(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = queue.Empty() - call = mock.create_autospec(grpc.Call, instance=True) - call.is_active.return_value = False - - generator = bidi._RequestQueueGenerator(q) - generator.call = call - - items = list(generator) - - assert items == [] - - def test_exit_with_stop(self): - q = mock.create_autospec(queue.Queue, instance=True) - q.get.side_effect = [None, queue.Empty()] - call = mock.create_autospec(grpc.Call, instance=True) - call.is_active.return_value = True - - generator = bidi._RequestQueueGenerator(q) - generator.call = call - - items = list(generator) - - assert items == [] - - -class _CallAndFuture(grpc.Call, grpc.Future): - pass - - -def make_rpc(): - """Makes a mock RPC used to test Bidi classes.""" - call = mock.create_autospec(_CallAndFuture, instance=True) - rpc = mock.create_autospec(grpc.StreamStreamMultiCallable, instance=True) - - def rpc_side_effect(request): - call.is_active.return_value = True - call.request = request - return call - - rpc.side_effect = rpc_side_effect - - def cancel_side_effect(): - call.is_active.return_value = False - - call.cancel.side_effect = cancel_side_effect - - return rpc, call - - -class ClosedCall(object): - # NOTE: This is needed because defining `.next` on an **instance** - # rather than the **class** will not be iterable in Python 2. - # This is problematic since a `Mock` just sets members. 
- - def __init__(self, exception): - self.exception = exception - - def __next__(self): - raise self.exception - - next = __next__ # Python 2 - - def is_active(self): - return False - - -class TestBidiRpc(object): - def test_initial_state(self): - bidi_rpc = bidi.BidiRpc(None) - - assert bidi_rpc.is_active is False - - def test_done_callbacks(self): - bidi_rpc = bidi.BidiRpc(None) - callback = mock.Mock(spec=['__call__']) - - bidi_rpc.add_done_callback(callback) - bidi_rpc._on_call_done(mock.sentinel.future) - - callback.assert_called_once_with(mock.sentinel.future) - - def test_open(self): - rpc, call = make_rpc() - bidi_rpc = bidi.BidiRpc(rpc) - - bidi_rpc.open() - - assert bidi_rpc.call == call - assert bidi_rpc.is_active - call.add_done_callback.assert_called_once_with(bidi_rpc._on_call_done) - - def test_open_error_already_open(self): - rpc, _ = make_rpc() - bidi_rpc = bidi.BidiRpc(rpc) - - bidi_rpc.open() - - with pytest.raises(ValueError): - bidi_rpc.open() - - def test_close(self): - rpc, call = make_rpc() - bidi_rpc = bidi.BidiRpc(rpc) - bidi_rpc.open() - - bidi_rpc.close() - - call.cancel.assert_called_once() - assert bidi_rpc.call == call - assert bidi_rpc.is_active is False - # ensure the request queue was signaled to stop. 
- assert bidi_rpc.pending_requests == 1 - assert bidi_rpc._request_queue.get() is None - - def test_close_no_rpc(self): - bidi_rpc = bidi.BidiRpc(None) - bidi_rpc.close() - - def test_send(self): - rpc, call = make_rpc() - bidi_rpc = bidi.BidiRpc(rpc) - bidi_rpc.open() - - bidi_rpc.send(mock.sentinel.request) - - assert bidi_rpc.pending_requests == 1 - assert bidi_rpc._request_queue.get() is mock.sentinel.request - - def test_send_not_open(self): - rpc, call = make_rpc() - bidi_rpc = bidi.BidiRpc(rpc) - - with pytest.raises(ValueError): - bidi_rpc.send(mock.sentinel.request) - - def test_send_dead_rpc(self): - error = ValueError() - bidi_rpc = bidi.BidiRpc(None) - bidi_rpc.call = ClosedCall(error) - - with pytest.raises(ValueError) as exc_info: - bidi_rpc.send(mock.sentinel.request) - - assert exc_info.value == error - - def test_recv(self): - bidi_rpc = bidi.BidiRpc(None) - bidi_rpc.call = iter([mock.sentinel.response]) - - response = bidi_rpc.recv() - - assert response == mock.sentinel.response - - def test_recv_not_open(self): - rpc, call = make_rpc() - bidi_rpc = bidi.BidiRpc(rpc) - - with pytest.raises(ValueError): - bidi_rpc.recv() - - -class CallStub(object): - def __init__(self, values, active=True): - self.values = iter(values) - self._is_active = active - self.cancelled = False - - def __next__(self): - item = next(self.values) - if isinstance(item, Exception): - self._is_active = False - raise item - return item - - next = __next__ # Python 2 - - def is_active(self): - return self._is_active - - def add_done_callback(self, callback): - pass - - def cancel(self): - self.cancelled = True - - -class TestResumableBidiRpc(object): - def test_initial_state(self): - bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: True) - - assert bidi_rpc.is_active is False - - def test_done_callbacks_recoverable(self): - start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, instance=True) - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, lambda _: True) - callback = 
mock.Mock(spec=['__call__']) - - bidi_rpc.add_done_callback(callback) - bidi_rpc._on_call_done(mock.sentinel.future) - - callback.assert_not_called() - start_rpc.assert_called_once() - assert bidi_rpc.is_active - - def test_done_callbacks_non_recoverable(self): - bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) - callback = mock.Mock(spec=['__call__']) - - bidi_rpc.add_done_callback(callback) - bidi_rpc._on_call_done(mock.sentinel.future) - - callback.assert_called_once_with(mock.sentinel.future) - - def test_send_recover(self): - error = ValueError() - call_1 = CallStub([error], active=False) - call_2 = CallStub([]) - start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, - instance=True, - side_effect=[call_1, call_2]) - should_recover = mock.Mock(spec=['__call__'], return_value=True) - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) - - bidi_rpc.open() - - bidi_rpc.send(mock.sentinel.request) - - assert bidi_rpc.pending_requests == 1 - assert bidi_rpc._request_queue.get() is mock.sentinel.request - - should_recover.assert_called_once_with(error) - assert bidi_rpc.call == call_2 - assert bidi_rpc.is_active is True - - def test_send_failure(self): - error = ValueError() - call = CallStub([error], active=False) - start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, - instance=True, - return_value=call) - should_recover = mock.Mock(spec=['__call__'], return_value=False) - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) - - bidi_rpc.open() - - with pytest.raises(ValueError) as exc_info: - bidi_rpc.send(mock.sentinel.request) - - assert exc_info.value == error - should_recover.assert_called_once_with(error) - assert bidi_rpc.call == call - assert bidi_rpc.is_active is False - assert call.cancelled is True - assert bidi_rpc.pending_requests == 1 - assert bidi_rpc._request_queue.get() is None - - def test_recv_recover(self): - error = ValueError() - call_1 = CallStub([1, error]) - call_2 = CallStub([2, 3]) - 
start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, - instance=True, - side_effect=[call_1, call_2]) - should_recover = mock.Mock(spec=['__call__'], return_value=True) - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) - - bidi_rpc.open() - - values = [] - for n in range(3): - values.append(bidi_rpc.recv()) - - assert values == [1, 2, 3] - should_recover.assert_called_once_with(error) - assert bidi_rpc.call == call_2 - assert bidi_rpc.is_active is True - - def test_recv_recover_already_recovered(self): - call_1 = CallStub([]) - call_2 = CallStub([]) - start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, - instance=True, - side_effect=[call_1, call_2]) - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, lambda _: True) - - bidi_rpc.open() - - bidi_rpc._reopen() - - assert bidi_rpc.call is call_1 - assert bidi_rpc.is_active is True - - def test_recv_failure(self): - error = ValueError() - call = CallStub([error]) - start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, - instance=True, - return_value=call) - should_recover = mock.Mock(spec=['__call__'], return_value=False) - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) - - bidi_rpc.open() - - with pytest.raises(ValueError) as exc_info: - bidi_rpc.recv() - - assert exc_info.value == error - should_recover.assert_called_once_with(error) - assert bidi_rpc.call == call - assert bidi_rpc.is_active is False - assert call.cancelled is True - - def test_reopen_failure_on_rpc_restart(self): - error1 = ValueError('1') - error2 = ValueError('2') - call = CallStub([error1]) - # Invoking start RPC a second time will trigger an error. 
- start_rpc = mock.create_autospec( - grpc.StreamStreamMultiCallable, - instance=True, - side_effect=[call, error2]) - should_recover = mock.Mock(spec=['__call__'], return_value=True) - callback = mock.Mock(spec=['__call__']) - - bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) - bidi_rpc.add_done_callback(callback) - - bidi_rpc.open() - - with pytest.raises(ValueError) as exc_info: - bidi_rpc.recv() - - assert exc_info.value == error2 - should_recover.assert_called_once_with(error1) - assert bidi_rpc.call is None - assert bidi_rpc.is_active is False - callback.assert_called_once_with(error2) - - def test_send_not_open(self): - bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) - - with pytest.raises(ValueError): - bidi_rpc.send(mock.sentinel.request) - - def test_recv_not_open(self): - bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) - - with pytest.raises(ValueError): - bidi_rpc.recv() - - def test_finalize_idempotent(self): - error1 = ValueError('1') - error2 = ValueError('2') - callback = mock.Mock(spec=['__call__']) - should_recover = mock.Mock(spec=['__call__'], return_value=False) - - bidi_rpc = bidi.ResumableBidiRpc( - mock.sentinel.start_rpc, should_recover) - - bidi_rpc.add_done_callback(callback) - - bidi_rpc._on_call_done(error1) - bidi_rpc._on_call_done(error2) - - callback.assert_called_once_with(error1) - - -class TestBackgroundConsumer(object): - def test_consume_once_then_exit(self): - bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - bidi_rpc.is_active = True - bidi_rpc.recv.side_effect = [mock.sentinel.response_1] - recved = threading.Event() - - def on_response(response): - assert response == mock.sentinel.response_1 - bidi_rpc.is_active = False - recved.set() - - consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) - - consumer.start() - - recved.wait() - - bidi_rpc.recv.assert_called_once() - assert bidi_rpc.is_active is False - - consumer.stop() - - bidi_rpc.close.assert_called_once() - assert 
consumer.is_active is False - - def test_pause_resume_and_close(self): - # This test is relatively complex. It attempts to start the consumer, - # consume one item, pause the consumer, check the state of the world, - # then resume the consumer. Doing this in a deterministic fashion - # requires a bit more mocking and patching than usual. - - bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - bidi_rpc.is_active = True - - def close_side_effect(): - bidi_rpc.is_active = False - - bidi_rpc.close.side_effect = close_side_effect - - # These are used to coordinate the two threads to ensure deterministic - # execution. - should_continue = threading.Event() - responses_and_events = { - mock.sentinel.response_1: threading.Event(), - mock.sentinel.response_2: threading.Event() - } - bidi_rpc.recv.side_effect = [ - mock.sentinel.response_1, mock.sentinel.response_2] - - recved_responses = [] - consumer = None - - def on_response(response): - if response == mock.sentinel.response_1: - consumer.pause() - - recved_responses.append(response) - responses_and_events[response].set() - should_continue.wait() - - consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) - - consumer.start() - - # Wait for the first response to be recved. - responses_and_events[mock.sentinel.response_1].wait() - - # Ensure only one item has been recved and that the consumer is paused. - assert recved_responses == [mock.sentinel.response_1] - assert consumer.is_paused is True - assert consumer.is_active is True - - # Unpause the consumer, wait for the second item, then close the - # consumer. 
- should_continue.set() - consumer.resume() - - responses_and_events[mock.sentinel.response_2].wait() - - assert recved_responses == [ - mock.sentinel.response_1, mock.sentinel.response_2] - - consumer.stop() - - assert consumer.is_active is False - - def test_wake_on_error(self): - should_continue = threading.Event() - - bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - bidi_rpc.is_active = True - bidi_rpc.add_done_callback.side_effect = ( - lambda _: should_continue.set()) - - consumer = bidi.BackgroundConsumer(bidi_rpc, mock.sentinel.on_response) - - # Start the consumer paused, which should immediately put it into wait - # state. - consumer.pause() - consumer.start() - - # Wait for add_done_callback to be called - should_continue.wait() - bidi_rpc.add_done_callback.assert_called_once_with( - consumer._on_call_done) - - # The consumer should now be blocked on waiting to be unpaused. - assert consumer.is_active - assert consumer.is_paused - - # Trigger the done callback, it should unpause the consumer and cause - # it to exit. - bidi_rpc.is_active = False - consumer._on_call_done(bidi_rpc) - - # It may take a few cycles for the thread to exit. - while consumer.is_active: - pass - - def test_consumer_expected_error(self, caplog): - caplog.set_level(logging.DEBUG) - - bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - bidi_rpc.is_active = True - bidi_rpc.recv.side_effect = exceptions.ServiceUnavailable('Gone away') - - on_response = mock.Mock(spec=['__call__']) - - consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) - - consumer.start() - - # Wait for the consumer's thread to exit. 
- while consumer.is_active: - pass - - on_response.assert_not_called() - bidi_rpc.recv.assert_called_once() - assert 'caught error' in caplog.text - - def test_consumer_unexpected_error(self, caplog): - caplog.set_level(logging.DEBUG) - - bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - bidi_rpc.is_active = True - bidi_rpc.recv.side_effect = ValueError() - - on_response = mock.Mock(spec=['__call__']) - - consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) - - consumer.start() - - # Wait for the consumer's thread to exit. - while consumer.is_active: - pass - - on_response.assert_not_called() - bidi_rpc.recv.assert_called_once() - assert 'caught unexpected exception' in caplog.text - - def test_double_stop(self, caplog): - caplog.set_level(logging.DEBUG) - bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - bidi_rpc.is_active = True - on_response = mock.Mock(spec=['__call__']) - - def close_side_effect(): - bidi_rpc.is_active = False - - bidi_rpc.close.side_effect = close_side_effect - - consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) - - consumer.start() - assert consumer.is_active is True - - consumer.stop() - assert consumer.is_active is False - - # calling stop twice should not result in an error. 
- consumer.stop() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 15f3bc95db80..4acbf74ea8c0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -17,13 +17,13 @@ import mock import pytest +from google.api_core import bidi from google.api_core import exceptions from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.gapic import subscriber_client_config from google.cloud.pubsub_v1.subscriber import client from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber import scheduler -from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import leaser From b38840ba6f9eeefc7ca5f74b9919a61b8bd43962 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 17 Oct 2018 12:51:09 -0400 Subject: [PATCH 0305/1197] Docs: normalize use of support level badges (#6159) * Remove badges for deprecated umbrella 'google-cloud' package. * Clarify support levels. - Add explicit section to support linking from sub-package README badges. - Move explanatory text for a support level above the list of packages at that level. * Normalize use of support-level badges in READMEs. - Note that 'error_reporting/README.rst' and 'monitoring/README.rst' are undergoing other edits; they are left out here to avoid conflicts. * Use 'General Avaialblity' for support level. Fix linkx in related API READMEs. * Fix links for alpha support in API READMEs. * Fix links for beta support in API READMEs. 
--- packages/google-cloud-pubsub/README.rst | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index c2d14aba6c5a..c0ef2a2f4034 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Pub / Sub ======================================== -|pypi| |versions| +|beta| |pypi| |versions| `Google Cloud Pub / Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. You @@ -19,6 +19,8 @@ independently written applications. - `Product Documentation`_ - `Client Library Documentation`_ +.. |beta| image:: https://img.shields.io/badge/support-beta-silver.svg + :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg From de3a602de840299ddbf627b6c123e1eb020fef57 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 17 Oct 2018 16:02:30 -0400 Subject: [PATCH 0306/1197] Fix path for patch of 'bidi' elements. (#6243) Closes #6241. 
--- .../unit/pubsub_v1/subscriber/test_streaming_pull_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 4acbf74ea8c0..924fde56ea70 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -272,10 +272,10 @@ def test_heartbeat_inactive(): @mock.patch( - 'google.cloud.pubsub_v1.subscriber._protocol.bidi.ResumableBidiRpc', + 'google.api_core.bidi.ResumableBidiRpc', autospec=True) @mock.patch( - 'google.cloud.pubsub_v1.subscriber._protocol.bidi.BackgroundConsumer', + 'google.api_core.bidi.BackgroundConsumer', autospec=True) @mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser', From 8a231714e99fbc51e4ede6248fd682fb0e50eb68 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Wed, 17 Oct 2018 13:41:38 -0700 Subject: [PATCH 0307/1197] Add 'expiration_policy' to subscriber client (#6223) Re-generate library using 'pubsub/synth.py'. 
--- .../cloud/pubsub_v1/gapic/publisher_client.py | 9 +- .../pubsub_v1/gapic/subscriber_client.py | 37 ++- .../gapic/subscriber_client_config.py | 6 +- .../transports/publisher_grpc_transport.py | 3 +- .../transports/subscriber_grpc_transport.py | 5 +- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 227 ++++++++++++------ .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 8 +- 7 files changed, 189 insertions(+), 106 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index cb08721e1d05..b5cc5e5e83a0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -214,7 +214,7 @@ def create_topic(self, underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``\"goog\"``. - labels (dict[str -> str]): User labels. + labels (dict[str -> str]): See Creating and managing labels. message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining how messages published to the topic may be stored. It is determined when the topic is created based on the policy configured at the project level. It must not be set by the caller in the request to @@ -340,8 +340,7 @@ def publish(self, metadata=None): """ Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. + does not exist. Example: >>> from google.cloud import pubsub_v1 @@ -481,8 +480,8 @@ def list_topics(self, ... pass Args: - project (str): The name of the cloud project that topics belong to. - Format is ``projects/{project}``. 
+ project (str): The name of the project in which to list topics. + Format is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 48ce0f8e7540..95829e5923ca 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -216,6 +216,7 @@ def create_subscription(self, retain_acked_messages=None, message_retention_duration=None, labels=None, + expiration_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): @@ -296,7 +297,18 @@ def create_subscription(self, use. It is not subject to any SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Duration` - labels (dict[str -> str]): User labels. + labels (dict[str -> str]): See Creating and managing labels. + expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's expiration. + A subscription is considered active as long as any connected subscriber is + successfully consuming messages from the subscription or is issuing + operations on the subscription. If ``expiration_policy`` is not set, a + *default policy* with ``ttl`` of 31 days will be used. The minimum allowed + value for ``expiration_policy.ttl`` is 1 day. + BETA: This feature is part of a beta release. This API might be + changed in backward-incompatible ways and is not recommended for production + use. It is not subject to any SLA or deprecation policy. 
+ If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -336,6 +348,7 @@ def create_subscription(self, retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration, labels=labels, + expiration_policy=expiration_policy, ) return self._inner_api_calls['create_subscription']( request, retry=retry, timeout=timeout, metadata=metadata) @@ -496,8 +509,8 @@ def list_subscriptions(self, ... pass Args: - project (str): The name of the cloud project that subscriptions belong to. - Format is ``projects/{project}``. + project (str): The name of the project in which to list subscriptions. + Format is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -762,8 +775,7 @@ def pull(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return ``UNAVAILABLE`` if + Pulls messages from the server. The server may return ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given subscription. @@ -787,9 +799,7 @@ def pull(self, return_immediately (bool): If this field set to true, the system will respond immediately even if it there are no messages available to return in the ``Pull`` response. Otherwise, the system may wait (for a bounded amount of time) until at - least one message is available, rather than returning no messages. The - client may cancel the request if it does not wish to wait any longer for - the response. + least one message is available, rather than returning no messages. 
retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -1001,8 +1011,8 @@ def list_snapshots(self, ... pass Args: - project (str): The name of the cloud project that snapshots belong to. - Format is ``projects/{project}``. + project (str): The name of the project in which to list snapshots. + Format is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1070,7 +1080,7 @@ def create_snapshot(self, Creates a snapshot from the requested subscription.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + use. It is not subject to any SLA or deprecation policy.

If the snapshot already exists, returns ``ALREADY_EXISTS``. If the requested subscription doesn't exist, returns ``NOT_FOUND``. If the backlog in the subscription is too old -- and the resulting snapshot @@ -1097,7 +1107,8 @@ def create_snapshot(self, name (str): Optional user-provided name for this snapshot. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. - Note that for REST API requests, you must specify a name. + Note that for REST API requests, you must specify a name. See the + resource name rules. Format is ``projects/{project}/snapshots/{snap}``. subscription (str): The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: \ @@ -1109,7 +1120,7 @@ def create_snapshot(self, successful completion of the CreateSnapshot request. \ Format is ``projects/{project}/subscriptions/{sub}``. - labels (dict[str -> str]): User labels. + labels (dict[str -> str]): See Creating and managing labels. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index faa26736e62e..ef113edaec15 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -4,8 +4,8 @@ "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "pull": [ - "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", - "RESOURCE_EXHAUSTED", "UNAVAILABLE" + "DEADLINE_EXCEEDED", "INTERNAL", "RESOURCE_EXHAUSTED", + "UNAVAILABLE" ], "http_get": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "non_idempotent": [] @@ -72,7 +72,7 @@ }, "Acknowledge": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", + "retry_codes_name": "idempotent", "retry_params_name": "messaging" }, "Pull": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index c74dabd24891..9facdc16d9b0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -129,8 +129,7 @@ def publish(self): """Return the gRPC stub for {$apiMethod.name}. Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. + does not exist. 
Returns: Callable: A callable which accepts the appropriate diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 50f7aa3ee266..eeb05cee25ff 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -216,8 +216,7 @@ def acknowledge(self): def pull(self): """Return the gRPC stub for {$apiMethod.name}. - Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return ``UNAVAILABLE`` if + Pulls messages from the server. The server may return ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given subscription. @@ -288,7 +287,7 @@ def create_snapshot(self): Creates a snapshot from the requested subscription.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + use. It is not subject to any SLA or deprecation policy.

If the snapshot already exists, returns ``ALREADY_EXISTS``. If the requested subscription doesn't exist, returns ``NOT_FOUND``. If the backlog in the subscription is too old -- and the resulting snapshot diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 112f1d8a05f1..6f4a75859082 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -24,7 +24,7 @@ name='google/cloud/pubsub_v1/proto/pubsub.proto', package='google.pubsub.v1', syntax='proto3', - serialized_pb=_b('\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t\"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\xc5\x02\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 
\x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 
\x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse\"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf7\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93
\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x64\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{sub
scription=projects/*/subscriptions/*}:seek:\x01*B\x92\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1b\x06proto3') + serialized_pb=_b('\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t\"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 
\x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32\".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 
\x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 
\x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse\"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93
\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse\"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v
1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\x92\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1b\x06proto3') , dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) @@ -744,6 +744,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='expiration_policy', full_name='google.pubsub.v1.Subscription.expiration_policy', index=7, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -757,7 +764,38 @@ oneofs=[ ], serialized_start=1498, - serialized_end=1823, + serialized_end=1886, +) + + +_EXPIRATIONPOLICY = _descriptor.Descriptor( + name='ExpirationPolicy', + full_name='google.pubsub.v1.ExpirationPolicy', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ttl', full_name='google.pubsub.v1.ExpirationPolicy.ttl', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1888, + serialized_end=1946, ) @@ -831,8 +869,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1826, - 
serialized_end=1978, + serialized_start=1949, + serialized_end=2101, ) @@ -869,8 +907,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1980, - serialized_end=2063, + serialized_start=2103, + serialized_end=2186, ) @@ -900,8 +938,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2065, - serialized_end=2111, + serialized_start=2188, + serialized_end=2234, ) @@ -938,8 +976,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2114, - serialized_end=2244, + serialized_start=2237, + serialized_end=2367, ) @@ -983,8 +1021,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2246, - serialized_end=2328, + serialized_start=2369, + serialized_end=2451, ) @@ -1021,8 +1059,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2330, - serialized_end=2437, + serialized_start=2453, + serialized_end=2560, ) @@ -1052,8 +1090,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2439, - serialized_end=2488, + serialized_start=2562, + serialized_end=2611, ) @@ -1090,8 +1128,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2490, - serialized_end=2588, + serialized_start=2613, + serialized_end=2711, ) @@ -1135,8 +1173,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2590, - serialized_end=2675, + serialized_start=2713, + serialized_end=2798, ) @@ -1166,8 +1204,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2677, - serialized_end=2753, + serialized_start=2800, + serialized_end=2876, ) @@ -1211,8 +1249,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2755, - serialized_end=2850, + serialized_start=2878, + serialized_end=2973, ) @@ -1249,8 +1287,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2852, - serialized_end=2911, + serialized_start=2975, + serialized_end=3034, ) @@ -1308,8 +1346,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=2914, - serialized_end=3078, + serialized_start=3037, + serialized_end=3201, ) @@ -1339,8 +1377,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3080, - 
serialized_end=3165, + serialized_start=3203, + serialized_end=3288, ) @@ -1421,8 +1459,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3168, - serialized_end=3343, + serialized_start=3291, + serialized_end=3466, ) @@ -1459,8 +1497,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3345, - serialized_end=3463, + serialized_start=3468, + serialized_end=3586, ) @@ -1548,8 +1586,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3466, - serialized_end=3657, + serialized_start=3589, + serialized_end=3780, ) @@ -1579,8 +1617,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3659, - serialized_end=3697, + serialized_start=3782, + serialized_end=3820, ) @@ -1624,8 +1662,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3699, - serialized_end=3777, + serialized_start=3822, + serialized_end=3900, ) @@ -1662,8 +1700,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3779, - serialized_end=3874, + serialized_start=3902, + serialized_end=3997, ) @@ -1693,8 +1731,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=3876, - serialized_end=3917, + serialized_start=3999, + serialized_end=4040, ) @@ -1741,8 +1779,8 @@ name='target', full_name='google.pubsub.v1.SeekRequest.target', index=0, containing_type=None, fields=[]), ], - serialized_start=3919, - serialized_end=4028, + serialized_start=4042, + serialized_end=4151, ) @@ -1765,8 +1803,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=4030, - serialized_end=4044, + serialized_start=4153, + serialized_end=4167, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -1783,6 +1821,8 @@ _SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG _SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _SUBSCRIPTION.fields_by_name['labels'].message_type = _SUBSCRIPTION_LABELSENTRY +_SUBSCRIPTION.fields_by_name['expiration_policy'].message_type = _EXPIRATIONPOLICY 
+_EXPIRATIONPOLICY.fields_by_name['ttl'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG _PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY _RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE @@ -1822,6 +1862,7 @@ DESCRIPTOR.message_types_by_name['ListTopicSnapshotsResponse'] = _LISTTOPICSNAPSHOTSRESPONSE DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name['ExpirationPolicy'] = _EXPIRATIONPOLICY DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE DESCRIPTOR.message_types_by_name['GetSubscriptionRequest'] = _GETSUBSCRIPTIONREQUEST @@ -1892,7 +1933,7 @@ (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. labels: - User labels. + See Creating and managing labels. message_storage_policy: Policy constraining how messages published to the topic may be stored. It is determined when the topic is created based on @@ -1918,14 +1959,16 @@ DESCRIPTOR = _PUBSUBMESSAGE, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' , - __doc__ = """A message data and its attributes. The message payload must not be - empty; it must contain either a non-empty data field, or at least one - attribute. + __doc__ = """A message that is published by publishers and consumed by subscribers. + The message must contain either a non-empty data field or at least one + attribute. See Quotas and limits for more information about message + limits. Attributes: data: - The message payload. + The message data field. If this field is empty, the message + must contain at least one attribute. attributes: Optional attributes for this message. 
message_id: @@ -2026,8 +2069,8 @@ Attributes: project: - The name of the cloud project that topics belong to. Format is - ``projects/{project}``. + The name of the project in which to list topics. Format is + ``projects/{project-id}``. page_size: Maximum number of topics to return. page_token: @@ -2237,13 +2280,46 @@ recommended for production use. It is not subject to any SLA or deprecation policy. labels: - User labels. + See Creating and managing labels. + expiration_policy: + A policy that specifies the conditions for this subscription's + expiration. A subscription is considered active as long as any + connected subscriber is successfully consuming messages from + the subscription or is issuing operations on the subscription. + If ``expiration_policy`` is not set, a *default policy* with + ``ttl`` of 31 days will be used. The minimum allowed value for + ``expiration_policy.ttl`` is 1 day. BETA: This feature is part + of a beta release. This API might be changed in backward- + incompatible ways and is not recommended for production use. + It is not subject to any SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) )) _sym_db.RegisterMessage(Subscription) _sym_db.RegisterMessage(Subscription.LabelsEntry) +ExpirationPolicy = _reflection.GeneratedProtocolMessageType('ExpirationPolicy', (_message.Message,), dict( + DESCRIPTOR = _EXPIRATIONPOLICY, + __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + , + __doc__ = """A policy that specifies the conditions for resource expiration (i.e., + automatic resource deletion). + + + Attributes: + ttl: + Specifies the "time-to-live" duration for an associated + resource. The resource expires if it is not active for a + period of ``ttl``. The definition of "activity" depends on the + type of the associated resource. The minimum and maximum + allowed values for ``ttl`` depend on the type of the + associated resource, as well. 
If ``ttl`` is not set, the + associated resource never expires. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) + )) +_sym_db.RegisterMessage(ExpirationPolicy) + PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( @@ -2348,8 +2424,8 @@ Attributes: project: - The name of the cloud project that subscriptions belong to. - Format is ``projects/{project}``. + The name of the project in which to list subscriptions. Format + is ``projects/{project-id}``. page_size: Maximum number of subscriptions to return. page_token: @@ -2436,9 +2512,7 @@ even if it there are no messages available to return in the ``Pull`` response. Otherwise, the system may wait (for a bounded amount of time) until at least one message is - available, rather than returning no messages. The client may - cancel the request if it does not wish to wait any longer for - the response. + available, rather than returning no messages. max_messages: The maximum number of messages returned for this request. The Pub/Sub system may return fewer than the number specified. @@ -2456,11 +2530,11 @@ Attributes: received_messages: - Received Pub/Sub messages. The Pub/Sub system will return zero - messages if there are no more available in the backlog. The - Pub/Sub system may return fewer than the ``maxMessages`` - requested even if there are more messages available in the - backlog. + Received Pub/Sub messages. The list will be empty if there are + no more messages available in the backlog. For JSON, the + response can be entirely empty. The Pub/Sub system may return + fewer than the ``maxMessages`` requested even if there are + more messages available in the backlog. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) )) @@ -2608,7 +2682,7 @@ not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. Note that for REST API requests, you must - specify a name. Format is + specify a name. See the resource name rules. Format is ``projects/{project}/snapshots/{snap}``. subscription: The subscription whose backlog the snapshot retains. @@ -2621,7 +2695,7 @@ completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. labels: - User labels. + See Creating and managing labels. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) )) @@ -2686,7 +2760,7 @@ service will refuse to create a snapshot that would expire in less than 1 hour after creation. labels: - User labels. + See Creating and managing labels. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) )) @@ -2724,8 +2798,8 @@ Attributes: project: - The name of the cloud project that snapshots belong to. Format - is ``projects/{project}``. + The name of the project in which to list snapshots. Format is + ``projects/{project-id}``. page_size: Maximum number of snapshots to return. page_token: @@ -2817,6 +2891,9 @@ SeekResponse = _reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( DESCRIPTOR = _SEEKRESPONSE, __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' + , + __doc__ = """Response for the ``Seek`` method (this response is empty). 
+ """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) )) _sym_db.RegisterMessage(SeekResponse) @@ -2843,8 +2920,8 @@ file=DESCRIPTOR, index=0, options=None, - serialized_start=4047, - serialized_end=5134, + serialized_start=4170, + serialized_end=5257, methods=[ _descriptor.MethodDescriptor( name='CreateTopic', @@ -2930,8 +3007,8 @@ file=DESCRIPTOR, index=1, options=None, - serialized_start=5137, - serialized_end=7432, + serialized_start=5260, + serialized_end=7557, methods=[ _descriptor.MethodDescriptor( name='CreateSubscription', diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index cbc898ec32db..98f878c83b0a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -81,8 +81,7 @@ def UpdateTopic(self, request, context): def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. The message payload must not be empty; it must contain - either a non-empty data field, or at least one attribute. + does not exist. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details('Method not implemented!') @@ -354,8 +353,7 @@ def Acknowledge(self, request, context): raise NotImplementedError('Method not implemented!') def Pull(self, request, context): - """Pulls messages from the server. Returns an empty list if there are no - messages available in the backlog. The server may return `UNAVAILABLE` if + """Pulls messages from the server. The server may return `UNAVAILABLE` if there are too many concurrent pull requests pending for the given subscription. """ @@ -412,7 +410,7 @@ def CreateSnapshot(self, request, context): """Creates a snapshot from the requested subscription.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + use. It is not subject to any SLA or deprecation policy.

If the snapshot already exists, returns `ALREADY_EXISTS`. If the requested subscription doesn't exist, returns `NOT_FOUND`. If the backlog in the subscription is too old -- and the resulting snapshot From 669f40923043ef65a03cae0565948cffc173607b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 30 Oct 2018 16:49:24 -0400 Subject: [PATCH 0308/1197] Fix error from new flake8 version. (#6346) --- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index fdc868b4fa3a..5a7c7d754262 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -264,7 +264,7 @@ def send(self, request): if self._UNARY_REQUESTS: try: self._send_unary_request(request) - except exceptions.GoogleAPICallError as exc: + except exceptions.GoogleAPICallError: _LOGGER.debug( 'Exception while sending unary RPC. This is typically ' 'non-fatal as stream requests are best-effort.', From b06b8934dd387cbea4ceb3d5d779f9723ab8bcfe Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 31 Oct 2018 09:59:23 -0400 Subject: [PATCH 0309/1197] Pubsub: fix imports for hand-written client docstring examples. (#6345) Closes #6334. 
--- .../google/cloud/pubsub_v1/publisher/client.py | 4 ++-- .../google/cloud/pubsub_v1/subscriber/client.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index c37b1cb80a64..910ebc5fbada 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -153,8 +153,8 @@ def publish(self, topic, data, **attrs): period of time has elapsed. Example: - >>> from google.cloud.pubsub_v1 import publisher_client - >>> client = publisher_client.PublisherClient() + >>> from google.cloud import pubsub_v1 + >>> client = pubsub_v1.PublisherClient() >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') >>> data = b'The rain in Wales falls mainly on the snails.' >>> response = client.publish(topic, data, username='guido') diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 4a9cf0d3a32b..bda24ce96718 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -129,9 +129,9 @@ def subscribe( .. 
code-block:: python - from google.cloud.pubsub_v1 import subscriber + from google.cloud import pubsub_v1 - subscriber_client = pubsub.SubscriberClient() + subscriber_client = pubsub_v1.SubscriberClient() # existing subscription subscription = subscriber_client.subscription_path( From 099a8c35ac58df56538ba0d0dde9341a07b16ed4 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 1 Nov 2018 14:54:55 -0700 Subject: [PATCH 0310/1197] Update IAM version in dependencies (#6362) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index f34758421dab..aaac99a6bcc0 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 4 - Beta' dependencies = [ 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', - 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', + 'grpc-google-iam-v1<0.12dev,>=0.11.4', 'enum34; python_version < "3.4"', ] extras = { From 61a60d71d2bbebb443d522f5a50ce2fd62ac512b Mon Sep 17 00:00:00 2001 From: David Mandelberg Date: Mon, 5 Nov 2018 12:16:15 -0500 Subject: [PATCH 0311/1197] Fix docstring reference to wrong future class. (#6382) --- .../google/cloud/pubsub_v1/subscriber/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index bda24ce96718..817ef80a97eb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -165,8 +165,8 @@ def callback(message): how callbacks are executed concurrently. Returns: - google.cloud.pubsub_v1.futures.StreamingPullFuture: A Future object - that can be used to manage the background stream. 
+ google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture: A + Future object that can be used to manage the background stream. """ flow_control = types.FlowControl(*flow_control) From 5fd48bf0bd8f01529fb55a1e1071dd5740607822 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 6 Nov 2018 08:54:54 -0800 Subject: [PATCH 0312/1197] Fix client_info bug, update docstrings. (#6418) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 100 ++--- .../gapic/publisher_client_config.py | 1 - .../pubsub_v1/gapic/subscriber_client.py | 344 +++++++++--------- .../gapic/subscriber_client_config.py | 1 - .../transports/publisher_grpc_transport.py | 25 +- .../transports/subscriber_grpc_transport.py | 96 ++--- 6 files changed, 290 insertions(+), 277 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index b5cc5e5e83a0..71e093f22042 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -168,9 +168,10 @@ def __init__(self, ) if client_info is None: - client_info = ( - google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC @@ -195,8 +196,8 @@ def create_topic(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Creates the given topic with the given name. See the - resource name rules. + Creates the given topic with the given name. See the resource name + rules. 
Example: >>> from google.cloud import pubsub_v1 @@ -209,18 +210,19 @@ def create_topic(self, Args: name (str): The name of the topic. It must have the format - ``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter, - and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent - signs (``%``). It must be between 3 and 255 characters in length, and it - must not start with ``\"goog\"``. - labels (dict[str -> str]): See Creating and managing labels. + `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, + and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), + underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent + signs (`%`). It must be between 3 and 255 characters in length, and it + must not start with `"goog"`. + labels (dict[str -> str]): See Creating and managing labels. message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining how messages published to the topic may be stored. It is determined when the topic is created based on the policy configured at the project level. It must not be set by the caller in the request to CreateTopic or to UpdateTopic. This field will be populated in the responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the response, then no constraints are in effect. 
+ If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -276,23 +278,26 @@ def update_topic(self, >>> >>> client = pubsub_v1.PublisherClient() >>> - >>> # TODO: Initialize ``topic``: + >>> # TODO: Initialize `topic`: >>> topic = {} >>> - >>> # TODO: Initialize ``update_mask``: + >>> # TODO: Initialize `update_mask`: >>> update_mask = {} >>> >>> response = client.update_topic(topic, update_mask) Args: topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): The updated topic object. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. Must be specified - and non-empty. Note that if ``update_mask`` contains - \"message_storage_policy\" then the new value will be determined based on the - policy configured at the project or organization level. The - ``message_storage_policy`` must not be set in the ``topic`` provided above. + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. Must be + specified and non-empty. Note that if ``update_mask`` contains + "message\_storage\_policy" then the new value will be determined based + on the policy configured at the project or organization level. The + ``message_storage_policy`` must not be set in the ``topic`` provided + above. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -339,8 +344,8 @@ def publish(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic - does not exist. 
+ Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the + topic does not exist. Example: >>> from google.cloud import pubsub_v1 @@ -355,9 +360,10 @@ def publish(self, >>> response = client.publish(topic, messages) Args: - topic (str): The messages in the request will be published on this topic. - Format is ``projects/{project}/topics/{topic}``. + topic (str): The messages in the request will be published on this topic. Format is + ``projects/{project}/topics/{topic}``. messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): The messages to publish. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -414,8 +420,8 @@ def get_topic(self, >>> response = client.get_topic(topic) Args: - topic (str): The name of the topic to get. - Format is ``projects/{project}/topics/{topic}``. + topic (str): The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -474,14 +480,14 @@ def list_topics(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_topics(project).pages: ... for element in page: ... # process element ... pass Args: - project (str): The name of the project in which to list topics. - Format is ``projects/{project-id}``. + project (str): The name of the project in which to list topics. Format is + ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. 
If page @@ -563,14 +569,14 @@ def list_topic_subscriptions( >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_topic_subscriptions(topic).pages: ... for element in page: ... # process element ... pass Args: - topic (str): The name of the topic that subscriptions are attached to. - Format is ``projects/{project}/topics/{topic}``. + topic (str): The name of the topic that subscriptions are attached to. Format is + ``projects/{project}/topics/{topic}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -634,11 +640,12 @@ def delete_topic(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the + topic does not exist. After a topic is deleted, a new topic may be + created with the same name; this is an entirely new topic with none of + the old configuration or subscriptions. Existing subscriptions to this + topic are not deleted, but their ``topic`` field is set to + ``_deleted-topic_``. Example: >>> from google.cloud import pubsub_v1 @@ -650,8 +657,8 @@ def delete_topic(self, >>> client.delete_topic(topic) Args: - topic (str): Name of the topic to delete. - Format is ``projects/{project}/topics/{topic}``. + topic (str): Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. 
retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -700,7 +707,7 @@ def set_iam_policy(self, >>> >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') >>> - >>> # TODO: Initialize ``policy``: + >>> # TODO: Initialize `policy`: >>> policy = {} >>> >>> response = client.set_iam_policy(resource, policy) @@ -709,10 +716,11 @@ def set_iam_policy(self, resource (str): REQUIRED: The resource for which the policy is being specified. ``resource`` is usually specified as a path. For example, a Project resource is specified as ``projects/{project}``. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of - the policy is limited to a few 10s of KB. An empty policy is a + policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The + size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) might reject them. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -816,9 +824,9 @@ def test_iam_permissions(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. + Returns permissions that a caller has on the specified resource. If the + resource does not exist, this will return an empty set of permissions, + not a NOT\_FOUND error. 
Example: >>> from google.cloud import pubsub_v1 @@ -827,7 +835,7 @@ def test_iam_permissions(self, >>> >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') >>> - >>> # TODO: Initialize ``permissions``: + >>> # TODO: Initialize `permissions`: >>> permissions = [] >>> >>> response = client.test_iam_permissions(resource, permissions) @@ -838,8 +846,8 @@ def test_iam_permissions(self, resource is specified as ``projects/{project}``. permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see - `IAM Overview `_. + information see `IAM + Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 129cbeabf23c..e5db20ce1d9f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -3,7 +3,6 @@ "google.pubsub.v1.Publisher": { "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "http_get": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "non_idempotent": [], "one_plus_delivery": [ "ABORTED", "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 95829e5923ca..0afc9299dd16 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -189,9 +189,10 @@ def __init__(self, ) if client_info is None: - client_info = ( - 
google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) - client_info.gapic_version = _GAPIC_LIBRARY_VERSION + client_info = google.api_core.gapic_v1.client_info.ClientInfo( + gapic_version=_GAPIC_LIBRARY_VERSION, ) + else: + client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info # Parse out the default settings for retry and timeout for each RPC @@ -221,17 +222,16 @@ def create_subscription(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Creates a subscription to a given topic. See the - resource name rules. - If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - `resource name format `_. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. + Creates a subscription to a given topic. See the resource name rules. If + the subscription already exists, returns ``ALREADY_EXISTS``. If the + corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a + random name for this subscription on the same project as the topic, + conforming to the `resource name + format `__. The + generated name is populated in the returned Subscription object. Note + that for REST API requests, you must specify a name in the request. Example: >>> from google.cloud import pubsub_v1 @@ -245,18 +245,18 @@ def create_subscription(self, Args: name (str): The name of the subscription. It must have the format - ``\"projects/{project}/subscriptions/{subscription}\"``. 
``{subscription}`` must - start with a letter, and contain only letters (``[A-Za-z]``), numbers - (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters - in length, and it must not start with ``\"goog\"`` - topic (str): The name of the topic from which this subscription is receiving messages. - Format is ``projects/{project}/topics/{topic}``. - The value of this field will be ``_deleted-topic_`` if the topic has been - deleted. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is - used to configure it. An empty ``pushConfig`` signifies that the subscriber - will pull and ack messages using API methods. + `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must + start with a letter, and contain only letters (`[A-Za-z]`), numbers + (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), + plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters + in length, and it must not start with `"goog"` + topic (str): The name of the topic from which this subscription is receiving + messages. Format is ``projects/{project}/topics/{topic}``. The value of + this field will be ``_deleted-topic_`` if the topic has been deleted. + push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to + configure it. An empty ``pushConfig`` signifies that the subscriber will + pull and ack messages using API methods. 
+ If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message @@ -265,48 +265,50 @@ def create_subscription(self, acknowledged, it is an outstanding message and will not be delivered again during that time (on a best-effort basis). - For pull subscriptions, this value is used as the initial value for the ack - deadline. To override this value for a given message, call + For pull subscriptions, this value is used as the initial value for the + ack deadline. To override this value for a given message, call ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. - The minimum custom deadline you can specify is 10 seconds. - The maximum custom deadline you can specify is 600 seconds (10 minutes). - If this parameter is 0, a default value of 10 seconds is used. + ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The + minimum custom deadline you can specify is 10 seconds. The maximum + custom deadline you can specify is 600 seconds (10 minutes). If this + parameter is 0, a default value of 10 seconds is used. - For push delivery, this value is also used to set the request timeout for - the call to the push endpoint. + For push delivery, this value is also used to set the request timeout + for the call to the push endpoint. - If the subscriber never acknowledges the message, the Pub/Sub - system will eventually redeliver the message. + If the subscriber never acknowledges the message, the Pub/Sub system + will eventually redeliver the message. retain_acked_messages (bool): Indicates whether to retain acknowledged messages. 
If true, then - messages are not expunged from the subscription's backlog, even if they are - acknowledged, until they fall out of the ``message_retention_duration`` - window.

- ALPHA: This feature is part of an alpha release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. - message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's backlog, - from the moment a message is published. - If ``retain_acked_messages`` is true, then this also configures the retention - of acknowledged messages, and thus configures how far back in time a ``Seek`` - can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes.

- ALPHA: This feature is part of an alpha release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + messages are not expunged from the subscription's backlog, even if they + are acknowledged, until they fall out of the + ``message_retention_duration`` window. ALPHA: This feature is part of an + alpha release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. + message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's + backlog, from the moment a message is published. If + ``retain_acked_messages`` is true, then this also configures the + retention of acknowledged messages, and thus configures how far back in + time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 + days or less than 10 minutes. ALPHA: This feature is part of an alpha + release. This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any SLA or + deprecation policy. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Duration` - labels (dict[str -> str]): See Creating and managing labels. - expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's expiration. - A subscription is considered active as long as any connected subscriber is - successfully consuming messages from the subscription or is issuing - operations on the subscription. If ``expiration_policy`` is not set, a - *default policy* with ``ttl`` of 31 days will be used. The minimum allowed - value for ``expiration_policy.ttl`` is 1 day. - BETA: This feature is part of a beta release. 
This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + labels (dict[str -> str]): See Creating and managing labels. + expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's + expiration. A subscription is considered active as long as any connected + subscriber is successfully consuming messages from the subscription or + is issuing operations on the subscription. If ``expiration_policy`` is + not set, a *default policy* with ``ttl`` of 31 days will be used. The + minimum allowed value for ``expiration_policy.ttl`` is 1 day. BETA: This + feature is part of a beta release. This API might be changed in + backward-incompatible ways and is not recommended for production use. It + is not subject to any SLA or deprecation policy. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -371,8 +373,8 @@ def get_subscription(self, >>> response = client.get_subscription(subscription) Args: - subscription (str): The name of the subscription to get. - Format is ``projects/{project}/subscriptions/{sub}``. + subscription (str): The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -434,10 +436,12 @@ def update_subscription(self, Args: subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): The updated subscription object. 
+ If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Subscription` update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided subscription to update. Must be specified and non-empty. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -503,14 +507,14 @@ def list_subscriptions(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_subscriptions(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_subscriptions(project).pages: ... for element in page: ... # process element ... pass Args: - project (str): The name of the project in which to list subscriptions. - Format is ``projects/{project-id}``. + project (str): The name of the project in which to list subscriptions. Format is + ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -574,11 +578,12 @@ def delete_subscription(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to ``Pull`` after deletion will return - ``NOT_FOUND``. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after deletion + will return ``NOT_FOUND``. 
After a subscription is deleted, a new one + may be created with the same name, but the new one has no association + with the old subscription or its topic unless the same topic is + specified. Example: >>> from google.cloud import pubsub_v1 @@ -590,8 +595,8 @@ def delete_subscription(self, >>> client.delete_subscription(subscription) Args: - subscription (str): The subscription to delete. - Format is ``projects/{project}/subscriptions/{sub}``. + subscription (str): The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -646,24 +651,23 @@ def modify_ack_deadline(self, >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``ack_ids``: + >>> # TODO: Initialize `ack_ids`: >>> ack_ids = [] >>> - >>> # TODO: Initialize ``ack_deadline_seconds``: + >>> # TODO: Initialize `ack_deadline_seconds`: >>> ack_deadline_seconds = 0 >>> >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) Args: - subscription (str): The name of the subscription. - Format is ``projects/{project}/subscriptions/{sub}``. + subscription (str): The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. ack_ids (list[str]): List of acknowledgment IDs. ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to - the Pub/Sub system. For example, if the value is 10, the new - ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero may immediately make the message available for - another pull request. - The minimum deadline you can specify is 0 seconds. + the Pub/Sub system. For example, if the value is 10, the new ack + deadline will expire 10 seconds after the ``ModifyAckDeadline`` call was + made. 
Specifying zero may immediately make the message available for + another pull request. The minimum deadline you can specify is 0 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not @@ -709,11 +713,11 @@ def acknowledge(self, metadata=None): """ Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages - from the subscription. + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant + messages from the subscription. - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more + Acknowledging a message whose ack deadline has expired may succeed, but + such a message may be redelivered later. Acknowledging a message more than once will not result in an error. Example: @@ -723,16 +727,17 @@ def acknowledge(self, >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``ack_ids``: + >>> # TODO: Initialize `ack_ids`: >>> ack_ids = [] >>> >>> client.acknowledge(subscription, ack_ids) Args: - subscription (str): The subscription whose message is being acknowledged. - Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): The acknowledgment ID for the messages being acknowledged that was returned - by the Pub/Sub system in the ``Pull`` response. Must not be empty. + subscription (str): The subscription whose message is being acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[str]): The acknowledgment ID for the messages being acknowledged that was + returned by the Pub/Sub system in the ``Pull`` response. Must not be + empty. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will not be retried. @@ -786,14 +791,14 @@ def pull(self, >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``max_messages``: + >>> # TODO: Initialize `max_messages`: >>> max_messages = 0 >>> >>> response = client.pull(subscription, max_messages) Args: - subscription (str): The subscription from which messages should be pulled. - Format is ``projects/{project}/subscriptions/{sub}``. + subscription (str): The subscription from which messages should be pulled. Format is + ``projects/{project}/subscriptions/{sub}``. max_messages (int): The maximum number of messages returned for this request. The Pub/Sub system may return fewer than the number specified. return_immediately (bool): If this field set to true, the system will respond immediately even if @@ -844,12 +849,12 @@ def streaming_pull(self, metadata=None): """ Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status ``UNAVAILABLE`` to - reassign server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by configuring the - underlying RPC channel. + client. The client streams acknowledgements and ack deadline + modifications back to the server. The server will close the stream and + return the status on any error. The server may close the stream with + status ``UNAVAILABLE`` to reassign server-side resources, in which case, + the client should re-establish the stream. Flow control can be achieved + by configuring the underlying RPC channel. EXPERIMENTAL: This method interface might change in the future. 
@@ -860,7 +865,7 @@ def streaming_pull(self, >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``stream_ack_deadline_seconds``: + >>> # TODO: Initialize `stream_ack_deadline_seconds`: >>> stream_ack_deadline_seconds = 0 >>> request = {'subscription': subscription, 'stream_ack_deadline_seconds': stream_ack_deadline_seconds} >>> @@ -914,10 +919,11 @@ def modify_push_config(self, """ Modifies the ``PushConfig`` for a specified subscription. - This may be used to change a push subscription to a pull one (signified by - an empty ``PushConfig``) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the ``PushConfig``. + This may be used to change a push subscription to a pull one (signified + by an empty ``PushConfig``) or vice versa, or change the endpoint URL + and other attributes of a push subscription. Messages will accumulate + for delivery continuously through the call regardless of changes to the + ``PushConfig``. Example: >>> from google.cloud import pubsub_v1 @@ -926,20 +932,21 @@ def modify_push_config(self, >>> >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``push_config``: + >>> # TODO: Initialize `push_config`: >>> push_config = {} >>> >>> client.modify_push_config(subscription, push_config) Args: - subscription (str): The name of the subscription. - Format is ``projects/{project}/subscriptions/{sub}``. + subscription (str): The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): The push configuration for future deliveries. 
- An empty ``pushConfig`` indicates that the Pub/Sub system should - stop pushing messages from the given subscription and allow - messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` or ``StreamingPull`` is not called. + An empty ``pushConfig`` indicates that the Pub/Sub system should stop + pushing messages from the given subscription and allow messages to be + pulled and acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1005,14 +1012,14 @@ def list_snapshots(self, >>> # Alternatively: >>> >>> # Iterate over results one page at a time - >>> for page in client.list_snapshots(project, options=CallOptions(page_token=INITIAL_PAGE)): + >>> for page in client.list_snapshots(project).pages: ... for element in page: ... # process element ... pass Args: - project (str): The name of the project in which to list snapshots. - Format is ``projects/{project-id}``. + project (str): The name of the project in which to list snapshots. Format is + ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1077,21 +1084,20 @@ def create_snapshot(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Creates a snapshot from the requested subscription.

- ALPHA: This feature is part of an alpha release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy.

- If the snapshot already exists, returns ``ALREADY_EXISTS``. - If the requested subscription doesn't exist, returns ``NOT_FOUND``. - If the backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. - See also the ``Snapshot.expire_time`` field. If the name is not provided in - the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the `resource name format `_. - The generated - name is populated in the returned Snapshot object. Note that for REST API - requests, you must specify a name in the request. + Creates a snapshot from the requested subscription. ALPHA: This feature + is part of an alpha release. This API might be changed in + backward-incompatible ways and is not recommended for production use. It + is not subject to any SLA or deprecation policy. If the snapshot already + exists, returns ``ALREADY_EXISTS``. If the requested subscription + doesn't exist, returns ``NOT_FOUND``. If the backlog in the subscription + is too old -- and the resulting snapshot would expire in less than 1 + hour -- then ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in the + request, the server will assign a random name for this snapshot on the + same project as the subscription, conforming to the `resource name + format `__. The + generated name is populated in the returned Snapshot object. Note that + for REST API requests, you must specify a name in the request. Example: >>> from google.cloud import pubsub_v1 @@ -1104,23 +1110,20 @@ def create_snapshot(self, >>> response = client.create_snapshot(name, subscription) Args: - name (str): Optional user-provided name for this snapshot. - If the name is not provided in the request, the server will assign a random - name for this snapshot on the same project as the subscription. 
- Note that for REST API requests, you must specify a name. See the - resource name rules. - Format is ``projects/{project}/snapshots/{snap}``. - subscription (str): The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: \ - (a) The existing backlog on the subscription. More precisely, this is \ - defined as the messages in the subscription's backlog that are \ - unacknowledged upon the successful completion of the \ - `CreateSnapshot` request; as well as: \ - (b) Any messages published to the subscription's topic following the \ - successful completion of the CreateSnapshot request. \ - - Format is ``projects/{project}/subscriptions/{sub}``. - labels (dict[str -> str]): See Creating and managing labels. + name (str): Optional user-provided name for this snapshot. If the name is not + provided in the request, the server will assign a random name for this + snapshot on the same project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name rules. Format + is ``projects/{project}/snapshots/{snap}``. + subscription (str): The subscription whose backlog the snapshot retains. Specifically, the + created snapshot is guaranteed to retain: (a) The existing backlog on + the subscription. More precisely, this is defined as the messages in the + subscription's backlog that are unacknowledged upon the successful + completion of the ``CreateSnapshot`` request; as well as: (b) Any + messages published to the subscription's topic following the successful + completion of the CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. + labels (dict[str -> str]): See Creating and managing labels. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
@@ -1188,10 +1191,12 @@ def update_snapshot(self, Args: snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): The updated snapshot object. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Snapshot` update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided snapshot to update. Must be specified and non-empty. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1256,8 +1261,8 @@ def delete_snapshot(self, >>> client.delete_snapshot(snapshot) Args: - snapshot (str): The name of the snapshot to delete. - Format is ``projects/{project}/snapshots/{snap}``. + snapshot (str): The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -1314,22 +1319,22 @@ def seek(self, Args: subscription (str): The subscription to affect. - time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. - Messages retained in the subscription that were published before this - time are marked as acknowledged, and messages retained in the - subscription that were published after this time are marked as - unacknowledged. Note that this operation affects only those messages - retained in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). For example, - if ``time`` corresponds to a point before the message retention + time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. 
Messages retained in the subscription that were + published before this time are marked as acknowledged, and messages + retained in the subscription that were published after this time are + marked as unacknowledged. Note that this operation affects only those + messages retained in the subscription (configured by the combination of + ``message_retention_duration`` and ``retain_acked_messages``). For + example, if ``time`` corresponds to a point before the message retention window (or to a point before the system's notion of the subscription creation time), only retained messages will be marked as unacknowledged, and already-expunged messages will not be restored. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Timestamp` - snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as that of - the provided subscription. - Format is ``projects/{project}/snapshots/{snap}``. + snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as that + of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -1391,7 +1396,7 @@ def set_iam_policy(self, >>> >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``policy``: + >>> # TODO: Initialize `policy`: >>> policy = {} >>> >>> response = client.set_iam_policy(resource, policy) @@ -1400,10 +1405,11 @@ def set_iam_policy(self, resource (str): REQUIRED: The resource for which the policy is being specified. ``resource`` is usually specified as a path. For example, a Project resource is specified as ``projects/{project}``. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of - the policy is limited to a few 10s of KB. 
An empty policy is a + policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The + size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) might reject them. + If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1507,9 +1513,9 @@ def test_iam_permissions(self, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None): """ - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. + Returns permissions that a caller has on the specified resource. If the + resource does not exist, this will return an empty set of permissions, + not a NOT\_FOUND error. Example: >>> from google.cloud import pubsub_v1 @@ -1518,7 +1524,7 @@ def test_iam_permissions(self, >>> >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') >>> - >>> # TODO: Initialize ``permissions``: + >>> # TODO: Initialize `permissions`: >>> permissions = [] >>> >>> response = client.test_iam_permissions(resource, permissions) @@ -1529,8 +1535,8 @@ def test_iam_permissions(self, resource is specified as ``projects/{project}``. permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see - `IAM Overview `_. + information see `IAM + Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index ef113edaec15..536d6d34b2b5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -7,7 +7,6 @@ "DEADLINE_EXCEEDED", "INTERNAL", "RESOURCE_EXHAUSTED", "UNAVAILABLE" ], - "http_get": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "non_idempotent": [] }, "retry_params": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 9facdc16d9b0..7ce2bca1a552 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -100,8 +100,8 @@ def create_channel(cls, def create_topic(self): """Return the gRPC stub for {$apiMethod.name}. - Creates the given topic with the given name. See the - resource name rules. + Creates the given topic with the given name. See the resource name + rules. Returns: Callable: A callable which accepts the appropriate @@ -128,8 +128,8 @@ def update_topic(self): def publish(self): """Return the gRPC stub for {$apiMethod.name}. - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic - does not exist. + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the + topic does not exist. Returns: Callable: A callable which accepts the appropriate @@ -181,11 +181,12 @@ def list_topic_subscriptions(self): def delete_topic(self): """Return the gRPC stub for {$apiMethod.name}. - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic - does not exist. 
After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + Deletes the topic with the given name. Returns ``NOT_FOUND`` if the + topic does not exist. After a topic is deleted, a new topic may be + created with the same name; this is an entirely new topic with none of + the old configuration or subscriptions. Existing subscriptions to this + topic are not deleted, but their ``topic`` field is set to + ``_deleted-topic_``. Returns: Callable: A callable which accepts the appropriate @@ -227,9 +228,9 @@ def get_iam_policy(self): def test_iam_permissions(self): """Return the gRPC stub for {$apiMethod.name}. - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. + Returns permissions that a caller has on the specified resource. If the + resource does not exist, this will return an empty set of permissions, + not a NOT\_FOUND error. Returns: Callable: A callable which accepts the appropriate diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index eeb05cee25ff..1455d6c476b4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -100,17 +100,16 @@ def create_channel(cls, def create_subscription(self): """Return the gRPC stub for {$apiMethod.name}. - Creates a subscription to a given topic. See the - resource name rules. - If the subscription already exists, returns ``ALREADY_EXISTS``. 
- If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - `resource name format `_. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. + Creates a subscription to a given topic. See the resource name rules. If + the subscription already exists, returns ``ALREADY_EXISTS``. If the + corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will assign a + random name for this subscription on the same project as the topic, + conforming to the `resource name + format `__. The + generated name is populated in the returned Subscription object. Note + that for REST API requests, you must specify a name in the request. Returns: Callable: A callable which accepts the appropriate @@ -163,11 +162,12 @@ def list_subscriptions(self): def delete_subscription(self): """Return the gRPC stub for {$apiMethod.name}. - Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to ``Pull`` after deletion will return - ``NOT_FOUND``. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after deletion + will return ``NOT_FOUND``. After a subscription is deleted, a new one + may be created with the same name, but the new one has no association + with the old subscription or its topic unless the same topic is + specified. 
Returns: Callable: A callable which accepts the appropriate @@ -198,11 +198,11 @@ def acknowledge(self): """Return the gRPC stub for {$apiMethod.name}. Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant messages - from the subscription. + ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant + messages from the subscription. - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more + Acknowledging a message whose ack deadline has expired may succeed, but + such a message may be redelivered later. Acknowledging a message more than once will not result in an error. Returns: @@ -232,12 +232,12 @@ def streaming_pull(self): """Return the gRPC stub for {$apiMethod.name}. Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status ``UNAVAILABLE`` to - reassign server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by configuring the - underlying RPC channel. + client. The client streams acknowledgements and ack deadline + modifications back to the server. The server will close the stream and + return the status on any error. The server may close the stream with + status ``UNAVAILABLE`` to reassign server-side resources, in which case, + the client should re-establish the stream. Flow control can be achieved + by configuring the underlying RPC channel. Returns: Callable: A callable which accepts the appropriate @@ -252,10 +252,11 @@ def modify_push_config(self): Modifies the ``PushConfig`` for a specified subscription. 
- This may be used to change a push subscription to a pull one (signified by - an empty ``PushConfig``) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the ``PushConfig``. + This may be used to change a push subscription to a pull one (signified + by an empty ``PushConfig``) or vice versa, or change the endpoint URL + and other attributes of a push subscription. Messages will accumulate + for delivery continuously through the call regardless of changes to the + ``PushConfig``. Returns: Callable: A callable which accepts the appropriate @@ -284,21 +285,20 @@ def list_snapshots(self): def create_snapshot(self): """Return the gRPC stub for {$apiMethod.name}. - Creates a snapshot from the requested subscription.

- ALPHA: This feature is part of an alpha release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy.

- If the snapshot already exists, returns ``ALREADY_EXISTS``. - If the requested subscription doesn't exist, returns ``NOT_FOUND``. - If the backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. - See also the ``Snapshot.expire_time`` field. If the name is not provided in - the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the `resource name format `_. - The generated - name is populated in the returned Snapshot object. Note that for REST API - requests, you must specify a name in the request. + Creates a snapshot from the requested subscription. ALPHA: This feature + is part of an alpha release. This API might be changed in + backward-incompatible ways and is not recommended for production use. It + is not subject to any SLA or deprecation policy. If the snapshot already + exists, returns ``ALREADY_EXISTS``. If the requested subscription + doesn't exist, returns ``NOT_FOUND``. If the backlog in the subscription + is too old -- and the resulting snapshot would expire in less than 1 + hour -- then ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in the + request, the server will assign a random name for this snapshot on the + same project as the subscription, conforming to the `resource name + format `__. The + generated name is populated in the returned Snapshot object. Note that + for REST API requests, you must specify a name in the request. Returns: Callable: A callable which accepts the appropriate @@ -394,9 +394,9 @@ def get_iam_policy(self): def test_iam_permissions(self): """Return the gRPC stub for {$apiMethod.name}. - Returns permissions that a caller has on the specified resource. - If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. 
+ Returns permissions that a caller has on the specified resource. If the + resource does not exist, this will return an empty set of permissions, + not a NOT\_FOUND error. Returns: Callable: A callable which accepts the appropriate From 8e0d0f7c00a18d639b901fd48f711d97dfc09f43 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 7 Nov 2018 14:03:35 -0500 Subject: [PATCH 0313/1197] Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. (#6391) Closes #6390. --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index aaac99a6bcc0..899b85ca9325 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,7 +30,7 @@ release_status = 'Development Status :: 4 - Beta' dependencies = [ 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', - 'grpc-google-iam-v1<0.12dev,>=0.11.4', + 'grpc-google-iam-v1 >= 0.11.4, < 0.12dev', 'enum34; python_version < "3.4"', ] extras = { From 86937bdc66d0f20d4270ed15f161d8940ccb70de Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 8 Nov 2018 15:09:25 -0500 Subject: [PATCH 0314/1197] Override client classmethod factories inherited from GAPIC. (#6453) The '_gapic.add_methods' decorator doesn't quite get them right, so blacklist them from it and create them locally. Closes #5903. 
--- .../cloud/pubsub_v1/publisher/client.py | 32 ++++++++++++++++++- .../cloud/pubsub_v1/subscriber/client.py | 31 ++++++++++++++++-- .../publisher/test_publisher_client.py | 10 ++++++ .../subscriber/test_subscriber_client.py | 10 ++++++ 4 files changed, 80 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 910ebc5fbada..670000f7fc69 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -22,6 +22,7 @@ import six from google.api_core import grpc_helpers +from google.oauth2 import service_account from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types @@ -31,8 +32,15 @@ __version__ = pkg_resources.get_distribution('google-cloud-pubsub').version +_BLACKLISTED_METHODS = ( + 'publish', + 'from_service_account_file', + 'from_service_account_json', +) -@_gapic.add_methods(publisher_client.PublisherClient, blacklist=('publish',)) + +@_gapic.add_methods( + publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) class Client(object): """A publisher client for Google Cloud Pub/Sub. @@ -86,6 +94,28 @@ def __init__(self, batch_settings=(), **kwargs): self._batch_lock = self._batch_class.make_lock() self._batches = {} + @classmethod + def from_service_account_file(cls, filename, batch_settings=(), **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The + settings for batch publishing. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(batch_settings, **kwargs) + + from_service_account_json = from_service_account_file + @property def target(self): """Return the target (where the API is). diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 817ef80a97eb..226b7bf344f8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -20,6 +20,7 @@ import grpc from google.api_core import grpc_helpers +from google.oauth2 import service_account from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types @@ -30,9 +31,15 @@ __version__ = pkg_resources.get_distribution('google-cloud-pubsub').version +_BLACKLISTED_METHODS = ( + 'publish', + 'from_service_account_file', + 'from_service_account_json', +) -@_gapic.add_methods(subscriber_client.SubscriberClient, - blacklist=('streaming_pull',)) + +@_gapic.add_methods( + subscriber_client.SubscriberClient, blacklist=_BLACKLISTED_METHODS) class Client(object): """A subscriber client for Google Cloud Pub/Sub. @@ -75,6 +82,26 @@ def __init__(self, **kwargs): # client. self._api = subscriber_client.SubscriberClient(**kwargs) + @classmethod + def from_service_account_file(cls, filename, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + kwargs: Additional arguments to pass to the constructor. + + Returns: + Client: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs['credentials'] = credentials + return cls(**kwargs) + + from_service_account_json = from_service_account_file + @property def target(self): """Return the target (where the API is). diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 9b15e6d4777d..523dbe855fd1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -221,6 +221,16 @@ def test_gapic_class_method_on_class(): assert answer == 'projects/foo/topics/bar' +def test_class_method_factory(): + patch = mock.patch( + 'google.oauth2.service_account.Credentials.from_service_account_file') + + with patch: + client = publisher.Client.from_service_account_file('filename.json') + + assert isinstance(client, publisher.Client) + + def test_gapic_class_method_on_instance(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 35487c1a6a8d..86297d31cd96 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -40,6 +40,16 @@ def test_init_emulator(monkeypatch): assert channel.target().decode('utf8') == '/baz/bacon/' +def test_class_method_factory(): + patch = mock.patch( + 'google.oauth2.service_account.Credentials.from_service_account_file') + + with patch: + client = subscriber.Client.from_service_account_file('filename.json') + + assert isinstance(client, subscriber.Client) + + 
@mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager.' 'StreamingPullManager.open', autospec=True) From ce304b2ebcb0bf725075052ede401cf98933fd91 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 19 Nov 2018 08:15:37 -0800 Subject: [PATCH 0315/1197] Pick up fixes to GAPIC generator. (#6503) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 17 +- .../pubsub_v1/gapic/subscriber_client.py | 17 +- .../transports/publisher_grpc_transport.py | 11 ++ .../transports/subscriber_grpc_transport.py | 11 ++ .../unit/gapic/v1/test_publisher_client_v1.py | 101 ++++++++-- .../gapic/v1/test_subscriber_client_v1.py | 181 ++++++++++++++---- 6 files changed, 272 insertions(+), 66 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 71e093f22042..9e57c5f4789d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -103,7 +103,7 @@ def __init__(self, transport=None, channel=None, credentials=None, - client_config=publisher_client_config.config, + client_config=None, client_info=None): """Constructor. @@ -136,13 +136,20 @@ def __init__(self, your own client library. """ # Raise deprecation warnings for things we want to go away. 
- if client_config: - warnings.warn('The `client_config` argument is deprecated.', - PendingDeprecationWarning) + if client_config is not None: + warnings.warn( + 'The `client_config` argument is deprecated.', + PendingDeprecationWarning, + stacklevel=2) + else: + client_config = publisher_client_config.config + if channel: warnings.warn( 'The `channel` argument is deprecated; use ' - '`transport` instead.', PendingDeprecationWarning) + '`transport` instead.', + PendingDeprecationWarning, + stacklevel=2) # Instantiate the transport. # The transport is responsible for handling serialization and diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 0afc9299dd16..7f50e15e8a9c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -124,7 +124,7 @@ def __init__(self, transport=None, channel=None, credentials=None, - client_config=subscriber_client_config.config, + client_config=None, client_info=None): """Constructor. @@ -157,13 +157,20 @@ def __init__(self, your own client library. """ # Raise deprecation warnings for things we want to go away. - if client_config: - warnings.warn('The `client_config` argument is deprecated.', - PendingDeprecationWarning) + if client_config is not None: + warnings.warn( + 'The `client_config` argument is deprecated.', + PendingDeprecationWarning, + stacklevel=2) + else: + client_config = subscriber_client_config.config + if channel: warnings.warn( 'The `channel` argument is deprecated; use ' - '`transport` instead.', PendingDeprecationWarning) + '`transport` instead.', + PendingDeprecationWarning, + stacklevel=2) # Instantiate the transport. 
# The transport is responsible for handling serialization and diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 7ce2bca1a552..3353208636bb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -66,6 +66,8 @@ def __init__(self, credentials=credentials, ) + self._channel = channel + # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { @@ -96,6 +98,15 @@ def create_channel(cls, scopes=cls._OAUTH_SCOPES, ) + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. + """ + return self._channel + @property def create_topic(self): """Return the gRPC stub for {$apiMethod.name}. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 1455d6c476b4..c2682b5a8f63 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -66,6 +66,8 @@ def __init__(self, credentials=credentials, ) + self._channel = channel + # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { @@ -96,6 +98,15 @@ def create_channel(cls, scopes=cls._OAUTH_SCOPES, ) + @property + def channel(self): + """The gRPC channel used by the transport. + + Returns: + grpc.Channel: A gRPC channel object. 
+ """ + return self._channel + @property def create_subscription(self): """Return the gRPC stub for {$apiMethod.name}. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index befea7775a4f..feff5f1df72f 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -15,6 +15,7 @@ # limitations under the License. """Unit tests.""" +import mock import pytest from google.cloud.pubsub_v1.gapic import publisher_client @@ -73,7 +74,10 @@ def test_create_topic(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request name = client.topic_path('[PROJECT]', '[TOPIC]') @@ -89,7 +93,10 @@ def test_create_topic(self): def test_create_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request name = client.topic_path('[PROJECT]', '[TOPIC]') @@ -105,7 +112,10 @@ def test_update_topic(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request topic = {} @@ -123,7 +133,10 @@ def 
test_update_topic(self): def test_update_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request topic = {} @@ -141,7 +154,10 @@ def test_publish(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -161,7 +177,10 @@ def test_publish(self): def test_publish_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -180,7 +199,10 @@ def test_get_topic(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -196,7 +218,10 @@ def test_get_topic(self): def test_get_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = 
mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -217,7 +242,10 @@ def test_list_topics(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request project = client.project_path('[PROJECT]') @@ -235,7 +263,10 @@ def test_list_topics(self): def test_list_topics_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request project = client.project_path('[PROJECT]') @@ -258,7 +289,10 @@ def test_list_topic_subscriptions(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -277,7 +311,10 @@ def test_list_topic_subscriptions(self): def test_list_topic_subscriptions_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup 
request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -288,7 +325,10 @@ def test_list_topic_subscriptions_exception(self): def test_delete_topic(self): channel = ChannelStub() - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -303,7 +343,10 @@ def test_delete_topic(self): def test_delete_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request topic = client.topic_path('[PROJECT]', '[TOPIC]') @@ -320,7 +363,10 @@ def test_set_iam_policy(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request resource = client.topic_path('[PROJECT]', '[TOPIC]') @@ -338,7 +384,10 @@ def test_set_iam_policy(self): def test_set_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request resource = client.topic_path('[PROJECT]', '[TOPIC]') @@ -356,7 +405,10 @@ def test_get_iam_policy(self): # Mock the API response channel = 
ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request resource = client.topic_path('[PROJECT]', '[TOPIC]') @@ -373,7 +425,10 @@ def test_get_iam_policy(self): def test_get_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request resource = client.topic_path('[PROJECT]', '[TOPIC]') @@ -389,7 +444,10 @@ def test_test_iam_permissions(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup Request resource = client.topic_path('[PROJECT]', '[TOPIC]') @@ -407,7 +465,10 @@ def test_test_iam_permissions(self): def test_test_iam_permissions_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = publisher_client.PublisherClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() # Setup request resource = client.topic_path('[PROJECT]', '[TOPIC]') diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index 5e1ddc2059e3..aad07e34e45b 
100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -15,6 +15,7 @@ # limitations under the License. """Unit tests.""" +import mock import pytest from google.cloud.pubsub_v1.gapic import subscriber_client @@ -88,7 +89,10 @@ def test_create_subscription(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -105,7 +109,10 @@ def test_create_subscription(self): def test_create_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -130,7 +137,10 @@ def test_get_subscription(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -147,7 +157,10 @@ def test_get_subscription(self): def test_get_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = 
subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -171,7 +184,10 @@ def test_update_subscription(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request ack_deadline_seconds = 42 @@ -192,7 +208,10 @@ def test_update_subscription(self): def test_update_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request ack_deadline_seconds = 42 @@ -218,7 +237,10 @@ def test_list_subscriptions(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request project = client.project_path('[PROJECT]') @@ -236,7 +258,10 @@ def test_list_subscriptions(self): def test_list_subscriptions_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + 
create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request project = client.project_path('[PROJECT]') @@ -247,7 +272,10 @@ def test_list_subscriptions_exception(self): def test_delete_subscription(self): channel = ChannelStub() - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -263,7 +291,10 @@ def test_delete_subscription(self): def test_delete_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -273,7 +304,10 @@ def test_delete_subscription_exception(self): def test_modify_ack_deadline(self): channel = ChannelStub() - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -293,7 +327,10 @@ def test_modify_ack_deadline(self): def test_modify_ack_deadline_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = 
subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -306,7 +343,10 @@ def test_modify_ack_deadline_exception(self): def test_acknowledge(self): channel = ChannelStub() - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -323,7 +363,10 @@ def test_acknowledge(self): def test_acknowledge_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -339,7 +382,10 @@ def test_pull(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -357,7 +403,10 @@ def test_pull(self): def test_pull_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = 
client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -376,7 +425,10 @@ def test_streaming_pull(self): # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -402,7 +454,10 @@ def test_streaming_pull(self): def test_streaming_pull_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -420,7 +475,10 @@ def test_streaming_pull_exception(self): def test_modify_push_config(self): channel = ChannelStub() - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -437,7 +495,10 @@ def test_modify_push_config(self): def test_modify_push_config_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', 
'[SUBSCRIPTION]') @@ -460,7 +521,10 @@ def test_list_snapshots(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request project = client.project_path('[PROJECT]') @@ -478,7 +542,10 @@ def test_list_snapshots(self): def test_list_snapshots_exception(self): channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request project = client.project_path('[PROJECT]') @@ -496,7 +563,10 @@ def test_create_snapshot(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') @@ -514,7 +584,10 @@ def test_create_snapshot(self): def test_create_snapshot_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') @@ -532,7 +605,10 @@ def test_update_snapshot(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = 
subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request seconds = 123456 @@ -554,7 +630,10 @@ def test_update_snapshot(self): def test_update_snapshot_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request seconds = 123456 @@ -569,7 +648,10 @@ def test_update_snapshot_exception(self): def test_delete_snapshot(self): channel = ChannelStub() - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') @@ -584,7 +666,10 @@ def test_delete_snapshot(self): def test_delete_snapshot_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') @@ -599,7 +684,10 @@ def test_seek(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + 
client = subscriber_client.SubscriberClient() # Setup Request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -615,7 +703,10 @@ def test_seek(self): def test_seek_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -632,7 +723,10 @@ def test_set_iam_policy(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -650,7 +744,10 @@ def test_set_iam_policy(self): def test_set_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -668,7 +765,10 @@ def test_get_iam_policy(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request resource = 
client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -685,7 +785,10 @@ def test_get_iam_policy(self): def test_get_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -701,7 +804,10 @@ def test_test_iam_permissions(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup Request resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') @@ -719,7 +825,10 @@ def test_test_iam_permissions(self): def test_test_iam_permissions_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - client = subscriber_client.SubscriberClient(channel=channel) + patch = mock.patch('google.api_core.grpc_helpers.create_channel') + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() # Setup request resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') From b3fc8a063ae3560e672ea5ef2f01a668c09e3f31 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 27 Nov 2018 13:49:07 -0800 Subject: [PATCH 0316/1197] Release 0.39.0 (#6656) --- packages/google-cloud-pubsub/CHANGELOG.md | 32 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md 
index a6ed8ca5f36c..36e815ae1950 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,38 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.39.0 + +11-27-2018 13:32 PST + +### Implementation Changes +- Pick up fixes to GAPIC generator. ([#6503](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6503)) +- Override client classmethod factories inherited from GAPIC. ([#6453](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6453)) +- Fix imports for hand-written client docstring examples. ([#6345](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6345)) +- Fix path for patch of 'bidi' elements. ([#6243](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6243)) +- Move bidi to api-core. ([#6211](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6211)) +- Re-generate library using pubsub/synth.py ([#6059](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6059)) +- Re-generate library using pubsub/synth.py ([#5978](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/5978)) + +### New Features +- Add 'expiration_policy' to subscriber client. ([#6223](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6223)) + +### Dependencies +- Bump minimum 'api_core' version for all GAPIC libs to 1.4.1. ([#6391](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6391)) +- Update IAM version in dependencies. ([#6362](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6362)) +- Bump minimum 'api_core' version to '1.4.1'. ([#6134](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6134)) + +### Documentation +- Fix client_info bug, update docstrings. ([#6418](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6418)) +- Fix docstring reference to wrong future class. 
([#6382](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6382)) +- Normalize use of support level badges. ([#6159](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6159)) +- Update subscriber example in README to current patterns. ([#6194](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6194)) +- Prep pubsub docs for repo split. ([#6001](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6001)) + +### Internal / Testing Changes +- Fix error from new flake8 version. ([#6346](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6346)) +- Use new Nox. ([#6175](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6175)) + ## 0.38.0 ### Implementation Changes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 899b85ca9325..70c7c5c3264a 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.38.0' +version = '0.39.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 553e6f577f069e74d10da0bf0d2c9ec4a538e5ff Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 28 Nov 2018 13:55:23 -0800 Subject: [PATCH 0317/1197] Add templates for flake8, coveragerc, noxfile, and black. 
(#6642) --- packages/google-cloud-pubsub/.coveragerc | 14 +- packages/google-cloud-pubsub/.flake8 | 1 + packages/google-cloud-pubsub/noxfile.py | 156 +++++++++++++---------- packages/google-cloud-pubsub/synth.py | 81 +++++++----- 4 files changed, 147 insertions(+), 105 deletions(-) diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 4bea65589ecb..51fec440cebf 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -1,16 +1,9 @@ [run] branch = True -source = - google.cloud.pubsub - google.cloud.pubsub_v1 - tests.unit [report] -omit = - */gapic/* - */proto/* +fail_under = 100 show_missing = True - exclude_lines = # Re-enable the standard pragma pragma: NO COVER @@ -18,3 +11,8 @@ exclude_lines = def __repr__ # Ignore abstract methods raise NotImplementedError +omit = + */gapic/*.py + */proto/*.py + */google-cloud-python/core/*.py + */site-packages/*.py \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 1f44a90f8195..61766fa84d02 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,4 +1,5 @@ [flake8] +ignore = E203, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index a061653d5407..a9efc0e344ce 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -1,10 +1,12 @@ -# Copyright 2016 Google LLC +# -*- coding: utf-8 -*- +# +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# http://www.apache.org/licenses/LICENSE-2.0 +# https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, @@ -13,102 +15,126 @@ # limitations under the License. from __future__ import absolute_import - import os import nox -LOCAL_DEPS = ( - os.path.join('..', 'api_core'), - os.path.join('..', 'core'), -) +LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +@nox.session(python="3.7") +def blacken(session): + """Run black. -@nox.session -def default(session): - """Default unit test session. + Format code to uniform standard. + """ + session.install("black") + session.run( + "black", + "google", + "tests", + "docs", + "--exclude", + ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", + ) + + +@nox.session(python="3.7") +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. """ - # Install all test dependencies, then install local packages in-place. - session.install('mock', 'pytest', 'pytest-cov') + session.install("flake8", "black", *LOCAL_DEPS) + session.run( + "black", + "--check", + "google", + "tests", + "docs", + "--exclude", + ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", + ) + session.run("flake8", "google", "tests") + + +@nox.session(python="3.7") +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def default(session): + # Install all test dependencies, then install this package in-place. + session.install("mock", "pytest", "pytest-cov") for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - session.install('-e', '.') + session.install("-e", local_dep) + session.install("-e", ".") # Run py.test against the unit tests. 
session.run( - 'py.test', - '--quiet', - '--cov-append', - '--cov-report=', - '--cov=google.cloud.pubsub', - '--cov=google.cloud.pubsub_v1', - '--cov-config=.coveragerc', - os.path.join('tests', 'unit'), - *session.posargs + "py.test", + "--quiet", + "--cov=google.cloud", + "--cov=tests.unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=97", + os.path.join("tests", "unit"), + *session.posargs, ) -@nox.session(python=['2.7', '3.5', '3.6', '3.7']) +@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=['2.7', '3.6']) +@nox.session(python=["2.7", "3.7"]) def system(session): """Run the system test suite.""" - - # Sanity check: Only run system tests if the environment variable is set. - if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''): - session.skip('Credentials must be set via environment variable.') + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + # Sanity check: Only run tests if the environment variable is set. + if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): + session.skip("Credentials must be set via environment variable") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") # Use pre-release gRPC for system tests. - session.install('--pre', 'grpcio') + session.install("--pre", "grpcio") - # Install all test dependencies, then install local packages in-place. - session.install('mock', 'pytest') + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. 
+ session.install("mock", "pytest") for local_dep in LOCAL_DEPS: - session.install('-e', local_dep) - session.install('-e', '../test_utils/') - session.install('-e', '.') + session.install("-e", local_dep) + session.install("-e", "../test_utils/") + session.install("-e", ".") # Run py.test against the system tests. - session.run( - 'py.test', - '--quiet', - 'tests/system.py', - *session.posargs - ) + if system_test_exists: + session.run("py.test", "--quiet", system_test_path, *session.posargs) + if system_test_folder_exists: + session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python='3.6') -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install('flake8', *LOCAL_DEPS) - session.install('.') - session.run('flake8', 'google', 'tests') - - -@nox.session(python='3.6') -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install('docutils', 'Pygments') - session.run( - 'python', 'setup.py', 'check', '--restructuredtext', '--strict') - - -@nox.session(python='3.6') +@nox.session(python="3.7") def cover(session): """Run the final coverage report. This outputs the coverage report aggregating coverage from the unit test runs (not system test runs), and then erases coverage data. 
""" - session.install('coverage', 'pytest-cov') - session.run('coverage', 'report', '--show-missing', '--fail-under=100') - session.run('coverage', 'erase') + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 1123c75e4169..0289f096dddd 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -20,70 +20,81 @@ from synthtool import gcp gapic = gcp.GAPICGenerator() +common = gcp.CommonTemplates() +version = "v1" -version = 'v1' - - +# ---------------------------------------------------------------------------- +# Generate pubsub GAPIC layer +# ---------------------------------------------------------------------------- library = gapic.py_library( - 'pubsub', version, config_path='/google/pubsub/artman_pubsub.yaml') + "pubsub", version, config_path="/google/pubsub/artman_pubsub.yaml" +) s.move( library, excludes=[ - 'docs/**/*', 'nox.py', 'README.rst', 'setup.py', - 'google/cloud/pubsub_v1/__init__.py', 'google/cloud/pubsub_v1/types.py']) + "docs/**/*", + "nox.py", + "README.rst", + "setup.py", + "google/cloud/pubsub_v1/__init__.py", + "google/cloud/pubsub_v1/types.py", + ], +) # Adjust tests to import the clients directly. 
s.replace( - 'tests/unit/gapic/v1/test_publisher_client_v1.py', - 'from google.cloud import pubsub_v1', - 'from google.cloud.pubsub_v1.gapic import publisher_client') + "tests/unit/gapic/v1/test_publisher_client_v1.py", + "from google.cloud import pubsub_v1", + "from google.cloud.pubsub_v1.gapic import publisher_client", +) s.replace( - 'tests/unit/gapic/v1/test_publisher_client_v1.py', - ' pubsub_v1', - ' publisher_client') + "tests/unit/gapic/v1/test_publisher_client_v1.py", " pubsub_v1", " publisher_client" +) s.replace( - 'tests/unit/gapic/v1/test_subscriber_client_v1.py', - 'from google.cloud import pubsub_v1', - 'from google.cloud.pubsub_v1.gapic import subscriber_client') + "tests/unit/gapic/v1/test_subscriber_client_v1.py", + "from google.cloud import pubsub_v1", + "from google.cloud.pubsub_v1.gapic import subscriber_client", +) s.replace( - 'tests/unit/gapic/v1/test_subscriber_client_v1.py', - ' pubsub_v1', - ' subscriber_client') + "tests/unit/gapic/v1/test_subscriber_client_v1.py", + " pubsub_v1", + " subscriber_client", +) # DEFAULT SCOPES are being used. so let's force them in. s.replace( - 'google/cloud/pubsub_v1/gapic/*er_client.py', - '# The name of the interface for this client. This is the key used to', - '''# The scopes needed to make gRPC calls to all of the methods defined in + "google/cloud/pubsub_v1/gapic/*er_client.py", + "# The name of the interface for this client. 
This is the key used to", + """# The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub', ) - \g<0>''' + \g<0>""", ) s.replace( - 'google/cloud/pubsub_v1/gapic/publisher_client.py', - 'import google.api_core.gapic_v1.method\n', - '\g<0>import google.api_core.path_template\n' + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "import google.api_core.gapic_v1.method\n", + "\g<0>import google.api_core.path_template\n", ) # Doc strings are formatted poorly s.replace( - 'google/cloud/pubsub_v1/proto/pubsub_pb2.py', + "google/cloud/pubsub_v1/proto/pubsub_pb2.py", 'DESCRIPTOR = _MESSAGESTORAGEPOLICY,\n\s+__module__.*\n\s+,\n\s+__doc__ = """', - '\g<0>A message storage policy.\n\n\n ' + "\g<0>A message storage policy.\n\n\n ", ) s.replace( - 'google/cloud/pubsub_v1/gapic/subscriber_client.py', - 'subscription \(str\): The subscription whose backlog .*\n(.*\n)+?' - '\s+Format is .*', - '''subscription (str): The subscription whose backlog the snapshot retains. + "google/cloud/pubsub_v1/gapic/subscriber_client.py", + "subscription \(str\): The subscription whose backlog .*\n(.*\n)+?" + "\s+Format is .*", + """subscription (str): The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: \\ (a) The existing backlog on the subscription. More precisely, this is \\ defined as the messages in the subscription's backlog that are \\ @@ -92,5 +103,11 @@ (b) Any messages published to the subscription's topic following the \\ successful completion of the CreateSnapshot request. 
\\ - Format is ``projects/{project}/subscriptions/{sub}``.''' + Format is ``projects/{project}/subscriptions/{sub}``.""", ) + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = gcp.CommonTemplates().py_library(unit_cov_level=97, cov_level=100) +s.move(templated_files) From 93e0b610e1605986305477cd95f81bd00a394866 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 28 Nov 2018 14:28:56 -0800 Subject: [PATCH 0318/1197] Run Black on Generated libraries (#6666) * blacken appveyor * Blacken all gen'd libraries not under PR --- .../google-cloud-pubsub/google/__init__.py | 2 + .../google/cloud/__init__.py | 2 + .../google/cloud/pubsub.py | 6 +- .../google/cloud/pubsub_v1/__init__.py | 6 +- .../google/cloud/pubsub_v1/_gapic.py | 5 +- .../google/cloud/pubsub_v1/exceptions.py | 4 +- .../google/cloud/pubsub_v1/futures.py | 9 +- .../cloud/pubsub_v1/gapic/publisher_client.py | 461 +++++----- .../gapic/publisher_client_config.py | 43 +- .../pubsub_v1/gapic/subscriber_client.py | 804 +++++++++--------- .../gapic/subscriber_client_config.py | 56 +- .../transports/publisher_grpc_transport.py | 53 +- .../transports/subscriber_grpc_transport.py | 69 +- .../cloud/pubsub_v1/publisher/__init__.py | 4 +- .../cloud/pubsub_v1/publisher/_batch/base.py | 12 +- .../pubsub_v1/publisher/_batch/thread.py | 43 +- .../cloud/pubsub_v1/publisher/client.py | 42 +- .../cloud/pubsub_v1/publisher/exceptions.py | 5 +- .../cloud/pubsub_v1/publisher/futures.py | 5 +- .../cloud/pubsub_v1/subscriber/__init__.py | 4 +- .../subscriber/_protocol/dispatcher.py | 30 +- .../subscriber/_protocol/heartbeater.py | 14 +- .../subscriber/_protocol/helper_threads.py | 16 +- .../subscriber/_protocol/histogram.py | 7 +- .../pubsub_v1/subscriber/_protocol/leaser.py | 53 +- .../subscriber/_protocol/requests.py | 23 +- .../_protocol/streaming_pull_manager.py | 99 ++- 
.../cloud/pubsub_v1/subscriber/client.py | 42 +- .../cloud/pubsub_v1/subscriber/message.py | 41 +- .../cloud/pubsub_v1/subscriber/scheduler.py | 9 +- .../google/cloud/pubsub_v1/types.py | 43 +- packages/google-cloud-pubsub/tests/system.py | 34 +- .../gapic/v1/test_system_publisher_v1.py | 2 +- .../unit/gapic/v1/test_publisher_client_v1.py | 152 ++-- .../gapic/v1/test_subscriber_client_v1.py | 314 ++++--- .../pubsub_v1/publisher/batch/test_base.py | 10 +- .../pubsub_v1/publisher/batch/test_thread.py | 103 ++- .../publisher/test_publisher_client.py | 80 +- .../pubsub_v1/subscriber/test_dispatcher.py | 101 ++- .../subscriber/test_futures_subscriber.py | 8 +- .../pubsub_v1/subscriber/test_heartbeater.py | 30 +- .../subscriber/test_helper_threads.py | 41 +- .../unit/pubsub_v1/subscriber/test_leaser.py | 98 +-- .../unit/pubsub_v1/subscriber/test_message.py | 128 +-- .../pubsub_v1/subscriber/test_scheduler.py | 7 +- .../subscriber/test_streaming_pull_manager.py | 233 ++--- .../subscriber/test_subscriber_client.py | 39 +- .../tests/unit/pubsub_v1/test_futures.py | 28 +- 48 files changed, 1681 insertions(+), 1739 deletions(-) diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py index f65701dd143f..7aa71bb4e2f1 100644 --- a/packages/google-cloud-pubsub/google/__init__.py +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -16,7 +16,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index f65701dd143f..7aa71bb4e2f1 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -16,7 +16,9 @@ try: import pkg_resources + pkg_resources.declare_namespace(__name__) except ImportError: import pkgutil + __path__ = 
pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index 7da5d951d804..2c249504d7b3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -20,8 +20,4 @@ from google.cloud.pubsub_v1 import SubscriberClient from google.cloud.pubsub_v1 import types -__all__ = ( - 'types', - 'PublisherClient', - 'SubscriberClient', -) +__all__ = ("types", "PublisherClient", "SubscriberClient") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py index e6f3c0aae6e7..67bec51b248b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py @@ -27,8 +27,4 @@ class SubscriberClient(subscriber.Client): __doc__ = subscriber.Client.__doc__ -__all__ = ( - 'types', - 'PublisherClient', - 'SubscriberClient', -) +__all__ = ("types", "PublisherClient", "SubscriberClient") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index 0383ec85f451..25cb3e5fa33c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -24,6 +24,7 @@ def add_methods(source_class, blacklist=()): Additionally, any methods explicitly defined on the wrapped class are not added. """ + def wrap(wrapped_fx): """Wrap a GAPIC method; preserve its name and docstring.""" # If this is a static or class method, then we need to *not* @@ -34,7 +35,7 @@ def wrap(wrapped_fx): instance_method = True # If this is a bound method it's a classmethod. 
- self = getattr(wrapped_fx, '__self__', None) + self = getattr(wrapped_fx, "__self__", None) if issubclass(type(self), type): instance_method = False @@ -52,7 +53,7 @@ def actual_decorator(cls): # (the GAPIC) and make wrapped versions available on this client. for name in dir(source_class): # Ignore all private and magic methods. - if name.startswith('_'): + if name.startswith("_"): continue # Ignore anything on our blacklist. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py index 806bb204aeef..5ab750d0477c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/exceptions.py @@ -17,6 +17,4 @@ from concurrent.futures import TimeoutError -__all__ = ( - 'TimeoutError', -) +__all__ = ("TimeoutError",) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 0940a47709a3..39688f291dbf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -84,8 +84,7 @@ def done(self): This still returns True in failure cases; checking :meth:`result` or :meth:`exception` is the canonical way to assess success or failure. """ - return (self._exception != self._SENTINEL or - self._result != self._SENTINEL) + return self._exception != self._SENTINEL or self._result != self._SENTINEL def result(self, timeout=None): """Return the message ID, or raise an exception. @@ -131,7 +130,7 @@ def exception(self, timeout=None): """ # Wait until the future is done. if not self._completed.wait(timeout=timeout): - raise exceptions.TimeoutError('Timed out waiting for result.') + raise exceptions.TimeoutError("Timed out waiting for result.") # If the batch completed successfully, this should return None. 
if self._result != self._SENTINEL: @@ -158,7 +157,7 @@ def set_result(self, result): """ # Sanity check: A future can only complete once. if self.done(): - raise RuntimeError('set_result can only be called once.') + raise RuntimeError("set_result can only be called once.") # Set the result and trigger the future. self._result = result @@ -172,7 +171,7 @@ def set_exception(self, exception): """ # Sanity check: A future can only complete once. if self.done(): - raise RuntimeError('set_exception can only be called once.') + raise RuntimeError("set_exception can only be called once.") # Set the exception and trigger the future. self._exception = exception diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 9e57c5f4789d..7c4da4ab6e01 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -38,8 +38,7 @@ from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-pubsub', ).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version class PublisherClient(object): @@ -48,18 +47,19 @@ class PublisherClient(object): messages to a topic. """ - SERVICE_ADDRESS = 'pubsub.googleapis.com:443' + SERVICE_ADDRESS = "pubsub.googleapis.com:443" """The default address of the service.""" # The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) # The name of the interface for this client. 
This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.pubsub.v1.Publisher' + _INTERFACE_NAME = "google.pubsub.v1.Publisher" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -75,9 +75,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: PublisherClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @@ -86,25 +85,24 @@ def from_service_account_file(cls, filename, *args, **kwargs): def topic_path(cls, project, topic): """Return a fully-qualified topic string.""" return google.api_core.path_template.expand( - 'projects/{project}/topics/{topic}', - project=project, - topic=topic, + "projects/{project}/topics/{topic}", project=project, topic=topic ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - 'projects/{project}', - project=project, + "projects/{project}", project=project ) - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -138,18 +136,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. 
if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = publisher_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -158,25 +157,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=publisher_grpc_transport. - PublisherGrpcTransport, + default_class=publisher_grpc_transport.PublisherGrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) self.transport = transport else: self.transport = publisher_grpc_transport.PublisherGrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -186,7 +184,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. 
# These are the actual callables which invoke the proper @@ -195,13 +194,15 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def create_topic(self, - name, - labels=None, - message_storage_policy=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def create_topic( + self, + name, + labels=None, + message_storage_policy=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates the given topic with the given name. See the resource name rules. @@ -252,30 +253,31 @@ def create_topic(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_topic' not in self._inner_api_calls: + if "create_topic" not in self._inner_api_calls: self._inner_api_calls[ - 'create_topic'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_topic, - default_retry=self._method_configs['CreateTopic'].retry, - default_timeout=self._method_configs['CreateTopic']. 
- timeout, - client_info=self._client_info, - ) + "create_topic" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_topic, + default_retry=self._method_configs["CreateTopic"].retry, + default_timeout=self._method_configs["CreateTopic"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.Topic( - name=name, - labels=labels, - message_storage_policy=message_storage_policy, + name=name, labels=labels, message_storage_policy=message_storage_policy + ) + return self._inner_api_calls["create_topic"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['create_topic']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_topic(self, - topic, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def update_topic( + self, + topic, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Updates an existing topic. Note that certain properties of a topic are not modifiable. @@ -327,29 +329,29 @@ def update_topic(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'update_topic' not in self._inner_api_calls: + if "update_topic" not in self._inner_api_calls: self._inner_api_calls[ - 'update_topic'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_topic, - default_retry=self._method_configs['UpdateTopic'].retry, - default_timeout=self._method_configs['UpdateTopic']. 
- timeout, - client_info=self._client_info, - ) + "update_topic" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_topic, + default_retry=self._method_configs["UpdateTopic"].retry, + default_timeout=self._method_configs["UpdateTopic"].timeout, + client_info=self._client_info, + ) - request = pubsub_pb2.UpdateTopicRequest( - topic=topic, - update_mask=update_mask, + request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask) + return self._inner_api_calls["update_topic"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['update_topic']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def publish(self, - topic, - messages, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def publish( + self, + topic, + messages, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. @@ -393,27 +395,28 @@ def publish(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'publish' not in self._inner_api_calls: + if "publish" not in self._inner_api_calls: self._inner_api_calls[ - 'publish'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.publish, - default_retry=self._method_configs['Publish'].retry, - default_timeout=self._method_configs['Publish'].timeout, - client_info=self._client_info, - ) + "publish" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.publish, + default_retry=self._method_configs["Publish"].retry, + default_timeout=self._method_configs["Publish"].timeout, + client_info=self._client_info, + ) - request = pubsub_pb2.PublishRequest( - topic=topic, - messages=messages, + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + return self._inner_api_calls["publish"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['publish']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def get_topic(self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def get_topic( + self, + topic, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets the configuration of a topic. @@ -449,25 +452,29 @@ def get_topic(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'get_topic' not in self._inner_api_calls: + if "get_topic" not in self._inner_api_calls: self._inner_api_calls[ - 'get_topic'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_topic, - default_retry=self._method_configs['GetTopic'].retry, - default_timeout=self._method_configs['GetTopic'].timeout, - client_info=self._client_info, - ) + "get_topic" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_topic, + default_retry=self._method_configs["GetTopic"].retry, + default_timeout=self._method_configs["GetTopic"].timeout, + client_info=self._client_info, + ) - request = pubsub_pb2.GetTopicRequest(topic=topic, ) - return self._inner_api_calls['get_topic']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = pubsub_pb2.GetTopicRequest(topic=topic) + return self._inner_api_calls["get_topic"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - def list_topics(self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def list_topics( + self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists matching topics. @@ -523,40 +530,40 @@ def list_topics(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'list_topics' not in self._inner_api_calls: + if "list_topics" not in self._inner_api_calls: self._inner_api_calls[ - 'list_topics'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topics, - default_retry=self._method_configs['ListTopics'].retry, - default_timeout=self._method_configs['ListTopics'].timeout, - client_info=self._client_info, - ) + "list_topics" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_topics, + default_retry=self._method_configs["ListTopics"].retry, + default_timeout=self._method_configs["ListTopics"].timeout, + client_info=self._client_info, + ) - request = pubsub_pb2.ListTopicsRequest( - project=project, - page_size=page_size, - ) + request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_topics'], + self._inner_api_calls["list_topics"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='topics', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="topics", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator def list_topic_subscriptions( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + self, + topic, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists the names of the subscriptions on this topic. @@ -612,40 +619,41 @@ def list_topic_subscriptions( ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'list_topic_subscriptions' not in self._inner_api_calls: + if "list_topic_subscriptions" not in self._inner_api_calls: self._inner_api_calls[ - 'list_topic_subscriptions'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_subscriptions, - default_retry=self. - _method_configs['ListTopicSubscriptions'].retry, - default_timeout=self. - _method_configs['ListTopicSubscriptions'].timeout, - client_info=self._client_info, - ) + "list_topic_subscriptions" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_topic_subscriptions, + default_retry=self._method_configs["ListTopicSubscriptions"].retry, + default_timeout=self._method_configs["ListTopicSubscriptions"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, - page_size=page_size, + topic=topic, page_size=page_size ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_topic_subscriptions'], + self._inner_api_calls["list_topic_subscriptions"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='subscriptions', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="subscriptions", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def delete_topic(self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def delete_topic( + self, + topic, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic does not exist. After a topic is deleted, a new topic may be @@ -683,26 +691,29 @@ def delete_topic(self, ValueError: If the parameters are invalid. 
""" # Wrap the transport method to add retry and timeout logic. - if 'delete_topic' not in self._inner_api_calls: + if "delete_topic" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_topic'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_topic, - default_retry=self._method_configs['DeleteTopic'].retry, - default_timeout=self._method_configs['DeleteTopic']. - timeout, - client_info=self._client_info, - ) + "delete_topic" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_topic, + default_retry=self._method_configs["DeleteTopic"].retry, + default_timeout=self._method_configs["DeleteTopic"].timeout, + client_info=self._client_info, + ) - request = pubsub_pb2.DeleteTopicRequest(topic=topic, ) - self._inner_api_calls['delete_topic']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = pubsub_pb2.DeleteTopicRequest(topic=topic) + self._inner_api_calls["delete_topic"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - def set_iam_policy(self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def set_iam_policy( + self, + resource, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Sets the access control policy on the specified resource. Replaces any existing policy. @@ -750,28 +761,28 @@ def set_iam_policy(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'set_iam_policy' not in self._inner_api_calls: + if "set_iam_policy" not in self._inner_api_calls: self._inner_api_calls[ - 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs['SetIamPolicy'].retry, - default_timeout=self._method_configs['SetIamPolicy']. 
- timeout, - client_info=self._client_info, - ) + "set_iam_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.set_iam_policy, + default_retry=self._method_configs["SetIamPolicy"].retry, + default_timeout=self._method_configs["SetIamPolicy"].timeout, + client_info=self._client_info, + ) - request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, - policy=policy, + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + return self._inner_api_calls["set_iam_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['set_iam_policy']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def get_iam_policy(self, - resource, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def get_iam_policy( + self, + resource, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy @@ -810,26 +821,29 @@ def get_iam_policy(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'get_iam_policy' not in self._inner_api_calls: + if "get_iam_policy" not in self._inner_api_calls: self._inner_api_calls[ - 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs['GetIamPolicy'].retry, - default_timeout=self._method_configs['GetIamPolicy']. 
- timeout, - client_info=self._client_info, - ) + "get_iam_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_iam_policy, + default_retry=self._method_configs["GetIamPolicy"].retry, + default_timeout=self._method_configs["GetIamPolicy"].timeout, + client_info=self._client_info, + ) - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - return self._inner_api_calls['get_iam_policy']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._inner_api_calls["get_iam_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - def test_iam_permissions(self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def test_iam_permissions( + self, + resource, + permissions, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, @@ -875,20 +889,19 @@ def test_iam_permissions(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'test_iam_permissions' not in self._inner_api_calls: + if "test_iam_permissions" not in self._inner_api_calls: self._inner_api_calls[ - 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs['TestIamPermissions']. - retry, - default_timeout=self._method_configs['TestIamPermissions']. 
- timeout, - client_info=self._client_info, - ) + "test_iam_permissions" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.test_iam_permissions, + default_retry=self._method_configs["TestIamPermissions"].retry, + default_timeout=self._method_configs["TestIamPermissions"].timeout, + client_info=self._client_info, + ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, - permissions=permissions, + resource=resource, permissions=permissions + ) + return self._inner_api_calls["test_iam_permissions"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['test_iam_permissions']( - request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index e5db20ce1d9f..4c3519d7dac2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -5,9 +5,14 @@ "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "non_idempotent": [], "one_plus_delivery": [ - "ABORTED", "CANCELLED", "DEADLINE_EXCEEDED", "INTERNAL", - "RESOURCE_EXHAUSTED", "UNAVAILABLE", "UNKNOWN" - ] + "ABORTED", + "CANCELLED", + "DEADLINE_EXCEEDED", + "INTERNAL", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE", + "UNKNOWN", + ], }, "retry_params": { "default": { @@ -17,7 +22,7 @@ "initial_rpc_timeout_millis": 60000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, }, "messaging": { "initial_retry_delay_millis": 100, @@ -26,19 +31,19 @@ "initial_rpc_timeout_millis": 12000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 30000, - "total_timeout_millis": 600000 - } + "total_timeout_millis": 600000, + }, }, "methods": { "CreateTopic": { "timeout_millis": 
60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "UpdateTopic": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Publish": { "timeout_millis": 60000, @@ -49,45 +54,45 @@ "element_count_limit": 1000, "request_byte_threshold": 1024, "request_byte_limit": 10485760, - "delay_threshold_millis": 10 - } + "delay_threshold_millis": 10, + }, }, "GetTopic": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListTopics": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListTopicSubscriptions": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteTopic": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "SetIamPolicy": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "GetIamPolicy": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "TestIamPermissions": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 7f50e15e8a9c..67375a9387c9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -40,8 +40,7 @@ from google.protobuf import field_mask_pb2 from 
google.protobuf import timestamp_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-pubsub', ).version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version class SubscriberClient(object): @@ -51,18 +50,19 @@ class SubscriberClient(object): establishing a bi-directional stream using the ``StreamingPull`` method. """ - SERVICE_ADDRESS = 'pubsub.googleapis.com:443' + SERVICE_ADDRESS = "pubsub.googleapis.com:443" """The default address of the service.""" # The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) # The name of the interface for this client. This is the key used to # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = 'google.pubsub.v1.Subscriber' + _INTERFACE_NAME = "google.pubsub.v1.Subscriber" @classmethod def from_service_account_file(cls, filename, *args, **kwargs): @@ -78,9 +78,8 @@ def from_service_account_file(cls, filename, *args, **kwargs): Returns: SubscriberClient: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(*args, **kwargs) from_service_account_json = from_service_account_file @@ -89,7 +88,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def subscription_path(cls, project, subscription): """Return a fully-qualified subscription string.""" return google.api_core.path_template.expand( - 'projects/{project}/subscriptions/{subscription}', + "projects/{project}/subscriptions/{subscription}", project=project, subscription=subscription, ) @@ -98,34 +97,33 @@ def subscription_path(cls, project, subscription): def topic_path(cls, project, topic): """Return a fully-qualified topic string.""" return google.api_core.path_template.expand( - 'projects/{project}/topics/{topic}', - project=project, - topic=topic, + "projects/{project}/topics/{topic}", project=project, topic=topic ) @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - 'projects/{project}', - project=project, + "projects/{project}", project=project ) @classmethod def snapshot_path(cls, project, snapshot): """Return a fully-qualified snapshot string.""" return google.api_core.path_template.expand( - 'projects/{project}/snapshots/{snapshot}', + "projects/{project}/snapshots/{snapshot}", project=project, snapshot=snapshot, ) - def __init__(self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None): + def __init__( + self, + transport=None, + channel=None, + credentials=None, + client_config=None, + client_info=None, + ): """Constructor. Args: @@ -159,18 +157,19 @@ def __init__(self, # Raise deprecation warnings for things we want to go away. 
if client_config is not None: warnings.warn( - 'The `client_config` argument is deprecated.', + "The `client_config` argument is deprecated.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) else: client_config = subscriber_client_config.config if channel: warnings.warn( - 'The `channel` argument is deprecated; use ' - '`transport` instead.', + "The `channel` argument is deprecated; use " "`transport` instead.", PendingDeprecationWarning, - stacklevel=2) + stacklevel=2, + ) # Instantiate the transport. # The transport is responsible for handling serialization and @@ -179,25 +178,24 @@ def __init__(self, if callable(transport): self.transport = transport( credentials=credentials, - default_class=subscriber_grpc_transport. - SubscriberGrpcTransport, + default_class=subscriber_grpc_transport.SubscriberGrpcTransport, ) else: if credentials: raise ValueError( - 'Received both a transport instance and ' - 'credentials; these are mutually exclusive.') + "Received both a transport instance and " + "credentials; these are mutually exclusive." + ) self.transport = transport else: self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( - address=self.SERVICE_ADDRESS, - channel=channel, - credentials=credentials, + address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, ) + gapic_version=_GAPIC_LIBRARY_VERSION + ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION self._client_info = client_info @@ -207,7 +205,8 @@ def __init__(self, # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config['interfaces'][self._INTERFACE_NAME], ) + client_config["interfaces"][self._INTERFACE_NAME] + ) # Save a dictionary of cached API call functions. 
# These are the actual callables which invoke the proper @@ -216,18 +215,20 @@ def __init__(self, self._inner_api_calls = {} # Service calls - def create_subscription(self, - name, - topic, - push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - labels=None, - expiration_policy=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def create_subscription( + self, + name, + topic, + push_config=None, + ack_deadline_seconds=None, + retain_acked_messages=None, + message_retention_duration=None, + labels=None, + expiration_policy=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates a subscription to a given topic. See the resource name rules. If the subscription already exists, returns ``ALREADY_EXISTS``. If the @@ -338,16 +339,15 @@ def create_subscription(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_subscription' not in self._inner_api_calls: + if "create_subscription" not in self._inner_api_calls: self._inner_api_calls[ - 'create_subscription'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_subscription, - default_retry=self._method_configs['CreateSubscription']. - retry, - default_timeout=self._method_configs['CreateSubscription']. 
- timeout, - client_info=self._client_info, - ) + "create_subscription" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_subscription, + default_retry=self._method_configs["CreateSubscription"].retry, + default_timeout=self._method_configs["CreateSubscription"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.Subscription( name=name, @@ -359,14 +359,17 @@ def create_subscription(self, labels=labels, expiration_policy=expiration_policy, ) - return self._inner_api_calls['create_subscription']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def get_subscription(self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["create_subscription"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def get_subscription( + self, + subscription, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets the configuration details of a subscription. @@ -402,28 +405,29 @@ def get_subscription(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'get_subscription' not in self._inner_api_calls: + if "get_subscription" not in self._inner_api_calls: self._inner_api_calls[ - 'get_subscription'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_subscription, - default_retry=self._method_configs['GetSubscription']. - retry, - default_timeout=self._method_configs['GetSubscription']. 
- timeout, - client_info=self._client_info, - ) + "get_subscription" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_subscription, + default_retry=self._method_configs["GetSubscription"].retry, + default_timeout=self._method_configs["GetSubscription"].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + return self._inner_api_calls["get_subscription"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - request = pubsub_pb2.GetSubscriptionRequest( - subscription=subscription, ) - return self._inner_api_calls['get_subscription']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_subscription(self, - subscription, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def update_subscription( + self, + subscription, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. @@ -471,30 +475,31 @@ def update_subscription(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'update_subscription' not in self._inner_api_calls: + if "update_subscription" not in self._inner_api_calls: self._inner_api_calls[ - 'update_subscription'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_subscription, - default_retry=self._method_configs['UpdateSubscription']. - retry, - default_timeout=self._method_configs['UpdateSubscription']. 
- timeout, - client_info=self._client_info, - ) + "update_subscription" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_subscription, + default_retry=self._method_configs["UpdateSubscription"].retry, + default_timeout=self._method_configs["UpdateSubscription"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, - update_mask=update_mask, + subscription=subscription, update_mask=update_mask ) - return self._inner_api_calls['update_subscription']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def list_subscriptions(self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["update_subscription"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def list_subscriptions( + self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists matching subscriptions. @@ -550,40 +555,41 @@ def list_subscriptions(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_subscriptions' not in self._inner_api_calls: + if "list_subscriptions" not in self._inner_api_calls: self._inner_api_calls[ - 'list_subscriptions'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_subscriptions, - default_retry=self._method_configs['ListSubscriptions']. - retry, - default_timeout=self._method_configs['ListSubscriptions']. 
- timeout, - client_info=self._client_info, - ) + "list_subscriptions" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_subscriptions, + default_retry=self._method_configs["ListSubscriptions"].retry, + default_timeout=self._method_configs["ListSubscriptions"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.ListSubscriptionsRequest( - project=project, - page_size=page_size, + project=project, page_size=page_size ) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_subscriptions'], + self._inner_api_calls["list_subscriptions"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='subscriptions', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="subscriptions", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def delete_subscription(self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def delete_subscription( + self, + subscription, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Deletes an existing subscription. All messages retained in the subscription are immediately dropped. Calls to ``Pull`` after deletion @@ -621,29 +627,30 @@ def delete_subscription(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_subscription' not in self._inner_api_calls: + if "delete_subscription" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_subscription'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_subscription, - default_retry=self._method_configs['DeleteSubscription']. - retry, - default_timeout=self._method_configs['DeleteSubscription']. 
- timeout, - client_info=self._client_info, - ) + "delete_subscription" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_subscription, + default_retry=self._method_configs["DeleteSubscription"].retry, + default_timeout=self._method_configs["DeleteSubscription"].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription) + self._inner_api_calls["delete_subscription"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription, ) - self._inner_api_calls['delete_subscription']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def modify_ack_deadline(self, - subscription, - ack_ids, - ack_deadline_seconds, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def modify_ack_deadline( + self, + subscription, + ack_ids, + ack_deadline_seconds, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message by the @@ -693,31 +700,33 @@ def modify_ack_deadline(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'modify_ack_deadline' not in self._inner_api_calls: + if "modify_ack_deadline" not in self._inner_api_calls: self._inner_api_calls[ - 'modify_ack_deadline'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_ack_deadline, - default_retry=self._method_configs['ModifyAckDeadline']. - retry, - default_timeout=self._method_configs['ModifyAckDeadline']. 
- timeout, - client_info=self._client_info, - ) + "modify_ack_deadline" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.modify_ack_deadline, + default_retry=self._method_configs["ModifyAckDeadline"].retry, + default_timeout=self._method_configs["ModifyAckDeadline"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.ModifyAckDeadlineRequest( subscription=subscription, ack_ids=ack_ids, ack_deadline_seconds=ack_deadline_seconds, ) - self._inner_api_calls['modify_ack_deadline']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def acknowledge(self, - subscription, - ack_ids, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + self._inner_api_calls["modify_ack_deadline"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def acknowledge( + self, + subscription, + ack_ids, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Acknowledges the messages associated with the ``ack_ids`` in the ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant @@ -762,30 +771,32 @@ def acknowledge(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'acknowledge' not in self._inner_api_calls: + if "acknowledge" not in self._inner_api_calls: self._inner_api_calls[ - 'acknowledge'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.acknowledge, - default_retry=self._method_configs['Acknowledge'].retry, - default_timeout=self._method_configs['Acknowledge']. 
- timeout, - client_info=self._client_info, - ) + "acknowledge" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.acknowledge, + default_retry=self._method_configs["Acknowledge"].retry, + default_timeout=self._method_configs["Acknowledge"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, - ack_ids=ack_ids, + subscription=subscription, ack_ids=ack_ids + ) + self._inner_api_calls["acknowledge"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - self._inner_api_calls['acknowledge']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def pull(self, - subscription, - max_messages, - return_immediately=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def pull( + self, + subscription, + max_messages, + return_immediately=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Pulls messages from the server. The server may return ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given @@ -832,28 +843,30 @@ def pull(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'pull' not in self._inner_api_calls: - self._inner_api_calls[ - 'pull'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pull, - default_retry=self._method_configs['Pull'].retry, - default_timeout=self._method_configs['Pull'].timeout, - client_info=self._client_info, - ) + if "pull" not in self._inner_api_calls: + self._inner_api_calls["pull"] = google.api_core.gapic_v1.method.wrap_method( + self.transport.pull, + default_retry=self._method_configs["Pull"].retry, + default_timeout=self._method_configs["Pull"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.PullRequest( subscription=subscription, max_messages=max_messages, return_immediately=return_immediately, ) - return self._inner_api_calls['pull']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def streaming_pull(self, - requests, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["pull"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def streaming_pull( + self, + requests, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack deadline @@ -904,25 +917,28 @@ def streaming_pull(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'streaming_pull' not in self._inner_api_calls: + if "streaming_pull" not in self._inner_api_calls: self._inner_api_calls[ - 'streaming_pull'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.streaming_pull, - default_retry=self._method_configs['StreamingPull'].retry, - default_timeout=self._method_configs['StreamingPull']. 
- timeout, - client_info=self._client_info, - ) + "streaming_pull" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.streaming_pull, + default_retry=self._method_configs["StreamingPull"].retry, + default_timeout=self._method_configs["StreamingPull"].timeout, + client_info=self._client_info, + ) - return self._inner_api_calls['streaming_pull']( - requests, retry=retry, timeout=timeout, metadata=metadata) + return self._inner_api_calls["streaming_pull"]( + requests, retry=retry, timeout=timeout, metadata=metadata + ) - def modify_push_config(self, - subscription, - push_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def modify_push_config( + self, + subscription, + push_config, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Modifies the ``PushConfig`` for a specified subscription. @@ -973,30 +989,31 @@ def modify_push_config(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'modify_push_config' not in self._inner_api_calls: + if "modify_push_config" not in self._inner_api_calls: self._inner_api_calls[ - 'modify_push_config'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_push_config, - default_retry=self._method_configs['ModifyPushConfig']. - retry, - default_timeout=self._method_configs['ModifyPushConfig']. 
- timeout, - client_info=self._client_info, - ) + "modify_push_config" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.modify_push_config, + default_retry=self._method_configs["ModifyPushConfig"].retry, + default_timeout=self._method_configs["ModifyPushConfig"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, - push_config=push_config, + subscription=subscription, push_config=push_config + ) + self._inner_api_calls["modify_push_config"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - self._inner_api_calls['modify_push_config']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def list_snapshots(self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def list_snapshots( + self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Lists the existing snapshots.

ALPHA: This feature is part of an alpha release. This API might be @@ -1055,41 +1072,41 @@ def list_snapshots(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'list_snapshots' not in self._inner_api_calls: + if "list_snapshots" not in self._inner_api_calls: self._inner_api_calls[ - 'list_snapshots'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_snapshots, - default_retry=self._method_configs['ListSnapshots'].retry, - default_timeout=self._method_configs['ListSnapshots']. - timeout, - client_info=self._client_info, - ) + "list_snapshots" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_snapshots, + default_retry=self._method_configs["ListSnapshots"].retry, + default_timeout=self._method_configs["ListSnapshots"].timeout, + client_info=self._client_info, + ) - request = pubsub_pb2.ListSnapshotsRequest( - project=project, - page_size=page_size, - ) + request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size) iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( - self._inner_api_calls['list_snapshots'], + self._inner_api_calls["list_snapshots"], retry=retry, timeout=timeout, - metadata=metadata), + metadata=metadata, + ), request=request, - items_field='snapshots', - request_token_field='page_token', - response_token_field='next_page_token', + items_field="snapshots", + request_token_field="page_token", + response_token_field="next_page_token", ) return iterator - def create_snapshot(self, - name, - subscription, - labels=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def create_snapshot( + self, + name, + subscription, + labels=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Creates a snapshot from the requested subscription. 
ALPHA: This feature is part of an alpha release. This API might be changed in @@ -1151,30 +1168,31 @@ def create_snapshot(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'create_snapshot' not in self._inner_api_calls: + if "create_snapshot" not in self._inner_api_calls: self._inner_api_calls[ - 'create_snapshot'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_snapshot, - default_retry=self._method_configs['CreateSnapshot'].retry, - default_timeout=self._method_configs['CreateSnapshot']. - timeout, - client_info=self._client_info, - ) + "create_snapshot" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.create_snapshot, + default_retry=self._method_configs["CreateSnapshot"].retry, + default_timeout=self._method_configs["CreateSnapshot"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.CreateSnapshotRequest( - name=name, - subscription=subscription, - labels=labels, + name=name, subscription=subscription, labels=labels + ) + return self._inner_api_calls["create_snapshot"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['create_snapshot']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def update_snapshot(self, - snapshot, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def update_snapshot( + self, + snapshot, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Updates an existing snapshot.

ALPHA: This feature is part of an alpha release. This API might be @@ -1226,28 +1244,30 @@ def update_snapshot(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'update_snapshot' not in self._inner_api_calls: + if "update_snapshot" not in self._inner_api_calls: self._inner_api_calls[ - 'update_snapshot'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_snapshot, - default_retry=self._method_configs['UpdateSnapshot'].retry, - default_timeout=self._method_configs['UpdateSnapshot']. - timeout, - client_info=self._client_info, - ) + "update_snapshot" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.update_snapshot, + default_retry=self._method_configs["UpdateSnapshot"].retry, + default_timeout=self._method_configs["UpdateSnapshot"].timeout, + client_info=self._client_info, + ) request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, - update_mask=update_mask, + snapshot=snapshot, update_mask=update_mask ) - return self._inner_api_calls['update_snapshot']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def delete_snapshot(self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["update_snapshot"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def delete_snapshot( + self, + snapshot, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Removes an existing snapshot.

ALPHA: This feature is part of an alpha release. This API might be @@ -1287,27 +1307,30 @@ def delete_snapshot(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'delete_snapshot' not in self._inner_api_calls: + if "delete_snapshot" not in self._inner_api_calls: self._inner_api_calls[ - 'delete_snapshot'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_snapshot, - default_retry=self._method_configs['DeleteSnapshot'].retry, - default_timeout=self._method_configs['DeleteSnapshot']. - timeout, - client_info=self._client_info, - ) + "delete_snapshot" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.delete_snapshot, + default_retry=self._method_configs["DeleteSnapshot"].retry, + default_timeout=self._method_configs["DeleteSnapshot"].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + self._inner_api_calls["delete_snapshot"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot, ) - self._inner_api_calls['delete_snapshot']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def seek(self, - subscription, - time=None, - snapshot=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def seek( + self, + subscription, + time=None, + snapshot=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request.

@@ -1362,36 +1385,33 @@ def seek(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'seek' not in self._inner_api_calls: - self._inner_api_calls[ - 'seek'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.seek, - default_retry=self._method_configs['Seek'].retry, - default_timeout=self._method_configs['Seek'].timeout, - client_info=self._client_info, - ) + if "seek" not in self._inner_api_calls: + self._inner_api_calls["seek"] = google.api_core.gapic_v1.method.wrap_method( + self.transport.seek, + default_retry=self._method_configs["Seek"].retry, + default_timeout=self._method_configs["Seek"].timeout, + client_info=self._client_info, + ) # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - time=time, - snapshot=snapshot, - ) + google.api_core.protobuf_helpers.check_oneof(time=time, snapshot=snapshot) request = pubsub_pb2.SeekRequest( - subscription=subscription, - time=time, - snapshot=snapshot, + subscription=subscription, time=time, snapshot=snapshot ) - return self._inner_api_calls['seek']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def set_iam_policy(self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + return self._inner_api_calls["seek"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + + def set_iam_policy( + self, + resource, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Sets the access control policy on the specified resource. Replaces any existing policy. @@ -1439,28 +1459,28 @@ def set_iam_policy(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. 
- if 'set_iam_policy' not in self._inner_api_calls: + if "set_iam_policy" not in self._inner_api_calls: self._inner_api_calls[ - 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs['SetIamPolicy'].retry, - default_timeout=self._method_configs['SetIamPolicy']. - timeout, - client_info=self._client_info, - ) + "set_iam_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.set_iam_policy, + default_retry=self._method_configs["SetIamPolicy"].retry, + default_timeout=self._method_configs["SetIamPolicy"].timeout, + client_info=self._client_info, + ) - request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, - policy=policy, + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + return self._inner_api_calls["set_iam_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['set_iam_policy']( - request, retry=retry, timeout=timeout, metadata=metadata) - - def get_iam_policy(self, - resource, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + + def get_iam_policy( + self, + resource, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy @@ -1499,26 +1519,29 @@ def get_iam_policy(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'get_iam_policy' not in self._inner_api_calls: + if "get_iam_policy" not in self._inner_api_calls: self._inner_api_calls[ - 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs['GetIamPolicy'].retry, - default_timeout=self._method_configs['GetIamPolicy']. 
- timeout, - client_info=self._client_info, - ) + "get_iam_policy" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_iam_policy, + default_retry=self._method_configs["GetIamPolicy"].retry, + default_timeout=self._method_configs["GetIamPolicy"].timeout, + client_info=self._client_info, + ) - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) - return self._inner_api_calls['get_iam_policy']( - request, retry=retry, timeout=timeout, metadata=metadata) + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + return self._inner_api_calls["get_iam_policy"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) - def test_iam_permissions(self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None): + def test_iam_permissions( + self, + resource, + permissions, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): """ Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, @@ -1564,20 +1587,19 @@ def test_iam_permissions(self, ValueError: If the parameters are invalid. """ # Wrap the transport method to add retry and timeout logic. - if 'test_iam_permissions' not in self._inner_api_calls: + if "test_iam_permissions" not in self._inner_api_calls: self._inner_api_calls[ - 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs['TestIamPermissions']. - retry, - default_timeout=self._method_configs['TestIamPermissions']. 
- timeout, - client_info=self._client_info, - ) + "test_iam_permissions" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.test_iam_permissions, + default_retry=self._method_configs["TestIamPermissions"].retry, + default_timeout=self._method_configs["TestIamPermissions"].timeout, + client_info=self._client_info, + ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, - permissions=permissions, + resource=resource, permissions=permissions + ) + return self._inner_api_calls["test_iam_permissions"]( + request, retry=retry, timeout=timeout, metadata=metadata ) - return self._inner_api_calls['test_iam_permissions']( - request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 536d6d34b2b5..8d85d8f6094d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -4,10 +4,12 @@ "retry_codes": { "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "pull": [ - "DEADLINE_EXCEEDED", "INTERNAL", "RESOURCE_EXHAUSTED", - "UNAVAILABLE" + "DEADLINE_EXCEEDED", + "INTERNAL", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE", ], - "non_idempotent": [] + "non_idempotent": [], }, "retry_params": { "default": { @@ -17,7 +19,7 @@ "initial_rpc_timeout_millis": 60000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, }, "messaging": { "initial_retry_delay_millis": 100, @@ -26,7 +28,7 @@ "initial_rpc_timeout_millis": 12000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 12000, - "total_timeout_millis": 600000 + "total_timeout_millis": 600000, }, "streaming_messaging": { "initial_retry_delay_millis": 100, @@ -35,101 +37,101 @@ 
"initial_rpc_timeout_millis": 600000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 600000, - "total_timeout_millis": 600000 - } + "total_timeout_millis": 600000, + }, }, "methods": { "CreateSubscription": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "GetSubscription": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "UpdateSubscription": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListSubscriptions": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteSubscription": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ModifyAckDeadline": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Acknowledge": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "messaging" + "retry_params_name": "messaging", }, "Pull": { "timeout_millis": 60000, "retry_codes_name": "pull", - "retry_params_name": "messaging" + "retry_params_name": "messaging", }, "StreamingPull": { "timeout_millis": 900000, "retry_codes_name": "pull", - "retry_params_name": "streaming_messaging" + "retry_params_name": "streaming_messaging", }, "ModifyPushConfig": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "ListSnapshots": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "CreateSnapshot": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + 
"retry_params_name": "default", }, "UpdateSnapshot": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "DeleteSnapshot": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "Seek": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "SetIamPolicy": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "GetIamPolicy": { "timeout_millis": 60000, "retry_codes_name": "idempotent", - "retry_params_name": "default" + "retry_params_name": "default", }, "TestIamPermissions": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } + "retry_params_name": "default", + }, + }, } } } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 3353208636bb..77016f503d9e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -28,17 +28,17 @@ class PublisherGrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. 
_OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", ) - def __init__(self, - channel=None, - credentials=None, - address='pubsub.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="pubsub.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -56,29 +56,24 @@ def __init__(self, # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - 'iam_policy_stub': iam_policy_pb2.IAMPolicyStub(channel), - 'publisher_stub': pubsub_pb2_grpc.PublisherStub(channel), + "iam_policy_stub": iam_policy_pb2.IAMPolicyStub(channel), + "publisher_stub": pubsub_pb2_grpc.PublisherStub(channel), } @classmethod - def create_channel(cls, - address='pubsub.googleapis.com:443', - credentials=None): + def create_channel(cls, address="pubsub.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -93,9 +88,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -119,7 +112,7 @@ def create_topic(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['publisher_stub'].CreateTopic + return self._stubs["publisher_stub"].CreateTopic @property def update_topic(self): @@ -133,7 +126,7 @@ def update_topic(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['publisher_stub'].UpdateTopic + return self._stubs["publisher_stub"].UpdateTopic @property def publish(self): @@ -147,7 +140,7 @@ def publish(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['publisher_stub'].Publish + return self._stubs["publisher_stub"].Publish @property def get_topic(self): @@ -160,7 +153,7 @@ def get_topic(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['publisher_stub'].GetTopic + return self._stubs["publisher_stub"].GetTopic @property def list_topics(self): @@ -173,7 +166,7 @@ def list_topics(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['publisher_stub'].ListTopics + return self._stubs["publisher_stub"].ListTopics @property def list_topic_subscriptions(self): @@ -186,7 +179,7 @@ def list_topic_subscriptions(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['publisher_stub'].ListTopicSubscriptions + return self._stubs["publisher_stub"].ListTopicSubscriptions @property def delete_topic(self): @@ -204,7 +197,7 @@ def delete_topic(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['publisher_stub'].DeleteTopic + return self._stubs["publisher_stub"].DeleteTopic @property def set_iam_policy(self): @@ -218,7 +211,7 @@ def set_iam_policy(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['iam_policy_stub'].SetIamPolicy + return self._stubs["iam_policy_stub"].SetIamPolicy @property def get_iam_policy(self): @@ -233,7 +226,7 @@ def get_iam_policy(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['iam_policy_stub'].GetIamPolicy + return self._stubs["iam_policy_stub"].GetIamPolicy @property def test_iam_permissions(self): @@ -248,4 +241,4 @@ def test_iam_permissions(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['iam_policy_stub'].TestIamPermissions + return self._stubs["iam_policy_stub"].TestIamPermissions diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index c2682b5a8f63..ddcb075a9d58 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -28,17 +28,17 @@ class SubscriberGrpcTransport(object): which can be used to take advantage of advanced features of gRPC. """ + # The scopes needed to make gRPC calls to all of the methods defined # in this service. _OAUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", ) - def __init__(self, - channel=None, - credentials=None, - address='pubsub.googleapis.com:443'): + def __init__( + self, channel=None, credentials=None, address="pubsub.googleapis.com:443" + ): """Instantiate the transport class. Args: @@ -56,29 +56,24 @@ def __init__(self, # exception (channels come with credentials baked in already). 
if channel is not None and credentials is not None: raise ValueError( - 'The `channel` and `credentials` arguments are mutually ' - 'exclusive.', ) + "The `channel` and `credentials` arguments are mutually " "exclusive." + ) # Create the channel. if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - ) + channel = self.create_channel(address=address, credentials=credentials) self._channel = channel # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - 'iam_policy_stub': iam_policy_pb2.IAMPolicyStub(channel), - 'subscriber_stub': pubsub_pb2_grpc.SubscriberStub(channel), + "iam_policy_stub": iam_policy_pb2.IAMPolicyStub(channel), + "subscriber_stub": pubsub_pb2_grpc.SubscriberStub(channel), } @classmethod - def create_channel(cls, - address='pubsub.googleapis.com:443', - credentials=None): + def create_channel(cls, address="pubsub.googleapis.com:443", credentials=None): """Create and return a gRPC channel object. Args: @@ -93,9 +88,7 @@ def create_channel(cls, grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, - credentials=credentials, - scopes=cls._OAUTH_SCOPES, + address, credentials=credentials, scopes=cls._OAUTH_SCOPES ) @property @@ -127,7 +120,7 @@ def create_subscription(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].CreateSubscription + return self._stubs["subscriber_stub"].CreateSubscription @property def get_subscription(self): @@ -140,7 +133,7 @@ def get_subscription(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['subscriber_stub'].GetSubscription + return self._stubs["subscriber_stub"].GetSubscription @property def update_subscription(self): @@ -154,7 +147,7 @@ def update_subscription(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].UpdateSubscription + return self._stubs["subscriber_stub"].UpdateSubscription @property def list_subscriptions(self): @@ -167,7 +160,7 @@ def list_subscriptions(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].ListSubscriptions + return self._stubs["subscriber_stub"].ListSubscriptions @property def delete_subscription(self): @@ -185,7 +178,7 @@ def delete_subscription(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].DeleteSubscription + return self._stubs["subscriber_stub"].DeleteSubscription @property def modify_ack_deadline(self): @@ -202,7 +195,7 @@ def modify_ack_deadline(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].ModifyAckDeadline + return self._stubs["subscriber_stub"].ModifyAckDeadline @property def acknowledge(self): @@ -221,7 +214,7 @@ def acknowledge(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].Acknowledge + return self._stubs["subscriber_stub"].Acknowledge @property def pull(self): @@ -236,7 +229,7 @@ def pull(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].Pull + return self._stubs["subscriber_stub"].Pull @property def streaming_pull(self): @@ -255,7 +248,7 @@ def streaming_pull(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['subscriber_stub'].StreamingPull + return self._stubs["subscriber_stub"].StreamingPull @property def modify_push_config(self): @@ -274,7 +267,7 @@ def modify_push_config(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].ModifyPushConfig + return self._stubs["subscriber_stub"].ModifyPushConfig @property def list_snapshots(self): @@ -290,7 +283,7 @@ def list_snapshots(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].ListSnapshots + return self._stubs["subscriber_stub"].ListSnapshots @property def create_snapshot(self): @@ -316,7 +309,7 @@ def create_snapshot(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].CreateSnapshot + return self._stubs["subscriber_stub"].CreateSnapshot @property def update_snapshot(self): @@ -333,7 +326,7 @@ def update_snapshot(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].UpdateSnapshot + return self._stubs["subscriber_stub"].UpdateSnapshot @property def delete_snapshot(self): @@ -353,7 +346,7 @@ def delete_snapshot(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].DeleteSnapshot + return self._stubs["subscriber_stub"].DeleteSnapshot @property def seek(self): @@ -370,7 +363,7 @@ def seek(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['subscriber_stub'].Seek + return self._stubs["subscriber_stub"].Seek @property def set_iam_policy(self): @@ -384,7 +377,7 @@ def set_iam_policy(self): deserialized request object and returns a deserialized response object. 
""" - return self._stubs['iam_policy_stub'].SetIamPolicy + return self._stubs["iam_policy_stub"].SetIamPolicy @property def get_iam_policy(self): @@ -399,7 +392,7 @@ def get_iam_policy(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['iam_policy_stub'].GetIamPolicy + return self._stubs["iam_policy_stub"].GetIamPolicy @property def test_iam_permissions(self): @@ -414,4 +407,4 @@ def test_iam_permissions(self): deserialized request object and returns a deserialized response object. """ - return self._stubs['iam_policy_stub'].TestIamPermissions + return self._stubs["iam_policy_stub"].TestIamPermissions diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py index ca5f04d582c2..688ee744b7b4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/__init__.py @@ -17,6 +17,4 @@ from google.cloud.pubsub_v1.publisher.client import Client -__all__ = ( - 'Client', -) +__all__ = ("Client",) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index ac1f7ef7fe0e..4dc6ceec6a80 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -45,6 +45,7 @@ class Batch(object): regardless of how much is in it. However, if either the message count or size thresholds are encountered first, then the batch will commit early. """ + def __len__(self): """Return the number of messages currently in the batch.""" return len(self.messages) @@ -157,8 +158,9 @@ class BatchStatus(object): class; this represents the list of statuses where the existing library hooks in functionality. 
""" - ACCEPTING_MESSAGES = 'accepting messages' - STARTING = 'starting' - IN_PROGRESS = 'in progress' - ERROR = 'error' - SUCCESS = 'success' + + ACCEPTING_MESSAGES = "accepting messages" + STARTING = "starting" + IN_PROGRESS = "in progress" + ERROR = "error" + SUCCESS = "success" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index b59b8a3cafd8..f187024b7cf7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -28,10 +28,7 @@ _LOGGER = logging.getLogger(__name__) -_CAN_COMMIT = ( - base.BatchStatus.ACCEPTING_MESSAGES, - base.BatchStatus.STARTING, -) +_CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) class Batch(base.Batch): @@ -68,6 +65,7 @@ class Batch(base.Batch): has elapsed. Defaults to True unless ``settings.max_latency`` is inf. """ + def __init__(self, client, topic, settings, autocommit=True): self._client = client self._topic = topic @@ -84,10 +82,9 @@ def __init__(self, client, topic, settings, autocommit=True): # If max latency is specified, start a thread to monitor the batch and # commit when the max latency is reached. self._thread = None - if autocommit and self._settings.max_latency < float('inf'): + if autocommit and self._settings.max_latency < float("inf"): self._thread = threading.Thread( - name='Thread-MonitorBatchPublisher', - target=self.monitor, + name="Thread-MonitorBatchPublisher", target=self.monitor ) self._thread.start() @@ -164,8 +161,7 @@ def commit(self): # Start a new thread to actually handle the commit. 
commit_thread = threading.Thread( - name='Thread-CommitBatchPublisher', - target=self._commit, + name="Thread-CommitBatchPublisher", target=self._commit ) commit_thread.start() @@ -188,12 +184,12 @@ def _commit(self): # If, in the intervening period between when this method was # called and now, the batch started to be committed, or # completed a commit, then no-op at this point. - _LOGGER.debug('Batch is already in progress, exiting commit') + _LOGGER.debug("Batch is already in progress, exiting commit") return # Sanity check: If there are no messages, no-op. if not self._messages: - _LOGGER.debug('No messages to publish, exiting commit') + _LOGGER.debug("No messages to publish, exiting commit") self._status = base.BatchStatus.SUCCESS return @@ -202,10 +198,7 @@ def _commit(self): start = time.time() try: - response = self._client.api.publish( - self._topic, - self._messages, - ) + response = self._client.api.publish(self._topic, self._messages) except google.api_core.exceptions.GoogleAPICallError as exc: # We failed to publish, set the exception on all futures and # exit. @@ -214,12 +207,11 @@ def _commit(self): for future in self._futures: future.set_exception(exc) - _LOGGER.exception( - 'Failed to publish %s messages.', len(self._futures)) + _LOGGER.exception("Failed to publish %s messages.", len(self._futures)) return end = time.time() - _LOGGER.debug('gRPC Publish took %s seconds.', end - start) + _LOGGER.debug("gRPC Publish took %s seconds.", end - start) if len(response.message_ids) == len(self._futures): # Iterate over the futures on the queue and return the response @@ -234,14 +226,17 @@ def _commit(self): # the number of futures I have, then something went wrong. self._status = base.BatchStatus.ERROR exception = exceptions.PublishError( - 'Some messages were not successfully published.') + "Some messages were not successfully published." 
+ ) for future in self._futures: future.set_exception(exception) _LOGGER.error( - 'Only %s of %s messages were published.', - len(response.message_ids), len(self._futures)) + "Only %s of %s messages were published.", + len(response.message_ids), + len(self._futures), + ) def monitor(self): """Commit this batch after sufficient time has elapsed. @@ -255,7 +250,7 @@ def monitor(self): # Sleep for however long we should be waiting. time.sleep(self._settings.max_latency) - _LOGGER.debug('Monitor is waking up') + _LOGGER.debug("Monitor is waking up") return self._commit() def publish(self, message): @@ -289,8 +284,8 @@ def publish(self, message): new_size = self._size + message.ByteSize() new_count = len(self._messages) + 1 overflow = ( - new_size > self.settings.max_bytes or - new_count >= self._settings.max_messages + new_size > self.settings.max_bytes + or new_count >= self._settings.max_messages ) if not self._messages or not overflow: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 670000f7fc69..45b230e34eab 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -30,17 +30,16 @@ from google.cloud.pubsub_v1.publisher._batch import thread -__version__ = pkg_resources.get_distribution('google-cloud-pubsub').version +__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version _BLACKLISTED_METHODS = ( - 'publish', - 'from_service_account_file', - 'from_service_account_json', + "publish", + "from_service_account_file", + "from_service_account_json", ) -@_gapic.add_methods( - publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) +@_gapic.add_methods(publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) class Client(object): """A publisher client for Google Cloud Pub/Sub. 
@@ -59,28 +58,29 @@ class Client(object): be added if ``credentials`` are passed explicitly or if the Pub / Sub emulator is detected as running. """ + _batch_class = thread.Batch def __init__(self, batch_settings=(), **kwargs): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. - if os.environ.get('PUBSUB_EMULATOR_HOST'): - kwargs['channel'] = grpc.insecure_channel( - target=os.environ.get('PUBSUB_EMULATOR_HOST'), + if os.environ.get("PUBSUB_EMULATOR_HOST"): + kwargs["channel"] = grpc.insecure_channel( + target=os.environ.get("PUBSUB_EMULATOR_HOST") ) # Use a custom channel. # We need this in order to set appropriate default message size and # keepalive options. - if 'channel' not in kwargs: - kwargs['channel'] = grpc_helpers.create_channel( - credentials=kwargs.pop('credentials', None), + if "channel" not in kwargs: + kwargs["channel"] = grpc_helpers.create_channel( + credentials=kwargs.pop("credentials", None), target=self.target, scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, options={ - 'grpc.max_send_message_length': -1, - 'grpc.max_receive_message_length': -1, + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, }.items(), ) @@ -109,9 +109,8 @@ def from_service_account_file(cls, filename, batch_settings=(), **kwargs): Returns: PublisherClient: The constructed client. """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(batch_settings, **kwargs) from_service_account_json = from_service_account_file @@ -204,8 +203,7 @@ def publish(self, topic, data, **attrs): # If it is literally anything else, complain loudly about it. if not isinstance(data, six.binary_type): raise TypeError( - 'Data being published to Pub/Sub must be sent ' - 'as a bytestring.' 
+ "Data being published to Pub/Sub must be sent " "as a bytestring." ) # Coerce all attributes to text strings. @@ -213,11 +211,11 @@ def publish(self, topic, data, **attrs): if isinstance(v, six.text_type): continue if isinstance(v, six.binary_type): - attrs[k] = v.decode('utf-8') + attrs[k] = v.decode("utf-8") continue raise TypeError( - 'All attributes being published to Pub/Sub must ' - 'be sent as text strings.' + "All attributes being published to Pub/Sub must " + "be sent as text strings." ) # Create the Pub/Sub message object. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index b6bb0256966f..adbfaaaa1ee1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -22,7 +22,4 @@ class PublishError(GoogleAPICallError): pass -__all__ = ( - 'PublishError', - 'TimeoutError', -) +__all__ = ("PublishError", "TimeoutError") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index 9c0e93120bc5..a47f50e00a0d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -35,6 +35,7 @@ class Future(futures.Future): used. If this argument is not provided, then a new :class:`threading.Event` will be created and used. """ + # The publishing-side subclass does not need any special behavior # at this time. 
# @@ -44,6 +45,4 @@ class Future(futures.Future): pass -__all__ = ( - 'Future', -) +__all__ = ("Future",) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py index d82e3da96286..a444c3ea7571 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/__init__.py @@ -17,6 +17,4 @@ from google.cloud.pubsub_v1.subscriber.client import Client -__all__ = ( - 'Client', -) +__all__ = ("Client",) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index c70f8531a817..e41341afab3d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -24,7 +24,7 @@ _LOGGER = logging.getLogger(__name__) -_CALLBACK_WORKER_NAME = 'Thread-CallbackRequestDispatcher' +_CALLBACK_WORKER_NAME = "Thread-CallbackRequestDispatcher" class Dispatcher(object): @@ -40,23 +40,20 @@ def start(self): """ with self._operational_lock: if self._thread is not None: - raise ValueError('Dispatcher is already running.') + raise ValueError("Dispatcher is already running.") flow_control = self._manager.flow_control worker = helper_threads.QueueCallbackWorker( self._queue, self.dispatch_callback, max_items=flow_control.max_request_batch_size, - max_latency=flow_control.max_request_batch_latency + max_latency=flow_control.max_request_batch_latency, ) # Create and start the helper thread. 
- thread = threading.Thread( - name=_CALLBACK_WORKER_NAME, - target=worker, - ) + thread = threading.Thread(name=_CALLBACK_WORKER_NAME, target=worker) thread.daemon = True thread.start() - _LOGGER.debug('Started helper thread %s', thread.name) + _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread def stop(self): @@ -88,13 +85,12 @@ def dispatch_callback(self, items): for item in items: batched_commands[item.__class__].append(item) - _LOGGER.debug('Handling %d batched requests', len(items)) + _LOGGER.debug("Handling %d batched requests", len(items)) if batched_commands[requests.LeaseRequest]: self.lease(batched_commands.pop(requests.LeaseRequest)) if batched_commands[requests.ModAckRequest]: - self.modify_ack_deadline( - batched_commands.pop(requests.ModAckRequest)) + self.modify_ack_deadline(batched_commands.pop(requests.ModAckRequest)) # Note: Drop and ack *must* be after lease. It's possible to get both # the lease the and ack/drop request in the same batch. if batched_commands[requests.AckRequest]: @@ -151,8 +147,7 @@ def modify_ack_deadline(self, items): seconds = [item.seconds for item in items] request = types.StreamingPullRequest( - modify_deadline_ack_ids=ack_ids, - modify_deadline_seconds=seconds, + modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=seconds ) self._manager.send(request) @@ -162,8 +157,7 @@ def nack(self, items): Args: items(Sequence[NackRequest]): The items to deny. 
""" - self.modify_ack_deadline([ - requests.ModAckRequest(ack_id=item.ack_id, seconds=0) - for item in items]) - self.drop( - [requests.DropRequest(*item) for item in items]) + self.modify_ack_deadline( + [requests.ModAckRequest(ack_id=item.ack_id, seconds=0) for item in items] + ) + self.drop([requests.DropRequest(*item) for item in items]) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 38d2ae8dc505..9cd84a1e2397 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -19,7 +19,7 @@ _LOGGER = logging.getLogger(__name__) -_HEARTBEAT_WORKER_NAME = 'Thread-Heartbeater' +_HEARTBEAT_WORKER_NAME = "Thread-Heartbeater" # How often to send heartbeats in seconds. Determined as half the period of # time where the Pub/Sub server will close the stream as inactive, which is # 60 seconds. @@ -38,24 +38,24 @@ def heartbeat(self): """Periodically send heartbeats.""" while self._manager.is_active and not self._stop_event.is_set(): self._manager.heartbeat() - _LOGGER.debug('Sent heartbeat.') + _LOGGER.debug("Sent heartbeat.") self._stop_event.wait(timeout=self._period) - _LOGGER.info('%s exiting.', _HEARTBEAT_WORKER_NAME) + _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) def start(self): with self._operational_lock: if self._thread is not None: - raise ValueError('Heartbeater is already running.') + raise ValueError("Heartbeater is already running.") # Create and start the helper thread. 
self._stop_event.clear() thread = threading.Thread( - name=_HEARTBEAT_WORKER_NAME, - target=self.heartbeat) + name=_HEARTBEAT_WORKER_NAME, target=self.heartbeat + ) thread.daemon = True thread.start() - _LOGGER.debug('Started helper thread %s', thread.name) + _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread def stop(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index edb22d14fea5..80ad58738cd1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -19,10 +19,7 @@ from six.moves import queue -__all__ = ( - 'QueueCallbackWorker', - 'STOP', -) +__all__ = ("QueueCallbackWorker", "STOP") _LOGGER = logging.getLogger(__name__) @@ -94,14 +91,13 @@ def __call__(self): continue_ = True while continue_: items = _get_many( - self.queue, - max_items=self.max_items, - max_latency=self.max_latency) + self.queue, max_items=self.max_items, max_latency=self.max_latency + ) # If stop is in the items, process all items up to STOP and then # exit. 
try: - items = items[:items.index(STOP)] + items = items[: items.index(STOP)] continue_ = False except ValueError: pass @@ -111,6 +107,6 @@ def __call__(self): try: self._callback(items) except Exception as exc: - _LOGGER.exception('Error in queue callback worker: %s', exc) + _LOGGER.exception("Error in queue callback worker: %s", exc) - _LOGGER.debug('Exiting the QueueCallbackWorker.') + _LOGGER.debug("Exiting the QueueCallbackWorker.") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py index 46a8c3f1a2c0..29ee6fc61685 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py @@ -30,6 +30,7 @@ class Histogram(object): values outside the range of ``10 <= x <= 600`` are stored as ``10`` or ``600``, since these are the boundaries of leases in the actual API. """ + def __init__(self, data=None): """Instantiate the histogram. 
@@ -74,10 +75,8 @@ def __contains__(self, needle): return needle in self._data def __repr__(self): - return ''.format( - len=len(self), - max=self.max, - min=self.min, + return "".format( + len=len(self), max=self.max, min=self.min ) @property diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index c3ef6565587a..bcb73352b537 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -27,12 +27,10 @@ _LOGGER = logging.getLogger(__name__) -_LEASE_WORKER_NAME = 'Thread-LeaseMaintainer' +_LEASE_WORKER_NAME = "Thread-LeaseMaintainer" -_LeasedMessage = collections.namedtuple( - '_LeasedMessage', - ['added_time', 'size']) +_LeasedMessage = collections.namedtuple("_LeasedMessage", ["added_time", "size"]) class Leaser(object): @@ -71,12 +69,11 @@ def add(self, items): # the size counter. 
if item.ack_id not in self._leased_messages: self._leased_messages[item.ack_id] = _LeasedMessage( - added_time=time.time(), - size=item.byte_size) + added_time=time.time(), size=item.byte_size + ) self._bytes += item.byte_size else: - _LOGGER.debug( - 'Message %s is already lease managed', item.ack_id) + _LOGGER.debug("Message %s is already lease managed", item.ack_id) def remove(self, items): """Remove messages from lease management.""" @@ -86,11 +83,10 @@ def remove(self, items): if self._leased_messages.pop(item.ack_id, None) is not None: self._bytes -= item.byte_size else: - _LOGGER.debug('Item %s was not managed.', item.ack_id) + _LOGGER.debug("Item %s was not managed.", item.ack_id) if self._bytes < 0: - _LOGGER.debug( - 'Bytes was unexpectedly negative: %d', self._bytes) + _LOGGER.debug("Bytes was unexpectedly negative: %d", self._bytes) self._bytes = 0 def maintain_leases(self): @@ -105,7 +101,7 @@ def maintain_leases(self): # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. p99 = self._manager.ack_histogram.percentile(99) - _LOGGER.debug('The current p99 value is %d seconds.', p99) + _LOGGER.debug("The current p99 value is %d seconds.", p99) # Make a copy of the leased messages. This is needed because it's # possible for another thread to modify the dictionary while @@ -115,19 +111,17 @@ def maintain_leases(self): # Drop any leases that are well beyond max lease time. This # ensures that in the event of a badly behaving actor, we can # drop messages and allow Pub/Sub to resend them. 
- cutoff = ( - time.time() - - self._manager.flow_control.max_lease_duration) + cutoff = time.time() - self._manager.flow_control.max_lease_duration to_drop = [ requests.DropRequest(ack_id, item.size) - for ack_id, item - in six.iteritems(leased_messages) - if item.added_time < cutoff] + for ack_id, item in six.iteritems(leased_messages) + if item.added_time < cutoff + ] if to_drop: _LOGGER.warning( - 'Dropping %s items because they were leased too long.', - len(to_drop)) + "Dropping %s items because they were leased too long.", len(to_drop) + ) self._manager.dispatcher.drop(to_drop) # Remove dropped items from our copy of the leased messages (they @@ -141,15 +135,16 @@ def maintain_leases(self): # because it is more efficient to make a single request. ack_ids = leased_messages.keys() if ack_ids: - _LOGGER.debug('Renewing lease for %d ack IDs.', len(ack_ids)) + _LOGGER.debug("Renewing lease for %d ack IDs.", len(ack_ids)) # NOTE: This may not work as expected if ``consumer.active`` # has changed since we checked it. An implementation # without any sort of race condition would require a # way for ``send_request`` to fail when the consumer # is inactive. - self._manager.dispatcher.modify_ack_deadline([ - requests.ModAckRequest(ack_id, p99) for ack_id in ack_ids]) + self._manager.dispatcher.modify_ack_deadline( + [requests.ModAckRequest(ack_id, p99) for ack_id in ack_ids] + ) # Now wait an appropriate period of time and do this again. # @@ -158,24 +153,24 @@ def maintain_leases(self): # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. 
snooze = random.uniform(0.0, p99 * 0.9) - _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) + _LOGGER.debug("Snoozing lease management for %f seconds.", snooze) self._stop_event.wait(timeout=snooze) - _LOGGER.info('%s exiting.', _LEASE_WORKER_NAME) + _LOGGER.info("%s exiting.", _LEASE_WORKER_NAME) def start(self): with self._operational_lock: if self._thread is not None: - raise ValueError('Leaser is already running.') + raise ValueError("Leaser is already running.") # Create and start the helper thread. self._stop_event.clear() thread = threading.Thread( - name=_LEASE_WORKER_NAME, - target=self.maintain_leases) + name=_LEASE_WORKER_NAME, target=self.maintain_leases + ) thread.daemon = True thread.start() - _LOGGER.debug('Started helper thread %s', thread.name) + _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread def stop(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index 6e042e080648..ac1df0af8eff 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -21,26 +21,13 @@ # Namedtuples for management requests. Used by the Message class to communicate # items of work back to the policy. 
AckRequest = collections.namedtuple( - 'AckRequest', - ['ack_id', 'byte_size', 'time_to_ack'], + "AckRequest", ["ack_id", "byte_size", "time_to_ack"] ) -DropRequest = collections.namedtuple( - 'DropRequest', - ['ack_id', 'byte_size'], -) +DropRequest = collections.namedtuple("DropRequest", ["ack_id", "byte_size"]) -LeaseRequest = collections.namedtuple( - 'LeaseRequest', - ['ack_id', 'byte_size'], -) +LeaseRequest = collections.namedtuple("LeaseRequest", ["ack_id", "byte_size"]) -ModAckRequest = collections.namedtuple( - 'ModAckRequest', - ['ack_id', 'seconds'], -) +ModAckRequest = collections.namedtuple("ModAckRequest", ["ack_id", "seconds"]) -NackRequest = collections.namedtuple( - 'NackRequest', - ['ack_id', 'byte_size'], -) +NackRequest = collections.namedtuple("NackRequest", ["ack_id", "byte_size"]) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 5a7c7d754262..d8d94384c67e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -34,13 +34,13 @@ import google.cloud.pubsub_v1.subscriber.scheduler _LOGGER = logging.getLogger(__name__) -_RPC_ERROR_THREAD_NAME = 'Thread-OnRpcTerminated' +_RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, exceptions.InternalServerError, exceptions.Unknown, - exceptions.GatewayTimeout + exceptions.GatewayTimeout, ) @@ -66,8 +66,8 @@ def _wrap_callback_errors(callback, message): # a message to a queue, so if this doesn't work the world is in an # unrecoverable state and this thread should just bail. 
_LOGGER.exception( - 'Top-level exception occurred in callback while processing a ' - 'message') + "Top-level exception occurred in callback while processing a " "message" + ) message.nack() @@ -92,8 +92,9 @@ class StreamingPullManager(object): """If set to True, this class will make requests over a separate unary RPC instead of over the streaming RPC.""" - def __init__(self, client, subscription, flow_control=types.FlowControl(), - scheduler=None): + def __init__( + self, client, subscription, flow_control=types.FlowControl(), scheduler=None + ): self._client = client self._subscription = subscription self._flow_control = flow_control @@ -108,7 +109,8 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), if scheduler is None: self._scheduler = ( - google.cloud.pubsub_v1.subscriber.scheduler.ThreadScheduler()) + google.cloud.pubsub_v1.subscriber.scheduler.ThreadScheduler() + ) else: self._scheduler = scheduler @@ -165,10 +167,7 @@ def ack_deadline(self): Returns: int: The ack deadline. """ - target = min([ - self._last_histogram_size * 2, - self._last_histogram_size + 100, - ]) + target = min([self._last_histogram_size * 2, self._last_histogram_size + 100]) if len(self.ack_histogram) > target: self._ack_deadline = self.ack_histogram.percentile(percent=99) return self._ack_deadline @@ -193,10 +192,12 @@ def load(self): if self._leaser is None: return 0 - return max([ - self._leaser.message_count / self._flow_control.max_messages, - self._leaser.bytes / self._flow_control.max_bytes, - ]) + return max( + [ + self._leaser.message_count / self._flow_control.max_messages, + self._leaser.bytes / self._flow_control.max_bytes, + ] + ) def add_close_callback(self, callback): """Schedules a callable when the manager closes. 
@@ -210,8 +211,7 @@ def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" if self.load >= 1.0: if self._consumer is not None and not self._consumer.is_paused: - _LOGGER.debug( - 'Message backlog over load at %.2f, pausing.', self.load) + _LOGGER.debug("Message backlog over load at %.2f, pausing.", self.load) self._consumer.pause() def maybe_resume_consumer(self): @@ -228,7 +228,7 @@ def maybe_resume_consumer(self): if self.load < self.flow_control.resume_threshold: self._consumer.resume() else: - _LOGGER.debug('Did not resume, current load is %s', self.load) + _LOGGER.debug("Did not resume, current load is %s", self.load) def _send_unary_request(self, request): """Send a request using a separate unary request instead of over the @@ -240,8 +240,8 @@ def _send_unary_request(self, request): """ if request.ack_ids: self._client.acknowledge( - subscription=self._subscription, - ack_ids=list(request.ack_ids)) + subscription=self._subscription, ack_ids=list(request.ack_ids) + ) if request.modify_deadline_ack_ids: # Send ack_ids with the same deadline seconds together. @@ -255,9 +255,10 @@ def _send_unary_request(self, request): self._client.modify_ack_deadline( subscription=self._subscription, ack_ids=ack_ids, - ack_deadline_seconds=deadline) + ack_deadline_seconds=deadline, + ) - _LOGGER.debug('Sent request(s) over unary RPC.') + _LOGGER.debug("Sent request(s) over unary RPC.") def send(self, request): """Queue a request to be sent to the RPC.""" @@ -266,9 +267,10 @@ def send(self, request): self._send_unary_request(request) except exceptions.GoogleAPICallError: _LOGGER.debug( - 'Exception while sending unary RPC. This is typically ' - 'non-fatal as stream requests are best-effort.', - exc_info=True) + "Exception while sending unary RPC. This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, + ) else: self._rpc.send(request) @@ -290,11 +292,10 @@ def open(self, callback): stream. 
""" if self.is_active: - raise ValueError('This manager is already open.') + raise ValueError("This manager is already open.") if self._closed: - raise ValueError( - 'This manager has been closed and can not be re-used.') + raise ValueError("This manager has been closed and can not be re-used.") self._callback = functools.partial(_wrap_callback_errors, callback) @@ -302,13 +303,13 @@ def open(self, callback): self._rpc = bidi.ResumableBidiRpc( start_rpc=self._client.api.streaming_pull, initial_request=self._get_initial_request, - should_recover=self._should_recover) + should_recover=self._should_recover, + ) self._rpc.add_done_callback(self._on_rpc_done) # Create references to threads self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) - self._consumer = bidi.BackgroundConsumer( - self._rpc, self._on_response) + self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) self._leaser = leaser.Leaser(self) self._heartbeater = heartbeater.Heartbeater(self) @@ -340,27 +341,27 @@ def close(self, reason=None): # Stop consuming messages. 
if self.is_active: - _LOGGER.debug('Stopping consumer.') + _LOGGER.debug("Stopping consumer.") self._consumer.stop() self._consumer = None # Shutdown all helper threads - _LOGGER.debug('Stopping scheduler.') + _LOGGER.debug("Stopping scheduler.") self._scheduler.shutdown() self._scheduler = None - _LOGGER.debug('Stopping leaser.') + _LOGGER.debug("Stopping leaser.") self._leaser.stop() self._leaser = None - _LOGGER.debug('Stopping dispatcher.') + _LOGGER.debug("Stopping dispatcher.") self._dispatcher.stop() self._dispatcher = None - _LOGGER.debug('Stopping heartbeater.') + _LOGGER.debug("Stopping heartbeater.") self._heartbeater.stop() self._heartbeater = None self._rpc = None self._closed = True - _LOGGER.debug('Finished stopping manager.') + _LOGGER.debug("Finished stopping manager.") for callback in self._close_callbacks: callback(self, reason) @@ -410,22 +411,20 @@ def _on_response(self, response): """ _LOGGER.debug( - 'Scheduling callbacks for %s messages.', - len(response.received_messages)) + "Scheduling callbacks for %s messages.", len(response.received_messages) + ) # Immediately modack the messages we received, as this tells the server # that we've received them. items = [ - requests.ModAckRequest( - message.ack_id, self._ack_histogram.percentile(99)) + requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) for message in response.received_messages ] self._dispatcher.modify_ack_deadline(items) for received_message in response.received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, - received_message.ack_id, - self._scheduler.queue) + received_message.message, received_message.ack_id, self._scheduler.queue + ) # TODO: Immediately lease instead of using the callback queue. self._scheduler.schedule(self._callback, message) @@ -446,9 +445,9 @@ def _should_recover(self, exception): # If this is in the list of idempotent exceptions, then we want to # recover. 
if isinstance(exception, _RETRYABLE_STREAM_ERRORS): - _LOGGER.info('Observed recoverable stream error %s', exception) + _LOGGER.info("Observed recoverable stream error %s", exception) return True - _LOGGER.info('Observed non-recoverable stream error %s', exception) + _LOGGER.info("Observed non-recoverable stream error %s", exception) return False def _on_rpc_done(self, future): @@ -462,12 +461,10 @@ def _on_rpc_done(self, future): with shutting everything down. This is to prevent blocking in the background consumer and preventing it from being ``joined()``. """ - _LOGGER.info( - 'RPC termination has signaled streaming pull manager shutdown.') + _LOGGER.info("RPC termination has signaled streaming pull manager shutdown.") future = _maybe_wrap_exception(future) thread = threading.Thread( - name=_RPC_ERROR_THREAD_NAME, - target=self.close, - kwargs={'reason': future}) + name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} + ) thread.daemon = True thread.start() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 226b7bf344f8..b50a269e99f0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -29,17 +29,16 @@ from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -__version__ = pkg_resources.get_distribution('google-cloud-pubsub').version +__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version _BLACKLISTED_METHODS = ( - 'publish', - 'from_service_account_file', - 'from_service_account_json', + "publish", + "from_service_account_file", + "from_service_account_json", ) -@_gapic.add_methods( - subscriber_client.SubscriberClient, blacklist=_BLACKLISTED_METHODS) +@_gapic.add_methods(subscriber_client.SubscriberClient, blacklist=_BLACKLISTED_METHODS) class Client(object): """A 
subscriber client for Google Cloud Pub/Sub. @@ -54,27 +53,28 @@ class Client(object): Generally, you should not need to set additional keyword arguments. """ + def __init__(self, **kwargs): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. - if os.environ.get('PUBSUB_EMULATOR_HOST'): - kwargs['channel'] = grpc.insecure_channel( - target=os.environ.get('PUBSUB_EMULATOR_HOST'), + if os.environ.get("PUBSUB_EMULATOR_HOST"): + kwargs["channel"] = grpc.insecure_channel( + target=os.environ.get("PUBSUB_EMULATOR_HOST") ) # Use a custom channel. # We need this in order to set appropriate default message size and # keepalive options. - if 'channel' not in kwargs: - kwargs['channel'] = grpc_helpers.create_channel( - credentials=kwargs.pop('credentials', None), + if "channel" not in kwargs: + kwargs["channel"] = grpc_helpers.create_channel( + credentials=kwargs.pop("credentials", None), target=self.target, scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, options={ - 'grpc.max_send_message_length': -1, - 'grpc.max_receive_message_length': -1, - 'grpc.keepalive_time_ms': 30000, + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + "grpc.keepalive_time_ms": 30000, }.items(), ) @@ -95,9 +95,8 @@ def from_service_account_file(cls, filename, **kwargs): Returns: PublisherClient: The constructed client. 
""" - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs['credentials'] = credentials + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials return cls(**kwargs) from_service_account_json = from_service_account_file @@ -116,9 +115,7 @@ def api(self): """The underlying gapic API client.""" return self._api - def subscribe( - self, subscription, callback, flow_control=(), - scheduler=None): + def subscribe(self, subscription, callback, flow_control=(), scheduler=None): """Asynchronously start receiving messages on a given subscription. This method starts a background thread to begin pulling messages from @@ -198,7 +195,8 @@ def callback(message): flow_control = types.FlowControl(*flow_control) manager = streaming_pull_manager.StreamingPullManager( - self, subscription, flow_control=flow_control, scheduler=scheduler) + self, subscription, flow_control=flow_control, scheduler=scheduler + ) future = futures.StreamingPullFuture(manager) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 6772f196bf60..56dde9a7f6b8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -30,7 +30,7 @@ }}""" -def _indent(lines, prefix=' '): +def _indent(lines, prefix=" "): """Indent some text. Note that this is present as ``textwrap.indent``, but not in Python 2. @@ -44,9 +44,9 @@ def _indent(lines, prefix=' '): str: The newly indented content. """ indented = [] - for line in lines.split('\n'): + for line in lines.split("\n"): indented.append(prefix + line) - return '\n'.join(indented) + return "\n".join(indented) class Message(object): @@ -104,13 +104,10 @@ def __repr__(self): # Get an abbreviated version of the data. 
abbv_data = self._message.data if len(abbv_data) > 50: - abbv_data = abbv_data[:50] + b'...' + abbv_data = abbv_data[:50] + b"..." pretty_attrs = json.dumps( - dict(self.attributes), - indent=2, - separators=(',', ': '), - sort_keys=True, + dict(self.attributes), indent=2, separators=(",", ": "), sort_keys=True ) pretty_attrs = _indent(pretty_attrs) # We don't actually want the first line indented. @@ -155,8 +152,8 @@ def publish_time(self): """ timestamp = self._message.publish_time delta = datetime.timedelta( - seconds=timestamp.seconds, - microseconds=timestamp.nanos // 1000) + seconds=timestamp.seconds, microseconds=timestamp.nanos // 1000 + ) return datetime_helpers._UTC_EPOCH + delta @property @@ -186,9 +183,7 @@ def ack(self): time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( requests.AckRequest( - ack_id=self._ack_id, - byte_size=self.size, - time_to_ack=time_to_ack + ack_id=self._ack_id, byte_size=self.size, time_to_ack=time_to_ack ) ) @@ -206,10 +201,7 @@ def drop(self): directly. """ self._request_queue.put( - requests.DropRequest( - ack_id=self._ack_id, - byte_size=self.size - ) + requests.DropRequest(ack_id=self._ack_id, byte_size=self.size) ) def lease(self): @@ -220,10 +212,7 @@ def lease(self): need to call it manually. """ self._request_queue.put( - requests.LeaseRequest( - ack_id=self._ack_id, - byte_size=self.size - ) + requests.LeaseRequest(ack_id=self._ack_id, byte_size=self.size) ) def modify_ack_deadline(self, seconds): @@ -242,10 +231,7 @@ def modify_ack_deadline(self, seconds): values below 10 are advised against. """ self._request_queue.put( - requests.ModAckRequest( - ack_id=self._ack_id, - seconds=seconds - ) + requests.ModAckRequest(ack_id=self._ack_id, seconds=seconds) ) def nack(self): @@ -254,8 +240,5 @@ def nack(self): This will cause the message to be re-delivered to the subscription. 
""" self._request_queue.put( - requests.NackRequest( - ack_id=self._ack_id, - byte_size=self.size - ) + requests.NackRequest(ack_id=self._ack_id, byte_size=self.size) ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 70c3721249f9..94502d48e754 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -69,12 +69,8 @@ def _make_default_thread_pool_executor(): # for debugging. executor_kwargs = {} if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): - executor_kwargs['thread_name_prefix'] = ( - 'ThreadPoolExecutor-ThreadScheduler') - return concurrent.futures.ThreadPoolExecutor( - max_workers=10, - **executor_kwargs - ) + executor_kwargs["thread_name_prefix"] = "ThreadPoolExecutor-ThreadScheduler" + return concurrent.futures.ThreadPoolExecutor(max_workers=10, **executor_kwargs) class ThreadScheduler(object): @@ -86,6 +82,7 @@ class ThreadScheduler(object): executor(concurrent.futures.ThreadPoolExecutor): An optional executor to use. If not specified, a default one will be created. """ + def __init__(self, executor=None): self._queue = queue.Queue() if executor is None: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 1f5ca92174e2..c2662cf83631 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -36,13 +36,12 @@ # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. 
BatchSettings = collections.namedtuple( - 'BatchSettings', - ['max_bytes', 'max_latency', 'max_messages'], + "BatchSettings", ["max_bytes", "max_latency", "max_messages"] ) BatchSettings.__new__.__defaults__ = ( 1000 * 1000 * 10, # max_bytes: documented "10 MB", enforced 10000000 - 0.05, # max_latency: 0.05 seconds - 1000, # max_messages: 1,000 + 0.05, # max_latency: 0.05 seconds + 1000, # max_messages: 1,000 ) # Define the type class and default values for flow control settings. @@ -51,19 +50,25 @@ # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. FlowControl = collections.namedtuple( - 'FlowControl', - ['max_bytes', 'max_messages', 'resume_threshold', 'max_requests', - 'max_request_batch_size', 'max_request_batch_latency', - 'max_lease_duration'], + "FlowControl", + [ + "max_bytes", + "max_messages", + "resume_threshold", + "max_requests", + "max_request_batch_size", + "max_request_batch_latency", + "max_lease_duration", + ], ) FlowControl.__new__.__defaults__ = ( - 100 * 1024 * 1024, # max_bytes: 100mb - 100, # max_messages: 100 - 0.8, # resume_threshold: 80% - 100, # max_requests: 100 - 100, # max_request_batch_size: 100 - 0.01, # max_request_batch_latency: 0.01s - 2 * 60 * 60, # max_lease_duration: 2 hours. + 100 * 1024 * 1024, # max_bytes: 100mb + 100, # max_messages: 100 + 0.8, # resume_threshold: 80% + 100, # max_requests: 100 + 100, # max_request_batch_size: 100 + 0.01, # max_request_batch_latency: 0.01s + 2 * 60 * 60, # max_lease_duration: 2 hours. 
) @@ -79,12 +84,10 @@ timestamp_pb2, ] -_local_modules = [ - pubsub_pb2, -] +_local_modules = [pubsub_pb2] -names = ['BatchSettings', 'FlowControl'] +names = ["BatchSettings", "FlowControl"] for module in _shared_modules: @@ -94,7 +97,7 @@ for module in _local_modules: for name, message in get_messages(module).items(): - message.__module__ = 'google.cloud.pubsub_v1.types' + message.__module__ = "google.cloud.pubsub_v1.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 3c77f0629aef..e6001f8e7801 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -28,31 +28,31 @@ from test_utils.system import unique_resource_id -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def project(): _, default_project = google.auth.default() yield default_project -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def publisher(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope=u'module') +@pytest.fixture(scope=u"module") def subscriber(): yield pubsub_v1.SubscriberClient() @pytest.fixture def topic_path(project, publisher): - topic_name = 't' + unique_resource_id('-') + topic_name = "t" + unique_resource_id("-") yield publisher.topic_path(project, topic_name) @pytest.fixture def subscription_path(project, subscriber): - sub_name = 's' + unique_resource_id('-') + sub_name = "s" + unique_resource_id("-") yield subscriber.subscription_path(project, sub_name) @@ -76,9 +76,9 @@ def test_publish_messages(publisher, topic_path, cleanup): futures.append( publisher.publish( topic_path, - b'The hail in Wales falls mainly on the snails.', + b"The hail in Wales falls mainly on the snails.", num=str(index), - ), + ) ) for future in futures: result = future.result() @@ -86,7 +86,8 @@ def test_publish_messages(publisher, topic_path, cleanup): def test_subscribe_to_messages( 
- publisher, topic_path, subscriber, subscription_path, cleanup): + publisher, topic_path, subscriber, subscription_path, cleanup +): # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, topic_path)) cleanup.append((subscriber.delete_subscription, subscription_path)) @@ -100,11 +101,7 @@ def test_subscribe_to_messages( # Publish some messages. futures = [ - publisher.publish( - topic_path, - b'Wooooo! The claaaaaw!', - num=str(index), - ) + publisher.publish(topic_path, b"Wooooo! The claaaaaw!", num=str(index)) for index in six.moves.range(50) ] @@ -132,7 +129,8 @@ def test_subscribe_to_messages( def test_subscribe_to_messages_async_callbacks( - publisher, topic_path, subscriber, subscription_path, cleanup): + publisher, topic_path, subscriber, subscription_path, cleanup +): # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, topic_path)) cleanup.append((subscriber.delete_subscription, subscription_path)) @@ -146,11 +144,7 @@ def test_subscribe_to_messages_async_callbacks( # Publish some messages. futures = [ - publisher.publish( - topic_path, - b'Wooooo! The claaaaaw!', - num=str(index), - ) + publisher.publish(topic_path, b"Wooooo! 
The claaaaaw!", num=str(index)) for index in six.moves.range(2) ] @@ -185,7 +179,6 @@ def test_subscribe_to_messages_async_callbacks( class AckCallback(object): - def __init__(self): self.calls = 0 self.lock = threading.Lock() @@ -198,7 +191,6 @@ def __call__(self, message): class TimesCallback(object): - def __init__(self, sleep_time): self.sleep_time = sleep_time self.calls = 0 diff --git a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py index 0d1dfa290e9a..7723419da509 100644 --- a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py @@ -23,7 +23,7 @@ class TestSystemPublisher(object): def test_list_topics(self): - project_id = os.environ['PROJECT_ID'] + project_id = os.environ["PROJECT_ID"] client = pubsub_v1.PublisherClient() project = client.project_path(project_id) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index feff5f1df72f..5d6d3029e5d3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -54,10 +54,7 @@ def __init__(self, responses=[]): self.responses = responses self.requests = [] - def unary_unary(self, - method, - request_serializer=None, - response_deserializer=None): + def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) @@ -68,19 +65,19 @@ class CustomException(Exception): class TestPublisherClient(object): def test_create_topic(self): # Setup Expected Response - name_2 = 'name2-1052831874' - expected_response = {'name': name_2} + name_2 = "name2-1052831874" + expected_response = {"name": name_2} expected_response = 
pubsub_pb2.Topic(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - name = client.topic_path('[PROJECT]', '[TOPIC]') + name = client.topic_path("[PROJECT]", "[TOPIC]") response = client.create_topic(name) assert expected_response == response @@ -93,26 +90,26 @@ def test_create_topic(self): def test_create_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - name = client.topic_path('[PROJECT]', '[TOPIC]') + name = client.topic_path("[PROJECT]", "[TOPIC]") with pytest.raises(CustomException): client.create_topic(name) def test_update_topic(self): # Setup Expected Response - name = 'name3373707' - expected_response = {'name': name} + name = "name3373707" + expected_response = {"name": name} expected_response = pubsub_pb2.Topic(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() @@ -126,14 +123,15 @@ def test_update_topic(self): assert len(channel.requests) == 1 expected_request = pubsub_pb2.UpdateTopicRequest( - topic=topic, update_mask=update_mask) + topic=topic, update_mask=update_mask + ) actual_request = channel.requests[0][1] assert expected_request == 
actual_request def test_update_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() @@ -147,45 +145,44 @@ def test_update_topic_exception(self): def test_publish(self): # Setup Expected Response - message_ids_element = 'messageIdsElement-744837059' + message_ids_element = "messageIdsElement-744837059" message_ids = [message_ids_element] - expected_response = {'message_ids': message_ids} + expected_response = {"message_ids": message_ids} expected_response = pubsub_pb2.PublishResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - topic = client.topic_path('[PROJECT]', '[TOPIC]') - data = b'-86' - messages_element = {'data': data} + topic = client.topic_path("[PROJECT]", "[TOPIC]") + data = b"-86" + messages_element = {"data": data} messages = [messages_element] response = client.publish(topic, messages) assert expected_response == response assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PublishRequest( - topic=topic, messages=messages) + expected_request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_publish_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch 
as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - topic = client.topic_path('[PROJECT]', '[TOPIC]') - data = b'-86' - messages_element = {'data': data} + topic = client.topic_path("[PROJECT]", "[TOPIC]") + data = b"-86" + messages_element = {"data": data} messages = [messages_element] with pytest.raises(CustomException): @@ -193,19 +190,19 @@ def test_publish_exception(self): def test_get_topic(self): # Setup Expected Response - name = 'name3373707' - expected_response = {'name': name} + name = "name3373707" + expected_response = {"name": name} expected_response = pubsub_pb2.Topic(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - topic = client.topic_path('[PROJECT]', '[TOPIC]') + topic = client.topic_path("[PROJECT]", "[TOPIC]") response = client.get_topic(topic) assert expected_response == response @@ -218,37 +215,34 @@ def test_get_topic(self): def test_get_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - topic = client.topic_path('[PROJECT]', '[TOPIC]') + topic = client.topic_path("[PROJECT]", "[TOPIC]") with pytest.raises(CustomException): client.get_topic(topic) def test_list_topics(self): # Setup Expected Response - next_page_token = '' + next_page_token = "" topics_element = {} topics = [topics_element] - expected_response = { - 'next_page_token': next_page_token, - 
'topics': topics - } + expected_response = {"next_page_token": next_page_token, "topics": topics} expected_response = pubsub_pb2.ListTopicsResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - project = client.project_path('[PROJECT]') + project = client.project_path("[PROJECT]") paged_list_response = client.list_topics(project) resources = list(paged_list_response) @@ -263,13 +257,13 @@ def test_list_topics(self): def test_list_topics_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - project = client.project_path('[PROJECT]') + project = client.project_path("[PROJECT]") paged_list_response = client.list_topics(project) with pytest.raises(CustomException): @@ -277,25 +271,26 @@ def test_list_topics_exception(self): def test_list_topic_subscriptions(self): # Setup Expected Response - next_page_token = '' - subscriptions_element = 'subscriptionsElement1698708147' + next_page_token = "" + subscriptions_element = "subscriptionsElement1698708147" subscriptions = [subscriptions_element] expected_response = { - 'next_page_token': next_page_token, - 'subscriptions': subscriptions + "next_page_token": next_page_token, + "subscriptions": subscriptions, } expected_response = pubsub_pb2.ListTopicSubscriptionsResponse( - **expected_response) + **expected_response + ) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = 
mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - topic = client.topic_path('[PROJECT]', '[TOPIC]') + topic = client.topic_path("[PROJECT]", "[TOPIC]") paged_list_response = client.list_topic_subscriptions(topic) resources = list(paged_list_response) @@ -304,20 +299,19 @@ def test_list_topic_subscriptions(self): assert expected_response.subscriptions[0] == resources[0] assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic) + expected_request = pubsub_pb2.ListTopicSubscriptionsRequest(topic=topic) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_list_topic_subscriptions_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - topic = client.topic_path('[PROJECT]', '[TOPIC]') + topic = client.topic_path("[PROJECT]", "[TOPIC]") paged_list_response = client.list_topic_subscriptions(topic) with pytest.raises(CustomException): @@ -325,13 +319,13 @@ def test_list_topic_subscriptions_exception(self): def test_delete_topic(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - topic = client.topic_path('[PROJECT]', '[TOPIC]') + topic = client.topic_path("[PROJECT]", "[TOPIC]") client.delete_topic(topic) @@ -343,13 +337,13 @@ def test_delete_topic(self): def 
test_delete_topic_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - topic = client.topic_path('[PROJECT]', '[TOPIC]') + topic = client.topic_path("[PROJECT]", "[TOPIC]") with pytest.raises(CustomException): client.delete_topic(topic) @@ -357,19 +351,19 @@ def test_delete_topic_exception(self): def test_set_iam_policy(self): # Setup Expected Response version = 351608024 - etag = b'21' - expected_response = {'version': version, 'etag': etag} + etag = b"21" + expected_response = {"version": version, "etag": etag} expected_response = policy_pb2.Policy(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - resource = client.topic_path('[PROJECT]', '[TOPIC]') + resource = client.topic_path("[PROJECT]", "[TOPIC]") policy = {} response = client.set_iam_policy(resource, policy) @@ -377,20 +371,21 @@ def test_set_iam_policy(self): assert len(channel.requests) == 1 expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy) + resource=resource, policy=policy + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_set_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: 
create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - resource = client.topic_path('[PROJECT]', '[TOPIC]') + resource = client.topic_path("[PROJECT]", "[TOPIC]") policy = {} with pytest.raises(CustomException): @@ -399,39 +394,38 @@ def test_set_iam_policy_exception(self): def test_get_iam_policy(self): # Setup Expected Response version = 351608024 - etag = b'21' - expected_response = {'version': version, 'etag': etag} + etag = b"21" + expected_response = {"version": version, "etag": etag} expected_response = policy_pb2.Policy(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - resource = client.topic_path('[PROJECT]', '[TOPIC]') + resource = client.topic_path("[PROJECT]", "[TOPIC]") response = client.get_iam_policy(resource) assert expected_response == response assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource) + expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_get_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - resource = client.topic_path('[PROJECT]', '[TOPIC]') + resource = client.topic_path("[PROJECT]", "[TOPIC]") with pytest.raises(CustomException): client.get_iam_policy(resource) @@ -440,17 +434,18 @@ def 
test_test_iam_permissions(self): # Setup Expected Response expected_response = {} expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response) + **expected_response + ) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup Request - resource = client.topic_path('[PROJECT]', '[TOPIC]') + resource = client.topic_path("[PROJECT]", "[TOPIC]") permissions = [] response = client.test_iam_permissions(resource, permissions) @@ -458,20 +453,21 @@ def test_test_iam_permissions(self): assert len(channel.requests) == 1 expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions) + resource=resource, permissions=permissions + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_test_iam_permissions_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = publisher_client.PublisherClient() # Setup request - resource = client.topic_path('[PROJECT]', '[TOPIC]') + resource = client.topic_path("[PROJECT]", "[TOPIC]") permissions = [] with pytest.raises(CustomException): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index aad07e34e45b..70d3222b98bc 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -55,16 
+55,12 @@ def __init__(self, responses=[]): self.responses = responses self.requests = [] - def unary_unary(self, - method, - request_serializer=None, - response_deserializer=None): + def unary_unary(self, method, request_serializer=None, response_deserializer=None): return MultiCallableStub(method, self) - def stream_stream(self, - method, - request_serializer=None, - response_deserializer=None): + def stream_stream( + self, method, request_serializer=None, response_deserializer=None + ): return MultiCallableStub(method, self) @@ -75,28 +71,28 @@ class CustomException(Exception): class TestSubscriberClient(object): def test_create_subscription(self): # Setup Expected Response - name_2 = 'name2-1052831874' - topic_2 = 'topic2-1139259102' + name_2 = "name2-1052831874" + topic_2 = "topic2-1139259102" ack_deadline_seconds = 2135351438 retain_acked_messages = False expected_response = { - 'name': name_2, - 'topic': topic_2, - 'ack_deadline_seconds': ack_deadline_seconds, - 'retain_acked_messages': retain_acked_messages + "name": name_2, + "topic": topic_2, + "ack_deadline_seconds": ack_deadline_seconds, + "retain_acked_messages": retain_acked_messages, } expected_response = pubsub_pb2.Subscription(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - topic = client.topic_path('[PROJECT]', '[TOPIC]') + name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + topic = client.topic_path("[PROJECT]", "[TOPIC]") response = client.create_subscription(name, topic) assert expected_response == response @@ -109,141 +105,140 @@ def test_create_subscription(self): def 
test_create_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - topic = client.topic_path('[PROJECT]', '[TOPIC]') + name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + topic = client.topic_path("[PROJECT]", "[TOPIC]") with pytest.raises(CustomException): client.create_subscription(name, topic) def test_get_subscription(self): # Setup Expected Response - name = 'name3373707' - topic = 'topic110546223' + name = "name3373707" + topic = "topic110546223" ack_deadline_seconds = 2135351438 retain_acked_messages = False expected_response = { - 'name': name, - 'topic': topic, - 'ack_deadline_seconds': ack_deadline_seconds, - 'retain_acked_messages': retain_acked_messages + "name": name, + "topic": topic, + "ack_deadline_seconds": ack_deadline_seconds, + "retain_acked_messages": retain_acked_messages, } expected_response = pubsub_pb2.Subscription(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") response = client.get_subscription(subscription) assert expected_response == response assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSubscriptionRequest( - subscription=subscription) + expected_request = 
pubsub_pb2.GetSubscriptionRequest(subscription=subscription) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_get_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") with pytest.raises(CustomException): client.get_subscription(subscription) def test_update_subscription(self): # Setup Expected Response - name = 'name3373707' - topic = 'topic110546223' + name = "name3373707" + topic = "topic110546223" ack_deadline_seconds_2 = 921632575 retain_acked_messages = False expected_response = { - 'name': name, - 'topic': topic, - 'ack_deadline_seconds': ack_deadline_seconds_2, - 'retain_acked_messages': retain_acked_messages + "name": name, + "topic": topic, + "ack_deadline_seconds": ack_deadline_seconds_2, + "retain_acked_messages": retain_acked_messages, } expected_response = pubsub_pb2.Subscription(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request ack_deadline_seconds = 42 - subscription = {'ack_deadline_seconds': ack_deadline_seconds} - paths_element = 'ack_deadline_seconds' + subscription = {"ack_deadline_seconds": ack_deadline_seconds} + paths_element = "ack_deadline_seconds" paths = [paths_element] - update_mask = {'paths': paths} + update_mask = {"paths": 
paths} response = client.update_subscription(subscription, update_mask) assert expected_response == response assert len(channel.requests) == 1 expected_request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask) + subscription=subscription, update_mask=update_mask + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_update_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request ack_deadline_seconds = 42 - subscription = {'ack_deadline_seconds': ack_deadline_seconds} - paths_element = 'ack_deadline_seconds' + subscription = {"ack_deadline_seconds": ack_deadline_seconds} + paths_element = "ack_deadline_seconds" paths = [paths_element] - update_mask = {'paths': paths} + update_mask = {"paths": paths} with pytest.raises(CustomException): client.update_subscription(subscription, update_mask) def test_list_subscriptions(self): # Setup Expected Response - next_page_token = '' + next_page_token = "" subscriptions_element = {} subscriptions = [subscriptions_element] expected_response = { - 'next_page_token': next_page_token, - 'subscriptions': subscriptions + "next_page_token": next_page_token, + "subscriptions": subscriptions, } - expected_response = pubsub_pb2.ListSubscriptionsResponse( - **expected_response) + expected_response = pubsub_pb2.ListSubscriptionsResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = 
subscriber_client.SubscriberClient() # Setup Request - project = client.project_path('[PROJECT]') + project = client.project_path("[PROJECT]") paged_list_response = client.list_subscriptions(project) resources = list(paged_list_response) @@ -258,13 +253,13 @@ def test_list_subscriptions(self): def test_list_subscriptions_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - project = client.project_path('[PROJECT]') + project = client.project_path("[PROJECT]") paged_list_response = client.list_subscriptions(project) with pytest.raises(CustomException): @@ -272,45 +267,46 @@ def test_list_subscriptions_exception(self): def test_delete_subscription(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") client.delete_subscription(subscription) assert len(channel.requests) == 1 expected_request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription) + subscription=subscription + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_delete_subscription_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = 
subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") with pytest.raises(CustomException): client.delete_subscription(subscription) def test_modify_ack_deadline(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") ack_ids = [] ack_deadline_seconds = 2135351438 @@ -320,56 +316,57 @@ def test_modify_ack_deadline(self): expected_request = pubsub_pb2.ModifyAckDeadlineRequest( subscription=subscription, ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds) + ack_deadline_seconds=ack_deadline_seconds, + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_modify_ack_deadline_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") ack_ids = [] ack_deadline_seconds = 2135351438 with pytest.raises(CustomException): - client.modify_ack_deadline(subscription, ack_ids, - ack_deadline_seconds) + client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) def test_acknowledge(self): channel = ChannelStub() - patch = 
mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") ack_ids = [] client.acknowledge(subscription, ack_ids) assert len(channel.requests) == 1 expected_request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids) + subscription=subscription, ack_ids=ack_ids + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_acknowledge_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") ack_ids = [] with pytest.raises(CustomException): @@ -382,13 +379,13 @@ def test_pull(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") max_messages = 496131527 response = client.pull(subscription, max_messages) @@ -396,20 +393,21 @@ def test_pull(self): assert len(channel.requests) == 1 expected_request = 
pubsub_pb2.PullRequest( - subscription=subscription, max_messages=max_messages) + subscription=subscription, max_messages=max_messages + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_pull_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") max_messages = 496131527 with pytest.raises(CustomException): @@ -419,23 +417,22 @@ def test_streaming_pull(self): # Setup Expected Response received_messages_element = {} received_messages = [received_messages_element] - expected_response = {'received_messages': received_messages} - expected_response = pubsub_pb2.StreamingPullResponse( - **expected_response) + expected_response = {"received_messages": received_messages} + expected_response = pubsub_pb2.StreamingPullResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") stream_ack_deadline_seconds = 1875467245 request = { - 'subscription': subscription, - 'stream_ack_deadline_seconds': stream_ack_deadline_seconds + "subscription": subscription, + "stream_ack_deadline_seconds": stream_ack_deadline_seconds, } request = 
pubsub_pb2.StreamingPullRequest(**request) requests = [request] @@ -454,17 +451,17 @@ def test_streaming_pull(self): def test_streaming_pull_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") stream_ack_deadline_seconds = 1875467245 request = { - 'subscription': subscription, - 'stream_ack_deadline_seconds': stream_ack_deadline_seconds + "subscription": subscription, + "stream_ack_deadline_seconds": stream_ack_deadline_seconds, } request = pubsub_pb2.StreamingPullRequest(**request) @@ -475,33 +472,34 @@ def test_streaming_pull_exception(self): def test_modify_push_config(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") push_config = {} client.modify_push_config(subscription, push_config) assert len(channel.requests) == 1 expected_request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config) + subscription=subscription, push_config=push_config + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_modify_push_config_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = 
mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") push_config = {} with pytest.raises(CustomException): @@ -509,25 +507,21 @@ def test_modify_push_config_exception(self): def test_list_snapshots(self): # Setup Expected Response - next_page_token = '' + next_page_token = "" snapshots_element = {} snapshots = [snapshots_element] - expected_response = { - 'next_page_token': next_page_token, - 'snapshots': snapshots - } - expected_response = pubsub_pb2.ListSnapshotsResponse( - **expected_response) + expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} + expected_response = pubsub_pb2.ListSnapshotsResponse(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - project = client.project_path('[PROJECT]') + project = client.project_path("[PROJECT]") paged_list_response = client.list_snapshots(project) resources = list(paged_list_response) @@ -542,13 +536,13 @@ def test_list_snapshots(self): def test_list_snapshots_exception(self): channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - project = client.project_path('[PROJECT]') + 
project = client.project_path("[PROJECT]") paged_list_response = client.list_snapshots(project) with pytest.raises(CustomException): @@ -556,105 +550,107 @@ def test_list_snapshots_exception(self): def test_create_snapshot(self): # Setup Expected Response - name_2 = 'name2-1052831874' - topic = 'topic110546223' - expected_response = {'name': name_2, 'topic': topic} + name_2 = "name2-1052831874" + topic = "topic110546223" + expected_response = {"name": name_2, "topic": topic} expected_response = pubsub_pb2.Snapshot(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") response = client.create_snapshot(name, subscription) assert expected_response == response assert len(channel.requests) == 1 expected_request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription) + name=name, subscription=subscription + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_create_snapshot_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - subscription = client.subscription_path('[PROJECT]', 
'[SUBSCRIPTION]') + name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") with pytest.raises(CustomException): client.create_snapshot(name, subscription) def test_update_snapshot(self): # Setup Expected Response - name = 'name3373707' - topic = 'topic110546223' - expected_response = {'name': name, 'topic': topic} + name = "name3373707" + topic = "topic110546223" + expected_response = {"name": name, "topic": topic} expected_response = pubsub_pb2.Snapshot(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request seconds = 123456 - expire_time = {'seconds': seconds} - snapshot = {'expire_time': expire_time} - paths_element = 'expire_time' + expire_time = {"seconds": seconds} + snapshot = {"expire_time": expire_time} + paths_element = "expire_time" paths = [paths_element] - update_mask = {'paths': paths} + update_mask = {"paths": paths} response = client.update_snapshot(snapshot, update_mask) assert expected_response == response assert len(channel.requests) == 1 expected_request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask) + snapshot=snapshot, update_mask=update_mask + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_update_snapshot_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request seconds = 123456 - expire_time 
= {'seconds': seconds} - snapshot = {'expire_time': expire_time} - paths_element = 'expire_time' + expire_time = {"seconds": seconds} + snapshot = {"expire_time": expire_time} + paths_element = "expire_time" paths = [paths_element] - update_mask = {'paths': paths} + update_mask = {"paths": paths} with pytest.raises(CustomException): client.update_snapshot(snapshot, update_mask) def test_delete_snapshot(self): channel = ChannelStub() - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") client.delete_snapshot(snapshot) @@ -666,13 +662,13 @@ def test_delete_snapshot(self): def test_delete_snapshot_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") with pytest.raises(CustomException): client.delete_snapshot(snapshot) @@ -684,13 +680,13 @@ def test_seek(self): # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = 
client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") response = client.seek(subscription) assert expected_response == response @@ -703,13 +699,13 @@ def test_seek(self): def test_seek_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") with pytest.raises(CustomException): client.seek(subscription) @@ -717,19 +713,19 @@ def test_seek_exception(self): def test_set_iam_policy(self): # Setup Expected Response version = 351608024 - etag = b'21' - expected_response = {'version': version, 'etag': etag} + etag = b"21" + expected_response = {"version": version, "etag": etag} expected_response = policy_pb2.Policy(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") policy = {} response = client.set_iam_policy(resource, policy) @@ -737,20 +733,21 @@ def test_set_iam_policy(self): assert len(channel.requests) == 1 expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy) + resource=resource, policy=policy + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_set_iam_policy_exception(self): # Mock the API response 
channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") policy = {} with pytest.raises(CustomException): @@ -759,39 +756,38 @@ def test_set_iam_policy_exception(self): def test_get_iam_policy(self): # Setup Expected Response version = 351608024 - etag = b'21' - expected_response = {'version': version, 'etag': etag} + etag = b"21" + expected_response = {"version": version, "etag": etag} expected_response = policy_pb2.Policy(**expected_response) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") response = client.get_iam_policy(resource) assert expected_response == response assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource) + expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_get_iam_policy_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = 
channel client = subscriber_client.SubscriberClient() # Setup request - resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") with pytest.raises(CustomException): client.get_iam_policy(resource) @@ -800,17 +796,18 @@ def test_test_iam_permissions(self): # Setup Expected Response expected_response = {} expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response) + **expected_response + ) # Mock the API response channel = ChannelStub(responses=[expected_response]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup Request - resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") permissions = [] response = client.test_iam_permissions(resource, permissions) @@ -818,20 +815,21 @@ def test_test_iam_permissions(self): assert len(channel.requests) == 1 expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions) + resource=resource, permissions=permissions + ) actual_request = channel.requests[0][1] assert expected_request == actual_request def test_test_iam_permissions_exception(self): # Mock the API response channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch('google.api_core.grpc_helpers.create_channel') + patch = mock.patch("google.api_core.grpc_helpers.create_channel") with patch as create_channel: create_channel.return_value = channel client = subscriber_client.SubscriberClient() # Setup request - resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") permissions = [] with pytest.raises(CustomException): diff --git 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 0ef98945b0bf..b19a5a1f1504 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -35,7 +35,7 @@ def create_batch(status=None, settings=types.BatchSettings()): """ creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - batch = Batch(client, 'topic_name', settings, autocommit=False) + batch = Batch(client, "topic_name", settings, autocommit=False) if status: batch._status = status return batch @@ -44,7 +44,7 @@ def create_batch(status=None, settings=types.BatchSettings()): def test_len(): batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) assert len(batch) == 0 - batch.publish(types.PubsubMessage(data=b'foo')) + batch.publish(types.PubsubMessage(data=b"foo")) assert len(batch) == 1 @@ -59,12 +59,12 @@ def test_will_accept_oversize(): settings=types.BatchSettings(max_bytes=10), status=BatchStatus.ACCEPTING_MESSAGES, ) - message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + message = types.PubsubMessage(data=b"abcdefghijklmnopqrstuvwxyz") assert batch.will_accept(message) is True def test_will_not_accept_status(): - batch = create_batch(status='talk to the hand') + batch = create_batch(status="talk to the hand") message = types.PubsubMessage() assert batch.will_accept(message) is False @@ -74,5 +74,5 @@ def test_will_not_accept_number(): settings=types.BatchSettings(max_messages=-1), status=BatchStatus.ACCEPTING_MESSAGES, ) - message = types.PubsubMessage(data=b'abc') + message = types.PubsubMessage(data=b"abc") assert batch.will_accept(message) is False diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 315f072bdf29..af04f865dd40 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -47,7 +47,7 @@ def create_batch(autocommit=False, **batch_settings): """ client = create_client() settings = types.BatchSettings(**batch_settings) - return Batch(client, 'topic_name', settings, autocommit=autocommit) + return Batch(client, "topic_name", settings, autocommit=autocommit) def test_init(): @@ -57,11 +57,10 @@ def test_init(): # Do not actually create a thread, but do verify that one was created; # it should be running the batch's "monitor" method (which commits the # batch once time elapses). - with mock.patch.object(threading, 'Thread', autospec=True) as Thread: - batch = Batch(client, 'topic_name', types.BatchSettings()) + with mock.patch.object(threading, "Thread", autospec=True) as Thread: + batch = Batch(client, "topic_name", types.BatchSettings()) Thread.assert_called_once_with( - name='Thread-MonitorBatchPublisher', - target=batch.monitor, + name="Thread-MonitorBatchPublisher", target=batch.monitor ) # New batches start able to accept messages by default. 
@@ -69,11 +68,11 @@ def test_init(): def test_init_infinite_latency(): - batch = create_batch(max_latency=float('inf')) + batch = create_batch(max_latency=float("inf")) assert batch._thread is None -@mock.patch.object(threading, 'Lock') +@mock.patch.object(threading, "Lock") def test_make_lock(Lock): lock = Batch.make_lock() assert lock is Lock.return_value @@ -83,19 +82,18 @@ def test_make_lock(Lock): def test_client(): client = create_client() settings = types.BatchSettings() - batch = Batch(client, 'topic_name', settings, autocommit=False) + batch = Batch(client, "topic_name", settings, autocommit=False) assert batch.client is client def test_commit(): batch = create_batch() - with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + with mock.patch.object(threading, "Thread", autospec=True) as Thread: batch.commit() # A thread should have been created to do the actual commit. Thread.assert_called_once_with( - name='Thread-CommitBatchPublisher', - target=batch._commit, + name="Thread-CommitBatchPublisher", target=batch._commit ) Thread.return_value.start.assert_called_once_with() @@ -108,7 +106,7 @@ def test_commit(): def test_commit_no_op(): batch = create_batch() batch._status = BatchStatus.IN_PROGRESS - with mock.patch.object(threading, 'Thread', autospec=True) as Thread: + with mock.patch.object(threading, "Thread", autospec=True) as Thread: batch.commit() # Make sure a thread was not created. @@ -121,36 +119,37 @@ def test_commit_no_op(): def test_blocking__commit(): batch = create_batch() futures = ( - batch.publish({'data': b'This is my message.'}), - batch.publish({'data': b'This is another message.'}), + batch.publish({"data": b"This is my message."}), + batch.publish({"data": b"This is another message."}), ) # Set up the underlying API publish method to return a PublishResponse. 
- publish_response = types.PublishResponse(message_ids=['a', 'b']) + publish_response = types.PublishResponse(message_ids=["a", "b"]) patch = mock.patch.object( - type(batch.client.api), 'publish', return_value=publish_response) + type(batch.client.api), "publish", return_value=publish_response + ) with patch as publish: batch._commit() # Establish that the underlying API call was made with expected # arguments. publish.assert_called_once_with( - 'topic_name', + "topic_name", [ - types.PubsubMessage(data=b'This is my message.'), - types.PubsubMessage(data=b'This is another message.'), + types.PubsubMessage(data=b"This is my message."), + types.PubsubMessage(data=b"This is another message."), ], ) # Establish that all of the futures are done, and that they have the # expected values. assert futures[0].done() - assert futures[0].result() == 'a' + assert futures[0].result() == "a" assert futures[1].done() - assert futures[1].result() == 'b' + assert futures[1].result() == "b" -@mock.patch.object(thread, '_LOGGER') +@mock.patch.object(thread, "_LOGGER") def test_blocking__commit_starting(_LOGGER): batch = create_batch() batch._status = BatchStatus.STARTING @@ -158,11 +157,10 @@ def test_blocking__commit_starting(_LOGGER): batch._commit() assert batch._status == BatchStatus.SUCCESS - _LOGGER.debug.assert_called_once_with( - 'No messages to publish, exiting commit') + _LOGGER.debug.assert_called_once_with("No messages to publish, exiting commit") -@mock.patch.object(thread, '_LOGGER') +@mock.patch.object(thread, "_LOGGER") def test_blocking__commit_already_started(_LOGGER): batch = create_batch() batch._status = BatchStatus.IN_PROGRESS @@ -171,12 +169,13 @@ def test_blocking__commit_already_started(_LOGGER): assert batch._status == BatchStatus.IN_PROGRESS _LOGGER.debug.assert_called_once_with( - 'Batch is already in progress, exiting commit') + "Batch is already in progress, exiting commit" + ) def test_blocking__commit_no_messages(): batch = create_batch() - with 
mock.patch.object(type(batch.client.api), 'publish') as publish: + with mock.patch.object(type(batch.client.api), "publish") as publish: batch._commit() assert publish.call_count == 0 @@ -185,14 +184,15 @@ def test_blocking__commit_no_messages(): def test_blocking__commit_wrong_messageid_length(): batch = create_batch() futures = ( - batch.publish({'data': b'blah blah blah'}), - batch.publish({'data': b'blah blah blah blah'}), + batch.publish({"data": b"blah blah blah"}), + batch.publish({"data": b"blah blah blah blah"}), ) # Set up a PublishResponse that only returns one message ID. - publish_response = types.PublishResponse(message_ids=['a']) + publish_response = types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( - type(batch.client.api), 'publish', return_value=publish_response) + type(batch.client.api), "publish", return_value=publish_response + ) with patch: batch._commit() @@ -205,14 +205,13 @@ def test_blocking__commit_wrong_messageid_length(): def test_block__commmit_api_error(): batch = create_batch() futures = ( - batch.publish({'data': b'blah blah blah'}), - batch.publish({'data': b'blah blah blah blah'}), + batch.publish({"data": b"blah blah blah"}), + batch.publish({"data": b"blah blah blah blah"}), ) # Make the API throw an error when publishing. 
- error = google.api_core.exceptions.InternalServerError('uh oh') - patch = mock.patch.object( - type(batch.client.api), 'publish', side_effect=error) + error = google.api_core.exceptions.InternalServerError("uh oh") + patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error) with patch: batch._commit() @@ -224,8 +223,8 @@ def test_block__commmit_api_error(): def test_monitor(): batch = create_batch(max_latency=5.0) - with mock.patch.object(time, 'sleep') as sleep: - with mock.patch.object(type(batch), '_commit') as _commit: + with mock.patch.object(time, "sleep") as sleep: + with mock.patch.object(type(batch), "_commit") as _commit: batch.monitor() # The monitor should have waited the given latency. @@ -238,9 +237,9 @@ def test_monitor(): def test_monitor_already_committed(): batch = create_batch(max_latency=5.0) - status = 'something else' + status = "something else" batch._status = status - with mock.patch.object(time, 'sleep') as sleep: + with mock.patch.object(time, "sleep") as sleep: batch.monitor() # The monitor should have waited the given latency. @@ -253,9 +252,9 @@ def test_monitor_already_committed(): def test_publish(): batch = create_batch() messages = ( - types.PubsubMessage(data=b'foobarbaz'), - types.PubsubMessage(data=b'spameggs'), - types.PubsubMessage(data=b'1335020400'), + types.PubsubMessage(data=b"foobarbaz"), + types.PubsubMessage(data=b"spameggs"), + types.PubsubMessage(data=b"1335020400"), ) # Publish each of the messages, which should save them to the batch. @@ -276,7 +275,7 @@ def test_publish_not_will_accept(): batch = create_batch(max_messages=0) # Publish the message. 
- message = types.PubsubMessage(data=b'foobarbaz') + message = types.PubsubMessage(data=b"foobarbaz") future = batch.publish(message) assert future is None @@ -289,13 +288,13 @@ def test_publish_exceed_max_messages(): max_messages = 4 batch = create_batch(max_messages=max_messages) messages = ( - types.PubsubMessage(data=b'foobarbaz'), - types.PubsubMessage(data=b'spameggs'), - types.PubsubMessage(data=b'1335020400'), + types.PubsubMessage(data=b"foobarbaz"), + types.PubsubMessage(data=b"spameggs"), + types.PubsubMessage(data=b"1335020400"), ) # Publish each of the messages, which should save them to the batch. - with mock.patch.object(batch, 'commit') as commit: + with mock.patch.object(batch, "commit") as commit: futures = [batch.publish(message) for message in messages] assert batch._futures == futures assert len(futures) == max_messages - 1 @@ -305,7 +304,7 @@ def test_publish_exceed_max_messages(): # When a fourth message is published, commit should be called. # No future will be returned in this case. - future = batch.publish(types.PubsubMessage(data=b'last one')) + future = batch.publish(types.PubsubMessage(data=b"last one")) commit.assert_called_once_with() assert future is None @@ -314,11 +313,11 @@ def test_publish_exceed_max_messages(): def test_publish_dict(): batch = create_batch() - future = batch.publish( - {'data': b'foobarbaz', 'attributes': {'spam': 'eggs'}}) + future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}}) # There should be one message on the batch. 
expected_message = types.PubsubMessage( - data=b'foobarbaz', attributes={'spam': 'eggs'}) + data=b"foobarbaz", attributes={"spam": "eggs"} + ) assert batch.messages == [expected_message] assert batch._futures == [future] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 523dbe855fd1..a141e1f12187 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -37,7 +37,7 @@ def test_init(): def test_init_emulator(monkeypatch): - monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/foo/bar/') + monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar/") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. client = publisher.Client() @@ -47,7 +47,7 @@ def test_init_emulator(monkeypatch): # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. channel = client.api.transport.publish._channel - assert channel.target().decode('utf8') == '/foo/bar/' + assert channel.target().decode("utf8") == "/foo/bar/" def test_batch_create(): @@ -55,7 +55,7 @@ def test_batch_create(): client = publisher.Client(credentials=creds) assert len(client._batches) == 0 - topic = 'topic/path' + topic = "topic/path" batch = client._batch(topic, autocommit=False) assert client._batches == {topic: batch} @@ -64,7 +64,7 @@ def test_batch_exists(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - topic = 'topic/path' + topic = "topic/path" client._batches[topic] = mock.sentinel.batch # A subsequent request should return the same batch. 
@@ -77,7 +77,7 @@ def test_batch_create_and_exists(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - topic = 'topic/path' + topic = "topic/path" client._batches[topic] = mock.sentinel.batch # A subsequent request should return the same batch. @@ -94,17 +94,14 @@ def test_publish(): batch = mock.Mock(spec=client._batch_class) # Set the mock up to claim indiscriminately that it accepts all messages. batch.will_accept.return_value = True - batch.publish.side_effect = ( - mock.sentinel.future1, - mock.sentinel.future2, - ) + batch.publish.side_effect = (mock.sentinel.future1, mock.sentinel.future2) - topic = 'topic/path' + topic = "topic/path" client._batches[topic] = batch # Begin publishing. - future1 = client.publish(topic, b'spam') - future2 = client.publish(topic, b'foo', bar='baz') + future1 = client.publish(topic, b"spam") + future2 = client.publish(topic, b"foo", bar="baz") assert future1 is mock.sentinel.future1 assert future2 is mock.sentinel.future2 @@ -112,21 +109,18 @@ def test_publish(): # Check mock. batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b'spam')), - mock.call(types.PubsubMessage( - data=b'foo', - attributes={'bar': 'baz'}, - )), - ], + mock.call(types.PubsubMessage(data=b"spam")), + mock.call(types.PubsubMessage(data=b"foo", attributes={"bar": "baz"})), + ] ) def test_publish_data_not_bytestring_error(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - topic = 'topic/path' + topic = "topic/path" with pytest.raises(TypeError): - client.publish(topic, u'This is a text string.') + client.publish(topic, u"This is a text string.") with pytest.raises(TypeError): client.publish(topic, 42) @@ -140,20 +134,17 @@ def test_publish_attrs_bytestring(): # Set the mock up to claim indiscriminately that it accepts all messages. 
batch.will_accept.return_value = True - topic = 'topic/path' + topic = "topic/path" client._batches[topic] = batch # Begin publishing. - future = client.publish(topic, b'foo', bar=b'baz') + future = client.publish(topic, b"foo", bar=b"baz") assert future is batch.publish.return_value # The attributes should have been sent as text. batch.publish.assert_called_once_with( - types.PubsubMessage( - data=b'foo', - attributes={'bar': u'baz'}, - ), + types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) ) @@ -169,7 +160,7 @@ def test_publish_new_batch_needed(): batch1.publish.return_value = None batch2.publish.return_value = mock.sentinel.future - topic = 'topic/path' + topic = "topic/path" client._batches[topic] = batch1 # Actually mock the batch class now. @@ -177,20 +168,14 @@ def test_publish_new_batch_needed(): client._batch_class = batch_class # Publish a message. - future = client.publish(topic, b'foo', bar=b'baz') + future = client.publish(topic, b"foo", bar=b"baz") assert future is mock.sentinel.future # Check the mocks. 
batch_class.assert_called_once_with( - autocommit=True, - client=client, - settings=client.batch_settings, - topic=topic, - ) - message_pb = types.PubsubMessage( - data=b'foo', - attributes={'bar': u'baz'}, + autocommit=True, client=client, settings=client.batch_settings, topic=topic ) + message_pb = types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) batch1.publish.assert_called_once_with(message_pb) batch2.publish.assert_called_once_with(message_pb) @@ -198,9 +183,9 @@ def test_publish_new_batch_needed(): def test_publish_attrs_type_error(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - topic = 'topic/path' + topic = "topic/path" with pytest.raises(TypeError): - client.publish(topic, b'foo', answer=42) + client.publish(topic, b"foo", answer=42) def test_gapic_instance_method(): @@ -208,25 +193,26 @@ def test_gapic_instance_method(): client = publisher.Client(credentials=creds) ct = mock.Mock() - client.api._inner_api_calls['create_topic'] = ct + client.api._inner_api_calls["create_topic"] = ct - client.create_topic('projects/foo/topics/bar') + client.create_topic("projects/foo/topics/bar") assert ct.call_count == 1 _, args, _ = ct.mock_calls[0] - assert args[0] == types.Topic(name='projects/foo/topics/bar') + assert args[0] == types.Topic(name="projects/foo/topics/bar") def test_gapic_class_method_on_class(): - answer = publisher.Client.topic_path('foo', 'bar') - assert answer == 'projects/foo/topics/bar' + answer = publisher.Client.topic_path("foo", "bar") + assert answer == "projects/foo/topics/bar" def test_class_method_factory(): patch = mock.patch( - 'google.oauth2.service_account.Credentials.from_service_account_file') + "google.oauth2.service_account.Credentials.from_service_account_file" + ) with patch: - client = publisher.Client.from_service_account_file('filename.json') + client = publisher.Client.from_service_account_file("filename.json") assert isinstance(client, publisher.Client) @@ -234,5 
+220,5 @@ def test_class_method_factory(): def test_gapic_class_method_on_instance(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - answer = client.topic_path('foo', 'bar') - assert answer == 'projects/foo/topics/bar' + answer = client.topic_path("foo", "bar") + assert answer == "projects/foo/topics/bar" diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 7fdee71e7dcb..0e1e9744f6d9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -25,16 +25,20 @@ import pytest -@pytest.mark.parametrize('item,method_name', [ - (requests.AckRequest(0, 0, 0), 'ack'), - (requests.DropRequest(0, 0), 'drop'), - (requests.LeaseRequest(0, 0), 'lease'), - (requests.ModAckRequest(0, 0), 'modify_ack_deadline'), - (requests.NackRequest(0, 0), 'nack') -]) +@pytest.mark.parametrize( + "item,method_name", + [ + (requests.AckRequest(0, 0, 0), "ack"), + (requests.DropRequest(0, 0), "drop"), + (requests.LeaseRequest(0, 0), "lease"), + (requests.ModAckRequest(0, 0), "modify_ack_deadline"), + (requests.NackRequest(0, 0), "nack"), + ], +) def test_dispatch_callback(item, method_name): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) items = [item] @@ -47,7 +51,8 @@ def test_dispatch_callback(item, method_name): def test_dispatch_callback_inactive(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) @@ -58,16 +63,16 @@ def 
test_dispatch_callback_inactive(): def test_ack(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.AckRequest( - ack_id='ack_id_string', byte_size=0, time_to_ack=20)] + items = [requests.AckRequest(ack_id="ack_id_string", byte_size=0, time_to_ack=20)] dispatcher_.ack(items) - manager.send.assert_called_once_with(types.StreamingPullRequest( - ack_ids=['ack_id_string'], - )) + manager.send.assert_called_once_with( + types.StreamingPullRequest(ack_ids=["ack_id_string"]) + ) manager.leaser.remove.assert_called_once_with(items) manager.maybe_resume_consumer.assert_called_once() @@ -76,26 +81,27 @@ def test_ack(): def test_ack_no_time(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.AckRequest( - ack_id='ack_id_string', byte_size=0, time_to_ack=None)] + items = [requests.AckRequest(ack_id="ack_id_string", byte_size=0, time_to_ack=None)] dispatcher_.ack(items) - manager.send.assert_called_once_with(types.StreamingPullRequest( - ack_ids=['ack_id_string'], - )) + manager.send.assert_called_once_with( + types.StreamingPullRequest(ack_ids=["ack_id_string"]) + ) manager.ack_histogram.add.assert_not_called() def test_lease(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.LeaseRequest(ack_id='ack_id_string', byte_size=10)] + items = [requests.LeaseRequest(ack_id="ack_id_string", byte_size=10)] dispatcher_.lease(items) manager.leaser.add.assert_called_once_with(items) @@ -104,10 +110,11 @@ def 
test_lease(): def test_drop(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.DropRequest(ack_id='ack_id_string', byte_size=10)] + items = [requests.DropRequest(ack_id="ack_id_string", byte_size=10)] dispatcher_.drop(items) manager.leaser.remove.assert_called_once_with(items) @@ -116,52 +123,59 @@ def test_drop(): def test_nack(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.NackRequest(ack_id='ack_id_string', byte_size=10)] + items = [requests.NackRequest(ack_id="ack_id_string", byte_size=10)] dispatcher_.nack(items) - manager.send.assert_called_once_with(types.StreamingPullRequest( - modify_deadline_ack_ids=['ack_id_string'], - modify_deadline_seconds=[0], - )) + manager.send.assert_called_once_with( + types.StreamingPullRequest( + modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[0] + ) + ) def test_modify_ack_deadline(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.ModAckRequest(ack_id='ack_id_string', seconds=60)] + items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=60)] dispatcher_.modify_ack_deadline(items) - manager.send.assert_called_once_with(types.StreamingPullRequest( - modify_deadline_ack_ids=['ack_id_string'], - modify_deadline_seconds=[60], - )) + manager.send.assert_called_once_with( + types.StreamingPullRequest( + modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[60] + ) + ) -@mock.patch('threading.Thread', 
autospec=True) +@mock.patch("threading.Thread", autospec=True) def test_start(thread): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) dispatcher_.start() thread.assert_called_once_with( - name=dispatcher._CALLBACK_WORKER_NAME, target=mock.ANY) + name=dispatcher._CALLBACK_WORKER_NAME, target=mock.ANY + ) thread.return_value.start.assert_called_once() assert dispatcher_._thread is not None -@mock.patch('threading.Thread', autospec=True) +@mock.patch("threading.Thread", autospec=True) def test_start_already_started(thread): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) dispatcher_._thread = mock.sentinel.thread @@ -185,7 +199,6 @@ def test_stop(): def test_stop_no_join(): - dispatcher_ = dispatcher.Dispatcher( - mock.sentinel.manager, mock.sentinel.queue) + dispatcher_ = dispatcher.Dispatcher(mock.sentinel.manager, mock.sentinel.queue) dispatcher_.stop() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 7c695047c86f..4d41713e6ec8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -24,7 +24,8 @@ class TestStreamingPullFuture(object): def make_future(self): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) future = futures.StreamingPullFuture(manager) return future @@ -34,7 +35,8 @@ def test_default_state(self): assert 
future.running() assert not future.done() future._manager.add_close_callback.assert_called_once_with( - future._on_close_callback) + future._on_close_callback + ) def test__on_close_callback_success(self): future = self.make_future() @@ -47,7 +49,7 @@ def test__on_close_callback_success(self): def test__on_close_callback_failure(self): future = self.make_future() - future._on_close_callback(mock.sentinel.manager, ValueError('meep')) + future._on_close_callback(mock.sentinel.manager, ValueError("meep")) with pytest.raises(ValueError): future.result() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index f9147a4d7e39..8f5049691a9d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -25,27 +25,29 @@ def test_heartbeat_inactive(caplog): caplog.set_level(logging.INFO) manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) manager.is_active = False heartbeater_ = heartbeater.Heartbeater(manager) heartbeater_.heartbeat() - assert 'exiting' in caplog.text + assert "exiting" in caplog.text def test_heartbeat_stopped(caplog): caplog.set_level(logging.INFO) manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) heartbeater_ = heartbeater.Heartbeater(manager) heartbeater_.stop() heartbeater_.heartbeat() - assert 'exiting' in caplog.text + assert "exiting" in caplog.text def make_sleep_mark_manager_as_inactive(heartbeater): @@ -60,7 +62,8 @@ def trigger_inactive(timeout): def test_heartbeat_once(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + 
streaming_pull_manager.StreamingPullManager, instance=True + ) heartbeater_ = heartbeater.Heartbeater(manager) make_sleep_mark_manager_as_inactive(heartbeater_) @@ -69,27 +72,29 @@ def test_heartbeat_once(): manager.heartbeat.assert_called_once() -@mock.patch('threading.Thread', autospec=True) +@mock.patch("threading.Thread", autospec=True) def test_start(thread): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) heartbeater_ = heartbeater.Heartbeater(manager) heartbeater_.start() thread.assert_called_once_with( - name=heartbeater._HEARTBEAT_WORKER_NAME, - target=heartbeater_.heartbeat) + name=heartbeater._HEARTBEAT_WORKER_NAME, target=heartbeater_.heartbeat + ) thread.return_value.start.assert_called_once() assert heartbeater_._thread is not None -@mock.patch('threading.Thread', autospec=True) +@mock.patch("threading.Thread", autospec=True) def test_start_already_started(thread): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) heartbeater_ = heartbeater.Heartbeater(manager) heartbeater_._thread = mock.sentinel.thread @@ -101,7 +106,8 @@ def test_start_already_started(thread): def test_stop(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) heartbeater_ = heartbeater.Heartbeater(manager) thread = mock.create_autospec(threading.Thread, instance=True) heartbeater_._thread = thread diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 507e8292f7c8..6e1bcc813c89 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -25,11 +25,8 @@ def test_queue_callback_worker(): # Set up an appropriate mock for the queue, and call the queue callback # thread. - with mock.patch.object(queue.Queue, 'get') as get: - get.side_effect = ( - mock.sentinel.A, - helper_threads.STOP, - queue.Empty()) + with mock.patch.object(queue.Queue, "get") as get: + get.side_effect = (mock.sentinel.A, helper_threads.STOP, queue.Empty()) qct() # Assert that we got the expected calls. @@ -44,12 +41,13 @@ def test_queue_callback_worker_stop_with_extra_items(): # Set up an appropriate mock for the queue, and call the queue callback # thread. - with mock.patch.object(queue.Queue, 'get') as get: + with mock.patch.object(queue.Queue, "get") as get: get.side_effect = ( mock.sentinel.A, helper_threads.STOP, mock.sentinel.B, - queue.Empty()) + queue.Empty(), + ) qct() # Assert that we got the expected calls. @@ -64,20 +62,21 @@ def test_queue_callback_worker_get_many(): # Set up an appropriate mock for the queue, and call the queue callback # thread. - with mock.patch.object(queue.Queue, 'get') as get: + with mock.patch.object(queue.Queue, "get") as get: get.side_effect = ( mock.sentinel.A, queue.Empty(), mock.sentinel.B, helper_threads.STOP, - queue.Empty()) + queue.Empty(), + ) qct() # Assert that we got the expected calls. assert get.call_count == 5 - callback.assert_has_calls([ - mock.call([(mock.sentinel.A)]), - mock.call([(mock.sentinel.B)])]) + callback.assert_has_calls( + [mock.call([(mock.sentinel.A)]), mock.call([(mock.sentinel.B)])] + ) def test_queue_callback_worker_max_items(): @@ -87,19 +86,20 @@ def test_queue_callback_worker_max_items(): # Set up an appropriate mock for the queue, and call the queue callback # thread. 
- with mock.patch.object(queue.Queue, 'get') as get: + with mock.patch.object(queue.Queue, "get") as get: get.side_effect = ( mock.sentinel.A, mock.sentinel.B, helper_threads.STOP, - queue.Empty()) + queue.Empty(), + ) qct() # Assert that we got the expected calls. assert get.call_count == 3 - callback.assert_has_calls([ - mock.call([(mock.sentinel.A)]), - mock.call([(mock.sentinel.B)])]) + callback.assert_has_calls( + [mock.call([(mock.sentinel.A)]), mock.call([(mock.sentinel.B)])] + ) def test_queue_callback_worker_exception(): @@ -109,11 +109,8 @@ def test_queue_callback_worker_exception(): # Set up an appropriate mock for the queue, and call the queue callback # thread. - with mock.patch.object(queue.Queue, 'get') as get: - get.side_effect = ( - mock.sentinel.A, - helper_threads.STOP, - queue.Empty()) + with mock.patch.object(queue.Queue, "get") as get: + get.side_effect = (mock.sentinel.A, helper_threads.STOP, queue.Empty()) qct() # Assert that we got the expected calls. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 447fa79f5036..c8b217473d40 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -29,20 +29,17 @@ def test_add_and_remove(): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.add([ - requests.LeaseRequest(ack_id='ack1', byte_size=50)]) - leaser_.add([ - requests.LeaseRequest(ack_id='ack2', byte_size=25)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack2", byte_size=25)]) assert leaser_.message_count == 2 - assert set(leaser_.ack_ids) == set(['ack1', 'ack2']) + assert set(leaser_.ack_ids) == set(["ack1", "ack2"]) assert leaser_.bytes == 75 - leaser_.remove([ - requests.DropRequest(ack_id='ack1', byte_size=50)]) + 
leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=50)]) assert leaser_.message_count == 1 - assert set(leaser_.ack_ids) == set(['ack2']) + assert set(leaser_.ack_ids) == set(["ack2"]) assert leaser_.bytes == 25 @@ -51,12 +48,10 @@ def test_add_already_managed(caplog): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.add([ - requests.LeaseRequest(ack_id='ack1', byte_size=50)]) - leaser_.add([ - requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) - assert 'already lease managed' in caplog.text + assert "already lease managed" in caplog.text def test_remove_not_managed(caplog): @@ -64,10 +59,9 @@ def test_remove_not_managed(caplog): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.remove([ - requests.DropRequest(ack_id='ack1', byte_size=50)]) + leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=50)]) - assert 'not managed' in caplog.text + assert "not managed" in caplog.text def test_remove_negative_bytes(caplog): @@ -75,20 +69,18 @@ def test_remove_negative_bytes(caplog): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.add([ - requests.LeaseRequest(ack_id='ack1', byte_size=50)]) - leaser_.remove([ - requests.DropRequest(ack_id='ack1', byte_size=75)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) + leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=75)]) assert leaser_.bytes == 0 - assert 'unexpectedly negative' in caplog.text + assert "unexpectedly negative" in caplog.text def create_manager(flow_control=types.FlowControl()): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) - manager.dispatcher = mock.create_autospec( - dispatcher.Dispatcher, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) + manager.dispatcher = mock.create_autospec(dispatcher.Dispatcher, instance=True) 
manager.is_active = True manager.flow_control = flow_control manager.ack_histogram = histogram.Histogram() @@ -104,7 +96,7 @@ def test_maintain_leases_inactive(caplog): leaser_.maintain_leases() - assert 'exiting' in caplog.text + assert "exiting" in caplog.text def test_maintain_leases_stopped(caplog): @@ -116,7 +108,7 @@ def test_maintain_leases_stopped(caplog): leaser_.maintain_leases() - assert 'exiting' in caplog.text + assert "exiting" in caplog.text def make_sleep_mark_manager_as_inactive(leaser): @@ -133,16 +125,13 @@ def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) make_sleep_mark_manager_as_inactive(leaser_) - leaser_.add([requests.LeaseRequest(ack_id='my ack id', byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="my ack id", byte_size=50)]) leaser_.maintain_leases() - manager.dispatcher.modify_ack_deadline.assert_called_once_with([ - requests.ModAckRequest( - ack_id='my ack id', - seconds=10, - ) - ]) + manager.dispatcher.modify_ack_deadline.assert_called_once_with( + [requests.ModAckRequest(ack_id="my ack id", seconds=10)] + ) def test_maintain_leases_no_ack_ids(): @@ -155,7 +144,7 @@ def test_maintain_leases_no_ack_ids(): manager.dispatcher.modify_ack_deadline.assert_not_called() -@mock.patch('time.time', autospec=True) +@mock.patch("time.time", autospec=True) def test_maintain_leases_outdated_items(time): manager = create_manager() leaser_ = leaser.Leaser(manager) @@ -163,13 +152,11 @@ def test_maintain_leases_outdated_items(time): # Add these items at the beginning of the timeline time.return_value = 0 - leaser_.add([ - requests.LeaseRequest(ack_id='ack1', byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) # Add another item at towards end of the timeline time.return_value = manager.flow_control.max_lease_duration - 1 - leaser_.add([ - requests.LeaseRequest(ack_id='ack2', byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack2", byte_size=50)]) # Now 
make sure time reports that we are at the end of our timeline. time.return_value = manager.flow_control.max_lease_duration + 1 @@ -177,37 +164,37 @@ def test_maintain_leases_outdated_items(time): leaser_.maintain_leases() # Only ack2 should be renewed. ack1 should've been dropped - manager.dispatcher.modify_ack_deadline.assert_called_once_with([ - requests.ModAckRequest( - ack_id='ack2', - seconds=10, - ) - ]) - manager.dispatcher.drop.assert_called_once_with([ - requests.DropRequest(ack_id='ack1', byte_size=50) - ]) - - -@mock.patch('threading.Thread', autospec=True) + manager.dispatcher.modify_ack_deadline.assert_called_once_with( + [requests.ModAckRequest(ack_id="ack2", seconds=10)] + ) + manager.dispatcher.drop.assert_called_once_with( + [requests.DropRequest(ack_id="ack1", byte_size=50)] + ) + + +@mock.patch("threading.Thread", autospec=True) def test_start(thread): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) leaser_ = leaser.Leaser(manager) leaser_.start() thread.assert_called_once_with( - name=leaser._LEASE_WORKER_NAME, target=leaser_.maintain_leases) + name=leaser._LEASE_WORKER_NAME, target=leaser_.maintain_leases + ) thread.return_value.start.assert_called_once() assert leaser_._thread is not None -@mock.patch('threading.Thread', autospec=True) +@mock.patch("threading.Thread", autospec=True) def test_start_already_started(thread): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) leaser_ = leaser.Leaser(manager) leaser_._thread = mock.sentinel.thread @@ -219,7 +206,8 @@ def test_start_already_started(thread): def test_stop(): manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True) + streaming_pull_manager.StreamingPullManager, instance=True + ) leaser_ = leaser.Leaser(manager) thread = 
mock.create_autospec(threading.Thread, instance=True) leaser_._thread = thread diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index bb87dec3518c..98a946ae75c6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -29,51 +29,53 @@ RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc) RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 PUBLISHED_MICROS = 123456 -PUBLISHED = RECEIVED + datetime.timedelta( - days=1, microseconds=PUBLISHED_MICROS) +PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id='ACKID', **attrs): - with mock.patch.object(message.Message, 'lease') as lease: - with mock.patch.object(time, 'time') as time_: +def create_message(data, ack_id="ACKID", **attrs): + with mock.patch.object(message.Message, "lease") as lease: + with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS - msg = message.Message(types.PubsubMessage( - attributes=attrs, - data=data, - message_id='message_id', - publish_time=timestamp_pb2.Timestamp( - seconds=PUBLISHED_SECONDS, - nanos=PUBLISHED_MICROS * 1000, + msg = message.Message( + types.PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 + ), ), - ), ack_id, queue.Queue()) + ack_id, + queue.Queue(), + ) lease.assert_called_once_with() return msg def test_attributes(): - msg = create_message(b'foo', baz='bacon', spam='eggs') - assert msg.attributes == {'baz': 'bacon', 'spam': 'eggs'} + msg = create_message(b"foo", baz="bacon", spam="eggs") + assert msg.attributes == {"baz": 
"bacon", "spam": "eggs"} def test_data(): - msg = create_message(b'foo') - assert msg.data == b'foo' + msg = create_message(b"foo") + assert msg.data == b"foo" def test_size(): - msg = create_message(b'foo') + msg = create_message(b"foo") assert msg.size == 30 # payload + protobuf overhead def test_ack_id(): - ack_id = 'MY-ACK-ID' - msg = create_message(b'foo', ack_id=ack_id) + ack_id = "MY-ACK-ID" + msg = create_message(b"foo", ack_id=ack_id) assert msg.ack_id == ack_id def test_publish_time(): - msg = create_message(b'foo') + msg = create_message(b"foo") assert msg.publish_time == PUBLISHED @@ -101,72 +103,70 @@ def check_call_types(mock, *args, **kwargs): def test_ack(): - msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(msg._request_queue, 'put') as put: + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: msg.ack() - put.assert_called_once_with(requests.AckRequest( - ack_id='bogus_ack_id', - byte_size=30, - time_to_ack=mock.ANY, - )) + put.assert_called_once_with( + requests.AckRequest( + ack_id="bogus_ack_id", byte_size=30, time_to_ack=mock.ANY + ) + ) check_call_types(put, requests.AckRequest) def test_drop(): - msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(msg._request_queue, 'put') as put: + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: msg.drop() - put.assert_called_once_with(requests.DropRequest( - ack_id='bogus_ack_id', - byte_size=30, - )) + put.assert_called_once_with( + requests.DropRequest(ack_id="bogus_ack_id", byte_size=30) + ) check_call_types(put, requests.DropRequest) def test_lease(): - msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(msg._request_queue, 'put') as put: + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: msg.lease() - 
put.assert_called_once_with(requests.LeaseRequest( - ack_id='bogus_ack_id', - byte_size=30, - )) + put.assert_called_once_with( + requests.LeaseRequest(ack_id="bogus_ack_id", byte_size=30) + ) check_call_types(put, requests.LeaseRequest) def test_modify_ack_deadline(): - msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(msg._request_queue, 'put') as put: + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: msg.modify_ack_deadline(60) - put.assert_called_once_with(requests.ModAckRequest( - ack_id='bogus_ack_id', - seconds=60, - )) + put.assert_called_once_with( + requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60) + ) check_call_types(put, requests.ModAckRequest) def test_nack(): - msg = create_message(b'foo', ack_id='bogus_ack_id') - with mock.patch.object(msg._request_queue, 'put') as put: + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: msg.nack() - put.assert_called_once_with(requests.NackRequest( - ack_id='bogus_ack_id', - byte_size=30, - )) + put.assert_called_once_with( + requests.NackRequest(ack_id="bogus_ack_id", byte_size=30) + ) check_call_types(put, requests.NackRequest) def test_repr(): - data = b'foo' - msg = create_message(data, snow='cones', orange='juice') - data_line = ' data: {!r}'.format(data) - expected_repr = '\n'.join(( - 'Message {', - data_line, - ' attributes: {', - ' "orange": "juice",', - ' "snow": "cones"', - ' }', - '}', - )) + data = b"foo" + msg = create_message(data, snow="cones", orange="juice") + data_line = " data: {!r}".format(data) + expected_repr = "\n".join( + ( + "Message {", + data_line, + " attributes: {", + ' "orange": "juice",', + ' "snow": "cones"', + " }", + "}", + ) + ) assert repr(msg) == expected_repr diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index ddd40637e15e..2e1561db022e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -29,8 +29,7 @@ def test_constructor_defaults(): def test_constructor_options(): - scheduler_ = scheduler.ThreadScheduler( - executor=mock.sentinel.executor) + scheduler_ = scheduler.ThreadScheduler(executor=mock.sentinel.executor) assert scheduler_._executor == mock.sentinel.executor @@ -45,9 +44,9 @@ def callback(*args, **kwargs): scheduler_ = scheduler.ThreadScheduler() - scheduler_.schedule(callback, 'arg1', kwarg1='meep') + scheduler_.schedule(callback, "arg1", kwarg1="meep") called.wait() scheduler_.shutdown() - assert called_with == [(('arg1',), {'kwarg1': 'meep'})] + assert called_with == [(("arg1",), {"kwarg1": "meep"})] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 924fde56ea70..f5f7354f15d5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -32,14 +32,20 @@ import grpc -@pytest.mark.parametrize('exception,expected_cls', [ - (ValueError('meep'), ValueError), - (mock.create_autospec(grpc.RpcError, instance=True), - exceptions.GoogleAPICallError), -]) +@pytest.mark.parametrize( + "exception,expected_cls", + [ + (ValueError("meep"), ValueError), + ( + mock.create_autospec(grpc.RpcError, instance=True), + exceptions.GoogleAPICallError, + ), + ], +) def test__maybe_wrap_exception(exception, expected_cls): assert isinstance( - streaming_pull_manager._maybe_wrap_exception(exception), expected_cls) + streaming_pull_manager._maybe_wrap_exception(exception), 
expected_cls + ) def test__wrap_callback_errors_no_error(): @@ -54,7 +60,7 @@ def test__wrap_callback_errors_no_error(): def test__wrap_callback_errors_error(): msg = mock.create_autospec(message.Message, instance=True) - callback = mock.Mock(side_effect=ValueError('meep')) + callback = mock.Mock(side_effect=ValueError("meep")) streaming_pull_manager._wrap_callback_errors(callback, msg) @@ -63,8 +69,8 @@ def test__wrap_callback_errors_error(): def test_constructor_and_default_state(): manager = streaming_pull_manager.StreamingPullManager( - mock.sentinel.client, - mock.sentinel.subscription) + mock.sentinel.client, mock.sentinel.subscription + ) # Public state assert manager.is_active is False @@ -86,7 +92,8 @@ def test_constructor_with_options(): mock.sentinel.client, mock.sentinel.subscription, flow_control=mock.sentinel.flow_control, - scheduler=mock.sentinel.scheduler) + scheduler=mock.sentinel.scheduler, + ) assert manager.flow_control == mock.sentinel.flow_control assert manager._scheduler == mock.sentinel.scheduler @@ -96,10 +103,8 @@ def make_manager(**kwargs): client_ = mock.create_autospec(client.Client, instance=True) scheduler_ = mock.create_autospec(scheduler.Scheduler, instance=True) return streaming_pull_manager.StreamingPullManager( - client_, - 'subscription-name', - scheduler=scheduler_, - **kwargs) + client_, "subscription-name", scheduler=scheduler_, **kwargs + ) def test_ack_deadline(): @@ -113,7 +118,8 @@ def test_ack_deadline(): def test_maybe_pause_consumer_wo_consumer_set(): manager = make_manager( - flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) manager.maybe_pause_consumer() # no raise # Ensure load > 1 _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) @@ -124,27 +130,27 @@ def test_maybe_pause_consumer_wo_consumer_set(): def test_lease_load_and_pause(): manager = make_manager( - flow_control=types.FlowControl(max_messages=10, 
max_bytes=1000)) + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) manager._leaser = leaser.Leaser(manager) - manager._consumer = mock.create_autospec( - bidi.BackgroundConsumer, instance=True) + manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_paused = False # This should mean that our messages count is at 10%, and our bytes # are at 15%; load should return the higher (0.15), and shouldn't cause # the consumer to pause. - manager.leaser.add([requests.LeaseRequest(ack_id='one', byte_size=150)]) + manager.leaser.add([requests.LeaseRequest(ack_id="one", byte_size=150)]) assert manager.load == 0.15 manager.maybe_pause_consumer() manager._consumer.pause.assert_not_called() # After this message is added, the messages should be higher at 20% # (versus 16% for bytes). - manager.leaser.add([requests.LeaseRequest(ack_id='two', byte_size=10)]) + manager.leaser.add([requests.LeaseRequest(ack_id="two", byte_size=10)]) assert manager.load == 0.2 # Returning a number above 100% is fine, and it should cause this to pause. - manager.leaser.add([requests.LeaseRequest(ack_id='three', byte_size=1000)]) + manager.leaser.add([requests.LeaseRequest(ack_id="three", byte_size=1000)]) assert manager.load == 1.16 manager.maybe_pause_consumer() manager._consumer.pause.assert_called_once() @@ -152,16 +158,19 @@ def test_lease_load_and_pause(): def test_drop_and_resume(): manager = make_manager( - flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) manager._leaser = leaser.Leaser(manager) - manager._consumer = mock.create_autospec( - bidi.BackgroundConsumer, instance=True) + manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_paused = True # Add several messages until we're over the load threshold. 
- manager.leaser.add([ - requests.LeaseRequest(ack_id='one', byte_size=750), - requests.LeaseRequest(ack_id='two', byte_size=250)]) + manager.leaser.add( + [ + requests.LeaseRequest(ack_id="one", byte_size=750), + requests.LeaseRequest(ack_id="two", byte_size=250), + ] + ) assert manager.load == 1.0 @@ -171,16 +180,14 @@ def test_drop_and_resume(): # Drop the 200 byte message, which should put us under the resume # threshold. - manager.leaser.remove([ - requests.DropRequest(ack_id='two', byte_size=250)]) + manager.leaser.remove([requests.DropRequest(ack_id="two", byte_size=250)]) manager.maybe_resume_consumer() manager._consumer.resume.assert_called_once() def test_resume_not_paused(): manager = make_manager() - manager._consumer = mock.create_autospec( - bidi.BackgroundConsumer, instance=True) + manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_paused = False # Resuming should have no effect is the consumer is not actually paused. @@ -190,7 +197,8 @@ def test_resume_not_paused(): def test_maybe_resume_consumer_wo_consumer_set(): manager = make_manager( - flow_control=types.FlowControl(max_messages=10, max_bytes=1000)) + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) manager.maybe_resume_consumer() # no raise @@ -198,22 +206,33 @@ def test_send_unary(): manager = make_manager() manager._UNARY_REQUESTS = True - manager.send(types.StreamingPullRequest( - ack_ids=['ack_id1', 'ack_id2'], - modify_deadline_ack_ids=['ack_id3', 'ack_id4', 'ack_id5'], - modify_deadline_seconds=[10, 20, 20])) + manager.send( + types.StreamingPullRequest( + ack_ids=["ack_id1", "ack_id2"], + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ) + ) manager._client.acknowledge.assert_called_once_with( - subscription=manager._subscription, ack_ids=['ack_id1', 'ack_id2']) + subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] + ) - 
manager._client.modify_ack_deadline.assert_has_calls([ - mock.call( - subscription=manager._subscription, - ack_ids=['ack_id3'], ack_deadline_seconds=10), - mock.call( - subscription=manager._subscription, - ack_ids=['ack_id4', 'ack_id5'], ack_deadline_seconds=20), - ], any_order=True) + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3"], + ack_deadline_seconds=10, + ), + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id4", "ack_id5"], + ack_deadline_seconds=20, + ), + ], + any_order=True, + ) def test_send_unary_empty(): @@ -232,13 +251,12 @@ def test_send_unary_error(caplog): manager = make_manager() manager._UNARY_REQUESTS = True - error = exceptions.GoogleAPICallError('The front fell off') + error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error - manager.send(types.StreamingPullRequest( - ack_ids=['ack_id1', 'ack_id2'])) + manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) - assert 'The front fell off' in caplog.text + assert "The front fell off" in caplog.text def test_send_streaming(): @@ -271,24 +289,16 @@ def test_heartbeat_inactive(): manager._rpc.send.assert_not_called() +@mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) +@mock.patch("google.api_core.bidi.BackgroundConsumer", autospec=True) +@mock.patch("google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser", autospec=True) @mock.patch( - 'google.api_core.bidi.ResumableBidiRpc', - autospec=True) -@mock.patch( - 'google.api_core.bidi.BackgroundConsumer', - autospec=True) -@mock.patch( - 'google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser', - autospec=True) + "google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher", autospec=True +) @mock.patch( - 'google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher', - autospec=True) -@mock.patch( - 
'google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater', - autospec=True) -def test_open( - heartbeater, dispatcher, leaser, background_consumer, - resumable_bidi_rpc): + "google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True +) +def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): manager = make_manager() manager.open(mock.sentinel.callback) @@ -305,17 +315,18 @@ def test_open( leaser.return_value.start.assert_called_once() assert manager.leaser == leaser.return_value - background_consumer.assert_called_once_with( - manager._rpc, manager._on_response) + background_consumer.assert_called_once_with(manager._rpc, manager._on_response) background_consumer.return_value.start.assert_called_once() assert manager._consumer == background_consumer.return_value resumable_bidi_rpc.assert_called_once_with( start_rpc=manager._client.api.streaming_pull, initial_request=manager._get_initial_request, - should_recover=manager._should_recover) + should_recover=manager._should_recover, + ) resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( - manager._on_rpc_done) + manager._on_rpc_done + ) assert manager._rpc == resumable_bidi_rpc.return_value manager._consumer.is_active = True @@ -324,11 +335,10 @@ def test_open( def test_open_already_active(): manager = make_manager() - manager._consumer = mock.create_autospec( - bidi.BackgroundConsumer, instance=True) + manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_active = True - with pytest.raises(ValueError, match='already open'): + with pytest.raises(ValueError, match="already open"): manager.open(mock.sentinel.callback) @@ -336,30 +346,32 @@ def test_open_has_been_closed(): manager = make_manager() manager._closed = True - with pytest.raises(ValueError, match='closed'): + with pytest.raises(ValueError, match="closed"): manager.open(mock.sentinel.callback) def make_running_manager(): 
manager = make_manager() - manager._consumer = mock.create_autospec( - bidi.BackgroundConsumer, instance=True) + manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_active = True - manager._dispatcher = mock.create_autospec( - dispatcher.Dispatcher, instance=True) - manager._leaser = mock.create_autospec( - leaser.Leaser, instance=True) - manager._heartbeater = mock.create_autospec( - heartbeater.Heartbeater, instance=True) + manager._dispatcher = mock.create_autospec(dispatcher.Dispatcher, instance=True) + manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) + manager._heartbeater = mock.create_autospec(heartbeater.Heartbeater, instance=True) return ( - manager, manager._consumer, manager._dispatcher, manager._leaser, - manager._heartbeater, manager._scheduler) + manager, + manager._consumer, + manager._dispatcher, + manager._leaser, + manager._heartbeater, + manager._scheduler, + ) def test_close(): manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( - make_running_manager()) + make_running_manager() + ) manager.close() @@ -374,7 +386,8 @@ def test_close(): def test_close_inactive_consumer(): manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( - make_running_manager()) + make_running_manager() + ) consumer.is_active = False manager.close() @@ -401,23 +414,22 @@ def test_close_callbacks(): callback = mock.Mock() manager.add_close_callback(callback) - manager.close(reason='meep') + manager.close(reason="meep") - callback.assert_called_once_with(manager, 'meep') + callback.assert_called_once_with(manager, "meep") def test__get_initial_request(): manager = make_manager() - manager._leaser = mock.create_autospec( - leaser.Leaser, instance=True) - manager._leaser.ack_ids = ['1', '2'] + manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) + manager._leaser.ack_ids = ["1", "2"] initial_request = manager._get_initial_request() assert isinstance(initial_request, 
types.StreamingPullRequest) - assert initial_request.subscription == 'subscription-name' + assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 10 - assert initial_request.modify_deadline_ack_ids == ['1', '2'] + assert initial_request.modify_deadline_ack_ids == ["1", "2"] assert initial_request.modify_deadline_seconds == [10, 10] @@ -428,7 +440,7 @@ def test__get_initial_request_wo_leaser(): initial_request = manager._get_initial_request() assert isinstance(initial_request, types.StreamingPullRequest) - assert initial_request.subscription == 'subscription-name' + assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 10 assert initial_request.modify_deadline_ack_ids == [] assert initial_request.modify_deadline_seconds == [] @@ -442,14 +454,12 @@ def test_on_response(): response = types.StreamingPullResponse( received_messages=[ types.ReceivedMessage( - ack_id='fack', - message=types.PubsubMessage(data=b'foo', message_id='1') + ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") ), types.ReceivedMessage( - ack_id='back', - message=types.PubsubMessage(data=b'bar', message_id='2') + ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") ), - ], + ] ) # Actually run the method and prove that modack and schedule @@ -457,8 +467,7 @@ def test_on_response(): manager._on_response(response) dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest('fack', 10), - requests.ModAckRequest('back', 10)] + [requests.ModAckRequest("fack", 10), requests.ModAckRequest("back", 10)] ) schedule_calls = scheduler.schedule.mock_calls @@ -470,26 +479,22 @@ def test_on_response(): def test_retryable_stream_errors(): # Make sure the config matches our hard-coded tuple of exceptions. 
- interfaces = subscriber_client_config.config['interfaces'] - retry_codes = interfaces['google.pubsub.v1.Subscriber']['retry_codes'] - idempotent = retry_codes['idempotent'] + interfaces = subscriber_client_config.config["interfaces"] + retry_codes = interfaces["google.pubsub.v1.Subscriber"]["retry_codes"] + idempotent = retry_codes["idempotent"] - status_codes = tuple( - getattr(grpc.StatusCode, name, None) - for name in idempotent - ) + status_codes = tuple(getattr(grpc.StatusCode, name, None) for name in idempotent) expected = tuple( exceptions.exception_class_for_grpc_status(status_code) for status_code in status_codes ) - assert set(expected).issubset( - set(streaming_pull_manager._RETRYABLE_STREAM_ERRORS)) + assert set(expected).issubset(set(streaming_pull_manager._RETRYABLE_STREAM_ERRORS)) def test__should_recover_true(): manager = make_manager() - details = 'UNAVAILABLE. Service taking nap.' + details = "UNAVAILABLE. Service taking nap." exc = exceptions.ServiceUnavailable(details) assert manager._should_recover(exc) is True @@ -498,17 +503,17 @@ def test__should_recover_true(): def test__should_recover_false(): manager = make_manager() - exc = TypeError('wahhhhhh') + exc = TypeError("wahhhhhh") assert manager._should_recover(exc) is False -@mock.patch('threading.Thread', autospec=True) +@mock.patch("threading.Thread", autospec=True) def test__on_rpc_done(thread): manager = make_manager() manager._on_rpc_done(mock.sentinel.error) thread.assert_called_once_with( - name=mock.ANY, target=manager.close, - kwargs={'reason': mock.sentinel.error}) + name=mock.ANY, target=manager.close, kwargs={"reason": mock.sentinel.error} + ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 86297d31cd96..5acd5b6f8dd7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -27,7 +27,7 @@ def test_init(): def test_init_emulator(monkeypatch): - monkeypatch.setenv('PUBSUB_EMULATOR_HOST', '/baz/bacon/') + monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon/") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. client = subscriber.Client() @@ -37,38 +37,41 @@ def test_init_emulator(monkeypatch): # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. channel = client.api.transport.pull._channel - assert channel.target().decode('utf8') == '/baz/bacon/' + assert channel.target().decode("utf8") == "/baz/bacon/" def test_class_method_factory(): patch = mock.patch( - 'google.oauth2.service_account.Credentials.from_service_account_file') + "google.oauth2.service_account.Credentials.from_service_account_file" + ) with patch: - client = subscriber.Client.from_service_account_file('filename.json') + client = subscriber.Client.from_service_account_file("filename.json") assert isinstance(client, subscriber.Client) @mock.patch( - 'google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager.' - 'StreamingPullManager.open', autospec=True) + "google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager." 
+ "StreamingPullManager.open", + autospec=True, +) def test_subscribe(manager_open): creds = mock.Mock(spec=credentials.Credentials) client = subscriber.Client(credentials=creds) - future = client.subscribe( - 'sub_name_a', callback=mock.sentinel.callback) + future = client.subscribe("sub_name_a", callback=mock.sentinel.callback) assert isinstance(future, futures.StreamingPullFuture) - assert future._manager._subscription == 'sub_name_a' - manager_open.assert_called_once_with( - mock.ANY, mock.sentinel.callback) + assert future._manager._subscription == "sub_name_a" + manager_open.assert_called_once_with(mock.ANY, mock.sentinel.callback) @mock.patch( - 'google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager.' - 'StreamingPullManager.open', autospec=True) + "google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager." + "StreamingPullManager.open", + autospec=True, +) def test_subscribe_options(manager_open): creds = mock.Mock(spec=credentials.Credentials) client = subscriber.Client(credentials=creds) @@ -76,14 +79,14 @@ def test_subscribe_options(manager_open): scheduler = mock.sentinel.scheduler future = client.subscribe( - 'sub_name_a', + "sub_name_a", callback=mock.sentinel.callback, flow_control=flow_control, - scheduler=scheduler) + scheduler=scheduler, + ) assert isinstance(future, futures.StreamingPullFuture) - assert future._manager._subscription == 'sub_name_a' + assert future._manager._subscription == "sub_name_a" assert future._manager.flow_control == flow_control assert future._manager._scheduler == scheduler - manager_open.assert_called_once_with( - mock.ANY, mock.sentinel.callback) + manager_open.assert_called_once_with(mock.ANY, mock.sentinel.callback) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py index 9dd77b506267..11349d5d480a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py @@ -26,7 +26,7 @@ def _future(*args, **kwargs): def test_constructor_defaults(): - with mock.patch.object(threading, 'Event', autospec=True) as Event: + with mock.patch.object(threading, "Event", autospec=True) as Event: future = _future() assert future._result == futures.Future._SENTINEL @@ -58,26 +58,26 @@ def test_cancelled(): def test_running(): future = _future() assert future.running() is True - future.set_result('foobar') + future.set_result("foobar") assert future.running() is False def test_done(): future = _future() assert future.done() is False - future.set_result('12345') + future.set_result("12345") assert future.done() is True def test_exception_no_error(): future = _future() - future.set_result('12345') + future.set_result("12345") assert future.exception() is None def test_exception_with_error(): future = _future() - error = RuntimeError('Something really bad happened.') + error = RuntimeError("Something really bad happened.") future.set_exception(error) # Make sure that the exception that is returned is the batch's error. 
@@ -97,13 +97,13 @@ def test_exception_timeout(): def test_result_no_error(): future = _future() - future.set_result('42') - assert future.result() == '42' + future.set_result("42") + assert future.result() == "42" def test_result_with_error(): future = _future() - future.set_exception(RuntimeError('Something really bad happened.')) + future.set_exception(RuntimeError("Something really bad happened.")) with pytest.raises(RuntimeError): future.result() @@ -119,7 +119,7 @@ def test_add_done_callback_pending_batch(): def test_add_done_callback_completed_batch(): future = _future() - future.set_result('12345') + future.set_result("12345") callback = mock.Mock(spec=()) future.add_done_callback(callback) callback.assert_called_once_with(future) @@ -130,19 +130,19 @@ def test_trigger(): callback = mock.Mock(spec=()) future.add_done_callback(callback) assert callback.call_count == 0 - future.set_result('12345') + future.set_result("12345") callback.assert_called_once_with(future) def test_set_result_once_only(): future = _future() - future.set_result('12345') + future.set_result("12345") with pytest.raises(RuntimeError): - future.set_result('67890') + future.set_result("67890") def test_set_exception_once_only(): future = _future() - future.set_exception(ValueError('wah wah')) + future.set_exception(ValueError("wah wah")) with pytest.raises(RuntimeError): - future.set_exception(TypeError('other wah wah')) + future.set_exception(TypeError("other wah wah")) From afcb5630fe03091ff2c6021f27b200d8666bf2e1 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 13:13:54 -0800 Subject: [PATCH 0319/1197] Run black at end of synth.py (#6698) * Run black at end of synth.py * blacken logging --- packages/google-cloud-pubsub/synth.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 0289f096dddd..50fc92a07a9a 100644 --- a/packages/google-cloud-pubsub/synth.py +++ 
b/packages/google-cloud-pubsub/synth.py @@ -111,3 +111,5 @@ # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library(unit_cov_level=97, cov_level=100) s.move(templated_files) + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 9ac64bb3cf78f7341b49ab9a62c120ea522f505a Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 29 Nov 2018 13:23:53 -0800 Subject: [PATCH 0320/1197] omit local deps (#6701) --- packages/google-cloud-pubsub/.coveragerc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 51fec440cebf..6b9ab9da4a1b 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -14,5 +14,5 @@ exclude_lines = omit = */gapic/*.py */proto/*.py - */google-cloud-python/core/*.py + */core/*.py */site-packages/*.py \ No newline at end of file From 066476b285140b287e63b2519c7238dfce80fda1 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 30 Nov 2018 15:25:18 -0800 Subject: [PATCH 0321/1197] blacken all gen'd libs (#6792) * blacken all gen'd libs --- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 5740 ++++++++++------- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 714 +- 2 files changed, 3859 insertions(+), 2595 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 6f4a75859082..50f0a4b791a9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -2,12 +2,14 @@ # source: google/cloud/pubsub_v1/proto/pubsub.proto import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) + +_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf 
import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -21,1878 +23,2895 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name='google/cloud/pubsub_v1/proto/pubsub.proto', - package='google.pubsub.v1', - syntax='proto3', - serialized_pb=_b('\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t\"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 
\x03(\t\"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t\"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32\".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 
\x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05\"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage\"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 
\x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t\"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target\"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic\")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic\"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse\"2\x82\xd3\xe4\x93\x02,\"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse\"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse\"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty\"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription\"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription\"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse\".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty\"5\x82\xd3\xe4\x93
\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty\"J\x82\xd3\xe4\x93\x02\x44\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse\"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse\"*\x82\xd3\xe4\x93\x02$\x12\"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot\"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse\"=\x82\xd3\xe4\x93\x02\x37\"2/v
1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\x92\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1b\x06proto3') - , - dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) - - + name="google/cloud/pubsub_v1/proto/pubsub.proto", + package="google.pubsub.v1", + syntax="proto3", + serialized_pb=_b( + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 
\x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{
subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/su
bscriptions/*}:seek:\x01*B\x92\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1b\x06proto3' + ), + dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, + google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + ], +) _MESSAGESTORAGEPOLICY = _descriptor.Descriptor( - name='MessageStoragePolicy', - full_name='google.pubsub.v1.MessageStoragePolicy', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='allowed_persistence_regions', full_name='google.pubsub.v1.MessageStoragePolicy.allowed_persistence_regions', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=221, - serialized_end=280, + name="MessageStoragePolicy", + full_name="google.pubsub.v1.MessageStoragePolicy", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="allowed_persistence_regions", + full_name="google.pubsub.v1.MessageStoragePolicy.allowed_persistence_regions", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + 
extension_ranges=[], + oneofs=[], + serialized_start=221, + serialized_end=280, ) _TOPIC_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.pubsub.v1.Topic.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.pubsub.v1.Topic.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.pubsub.v1.Topic.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=431, - serialized_end=476, + name="LabelsEntry", + full_name="google.pubsub.v1.Topic.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.pubsub.v1.Topic.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.pubsub.v1.Topic.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=431, + serialized_end=476, ) _TOPIC = _descriptor.Descriptor( - name='Topic', - full_name='google.pubsub.v1.Topic', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.pubsub.v1.Topic.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.pubsub.v1.Topic.labels', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='message_storage_policy', full_name='google.pubsub.v1.Topic.message_storage_policy', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_TOPIC_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=283, - serialized_end=476, + name="Topic", + full_name="google.pubsub.v1.Topic", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + 
full_name="google.pubsub.v1.Topic.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.pubsub.v1.Topic.labels", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="message_storage_policy", + full_name="google.pubsub.v1.Topic.message_storage_policy", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_TOPIC_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=283, + serialized_end=476, ) _PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( - name='AttributesEntry', - full_name='google.pubsub.v1.PubsubMessage.AttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.pubsub.v1.PubsubMessage.AttributesEntry.value', index=1, - number=2, type=9, cpp_type=9, 
label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=649, - serialized_end=698, + name="AttributesEntry", + full_name="google.pubsub.v1.PubsubMessage.AttributesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=649, + serialized_end=698, ) _PUBSUBMESSAGE = _descriptor.Descriptor( - name='PubsubMessage', - full_name='google.pubsub.v1.PubsubMessage', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='data', full_name='google.pubsub.v1.PubsubMessage.data', index=0, - number=1, type=12, 
cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='attributes', full_name='google.pubsub.v1.PubsubMessage.attributes', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='message_id', full_name='google.pubsub.v1.PubsubMessage.message_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='publish_time', full_name='google.pubsub.v1.PubsubMessage.publish_time', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=479, - serialized_end=698, + name="PubsubMessage", + full_name="google.pubsub.v1.PubsubMessage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="data", + full_name="google.pubsub.v1.PubsubMessage.data", + index=0, + number=1, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b(""), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + 
_descriptor.FieldDescriptor( + name="attributes", + full_name="google.pubsub.v1.PubsubMessage.attributes", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="message_id", + full_name="google.pubsub.v1.PubsubMessage.message_id", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="publish_time", + full_name="google.pubsub.v1.PubsubMessage.publish_time", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=479, + serialized_end=698, ) _GETTOPICREQUEST = _descriptor.Descriptor( - name='GetTopicRequest', - full_name='google.pubsub.v1.GetTopicRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.GetTopicRequest.topic', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - 
syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=700, - serialized_end=732, + name="GetTopicRequest", + full_name="google.pubsub.v1.GetTopicRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.GetTopicRequest.topic", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=700, + serialized_end=732, ) _UPDATETOPICREQUEST = _descriptor.Descriptor( - name='UpdateTopicRequest', - full_name='google.pubsub.v1.UpdateTopicRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.UpdateTopicRequest.topic', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.pubsub.v1.UpdateTopicRequest.update_mask', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=734, - serialized_end=843, + name="UpdateTopicRequest", + full_name="google.pubsub.v1.UpdateTopicRequest", + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.UpdateTopicRequest.topic", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.pubsub.v1.UpdateTopicRequest.update_mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=734, + serialized_end=843, ) _PUBLISHREQUEST = _descriptor.Descriptor( - name='PublishRequest', - full_name='google.pubsub.v1.PublishRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.PublishRequest.topic', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='messages', full_name='google.pubsub.v1.PublishRequest.messages', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', 
- extension_ranges=[], - oneofs=[ - ], - serialized_start=845, - serialized_end=927, + name="PublishRequest", + full_name="google.pubsub.v1.PublishRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.PublishRequest.topic", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="messages", + full_name="google.pubsub.v1.PublishRequest.messages", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=845, + serialized_end=927, ) _PUBLISHRESPONSE = _descriptor.Descriptor( - name='PublishResponse', - full_name='google.pubsub.v1.PublishResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='message_ids', full_name='google.pubsub.v1.PublishResponse.message_ids', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=929, - serialized_end=967, + name="PublishResponse", + full_name="google.pubsub.v1.PublishResponse", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="message_ids", + full_name="google.pubsub.v1.PublishResponse.message_ids", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=929, + serialized_end=967, ) _LISTTOPICSREQUEST = _descriptor.Descriptor( - name='ListTopicsRequest', - full_name='google.pubsub.v1.ListTopicsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project', full_name='google.pubsub.v1.ListTopicsRequest.project', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.pubsub.v1.ListTopicsRequest.page_size', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.pubsub.v1.ListTopicsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - 
extension_ranges=[], - oneofs=[ - ], - serialized_start=969, - serialized_end=1044, + name="ListTopicsRequest", + full_name="google.pubsub.v1.ListTopicsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project", + full_name="google.pubsub.v1.ListTopicsRequest.project", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.pubsub.v1.ListTopicsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.pubsub.v1.ListTopicsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=969, + serialized_end=1044, ) _LISTTOPICSRESPONSE = _descriptor.Descriptor( - name='ListTopicsResponse', - full_name='google.pubsub.v1.ListTopicsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topics', full_name='google.pubsub.v1.ListTopicsResponse.topics', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.pubsub.v1.ListTopicsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1046, - serialized_end=1132, + name="ListTopicsResponse", + full_name="google.pubsub.v1.ListTopicsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topics", + full_name="google.pubsub.v1.ListTopicsResponse.topics", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.pubsub.v1.ListTopicsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1046, + serialized_end=1132, ) _LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name='ListTopicSubscriptionsRequest', - full_name='google.pubsub.v1.ListTopicSubscriptionsRequest', - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.topic', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_size', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1134, - serialized_end=1219, + name="ListTopicSubscriptionsRequest", + full_name="google.pubsub.v1.ListTopicSubscriptionsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.topic", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + 
full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1134, + serialized_end=1219, ) _LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name='ListTopicSubscriptionsResponse', - full_name='google.pubsub.v1.ListTopicSubscriptionsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscriptions', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1221, - serialized_end=1301, + name="ListTopicSubscriptionsResponse", + full_name="google.pubsub.v1.ListTopicSubscriptionsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscriptions", + full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1221, + serialized_end=1301, ) _LISTTOPICSNAPSHOTSREQUEST = _descriptor.Descriptor( - name='ListTopicSnapshotsRequest', - full_name='google.pubsub.v1.ListTopicSnapshotsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.ListTopicSnapshotsRequest.topic', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.pubsub.v1.ListTopicSnapshotsRequest.page_size', 
index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.pubsub.v1.ListTopicSnapshotsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1303, - serialized_end=1384, + name="ListTopicSnapshotsRequest", + full_name="google.pubsub.v1.ListTopicSnapshotsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.ListTopicSnapshotsRequest.topic", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, 
+ enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1303, + serialized_end=1384, ) _LISTTOPICSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name='ListTopicSnapshotsResponse', - full_name='google.pubsub.v1.ListTopicSnapshotsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='snapshots', full_name='google.pubsub.v1.ListTopicSnapshotsResponse.snapshots', index=0, - number=1, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.pubsub.v1.ListTopicSnapshotsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1386, - serialized_end=1458, + name="ListTopicSnapshotsResponse", + full_name="google.pubsub.v1.ListTopicSnapshotsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="snapshots", + full_name="google.pubsub.v1.ListTopicSnapshotsResponse.snapshots", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.pubsub.v1.ListTopicSnapshotsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1386, + serialized_end=1458, ) _DELETETOPICREQUEST = _descriptor.Descriptor( - name='DeleteTopicRequest', - full_name='google.pubsub.v1.DeleteTopicRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.DeleteTopicRequest.topic', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1460, - serialized_end=1495, + name="DeleteTopicRequest", + full_name="google.pubsub.v1.DeleteTopicRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.DeleteTopicRequest.topic", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + 
options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1460, + serialized_end=1495, ) _SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.pubsub.v1.Subscription.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.pubsub.v1.Subscription.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.pubsub.v1.Subscription.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=431, - serialized_end=476, + name="LabelsEntry", + full_name="google.pubsub.v1.Subscription.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.pubsub.v1.Subscription.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.pubsub.v1.Subscription.LabelsEntry.value", + index=1, + number=2, + 
type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=431, + serialized_end=476, ) _SUBSCRIPTION = _descriptor.Descriptor( - name='Subscription', - full_name='google.pubsub.v1.Subscription', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.pubsub.v1.Subscription.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.Subscription.topic', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='push_config', full_name='google.pubsub.v1.Subscription.push_config', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ack_deadline_seconds', full_name='google.pubsub.v1.Subscription.ack_deadline_seconds', index=3, - number=5, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='retain_acked_messages', full_name='google.pubsub.v1.Subscription.retain_acked_messages', index=4, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='message_retention_duration', full_name='google.pubsub.v1.Subscription.message_retention_duration', index=5, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.pubsub.v1.Subscription.labels', index=6, - number=9, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='expiration_policy', full_name='google.pubsub.v1.Subscription.expiration_policy', index=7, - number=11, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_SUBSCRIPTION_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1498, - serialized_end=1886, + name="Subscription", + full_name="google.pubsub.v1.Subscription", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.pubsub.v1.Subscription.name", + 
index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.Subscription.topic", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="push_config", + full_name="google.pubsub.v1.Subscription.push_config", + index=2, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ack_deadline_seconds", + full_name="google.pubsub.v1.Subscription.ack_deadline_seconds", + index=3, + number=5, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="retain_acked_messages", + full_name="google.pubsub.v1.Subscription.retain_acked_messages", + index=4, + number=7, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="message_retention_duration", + full_name="google.pubsub.v1.Subscription.message_retention_duration", + index=5, + number=8, + type=11, + cpp_type=10, + label=1, + 
has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.pubsub.v1.Subscription.labels", + index=6, + number=9, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="expiration_policy", + full_name="google.pubsub.v1.Subscription.expiration_policy", + index=7, + number=11, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_SUBSCRIPTION_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1498, + serialized_end=1886, ) _EXPIRATIONPOLICY = _descriptor.Descriptor( - name='ExpirationPolicy', - full_name='google.pubsub.v1.ExpirationPolicy', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='ttl', full_name='google.pubsub.v1.ExpirationPolicy.ttl', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1888, - serialized_end=1946, + name="ExpirationPolicy", + full_name="google.pubsub.v1.ExpirationPolicy", + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="ttl", + full_name="google.pubsub.v1.ExpirationPolicy.ttl", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1888, + serialized_end=1946, ) _PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( - name='AttributesEntry', - full_name='google.pubsub.v1.PushConfig.AttributesEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.pubsub.v1.PushConfig.AttributesEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.pubsub.v1.PushConfig.AttributesEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=649, - serialized_end=698, + name="AttributesEntry", + full_name="google.pubsub.v1.PushConfig.AttributesEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( 
+ name="key", + full_name="google.pubsub.v1.PushConfig.AttributesEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.pubsub.v1.PushConfig.AttributesEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=649, + serialized_end=698, ) _PUSHCONFIG = _descriptor.Descriptor( - name='PushConfig', - full_name='google.pubsub.v1.PushConfig', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='push_endpoint', full_name='google.pubsub.v1.PushConfig.push_endpoint', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='attributes', full_name='google.pubsub.v1.PushConfig.attributes', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, ], - enum_types=[ - ], - options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1949, - serialized_end=2101, + name="PushConfig", + full_name="google.pubsub.v1.PushConfig", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="push_endpoint", + full_name="google.pubsub.v1.PushConfig.push_endpoint", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="attributes", + full_name="google.pubsub.v1.PushConfig.attributes", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1949, + serialized_end=2101, ) _RECEIVEDMESSAGE = _descriptor.Descriptor( - name='ReceivedMessage', - full_name='google.pubsub.v1.ReceivedMessage', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='ack_id', full_name='google.pubsub.v1.ReceivedMessage.ack_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='message', full_name='google.pubsub.v1.ReceivedMessage.message', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2103, - serialized_end=2186, + name="ReceivedMessage", + full_name="google.pubsub.v1.ReceivedMessage", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="ack_id", + full_name="google.pubsub.v1.ReceivedMessage.ack_id", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="message", + full_name="google.pubsub.v1.ReceivedMessage.message", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2103, + serialized_end=2186, ) _GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name='GetSubscriptionRequest', - full_name='google.pubsub.v1.GetSubscriptionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.GetSubscriptionRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, 
file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2188, - serialized_end=2234, + name="GetSubscriptionRequest", + full_name="google.pubsub.v1.GetSubscriptionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.GetSubscriptionRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2188, + serialized_end=2234, ) _UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name='UpdateSubscriptionRequest', - full_name='google.pubsub.v1.UpdateSubscriptionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.UpdateSubscriptionRequest.subscription', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.pubsub.v1.UpdateSubscriptionRequest.update_mask', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2237, - serialized_end=2367, + name="UpdateSubscriptionRequest", + full_name="google.pubsub.v1.UpdateSubscriptionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.UpdateSubscriptionRequest.subscription", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.pubsub.v1.UpdateSubscriptionRequest.update_mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2237, + serialized_end=2367, ) _LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name='ListSubscriptionsRequest', - full_name='google.pubsub.v1.ListSubscriptionsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project', full_name='google.pubsub.v1.ListSubscriptionsRequest.project', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_size', index=1, - number=2, type=5, cpp_type=1, 
label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.pubsub.v1.ListSubscriptionsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2369, - serialized_end=2451, + name="ListSubscriptionsRequest", + full_name="google.pubsub.v1.ListSubscriptionsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project", + full_name="google.pubsub.v1.ListSubscriptionsRequest.project", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.pubsub.v1.ListSubscriptionsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.pubsub.v1.ListSubscriptionsRequest.page_token", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, 
+ is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2369, + serialized_end=2451, ) _LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name='ListSubscriptionsResponse', - full_name='google.pubsub.v1.ListSubscriptionsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscriptions', full_name='google.pubsub.v1.ListSubscriptionsResponse.subscriptions', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.pubsub.v1.ListSubscriptionsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2453, - serialized_end=2560, + name="ListSubscriptionsResponse", + full_name="google.pubsub.v1.ListSubscriptionsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscriptions", + full_name="google.pubsub.v1.ListSubscriptionsResponse.subscriptions", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + 
), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.pubsub.v1.ListSubscriptionsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2453, + serialized_end=2560, ) _DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name='DeleteSubscriptionRequest', - full_name='google.pubsub.v1.DeleteSubscriptionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.DeleteSubscriptionRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2562, - serialized_end=2611, + name="DeleteSubscriptionRequest", + full_name="google.pubsub.v1.DeleteSubscriptionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.DeleteSubscriptionRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + 
nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2562, + serialized_end=2611, ) _MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( - name='ModifyPushConfigRequest', - full_name='google.pubsub.v1.ModifyPushConfigRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.ModifyPushConfigRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='push_config', full_name='google.pubsub.v1.ModifyPushConfigRequest.push_config', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2613, - serialized_end=2711, + name="ModifyPushConfigRequest", + full_name="google.pubsub.v1.ModifyPushConfigRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.ModifyPushConfigRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="push_config", + 
full_name="google.pubsub.v1.ModifyPushConfigRequest.push_config", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2613, + serialized_end=2711, ) _PULLREQUEST = _descriptor.Descriptor( - name='PullRequest', - full_name='google.pubsub.v1.PullRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.PullRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='return_immediately', full_name='google.pubsub.v1.PullRequest.return_immediately', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='max_messages', full_name='google.pubsub.v1.PullRequest.max_messages', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2713, - serialized_end=2798, + name="PullRequest", + 
full_name="google.pubsub.v1.PullRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.PullRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="return_immediately", + full_name="google.pubsub.v1.PullRequest.return_immediately", + index=1, + number=2, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="max_messages", + full_name="google.pubsub.v1.PullRequest.max_messages", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2713, + serialized_end=2798, ) _PULLRESPONSE = _descriptor.Descriptor( - name='PullResponse', - full_name='google.pubsub.v1.PullResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='received_messages', full_name='google.pubsub.v1.PullResponse.received_messages', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - 
], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2800, - serialized_end=2876, + name="PullResponse", + full_name="google.pubsub.v1.PullResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="received_messages", + full_name="google.pubsub.v1.PullResponse.received_messages", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2800, + serialized_end=2876, ) _MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( - name='ModifyAckDeadlineRequest', - full_name='google.pubsub.v1.ModifyAckDeadlineRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ack_ids', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids', index=1, - number=4, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ack_deadline_seconds', full_name='google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds', index=2, - number=3, 
type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2878, - serialized_end=2973, + name="ModifyAckDeadlineRequest", + full_name="google.pubsub.v1.ModifyAckDeadlineRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.ModifyAckDeadlineRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ack_ids", + full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids", + index=1, + number=4, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ack_deadline_seconds", + full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2878, + serialized_end=2973, ) _ACKNOWLEDGEREQUEST = _descriptor.Descriptor( - 
name='AcknowledgeRequest', - full_name='google.pubsub.v1.AcknowledgeRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.AcknowledgeRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ack_ids', full_name='google.pubsub.v1.AcknowledgeRequest.ack_ids', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=2975, - serialized_end=3034, + name="AcknowledgeRequest", + full_name="google.pubsub.v1.AcknowledgeRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.AcknowledgeRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ack_ids", + full_name="google.pubsub.v1.AcknowledgeRequest.ack_ids", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + 
nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2975, + serialized_end=3034, ) _STREAMINGPULLREQUEST = _descriptor.Descriptor( - name='StreamingPullRequest', - full_name='google.pubsub.v1.StreamingPullRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.StreamingPullRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.ack_ids', index=1, - number=2, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='modify_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds', index=2, - number=3, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='modify_deadline_ack_ids', full_name='google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids', index=3, - number=4, type=9, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='stream_ack_deadline_seconds', full_name='google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds', index=4, - 
number=5, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3037, - serialized_end=3201, + name="StreamingPullRequest", + full_name="google.pubsub.v1.StreamingPullRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.StreamingPullRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="ack_ids", + full_name="google.pubsub.v1.StreamingPullRequest.ack_ids", + index=1, + number=2, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="modify_deadline_seconds", + full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds", + index=2, + number=3, + type=5, + cpp_type=1, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="modify_deadline_ack_ids", + full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids", + index=3, + number=4, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + 
message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="stream_ack_deadline_seconds", + full_name="google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds", + index=4, + number=5, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3037, + serialized_end=3201, ) _STREAMINGPULLRESPONSE = _descriptor.Descriptor( - name='StreamingPullResponse', - full_name='google.pubsub.v1.StreamingPullResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='received_messages', full_name='google.pubsub.v1.StreamingPullResponse.received_messages', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3203, - serialized_end=3288, + name="StreamingPullResponse", + full_name="google.pubsub.v1.StreamingPullResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="received_messages", + full_name="google.pubsub.v1.StreamingPullResponse.received_messages", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + 
is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3203, + serialized_end=3288, ) _CREATESNAPSHOTREQUEST_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.pubsub.v1.CreateSnapshotRequest.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=431, - serialized_end=476, + name="LabelsEntry", + full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + 
extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=431, + serialized_end=476, ) _CREATESNAPSHOTREQUEST = _descriptor.Descriptor( - name='CreateSnapshotRequest', - full_name='google.pubsub.v1.CreateSnapshotRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.pubsub.v1.CreateSnapshotRequest.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.CreateSnapshotRequest.subscription', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.pubsub.v1.CreateSnapshotRequest.labels', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], 
- extensions=[ - ], - nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY, ], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3291, - serialized_end=3466, + name="CreateSnapshotRequest", + full_name="google.pubsub.v1.CreateSnapshotRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.pubsub.v1.CreateSnapshotRequest.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.CreateSnapshotRequest.subscription", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.pubsub.v1.CreateSnapshotRequest.labels", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3291, + serialized_end=3466, ) _UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( - name='UpdateSnapshotRequest', - full_name='google.pubsub.v1.UpdateSnapshotRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name='snapshot', full_name='google.pubsub.v1.UpdateSnapshotRequest.snapshot', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='update_mask', full_name='google.pubsub.v1.UpdateSnapshotRequest.update_mask', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3468, - serialized_end=3586, + name="UpdateSnapshotRequest", + full_name="google.pubsub.v1.UpdateSnapshotRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="snapshot", + full_name="google.pubsub.v1.UpdateSnapshotRequest.snapshot", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="update_mask", + full_name="google.pubsub.v1.UpdateSnapshotRequest.update_mask", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3468, + 
serialized_end=3586, ) _SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( - name='LabelsEntry', - full_name='google.pubsub.v1.Snapshot.LabelsEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='google.pubsub.v1.Snapshot.LabelsEntry.key', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='value', full_name='google.pubsub.v1.Snapshot.LabelsEntry.value', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=431, - serialized_end=476, + name="LabelsEntry", + full_name="google.pubsub.v1.Snapshot.LabelsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="google.pubsub.v1.Snapshot.LabelsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="google.pubsub.v1.Snapshot.LabelsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=431, + serialized_end=476, ) _SNAPSHOT = _descriptor.Descriptor( - name='Snapshot', - full_name='google.pubsub.v1.Snapshot', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='google.pubsub.v1.Snapshot.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='topic', full_name='google.pubsub.v1.Snapshot.topic', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='expire_time', full_name='google.pubsub.v1.Snapshot.expire_time', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='labels', full_name='google.pubsub.v1.Snapshot.labels', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[_SNAPSHOT_LABELSENTRY, ], - enum_types=[ - ], - options=None, 
- is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3589, - serialized_end=3780, + name="Snapshot", + full_name="google.pubsub.v1.Snapshot", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="google.pubsub.v1.Snapshot.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="topic", + full_name="google.pubsub.v1.Snapshot.topic", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="expire_time", + full_name="google.pubsub.v1.Snapshot.expire_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="labels", + full_name="google.pubsub.v1.Snapshot.labels", + index=3, + number=4, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[_SNAPSHOT_LABELSENTRY], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3589, + serialized_end=3780, ) _GETSNAPSHOTREQUEST = _descriptor.Descriptor( - 
name='GetSnapshotRequest', - full_name='google.pubsub.v1.GetSnapshotRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='snapshot', full_name='google.pubsub.v1.GetSnapshotRequest.snapshot', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3782, - serialized_end=3820, + name="GetSnapshotRequest", + full_name="google.pubsub.v1.GetSnapshotRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="snapshot", + full_name="google.pubsub.v1.GetSnapshotRequest.snapshot", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3782, + serialized_end=3820, ) _LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( - name='ListSnapshotsRequest', - full_name='google.pubsub.v1.ListSnapshotsRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='project', full_name='google.pubsub.v1.ListSnapshotsRequest.project', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, 
file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_size', full_name='google.pubsub.v1.ListSnapshotsRequest.page_size', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='page_token', full_name='google.pubsub.v1.ListSnapshotsRequest.page_token', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3822, - serialized_end=3900, + name="ListSnapshotsRequest", + full_name="google.pubsub.v1.ListSnapshotsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="project", + full_name="google.pubsub.v1.ListSnapshotsRequest.project", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_size", + full_name="google.pubsub.v1.ListSnapshotsRequest.page_size", + index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="page_token", + full_name="google.pubsub.v1.ListSnapshotsRequest.page_token", + index=2, + number=3, + type=9, + 
cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3822, + serialized_end=3900, ) _LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name='ListSnapshotsResponse', - full_name='google.pubsub.v1.ListSnapshotsResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='snapshots', full_name='google.pubsub.v1.ListSnapshotsResponse.snapshots', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='next_page_token', full_name='google.pubsub.v1.ListSnapshotsResponse.next_page_token', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3902, - serialized_end=3997, + name="ListSnapshotsResponse", + full_name="google.pubsub.v1.ListSnapshotsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="snapshots", + full_name="google.pubsub.v1.ListSnapshotsResponse.snapshots", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + 
containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="next_page_token", + full_name="google.pubsub.v1.ListSnapshotsResponse.next_page_token", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3902, + serialized_end=3997, ) _DELETESNAPSHOTREQUEST = _descriptor.Descriptor( - name='DeleteSnapshotRequest', - full_name='google.pubsub.v1.DeleteSnapshotRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='snapshot', full_name='google.pubsub.v1.DeleteSnapshotRequest.snapshot', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=3999, - serialized_end=4040, + name="DeleteSnapshotRequest", + full_name="google.pubsub.v1.DeleteSnapshotRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="snapshot", + full_name="google.pubsub.v1.DeleteSnapshotRequest.snapshot", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
options=None, + file=DESCRIPTOR, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=3999, + serialized_end=4040, ) _SEEKREQUEST = _descriptor.Descriptor( - name='SeekRequest', - full_name='google.pubsub.v1.SeekRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='subscription', full_name='google.pubsub.v1.SeekRequest.subscription', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='time', full_name='google.pubsub.v1.SeekRequest.time', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='snapshot', full_name='google.pubsub.v1.SeekRequest.snapshot', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='target', full_name='google.pubsub.v1.SeekRequest.target', - index=0, containing_type=None, fields=[]), - ], - serialized_start=4042, - serialized_end=4151, + name="SeekRequest", + full_name="google.pubsub.v1.SeekRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", 
+ full_name="google.pubsub.v1.SeekRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="time", + full_name="google.pubsub.v1.SeekRequest.time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="snapshot", + full_name="google.pubsub.v1.SeekRequest.snapshot", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="target", + full_name="google.pubsub.v1.SeekRequest.target", + index=0, + containing_type=None, + fields=[], + ) + ], + serialized_start=4042, + serialized_end=4151, ) _SEEKRESPONSE = _descriptor.Descriptor( - name='SeekResponse', - full_name='google.pubsub.v1.SeekResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=4153, - serialized_end=4167, + name="SeekResponse", + full_name="google.pubsub.v1.SeekResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], 
+ options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=4153, + serialized_end=4167, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC -_TOPIC.fields_by_name['labels'].message_type = _TOPIC_LABELSENTRY -_TOPIC.fields_by_name['message_storage_policy'].message_type = _MESSAGESTORAGEPOLICY +_TOPIC.fields_by_name["labels"].message_type = _TOPIC_LABELSENTRY +_TOPIC.fields_by_name["message_storage_policy"].message_type = _MESSAGESTORAGEPOLICY _PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE -_PUBSUBMESSAGE.fields_by_name['attributes'].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY -_PUBSUBMESSAGE.fields_by_name['publish_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UPDATETOPICREQUEST.fields_by_name['topic'].message_type = _TOPIC -_UPDATETOPICREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_PUBLISHREQUEST.fields_by_name['messages'].message_type = _PUBSUBMESSAGE -_LISTTOPICSRESPONSE.fields_by_name['topics'].message_type = _TOPIC +_PUBSUBMESSAGE.fields_by_name[ + "attributes" +].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY +_PUBSUBMESSAGE.fields_by_name[ + "publish_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_UPDATETOPICREQUEST.fields_by_name["topic"].message_type = _TOPIC +_UPDATETOPICREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_PUBLISHREQUEST.fields_by_name["messages"].message_type = _PUBSUBMESSAGE +_LISTTOPICSRESPONSE.fields_by_name["topics"].message_type = _TOPIC _SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION -_SUBSCRIPTION.fields_by_name['push_config'].message_type = _PUSHCONFIG -_SUBSCRIPTION.fields_by_name['message_retention_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_SUBSCRIPTION.fields_by_name['labels'].message_type = _SUBSCRIPTION_LABELSENTRY 
-_SUBSCRIPTION.fields_by_name['expiration_policy'].message_type = _EXPIRATIONPOLICY -_EXPIRATIONPOLICY.fields_by_name['ttl'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_SUBSCRIPTION.fields_by_name["push_config"].message_type = _PUSHCONFIG +_SUBSCRIPTION.fields_by_name[ + "message_retention_duration" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_SUBSCRIPTION.fields_by_name["labels"].message_type = _SUBSCRIPTION_LABELSENTRY +_SUBSCRIPTION.fields_by_name["expiration_policy"].message_type = _EXPIRATIONPOLICY +_EXPIRATIONPOLICY.fields_by_name[ + "ttl" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG -_PUSHCONFIG.fields_by_name['attributes'].message_type = _PUSHCONFIG_ATTRIBUTESENTRY -_RECEIVEDMESSAGE.fields_by_name['message'].message_type = _PUBSUBMESSAGE -_UPDATESUBSCRIPTIONREQUEST.fields_by_name['subscription'].message_type = _SUBSCRIPTION -_UPDATESUBSCRIPTIONREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTSUBSCRIPTIONSRESPONSE.fields_by_name['subscriptions'].message_type = _SUBSCRIPTION -_MODIFYPUSHCONFIGREQUEST.fields_by_name['push_config'].message_type = _PUSHCONFIG -_PULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE -_STREAMINGPULLRESPONSE.fields_by_name['received_messages'].message_type = _RECEIVEDMESSAGE +_PUSHCONFIG.fields_by_name["attributes"].message_type = _PUSHCONFIG_ATTRIBUTESENTRY +_RECEIVEDMESSAGE.fields_by_name["message"].message_type = _PUBSUBMESSAGE +_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"].message_type = _SUBSCRIPTION +_UPDATESUBSCRIPTIONREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"].message_type = _SUBSCRIPTION +_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"].message_type = _PUSHCONFIG 
+_PULLRESPONSE.fields_by_name["received_messages"].message_type = _RECEIVEDMESSAGE +_STREAMINGPULLRESPONSE.fields_by_name[ + "received_messages" +].message_type = _RECEIVEDMESSAGE _CREATESNAPSHOTREQUEST_LABELSENTRY.containing_type = _CREATESNAPSHOTREQUEST -_CREATESNAPSHOTREQUEST.fields_by_name['labels'].message_type = _CREATESNAPSHOTREQUEST_LABELSENTRY -_UPDATESNAPSHOTREQUEST.fields_by_name['snapshot'].message_type = _SNAPSHOT -_UPDATESNAPSHOTREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_CREATESNAPSHOTREQUEST.fields_by_name[ + "labels" +].message_type = _CREATESNAPSHOTREQUEST_LABELSENTRY +_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"].message_type = _SNAPSHOT +_UPDATESNAPSHOTREQUEST.fields_by_name[ + "update_mask" +].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK _SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT -_SNAPSHOT.fields_by_name['expire_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SNAPSHOT.fields_by_name['labels'].message_type = _SNAPSHOT_LABELSENTRY -_LISTSNAPSHOTSRESPONSE.fields_by_name['snapshots'].message_type = _SNAPSHOT -_SEEKREQUEST.fields_by_name['time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SEEKREQUEST.oneofs_by_name['target'].fields.append( - _SEEKREQUEST.fields_by_name['time']) -_SEEKREQUEST.fields_by_name['time'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] -_SEEKREQUEST.oneofs_by_name['target'].fields.append( - _SEEKREQUEST.fields_by_name['snapshot']) -_SEEKREQUEST.fields_by_name['snapshot'].containing_oneof = _SEEKREQUEST.oneofs_by_name['target'] -DESCRIPTOR.message_types_by_name['MessageStoragePolicy'] = _MESSAGESTORAGEPOLICY -DESCRIPTOR.message_types_by_name['Topic'] = _TOPIC -DESCRIPTOR.message_types_by_name['PubsubMessage'] = _PUBSUBMESSAGE -DESCRIPTOR.message_types_by_name['GetTopicRequest'] = _GETTOPICREQUEST -DESCRIPTOR.message_types_by_name['UpdateTopicRequest'] = _UPDATETOPICREQUEST 
-DESCRIPTOR.message_types_by_name['PublishRequest'] = _PUBLISHREQUEST -DESCRIPTOR.message_types_by_name['PublishResponse'] = _PUBLISHRESPONSE -DESCRIPTOR.message_types_by_name['ListTopicsRequest'] = _LISTTOPICSREQUEST -DESCRIPTOR.message_types_by_name['ListTopicsResponse'] = _LISTTOPICSRESPONSE -DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsRequest'] = _LISTTOPICSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name['ListTopicSubscriptionsResponse'] = _LISTTOPICSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name['ListTopicSnapshotsRequest'] = _LISTTOPICSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name['ListTopicSnapshotsResponse'] = _LISTTOPICSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name['DeleteTopicRequest'] = _DELETETOPICREQUEST -DESCRIPTOR.message_types_by_name['Subscription'] = _SUBSCRIPTION -DESCRIPTOR.message_types_by_name['ExpirationPolicy'] = _EXPIRATIONPOLICY -DESCRIPTOR.message_types_by_name['PushConfig'] = _PUSHCONFIG -DESCRIPTOR.message_types_by_name['ReceivedMessage'] = _RECEIVEDMESSAGE -DESCRIPTOR.message_types_by_name['GetSubscriptionRequest'] = _GETSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name['UpdateSubscriptionRequest'] = _UPDATESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name['ListSubscriptionsRequest'] = _LISTSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name['ListSubscriptionsResponse'] = _LISTSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name['DeleteSubscriptionRequest'] = _DELETESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name['ModifyPushConfigRequest'] = _MODIFYPUSHCONFIGREQUEST -DESCRIPTOR.message_types_by_name['PullRequest'] = _PULLREQUEST -DESCRIPTOR.message_types_by_name['PullResponse'] = _PULLRESPONSE -DESCRIPTOR.message_types_by_name['ModifyAckDeadlineRequest'] = _MODIFYACKDEADLINEREQUEST -DESCRIPTOR.message_types_by_name['AcknowledgeRequest'] = _ACKNOWLEDGEREQUEST -DESCRIPTOR.message_types_by_name['StreamingPullRequest'] = _STREAMINGPULLREQUEST 
-DESCRIPTOR.message_types_by_name['StreamingPullResponse'] = _STREAMINGPULLRESPONSE -DESCRIPTOR.message_types_by_name['CreateSnapshotRequest'] = _CREATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name['UpdateSnapshotRequest'] = _UPDATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT -DESCRIPTOR.message_types_by_name['GetSnapshotRequest'] = _GETSNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name['ListSnapshotsRequest'] = _LISTSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name['ListSnapshotsResponse'] = _LISTSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name['DeleteSnapshotRequest'] = _DELETESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name['SeekRequest'] = _SEEKREQUEST -DESCRIPTOR.message_types_by_name['SeekResponse'] = _SEEKRESPONSE +_SNAPSHOT.fields_by_name[ + "expire_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SNAPSHOT.fields_by_name["labels"].message_type = _SNAPSHOT_LABELSENTRY +_LISTSNAPSHOTSRESPONSE.fields_by_name["snapshots"].message_type = _SNAPSHOT +_SEEKREQUEST.fields_by_name[ + "time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SEEKREQUEST.oneofs_by_name["target"].fields.append(_SEEKREQUEST.fields_by_name["time"]) +_SEEKREQUEST.fields_by_name["time"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ + "target" +] +_SEEKREQUEST.oneofs_by_name["target"].fields.append( + _SEEKREQUEST.fields_by_name["snapshot"] +) +_SEEKREQUEST.fields_by_name["snapshot"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ + "target" +] +DESCRIPTOR.message_types_by_name["MessageStoragePolicy"] = _MESSAGESTORAGEPOLICY +DESCRIPTOR.message_types_by_name["Topic"] = _TOPIC +DESCRIPTOR.message_types_by_name["PubsubMessage"] = _PUBSUBMESSAGE +DESCRIPTOR.message_types_by_name["GetTopicRequest"] = _GETTOPICREQUEST +DESCRIPTOR.message_types_by_name["UpdateTopicRequest"] = _UPDATETOPICREQUEST +DESCRIPTOR.message_types_by_name["PublishRequest"] = _PUBLISHREQUEST 
+DESCRIPTOR.message_types_by_name["PublishResponse"] = _PUBLISHRESPONSE +DESCRIPTOR.message_types_by_name["ListTopicsRequest"] = _LISTTOPICSREQUEST +DESCRIPTOR.message_types_by_name["ListTopicsResponse"] = _LISTTOPICSRESPONSE +DESCRIPTOR.message_types_by_name[ + "ListTopicSubscriptionsRequest" +] = _LISTTOPICSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListTopicSubscriptionsResponse" +] = _LISTTOPICSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name[ + "ListTopicSnapshotsRequest" +] = _LISTTOPICSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListTopicSnapshotsResponse" +] = _LISTTOPICSNAPSHOTSRESPONSE +DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST +DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name["ExpirationPolicy"] = _EXPIRATIONPOLICY +DESCRIPTOR.message_types_by_name["PushConfig"] = _PUSHCONFIG +DESCRIPTOR.message_types_by_name["ReceivedMessage"] = _RECEIVEDMESSAGE +DESCRIPTOR.message_types_by_name["GetSubscriptionRequest"] = _GETSUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name[ + "UpdateSubscriptionRequest" +] = _UPDATESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name["ListSubscriptionsRequest"] = _LISTSUBSCRIPTIONSREQUEST +DESCRIPTOR.message_types_by_name[ + "ListSubscriptionsResponse" +] = _LISTSUBSCRIPTIONSRESPONSE +DESCRIPTOR.message_types_by_name[ + "DeleteSubscriptionRequest" +] = _DELETESUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name["ModifyPushConfigRequest"] = _MODIFYPUSHCONFIGREQUEST +DESCRIPTOR.message_types_by_name["PullRequest"] = _PULLREQUEST +DESCRIPTOR.message_types_by_name["PullResponse"] = _PULLRESPONSE +DESCRIPTOR.message_types_by_name["ModifyAckDeadlineRequest"] = _MODIFYACKDEADLINEREQUEST +DESCRIPTOR.message_types_by_name["AcknowledgeRequest"] = _ACKNOWLEDGEREQUEST +DESCRIPTOR.message_types_by_name["StreamingPullRequest"] = _STREAMINGPULLREQUEST +DESCRIPTOR.message_types_by_name["StreamingPullResponse"] = 
_STREAMINGPULLRESPONSE +DESCRIPTOR.message_types_by_name["CreateSnapshotRequest"] = _CREATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name["UpdateSnapshotRequest"] = _UPDATESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT +DESCRIPTOR.message_types_by_name["GetSnapshotRequest"] = _GETSNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name["ListSnapshotsRequest"] = _LISTSNAPSHOTSREQUEST +DESCRIPTOR.message_types_by_name["ListSnapshotsResponse"] = _LISTSNAPSHOTSRESPONSE +DESCRIPTOR.message_types_by_name["DeleteSnapshotRequest"] = _DELETESNAPSHOTREQUEST +DESCRIPTOR.message_types_by_name["SeekRequest"] = _SEEKREQUEST +DESCRIPTOR.message_types_by_name["SeekResponse"] = _SEEKRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -MessageStoragePolicy = _reflection.GeneratedProtocolMessageType('MessageStoragePolicy', (_message.Message,), dict( - DESCRIPTOR = _MESSAGESTORAGEPOLICY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A message storage policy. +MessageStoragePolicy = _reflection.GeneratedProtocolMessageType( + "MessageStoragePolicy", + (_message.Message,), + dict( + DESCRIPTOR=_MESSAGESTORAGEPOLICY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A message storage policy. Attributes: @@ -1905,22 +2924,27 @@ misconfiguration at the project or organization level, which will result in all Publish operations failing. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) + ), +) _sym_db.RegisterMessage(MessageStoragePolicy) -Topic = _reflection.GeneratedProtocolMessageType('Topic', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _TOPIC_LABELSENTRY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) - )) - , - DESCRIPTOR = _TOPIC, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A topic resource. +Topic = _reflection.GeneratedProtocolMessageType( + "Topic", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_TOPIC_LABELSENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) + ), + ), + DESCRIPTOR=_TOPIC, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A topic resource. Attributes: @@ -1943,23 +2967,28 @@ CreateTopic, and UpdateTopic: if not present in the response, then no constraints are in effect. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) + ), +) _sym_db.RegisterMessage(Topic) _sym_db.RegisterMessage(Topic.LabelsEntry) -PubsubMessage = _reflection.GeneratedProtocolMessageType('PubsubMessage', (_message.Message,), dict( - - AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( - DESCRIPTOR = _PUBSUBMESSAGE_ATTRIBUTESENTRY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) - )) - , - DESCRIPTOR = _PUBSUBMESSAGE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A message that is published by publishers and consumed by subscribers. +PubsubMessage = _reflection.GeneratedProtocolMessageType( + "PubsubMessage", + (_message.Message,), + dict( + AttributesEntry=_reflection.GeneratedProtocolMessageType( + "AttributesEntry", + (_message.Message,), + dict( + DESCRIPTOR=_PUBSUBMESSAGE_ATTRIBUTESENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) + ), + ), + DESCRIPTOR=_PUBSUBMESSAGE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A message that is published by publishers and consumed by subscribers. The message must contain either a non-empty data field or at least one attribute. See Quotas and limits for more information about message limits. @@ -1982,16 +3011,19 @@ server when it receives the ``Publish`` call. It must not be populated by the publisher in a ``Publish`` call. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) + ), +) _sym_db.RegisterMessage(PubsubMessage) _sym_db.RegisterMessage(PubsubMessage.AttributesEntry) -GetTopicRequest = _reflection.GeneratedProtocolMessageType('GetTopicRequest', (_message.Message,), dict( - DESCRIPTOR = _GETTOPICREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the GetTopic method. +GetTopicRequest = _reflection.GeneratedProtocolMessageType( + "GetTopicRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETTOPICREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the GetTopic method. Attributes: @@ -1999,15 +3031,18 @@ The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) + ), +) _sym_db.RegisterMessage(GetTopicRequest) -UpdateTopicRequest = _reflection.GeneratedProtocolMessageType('UpdateTopicRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATETOPICREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the UpdateTopic method. +UpdateTopicRequest = _reflection.GeneratedProtocolMessageType( + "UpdateTopicRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATETOPICREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the UpdateTopic method. Attributes: @@ -2021,15 +3056,18 @@ organization level. The ``message_storage_policy`` must not be set in the ``topic`` provided above. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) + ), +) _sym_db.RegisterMessage(UpdateTopicRequest) -PublishRequest = _reflection.GeneratedProtocolMessageType('PublishRequest', (_message.Message,), dict( - DESCRIPTOR = _PUBLISHREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the Publish method. +PublishRequest = _reflection.GeneratedProtocolMessageType( + "PublishRequest", + (_message.Message,), + dict( + DESCRIPTOR=_PUBLISHREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the Publish method. Attributes: @@ -2039,15 +3077,18 @@ messages: The messages to publish. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) + ), +) _sym_db.RegisterMessage(PublishRequest) -PublishResponse = _reflection.GeneratedProtocolMessageType('PublishResponse', (_message.Message,), dict( - DESCRIPTOR = _PUBLISHRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``Publish`` method. +PublishResponse = _reflection.GeneratedProtocolMessageType( + "PublishResponse", + (_message.Message,), + dict( + DESCRIPTOR=_PUBLISHRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``Publish`` method. Attributes: @@ -2056,15 +3097,18 @@ order as the messages in the request. IDs are guaranteed to be unique within the topic. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) + ), +) _sym_db.RegisterMessage(PublishResponse) -ListTopicsRequest = _reflection.GeneratedProtocolMessageType('ListTopicsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTTOPICSREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``ListTopics`` method. +ListTopicsRequest = _reflection.GeneratedProtocolMessageType( + "ListTopicsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTTOPICSREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``ListTopics`` method. Attributes: @@ -2079,15 +3123,18 @@ ``ListTopics`` call, and that the system should return the next page of data. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) + ), +) _sym_db.RegisterMessage(ListTopicsRequest) -ListTopicsResponse = _reflection.GeneratedProtocolMessageType('ListTopicsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTTOPICSRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``ListTopics`` method. +ListTopicsResponse = _reflection.GeneratedProtocolMessageType( + "ListTopicsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTTOPICSRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``ListTopics`` method. Attributes: @@ -2098,15 +3145,18 @@ match the request; this value should be passed in a new ``ListTopicsRequest``. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) + ), +) _sym_db.RegisterMessage(ListTopicsResponse) -ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``ListTopicSubscriptions`` method. +ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( + "ListTopicSubscriptionsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTTOPICSUBSCRIPTIONSREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``ListTopicSubscriptions`` method. Attributes: @@ -2121,15 +3171,18 @@ continuation of a prior ``ListTopicSubscriptions`` call, and that the system should return the next page of data. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) + ), +) _sym_db.RegisterMessage(ListTopicSubscriptionsRequest) -ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSubscriptionsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTTOPICSUBSCRIPTIONSRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``ListTopicSubscriptions`` method. +ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( + "ListTopicSubscriptionsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTTOPICSUBSCRIPTIONSRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``ListTopicSubscriptions`` method. 
Attributes: @@ -2140,15 +3193,18 @@ that match the request; this value should be passed in a new ``ListTopicSubscriptionsRequest`` to get more subscriptions. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) + ), +) _sym_db.RegisterMessage(ListTopicSubscriptionsResponse) -ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListTopicSnapshotsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTTOPICSNAPSHOTSREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``ListTopicSnapshots`` method. ALPHA: This feature is +ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType( + "ListTopicSnapshotsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTTOPICSNAPSHOTSREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``ListTopicSnapshots`` method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2166,15 +3222,18 @@ ``ListTopicSnapshots`` call, and that the system should return the next page of data. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) + ), +) _sym_db.RegisterMessage(ListTopicSnapshotsRequest) -ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListTopicSnapshotsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTTOPICSNAPSHOTSRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``ListTopicSnapshots`` method. 
ALPHA: This feature is +ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType( + "ListTopicSnapshotsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTTOPICSNAPSHOTSRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``ListTopicSnapshots`` method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2188,15 +3247,18 @@ match the request; this value should be passed in a new ``ListTopicSnapshotsRequest`` to get more snapshots. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) + ), +) _sym_db.RegisterMessage(ListTopicSnapshotsResponse) -DeleteTopicRequest = _reflection.GeneratedProtocolMessageType('DeleteTopicRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETETOPICREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``DeleteTopic`` method. +DeleteTopicRequest = _reflection.GeneratedProtocolMessageType( + "DeleteTopicRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETETOPICREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``DeleteTopic`` method. Attributes: @@ -2204,22 +3266,27 @@ Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) + ), +) _sym_db.RegisterMessage(DeleteTopicRequest) -Subscription = _reflection.GeneratedProtocolMessageType('Subscription', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _SUBSCRIPTION_LABELSENTRY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) - )) - , - DESCRIPTOR = _SUBSCRIPTION, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A subscription resource. +Subscription = _reflection.GeneratedProtocolMessageType( + "Subscription", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_SUBSCRIPTION_LABELSENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) + ), + ), + DESCRIPTOR=_SUBSCRIPTION, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A subscription resource. Attributes: @@ -2293,16 +3360,19 @@ incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) + ), +) _sym_db.RegisterMessage(Subscription) _sym_db.RegisterMessage(Subscription.LabelsEntry) -ExpirationPolicy = _reflection.GeneratedProtocolMessageType('ExpirationPolicy', (_message.Message,), dict( - DESCRIPTOR = _EXPIRATIONPOLICY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A policy that specifies the conditions for resource expiration (i.e., +ExpirationPolicy = _reflection.GeneratedProtocolMessageType( + "ExpirationPolicy", + (_message.Message,), + dict( + DESCRIPTOR=_EXPIRATIONPOLICY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A policy that specifies the conditions for resource expiration (i.e., automatic resource deletion). @@ -2316,22 +3386,27 @@ associated resource, as well. If ``ttl`` is not set, the associated resource never expires. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) + ), +) _sym_db.RegisterMessage(ExpirationPolicy) -PushConfig = _reflection.GeneratedProtocolMessageType('PushConfig', (_message.Message,), dict( - - AttributesEntry = _reflection.GeneratedProtocolMessageType('AttributesEntry', (_message.Message,), dict( - DESCRIPTOR = _PUSHCONFIG_ATTRIBUTESENTRY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) - )) - , - DESCRIPTOR = _PUSHCONFIG, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Configuration for a push delivery endpoint. 
+PushConfig = _reflection.GeneratedProtocolMessageType( + "PushConfig", + (_message.Message,), + dict( + AttributesEntry=_reflection.GeneratedProtocolMessageType( + "AttributesEntry", + (_message.Message,), + dict( + DESCRIPTOR=_PUSHCONFIG_ATTRIBUTESENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) + ), + ), + DESCRIPTOR=_PUSHCONFIG, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Configuration for a push delivery endpoint. Attributes: @@ -2359,16 +3434,19 @@ ``v1`` or ``v1beta2``: uses the push format defined in the v1 Pub/Sub API. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) + ), +) _sym_db.RegisterMessage(PushConfig) _sym_db.RegisterMessage(PushConfig.AttributesEntry) -ReceivedMessage = _reflection.GeneratedProtocolMessageType('ReceivedMessage', (_message.Message,), dict( - DESCRIPTOR = _RECEIVEDMESSAGE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A message and its corresponding acknowledgment ID. +ReceivedMessage = _reflection.GeneratedProtocolMessageType( + "ReceivedMessage", + (_message.Message,), + dict( + DESCRIPTOR=_RECEIVEDMESSAGE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A message and its corresponding acknowledgment ID. Attributes: @@ -2377,15 +3455,18 @@ message: The message. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) + ), +) _sym_db.RegisterMessage(ReceivedMessage) -GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType('GetSubscriptionRequest', (_message.Message,), dict( - DESCRIPTOR = _GETSUBSCRIPTIONREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the GetSubscription method. 
+GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType( + "GetSubscriptionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETSUBSCRIPTIONREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the GetSubscription method. Attributes: @@ -2393,15 +3474,18 @@ The name of the subscription to get. Format is ``projects/{project}/subscriptions/{sub}``. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) + ), +) _sym_db.RegisterMessage(GetSubscriptionRequest) -UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType('UpdateSubscriptionRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATESUBSCRIPTIONREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the UpdateSubscription method. +UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType( + "UpdateSubscriptionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATESUBSCRIPTIONREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the UpdateSubscription method. Attributes: @@ -2411,15 +3495,18 @@ Indicates which fields in the provided subscription to update. Must be specified and non-empty. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) + ), +) _sym_db.RegisterMessage(UpdateSubscriptionRequest) -ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType('ListSubscriptionsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTSUBSCRIPTIONSREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``ListSubscriptions`` method. 
+ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( + "ListSubscriptionsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSUBSCRIPTIONSREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``ListSubscriptions`` method. Attributes: @@ -2434,15 +3521,18 @@ ``ListSubscriptions`` call, and that the system should return the next page of data. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) + ), +) _sym_db.RegisterMessage(ListSubscriptionsRequest) -ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType('ListSubscriptionsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTSUBSCRIPTIONSRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``ListSubscriptions`` method. +ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( + "ListSubscriptionsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSUBSCRIPTIONSRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``ListSubscriptions`` method. Attributes: @@ -2453,15 +3543,18 @@ that match the request; this value should be passed in a new ``ListSubscriptionsRequest`` to get more subscriptions. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) + ), +) _sym_db.RegisterMessage(ListSubscriptionsResponse) -DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType('DeleteSubscriptionRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETESUBSCRIPTIONREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the DeleteSubscription method. 
+DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType( + "DeleteSubscriptionRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETESUBSCRIPTIONREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the DeleteSubscription method. Attributes: @@ -2469,15 +3562,18 @@ The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) + ), +) _sym_db.RegisterMessage(DeleteSubscriptionRequest) -ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyPushConfigRequest', (_message.Message,), dict( - DESCRIPTOR = _MODIFYPUSHCONFIGREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ModifyPushConfig method. +ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType( + "ModifyPushConfigRequest", + (_message.Message,), + dict( + DESCRIPTOR=_MODIFYPUSHCONFIGREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ModifyPushConfig method. Attributes: @@ -2492,15 +3588,18 @@ the subscription if ``Pull`` or ``StreamingPull`` is not called. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) + ), +) _sym_db.RegisterMessage(ModifyPushConfigRequest) -PullRequest = _reflection.GeneratedProtocolMessageType('PullRequest', (_message.Message,), dict( - DESCRIPTOR = _PULLREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``Pull`` method. 
+PullRequest = _reflection.GeneratedProtocolMessageType( + "PullRequest", + (_message.Message,), + dict( + DESCRIPTOR=_PULLREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``Pull`` method. Attributes: @@ -2517,15 +3616,18 @@ The maximum number of messages returned for this request. The Pub/Sub system may return fewer than the number specified. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) + ), +) _sym_db.RegisterMessage(PullRequest) -PullResponse = _reflection.GeneratedProtocolMessageType('PullResponse', (_message.Message,), dict( - DESCRIPTOR = _PULLRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``Pull`` method. +PullResponse = _reflection.GeneratedProtocolMessageType( + "PullResponse", + (_message.Message,), + dict( + DESCRIPTOR=_PULLRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``Pull`` method. Attributes: @@ -2536,15 +3638,18 @@ fewer than the ``maxMessages`` requested even if there are more messages available in the backlog. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) + ), +) _sym_db.RegisterMessage(PullResponse) -ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType('ModifyAckDeadlineRequest', (_message.Message,), dict( - DESCRIPTOR = _MODIFYACKDEADLINEREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ModifyAckDeadline method. +ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType( + "ModifyAckDeadlineRequest", + (_message.Message,), + dict( + DESCRIPTOR=_MODIFYACKDEADLINEREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ModifyAckDeadline method. 
Attributes: @@ -2563,15 +3668,18 @@ The maximum deadline you can specify is 600 seconds (10 minutes). """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) + ), +) _sym_db.RegisterMessage(ModifyAckDeadlineRequest) -AcknowledgeRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeRequest', (_message.Message,), dict( - DESCRIPTOR = _ACKNOWLEDGEREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the Acknowledge method. +AcknowledgeRequest = _reflection.GeneratedProtocolMessageType( + "AcknowledgeRequest", + (_message.Message,), + dict( + DESCRIPTOR=_ACKNOWLEDGEREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the Acknowledge method. Attributes: @@ -2583,15 +3691,18 @@ was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) + ), +) _sym_db.RegisterMessage(AcknowledgeRequest) -StreamingPullRequest = _reflection.GeneratedProtocolMessageType('StreamingPullRequest', (_message.Message,), dict( - DESCRIPTOR = _STREAMINGPULLREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``StreamingPull`` streaming RPC method. This request is +StreamingPullRequest = _reflection.GeneratedProtocolMessageType( + "StreamingPullRequest", + (_message.Message,), + dict( + DESCRIPTOR=_STREAMINGPULLREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``StreamingPull`` streaming RPC method. This request is used to establish the initial stream as well as to stream acknowledgements and ack deadline modifications from the client to the server. @@ -2639,15 +3750,18 @@ deadline you can specify is 10 seconds. 
The maximum deadline you can specify is 600 seconds (10 minutes). """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) + ), +) _sym_db.RegisterMessage(StreamingPullRequest) -StreamingPullResponse = _reflection.GeneratedProtocolMessageType('StreamingPullResponse', (_message.Message,), dict( - DESCRIPTOR = _STREAMINGPULLRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``StreamingPull`` method. This response is used to +StreamingPullResponse = _reflection.GeneratedProtocolMessageType( + "StreamingPullResponse", + (_message.Message,), + dict( + DESCRIPTOR=_STREAMINGPULLRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``StreamingPull`` method. This response is used to stream messages from the server to the client. @@ -2655,22 +3769,27 @@ received_messages: Received Pub/Sub messages. This will not be empty. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) + ), +) _sym_db.RegisterMessage(StreamingPullResponse) -CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType('CreateSnapshotRequest', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _CREATESNAPSHOTREQUEST_LABELSENTRY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) - )) - , - DESCRIPTOR = _CREATESNAPSHOTREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``CreateSnapshot`` method. 
ALPHA: This feature is part +CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType( + "CreateSnapshotRequest", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_CREATESNAPSHOTREQUEST_LABELSENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) + ), + ), + DESCRIPTOR=_CREATESNAPSHOTREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``CreateSnapshot`` method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2697,16 +3816,19 @@ labels: See Creating and managing labels. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) + ), +) _sym_db.RegisterMessage(CreateSnapshotRequest) _sym_db.RegisterMessage(CreateSnapshotRequest.LabelsEntry) -UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType('UpdateSnapshotRequest', (_message.Message,), dict( - DESCRIPTOR = _UPDATESNAPSHOTREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the UpdateSnapshot method. ALPHA: This feature is part of an +UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType( + "UpdateSnapshotRequest", + (_message.Message,), + dict( + DESCRIPTOR=_UPDATESNAPSHOTREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the UpdateSnapshot method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. 
@@ -2719,22 +3841,27 @@ Indicates which fields in the provided snapshot to update. Must be specified and non-empty. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) + ), +) _sym_db.RegisterMessage(UpdateSnapshotRequest) -Snapshot = _reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( - - LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( - DESCRIPTOR = _SNAPSHOT_LABELSENTRY, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) - )) - , - DESCRIPTOR = _SNAPSHOT, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """A snapshot resource. ALPHA: This feature is part of an alpha release. +Snapshot = _reflection.GeneratedProtocolMessageType( + "Snapshot", + (_message.Message,), + dict( + LabelsEntry=_reflection.GeneratedProtocolMessageType( + "LabelsEntry", + (_message.Message,), + dict( + DESCRIPTOR=_SNAPSHOT_LABELSENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) + ), + ), + DESCRIPTOR=_SNAPSHOT, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A snapshot resource. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2762,16 +3889,19 @@ labels: See Creating and managing labels. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) + ), +) _sym_db.RegisterMessage(Snapshot) _sym_db.RegisterMessage(Snapshot.LabelsEntry) -GetSnapshotRequest = _reflection.GeneratedProtocolMessageType('GetSnapshotRequest', (_message.Message,), dict( - DESCRIPTOR = _GETSNAPSHOTREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the GetSnapshot method. ALPHA: This feature is part of an +GetSnapshotRequest = _reflection.GeneratedProtocolMessageType( + "GetSnapshotRequest", + (_message.Message,), + dict( + DESCRIPTOR=_GETSNAPSHOTREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the GetSnapshot method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2782,15 +3912,18 @@ The name of the snapshot to get. Format is ``projects/{project}/snapshots/{snap}``. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) + ), +) _sym_db.RegisterMessage(GetSnapshotRequest) -ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType('ListSnapshotsRequest', (_message.Message,), dict( - DESCRIPTOR = _LISTSNAPSHOTSREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``ListSnapshots`` method. ALPHA: This feature is part of +ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType( + "ListSnapshotsRequest", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSNAPSHOTSREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``ListSnapshots`` method. ALPHA: This feature is part of an alpha release. 
This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2808,15 +3941,18 @@ ``ListSnapshots`` call, and that the system should return the next page of data. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) + ), +) _sym_db.RegisterMessage(ListSnapshotsRequest) -ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType('ListSnapshotsResponse', (_message.Message,), dict( - DESCRIPTOR = _LISTSNAPSHOTSRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``ListSnapshots`` method. ALPHA: This feature is part +ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType( + "ListSnapshotsResponse", + (_message.Message,), + dict( + DESCRIPTOR=_LISTSNAPSHOTSRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``ListSnapshots`` method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2830,15 +3966,18 @@ match the request; this value should be passed in a new ``ListSnapshotsRequest``. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) + ), +) _sym_db.RegisterMessage(ListSnapshotsResponse) -DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType('DeleteSnapshotRequest', (_message.Message,), dict( - DESCRIPTOR = _DELETESNAPSHOTREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``DeleteSnapshot`` method. 
ALPHA: This feature is part +DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType( + "DeleteSnapshotRequest", + (_message.Message,), + dict( + DESCRIPTOR=_DELETESNAPSHOTREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``DeleteSnapshot`` method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2849,15 +3988,18 @@ The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) + ), +) _sym_db.RegisterMessage(DeleteSnapshotRequest) -SeekRequest = _reflection.GeneratedProtocolMessageType('SeekRequest', (_message.Message,), dict( - DESCRIPTOR = _SEEKREQUEST, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Request for the ``Seek`` method. ALPHA: This feature is part of an alpha +SeekRequest = _reflection.GeneratedProtocolMessageType( + "SeekRequest", + (_message.Message,), + dict( + DESCRIPTOR=_SEEKREQUEST, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Request for the ``Seek`` method. ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -2884,279 +4026,397 @@ as that of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. 
""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) + ), +) _sym_db.RegisterMessage(SeekRequest) -SeekResponse = _reflection.GeneratedProtocolMessageType('SeekResponse', (_message.Message,), dict( - DESCRIPTOR = _SEEKRESPONSE, - __module__ = 'google.cloud.pubsub_v1.proto.pubsub_pb2' - , - __doc__ = """Response for the ``Seek`` method (this response is empty). +SeekResponse = _reflection.GeneratedProtocolMessageType( + "SeekResponse", + (_message.Message,), + dict( + DESCRIPTOR=_SEEKRESPONSE, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Response for the ``Seek`` method (this response is empty). """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) - )) + # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) + ), +) _sym_db.RegisterMessage(SeekResponse) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1')) +DESCRIPTOR._options = _descriptor._ParseOptions( + descriptor_pb2.FileOptions(), + _b( + "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1" + ), +) _TOPIC_LABELSENTRY.has_options = True -_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True -_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions( 
+ descriptor_pb2.MessageOptions(), _b("8\001") +) _SUBSCRIPTION_LABELSENTRY.has_options = True -_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _PUSHCONFIG_ATTRIBUTESENTRY.has_options = True -_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _CREATESNAPSHOTREQUEST_LABELSENTRY.has_options = True -_CREATESNAPSHOTREQUEST_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_CREATESNAPSHOTREQUEST_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _SNAPSHOT_LABELSENTRY.has_options = True -_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions( + descriptor_pb2.MessageOptions(), _b("8\001") +) _PUBLISHER = _descriptor.ServiceDescriptor( - name='Publisher', - full_name='google.pubsub.v1.Publisher', - file=DESCRIPTOR, - index=0, - options=None, - serialized_start=4170, - serialized_end=5257, - methods=[ - _descriptor.MethodDescriptor( - name='CreateTopic', - full_name='google.pubsub.v1.Publisher.CreateTopic', + name="Publisher", + full_name="google.pubsub.v1.Publisher", + file=DESCRIPTOR, index=0, - containing_service=None, - input_type=_TOPIC, - output_type=_TOPIC, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*')), - ), - _descriptor.MethodDescriptor( - name='UpdateTopic', - full_name='google.pubsub.v1.Publisher.UpdateTopic', - index=1, - containing_service=None, - input_type=_UPDATETOPICREQUEST, - output_type=_TOPIC, - 
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*')), - ), - _descriptor.MethodDescriptor( - name='Publish', - full_name='google.pubsub.v1.Publisher.Publish', - index=2, - containing_service=None, - input_type=_PUBLISHREQUEST, - output_type=_PUBLISHRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002,\"\'/v1/{topic=projects/*/topics/*}:publish:\001*')), - ), - _descriptor.MethodDescriptor( - name='GetTopic', - full_name='google.pubsub.v1.Publisher.GetTopic', - index=3, - containing_service=None, - input_type=_GETTOPICREQUEST, - output_type=_TOPIC, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}')), - ), - _descriptor.MethodDescriptor( - name='ListTopics', - full_name='google.pubsub.v1.Publisher.ListTopics', - index=4, - containing_service=None, - input_type=_LISTTOPICSREQUEST, - output_type=_LISTTOPICSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics')), - ), - _descriptor.MethodDescriptor( - name='ListTopicSubscriptions', - full_name='google.pubsub.v1.Publisher.ListTopicSubscriptions', - index=5, - containing_service=None, - input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, - output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions')), - ), - _descriptor.MethodDescriptor( - name='ListTopicSnapshots', - full_name='google.pubsub.v1.Publisher.ListTopicSnapshots', - index=6, - containing_service=None, - input_type=_LISTTOPICSNAPSHOTSREQUEST, - output_type=_LISTTOPICSNAPSHOTSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots')), - ), - 
_descriptor.MethodDescriptor( - name='DeleteTopic', - full_name='google.pubsub.v1.Publisher.DeleteTopic', - index=7, - containing_service=None, - input_type=_DELETETOPICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}')), - ), -]) + options=None, + serialized_start=4170, + serialized_end=5257, + methods=[ + _descriptor.MethodDescriptor( + name="CreateTopic", + full_name="google.pubsub.v1.Publisher.CreateTopic", + index=0, + containing_service=None, + input_type=_TOPIC, + output_type=_TOPIC, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*"), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateTopic", + full_name="google.pubsub.v1.Publisher.UpdateTopic", + index=1, + containing_service=None, + input_type=_UPDATETOPICREQUEST, + output_type=_TOPIC, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*"), + ), + ), + _descriptor.MethodDescriptor( + name="Publish", + full_name="google.pubsub.v1.Publisher.Publish", + index=2, + containing_service=None, + input_type=_PUBLISHREQUEST, + output_type=_PUBLISHRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*" + ), + ), + ), + _descriptor.MethodDescriptor( + name="GetTopic", + full_name="google.pubsub.v1.Publisher.GetTopic", + index=3, + containing_service=None, + input_type=_GETTOPICREQUEST, + output_type=_TOPIC, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}"), + ), + ), + _descriptor.MethodDescriptor( + name="ListTopics", + full_name="google.pubsub.v1.Publisher.ListTopics", + index=4, + 
containing_service=None, + input_type=_LISTTOPICSREQUEST, + output_type=_LISTTOPICSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics"), + ), + ), + _descriptor.MethodDescriptor( + name="ListTopicSubscriptions", + full_name="google.pubsub.v1.Publisher.ListTopicSubscriptions", + index=5, + containing_service=None, + input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, + output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions" + ), + ), + ), + _descriptor.MethodDescriptor( + name="ListTopicSnapshots", + full_name="google.pubsub.v1.Publisher.ListTopicSnapshots", + index=6, + containing_service=None, + input_type=_LISTTOPICSNAPSHOTSREQUEST, + output_type=_LISTTOPICSNAPSHOTSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteTopic", + full_name="google.pubsub.v1.Publisher.DeleteTopic", + index=7, + containing_service=None, + input_type=_DELETETOPICREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}"), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_PUBLISHER) -DESCRIPTOR.services_by_name['Publisher'] = _PUBLISHER +DESCRIPTOR.services_by_name["Publisher"] = _PUBLISHER _SUBSCRIBER = _descriptor.ServiceDescriptor( - name='Subscriber', - full_name='google.pubsub.v1.Subscriber', - file=DESCRIPTOR, - index=1, - options=None, - serialized_start=5260, - serialized_end=7557, - methods=[ - _descriptor.MethodDescriptor( - name='CreateSubscription', - full_name='google.pubsub.v1.Subscriber.CreateSubscription', - index=0, - 
containing_service=None, - input_type=_SUBSCRIPTION, - output_type=_SUBSCRIPTION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*')), - ), - _descriptor.MethodDescriptor( - name='GetSubscription', - full_name='google.pubsub.v1.Subscriber.GetSubscription', + name="Subscriber", + full_name="google.pubsub.v1.Subscriber", + file=DESCRIPTOR, index=1, - containing_service=None, - input_type=_GETSUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}')), - ), - _descriptor.MethodDescriptor( - name='UpdateSubscription', - full_name='google.pubsub.v1.Subscriber.UpdateSubscription', - index=2, - containing_service=None, - input_type=_UPDATESUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*')), - ), - _descriptor.MethodDescriptor( - name='ListSubscriptions', - full_name='google.pubsub.v1.Subscriber.ListSubscriptions', - index=3, - containing_service=None, - input_type=_LISTSUBSCRIPTIONSREQUEST, - output_type=_LISTSUBSCRIPTIONSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions')), - ), - _descriptor.MethodDescriptor( - name='DeleteSubscription', - full_name='google.pubsub.v1.Subscriber.DeleteSubscription', - index=4, - containing_service=None, - input_type=_DELETESUBSCRIPTIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}')), - ), - _descriptor.MethodDescriptor( - name='ModifyAckDeadline', - full_name='google.pubsub.v1.Subscriber.ModifyAckDeadline', - 
index=5, - containing_service=None, - input_type=_MODIFYACKDEADLINEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002D\"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*')), - ), - _descriptor.MethodDescriptor( - name='Acknowledge', - full_name='google.pubsub.v1.Subscriber.Acknowledge', - index=6, - containing_service=None, - input_type=_ACKNOWLEDGEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002>\"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*')), - ), - _descriptor.MethodDescriptor( - name='Pull', - full_name='google.pubsub.v1.Subscriber.Pull', - index=7, - containing_service=None, - input_type=_PULLREQUEST, - output_type=_PULLRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027\"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*')), - ), - _descriptor.MethodDescriptor( - name='StreamingPull', - full_name='google.pubsub.v1.Subscriber.StreamingPull', - index=8, - containing_service=None, - input_type=_STREAMINGPULLREQUEST, - output_type=_STREAMINGPULLRESPONSE, options=None, - ), - _descriptor.MethodDescriptor( - name='ModifyPushConfig', - full_name='google.pubsub.v1.Subscriber.ModifyPushConfig', - index=9, - containing_service=None, - input_type=_MODIFYPUSHCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002C\">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*')), - ), - _descriptor.MethodDescriptor( - name='GetSnapshot', - full_name='google.pubsub.v1.Subscriber.GetSnapshot', - index=10, - containing_service=None, - input_type=_GETSNAPSHOTREQUEST, - output_type=_SNAPSHOT, - 
options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\'\022%/v1/{snapshot=projects/*/snapshots/*}')), - ), - _descriptor.MethodDescriptor( - name='ListSnapshots', - full_name='google.pubsub.v1.Subscriber.ListSnapshots', - index=11, - containing_service=None, - input_type=_LISTSNAPSHOTSREQUEST, - output_type=_LISTSNAPSHOTSRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002$\022\"/v1/{project=projects/*}/snapshots')), - ), - _descriptor.MethodDescriptor( - name='CreateSnapshot', - full_name='google.pubsub.v1.Subscriber.CreateSnapshot', - index=12, - containing_service=None, - input_type=_CREATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*')), - ), - _descriptor.MethodDescriptor( - name='UpdateSnapshot', - full_name='google.pubsub.v1.Subscriber.UpdateSnapshot', - index=13, - containing_service=None, - input_type=_UPDATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*')), - ), - _descriptor.MethodDescriptor( - name='DeleteSnapshot', - full_name='google.pubsub.v1.Subscriber.DeleteSnapshot', - index=14, - containing_service=None, - input_type=_DELETESNAPSHOTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\'*%/v1/{snapshot=projects/*/snapshots/*}')), - ), - _descriptor.MethodDescriptor( - name='Seek', - full_name='google.pubsub.v1.Subscriber.Seek', - index=15, - containing_service=None, - input_type=_SEEKREQUEST, - output_type=_SEEKRESPONSE, - options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0027\"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*')), - ), -]) + 
serialized_start=5260, + serialized_end=7557, + methods=[ + _descriptor.MethodDescriptor( + name="CreateSubscription", + full_name="google.pubsub.v1.Subscriber.CreateSubscription", + index=0, + containing_service=None, + input_type=_SUBSCRIPTION, + output_type=_SUBSCRIPTION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*" + ), + ), + ), + _descriptor.MethodDescriptor( + name="GetSubscription", + full_name="google.pubsub.v1.Subscriber.GetSubscription", + index=1, + containing_service=None, + input_type=_GETSUBSCRIPTIONREQUEST, + output_type=_SUBSCRIPTION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateSubscription", + full_name="google.pubsub.v1.Subscriber.UpdateSubscription", + index=2, + containing_service=None, + input_type=_UPDATESUBSCRIPTIONREQUEST, + output_type=_SUBSCRIPTION, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*" + ), + ), + ), + _descriptor.MethodDescriptor( + name="ListSubscriptions", + full_name="google.pubsub.v1.Subscriber.ListSubscriptions", + index=3, + containing_service=None, + input_type=_LISTSUBSCRIPTIONSREQUEST, + output_type=_LISTSUBSCRIPTIONSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions"), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteSubscription", + full_name="google.pubsub.v1.Subscriber.DeleteSubscription", + index=4, + containing_service=None, + input_type=_DELETESUBSCRIPTIONREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + 
"\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}" + ), + ), + ), + _descriptor.MethodDescriptor( + name="ModifyAckDeadline", + full_name="google.pubsub.v1.Subscriber.ModifyAckDeadline", + index=5, + containing_service=None, + input_type=_MODIFYACKDEADLINEREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="Acknowledge", + full_name="google.pubsub.v1.Subscriber.Acknowledge", + index=6, + containing_service=None, + input_type=_ACKNOWLEDGEREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="Pull", + full_name="google.pubsub.v1.Subscriber.Pull", + index=7, + containing_service=None, + input_type=_PULLREQUEST, + output_type=_PULLRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*' + ), + ), + ), + _descriptor.MethodDescriptor( + name="StreamingPull", + full_name="google.pubsub.v1.Subscriber.StreamingPull", + index=8, + containing_service=None, + input_type=_STREAMINGPULLREQUEST, + output_type=_STREAMINGPULLRESPONSE, + options=None, + ), + _descriptor.MethodDescriptor( + name="ModifyPushConfig", + full_name="google.pubsub.v1.Subscriber.ModifyPushConfig", + index=9, + containing_service=None, + input_type=_MODIFYPUSHCONFIGREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*' + ), + ), 
+ ), + _descriptor.MethodDescriptor( + name="GetSnapshot", + full_name="google.pubsub.v1.Subscriber.GetSnapshot", + index=10, + containing_service=None, + input_type=_GETSNAPSHOTREQUEST, + output_type=_SNAPSHOT, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}"), + ), + ), + _descriptor.MethodDescriptor( + name="ListSnapshots", + full_name="google.pubsub.v1.Subscriber.ListSnapshots", + index=11, + containing_service=None, + input_type=_LISTSNAPSHOTSREQUEST, + output_type=_LISTSNAPSHOTSRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b('\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots'), + ), + ), + _descriptor.MethodDescriptor( + name="CreateSnapshot", + full_name="google.pubsub.v1.Subscriber.CreateSnapshot", + index=12, + containing_service=None, + input_type=_CREATESNAPSHOTREQUEST, + output_type=_SNAPSHOT, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*"), + ), + ), + _descriptor.MethodDescriptor( + name="UpdateSnapshot", + full_name="google.pubsub.v1.Subscriber.UpdateSnapshot", + index=13, + containing_service=None, + input_type=_UPDATESNAPSHOTREQUEST, + output_type=_SNAPSHOT, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + "\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*" + ), + ), + ), + _descriptor.MethodDescriptor( + name="DeleteSnapshot", + full_name="google.pubsub.v1.Subscriber.DeleteSnapshot", + index=14, + containing_service=None, + input_type=_DELETESNAPSHOTREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b("\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}"), + ), + ), + _descriptor.MethodDescriptor( + name="Seek", + full_name="google.pubsub.v1.Subscriber.Seek", + 
index=15, + containing_service=None, + input_type=_SEEKREQUEST, + output_type=_SEEKRESPONSE, + options=_descriptor._ParseOptions( + descriptor_pb2.MethodOptions(), + _b( + '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*' + ), + ), + ), + ], +) _sym_db.RegisterServiceDescriptor(_SUBSCRIBER) -DESCRIPTOR.services_by_name['Subscriber'] = _SUBSCRIBER +DESCRIPTOR.services_by_name["Subscriber"] = _SUBSCRIBER # @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index 98f878c83b0a..2a13a9d7b12c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -1,285 +1,288 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! import grpc -from google.cloud.pubsub_v1.proto import pubsub_pb2 as google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2 +from google.cloud.pubsub_v1.proto import ( + pubsub_pb2 as google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2, +) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 class PublisherStub(object): - """The service that an application uses to manipulate topics, and to send + """The service that an application uses to manipulate topics, and to send messages to a topic. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. 
""" - self.CreateTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/CreateTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + self.CreateTopic = channel.unary_unary( + "/google.pubsub.v1.Publisher/CreateTopic", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, ) - self.UpdateTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/UpdateTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + self.UpdateTopic = channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, ) - self.Publish = channel.unary_unary( - '/google.pubsub.v1.Publisher/Publish', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, + self.Publish = channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, ) - self.GetTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/GetTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, 
- response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + self.GetTopic = channel.unary_unary( + "/google.pubsub.v1.Publisher/GetTopic", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, ) - self.ListTopics = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopics', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, + self.ListTopics = channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopics", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, ) - self.ListTopicSubscriptions = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSubscriptions', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + self.ListTopicSubscriptions = channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, ) - self.ListTopicSnapshots = channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSnapshots', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, + self.ListTopicSnapshots = channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, ) - self.DeleteTopic = channel.unary_unary( - '/google.pubsub.v1.Publisher/DeleteTopic', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteTopic = channel.unary_unary( + "/google.pubsub.v1.Publisher/DeleteTopic", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) class PublisherServicer(object): - """The service that an application uses to manipulate topics, and to send + """The service that an application uses to manipulate topics, and to send messages to a topic. """ - def CreateTopic(self, request, context): - """Creates the given topic with the given name. See the + def CreateTopic(self, request, context): + """Creates the given topic with the given name. See the resource name rules. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateTopic(self, request, context): - """Updates an existing topic. Note that certain properties of a + def UpdateTopic(self, request, context): + """Updates an existing topic. 
Note that certain properties of a topic are not modifiable. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + def Publish(self, request, context): + """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic does not exist. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetTopic(self, request, context): - """Gets the configuration of a topic. + def GetTopic(self, request, context): + """Gets the configuration of a topic. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListTopics(self, request, context): - """Lists matching topics. + def ListTopics(self, request, context): + """Lists matching topics. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListTopicSubscriptions(self, request, context): - """Lists the names of the subscriptions on this topic. 
+ def ListTopicSubscriptions(self, request, context): + """Lists the names of the subscriptions on this topic. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListTopicSnapshots(self, request, context): - """Lists the names of the snapshots on this topic.

+ def ListTopicSnapshots(self, request, context): + """Lists the names of the snapshots on this topic.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + def DeleteTopic(self, request, context): + """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic does not exist. After a topic is deleted, a new topic may be created with the same name; this is an entirely new topic with none of the old configuration or subscriptions. Existing subscriptions to this topic are not deleted, but their `topic` field is set to `_deleted-topic_`. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_PublisherServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateTopic': grpc.unary_unary_rpc_method_handler( - servicer.CreateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - 'UpdateTopic': grpc.unary_unary_rpc_method_handler( - servicer.UpdateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - 'Publish': grpc.unary_unary_rpc_method_handler( - servicer.Publish, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, - ), - 'GetTopic': grpc.unary_unary_rpc_method_handler( - servicer.GetTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - 'ListTopics': grpc.unary_unary_rpc_method_handler( - servicer.ListTopics, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, - ), - 'ListTopicSubscriptions': grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSubscriptions, - 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, - ), - 'ListTopicSnapshots': grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.SerializeToString, - ), - 'DeleteTopic': grpc.unary_unary_rpc_method_handler( - servicer.DeleteTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.pubsub.v1.Publisher', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "CreateTopic": grpc.unary_unary_rpc_method_handler( + servicer.CreateTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + ), + "UpdateTopic": grpc.unary_unary_rpc_method_handler( + servicer.UpdateTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + ), + "Publish": grpc.unary_unary_rpc_method_handler( + servicer.Publish, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, + ), + "GetTopic": 
grpc.unary_unary_rpc_method_handler( + servicer.GetTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + ), + "ListTopics": grpc.unary_unary_rpc_method_handler( + servicer.ListTopics, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, + ), + "ListTopicSubscriptions": grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSubscriptions, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, + ), + "ListTopicSnapshots": grpc.unary_unary_rpc_method_handler( + servicer.ListTopicSnapshots, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.SerializeToString, + ), + "DeleteTopic": grpc.unary_unary_rpc_method_handler( + servicer.DeleteTopic, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.pubsub.v1.Publisher", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) class SubscriberStub(object): - """The service that an application uses to manipulate subscriptions and to + """The service that an application uses to manipulate subscriptions and to consume messages from a subscription via the `Pull` method or by establishing a 
bi-directional stream using the `StreamingPull` method. """ - def __init__(self, channel): - """Constructor. + def __init__(self, channel): + """Constructor. Args: channel: A grpc.Channel. """ - self.CreateSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/CreateSubscription', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + self.CreateSubscription = channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, ) - self.GetSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/GetSubscription', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + self.GetSubscription = channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, ) - self.UpdateSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/UpdateSubscription', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + self.UpdateSubscription = channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, ) - self.ListSubscriptions = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ListSubscriptions', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + self.ListSubscriptions = channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, ) - self.DeleteSubscription = channel.unary_unary( - '/google.pubsub.v1.Subscriber/DeleteSubscription', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteSubscription = channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.ModifyAckDeadline = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ModifyAckDeadline', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.ModifyAckDeadline = channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.Acknowledge = channel.unary_unary( - '/google.pubsub.v1.Subscriber/Acknowledge', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.Acknowledge = channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.Pull = channel.unary_unary( - '/google.pubsub.v1.Subscriber/Pull', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, + self.Pull = channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, ) - self.StreamingPull = channel.stream_stream( - '/google.pubsub.v1.Subscriber/StreamingPull', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, + self.StreamingPull = channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + 
response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, ) - self.ModifyPushConfig = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ModifyPushConfig', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.ModifyPushConfig = channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.GetSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/GetSnapshot', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + self.GetSnapshot = channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, ) - self.ListSnapshots = channel.unary_unary( - '/google.pubsub.v1.Subscriber/ListSnapshots', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + self.ListSnapshots = channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, ) - 
self.CreateSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/CreateSnapshot', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + self.CreateSnapshot = channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSnapshot", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, ) - self.UpdateSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/UpdateSnapshot', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + self.UpdateSnapshot = channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSnapshot", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, ) - self.DeleteSnapshot = channel.unary_unary( - '/google.pubsub.v1.Subscriber/DeleteSnapshot', - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + self.DeleteSnapshot = channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) - self.Seek = channel.unary_unary( - '/google.pubsub.v1.Subscriber/Seek', - 
request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, + self.Seek = channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, ) class SubscriberServicer(object): - """The service that an application uses to manipulate subscriptions and to + """The service that an application uses to manipulate subscriptions and to consume messages from a subscription via the `Pull` method or by establishing a bi-directional stream using the `StreamingPull` method. """ - def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. See the + def CreateSubscription(self, request, context): + """Creates a subscription to a given topic. See the resource name rules. If the subscription already exists, returns `ALREADY_EXISTS`. If the corresponding topic doesn't exist, returns `NOT_FOUND`. @@ -291,56 +294,56 @@ def CreateSubscription(self, request, context): The generated name is populated in the returned Subscription object. Note that for REST API requests, you must specify a name in the request. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetSubscription(self, request, context): - """Gets the configuration details of a subscription. + def GetSubscription(self, request, context): + """Gets the configuration details of a subscription. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateSubscription(self, request, context): - """Updates an existing subscription. Note that certain properties of a + def UpdateSubscription(self, request, context): + """Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListSubscriptions(self, request, context): - """Lists matching subscriptions. + def ListSubscriptions(self, request, context): + """Lists matching subscriptions. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteSubscription(self, request, context): - """Deletes an existing subscription. All messages retained in the subscription + def DeleteSubscription(self, request, context): + """Deletes an existing subscription. All messages retained in the subscription are immediately dropped. Calls to `Pull` after deletion will return `NOT_FOUND`. After a subscription is deleted, a new one may be created with the same name, but the new one has no association with the old subscription or its topic unless the same topic is specified. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ModifyAckDeadline(self, request, context): - """Modifies the ack deadline for a specific message. This method is useful + def ModifyAckDeadline(self, request, context): + """Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message by the subscriber, or to make the message available for redelivery if the processing was interrupted. Note that this does not modify the subscription-level `ackDeadlineSeconds` used for subsequent messages. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Acknowledge(self, request, context): - """Acknowledges the messages associated with the `ack_ids` in the + def Acknowledge(self, request, context): + """Acknowledges the messages associated with the `ack_ids` in the `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages from the subscription. @@ -348,21 +351,21 @@ def Acknowledge(self, request, context): but such a message may be redelivered later. Acknowledging a message more than once will not result in an error. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Pull(self, request, context): - """Pulls messages from the server. The server may return `UNAVAILABLE` if + def Pull(self, request, context): + """Pulls messages from the server. The server may return `UNAVAILABLE` if there are too many concurrent pull requests pending for the given subscription. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def StreamingPull(self, request_iterator, context): - """Establishes a stream with the server, which sends messages down to the + def StreamingPull(self, request_iterator, context): + """Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack deadline modifications back to the server. The server will close the stream and return the status on any error. The server may close the stream with status `UNAVAILABLE` to @@ -370,44 +373,44 @@ def StreamingPull(self, request_iterator, context): re-establish the stream. Flow control can be achieved by configuring the underlying RPC channel. 
""" - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ModifyPushConfig(self, request, context): - """Modifies the `PushConfig` for a specified subscription. + def ModifyPushConfig(self, request, context): + """Modifies the `PushConfig` for a specified subscription. This may be used to change a push subscription to a pull one (signified by an empty `PushConfig`) or vice versa, or change the endpoint URL and other attributes of a push subscription. Messages will accumulate for delivery continuously through the call regardless of changes to the `PushConfig`. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def GetSnapshot(self, request, context): - """Gets the configuration details of a snapshot.

+ def GetSnapshot(self, request, context): + """Gets the configuration details of a snapshot.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def ListSnapshots(self, request, context): - """Lists the existing snapshots.

+ def ListSnapshots(self, request, context): + """Lists the existing snapshots.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription.

+ def CreateSnapshot(self, request, context): + """Creates a snapshot from the requested subscription.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy.

@@ -423,23 +426,23 @@ def CreateSnapshot(self, request, context): name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def UpdateSnapshot(self, request, context): - """Updates an existing snapshot.

+ def UpdateSnapshot(self, request, context): + """Updates an existing snapshot.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Note that certain properties of a snapshot are not modifiable. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def DeleteSnapshot(self, request, context): - """Removes an existing snapshot.

+ def DeleteSnapshot(self, request, context): + """Removes an existing snapshot.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -448,105 +451,106 @@ def DeleteSnapshot(self, request, context): created with the same name, but the new one has no association with the old snapshot or its subscription, unless the same subscription is specified. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") - def Seek(self, request, context): - """Seeks an existing subscription to a point in time or to a given snapshot, + def Seek(self, request, context): + """Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request.

ALPHA: This feature is part of an alpha release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_SubscriberServicer_to_server(servicer, server): - rpc_method_handlers = { - 'CreateSubscription': grpc.unary_unary_rpc_method_handler( - servicer.CreateSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - 'GetSubscription': grpc.unary_unary_rpc_method_handler( - servicer.GetSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - 'UpdateSubscription': grpc.unary_unary_rpc_method_handler( - servicer.UpdateSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - 'ListSubscriptions': grpc.unary_unary_rpc_method_handler( - servicer.ListSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, - ), - 'DeleteSubscription': grpc.unary_unary_rpc_method_handler( - servicer.DeleteSubscription, - 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'ModifyAckDeadline': grpc.unary_unary_rpc_method_handler( - servicer.ModifyAckDeadline, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'Acknowledge': grpc.unary_unary_rpc_method_handler( - servicer.Acknowledge, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'Pull': grpc.unary_unary_rpc_method_handler( - servicer.Pull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.SerializeToString, - ), - 'StreamingPull': grpc.stream_stream_rpc_method_handler( - servicer.StreamingPull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, - ), - 'ModifyPushConfig': grpc.unary_unary_rpc_method_handler( - servicer.ModifyPushConfig, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'GetSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.GetSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - 'ListSnapshots': 
grpc.unary_unary_rpc_method_handler( - servicer.ListSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, - ), - 'CreateSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.CreateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - 'UpdateSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.UpdateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - 'DeleteSnapshot': grpc.unary_unary_rpc_method_handler( - servicer.DeleteSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'Seek': grpc.unary_unary_rpc_method_handler( - servicer.Seek, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'google.pubsub.v1.Subscriber', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) + rpc_method_handlers = { + "CreateSubscription": grpc.unary_unary_rpc_method_handler( + servicer.CreateSubscription, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + 
response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, + ), + "GetSubscription": grpc.unary_unary_rpc_method_handler( + servicer.GetSubscription, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, + ), + "UpdateSubscription": grpc.unary_unary_rpc_method_handler( + servicer.UpdateSubscription, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, + ), + "ListSubscriptions": grpc.unary_unary_rpc_method_handler( + servicer.ListSubscriptions, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, + ), + "DeleteSubscription": grpc.unary_unary_rpc_method_handler( + servicer.DeleteSubscription, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "ModifyAckDeadline": grpc.unary_unary_rpc_method_handler( + servicer.ModifyAckDeadline, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "Acknowledge": grpc.unary_unary_rpc_method_handler( + servicer.Acknowledge, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "Pull": 
grpc.unary_unary_rpc_method_handler( + servicer.Pull, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.SerializeToString, + ), + "StreamingPull": grpc.stream_stream_rpc_method_handler( + servicer.StreamingPull, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, + ), + "ModifyPushConfig": grpc.unary_unary_rpc_method_handler( + servicer.ModifyPushConfig, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "GetSnapshot": grpc.unary_unary_rpc_method_handler( + servicer.GetSnapshot, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + "ListSnapshots": grpc.unary_unary_rpc_method_handler( + servicer.ListSnapshots, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, + ), + "CreateSnapshot": grpc.unary_unary_rpc_method_handler( + servicer.CreateSnapshot, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + "UpdateSnapshot": grpc.unary_unary_rpc_method_handler( + servicer.UpdateSnapshot, + 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, + ), + "DeleteSnapshot": grpc.unary_unary_rpc_method_handler( + servicer.DeleteSnapshot, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + "Seek": grpc.unary_unary_rpc_method_handler( + servicer.Seek, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "google.pubsub.v1.Subscriber", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) From ff71baac12763aaccf38ead277217a1cfa07c3ae Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 30 Nov 2018 17:18:37 -0800 Subject: [PATCH 0322/1197] Update noxfile. --- packages/google-cloud-pubsub/noxfile.py | 28 +++++++++++-------------- 1 file changed, 12 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index a9efc0e344ce..bfac9f4c2bce 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -23,40 +23,36 @@ LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) @nox.session(python="3.7") -def blacken(session): - """Run black. +def lint(session): + """Run linters. - Format code to uniform standard. + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. 
""" - session.install("black") + session.install("flake8", "black", *LOCAL_DEPS) session.run( "black", + "--check", "google", "tests", "docs", - "--exclude", - ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", ) + session.run("flake8", "google", "tests") -@nox.session(python="3.7") -def lint(session): - """Run linters. +@nox.session(python="3.6") +def blacken(session): + """Run black. - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. + Format code to uniform standard. """ - session.install("flake8", "black", *LOCAL_DEPS) + session.install("black") session.run( "black", - "--check", "google", "tests", "docs", - "--exclude", - ".*/proto/.*|.*/gapic/.*|.*/.*_pb2.py", ) - session.run("flake8", "google", "tests") @nox.session(python="3.7") From 1c93c5615c090b36f5b230dc5661651cb1239d94 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 3 Dec 2018 13:59:48 -0800 Subject: [PATCH 0323/1197] Use moved iam.policy now at google.api_core.iam.policy (#6741) * update references to iam to use api-core\ * Update dependency to api_core --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 70c7c5c3264a..14d78aa1a1ed 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 4 - Beta' dependencies = [ - 'google-api-core[grpc] >= 1.4.1, < 2.0.0dev', + 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', 'grpc-google-iam-v1 >= 0.11.4, < 0.12dev', 'enum34; python_version < "3.4"', ] From 7731177a828be8e98d0c9ca977565df5b30f9fbb Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 7 Dec 2018 10:08:56 -0800 Subject: [PATCH 0324/1197] Add baseline for synth.metadata --- 
packages/google-cloud-pubsub/synth.metadata | 39 +++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 packages/google-cloud-pubsub/synth.metadata diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata new file mode 100644 index 000000000000..3e979517b2e0 --- /dev/null +++ b/packages/google-cloud-pubsub/synth.metadata @@ -0,0 +1,39 @@ +{ + "updateTime": "2018-12-07T13:23:18.328036Z", + "sources": [ + { + "generator": { + "name": "artman", + "version": "0.16.2", + "dockerImage": "googleapis/artman@sha256:2f6b261ee7fe1aedf238991c93a20b3820de37a343d0cacf3e3e9555c2aaf2ea" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "200fbbe59cc7b0077ae525eb5f3d6420c0c53e61", + "internalRef": "224174014" + } + }, + { + "template": { + "name": "python_library", + "origin": "synthtool.gcp", + "version": "2018.12.6" + } + } + ], + "destinations": [ + { + "client": { + "source": "googleapis", + "apiName": "pubsub", + "apiVersion": "v1", + "language": "python", + "generator": "gapic", + "config": "google/pubsub/artman_pubsub.yaml" + } + } + ] +} \ No newline at end of file From 3929b208262b7289b0d3ea0cccb64b36e7bf8cba Mon Sep 17 00:00:00 2001 From: brunoais Date: Wed, 12 Dec 2018 18:28:10 +0000 Subject: [PATCH 0325/1197] Initialize 'StreamingPullFuture._cancelled' as True (#6901) --- .../google/cloud/pubsub_v1/subscriber/futures.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 11fddf24abd0..f3c06416083b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -30,7 +30,7 @@ def __init__(self, 
manager): super(StreamingPullFuture, self).__init__() self._manager = manager self._manager.add_close_callback(self._on_close_callback) - self._cancelled = True + self._cancelled = False def _on_close_callback(self, manager, result): if result is None: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 4d41713e6ec8..2b4566018f7f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -34,6 +34,7 @@ def test_default_state(self): assert future.running() assert not future.done() + assert not future.cancelled() future._manager.add_close_callback.assert_called_once_with( future._on_close_callback ) From 0a5fbe6edfa75ff66573829ed708618ed1c22942 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 13 Dec 2018 18:02:08 -0500 Subject: [PATCH 0326/1197] Emphasize that returned futures may differ from stdlib futures. (#6875) Closes #6201. --- packages/google-cloud-pubsub/docs/publisher/index.rst | 11 +++++++++-- .../google/cloud/pubsub_v1/publisher/client.py | 5 +++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index bd1d318b4310..16a869925184 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -94,8 +94,15 @@ batch can not exceed 10 megabytes. Futures ------- -Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` will return -a class that conforms to the :class:`~concurrent.futures.Future` interface. +Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` returns +an instance of :class:`google.api_core.future.Future`. + +.. 
note:: + + The returned future conforms for the most part to the interface of + the standard library's :class:`~concurrent.futures.Future`, but might not + be usable in all cases which expect that exact implementation. + + You can use this to ensure that the publish succeeded: + + .. code-block:: python diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 45b230e34eab..76ceb470da24 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -196,8 +196,9 @@ def publish(self, topic, data, **attrs): sent as metadata. (These may be text strings or byte strings.) Returns: - ~concurrent.futures.Future: An object conforming to the - ``concurrent.futures.Future`` interface. + ~google.api_core.future.Future: An object conforming to the + ``concurrent.futures.Future`` interface (but not an instance + of that class). """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. From 95037aa6079902b9f35611eaf5af15697e67e6cf Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Fri, 14 Dec 2018 12:25:37 -0800 Subject: [PATCH 0327/1197] Document Python 2 deprecation (#6910) --- packages/google-cloud-pubsub/README.rst | 9 +++++++++ packages/google-cloud-pubsub/setup.py | 2 ++ 2 files changed, 11 insertions(+) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index c0ef2a2f4034..e9599751e4a0 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -58,6 +58,15 @@ dependencies. .. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ +Supported Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^ +Python >= 3.4 + +Deprecated Python Versions +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Python == 2.7.
Python 2.7 support will be removed on January 1, 2020. + + Mac/Linux ^^^^^^^^^ diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 14d78aa1a1ed..5ef5da0d908b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -77,6 +77,7 @@ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Operating System :: OS Independent', 'Topic :: Internet', ], @@ -85,6 +86,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, + python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', include_package_data=True, zip_safe=False, ) From df88bde05c2cd2ac3bf60a570990af44fe0668f1 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 18 Dec 2018 13:32:57 -0800 Subject: [PATCH 0328/1197] Release pubsub 0.39.1 (#6949) * Release 0.39.1 --- packages/google-cloud-pubsub/CHANGELOG.md | 22 ++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 36e815ae1950..4d07ae909e2c 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.39.1 + +12-17-2018 16:57 PST + + +### Implementation Changes +- Initialize `StreamingPullFuture._cancelled` as True. ([#6901](https://github.com/googleapis/google-cloud-python/pull/6901)) +- Import `iam.policy` from `google.api_core`. ([#6741](https://github.com/googleapis/google-cloud-python/pull/6741)) + +### Documentation +- Document Python 2 deprecation ([#6910](https://github.com/googleapis/google-cloud-python/pull/6910)) +- Emphasize that returned futures may differ from stdlib futures. 
([#6875](https://github.com/googleapis/google-cloud-python/pull/6875)) + +### Internal / Testing Changes +- Add baseline for synth.metadata +- Update noxfile. +- blacken all gen'd libs ([#6792](https://github.com/googleapis/google-cloud-python/pull/6792)) +- omit local deps ([#6701](https://github.com/googleapis/google-cloud-python/pull/6701)) +- Run black at end of synth.py ([#6698](https://github.com/googleapis/google-cloud-python/pull/6698)) +- Run Black on Generated libraries ([#6666](https://github.com/googleapis/google-cloud-python/pull/6666)) +- Add templates for flake8, coveragerc, noxfile, and black. ([#6642](https://github.com/googleapis/google-cloud-python/pull/6642)) + ## 0.39.0 11-27-2018 13:32 PST diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 5ef5da0d908b..ee6ab845a790 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.39.0' +version = '0.39.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 93aae0b8f9a15aef767253adeac33683f80f7033 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 4 Jan 2019 10:18:07 -0800 Subject: [PATCH 0329/1197] Pick up stub docstring fix in GAPIC generator. 
(#6978) --- .../transports/publisher_grpc_transport.py | 20 +++++------ .../transports/subscriber_grpc_transport.py | 36 +++++++++---------- packages/google-cloud-pubsub/synth.metadata | 10 +++--- 3 files changed, 33 insertions(+), 33 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 77016f503d9e..6d2ce126e776 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -102,7 +102,7 @@ def channel(self): @property def create_topic(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.create_topic`. Creates the given topic with the given name. See the resource name rules. @@ -116,7 +116,7 @@ def create_topic(self): @property def update_topic(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.update_topic`. Updates an existing topic. Note that certain properties of a topic are not modifiable. @@ -130,7 +130,7 @@ def update_topic(self): @property def publish(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.publish`. Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. @@ -144,7 +144,7 @@ def publish(self): @property def get_topic(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.get_topic`. Gets the configuration of a topic. @@ -157,7 +157,7 @@ def get_topic(self): @property def list_topics(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.list_topics`. Lists matching topics. 
@@ -170,7 +170,7 @@ def list_topics(self): @property def list_topic_subscriptions(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.list_topic_subscriptions`. Lists the names of the subscriptions on this topic. @@ -183,7 +183,7 @@ def list_topic_subscriptions(self): @property def delete_topic(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.delete_topic`. Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic does not exist. After a topic is deleted, a new topic may be @@ -201,7 +201,7 @@ def delete_topic(self): @property def set_iam_policy(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.set_iam_policy`. Sets the access control policy on the specified resource. Replaces any existing policy. @@ -215,7 +215,7 @@ def set_iam_policy(self): @property def get_iam_policy(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.get_iam_policy`. Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy @@ -230,7 +230,7 @@ def get_iam_policy(self): @property def test_iam_permissions(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`PublisherClient.test_iam_permissions`. Returns permissions that a caller has on the specified resource. 
If the resource does not exist, this will return an empty set of permissions, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index ddcb075a9d58..717c04a407e4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -102,7 +102,7 @@ def channel(self): @property def create_subscription(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.create_subscription`. Creates a subscription to a given topic. See the resource name rules. If the subscription already exists, returns ``ALREADY_EXISTS``. If the @@ -124,7 +124,7 @@ def create_subscription(self): @property def get_subscription(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.get_subscription`. Gets the configuration details of a subscription. @@ -137,7 +137,7 @@ def get_subscription(self): @property def update_subscription(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.update_subscription`. Updates an existing subscription. Note that certain properties of a subscription, such as its topic, are not modifiable. @@ -151,7 +151,7 @@ def update_subscription(self): @property def list_subscriptions(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.list_subscriptions`. Lists matching subscriptions. @@ -164,7 +164,7 @@ def list_subscriptions(self): @property def delete_subscription(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.delete_subscription`. Deletes an existing subscription. 
All messages retained in the subscription are immediately dropped. Calls to ``Pull`` after deletion @@ -182,7 +182,7 @@ def delete_subscription(self): @property def modify_ack_deadline(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.modify_ack_deadline`. Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message by the @@ -199,7 +199,7 @@ def modify_ack_deadline(self): @property def acknowledge(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.acknowledge`. Acknowledges the messages associated with the ``ack_ids`` in the ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant @@ -218,7 +218,7 @@ def acknowledge(self): @property def pull(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.pull`. Pulls messages from the server. The server may return ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given @@ -233,7 +233,7 @@ def pull(self): @property def streaming_pull(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.streaming_pull`. Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack deadline @@ -252,7 +252,7 @@ def streaming_pull(self): @property def modify_push_config(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.modify_push_config`. Modifies the ``PushConfig`` for a specified subscription. @@ -271,7 +271,7 @@ def modify_push_config(self): @property def list_snapshots(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.list_snapshots`. Lists the existing snapshots.

ALPHA: This feature is part of an alpha release. This API might be @@ -287,7 +287,7 @@ def list_snapshots(self): @property def create_snapshot(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.create_snapshot`. Creates a snapshot from the requested subscription. ALPHA: This feature is part of an alpha release. This API might be changed in @@ -313,7 +313,7 @@ def create_snapshot(self): @property def update_snapshot(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.update_snapshot`. Updates an existing snapshot.

ALPHA: This feature is part of an alpha release. This API might be @@ -330,7 +330,7 @@ def update_snapshot(self): @property def delete_snapshot(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.delete_snapshot`. Removes an existing snapshot.

ALPHA: This feature is part of an alpha release. This API might be @@ -350,7 +350,7 @@ def delete_snapshot(self): @property def seek(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.seek`. Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request.

@@ -367,7 +367,7 @@ def seek(self): @property def set_iam_policy(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.set_iam_policy`. Sets the access control policy on the specified resource. Replaces any existing policy. @@ -381,7 +381,7 @@ def set_iam_policy(self): @property def get_iam_policy(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.get_iam_policy`. Gets the access control policy for a resource. Returns an empty policy if the resource exists and does not have a policy @@ -396,7 +396,7 @@ def get_iam_policy(self): @property def test_iam_permissions(self): - """Return the gRPC stub for {$apiMethod.name}. + """Return the gRPC stub for :meth:`SubscriberClient.test_iam_permissions`. Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of permissions, diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 3e979517b2e0..600c1135d368 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2018-12-07T13:23:18.328036Z", + "updateTime": "2018-12-18T13:23:31.222196Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.2", - "dockerImage": "googleapis/artman@sha256:2f6b261ee7fe1aedf238991c93a20b3820de37a343d0cacf3e3e9555c2aaf2ea" + "version": "0.16.3", + "dockerImage": "googleapis/artman@sha256:bfb92654b4a77368471f70e2808eaf4e60f263b9559f27bb3284097322787bf1" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "200fbbe59cc7b0077ae525eb5f3d6420c0c53e61", - "internalRef": "224174014" + "sha": "c04bc0dc0a9164d924a9ab923fd6845b4ae6a7ab", + "internalRef": "225851467" } }, { From 7b290b7046a1a9ffe930f5438994556a761000c4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 4 
Jan 2019 15:54:28 -0500 Subject: [PATCH 0330/1197] Pub/sub: pass transport w/ custom channel to GAPIC API clients. (#7008) Silences pending deprecation warnings for passing channel. Closes #6887. --- .../cloud/pubsub_v1/publisher/client.py | 27 ++++++++++------- .../cloud/pubsub_v1/subscriber/client.py | 29 ++++++++++++------- .../publisher/test_publisher_client.py | 13 +++++++++ .../subscriber/test_subscriber_client.py | 10 ++++++- 4 files changed, 58 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 76ceb470da24..b837de24c6f0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -27,6 +27,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.gapic import publisher_client +from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport from google.cloud.pubsub_v1.publisher._batch import thread @@ -73,16 +74,22 @@ def __init__(self, batch_settings=(), **kwargs): # Use a custom channel. # We need this in order to set appropriate default message size and # keepalive options. 
- if "channel" not in kwargs: - kwargs["channel"] = grpc_helpers.create_channel( - credentials=kwargs.pop("credentials", None), - target=self.target, - scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) + if "transport" not in kwargs: + channel = kwargs.pop("channel", None) + if channel is None: + channel = grpc_helpers.create_channel( + credentials=kwargs.pop("credentials", None), + target=self.target, + scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) + # cannot pass both 'channel' and 'credentials' + kwargs.pop("credentials", None) + transport = publisher_grpc_transport.PublisherGrpcTransport(channel=channel) + kwargs["transport"] = transport # Add the metrics headers, and instantiate the underlying GAPIC # client. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index b50a269e99f0..0540333ad8ea 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -25,6 +25,7 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.gapic import subscriber_client +from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager @@ -66,17 +67,25 @@ def __init__(self, **kwargs): # Use a custom channel. # We need this in order to set appropriate default message size and # keepalive options. 
- if "channel" not in kwargs: - kwargs["channel"] = grpc_helpers.create_channel( - credentials=kwargs.pop("credentials", None), - target=self.target, - scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - "grpc.keepalive_time_ms": 30000, - }.items(), + if "transport" not in kwargs: + channel = kwargs.pop("channel", None) + if channel is None: + channel = grpc_helpers.create_channel( + credentials=kwargs.pop("credentials", None), + target=self.target, + scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + "grpc.keepalive_time_ms": 30000, + }.items(), + ) + # cannot pass both 'channel' and 'credentials' + kwargs.pop("credentials", None) + transport = subscriber_grpc_transport.SubscriberGrpcTransport( + channel=channel ) + kwargs["transport"] = transport # Add the metrics headers, and instantiate the underlying GAPIC # client. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index a141e1f12187..05e4c8c67209 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -36,6 +36,19 @@ def test_init(): assert client.batch_settings.max_messages == 1000 +def test_init_w_custom_transport(): + transport = object() + client = publisher.Client(transport=transport) + + # A plain client should have an `api` (the underlying GAPIC) and a + # batch settings object, which should have the defaults. 
+ assert isinstance(client.api, publisher_client.PublisherClient) + assert client.api.transport is transport + assert client.batch_settings.max_bytes == 10 * 1000 * 1000 + assert client.batch_settings.max_latency == 0.05 + assert client.batch_settings.max_messages == 1000 + + def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar/") # NOTE: When the emulator host is set, a custom channel will be used, so diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 5acd5b6f8dd7..d4914fee8f5b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -16,6 +16,7 @@ import mock from google.cloud.pubsub_v1 import subscriber +from google.cloud.pubsub_v1.gapic import subscriber_client from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures @@ -23,7 +24,14 @@ def test_init(): creds = mock.Mock(spec=credentials.Credentials) client = subscriber.Client(credentials=creds) - assert client.api is not None + assert isinstance(client.api, subscriber_client.SubscriberClient) + + +def test_init_w_custom_transport(): + transport = object() + client = subscriber.Client(transport=transport) + assert isinstance(client.api, subscriber_client.SubscriberClient) + assert client.api.transport is transport def test_init_emulator(monkeypatch): From e5aed7505bf31bbccdacfeb9aa328364579451d3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Sat, 5 Jan 2019 10:02:06 -0800 Subject: [PATCH 0331/1197] Docstring changes from updates to .proto files. 
(#7054) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 3 +- .../pubsub_v1/gapic/subscriber_client.py | 91 +++++++++----- .../transports/subscriber_grpc_transport.py | 56 ++++++--- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 117 ++++++++++-------- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 80 ++++++++---- packages/google-cloud-pubsub/synth.metadata | 10 +- 6 files changed, 228 insertions(+), 129 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 7c4da4ab6e01..d857be8d04e0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -223,7 +223,8 @@ def create_topic( underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters in length, and it must not start with `"goog"`. - labels (dict[str -> str]): See Creating and managing labels. + labels (dict[str -> str]): See Creating and + managing labels. message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining how messages published to the topic may be stored. It is determined when the topic is created based on the policy configured at the project level. 
It must not be set by the caller in the request to diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 67375a9387c9..5c4dd174def6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -237,9 +237,9 @@ def create_subscription( If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the `resource name - format `__. The - generated name is populated in the returned Subscription object. Note - that for REST API requests, you must specify a name in the request. + format `__. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. Example: >>> from google.cloud import pubsub_v1 @@ -267,11 +267,11 @@ def create_subscription( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` - ack_deadline_seconds (int): This value is the maximum time after a subscriber receives a message - before the subscriber should acknowledge the message. After message - delivery but before the ack deadline expires and before the message is - acknowledged, it is an outstanding message and will not be delivered - again during that time (on a best-effort basis). + ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits + for the subscriber to acknowledge receipt before resending the message. + In the interval after the message is delivered and before it is + acknowledged, it is considered to be outstanding. During that time + period, the message will not be redelivered (on a best-effort basis). 
For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a given message, call @@ -290,23 +290,25 @@ def create_subscription( retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the - ``message_retention_duration`` window. ALPHA: This feature is part of an - alpha release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + ``message_retention_duration`` window. This must be true if you would + like to Seek to a timestamp. BETA: This feature is part of a beta + release. This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any SLA or + deprecation policy. message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 - days or less than 10 minutes. ALPHA: This feature is part of an alpha + days or less than 10 minutes. BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Duration` - labels (dict[str -> str]): See Creating and managing labels. + labels (dict[str -> str]): See Creating and + managing labels. 
expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or @@ -680,9 +682,11 @@ def modify_ack_deadline( ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to the Pub/Sub system. For example, if the value is 10, the new ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call was - made. Specifying zero may immediately make the message available for - another pull request. The minimum deadline you can specify is 0 seconds. - The maximum deadline you can specify is 600 seconds (10 minutes). + made. Specifying zero might immediately make the message available for + delivery to another subscriber client. This typically results in an + increase in the rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The maximum deadline you + can specify is 600 seconds (10 minutes). retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -1015,8 +1019,13 @@ def list_snapshots( metadata=None, ): """ - Lists the existing snapshots.

- ALPHA: This feature is part of an alpha release. This API might be + Lists the existing snapshots. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -1108,8 +1117,11 @@ def create_snapshot( metadata=None, ): """ - Creates a snapshot from the requested subscription. ALPHA: This feature - is part of an alpha release. This API might be changed in + Creates a snapshot from the requested subscription. Snapshots are used + in Seek operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages in an + existing subscription to the state captured by a snapshot. BETA: This + feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. If the snapshot already exists, returns ``ALREADY_EXISTS``. If the requested subscription @@ -1119,9 +1131,9 @@ def create_snapshot( ``Snapshot.expire_time`` field. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the `resource name - format `__. The - generated name is populated in the returned Snapshot object. Note that - for REST API requests, you must specify a name in the request. + format `__. + The generated name is populated in the returned Snapshot object. Note + that for REST API requests, you must specify a name in the request. Example: >>> from google.cloud import pubsub_v1 @@ -1147,7 +1159,8 @@ def create_snapshot( messages published to the subscription's topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. - labels (dict[str -> str]): See Creating and managing labels. + labels (dict[str -> str]): See Creating and + managing labels. 
retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -1194,8 +1207,13 @@ def update_snapshot( metadata=None, ): """ - Updates an existing snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Note that certain properties of a snapshot are not modifiable. @@ -1269,8 +1287,13 @@ def delete_snapshot( metadata=None, ): """ - Removes an existing snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + Removes an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. When the snapshot is deleted, all messages retained in the snapshot @@ -1333,8 +1356,14 @@ def seek( ): """ Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request.

- ALPHA: This feature is part of an alpha release. This API might be + whichever is provided in the request. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. Note that both the subscription and the snapshot + must be on the same topic.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 717c04a407e4..ac8f5f096b64 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -111,9 +111,9 @@ def create_subscription(self): If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the `resource name - format `__. The - generated name is populated in the returned Subscription object. Note - that for REST API requests, you must specify a name in the request. + format `__. + The generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the request. Returns: Callable: A callable which accepts the appropriate @@ -273,8 +273,13 @@ def modify_push_config(self): def list_snapshots(self): """Return the gRPC stub for :meth:`SubscriberClient.list_snapshots`. - Lists the existing snapshots.

- ALPHA: This feature is part of an alpha release. This API might be + Lists the existing snapshots. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -289,8 +294,11 @@ def list_snapshots(self): def create_snapshot(self): """Return the gRPC stub for :meth:`SubscriberClient.create_snapshot`. - Creates a snapshot from the requested subscription. ALPHA: This feature - is part of an alpha release. This API might be changed in + Creates a snapshot from the requested subscription. Snapshots are used + in Seek operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages in an + existing subscription to the state captured by a snapshot. BETA: This + feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. If the snapshot already exists, returns ``ALREADY_EXISTS``. If the requested subscription @@ -300,9 +308,9 @@ def create_snapshot(self): ``Snapshot.expire_time`` field. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the `resource name - format `__. The - generated name is populated in the returned Snapshot object. Note that - for REST API requests, you must specify a name in the request. + format `__. + The generated name is populated in the returned Snapshot object. Note + that for REST API requests, you must specify a name in the request. Returns: Callable: A callable which accepts the appropriate @@ -315,8 +323,13 @@ def create_snapshot(self): def update_snapshot(self): """Return the gRPC stub for :meth:`SubscriberClient.update_snapshot`. - Updates an existing snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Note that certain properties of a snapshot are not modifiable. @@ -332,8 +345,13 @@ def update_snapshot(self): def delete_snapshot(self): """Return the gRPC stub for :meth:`SubscriberClient.delete_snapshot`. - Removes an existing snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + Removes an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. When the snapshot is deleted, all messages retained in the snapshot @@ -353,8 +371,14 @@ def seek(self): """Return the gRPC stub for :meth:`SubscriberClient.seek`. Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request.

- ALPHA: This feature is part of an alpha release. This API might be + whichever is provided in the request. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. Note that both the subscription and the snapshot + must be on the same topic.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 50f0a4b791a9..495d1f56362e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -27,7 +27,7 @@ package="google.pubsub.v1", syntax="proto3", serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 
\x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 
\x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{
subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/su
bscriptions/*}:seek:\x01*B\x92\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{
subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/su
bscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -2916,9 +2916,9 @@ Attributes: allowed_persistence_regions: - The list of GCP regions where messages that are published to - the topic may be persisted in storage. Messages published by - publishers running in non-allowed GCP regions (or running + The list of GCP region IDs where messages that are published + to the topic may be persisted in storage. Messages published + by publishers running in non-allowed GCP regions (or running outside of GCP altogether) will be routed for storage in one of the allowed regions. An empty list indicates a misconfiguration at the project or organization level, which @@ -2990,8 +2990,10 @@ __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""A message that is published by publishers and consumed by subscribers. The message must contain either a non-empty data field or at least one - attribute. See Quotas and limits for more information about message - limits. + attribute. Note that client libraries represent this object differently + depending on the language. See the corresponding client library + documentation for more information. See Quotas and limits for more + information about message limits. Attributes: @@ -3204,8 +3206,8 @@ dict( DESCRIPTOR=_LISTTOPICSNAPSHOTSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListTopicSnapshots`` method. ALPHA: This feature is - part of an alpha release. This API might be changed in + __doc__="""Request for the ``ListTopicSnapshots`` method. BETA: This feature is + part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. 
It is not subject to any SLA or deprecation policy. @@ -3233,8 +3235,8 @@ dict( DESCRIPTOR=_LISTTOPICSNAPSHOTSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListTopicSnapshots`` method. ALPHA: This feature is - part of an alpha release. This API might be changed in + __doc__="""Response for the ``ListTopicSnapshots`` method. BETA: This feature is + part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -3308,16 +3310,16 @@ used to configure it. An empty ``pushConfig`` signifies that the subscriber will pull and ack messages using API methods. ack_deadline_seconds: - This value is the maximum time after a subscriber receives a - message before the subscriber should acknowledge the message. - After message delivery but before the ack deadline expires and - before the message is acknowledged, it is an outstanding - message and will not be delivered again during that time (on a - best-effort basis). For pull subscriptions, this value is - used as the initial value for the ack deadline. To override - this value for a given message, call ``ModifyAckDeadline`` - with the corresponding ``ack_id`` if using non-streaming pull - or send the ``ack_id`` in a + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt before + resending the message. In the interval after the message is + delivered and before it is acknowledged, it is considered to + be outstanding. During that time period, the message will not + be redelivered (on a best-effort basis). For pull + subscriptions, this value is used as the initial value for the + ack deadline. 
To override this value for a given message, call + ``ModifyAckDeadline`` with the corresponding ``ack_id`` if + using non-streaming pull or send the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The minimum custom deadline you can specify is 10 seconds. The maximum custom deadline you can specify is 600 seconds (10 @@ -3330,8 +3332,9 @@ Indicates whether to retain acknowledged messages. If true, then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of - the ``message_retention_duration`` window. ALPHA: This feature - is part of an alpha release. This API might be changed in + the ``message_retention_duration`` window. This must be true + if you would like to Seek to a timestamp. BETA: This feature + is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -3342,7 +3345,7 @@ also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. ALPHA: This feature is part of an alpha release. This + minutes. BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -3662,11 +3665,12 @@ The new ack deadline with respect to the time this request was sent to the Pub/Sub system. For example, if the value is 10, the new ack deadline will expire 10 seconds after the - ``ModifyAckDeadline`` call was made. Specifying zero may - immediately make the message available for another pull - request. The minimum deadline you can specify is 0 seconds. - The maximum deadline you can specify is 600 seconds (10 - minutes). + ``ModifyAckDeadline`` call was made. 
Specifying zero might + immediately make the message available for delivery to another + subscriber client. This typically results in an increase in + the rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The maximum + deadline you can specify is 600 seconds (10 minutes). """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) ), @@ -3789,10 +3793,10 @@ ), DESCRIPTOR=_CREATESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``CreateSnapshot`` method. ALPHA: This feature is part - of an alpha release. This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not subject to any - SLA or deprecation policy. + __doc__="""Request for the ``CreateSnapshot`` method. BETA: This feature is part of + a beta release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. Attributes: @@ -3828,8 +3832,8 @@ dict( DESCRIPTOR=_UPDATESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the UpdateSnapshot method. ALPHA: This feature is part of an - alpha release. This API might be changed in backward-incompatible ways + __doc__="""Request for the UpdateSnapshot method. BETA: This feature is part of a + beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -3861,9 +3865,12 @@ ), DESCRIPTOR=_SNAPSHOT, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A snapshot resource. ALPHA: This feature is part of an alpha release. - This API might be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA or + __doc__="""A snapshot resource. 
Snapshots are used in Seek operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the + state captured by a snapshot. BETA: This feature is part of a beta + release. This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any SLA or deprecation policy. @@ -3901,10 +3908,10 @@ dict( DESCRIPTOR=_GETSNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the GetSnapshot method. ALPHA: This feature is part of an - alpha release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + __doc__="""Request for the GetSnapshot method. BETA: This feature is part of a beta + release. This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any SLA or + deprecation policy. Attributes: @@ -3923,10 +3930,10 @@ dict( DESCRIPTOR=_LISTSNAPSHOTSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListSnapshots`` method. ALPHA: This feature is part of - an alpha release. This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not subject to any - SLA or deprecation policy. + __doc__="""Request for the ``ListSnapshots`` method. BETA: This feature is part of + a beta release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. Attributes: @@ -3952,10 +3959,10 @@ dict( DESCRIPTOR=_LISTSNAPSHOTSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListSnapshots`` method. ALPHA: This feature is part - of an alpha release. 
This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not subject to any - SLA or deprecation policy. + __doc__="""Response for the ``ListSnapshots`` method. BETA: This feature is part of + a beta release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. Attributes: @@ -3977,10 +3984,10 @@ dict( DESCRIPTOR=_DELETESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``DeleteSnapshot`` method. ALPHA: This feature is part - of an alpha release. This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not subject to any - SLA or deprecation policy. + __doc__="""Request for the ``DeleteSnapshot`` method. BETA: This feature is part of + a beta release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. Attributes: @@ -3999,7 +4006,7 @@ dict( DESCRIPTOR=_SEEKREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``Seek`` method. ALPHA: This feature is part of an alpha + __doc__="""Request for the ``Seek`` method. BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. 
@@ -4049,7 +4056,7 @@ DESCRIPTOR._options = _descriptor._ParseOptions( descriptor_pb2.FileOptions(), _b( - "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1" + "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), ) _TOPIC_LABELSENTRY.has_options = True diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index 2a13a9d7b12c..7668377d78de 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -67,7 +67,8 @@ class PublisherServicer(object): def CreateTopic(self, request, context): """Creates the given topic with the given name. See the - resource name rules. + + resource name rules. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -111,8 +112,13 @@ def ListTopicSubscriptions(self, request, context): raise NotImplementedError("Method not implemented!") def ListTopicSnapshots(self, request, context): - """Lists the names of the snapshots on this topic.

- ALPHA: This feature is part of an alpha release. This API might be + """Lists the names of the snapshots on this topic. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ @@ -283,14 +289,15 @@ class SubscriberServicer(object): def CreateSubscription(self, request, context): """Creates a subscription to a given topic. See the - resource name rules. + + resource name rules. If the subscription already exists, returns `ALREADY_EXISTS`. If the corresponding topic doesn't exist, returns `NOT_FOUND`. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the - [resource name format](https://cloud.google.com/pubsub/docs/overview#names). + [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). The generated name is populated in the returned Subscription object. Note that for REST API requests, you must specify a name in the request. """ @@ -390,8 +397,12 @@ def ModifyPushConfig(self, request, context): raise NotImplementedError("Method not implemented!") def GetSnapshot(self, request, context): - """Gets the configuration details of a snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + """Gets the configuration details of a snapshot. Snapshots are used in + Seek + operations, which allow you to manage message acknowledgments in bulk. That + is, you can set the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ @@ -400,8 +411,13 @@ def GetSnapshot(self, request, context): raise NotImplementedError("Method not implemented!") def ListSnapshots(self, request, context): - """Lists the existing snapshots.

- ALPHA: This feature is part of an alpha release. This API might be + """Lists the existing snapshots. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ @@ -410,8 +426,14 @@ def ListSnapshots(self, request, context): raise NotImplementedError("Method not implemented!") def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription.

- ALPHA: This feature is part of an alpha release. This API might be + """Creates a snapshot from the requested subscription. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. +

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy.

If the snapshot already exists, returns `ALREADY_EXISTS`. @@ -421,18 +443,23 @@ def CreateSnapshot(self, request, context): See also the `Snapshot.expire_time` field. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming - to the [resource name format](https://cloud.google.com/pubsub/docs/overview#names). - The generated - name is populated in the returned Snapshot object. Note that for REST API - requests, you must specify a name in the request. + to the + [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). + The generated name is populated in the returned Snapshot object. Note that + for REST API requests, you must specify a name in the request. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateSnapshot(self, request, context): - """Updates an existing snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + """Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. Note that certain properties of a snapshot are not modifiable. @@ -442,8 +469,13 @@ def UpdateSnapshot(self, request, context): raise NotImplementedError("Method not implemented!") def DeleteSnapshot(self, request, context): - """Removes an existing snapshot.

- ALPHA: This feature is part of an alpha release. This API might be + """Removes an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. When the snapshot is deleted, all messages retained in the snapshot @@ -457,8 +489,14 @@ def DeleteSnapshot(self, request, context): def Seek(self, request, context): """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request.

- ALPHA: This feature is part of an alpha release. This API might be + whichever is provided in the request. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. Note that both the subscription and the snapshot + must be on the same topic.

+ BETA: This feature is part of a beta release. This API might be changed in backward-incompatible ways and is not recommended for production use. It is not subject to any SLA or deprecation policy. """ diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 600c1135d368..84db296a1c2a 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2018-12-18T13:23:31.222196Z", + "updateTime": "2019-01-05T13:24:39.145322Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.3", - "dockerImage": "googleapis/artman@sha256:bfb92654b4a77368471f70e2808eaf4e60f263b9559f27bb3284097322787bf1" + "version": "0.16.4", + "dockerImage": "googleapis/artman@sha256:8b45fae963557c3299921037ecbb86f0689f41b1b4aea73408ebc50562cb2857" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "c04bc0dc0a9164d924a9ab923fd6845b4ae6a7ab", - "internalRef": "225851467" + "sha": "a111a53c0c6722afcd793b64724ceef7862db5b9", + "internalRef": "227896184" } }, { From 5eb90f4eed64e1993d5ee27b8b60869fd40d364d Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Wed, 9 Jan 2019 12:07:33 -0800 Subject: [PATCH 0332/1197] Protoc-generated serialization update.. 
(#7091) --- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 479 ++++++++---------- packages/google-cloud-pubsub/synth.metadata | 10 +- 2 files changed, 209 insertions(+), 280 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 495d1f56362e..09a4c8ae8226 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -8,7 +8,6 @@ from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) @@ -26,6 +25,9 @@ name="google/cloud/pubsub_v1/proto/pubsub.proto", package="google.pubsub.v1", syntax="proto3", + serialized_options=_b( + "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" + ), serialized_pb=_b( '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 
\x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t 
\x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 
\x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4
\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.p
ubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), @@ -61,14 +63,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -100,7 +102,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -118,14 +120,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -156,7 +158,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -174,7 +176,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -192,14 +194,14 @@ containing_type=None, is_extension=False, extension_scope=None, - 
options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_TOPIC_LABELSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -231,7 +233,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -249,14 +251,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -287,7 +289,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -305,7 +307,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -323,7 +325,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -341,14 +343,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -380,14 +382,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -419,7 +421,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -437,14 +439,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -476,7 +478,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -494,14 +496,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -533,14 +535,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -572,7 +574,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -590,7 +592,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -608,14 +610,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -647,7 +649,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -665,14 +667,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -704,7 +706,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -722,7 +724,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -740,14 +742,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -779,7 +781,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -797,14 +799,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -836,7 +838,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -854,7 +856,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -872,14 +874,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], 
nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -911,7 +913,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -929,14 +931,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -968,14 +970,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1007,7 +1009,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1025,14 +1027,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1063,7 +1065,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1081,7 +1083,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1099,7 +1101,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( 
@@ -1117,7 +1119,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1135,7 +1137,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1153,7 +1155,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1171,7 +1173,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1189,14 +1191,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_SUBSCRIPTION_LABELSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1228,14 +1230,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1267,7 +1269,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1285,14 +1287,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1323,7 +1325,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, 
+ serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1341,14 +1343,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1380,7 +1382,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1398,14 +1400,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1437,14 +1439,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1476,7 +1478,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1494,14 +1496,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1533,7 +1535,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1551,7 +1553,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -1569,14 +1571,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1608,7 +1610,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1626,14 +1628,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1665,14 +1667,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1704,7 +1706,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1722,14 +1724,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1761,7 +1763,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1779,7 +1781,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1797,14 +1799,14 @@ containing_type=None, 
is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1836,14 +1838,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1875,7 +1877,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1893,7 +1895,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1911,14 +1913,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1950,7 +1952,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1968,14 +1970,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2007,7 +2009,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2025,7 +2027,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + 
serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2043,7 +2045,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2061,7 +2063,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2079,14 +2081,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2118,14 +2120,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2157,7 +2159,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2175,14 +2177,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2213,7 +2215,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2231,7 +2233,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2249,14 +2251,14 @@ containing_type=None, 
is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2288,7 +2290,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2306,14 +2308,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2345,7 +2347,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2363,14 +2365,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b("8\001")), + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2401,7 +2403,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2419,7 +2421,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2437,7 +2439,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2455,14 +2457,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, 
file=DESCRIPTOR, ), ], extensions=[], nested_types=[_SNAPSHOT_LABELSENTRY], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2494,14 +2496,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2533,7 +2535,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2551,7 +2553,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2569,14 +2571,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2608,7 +2610,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2626,14 +2628,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2665,14 +2667,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ) ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2704,7 +2706,7 @@ containing_type=None, 
is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2722,7 +2724,7 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2740,14 +2742,14 @@ containing_type=None, is_extension=False, extension_scope=None, - options=None, + serialized_options=None, file=DESCRIPTOR, ), ], extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2775,7 +2777,7 @@ extensions=[], nested_types=[], enum_types=[], - options=None, + serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], @@ -4052,44 +4054,20 @@ _sym_db.RegisterMessage(SeekResponse) -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions( - descriptor_pb2.FileOptions(), - _b( - "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" - ), -) -_TOPIC_LABELSENTRY.has_options = True -_TOPIC_LABELSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_PUBSUBMESSAGE_ATTRIBUTESENTRY.has_options = True -_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_SUBSCRIPTION_LABELSENTRY.has_options = True -_SUBSCRIPTION_LABELSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_PUSHCONFIG_ATTRIBUTESENTRY.has_options = True -_PUSHCONFIG_ATTRIBUTESENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) -_CREATESNAPSHOTREQUEST_LABELSENTRY.has_options = True -_CREATESNAPSHOTREQUEST_LABELSENTRY._options = _descriptor._ParseOptions( - 
descriptor_pb2.MessageOptions(), _b("8\001") -) -_SNAPSHOT_LABELSENTRY.has_options = True -_SNAPSHOT_LABELSENTRY._options = _descriptor._ParseOptions( - descriptor_pb2.MessageOptions(), _b("8\001") -) +DESCRIPTOR._options = None +_TOPIC_LABELSENTRY._options = None +_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = None +_SUBSCRIPTION_LABELSENTRY._options = None +_PUSHCONFIG_ATTRIBUTESENTRY._options = None +_CREATESNAPSHOTREQUEST_LABELSENTRY._options = None +_SNAPSHOT_LABELSENTRY._options = None _PUBLISHER = _descriptor.ServiceDescriptor( name="Publisher", full_name="google.pubsub.v1.Publisher", file=DESCRIPTOR, index=0, - options=None, + serialized_options=None, serialized_start=4170, serialized_end=5257, methods=[ @@ -4100,9 +4078,8 @@ containing_service=None, input_type=_TOPIC, output_type=_TOPIC, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*"), + serialized_options=_b( + "\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*" ), ), _descriptor.MethodDescriptor( @@ -4112,9 +4089,8 @@ containing_service=None, input_type=_UPDATETOPICREQUEST, output_type=_TOPIC, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*"), + serialized_options=_b( + "\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*" ), ), _descriptor.MethodDescriptor( @@ -4124,11 +4100,8 @@ containing_service=None, input_type=_PUBLISHREQUEST, output_type=_PUBLISHRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*" - ), + serialized_options=_b( + "\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*" ), ), _descriptor.MethodDescriptor( @@ -4138,9 +4111,8 @@ containing_service=None, input_type=_GETTOPICREQUEST, output_type=_TOPIC, - options=_descriptor._ParseOptions( - 
descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}"), + serialized_options=_b( + "\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}" ), ), _descriptor.MethodDescriptor( @@ -4150,9 +4122,8 @@ containing_service=None, input_type=_LISTTOPICSREQUEST, output_type=_LISTTOPICSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics"), + serialized_options=_b( + "\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics" ), ), _descriptor.MethodDescriptor( @@ -4162,11 +4133,8 @@ containing_service=None, input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions" - ), + serialized_options=_b( + "\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions" ), ), _descriptor.MethodDescriptor( @@ -4176,11 +4144,8 @@ containing_service=None, input_type=_LISTTOPICSNAPSHOTSREQUEST, output_type=_LISTTOPICSNAPSHOTSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots" - ), + serialized_options=_b( + "\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots" ), ), _descriptor.MethodDescriptor( @@ -4190,9 +4155,8 @@ containing_service=None, input_type=_DELETETOPICREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}"), + serialized_options=_b( + "\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}" ), ), ], @@ -4207,7 +4171,7 @@ full_name="google.pubsub.v1.Subscriber", file=DESCRIPTOR, index=1, - options=None, + serialized_options=None, serialized_start=5260, serialized_end=7557, 
methods=[ @@ -4218,11 +4182,8 @@ containing_service=None, input_type=_SUBSCRIPTION, output_type=_SUBSCRIPTION, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*" - ), + serialized_options=_b( + "\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*" ), ), _descriptor.MethodDescriptor( @@ -4232,11 +4193,8 @@ containing_service=None, input_type=_GETSUBSCRIPTIONREQUEST, output_type=_SUBSCRIPTION, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}" - ), + serialized_options=_b( + "\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}" ), ), _descriptor.MethodDescriptor( @@ -4246,11 +4204,8 @@ containing_service=None, input_type=_UPDATESUBSCRIPTIONREQUEST, output_type=_SUBSCRIPTION, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*" - ), + serialized_options=_b( + "\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*" ), ), _descriptor.MethodDescriptor( @@ -4260,9 +4215,8 @@ containing_service=None, input_type=_LISTSUBSCRIPTIONSREQUEST, output_type=_LISTSUBSCRIPTIONSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions"), + serialized_options=_b( + "\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions" ), ), _descriptor.MethodDescriptor( @@ -4272,11 +4226,8 @@ containing_service=None, input_type=_DELETESUBSCRIPTIONREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}" - ), + serialized_options=_b( + 
"\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}" ), ), _descriptor.MethodDescriptor( @@ -4286,11 +4237,8 @@ containing_service=None, input_type=_MODIFYACKDEADLINEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*' - ), + serialized_options=_b( + '\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*' ), ), _descriptor.MethodDescriptor( @@ -4300,11 +4248,8 @@ containing_service=None, input_type=_ACKNOWLEDGEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*' - ), + serialized_options=_b( + '\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*' ), ), _descriptor.MethodDescriptor( @@ -4314,11 +4259,8 @@ containing_service=None, input_type=_PULLREQUEST, output_type=_PULLRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*' - ), + serialized_options=_b( + '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*' ), ), _descriptor.MethodDescriptor( @@ -4328,7 +4270,7 @@ containing_service=None, input_type=_STREAMINGPULLREQUEST, output_type=_STREAMINGPULLRESPONSE, - options=None, + serialized_options=None, ), _descriptor.MethodDescriptor( name="ModifyPushConfig", @@ -4337,11 +4279,8 @@ containing_service=None, input_type=_MODIFYPUSHCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*' - ), + 
serialized_options=_b( + '\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*' ), ), _descriptor.MethodDescriptor( @@ -4351,9 +4290,8 @@ containing_service=None, input_type=_GETSNAPSHOTREQUEST, output_type=_SNAPSHOT, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}"), + serialized_options=_b( + "\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}" ), ), _descriptor.MethodDescriptor( @@ -4363,9 +4301,8 @@ containing_service=None, input_type=_LISTSNAPSHOTSREQUEST, output_type=_LISTSNAPSHOTSRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b('\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots'), + serialized_options=_b( + '\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots' ), ), _descriptor.MethodDescriptor( @@ -4375,9 +4312,8 @@ containing_service=None, input_type=_CREATESNAPSHOTREQUEST, output_type=_SNAPSHOT, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*"), + serialized_options=_b( + "\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*" ), ), _descriptor.MethodDescriptor( @@ -4387,11 +4323,8 @@ containing_service=None, input_type=_UPDATESNAPSHOTREQUEST, output_type=_SNAPSHOT, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - "\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*" - ), + serialized_options=_b( + "\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*" ), ), _descriptor.MethodDescriptor( @@ -4401,9 +4334,8 @@ containing_service=None, input_type=_DELETESNAPSHOTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b("\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}"), + serialized_options=_b( 
+ "\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}" ), ), _descriptor.MethodDescriptor( @@ -4413,11 +4345,8 @@ containing_service=None, input_type=_SEEKREQUEST, output_type=_SEEKRESPONSE, - options=_descriptor._ParseOptions( - descriptor_pb2.MethodOptions(), - _b( - '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*' - ), + serialized_options=_b( + '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*' ), ), ], diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 84db296a1c2a..c0016ada8407 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-05T13:24:39.145322Z", + "updateTime": "2019-01-09T13:25:55.011967Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.4", - "dockerImage": "googleapis/artman@sha256:8b45fae963557c3299921037ecbb86f0689f41b1b4aea73408ebc50562cb2857" + "version": "0.16.5", + "dockerImage": "googleapis/artman@sha256:5a96c2c5c6f9570cc9556b63dc9ce1838777fd9166b5b64e43ad8e0ecee2fe2c" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "a111a53c0c6722afcd793b64724ceef7862db5b9", - "internalRef": "227896184" + "sha": "659d66ec24bf40b35a41a0b79218d96ba3add3d3", + "internalRef": "228437827" } }, { From 53f5993bcc9eca8087293dd8429f2802f8c842d5 Mon Sep 17 00:00:00 2001 From: Daniel Thorn Date: Fri, 11 Jan 2019 12:47:04 -0800 Subject: [PATCH 0333/1197] Propagate 'RetryError' in 'PublisherClient.publish'. 
(#7071) --- .../pubsub_v1/publisher/_batch/thread.py | 2 +- .../pubsub_v1/publisher/batch/test_thread.py | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index f187024b7cf7..d3fd0d956a90 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -199,7 +199,7 @@ def _commit(self): try: response = self._client.api.publish(self._topic, self._messages) - except google.api_core.exceptions.GoogleAPICallError as exc: + except google.api_core.exceptions.GoogleAPIError as exc: # We failed to publish, set the exception on all futures and # exit. self._status = base.BatchStatus.ERROR diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index af04f865dd40..d323c2ed2d24 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -221,6 +221,25 @@ def test_block__commmit_api_error(): assert future.exception() == error +def test_block__commmit_retry_error(): + batch = create_batch() + futures = ( + batch.publish({"data": b"blah blah blah"}), + batch.publish({"data": b"blah blah blah blah"}), + ) + + # Make the API throw an error when publishing. 
+ error = google.api_core.exceptions.RetryError("uh oh", None) + patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error) + + with patch: + batch._commit() + + for future in futures: + assert future.done() + assert future.exception() == error + + def test_monitor(): batch = create_batch(max_latency=5.0) with mock.patch.object(time, "sleep") as sleep: From b3d2ffb4728f50ca91963908eb66c9f1c69c2f4f Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 17 Jan 2019 09:22:14 -0800 Subject: [PATCH 0334/1197] Fix broken docstring cross-reference links. (#7132) --- .../google/cloud/pubsub_v1/subscriber/client.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 0540333ad8ea..35b05a022a12 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -186,14 +186,14 @@ def callback(message): subscription (str): The name of the subscription. The subscription should have already been created (for example, by using :meth:`create_subscription`). - callback (Callable[~.pubsub_v1.subscriber.message.Message]): + callback (Callable[~google.cloud.pubsub_v1.subscriber.message.Message]): The callback function. This function receives the message as its only argument and will be called from a different thread/ process depending on the scheduling strategy. - flow_control (~.pubsub_v1.types.FlowControl): The flow control + flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow control settings. Use this to prevent situations where you are inundated with too many messages at once. - scheduler (~.pubsub_v1.subscriber.scheduler.Scheduler): An optional + scheduler (~google.cloud.pubsub_v1.subscriber.scheduler.Scheduler): An optional *scheduler* to use when executing the callback. 
This controls how callbacks are executed concurrently. From 73d8bc08978338b80ecbaaa70de7cabd981102b9 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Thu, 17 Jan 2019 15:33:56 -0800 Subject: [PATCH 0335/1197] Update copyright headers --- packages/google-cloud-pubsub/google/__init__.py | 2 +- .../google-cloud-pubsub/google/cloud/__init__.py | 2 +- packages/google-cloud-pubsub/google/cloud/pubsub.py | 2 +- .../google/cloud/pubsub_v1/gapic/publisher_client.py | 2 +- .../cloud/pubsub_v1/gapic/subscriber_client.py | 2 +- .../gapic/transports/publisher_grpc_transport.py | 2 +- .../gapic/transports/subscriber_grpc_transport.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 12 ++++++------ .../system/gapic/v1/test_system_publisher_v1.py | 2 +- .../tests/unit/gapic/v1/test_publisher_client_v1.py | 2 +- .../tests/unit/gapic/v1/test_subscriber_client_v1.py | 2 +- 11 files changed, 16 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py index 7aa71bb4e2f1..8fcc60e2b9c6 100644 --- a/packages/google-cloud-pubsub/google/__init__.py +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index 7aa71bb4e2f1..8fcc60e2b9c6 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index 2c249504d7b3..7b2c75158878 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index d857be8d04e0..43a24555a577 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 5c4dd174def6..fa739e689f53 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 6d2ce126e776..01d65362e387 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index ac8f5f096b64..3bf052b25c3c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index c0016ada8407..a855bbbfe298 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-01-09T13:25:55.011967Z", + "updateTime": "2019-01-17T13:24:31.789706Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.5", - "dockerImage": "googleapis/artman@sha256:5a96c2c5c6f9570cc9556b63dc9ce1838777fd9166b5b64e43ad8e0ecee2fe2c" + "version": "0.16.6", + "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "659d66ec24bf40b35a41a0b79218d96ba3add3d3", - "internalRef": "228437827" + "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05", + "internalRef": "229626798" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2018.12.6" + "version": "2019.1.16" } } ], diff --git a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py index 7723419da509..9c0d2b33ad11 100644 --- a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index 5d6d3029e5d3..c7dbae01da07 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index 70d3222b98bc..c95feddf5ea3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2019 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From c9206f0402842ccaf6c08c29002d599c0d38e4fb Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 29 Jan 2019 13:28:49 -0800 Subject: [PATCH 0336/1197] Add protos as an artifact to library (#7205) --- .../google/cloud/pubsub_v1/proto/pubsub.proto | 384 ++++++++++++++++++ packages/google-cloud-pubsub/synth.metadata | 10 +- packages/google-cloud-pubsub/synth.py | 5 +- 3 files changed, 393 insertions(+), 6 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto new file mode 100644 index 000000000000..70dd1556a281 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -0,0 +1,384 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.pubsub.v1beta2; + +import "google/protobuf/empty.proto"; + +option go_package = "google.golang.org/genproto/googleapis/pubsub/v1beta2;pubsub"; +option java_multiple_files = true; +option java_outer_classname = "PubsubProto"; +option java_package = "com.google.pubsub.v1beta2"; + + +// The service that an application uses to manipulate subscriptions and to +// consume messages from a subscription via the Pull method. +service Subscriber { + // Creates a subscription to a given topic for a given subscriber. 
+ // If the subscription already exists, returns ALREADY_EXISTS. + // If the corresponding topic doesn't exist, returns NOT_FOUND. + // + // If the name is not provided in the request, the server will assign a random + // name for this subscription on the same project as the topic. + rpc CreateSubscription(Subscription) returns (Subscription); + + // Gets the configuration details of a subscription. + rpc GetSubscription(GetSubscriptionRequest) returns (Subscription); + + // Lists matching subscriptions. + rpc ListSubscriptions(ListSubscriptionsRequest) returns (ListSubscriptionsResponse); + + // Deletes an existing subscription. All pending messages in the subscription + // are immediately dropped. Calls to Pull after deletion will return + // NOT_FOUND. After a subscription is deleted, a new one may be created with + // the same name, but the new one has no association with the old + // subscription, or its topic unless the same topic is specified. + rpc DeleteSubscription(DeleteSubscriptionRequest) returns (google.protobuf.Empty); + + // Modifies the ack deadline for a specific message. This method is useful to + // indicate that more time is needed to process a message by the subscriber, + // or to make the message available for redelivery if the processing was + // interrupted. + rpc ModifyAckDeadline(ModifyAckDeadlineRequest) returns (google.protobuf.Empty); + + // Acknowledges the messages associated with the ack tokens in the + // AcknowledgeRequest. The Pub/Sub system can remove the relevant messages + // from the subscription. + // + // Acknowledging a message whose ack deadline has expired may succeed, + // but such a message may be redelivered later. Acknowledging a message more + // than once will not result in an error. + rpc Acknowledge(AcknowledgeRequest) returns (google.protobuf.Empty); + + // Pulls messages from the server. Returns an empty list if there are no + // messages available in the backlog. 
The server may return UNAVAILABLE if + // there are too many concurrent pull requests pending for the given + // subscription. + rpc Pull(PullRequest) returns (PullResponse); + + // Modifies the PushConfig for a specified subscription. + // + // This may be used to change a push subscription to a pull one (signified + // by an empty PushConfig) or vice versa, or change the endpoint URL and other + // attributes of a push subscription. Messages will accumulate for + // delivery continuously through the call regardless of changes to the + // PushConfig. + rpc ModifyPushConfig(ModifyPushConfigRequest) returns (google.protobuf.Empty); +} + +// The service that an application uses to manipulate topics, and to send +// messages to a topic. +service Publisher { + // Creates the given topic with the given name. + rpc CreateTopic(Topic) returns (Topic); + + // Adds one or more messages to the topic. Returns NOT_FOUND if the topic does + // not exist. + rpc Publish(PublishRequest) returns (PublishResponse); + + // Gets the configuration of a topic. + rpc GetTopic(GetTopicRequest) returns (Topic); + + // Lists matching topics. + rpc ListTopics(ListTopicsRequest) returns (ListTopicsResponse); + + // Lists the name of the subscriptions for this topic. + rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse); + + // Deletes the topic with the given name. Returns NOT_FOUND if the topic does + // not exist. After a topic is deleted, a new topic may be created with the + // same name; this is an entirely new topic with none of the old + // configuration or subscriptions. Existing subscriptions to this topic are + // not deleted. + rpc DeleteTopic(DeleteTopicRequest) returns (google.protobuf.Empty); +} + +// A topic resource. +message Topic { + // Name of the topic. + string name = 1; +} + +// A message data and its attributes. +message PubsubMessage { + // The message payload. 
For JSON requests, the value of this field must be + // base64-encoded. + bytes data = 1; + + // Optional attributes for this message. + map attributes = 2; + + // ID of this message assigned by the server at publication time. Guaranteed + // to be unique within the topic. This value may be read by a subscriber + // that receives a PubsubMessage via a Pull call or a push delivery. It must + // not be populated by a publisher in a Publish call. + string message_id = 3; +} + +// Request for the GetTopic method. +message GetTopicRequest { + // The name of the topic to get. + string topic = 1; +} + +// Request for the Publish method. +message PublishRequest { + // The messages in the request will be published on this topic. + string topic = 1; + + // The messages to publish. + repeated PubsubMessage messages = 2; +} + +// Response for the Publish method. +message PublishResponse { + // The server-assigned ID of each published message, in the same order as + // the messages in the request. IDs are guaranteed to be unique within + // the topic. + repeated string message_ids = 1; +} + +// Request for the ListTopics method. +message ListTopicsRequest { + // The name of the cloud project that topics belong to. + string project = 1; + + // Maximum number of topics to return. + int32 page_size = 2; + + // The value returned by the last ListTopicsResponse; indicates that this is + // a continuation of a prior ListTopics call, and that the system should + // return the next page of data. + string page_token = 3; +} + +// Response for the ListTopics method. +message ListTopicsResponse { + // The resulting topics. + repeated Topic topics = 1; + + // If not empty, indicates that there may be more topics that match the + // request; this value should be passed in a new ListTopicsRequest. + string next_page_token = 2; +} + +// Request for the ListTopicSubscriptions method. +message ListTopicSubscriptionsRequest { + // The name of the topic that subscriptions are attached to. 
+ string topic = 1; + + // Maximum number of subscription names to return. + int32 page_size = 2; + + // The value returned by the last ListTopicSubscriptionsResponse; indicates + // that this is a continuation of a prior ListTopicSubscriptions call, and + // that the system should return the next page of data. + string page_token = 3; +} + +// Response for the ListTopicSubscriptions method. +message ListTopicSubscriptionsResponse { + // The names of the subscriptions that match the request. + repeated string subscriptions = 1; + + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new + // ListTopicSubscriptionsRequest to get more subscriptions. + string next_page_token = 2; +} + +// Request for the DeleteTopic method. +message DeleteTopicRequest { + // Name of the topic to delete. + string topic = 1; +} + +// A subscription resource. +message Subscription { + // Name of the subscription. + string name = 1; + + // The name of the topic from which this subscription is receiving messages. + // This will be present if and only if the subscription has not been detached + // from its topic. + string topic = 2; + + // If push delivery is used with this subscription, this field is + // used to configure it. An empty pushConfig signifies that the subscriber + // will pull and ack messages using API methods. + PushConfig push_config = 4; + + // This value is the maximum time after a subscriber receives a message + // before the subscriber should acknowledge the message. After message + // delivery but before the ack deadline expires and before the message is + // acknowledged, it is an outstanding message and will not be delivered + // again during that time (on a best-effort basis). + // + // For pull delivery this value + // is used as the initial value for the ack deadline. It may be overridden + // for a specific message by calling ModifyAckDeadline. 
+ // + // For push delivery, this value is also used to set the request timeout for + // the call to the push endpoint. + // + // If the subscriber never acknowledges the message, the Pub/Sub + // system will eventually redeliver the message. + int32 ack_deadline_seconds = 5; +} + +// Configuration for a push delivery endpoint. +message PushConfig { + // A URL locating the endpoint to which messages should be pushed. + // For example, a Webhook endpoint might use "https://example.com/push". + string push_endpoint = 1; + + // Endpoint configuration attributes. + // + // Every endpoint has a set of API supported attributes that can be used to + // control different aspects of the message delivery. + // + // The currently supported attribute is `x-goog-version`, which you can + // use to change the format of the push message. This attribute + // indicates the version of the data expected by the endpoint. This + // controls the shape of the envelope (i.e. its fields and metadata). + // The endpoint version is based on the version of the Pub/Sub + // API. + // + // If not present during the CreateSubscription call, it will default to + // the version of the API used to make such call. If not present during a + // ModifyPushConfig call, its value will not be changed. GetSubscription + // calls will always return a valid version, even if the subscription was + // created without this attribute. + // + // The possible values for this attribute are: + // + // * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API. + // * `v1beta2`: uses the push format defined in the v1beta2 Pub/Sub API. + // + map attributes = 2; +} + +// A message and its corresponding acknowledgment ID. +message ReceivedMessage { + // This ID can be used to acknowledge the received message. + string ack_id = 1; + + // The message. + PubsubMessage message = 2; +} + +// Request for the GetSubscription method. +message GetSubscriptionRequest { + // The name of the subscription to get. 
+ string subscription = 1; +} + +// Request for the ListSubscriptions method. +message ListSubscriptionsRequest { + // The name of the cloud project that subscriptions belong to. + string project = 1; + + // Maximum number of subscriptions to return. + int32 page_size = 2; + + // The value returned by the last ListSubscriptionsResponse; indicates that + // this is a continuation of a prior ListSubscriptions call, and that the + // system should return the next page of data. + string page_token = 3; +} + +// Response for the ListSubscriptions method. +message ListSubscriptionsResponse { + // The subscriptions that match the request. + repeated Subscription subscriptions = 1; + + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new ListSubscriptionsRequest + // to get more subscriptions. + string next_page_token = 2; +} + +// Request for the DeleteSubscription method. +message DeleteSubscriptionRequest { + // The subscription to delete. + string subscription = 1; +} + +// Request for the ModifyPushConfig method. +message ModifyPushConfigRequest { + // The name of the subscription. + string subscription = 1; + + // The push configuration for future deliveries. + // + // An empty pushConfig indicates that the Pub/Sub system should + // stop pushing messages from the given subscription and allow + // messages to be pulled and acknowledged - effectively pausing + // the subscription if Pull is not called. + PushConfig push_config = 2; +} + +// Request for the Pull method. +message PullRequest { + // The subscription from which messages should be pulled. + string subscription = 1; + + // If this is specified as true the system will respond immediately even if + // it is not able to return a message in the Pull response. Otherwise the + // system is allowed to wait until at least one message is available rather + // than returning no messages. 
The client may cancel the request if it does + // not wish to wait any longer for the response. + bool return_immediately = 2; + + // The maximum number of messages returned for this request. The Pub/Sub + // system may return fewer than the number specified. + int32 max_messages = 3; +} + +// Response for the Pull method. +message PullResponse { + // Received Pub/Sub messages. The Pub/Sub system will return zero messages if + // there are no more available in the backlog. The Pub/Sub system may return + // fewer than the maxMessages requested even if there are more messages + // available in the backlog. + repeated ReceivedMessage received_messages = 1; +} + +// Request for the ModifyAckDeadline method. +message ModifyAckDeadlineRequest { + // The name of the subscription. + string subscription = 1; + + // The acknowledgment ID. + string ack_id = 2; + + // The new ack deadline with respect to the time this request was sent to the + // Pub/Sub system. Must be >= 0. For example, if the value is 10, the new ack + // deadline will expire 10 seconds after the ModifyAckDeadline call was made. + // Specifying zero may immediately make the message available for another pull + // request. + int32 ack_deadline_seconds = 3; +} + +// Request for the Acknowledge method. +message AcknowledgeRequest { + // The subscription whose message is being acknowledged. + string subscription = 1; + + // The acknowledgment ID for the messages being acknowledged that was returned + // by the Pub/Sub system in the Pull response. Must not be empty. 
+ repeated string ack_ids = 2; +} diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index a855bbbfe298..c966ed982898 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-01-17T13:24:31.789706Z", + "updateTime": "2019-01-24T17:13:22.929911Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.6", - "dockerImage": "googleapis/artman@sha256:12722f2ca3fbc3b53cc6aa5f0e569d7d221b46bd876a2136497089dec5e3634e" + "version": "0.16.7", + "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0ac60e21a1aa86c07c1836865b35308ba8178b05", - "internalRef": "229626798" + "sha": "9aac88a22468b1e291937f55fa1ef237adfdc63e", + "internalRef": "230568136" } }, { diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 50fc92a07a9a..b568e148b060 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -27,7 +27,10 @@ # Generate pubsub GAPIC layer # ---------------------------------------------------------------------------- library = gapic.py_library( - "pubsub", version, config_path="/google/pubsub/artman_pubsub.yaml" + "pubsub", + version, + config_path="/google/pubsub/artman_pubsub.yaml", + include_protos=True, ) s.move( library, From 52796af6c3fd595929a776901457c479510c5916 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 1 Feb 2019 10:36:17 -0800 Subject: [PATCH 0337/1197] Copy proto files alongside protoc versions. 
--- .../google/cloud/pubsub_v1/proto/pubsub.proto | 883 +++++++++++++++--- packages/google-cloud-pubsub/synth.metadata | 6 +- 2 files changed, 751 insertions(+), 138 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 70dd1556a281..02d0bf34b3ee 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -1,4 +1,4 @@ -// Copyright (c) 2015, Google Inc. +// Copyright 2018 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,139 +11,427 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. +// syntax = "proto3"; -package google.pubsub.v1beta2; +package google.pubsub.v1; +import "google/api/annotations.proto"; +import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; -option go_package = "google.golang.org/genproto/googleapis/pubsub/v1beta2;pubsub"; +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.PubSub.V1"; +option go_package = "google.golang.org/genproto/googleapis/pubsub/v1;pubsub"; option java_multiple_files = true; option java_outer_classname = "PubsubProto"; -option java_package = "com.google.pubsub.v1beta2"; +option java_package = "com.google.pubsub.v1"; +option php_namespace = "Google\\Cloud\\PubSub\\V1"; +option ruby_package = "Google::Cloud::PubSub::V1"; + +// The service that an application uses to manipulate topics, and to send +// messages to a topic. +service Publisher { + // Creates the given topic with the given name. See the + // + // resource name rules. 
+ rpc CreateTopic(Topic) returns (Topic) { + option (google.api.http) = { + put: "/v1/{name=projects/*/topics/*}" + body: "*" + }; + } + + // Updates an existing topic. Note that certain properties of a + // topic are not modifiable. + rpc UpdateTopic(UpdateTopicRequest) returns (Topic) { + option (google.api.http) = { + patch: "/v1/{topic.name=projects/*/topics/*}" + body: "*" + }; + } + + // Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic + // does not exist. + rpc Publish(PublishRequest) returns (PublishResponse) { + option (google.api.http) = { + post: "/v1/{topic=projects/*/topics/*}:publish" + body: "*" + }; + } + + // Gets the configuration of a topic. + rpc GetTopic(GetTopicRequest) returns (Topic) { + option (google.api.http) = { + get: "/v1/{topic=projects/*/topics/*}" + }; + } + + // Lists matching topics. + rpc ListTopics(ListTopicsRequest) returns (ListTopicsResponse) { + option (google.api.http) = { + get: "/v1/{project=projects/*}/topics" + }; + } + + // Lists the names of the subscriptions on this topic. + rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse) { + option (google.api.http) = { + get: "/v1/{topic=projects/*/topics/*}/subscriptions" + }; + } + + // Lists the names of the snapshots on this topic. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { + option (google.api.http) = { + get: "/v1/{topic=projects/*/topics/*}/snapshots" + }; + } + + // Deletes the topic with the given name. Returns `NOT_FOUND` if the topic + // does not exist. After a topic is deleted, a new topic may be created with + // the same name; this is an entirely new topic with none of the old + // configuration or subscriptions. Existing subscriptions to this topic are + // not deleted, but their `topic` field is set to `_deleted-topic_`. + rpc DeleteTopic(DeleteTopicRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{topic=projects/*/topics/*}" + }; + } +} // The service that an application uses to manipulate subscriptions and to -// consume messages from a subscription via the Pull method. +// consume messages from a subscription via the `Pull` method or by +// establishing a bi-directional stream using the `StreamingPull` method. service Subscriber { - // Creates a subscription to a given topic for a given subscriber. - // If the subscription already exists, returns ALREADY_EXISTS. - // If the corresponding topic doesn't exist, returns NOT_FOUND. + // Creates a subscription to a given topic. See the + // + // resource name rules. + // If the subscription already exists, returns `ALREADY_EXISTS`. + // If the corresponding topic doesn't exist, returns `NOT_FOUND`. // // If the name is not provided in the request, the server will assign a random - // name for this subscription on the same project as the topic. 
- rpc CreateSubscription(Subscription) returns (Subscription); + // name for this subscription on the same project as the topic, conforming + // to the + // [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). + // The generated name is populated in the returned Subscription object. + // Note that for REST API requests, you must specify a name in the request. + rpc CreateSubscription(Subscription) returns (Subscription) { + option (google.api.http) = { + put: "/v1/{name=projects/*/subscriptions/*}" + body: "*" + }; + } // Gets the configuration details of a subscription. - rpc GetSubscription(GetSubscriptionRequest) returns (Subscription); + rpc GetSubscription(GetSubscriptionRequest) returns (Subscription) { + option (google.api.http) = { + get: "/v1/{subscription=projects/*/subscriptions/*}" + }; + } + + // Updates an existing subscription. Note that certain properties of a + // subscription, such as its topic, are not modifiable. + rpc UpdateSubscription(UpdateSubscriptionRequest) returns (Subscription) { + option (google.api.http) = { + patch: "/v1/{subscription.name=projects/*/subscriptions/*}" + body: "*" + }; + } // Lists matching subscriptions. - rpc ListSubscriptions(ListSubscriptionsRequest) returns (ListSubscriptionsResponse); - - // Deletes an existing subscription. All pending messages in the subscription - // are immediately dropped. Calls to Pull after deletion will return - // NOT_FOUND. After a subscription is deleted, a new one may be created with + rpc ListSubscriptions(ListSubscriptionsRequest) returns (ListSubscriptionsResponse) { + option (google.api.http) = { + get: "/v1/{project=projects/*}/subscriptions" + }; + } + + // Deletes an existing subscription. All messages retained in the subscription + // are immediately dropped. Calls to `Pull` after deletion will return + // `NOT_FOUND`. 
After a subscription is deleted, a new one may be created with // the same name, but the new one has no association with the old - // subscription, or its topic unless the same topic is specified. - rpc DeleteSubscription(DeleteSubscriptionRequest) returns (google.protobuf.Empty); - - // Modifies the ack deadline for a specific message. This method is useful to - // indicate that more time is needed to process a message by the subscriber, - // or to make the message available for redelivery if the processing was - // interrupted. - rpc ModifyAckDeadline(ModifyAckDeadlineRequest) returns (google.protobuf.Empty); - - // Acknowledges the messages associated with the ack tokens in the - // AcknowledgeRequest. The Pub/Sub system can remove the relevant messages + // subscription or its topic unless the same topic is specified. + rpc DeleteSubscription(DeleteSubscriptionRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{subscription=projects/*/subscriptions/*}" + }; + } + + // Modifies the ack deadline for a specific message. This method is useful + // to indicate that more time is needed to process a message by the + // subscriber, or to make the message available for redelivery if the + // processing was interrupted. Note that this does not modify the + // subscription-level `ackDeadlineSeconds` used for subsequent messages. + rpc ModifyAckDeadline(ModifyAckDeadlineRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline" + body: "*" + }; + } + + // Acknowledges the messages associated with the `ack_ids` in the + // `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages // from the subscription. // // Acknowledging a message whose ack deadline has expired may succeed, // but such a message may be redelivered later. Acknowledging a message more // than once will not result in an error. 
- rpc Acknowledge(AcknowledgeRequest) returns (google.protobuf.Empty); - - // Pulls messages from the server. Returns an empty list if there are no - // messages available in the backlog. The server may return UNAVAILABLE if + rpc Acknowledge(AcknowledgeRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1/{subscription=projects/*/subscriptions/*}:acknowledge" + body: "*" + }; + } + + // Pulls messages from the server. The server may return `UNAVAILABLE` if // there are too many concurrent pull requests pending for the given // subscription. - rpc Pull(PullRequest) returns (PullResponse); - - // Modifies the PushConfig for a specified subscription. + rpc Pull(PullRequest) returns (PullResponse) { + option (google.api.http) = { + post: "/v1/{subscription=projects/*/subscriptions/*}:pull" + body: "*" + }; + } + + // Establishes a stream with the server, which sends messages down to the + // client. The client streams acknowledgements and ack deadline modifications + // back to the server. The server will close the stream and return the status + // on any error. The server may close the stream with status `UNAVAILABLE` to + // reassign server-side resources, in which case, the client should + // re-establish the stream. Flow control can be achieved by configuring the + // underlying RPC channel. + rpc StreamingPull(stream StreamingPullRequest) returns (stream StreamingPullResponse) { + } + + // Modifies the `PushConfig` for a specified subscription. // - // This may be used to change a push subscription to a pull one (signified - // by an empty PushConfig) or vice versa, or change the endpoint URL and other - // attributes of a push subscription. Messages will accumulate for - // delivery continuously through the call regardless of changes to the - // PushConfig. 
- rpc ModifyPushConfig(ModifyPushConfigRequest) returns (google.protobuf.Empty); + // This may be used to change a push subscription to a pull one (signified by + // an empty `PushConfig`) or vice versa, or change the endpoint URL and other + // attributes of a push subscription. Messages will accumulate for delivery + // continuously through the call regardless of changes to the `PushConfig`. + rpc ModifyPushConfig(ModifyPushConfigRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig" + body: "*" + }; + } + + // Gets the configuration details of a snapshot. Snapshots are used in + // Seek + // operations, which allow you to manage message acknowledgments in bulk. That + // is, you can set the acknowledgment state of messages in an existing + // subscription to the state captured by a snapshot.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + rpc GetSnapshot(GetSnapshotRequest) returns (Snapshot) { + option (google.api.http) = { + get: "/v1/{snapshot=projects/*/snapshots/*}" + }; + } + + // Lists the existing snapshots. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + rpc ListSnapshots(ListSnapshotsRequest) returns (ListSnapshotsResponse) { + option (google.api.http) = { + get: "/v1/{project=projects/*}/snapshots" + }; + } + + // Creates a snapshot from the requested subscription. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. + //

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy.

+ // If the snapshot already exists, returns `ALREADY_EXISTS`. + // If the requested subscription doesn't exist, returns `NOT_FOUND`. + // If the backlog in the subscription is too old -- and the resulting snapshot + // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. + // See also the `Snapshot.expire_time` field. If the name is not provided in + // the request, the server will assign a random + // name for this snapshot on the same project as the subscription, conforming + // to the + // [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). + // The generated name is populated in the returned Snapshot object. Note that + // for REST API requests, you must specify a name in the request. + rpc CreateSnapshot(CreateSnapshotRequest) returns (Snapshot) { + option (google.api.http) = { + put: "/v1/{name=projects/*/snapshots/*}" + body: "*" + }; + } + + // Updates an existing snapshot. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + // Note that certain properties of a snapshot are not modifiable. + rpc UpdateSnapshot(UpdateSnapshotRequest) returns (Snapshot) { + option (google.api.http) = { + patch: "/v1/{snapshot.name=projects/*/snapshots/*}" + body: "*" + }; + } + + // Removes an existing snapshot. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + // When the snapshot is deleted, all messages retained in the snapshot + // are immediately dropped. After a snapshot is deleted, a new one may be + // created with the same name, but the new one has no association with the old + // snapshot or its subscription, unless the same subscription is specified. + rpc DeleteSnapshot(DeleteSnapshotRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{snapshot=projects/*/snapshots/*}" + }; + } + + // Seeks an existing subscription to a point in time or to a given snapshot, + // whichever is provided in the request. Snapshots are used in + // Seek + // operations, which allow + // you to manage message acknowledgments in bulk. That is, you can set the + // acknowledgment state of messages in an existing subscription to the state + // captured by a snapshot. Note that both the subscription and the snapshot + // must be on the same topic.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + rpc Seek(SeekRequest) returns (SeekResponse) { + option (google.api.http) = { + post: "/v1/{subscription=projects/*/subscriptions/*}:seek" + body: "*" + }; + } } -// The service that an application uses to manipulate topics, and to send -// messages to a topic. -service Publisher { - // Creates the given topic with the given name. - rpc CreateTopic(Topic) returns (Topic); - - // Adds one or more messages to the topic. Returns NOT_FOUND if the topic does - // not exist. - rpc Publish(PublishRequest) returns (PublishResponse); - - // Gets the configuration of a topic. - rpc GetTopic(GetTopicRequest) returns (Topic); - - // Lists matching topics. - rpc ListTopics(ListTopicsRequest) returns (ListTopicsResponse); - - // Lists the name of the subscriptions for this topic. - rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse); - - // Deletes the topic with the given name. Returns NOT_FOUND if the topic does - // not exist. After a topic is deleted, a new topic may be created with the - // same name; this is an entirely new topic with none of the old - // configuration or subscriptions. Existing subscriptions to this topic are - // not deleted. - rpc DeleteTopic(DeleteTopicRequest) returns (google.protobuf.Empty); +message MessageStoragePolicy { + // The list of GCP region IDs where messages that are published to the topic + // may be persisted in storage. Messages published by publishers running in + // non-allowed GCP regions (or running outside of GCP altogether) will be + // routed for storage in one of the allowed regions. An empty list indicates a + // misconfiguration at the project or organization level, which will result in + // all Publish operations failing. 
+ repeated string allowed_persistence_regions = 1; } // A topic resource. message Topic { - // Name of the topic. + // The name of the topic. It must have the format + // `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, + // and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), + // underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent + // signs (`%`). It must be between 3 and 255 characters in length, and it + // must not start with `"goog"`. string name = 1; + + // See Creating and + // managing labels. + map labels = 2; + + // Policy constraining how messages published to the topic may be stored. It + // is determined when the topic is created based on the policy configured at + // the project level. It must not be set by the caller in the request to + // CreateTopic or to UpdateTopic. This field will be populated in the + // responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the + // response, then no constraints are in effect. + MessageStoragePolicy message_storage_policy = 3; } -// A message data and its attributes. +// A message that is published by publishers and consumed by subscribers. The +// message must contain either a non-empty data field or at least one attribute. +// Note that client libraries represent this object differently +// depending on the language. See the corresponding +// client +// library documentation for more information. See +// Quotas and limits +// for more information about message limits. message PubsubMessage { - // The message payload. For JSON requests, the value of this field must be - // base64-encoded. + // The message data field. If this field is empty, the message must contain + // at least one attribute. bytes data = 1; // Optional attributes for this message. map attributes = 2; - // ID of this message assigned by the server at publication time. Guaranteed - // to be unique within the topic. 
This value may be read by a subscriber - // that receives a PubsubMessage via a Pull call or a push delivery. It must - // not be populated by a publisher in a Publish call. + // ID of this message, assigned by the server when the message is published. + // Guaranteed to be unique within the topic. This value may be read by a + // subscriber that receives a `PubsubMessage` via a `Pull` call or a push + // delivery. It must not be populated by the publisher in a `Publish` call. string message_id = 3; + + // The time at which the message was published, populated by the server when + // it receives the `Publish` call. It must not be populated by the + // publisher in a `Publish` call. + google.protobuf.Timestamp publish_time = 4; } // Request for the GetTopic method. message GetTopicRequest { // The name of the topic to get. + // Format is `projects/{project}/topics/{topic}`. string topic = 1; } +// Request for the UpdateTopic method. +message UpdateTopicRequest { + // The updated topic object. + Topic topic = 1; + + // Indicates which fields in the provided topic to update. Must be specified + // and non-empty. Note that if `update_mask` contains + // "message_storage_policy" then the new value will be determined based on the + // policy configured at the project or organization level. The + // `message_storage_policy` must not be set in the `topic` provided above. + google.protobuf.FieldMask update_mask = 2; +} + // Request for the Publish method. message PublishRequest { // The messages in the request will be published on this topic. + // Format is `projects/{project}/topics/{topic}`. string topic = 1; // The messages to publish. repeated PubsubMessage messages = 2; } -// Response for the Publish method. +// Response for the `Publish` method. message PublishResponse { // The server-assigned ID of each published message, in the same order as // the messages in the request. 
IDs are guaranteed to be unique within @@ -151,85 +439,131 @@ message PublishResponse { repeated string message_ids = 1; } -// Request for the ListTopics method. +// Request for the `ListTopics` method. message ListTopicsRequest { - // The name of the cloud project that topics belong to. + // The name of the project in which to list topics. + // Format is `projects/{project-id}`. string project = 1; // Maximum number of topics to return. int32 page_size = 2; - // The value returned by the last ListTopicsResponse; indicates that this is - // a continuation of a prior ListTopics call, and that the system should + // The value returned by the last `ListTopicsResponse`; indicates that this is + // a continuation of a prior `ListTopics` call, and that the system should // return the next page of data. string page_token = 3; } -// Response for the ListTopics method. +// Response for the `ListTopics` method. message ListTopicsResponse { // The resulting topics. repeated Topic topics = 1; // If not empty, indicates that there may be more topics that match the - // request; this value should be passed in a new ListTopicsRequest. + // request; this value should be passed in a new `ListTopicsRequest`. string next_page_token = 2; } -// Request for the ListTopicSubscriptions method. +// Request for the `ListTopicSubscriptions` method. message ListTopicSubscriptionsRequest { // The name of the topic that subscriptions are attached to. + // Format is `projects/{project}/topics/{topic}`. string topic = 1; // Maximum number of subscription names to return. int32 page_size = 2; - // The value returned by the last ListTopicSubscriptionsResponse; indicates - // that this is a continuation of a prior ListTopicSubscriptions call, and + // The value returned by the last `ListTopicSubscriptionsResponse`; indicates + // that this is a continuation of a prior `ListTopicSubscriptions` call, and // that the system should return the next page of data. 
string page_token = 3; } -// Response for the ListTopicSubscriptions method. +// Response for the `ListTopicSubscriptions` method. message ListTopicSubscriptionsResponse { // The names of the subscriptions that match the request. repeated string subscriptions = 1; // If not empty, indicates that there may be more subscriptions that match // the request; this value should be passed in a new - // ListTopicSubscriptionsRequest to get more subscriptions. + // `ListTopicSubscriptionsRequest` to get more subscriptions. + string next_page_token = 2; +} + +// Request for the `ListTopicSnapshots` method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message ListTopicSnapshotsRequest { + // The name of the topic that snapshots are attached to. + // Format is `projects/{project}/topics/{topic}`. + string topic = 1; + + // Maximum number of snapshot names to return. + int32 page_size = 2; + + // The value returned by the last `ListTopicSnapshotsResponse`; indicates + // that this is a continuation of a prior `ListTopicSnapshots` call, and + // that the system should return the next page of data. + string page_token = 3; +} + +// Response for the `ListTopicSnapshots` method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message ListTopicSnapshotsResponse { + // The names of the snapshots that match the request. + repeated string snapshots = 1; + + // If not empty, indicates that there may be more snapshots that match + // the request; this value should be passed in a new + // `ListTopicSnapshotsRequest` to get more snapshots. string next_page_token = 2; } -// Request for the DeleteTopic method. +// Request for the `DeleteTopic` method. message DeleteTopicRequest { // Name of the topic to delete. + // Format is `projects/{project}/topics/{topic}`. string topic = 1; } // A subscription resource. message Subscription { - // Name of the subscription. + // The name of the subscription. It must have the format + // `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must + // start with a letter, and contain only letters (`[A-Za-z]`), numbers + // (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), + // plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters + // in length, and it must not start with `"goog"`. string name = 1; // The name of the topic from which this subscription is receiving messages. - // This will be present if and only if the subscription has not been detached - // from its topic. + // Format is `projects/{project}/topics/{topic}`. + // The value of this field will be `_deleted-topic_` if the topic has been + // deleted. string topic = 2; // If push delivery is used with this subscription, this field is - // used to configure it. An empty pushConfig signifies that the subscriber + // used to configure it. An empty `pushConfig` signifies that the subscriber // will pull and ack messages using API methods. 
PushConfig push_config = 4; - // This value is the maximum time after a subscriber receives a message - // before the subscriber should acknowledge the message. After message - // delivery but before the ack deadline expires and before the message is - // acknowledged, it is an outstanding message and will not be delivered - // again during that time (on a best-effort basis). + // The approximate amount of time (on a best-effort basis) Pub/Sub waits for + // the subscriber to acknowledge receipt before resending the message. In the + // interval after the message is delivered and before it is acknowledged, it + // is considered to be outstanding. During that time period, the + // message will not be redelivered (on a best-effort basis). // - // For pull delivery this value - // is used as the initial value for the ack deadline. It may be overridden - // for a specific message by calling ModifyAckDeadline. + // For pull subscriptions, this value is used as the initial value for the ack + // deadline. To override this value for a given message, call + // `ModifyAckDeadline` with the corresponding `ack_id` if using + // non-streaming pull or send the `ack_id` in a + // `StreamingModifyAckDeadlineRequest` if using streaming pull. + // The minimum custom deadline you can specify is 10 seconds. + // The maximum custom deadline you can specify is 600 seconds (10 minutes). + // If this parameter is 0, a default value of 10 seconds is used. // // For push delivery, this value is also used to set the request timeout for // the call to the push endpoint. @@ -237,6 +571,56 @@ message Subscription { // If the subscriber never acknowledges the message, the Pub/Sub // system will eventually redeliver the message. int32 ack_deadline_seconds = 5; + + // Indicates whether to retain acknowledged messages. 
If true, then + // messages are not expunged from the subscription's backlog, even if they are + // acknowledged, until they fall out of the `message_retention_duration` + // window. This must be true if you would like to + // + // Seek to a timestamp. + //

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + bool retain_acked_messages = 7; + + // How long to retain unacknowledged messages in the subscription's backlog, + // from the moment a message is published. + // If `retain_acked_messages` is true, then this also configures the retention + // of acknowledged messages, and thus configures how far back in time a `Seek` + // can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 + // minutes.

+ // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + google.protobuf.Duration message_retention_duration = 8; + + // See Creating and + // managing labels. + map labels = 9; + + // A policy that specifies the conditions for this subscription's expiration. + // A subscription is considered active as long as any connected subscriber is + // successfully consuming messages from the subscription or is issuing + // operations on the subscription. If `expiration_policy` is not set, a + // *default policy* with `ttl` of 31 days will be used. The minimum allowed + // value for `expiration_policy.ttl` is 1 day. + // BETA: This feature is part of a beta release. This API might be + // changed in backward-incompatible ways and is not recommended for production + // use. It is not subject to any SLA or deprecation policy. + ExpirationPolicy expiration_policy = 11; +} + +// A policy that specifies the conditions for resource expiration (i.e., +// automatic resource deletion). +message ExpirationPolicy { + // Specifies the "time-to-live" duration for an associated resource. The + // resource expires if it is not active for a period of `ttl`. The definition + // of "activity" depends on the type of the associated resource. The minimum + // and maximum allowed values for `ttl` depend on the type of the associated + // resource, as well. If `ttl` is not set, the associated resource never + // expires. + google.protobuf.Duration ttl = 1; } // Configuration for a push delivery endpoint. @@ -251,23 +635,21 @@ message PushConfig { // control different aspects of the message delivery. // // The currently supported attribute is `x-goog-version`, which you can - // use to change the format of the push message. This attribute + // use to change the format of the pushed message. 
This attribute // indicates the version of the data expected by the endpoint. This - // controls the shape of the envelope (i.e. its fields and metadata). - // The endpoint version is based on the version of the Pub/Sub - // API. + // controls the shape of the pushed message (i.e., its fields and metadata). + // The endpoint version is based on the version of the Pub/Sub API. // - // If not present during the CreateSubscription call, it will default to + // If not present during the `CreateSubscription` call, it will default to // the version of the API used to make such call. If not present during a - // ModifyPushConfig call, its value will not be changed. GetSubscription + // `ModifyPushConfig` call, its value will not be changed. `GetSubscription` // calls will always return a valid version, even if the subscription was // created without this attribute. // // The possible values for this attribute are: // // * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API. - // * `v1beta2`: uses the push format defined in the v1beta2 Pub/Sub API. - // + // * `v1` or `v1beta2`: uses the push format defined in the v1 Pub/Sub API. map attributes = 2; } @@ -283,64 +665,78 @@ message ReceivedMessage { // Request for the GetSubscription method. message GetSubscriptionRequest { // The name of the subscription to get. + // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1; } -// Request for the ListSubscriptions method. +// Request for the UpdateSubscription method. +message UpdateSubscriptionRequest { + // The updated subscription object. + Subscription subscription = 1; + + // Indicates which fields in the provided subscription to update. + // Must be specified and non-empty. + google.protobuf.FieldMask update_mask = 2; +} + +// Request for the `ListSubscriptions` method. message ListSubscriptionsRequest { - // The name of the cloud project that subscriptions belong to. + // The name of the project in which to list subscriptions. 
+ // Format is `projects/{project-id}`. string project = 1; // Maximum number of subscriptions to return. int32 page_size = 2; - // The value returned by the last ListSubscriptionsResponse; indicates that - // this is a continuation of a prior ListSubscriptions call, and that the + // The value returned by the last `ListSubscriptionsResponse`; indicates that + // this is a continuation of a prior `ListSubscriptions` call, and that the // system should return the next page of data. string page_token = 3; } -// Response for the ListSubscriptions method. +// Response for the `ListSubscriptions` method. message ListSubscriptionsResponse { // The subscriptions that match the request. repeated Subscription subscriptions = 1; // If not empty, indicates that there may be more subscriptions that match - // the request; this value should be passed in a new ListSubscriptionsRequest - // to get more subscriptions. + // the request; this value should be passed in a new + // `ListSubscriptionsRequest` to get more subscriptions. string next_page_token = 2; } // Request for the DeleteSubscription method. message DeleteSubscriptionRequest { // The subscription to delete. + // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1; } // Request for the ModifyPushConfig method. message ModifyPushConfigRequest { // The name of the subscription. + // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1; // The push configuration for future deliveries. // - // An empty pushConfig indicates that the Pub/Sub system should + // An empty `pushConfig` indicates that the Pub/Sub system should // stop pushing messages from the given subscription and allow // messages to be pulled and acknowledged - effectively pausing - // the subscription if Pull is not called. + // the subscription if `Pull` or `StreamingPull` is not called. PushConfig push_config = 2; } -// Request for the Pull method. +// Request for the `Pull` method. 
message PullRequest { // The subscription from which messages should be pulled. + // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1; - // If this is specified as true the system will respond immediately even if - // it is not able to return a message in the Pull response. Otherwise the - // system is allowed to wait until at least one message is available rather - // than returning no messages. The client may cancel the request if it does - // not wish to wait any longer for the response. + // If this field set to true, the system will respond immediately even if + // it there are no messages available to return in the `Pull` response. + // Otherwise, the system may wait (for a bounded amount of time) until at + // least one message is available, rather than returning no messages. bool return_immediately = 2; // The maximum number of messages returned for this request. The Pub/Sub @@ -348,37 +744,254 @@ message PullRequest { int32 max_messages = 3; } -// Response for the Pull method. +// Response for the `Pull` method. message PullResponse { - // Received Pub/Sub messages. The Pub/Sub system will return zero messages if - // there are no more available in the backlog. The Pub/Sub system may return - // fewer than the maxMessages requested even if there are more messages - // available in the backlog. + // Received Pub/Sub messages. The list will be empty if there are no more + // messages available in the backlog. For JSON, the response can be entirely + // empty. The Pub/Sub system may return fewer than the `maxMessages` requested + // even if there are more messages available in the backlog. repeated ReceivedMessage received_messages = 1; } // Request for the ModifyAckDeadline method. message ModifyAckDeadlineRequest { // The name of the subscription. + // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1; - // The acknowledgment ID. 
- string ack_id = 2; - - // The new ack deadline with respect to the time this request was sent to the - // Pub/Sub system. Must be >= 0. For example, if the value is 10, the new ack - // deadline will expire 10 seconds after the ModifyAckDeadline call was made. - // Specifying zero may immediately make the message available for another pull - // request. + // List of acknowledgment IDs. + repeated string ack_ids = 4; + + // The new ack deadline with respect to the time this request was sent to + // the Pub/Sub system. For example, if the value is 10, the new + // ack deadline will expire 10 seconds after the `ModifyAckDeadline` call + // was made. Specifying zero might immediately make the message available for + // delivery to another subscriber client. This typically results in an + // increase in the rate of message redeliveries (that is, duplicates). + // The minimum deadline you can specify is 0 seconds. + // The maximum deadline you can specify is 600 seconds (10 minutes). int32 ack_deadline_seconds = 3; } // Request for the Acknowledge method. message AcknowledgeRequest { // The subscription whose message is being acknowledged. + // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1; // The acknowledgment ID for the messages being acknowledged that was returned - // by the Pub/Sub system in the Pull response. Must not be empty. + // by the Pub/Sub system in the `Pull` response. Must not be empty. + repeated string ack_ids = 2; +} + +// Request for the `StreamingPull` streaming RPC method. This request is used to +// establish the initial stream as well as to stream acknowledgements and ack +// deadline modifications from the client to the server. +message StreamingPullRequest { + // The subscription for which to initialize the new stream. This must be + // provided in the first request on the stream, and must not be set in + // subsequent requests from client to server. + // Format is `projects/{project}/subscriptions/{sub}`. 
+ string subscription = 1; + + // List of acknowledgement IDs for acknowledging previously received messages + // (received on this stream or a different stream). If an ack ID has expired, + // the corresponding message may be redelivered later. Acknowledging a message + // more than once will not result in an error. If the acknowledgement ID is + // malformed, the stream will be aborted with status `INVALID_ARGUMENT`. repeated string ack_ids = 2; + + // The list of new ack deadlines for the IDs listed in + // `modify_deadline_ack_ids`. The size of this list must be the same as the + // size of `modify_deadline_ack_ids`. If it differs the stream will be aborted + // with `INVALID_ARGUMENT`. Each element in this list is applied to the + // element in the same position in `modify_deadline_ack_ids`. The new ack + // deadline is with respect to the time this request was sent to the Pub/Sub + // system. Must be >= 0. For example, if the value is 10, the new ack deadline + // will expire 10 seconds after this request is received. If the value is 0, + // the message is immediately made available for another streaming or + // non-streaming pull request. If the value is < 0 (an error), the stream will + // be aborted with status `INVALID_ARGUMENT`. + repeated int32 modify_deadline_seconds = 3; + + // List of acknowledgement IDs whose deadline will be modified based on the + // corresponding element in `modify_deadline_seconds`. This field can be used + // to indicate that more time is needed to process a message by the + // subscriber, or to make the message available for redelivery if the + // processing was interrupted. + repeated string modify_deadline_ack_ids = 4; + + // The ack deadline to use for the stream. This must be provided in the + // first request on the stream, but it can also be updated on subsequent + // requests from client to server. The minimum deadline you can specify is 10 + // seconds. The maximum deadline you can specify is 600 seconds (10 minutes). 
+ int32 stream_ack_deadline_seconds = 5; +} + +// Response for the `StreamingPull` method. This response is used to stream +// messages from the server to the client. +message StreamingPullResponse { + // Received Pub/Sub messages. This will not be empty. + repeated ReceivedMessage received_messages = 1; +} + +// Request for the `CreateSnapshot` method.

+// BETA: This feature is part of a beta release. This API might be changed in +// backward-incompatible ways and is not recommended for production use. +// It is not subject to any SLA or deprecation policy. +message CreateSnapshotRequest { + // Optional user-provided name for this snapshot. + // If the name is not provided in the request, the server will assign a random + // name for this snapshot on the same project as the subscription. + // Note that for REST API requests, you must specify a name. See the + // + // resource name rules. + // Format is `projects/{project}/snapshots/{snap}`. + string name = 1; + + // The subscription whose backlog the snapshot retains. + // Specifically, the created snapshot is guaranteed to retain: + // (a) The existing backlog on the subscription. More precisely, this is + // defined as the messages in the subscription's backlog that are + // unacknowledged upon the successful completion of the + // `CreateSnapshot` request; as well as: + // (b) Any messages published to the subscription's topic following the + // successful completion of the CreateSnapshot request. + // Format is `projects/{project}/subscriptions/{sub}`. + string subscription = 2; + + // See Creating and + // managing labels. + map labels = 3; +} + +// Request for the UpdateSnapshot method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message UpdateSnapshotRequest { + // The updated snapshot object. + Snapshot snapshot = 1; + + // Indicates which fields in the provided snapshot to update. + // Must be specified and non-empty. + google.protobuf.FieldMask update_mask = 2; +} + +// A snapshot resource. Snapshots are used in +// Seek +// operations, which allow +// you to manage message acknowledgments in bulk. That is, you can set the +// acknowledgment state of messages in an existing subscription to the state +// captured by a snapshot.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message Snapshot { + // The name of the snapshot. + string name = 1; + + // The name of the topic from which this snapshot is retaining messages. + string topic = 2; + + // The snapshot is guaranteed to exist up until this time. + // A newly-created snapshot expires no later than 7 days from the time of its + // creation. Its exact lifetime is determined at creation by the existing + // backlog in the source subscription. Specifically, the lifetime of the + // snapshot is `7 days - (age of oldest unacked message in the subscription)`. + // For example, consider a subscription whose oldest unacked message is 3 days + // old. If a snapshot is created from this subscription, the snapshot -- which + // will always capture this 3-day-old backlog as long as the snapshot + // exists -- will expire in 4 days. The service will refuse to create a + // snapshot that would expire in less than 1 hour after creation. + google.protobuf.Timestamp expire_time = 3; + + // See Creating and + // managing labels. + map labels = 4; +} + +// Request for the GetSnapshot method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message GetSnapshotRequest { + // The name of the snapshot to get. + // Format is `projects/{project}/snapshots/{snap}`. + string snapshot = 1; +} + +// Request for the `ListSnapshots` method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message ListSnapshotsRequest { + // The name of the project in which to list snapshots. + // Format is `projects/{project-id}`. + string project = 1; + + // Maximum number of snapshots to return. + int32 page_size = 2; + + // The value returned by the last `ListSnapshotsResponse`; indicates that this + // is a continuation of a prior `ListSnapshots` call, and that the system + // should return the next page of data. + string page_token = 3; +} + +// Response for the `ListSnapshots` method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message ListSnapshotsResponse { + // The resulting snapshots. + repeated Snapshot snapshots = 1; + + // If not empty, indicates that there may be more snapshot that match the + // request; this value should be passed in a new `ListSnapshotsRequest`. + string next_page_token = 2; +} + +// Request for the `DeleteSnapshot` method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message DeleteSnapshotRequest { + // The name of the snapshot to delete. + // Format is `projects/{project}/snapshots/{snap}`. + string snapshot = 1; +} + +// Request for the `Seek` method.

+// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. +message SeekRequest { + // The subscription to affect. + string subscription = 1; + + oneof target { + // The time to seek to. + // Messages retained in the subscription that were published before this + // time are marked as acknowledged, and messages retained in the + // subscription that were published after this time are marked as + // unacknowledged. Note that this operation affects only those messages + // retained in the subscription (configured by the combination of + // `message_retention_duration` and `retain_acked_messages`). For example, + // if `time` corresponds to a point before the message retention + // window (or to a point before the system's notion of the subscription + // creation time), only retained messages will be marked as unacknowledged, + // and already-expunged messages will not be restored. + google.protobuf.Timestamp time = 2; + + // The snapshot to seek to. The snapshot's topic must be the same as that of + // the provided subscription. + // Format is `projects/{project}/snapshots/{snap}`. + string snapshot = 3; + } +} + +// Response for the `Seek` method (this response is empty). 
+message SeekResponse { + } diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index c966ed982898..e1d6002d65d2 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-01-24T17:13:22.929911Z", + "updateTime": "2019-02-01T01:07:16.579788Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9aac88a22468b1e291937f55fa1ef237adfdc63e", - "internalRef": "230568136" + "sha": "acb5253cd11cd43cab93eb153d6e48ba0fa5303d", + "internalRef": "231786007" } }, { From 8a2f8aeabb3f0d419b31eebcc8bbd353749e09c2 Mon Sep 17 00:00:00 2001 From: Pravin Dahal Date: Mon, 11 Feb 2019 19:12:32 +0100 Subject: [PATCH 0338/1197] Updated client library documentation URLs. (#7307) Previously, the URLs would redirect using JavaScript, which would either be slow or not work at all (in case JavaScript is disabled on the browser) --- packages/google-cloud-pubsub/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index e9599751e4a0..47f3eb452823 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -27,7 +27,7 @@ independently written applications. :target: https://pypi.org/project/google-cloud-pubsub/ .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/pubsub/ +.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/ Quick Start ----------- @@ -42,7 +42,7 @@ In order to use this library, you first need to go through the following steps: .. 
_Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Pub / Sub API.: https://cloud.google.com/pubsub -.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/latest/core/auth.html +.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html Installation ~~~~~~~~~~~~ From ed1cb6da58ec2ff117237e04a4387999fbca7da9 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 15 Mar 2019 16:59:01 -0400 Subject: [PATCH 0339/1197] Add 'authentication_method' to 'PushConfig' (via synth). (#7512) Closes #7399. --- .../gapic/publisher_client_config.py | 15 +- .../gapic/subscriber_client_config.py | 29 +-- .../google/cloud/pubsub_v1/proto/pubsub.proto | 88 +++++-- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 241 ++++++++++++++---- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 14 +- .../_protocol/streaming_pull_manager.py | 1 + packages/google-cloud-pubsub/noxfile.py | 4 + packages/google-cloud-pubsub/synth.metadata | 12 +- 8 files changed, 293 insertions(+), 111 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 4c3519d7dac2..1aa04e62894f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -2,9 +2,10 @@ "interfaces": { "google.pubsub.v1.Publisher": { "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - "one_plus_delivery": [ + "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], + "non_idempotent": ["UNAVAILABLE"], + "none": [], + "publish": [ "ABORTED", "CANCELLED", "DEADLINE_EXCEEDED", @@ -37,17 +38,17 
@@ "methods": { "CreateTopic": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateTopic": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "Publish": { "timeout_millis": 60000, - "retry_codes_name": "one_plus_delivery", + "retry_codes_name": "publish", "retry_params_name": "messaging", "bundling": { "element_count_threshold": 10, @@ -74,7 +75,7 @@ }, "DeleteTopic": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "SetIamPolicy": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 8d85d8f6094d..54a37acc66b9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -2,14 +2,9 @@ "interfaces": { "google.pubsub.v1.Subscriber": { "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "pull": [ - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - ], - "non_idempotent": [], + "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], + "non_idempotent": ["UNAVAILABLE"], + "none": [], }, "retry_params": { "default": { @@ -53,7 +48,7 @@ }, "UpdateSubscription": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ListSubscriptions": { @@ -63,7 +58,7 @@ }, "DeleteSubscription": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "ModifyAckDeadline": { @@ -73,17 +68,17 @@ }, "Acknowledge": { "timeout_millis": 60000, - 
"retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "messaging", }, "Pull": { "timeout_millis": 60000, - "retry_codes_name": "pull", + "retry_codes_name": "idempotent", "retry_params_name": "messaging", }, "StreamingPull": { "timeout_millis": 900000, - "retry_codes_name": "pull", + "retry_codes_name": "none", "retry_params_name": "streaming_messaging", }, "ModifyPushConfig": { @@ -98,22 +93,22 @@ }, "CreateSnapshot": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "UpdateSnapshot": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "DeleteSnapshot": { "timeout_millis": 60000, - "retry_codes_name": "idempotent", + "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, "Seek": { "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", + "retry_codes_name": "idempotent", "retry_params_name": "default", }, "SetIamPolicy": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 02d0bf34b3ee..9d79638f8fd6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -1,4 +1,4 @@ -// Copyright 2018 Google LLC. +// Copyright 2019 Google LLC. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -32,7 +32,6 @@ option java_package = "com.google.pubsub.v1"; option php_namespace = "Google\\Cloud\\PubSub\\V1"; option ruby_package = "Google::Cloud::PubSub::V1"; - // The service that an application uses to manipulate topics, and to send // messages to a topic. 
service Publisher { @@ -79,7 +78,8 @@ service Publisher { } // Lists the names of the subscriptions on this topic. - rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse) { + rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) + returns (ListTopicSubscriptionsResponse) { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}/subscriptions" }; @@ -94,7 +94,8 @@ service Publisher { // BETA: This feature is part of a beta release. This API might be // changed in backward-incompatible ways and is not recommended for production // use. It is not subject to any SLA or deprecation policy. - rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { + rpc ListTopicSnapshots(ListTopicSnapshotsRequest) + returns (ListTopicSnapshotsResponse) { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}/snapshots" }; @@ -125,9 +126,10 @@ service Subscriber { // If the name is not provided in the request, the server will assign a random // name for this subscription on the same project as the topic, conforming // to the - // [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). - // The generated name is populated in the returned Subscription object. - // Note that for REST API requests, you must specify a name in the request. + // [resource name + // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // generated name is populated in the returned Subscription object. Note that + // for REST API requests, you must specify a name in the request. rpc CreateSubscription(Subscription) returns (Subscription) { option (google.api.http) = { put: "/v1/{name=projects/*/subscriptions/*}" @@ -152,7 +154,8 @@ service Subscriber { } // Lists matching subscriptions. 
- rpc ListSubscriptions(ListSubscriptionsRequest) returns (ListSubscriptionsResponse) { + rpc ListSubscriptions(ListSubscriptionsRequest) + returns (ListSubscriptionsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/subscriptions" }; @@ -163,7 +166,8 @@ service Subscriber { // `NOT_FOUND`. After a subscription is deleted, a new one may be created with // the same name, but the new one has no association with the old // subscription or its topic unless the same topic is specified. - rpc DeleteSubscription(DeleteSubscriptionRequest) returns (google.protobuf.Empty) { + rpc DeleteSubscription(DeleteSubscriptionRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{subscription=projects/*/subscriptions/*}" }; @@ -174,7 +178,8 @@ service Subscriber { // subscriber, or to make the message available for redelivery if the // processing was interrupted. Note that this does not modify the // subscription-level `ackDeadlineSeconds` used for subsequent messages. - rpc ModifyAckDeadline(ModifyAckDeadlineRequest) returns (google.protobuf.Empty) { + rpc ModifyAckDeadline(ModifyAckDeadlineRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline" body: "*" @@ -212,8 +217,8 @@ service Subscriber { // reassign server-side resources, in which case, the client should // re-establish the stream. Flow control can be achieved by configuring the // underlying RPC channel. - rpc StreamingPull(stream StreamingPullRequest) returns (stream StreamingPullResponse) { - } + rpc StreamingPull(stream StreamingPullRequest) + returns (stream StreamingPullResponse) {} // Modifies the `PushConfig` for a specified subscription. // @@ -221,7 +226,8 @@ service Subscriber { // an empty `PushConfig`) or vice versa, or change the endpoint URL and other // attributes of a push subscription. 
Messages will accumulate for delivery // continuously through the call regardless of changes to the `PushConfig`. - rpc ModifyPushConfig(ModifyPushConfigRequest) returns (google.protobuf.Empty) { + rpc ModifyPushConfig(ModifyPushConfigRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig" body: "*" @@ -275,9 +281,10 @@ service Subscriber { // the request, the server will assign a random // name for this snapshot on the same project as the subscription, conforming // to the - // [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). - // The generated name is populated in the returned Snapshot object. Note that - // for REST API requests, you must specify a name in the request. + // [resource name + // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // generated name is populated in the returned Snapshot object. Note that for + // REST API requests, you must specify a name in the request. rpc CreateSnapshot(CreateSnapshotRequest) returns (Snapshot) { option (google.api.http) = { put: "/v1/{name=projects/*/snapshots/*}" @@ -576,7 +583,8 @@ message Subscription { // messages are not expunged from the subscription's backlog, even if they are // acknowledged, until they fall out of the `message_retention_duration` // window. This must be true if you would like to - // + // // Seek to a timestamp. //

// BETA: This feature is part of a beta release. This API might be @@ -651,6 +659,42 @@ message PushConfig { // * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API. // * `v1` or `v1beta2`: uses the push format defined in the v1 Pub/Sub API. map attributes = 2; + + // An authentication method used by push endpoints to verify the source of + // push requests. This can be used with push endpoints that are private by + // default to allow requests only from the Cloud Pub/Sub system, for example. + // This field is optional and should be set only by users interested in + // authenticated push. + // EXPERIMENTAL: This field a part of a closed alpha that may not be + // accessible to all users. It may be changed in backward-incompatible ways + // and is not subject to any SLA or deprecation policy. It is not recommended + // for production use. + oneof authentication_method { + // If specified, Pub/Sub will generate and attach an OIDC JWT token as an + // `Authorization` header in the HTTP request for every pushed message. + OidcToken oidc_token = 3; + } + + // Contains information needed for generating an + // [OpenID Connect + // token](https://developers.google.com/identity/protocols/OpenIDConnect). + message OidcToken { + // [Service account + // email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating the OIDC token. The caller (for + // CreateSubscription, UpdateSubscription, and ModifyPushConfig calls) must + // have the iam.serviceAccounts.actAs permission for the service account. + // See https://cloud.google.com/iam/docs/understanding-roles#service-accounts-roles. + string service_account_email = 1; + + // Audience to be used when generating OIDC token. The audience claim + // identifies the recipients that the JWT is intended for. The audience + // value is a single case-sensitive string. Having multiple values (array) + // for the audience field is not supported. 
More info about the OIDC JWT + // token audience here: https://tools.ietf.org/html/rfc7519#section-4.1.3 + // Note: if not specified, the Push endpoint URL will be used. + string audience = 2; + } } // A message and its corresponding acknowledgment ID. @@ -836,9 +880,9 @@ message StreamingPullResponse { } // Request for the `CreateSnapshot` method.

-// BETA: This feature is part of a beta release. This API might be changed in -// backward-incompatible ways and is not recommended for production use. -// It is not subject to any SLA or deprecation policy. +// BETA: This feature is part of a beta release. This API might be +// changed in backward-incompatible ways and is not recommended for production +// use. It is not subject to any SLA or deprecation policy. message CreateSnapshotRequest { // Optional user-provided name for this snapshot. // If the name is not provided in the request, the server will assign a random @@ -992,6 +1036,4 @@ message SeekRequest { } // Response for the `Seek` method (this response is empty). -message SeekResponse { - -} +message SeekResponse {} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 09a4c8ae8226..82205753e85d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -29,7 +29,7 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 
\x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\x98\x01\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 
\x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 
\x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subsc
ription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v
1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 
\x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\tB\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4
\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.p
ubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -1303,6 +1303,62 @@ serialized_end=698, ) +_PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( + name="OidcToken", + full_name="google.pubsub.v1.PushConfig.OidcToken", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="service_account_email", + full_name="google.pubsub.v1.PushConfig.OidcToken.service_account_email", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="audience", + full_name="google.pubsub.v1.PushConfig.OidcToken.audience", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=2165, + serialized_end=2225, +) + _PUSHCONFIG = _descriptor.Descriptor( name="PushConfig", full_name="google.pubsub.v1.PushConfig", @@ -1346,17 +1402,43 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="oidc_token", + full_name="google.pubsub.v1.PushConfig.oidc_token", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], - nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY], + nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, _PUSHCONFIG_OIDCTOKEN], enum_types=[], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], - oneofs=[], + oneofs=[ + _descriptor.OneofDescriptor( + name="authentication_method", + full_name="google.pubsub.v1.PushConfig.authentication_method", + index=0, + containing_type=None, + fields=[], + ) + ], serialized_start=1949, - serialized_end=2101, + serialized_end=2250, ) @@ -1412,8 +1494,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2103, - serialized_end=2186, + serialized_start=2252, + serialized_end=2335, ) @@ -1451,8 +1533,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2188, - serialized_end=2234, + serialized_start=2337, + serialized_end=2383, ) @@ -1508,8 +1590,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2237, - serialized_end=2367, + serialized_start=2386, + serialized_end=2516, ) @@ -1583,8 +1665,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2369, - serialized_end=2451, + serialized_start=2518, + serialized_end=2600, ) @@ -1640,8 +1722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2453, - serialized_end=2560, + serialized_start=2602, + serialized_end=2709, ) @@ -1679,8 +1761,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=2562, - serialized_end=2611, + serialized_start=2711, + serialized_end=2760, ) @@ -1736,8 +1818,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2613, - serialized_end=2711, + serialized_start=2762, + serialized_end=2860, ) @@ -1811,8 +1893,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2713, - serialized_end=2798, + serialized_start=2862, + serialized_end=2947, ) @@ -1850,8 +1932,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2800, - serialized_end=2876, + serialized_start=2949, + serialized_end=3025, ) @@ -1925,8 +2007,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2878, - serialized_end=2973, + serialized_start=3027, + serialized_end=3122, ) @@ -1982,8 +2064,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2975, - serialized_end=3034, + serialized_start=3124, + serialized_end=3183, ) @@ -2093,8 +2175,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3037, - serialized_end=3201, + serialized_start=3186, + serialized_end=3350, ) @@ -2132,8 +2214,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3203, - serialized_end=3288, + serialized_start=3352, + serialized_end=3437, ) @@ -2263,8 +2345,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3291, - serialized_end=3466, + serialized_start=3440, + serialized_end=3615, ) @@ -2320,8 +2402,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3468, - serialized_end=3586, + serialized_start=3617, + serialized_end=3735, ) @@ -2469,8 +2551,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3589, - serialized_end=3780, + serialized_start=3738, + serialized_end=3929, ) @@ -2508,8 +2590,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3782, - serialized_end=3820, + serialized_start=3931, + serialized_end=3969, ) @@ -2583,8 
+2665,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3822, - serialized_end=3900, + serialized_start=3971, + serialized_end=4049, ) @@ -2640,8 +2722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3902, - serialized_end=3997, + serialized_start=4051, + serialized_end=4146, ) @@ -2679,8 +2761,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3999, - serialized_end=4040, + serialized_start=4148, + serialized_end=4189, ) @@ -2762,8 +2844,8 @@ fields=[], ) ], - serialized_start=4042, - serialized_end=4151, + serialized_start=4191, + serialized_end=4300, ) @@ -2782,8 +2864,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4153, - serialized_end=4167, + serialized_start=4302, + serialized_end=4316, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -2813,7 +2895,15 @@ "ttl" ].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG +_PUSHCONFIG_OIDCTOKEN.containing_type = _PUSHCONFIG _PUSHCONFIG.fields_by_name["attributes"].message_type = _PUSHCONFIG_ATTRIBUTESENTRY +_PUSHCONFIG.fields_by_name["oidc_token"].message_type = _PUSHCONFIG_OIDCTOKEN +_PUSHCONFIG.oneofs_by_name["authentication_method"].fields.append( + _PUSHCONFIG.fields_by_name["oidc_token"] +) +_PUSHCONFIG.fields_by_name["oidc_token"].containing_oneof = _PUSHCONFIG.oneofs_by_name[ + "authentication_method" +] _RECEIVEDMESSAGE.fields_by_name["message"].message_type = _PUBSUBMESSAGE _UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"].message_type = _SUBSCRIPTION _UPDATESUBSCRIPTIONREQUEST.fields_by_name[ @@ -3409,6 +3499,38 @@ # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) ), ), + OidcToken=_reflection.GeneratedProtocolMessageType( + "OidcToken", + (_message.Message,), + dict( + DESCRIPTOR=_PUSHCONFIG_OIDCTOKEN, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Contains information needed for 
generating an `OpenID Connect + token `__. + + + Attributes: + service_account_email: + `Service account email + `__ to be + used for generating the OIDC token. The caller (for + CreateSubscription, UpdateSubscription, and ModifyPushConfig + calls) must have the iam.serviceAccounts.actAs permission for + the service account. See + https://cloud.google.com/iam/docs/understanding-roles#service- + accounts-roles. + audience: + Audience to be used when generating OIDC token. The audience + claim identifies the recipients that the JWT is intended for. + The audience value is a single case-sensitive string. Having + multiple values (array) for the audience field is not + supported. More info about the OIDC JWT token audience here: + https://tools.ietf.org/html/rfc7519#section-4.1.3 Note: if not + specified, the Push endpoint URL will be used. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.OidcToken) + ), + ), DESCRIPTOR=_PUSHCONFIG, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Configuration for a push delivery endpoint. @@ -3438,12 +3560,27 @@ uses the push format defined in the v1beta1 Pub/Sub API. - ``v1`` or ``v1beta2``: uses the push format defined in the v1 Pub/Sub API. + authentication_method: + An authentication method used by push endpoints to verify the + source of push requests. This can be used with push endpoints + that are private by default to allow requests only from the + Cloud Pub/Sub system, for example. This field is optional and + should be set only by users interested in authenticated push. + EXPERIMENTAL: This field a part of a closed alpha that may not + be accessible to all users. It may be changed in backward- + incompatible ways and is not subject to any SLA or deprecation + policy. It is not recommended for production use. + oidc_token: + If specified, Pub/Sub will generate and attach an OIDC JWT + token as an ``Authorization`` header in the HTTP request for + every pushed message. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) ), ) _sym_db.RegisterMessage(PushConfig) _sym_db.RegisterMessage(PushConfig.AttributesEntry) +_sym_db.RegisterMessage(PushConfig.OidcToken) ReceivedMessage = _reflection.GeneratedProtocolMessageType( "ReceivedMessage", @@ -4068,8 +4205,8 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=4170, - serialized_end=5257, + serialized_start=4319, + serialized_end=5406, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4172,8 +4309,8 @@ file=DESCRIPTOR, index=1, serialized_options=None, - serialized_start=5260, - serialized_end=7557, + serialized_start=5409, + serialized_end=7706, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index 7668377d78de..dfe337e36271 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -297,9 +297,10 @@ def CreateSubscription(self, request, context): If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the - [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. + [resource name + format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. Note that + for REST API requests, you must specify a name in the request. 
""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -444,9 +445,10 @@ def CreateSnapshot(self, request, context): the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the - [resource name format](https://cloud.google.com/pubsub/docs/admin#resource_names). - The generated name is populated in the returned Snapshot object. Note that - for REST API requests, you must specify a name in the request. + [resource name + format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. Note that for + REST API requests, you must specify a name in the request. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index d8d94384c67e..acf514775779 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -41,6 +41,7 @@ exceptions.InternalServerError, exceptions.Unknown, exceptions.GatewayTimeout, + exceptions.Aborted, ) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index bfac9f4c2bce..d692cf37f39c 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -45,6 +45,10 @@ def blacken(session): """Run black. Format code to uniform standard. + + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. + That run uses an image that doesn't have 3.6 installed. Before updating this + check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" session.install("black") session.run( diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index e1d6002d65d2..6cb01ab8bfc4 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-02-01T01:07:16.579788Z", + "updateTime": "2019-03-14T17:14:10.475230Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.7", - "dockerImage": "googleapis/artman@sha256:d6c8ced606eb49973ca95d2af7c55a681acc042db0f87d135968349e7bf6dd80" + "version": "0.16.17", + "dockerImage": "googleapis/artman@sha256:7231f27272231a884e09edb5953148c85ecd8467780d33c4a35c3e507885715b" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "acb5253cd11cd43cab93eb153d6e48ba0fa5303d", - "internalRef": "231786007" + "sha": "d05a746ba27b89374c7407c88c0d5dabfb0afc7c", + "internalRef": "238372434" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.1.16" + "version": "2019.2.26" } } ], From 7f91a4e958e4815cf9e28d228d3e604777f2488b Mon Sep 17 00:00:00 2001 From: Solomon Duskis Date: Fri, 15 Mar 2019 17:32:43 -0400 Subject: [PATCH 0340/1197] Release 0.40.0 (#7519) --- packages/google-cloud-pubsub/CHANGELOG.md | 26 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 4d07ae909e2c..a5497c6f4743 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,32 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.40.0 + +03-15-2019 14:09 PDT + + +### Implementation Changes +- Propagate 'RetryError' in 'PublisherClient.publish'. 
([#7071](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7071)) +- Protoc-generated serialization update.. ([#7091](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7091)) + +### New Features +- Add 'authentication_method' to 'PushConfig' (via synth). ([#7512](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7512)) +- Add protos as an artifact to library ([#7205](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7205)) +- Pub/sub: pass transport w/ custom channel to GAPIC API clients. ([#7008](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7008)) + +### Dependencies + +### Documentation +- Updated client library documentation URLs. ([#7307](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7307)) +- Update copyright headers +- Fix broken docstring cross-reference links. ([#7132](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7132)) +- Docstring changes from updates to .proto files. ([#7054](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7054)) +- Pick up stub docstring fix in GAPIC generator. ([#6978](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/6978)) + +### Internal / Testing Changes +- Copy proto files alongside protoc versions. + ## 0.39.1 12-17-2018 16:57 PST diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ee6ab845a790..f057f7ce8fd8 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.39.1' +version = '0.40.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 0376e926764898a75fa514ef444856b09cbb2f74 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 20 Mar 2019 13:41:12 -0700 Subject: [PATCH 0341/1197] Remove classifier for Python 3.4 for end-of-life. 
(#7535) * Remove classifier for Python 3.4 for end-of-life. * Update supported versions in Client README, Contributing Guide --- packages/google-cloud-pubsub/README.rst | 2 +- packages/google-cloud-pubsub/setup.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 47f3eb452823..2895b60f761e 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -60,7 +60,7 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.4 +Python >= 3.5 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index f057f7ce8fd8..d9ad1aea8410 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -74,7 +74,6 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', From 1b940bbd0526c49379c1898904ddbdd81709cf12 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Fri, 29 Mar 2019 16:38:26 -0700 Subject: [PATCH 0342/1197] Add message ordering (via synth). 
(#7551) --- .../pubsub_v1/gapic/subscriber_client.py | 9 + .../google/cloud/pubsub_v1/proto/pubsub.proto | 18 ++ .../cloud/pubsub_v1/proto/pubsub_pb2.py | 226 +++++++++++------- packages/google-cloud-pubsub/synth.metadata | 10 +- .../gapic/v1/test_subscriber_client_v1.py | 6 + 5 files changed, 179 insertions(+), 90 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index fa739e689f53..31ce68db82e9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -224,6 +224,7 @@ def create_subscription( retain_acked_messages=None, message_retention_duration=None, labels=None, + enable_message_ordering=None, expiration_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, @@ -309,6 +310,13 @@ def create_subscription( message :class:`~google.cloud.pubsub_v1.types.Duration` labels (dict[str -> str]): See Creating and managing labels. + enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in + ``PubsubMessage`` will be delivered to the subscribers in the order in + which they are received by the Pub/Sub system. Otherwise, they may be + delivered in any order. EXPERIMENTAL: This feature is part of a closed + alpha release. This API might be changed in backward-incompatible ways + and is not recommended for production use. It is not subject to any SLA + or deprecation policy. expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's expiration. 
A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or @@ -359,6 +367,7 @@ def create_subscription( retain_acked_messages=retain_acked_messages, message_retention_duration=message_retention_duration, labels=labels, + enable_message_ordering=enable_message_ordering, expiration_policy=expiration_policy, ) return self._inner_api_calls["create_subscription"]( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 9d79638f8fd6..715af9c18c46 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -406,6 +406,15 @@ message PubsubMessage { // it receives the `Publish` call. It must not be populated by the // publisher in a `Publish` call. google.protobuf.Timestamp publish_time = 4; + + // Identifies related messages for which publish order should be respected. + // If a `Subscription` has `enable_message_ordering` set to `true`, messages + // published with the same `ordering_key` value will be delivered to + // subscribers in the order in which they are received by the Pub/Sub system. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + string ordering_key = 5; } // Request for the GetTopic method. @@ -607,6 +616,15 @@ message Subscription { // managing labels. map labels = 9; + // If true, messages published with the same `ordering_key` in `PubsubMessage` + // will be delivered to the subscribers in the order in which they + // are received by the Pub/Sub system. Otherwise, they may be delivered in + // any order. + // EXPERIMENTAL: This feature is part of a closed alpha release. 
This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + bool enable_message_ordering = 10; + // A policy that specifies the conditions for this subscription's expiration. // A subscription is considered active as long as any connected subscriber is // successfully consuming messages from the subscription or is issuing diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 82205753e85d..7b66db7fe659 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -29,7 +29,7 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xdb\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\x84\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12=\n\x11\x65xpiration_policy\x18\x0b 
\x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\tB\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 
\x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 
\x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subsc
ription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v
1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" 
\n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xa5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\tB\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 
\x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 
\x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4
\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.p
ubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -263,8 +263,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=649, - serialized_end=698, + serialized_start=671, + serialized_end=720, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -346,6 +346,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="ordering_key", + full_name="google.pubsub.v1.PubsubMessage.ordering_key", + index=4, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY], @@ -356,7 +374,7 @@ extension_ranges=[], oneofs=[], serialized_start=479, - serialized_end=698, + serialized_end=720, ) @@ -394,8 +412,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=700, - serialized_end=732, + serialized_start=722, + serialized_end=754, ) @@ -451,8 +469,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=734, - serialized_end=843, + serialized_start=756, + serialized_end=865, ) @@ -508,8 +526,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=845, - serialized_end=927, + serialized_start=867, + serialized_end=949, ) @@ -547,8 +565,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=929, - serialized_end=967, + serialized_start=951, + serialized_end=989, ) @@ -622,8 +640,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=969, - serialized_end=1044, + serialized_start=991, + serialized_end=1066, ) @@ -679,8 +697,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1046, - serialized_end=1132, + serialized_start=1068, + serialized_end=1154, ) @@ -754,8 +772,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1134, - serialized_end=1219, + serialized_start=1156, + serialized_end=1241, ) @@ -811,8 +829,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1221, - serialized_end=1301, + serialized_start=1243, + serialized_end=1323, ) @@ -886,8 +904,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1303, - serialized_end=1384, + serialized_start=1325, + serialized_end=1406, ) @@ -943,8 +961,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1386, - serialized_end=1458, + serialized_start=1408, + serialized_end=1480, ) @@ -982,8 +1000,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1460, - serialized_end=1495, + serialized_start=1482, + serialized_end=1517, ) @@ -1176,10 +1194,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="enable_message_ordering", + full_name="google.pubsub.v1.Subscription.enable_message_ordering", + index=7, + number=10, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="expiration_policy", 
full_name="google.pubsub.v1.Subscription.expiration_policy", - index=7, + index=8, number=11, type=11, cpp_type=10, @@ -1203,8 +1239,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1498, - serialized_end=1886, + serialized_start=1520, + serialized_end=1941, ) @@ -1242,8 +1278,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1888, - serialized_end=1946, + serialized_start=1943, + serialized_end=2001, ) @@ -1299,8 +1335,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=649, - serialized_end=698, + serialized_start=671, + serialized_end=720, ) _PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( @@ -1355,8 +1391,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2165, - serialized_end=2225, + serialized_start=2220, + serialized_end=2280, ) _PUSHCONFIG = _descriptor.Descriptor( @@ -1437,8 +1473,8 @@ fields=[], ) ], - serialized_start=1949, - serialized_end=2250, + serialized_start=2004, + serialized_end=2305, ) @@ -1494,8 +1530,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2252, - serialized_end=2335, + serialized_start=2307, + serialized_end=2390, ) @@ -1533,8 +1569,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2337, - serialized_end=2383, + serialized_start=2392, + serialized_end=2438, ) @@ -1590,8 +1626,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2386, - serialized_end=2516, + serialized_start=2441, + serialized_end=2571, ) @@ -1665,8 +1701,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2518, - serialized_end=2600, + serialized_start=2573, + serialized_end=2655, ) @@ -1722,8 +1758,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2602, - serialized_end=2709, + serialized_start=2657, + serialized_end=2764, ) @@ -1761,8 +1797,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2711, - serialized_end=2760, + serialized_start=2766, 
+ serialized_end=2815, ) @@ -1818,8 +1854,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2762, - serialized_end=2860, + serialized_start=2817, + serialized_end=2915, ) @@ -1893,8 +1929,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2862, - serialized_end=2947, + serialized_start=2917, + serialized_end=3002, ) @@ -1932,8 +1968,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2949, - serialized_end=3025, + serialized_start=3004, + serialized_end=3080, ) @@ -2007,8 +2043,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3027, - serialized_end=3122, + serialized_start=3082, + serialized_end=3177, ) @@ -2064,8 +2100,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3124, - serialized_end=3183, + serialized_start=3179, + serialized_end=3238, ) @@ -2175,8 +2211,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3186, - serialized_end=3350, + serialized_start=3241, + serialized_end=3405, ) @@ -2214,8 +2250,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3352, - serialized_end=3437, + serialized_start=3407, + serialized_end=3492, ) @@ -2345,8 +2381,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3440, - serialized_end=3615, + serialized_start=3495, + serialized_end=3670, ) @@ -2402,8 +2438,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3617, - serialized_end=3735, + serialized_start=3672, + serialized_end=3790, ) @@ -2551,8 +2587,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3738, - serialized_end=3929, + serialized_start=3793, + serialized_end=3984, ) @@ -2590,8 +2626,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3931, - serialized_end=3969, + serialized_start=3986, + serialized_end=4024, ) @@ -2665,8 +2701,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3971, - 
serialized_end=4049, + serialized_start=4026, + serialized_end=4104, ) @@ -2722,8 +2758,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4051, - serialized_end=4146, + serialized_start=4106, + serialized_end=4201, ) @@ -2761,8 +2797,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4148, - serialized_end=4189, + serialized_start=4203, + serialized_end=4244, ) @@ -2844,8 +2880,8 @@ fields=[], ) ], - serialized_start=4191, - serialized_end=4300, + serialized_start=4246, + serialized_end=4355, ) @@ -2864,8 +2900,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4302, - serialized_end=4316, + serialized_start=4357, + serialized_end=4371, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3104,6 +3140,17 @@ The time at which the message was published, populated by the server when it receives the ``Publish`` call. It must not be populated by the publisher in a ``Publish`` call. + ordering_key: + Identifies related messages for which publish order should be + respected. If a ``Subscription`` has + ``enable_message_ordering`` set to ``true``, messages + published with the same ``ordering_key`` value will be + delivered to subscribers in the order in which they are + received by the Pub/Sub system. EXPERIMENTAL: This feature is + part of a closed alpha release. This API might be changed in + backward-incompatible ways and is not recommended for + production use. It is not subject to any SLA or deprecation + policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) ), @@ -3443,6 +3490,15 @@ or deprecation policy. labels: See Creating and managing labels. + enable_message_ordering: + If true, messages published with the same ``ordering_key`` in + ``PubsubMessage`` will be delivered to the subscribers in the + order in which they are received by the Pub/Sub system. + Otherwise, they may be delivered in any order. EXPERIMENTAL: + This feature is part of a closed alpha release. 
This API might + be changed in backward-incompatible ways and is not + recommended for production use. It is not subject to any SLA + or deprecation policy. expiration_policy: A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any @@ -4205,8 +4261,8 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=4319, - serialized_end=5406, + serialized_start=4374, + serialized_end=5461, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4309,8 +4365,8 @@ file=DESCRIPTOR, index=1, serialized_options=None, - serialized_start=5409, - serialized_end=7706, + serialized_start=5464, + serialized_end=7761, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 6cb01ab8bfc4..4a27eece2b29 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-03-14T17:14:10.475230Z", + "updateTime": "2019-03-23T12:20:50.408656Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.17", - "dockerImage": "googleapis/artman@sha256:7231f27272231a884e09edb5953148c85ecd8467780d33c4a35c3e507885715b" + "version": "0.16.19", + "dockerImage": "googleapis/artman@sha256:70ba28fda87e032ae44e6df41b7fc342c1b0cce1ed90658c4890eb4f613038c2" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "d05a746ba27b89374c7407c88c0d5dabfb0afc7c", - "internalRef": "238372434" + "sha": "e80435a132c53da26f46daf0787035ee63fb942b", + "internalRef": "239938670" } }, { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index c95feddf5ea3..3e27074bf781 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py 
+++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -75,11 +75,13 @@ def test_create_subscription(self): topic_2 = "topic2-1139259102" ack_deadline_seconds = 2135351438 retain_acked_messages = False + enable_message_ordering = True expected_response = { "name": name_2, "topic": topic_2, "ack_deadline_seconds": ack_deadline_seconds, "retain_acked_messages": retain_acked_messages, + "enable_message_ordering": enable_message_ordering, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -123,11 +125,13 @@ def test_get_subscription(self): topic = "topic110546223" ack_deadline_seconds = 2135351438 retain_acked_messages = False + enable_message_ordering = True expected_response = { "name": name, "topic": topic, "ack_deadline_seconds": ack_deadline_seconds, "retain_acked_messages": retain_acked_messages, + "enable_message_ordering": enable_message_ordering, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -169,11 +173,13 @@ def test_update_subscription(self): topic = "topic110546223" ack_deadline_seconds_2 = 921632575 retain_acked_messages = False + enable_message_ordering = True expected_response = { "name": name, "topic": topic, "ack_deadline_seconds": ack_deadline_seconds_2, "retain_acked_messages": retain_acked_messages, + "enable_message_ordering": enable_message_ordering, } expected_response = pubsub_pb2.Subscription(**expected_response) From b4575933e7dad96dc46a7da941d1c9fc3411d5f1 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Tue, 2 Apr 2019 14:42:21 -0700 Subject: [PATCH 0343/1197] Add routing header to method metadata (via synth). 
(#7623) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 131 +++++++++++ .../pubsub_v1/gapic/subscriber_client.py | 222 ++++++++++++++++++ packages/google-cloud-pubsub/synth.metadata | 10 +- 3 files changed, 358 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 43a24555a577..a308af5d13fd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -24,6 +24,7 @@ import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method import google.api_core.path_template +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template @@ -267,6 +268,19 @@ def create_topic( request = pubsub_pb2.Topic( name=name, labels=labels, message_storage_policy=message_storage_policy ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["create_topic"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -341,6 +355,19 @@ def update_topic( ) request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("topic.name", topic.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["update_topic"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -407,6 +434,19 @@ def 
publish( ) request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("topic", topic)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["publish"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -464,6 +504,19 @@ def get_topic( ) request = pubsub_pb2.GetTopicRequest(topic=topic) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("topic", topic)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_topic"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -542,6 +595,19 @@ def list_topics( ) request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project", project)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -633,6 +699,19 @@ def list_topic_subscriptions( request = pubsub_pb2.ListTopicSubscriptionsRequest( topic=topic, page_size=page_size ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("topic", topic)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -703,6 +782,19 @@ 
def delete_topic( ) request = pubsub_pb2.DeleteTopicRequest(topic=topic) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("topic", topic)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_topic"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -773,6 +865,19 @@ def set_iam_policy( ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("resource", resource)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["set_iam_policy"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -833,6 +938,19 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("resource", resource)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_iam_policy"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -903,6 +1021,19 @@ def test_iam_permissions( request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("resource", resource)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["test_iam_permissions"]( request, 
retry=retry, timeout=timeout, metadata=metadata ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 31ce68db82e9..5cd288ade084 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -23,6 +23,7 @@ import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.page_iterator import google.api_core.path_template @@ -370,6 +371,19 @@ def create_subscription( enable_message_ordering=enable_message_ordering, expiration_policy=expiration_policy, ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["create_subscription"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -427,6 +441,19 @@ def get_subscription( ) request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_subscription"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -499,6 +526,19 @@ def update_subscription( request = pubsub_pb2.UpdateSubscriptionRequest( subscription=subscription, update_mask=update_mask ) + if metadata is None: + metadata = [] + metadata = 
list(metadata) + try: + routing_header = [("subscription.name", subscription.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["update_subscription"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -579,6 +619,19 @@ def list_subscriptions( request = pubsub_pb2.ListSubscriptionsRequest( project=project, page_size=page_size ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project", project)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -649,6 +702,19 @@ def delete_subscription( ) request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_subscription"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -728,6 +794,19 @@ def modify_ack_deadline( ack_ids=ack_ids, ack_deadline_seconds=ack_deadline_seconds, ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["modify_ack_deadline"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -797,6 +876,19 @@ def acknowledge( request = 
pubsub_pb2.AcknowledgeRequest( subscription=subscription, ack_ids=ack_ids ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["acknowledge"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -869,6 +961,19 @@ def pull( max_messages=max_messages, return_immediately=return_immediately, ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["pull"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1015,6 +1120,19 @@ def modify_push_config( request = pubsub_pb2.ModifyPushConfigRequest( subscription=subscription, push_config=push_config ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["modify_push_config"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1101,6 +1219,19 @@ def list_snapshots( ) request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("project", project)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + iterator = 
google.api_core.page_iterator.GRPCIterator( client=None, method=functools.partial( @@ -1203,6 +1334,19 @@ def create_snapshot( request = pubsub_pb2.CreateSnapshotRequest( name=name, subscription=subscription, labels=labels ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("name", name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["create_snapshot"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1284,6 +1428,19 @@ def update_snapshot( request = pubsub_pb2.UpdateSnapshotRequest( snapshot=snapshot, update_mask=update_mask ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("snapshot.name", snapshot.name)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["update_snapshot"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1350,6 +1507,19 @@ def delete_snapshot( ) request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("snapshot", snapshot)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + self._inner_api_calls["delete_snapshot"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1438,6 +1608,19 @@ def seek( request = pubsub_pb2.SeekRequest( subscription=subscription, time=time, snapshot=snapshot ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = 
google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["seek"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1508,6 +1691,19 @@ def set_iam_policy( ) request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("resource", resource)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["set_iam_policy"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1568,6 +1764,19 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("resource", resource)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["get_iam_policy"]( request, retry=retry, timeout=timeout, metadata=metadata ) @@ -1638,6 +1847,19 @@ def test_iam_permissions( request = iam_policy_pb2.TestIamPermissionsRequest( resource=resource, permissions=permissions ) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("resource", resource)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + return self._inner_api_calls["test_iam_permissions"]( request, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 4a27eece2b29..1495d2973449 100644 --- 
a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-03-23T12:20:50.408656Z", + "updateTime": "2019-03-30T12:13:59.579900Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.19", - "dockerImage": "googleapis/artman@sha256:70ba28fda87e032ae44e6df41b7fc342c1b0cce1ed90658c4890eb4f613038c2" + "version": "0.16.21", + "dockerImage": "googleapis/artman@sha256:854131ec1af7b3a313253474c24748dc0acd217a58a0b74dbfb559f340a15d78" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e80435a132c53da26f46daf0787035ee63fb942b", - "internalRef": "239938670" + "sha": "fd67be4b51a8c809b97100ccf1c3cffd15b85d7b", + "internalRef": "241096997" } }, { From dbb08ff01b69f31989dca333e563c9d1f8a632dc Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 15 Apr 2019 14:49:44 -0700 Subject: [PATCH 0344/1197] Pub/Sub (nit): wrong var name in sample (#7705) --- .../google/cloud/pubsub_v1/subscriber/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 35b05a022a12..7294969daf6c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -174,7 +174,7 @@ def callback(message): print(message) message.ack() - future = subscriber.subscribe( + future = subscriber_client.subscribe( subscription, callback) try: From b9a6bd1f8f650e09c1761c81d69bf52bedc5b105 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 24 Apr 2019 23:53:30 +0200 Subject: [PATCH 0345/1197] Make PubSub subscriber Scheduler inherit from ABC (#7690) --- .../google/cloud/pubsub_v1/subscriber/scheduler.py | 2 +- .../tests/unit/pubsub_v1/subscriber/test_scheduler.py | 4 ++++ 2 files changed, 5 
insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 94502d48e754..42674c824c64 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -73,7 +73,7 @@ def _make_default_thread_pool_executor(): return concurrent.futures.ThreadPoolExecutor(max_workers=10, **executor_kwargs) -class ThreadScheduler(object): +class ThreadScheduler(Scheduler): """A thread pool-based scheduler. This scheduler is useful in typical I/O-bound message processing. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 2e1561db022e..774d0d63e2a2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -21,6 +21,10 @@ from google.cloud.pubsub_v1.subscriber import scheduler +def test_subclasses_base_abc(): + assert issubclass(scheduler.ThreadScheduler, scheduler.Scheduler) + + def test_constructor_defaults(): scheduler_ = scheduler.ThreadScheduler() From 71e921a1f66530401d902774ae2913efff21365b Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 30 Apr 2019 11:28:41 -0700 Subject: [PATCH 0346/1197] Firestore: add support for CollectionGroup queries. (#7758) * Initial plumbing for collection group queries * Don't assume direct children for collection group queries. * trim document path to DocumentReference * add unit tests * ensure all_descendants is set after calling other query methods * port test for node impl * port tests from node impl * Fix collection group test on Python 2.7. Blacken. * Use '_all_descendants' in 'Query.__eq__'. 
* Ensure '_all_descendants' propagates when narrowing query. * Refactor collection group system tests. Skip the one for 'where', because it requires a custom index. * Match node test's collection group ID / expected output. See: https://github.com/googleapis/nodejs-firestore/pull/578/files#diff-6b8febc8d51ea01205628091b3611eacR1188 * Match Node test for filter on '__name__'. Note that this still doesn't pass, so remains skipped. * Blacken. * Fix / unskip systest for collection groups w/ filter on '__name__'. * Blacken * 100% coverage. * Lint --- .../subscriber/_protocol/streaming_pull_manager.py | 13 +++++++------ packages/google-cloud-pubsub/tests/system.py | 1 - 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index acf514775779..d025fa71368f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -193,12 +193,10 @@ def load(self): if self._leaser is None: return 0 - return max( - [ - self._leaser.message_count / self._flow_control.max_messages, - self._leaser.bytes / self._flow_control.max_bytes, - ] - ) + messages_percent = self._leaser.message_count / self._flow_control.max_messages + bytes_percent = self._leaser.bytes / self._flow_control.max_bytes + print(f"{messages_percent}, {bytes_percent}") + return max(messages_percent, bytes_percent) def add_close_callback(self, callback): """Schedules a callable when the manager closes. 
@@ -210,10 +208,12 @@ def add_close_callback(self, callback): def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" + print(self.load) if self.load >= 1.0: if self._consumer is not None and not self._consumer.is_paused: _LOGGER.debug("Message backlog over load at %.2f, pausing.", self.load) self._consumer.pause() + print('paused') def maybe_resume_consumer(self): """Check the current load and resume the consumer if needed.""" @@ -227,6 +227,7 @@ def maybe_resume_consumer(self): return if self.load < self.flow_control.resume_threshold: + print('resuming') self._consumer.resume() else: _LOGGER.debug("Did not resume, current load is %s", self.load) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index e6001f8e7801..80349240fabb 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -65,7 +65,6 @@ def cleanup(): for to_call, argument in registry: to_call(argument) - def test_publish_messages(publisher, topic_path, cleanup): futures = [] # Make sure the topic gets deleted. 
From 3bfa5f1f98c12cd7277515ed60e3d5c9feb53a07 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 1 May 2019 16:55:52 -0700 Subject: [PATCH 0347/1197] Clean up debugging code (#7843) * remove prints * black formatter --- .../subscriber/_protocol/streaming_pull_manager.py | 13 ++++++------- packages/google-cloud-pubsub/tests/system.py | 1 + 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index d025fa71368f..acf514775779 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -193,10 +193,12 @@ def load(self): if self._leaser is None: return 0 - messages_percent = self._leaser.message_count / self._flow_control.max_messages - bytes_percent = self._leaser.bytes / self._flow_control.max_bytes - print(f"{messages_percent}, {bytes_percent}") - return max(messages_percent, bytes_percent) + return max( + [ + self._leaser.message_count / self._flow_control.max_messages, + self._leaser.bytes / self._flow_control.max_bytes, + ] + ) def add_close_callback(self, callback): """Schedules a callable when the manager closes. 
@@ -208,12 +210,10 @@ def add_close_callback(self, callback): def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" - print(self.load) if self.load >= 1.0: if self._consumer is not None and not self._consumer.is_paused: _LOGGER.debug("Message backlog over load at %.2f, pausing.", self.load) self._consumer.pause() - print('paused') def maybe_resume_consumer(self): """Check the current load and resume the consumer if needed.""" @@ -227,7 +227,6 @@ def maybe_resume_consumer(self): return if self.load < self.flow_control.resume_threshold: - print('resuming') self._consumer.resume() else: _LOGGER.debug("Did not resume, current load is %s", self.load) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 80349240fabb..e6001f8e7801 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -65,6 +65,7 @@ def cleanup(): for to_call, argument in registry: to_call(argument) + def test_publish_messages(publisher, topic_path, cleanup): futures = [] # Make sure the topic gets deleted. From 8a059c2adbbdc3f10c3056075f1a361df12c3184 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 7 May 2019 19:18:00 +0200 Subject: [PATCH 0348/1197] Fix pubsub Streaming Pull shutdown on RetryError (#7863) If a RetryError occurs, it is time to stop waiting for the underlying gRPC channel to recover from a transient failure, and a clean shutdown needs to be triggered. This commit assures that this indeed happens (it used to happen on terminal channel errors only). 
--- .../_protocol/streaming_pull_manager.py | 17 +++++++++++++++- .../subscriber/test_streaming_pull_manager.py | 20 ++++++++++++++++++- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index acf514775779..c51914693a94 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -262,7 +262,11 @@ def _send_unary_request(self, request): _LOGGER.debug("Sent request(s) over unary RPC.") def send(self, request): - """Queue a request to be sent to the RPC.""" + """Queue a request to be sent to the RPC. + + If a RetryError occurs, the manager shutdown is triggered, and the + error is re-raised. + """ if self._UNARY_REQUESTS: try: self._send_unary_request(request) @@ -272,6 +276,17 @@ def send(self, request): "non-fatal as stream requests are best-effort.", exc_info=True, ) + except exceptions.RetryError as exc: + _LOGGER.debug( + "RetryError while sending unary RPC. Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. 
+ self._on_rpc_done(exc) + raise + else: self._rpc.send(request) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index f5f7354f15d5..414fc00b96d1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -245,7 +245,7 @@ def test_send_unary_empty(): manager._client.modify_ack_deadline.assert_not_called() -def test_send_unary_error(caplog): +def test_send_unary_api_call_error(caplog): caplog.set_level(logging.DEBUG) manager = make_manager() @@ -259,6 +259,24 @@ def test_send_unary_error(caplog): assert "The front fell off" in caplog.text +def test_send_unary_retry_error(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._UNARY_REQUESTS = True + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error + + with pytest.raises(exceptions.RetryError): + manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + + assert "RetryError while sending unary RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + def test_send_streaming(): manager = make_manager() manager._UNARY_REQUESTS = False From 8ddae877aa5dbc886f47e03774ca291f183e06cd Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Thu, 9 May 2019 09:46:15 -0700 Subject: [PATCH 0349/1197] Add nox session `docs` (via synth). 
(#7778) --- packages/google-cloud-pubsub/docs/README.rst | 1 + packages/google-cloud-pubsub/docs/conf.py | 359 +++++++++++++++++++ packages/google-cloud-pubsub/docs/index.rst | 2 +- packages/google-cloud-pubsub/noxfile.py | 20 ++ packages/google-cloud-pubsub/synth.metadata | 12 +- 5 files changed, 387 insertions(+), 7 deletions(-) create mode 120000 packages/google-cloud-pubsub/docs/README.rst create mode 100644 packages/google-cloud-pubsub/docs/conf.py diff --git a/packages/google-cloud-pubsub/docs/README.rst b/packages/google-cloud-pubsub/docs/README.rst new file mode 120000 index 000000000000..89a0106941ff --- /dev/null +++ b/packages/google-cloud-pubsub/docs/README.rst @@ -0,0 +1 @@ +../README.rst \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py new file mode 100644 index 000000000000..9611fcb59493 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -0,0 +1,359 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-pubsub documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-pubsub" +copyright = u"2017, Google" +author = u"Google APIs" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. 
If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-pubsub-doc" + +# -- Options for warnings ------------------------------------------------------ + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-pubsub.tex", + u"google-cloud-pubsub Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-pubsub", + u"google-cloud-pubsub Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-pubsub", + u"google-cloud-pubsub Documentation", + author, + "google-cloud-pubsub", + "GAPIC library for the {metadata.shortName} service", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ( + "https://googleapis.github.io/google-cloud-python/latest", + None, + ), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://docs.python-requests.org/en/master/", None), + "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), + "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst index f20e6ed74414..5f6f8d4d26d1 100644 --- a/packages/google-cloud-pubsub/docs/index.rst +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -1,4 +1,4 @@ -.. include:: /../pubsub/README.rst +.. 
include:: README.rst API Documentation ----------------- diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index d692cf37f39c..0f528b7f3902 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -16,6 +16,7 @@ from __future__ import absolute_import import os +import shutil import nox @@ -138,3 +139,22 @@ def cover(session): session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") + +@nox.session(python="3.7") +def docs(session): + """Build the docs for this library.""" + + session.install('-e', '.') + session.install('sphinx', 'alabaster', 'recommonmark') + + shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) + session.run( + 'sphinx-build', + '-W', # warnings as errors + '-T', # show full traceback on exception + '-N', # no colors + '-b', 'html', + '-d', os.path.join('docs', '_build', 'doctrees', ''), + os.path.join('docs', ''), + os.path.join('docs', '_build', 'html', ''), + ) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 1495d2973449..8348023941e7 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-03-30T12:13:59.579900Z", + "updateTime": "2019-04-23T12:24:42.913789Z", "sources": [ { "generator": { "name": "artman", - "version": "0.16.21", - "dockerImage": "googleapis/artman@sha256:854131ec1af7b3a313253474c24748dc0acd217a58a0b74dbfb559f340a15d78" + "version": "0.17.0", + "dockerImage": "googleapis/artman@sha256:c58f4ec3838eb4e0718eb1bccc6512bd6850feaa85a360a9e38f6f848ec73bc2" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "fd67be4b51a8c809b97100ccf1c3cffd15b85d7b", - "internalRef": "241096997" + "sha": "547e19e7df398e9290e8e3674d7351efc500f9b0", + "internalRef": "244712781" } }, { "template": { 
"name": "python_library", "origin": "synthtool.gcp", - "version": "2019.2.26" + "version": "2019.4.10" } } ], From bf5711bf92fe05675399b090e364773f6d1a6c48 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot <44816363+yoshi-automation@users.noreply.github.com> Date: Mon, 13 May 2019 15:07:29 -0700 Subject: [PATCH 0350/1197] [CHANGE ME] Re-generated pubsub to pick up changes in the API or client library generator. (#7936) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 39 +- .../pubsub_v1/gapic/subscriber_client.py | 94 ++-- .../transports/publisher_grpc_transport.py | 4 + .../transports/subscriber_grpc_transport.py | 38 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 532 ++++++++---------- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 355 ++++++------ .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 35 +- packages/google-cloud-pubsub/synth.metadata | 12 +- .../unit/gapic/v1/test_publisher_client_v1.py | 9 +- 9 files changed, 502 insertions(+), 616 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index a308af5d13fd..3759f783c367 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -83,17 +83,17 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" + def project_path(cls, project): + """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic + "projects/{project}", project=project ) @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" + def topic_path(cls, project, topic): + """Return a fully-qualified topic 
string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}/topics/{topic}", project=project, topic=topic ) def __init__( @@ -200,6 +200,7 @@ def create_topic( name, labels=None, message_storage_policy=None, + kms_key_name=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -235,6 +236,14 @@ def create_topic( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` + kms_key_name (str): The resource name of the Cloud KMS CryptoKey to be used to protect + access to messages published on this topic. + + The expected format is + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. EXPERIMENTAL: This + feature is part of a closed alpha release. This API might be changed in + backward-incompatible ways and is not recommended for production use. It + is not subject to any SLA or deprecation policy. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -266,7 +275,10 @@ def create_topic( ) request = pubsub_pb2.Topic( - name=name, labels=labels, message_storage_policy=message_storage_policy + name=name, + labels=labels, + message_storage_policy=message_storage_policy, + kms_key_name=kms_key_name, ) if metadata is None: metadata = [] @@ -825,8 +837,7 @@ def set_iam_policy( Args: resource (str): REQUIRED: The resource for which the policy is being specified. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. + See the operation documentation for the appropriate value for this field. policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of the policy is limited to a few 10s of KB. 
An empty policy is a valid policy but certain Cloud Platform services (such as Projects) @@ -905,8 +916,7 @@ def get_iam_policy( Args: resource (str): REQUIRED: The resource for which the policy is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. + See the operation documentation for the appropriate value for this field. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -968,6 +978,10 @@ def test_iam_permissions( resource does not exist, this will return an empty set of permissions, not a NOT\_FOUND error. + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for authorization + checking. This operation may "fail open" without warning. + Example: >>> from google.cloud import pubsub_v1 >>> @@ -982,8 +996,7 @@ def test_iam_permissions( Args: resource (str): REQUIRED: The resource for which the policy detail is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. + See the operation documentation for the appropriate value for this field. permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. 
For more information see `IAM diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 5cd288ade084..b94a52c574ee 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -85,22 +85,6 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic - ) - @classmethod def project_path(cls, project): """Return a fully-qualified project string.""" @@ -117,6 +101,22 @@ def snapshot_path(cls, project, snapshot): snapshot=snapshot, ) + @classmethod + def subscription_path(cls, project, subscription): + """Return a fully-qualified subscription string.""" + return google.api_core.path_template.expand( + "projects/{project}/subscriptions/{subscription}", + project=project, + subscription=subscription, + ) + + @classmethod + def topic_path(cls, project, topic): + """Return a fully-qualified topic string.""" + return google.api_core.path_template.expand( + "projects/{project}/topics/{topic}", project=project, topic=topic + ) + def __init__( self, transport=None, @@ -293,19 +293,13 @@ def create_subscription( messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the ``message_retention_duration`` window. 
This must be true if you would - like to Seek to a timestamp. BETA: This feature is part of a beta - release. This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any SLA or - deprecation policy. + like to Seek to a timestamp. message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 - days or less than 10 minutes. BETA: This feature is part of a beta - release. This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any SLA or - deprecation policy. + days or less than 10 minutes. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Duration` @@ -323,10 +317,7 @@ def create_subscription( subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. If ``expiration_policy`` is not set, a *default policy* with ``ttl`` of 31 days will be used. The - minimum allowed value for ``expiration_policy.ttl`` is 1 day. BETA: This - feature is part of a beta release. This API might be changed in - backward-incompatible ways and is not recommended for production use. It - is not subject to any SLA or deprecation policy. + minimum allowed value for ``expiration_policy.ttl`` is 1 day. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` @@ -1151,10 +1142,7 @@ def list_snapshots( operations, which allow you to manage message acknowledgments in bulk. 
That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + captured by a snapshot. Example: >>> from google.cloud import pubsub_v1 @@ -1260,15 +1248,12 @@ def create_snapshot( Creates a snapshot from the requested subscription. Snapshots are used in Seek operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an - existing subscription to the state captured by a snapshot. BETA: This - feature is part of a beta release. This API might be changed in - backward-incompatible ways and is not recommended for production use. It - is not subject to any SLA or deprecation policy. If the snapshot already - exists, returns ``ALREADY_EXISTS``. If the requested subscription - doesn't exist, returns ``NOT_FOUND``. If the backlog in the subscription - is too old -- and the resulting snapshot would expire in less than 1 - hour -- then ``FAILED_PRECONDITION`` is returned. See also the - ``Snapshot.expire_time`` field. If the name is not provided in the + existing subscription to the state captured by a snapshot. If the + snapshot already exists, returns ``ALREADY_EXISTS``. If the requested + subscription doesn't exist, returns ``NOT_FOUND``. If the backlog in the + subscription is too old -- and the resulting snapshot would expire in + less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. See also + the ``Snapshot.expire_time`` field. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the `resource name format `__. @@ -1365,11 +1350,7 @@ def update_snapshot( operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. - Note that certain properties of a snapshot are not modifiable. + captured by a snapshot. Example: >>> from google.cloud import pubsub_v1 @@ -1459,9 +1440,6 @@ def delete_snapshot( you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. When the snapshot is deleted, all messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be created with the same name, but the new one has no association with the old @@ -1541,10 +1519,7 @@ def seek( you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + must be on the same topic. Example: >>> from google.cloud import pubsub_v1 @@ -1651,8 +1626,7 @@ def set_iam_policy( Args: resource (str): REQUIRED: The resource for which the policy is being specified. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. + See the operation documentation for the appropriate value for this field. policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of the policy is limited to a few 10s of KB. An empty policy is a valid policy but certain Cloud Platform services (such as Projects) @@ -1731,8 +1705,7 @@ def get_iam_policy( Args: resource (str): REQUIRED: The resource for which the policy is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. + See the operation documentation for the appropriate value for this field. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. @@ -1794,6 +1767,10 @@ def test_iam_permissions( resource does not exist, this will return an empty set of permissions, not a NOT\_FOUND error. + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for authorization + checking. This operation may "fail open" without warning. + Example: >>> from google.cloud import pubsub_v1 >>> @@ -1808,8 +1785,7 @@ def test_iam_permissions( Args: resource (str): REQUIRED: The resource for which the policy detail is being requested. - ``resource`` is usually specified as a path. For example, a Project - resource is specified as ``projects/{project}``. 
+ See the operation documentation for the appropriate value for this field. permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more information see `IAM diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 01d65362e387..d151e086a8b9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -236,6 +236,10 @@ def test_iam_permissions(self): resource does not exist, this will return an empty set of permissions, not a NOT\_FOUND error. + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for authorization + checking. This operation may "fail open" without warning. + Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 3bf052b25c3c..66596f2396fb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -278,10 +278,7 @@ def list_snapshots(self): operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + captured by a snapshot. Returns: Callable: A callable which accepts the appropriate @@ -297,15 +294,12 @@ def create_snapshot(self): Creates a snapshot from the requested subscription. Snapshots are used in Seek operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an - existing subscription to the state captured by a snapshot. BETA: This - feature is part of a beta release. This API might be changed in - backward-incompatible ways and is not recommended for production use. It - is not subject to any SLA or deprecation policy. If the snapshot already - exists, returns ``ALREADY_EXISTS``. If the requested subscription - doesn't exist, returns ``NOT_FOUND``. If the backlog in the subscription - is too old -- and the resulting snapshot would expire in less than 1 - hour -- then ``FAILED_PRECONDITION`` is returned. See also the - ``Snapshot.expire_time`` field. If the name is not provided in the + existing subscription to the state captured by a snapshot. If the + snapshot already exists, returns ``ALREADY_EXISTS``. If the requested + subscription doesn't exist, returns ``NOT_FOUND``. If the backlog in the + subscription is too old -- and the resulting snapshot would expire in + less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. See also + the ``Snapshot.expire_time`` field. If the name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the `resource name format `__. @@ -328,11 +322,7 @@ def update_snapshot(self): operations, which allow you to manage message acknowledgments in bulk. 
That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. - Note that certain properties of a snapshot are not modifiable. + captured by a snapshot. Returns: Callable: A callable which accepts the appropriate @@ -351,9 +341,6 @@ def delete_snapshot(self): you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. When the snapshot is deleted, all messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be created with the same name, but the new one has no association with the old @@ -377,10 +364,7 @@ def seek(self): you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + must be on the same topic. Returns: Callable: A callable which accepts the appropriate @@ -426,6 +410,10 @@ def test_iam_permissions(self): resource does not exist, this will return an empty set of permissions, not a NOT\_FOUND error. + Note: This operation is designed to be used for building + permission-aware UIs and command-line tools, not for authorization + checking. This operation may "fail open" without warning. + Returns: Callable: A callable which accepts the appropriate deserialized request object and returns a diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 715af9c18c46..7cea47b1e539 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -18,6 +18,7 @@ syntax = "proto3"; package google.pubsub.v1; import "google/api/annotations.proto"; +import "google/api/resource.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; @@ -90,10 +91,7 @@ service Publisher { // operations, which allow // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. + // captured by a snapshot. rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { option (google.api.http) = { @@ -113,6 +111,207 @@ service Publisher { } } +message MessageStoragePolicy { + // The list of GCP region IDs where messages that are published to the topic + // may be persisted in storage. Messages published by publishers running in + // non-allowed GCP regions (or running outside of GCP altogether) will be + // routed for storage in one of the allowed regions. An empty list indicates a + // misconfiguration at the project or organization level, which will result in + // all Publish operations failing. + repeated string allowed_persistence_regions = 1; +} + +// A topic resource. +message Topic { + // The name of the topic. It must have the format + // `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, + // and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), + // underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent + // signs (`%`). It must be between 3 and 255 characters in length, and it + // must not start with `"goog"`. + string name = 1; + + // See Creating and + // managing labels. + map labels = 2; + + // Policy constraining how messages published to the topic may be stored. It + // is determined when the topic is created based on the policy configured at + // the project level. It must not be set by the caller in the request to + // CreateTopic or to UpdateTopic. This field will be populated in the + // responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the + // response, then no constraints are in effect. 
+ MessageStoragePolicy message_storage_policy = 3; + + // The resource name of the Cloud KMS CryptoKey to be used to protect access + // to messages published on this topic. + // + // The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*`. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + string kms_key_name = 5; +} + +// A message that is published by publishers and consumed by subscribers. The +// message must contain either a non-empty data field or at least one attribute. +// Note that client libraries represent this object differently +// depending on the language. See the corresponding +// client +// library documentation for more information. See +// Quotas and limits +// for more information about message limits. +message PubsubMessage { + // The message data field. If this field is empty, the message must contain + // at least one attribute. + bytes data = 1; + + // Optional attributes for this message. + map attributes = 2; + + // ID of this message, assigned by the server when the message is published. + // Guaranteed to be unique within the topic. This value may be read by a + // subscriber that receives a `PubsubMessage` via a `Pull` call or a push + // delivery. It must not be populated by the publisher in a `Publish` call. + string message_id = 3; + + // The time at which the message was published, populated by the server when + // it receives the `Publish` call. It must not be populated by the + // publisher in a `Publish` call. + google.protobuf.Timestamp publish_time = 4; + + // Identifies related messages for which publish order should be respected. 
+ // If a `Subscription` has `enable_message_ordering` set to `true`, messages + // published with the same `ordering_key` value will be delivered to + // subscribers in the order in which they are received by the Pub/Sub system. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + string ordering_key = 5; +} + +// Request for the GetTopic method. +message GetTopicRequest { + // The name of the topic to get. + // Format is `projects/{project}/topics/{topic}`. + string topic = 1; +} + +// Request for the UpdateTopic method. +message UpdateTopicRequest { + // The updated topic object. + Topic topic = 1; + + // Indicates which fields in the provided topic to update. Must be specified + // and non-empty. Note that if `update_mask` contains + // "message_storage_policy" then the new value will be determined based on the + // policy configured at the project or organization level. The + // `message_storage_policy` must not be set in the `topic` provided above. + google.protobuf.FieldMask update_mask = 2; +} + +// Request for the Publish method. +message PublishRequest { + // The messages in the request will be published on this topic. + // Format is `projects/{project}/topics/{topic}`. + string topic = 1; + + // The messages to publish. + repeated PubsubMessage messages = 2; +} + +// Response for the `Publish` method. +message PublishResponse { + // The server-assigned ID of each published message, in the same order as + // the messages in the request. IDs are guaranteed to be unique within + // the topic. + repeated string message_ids = 1; +} + +// Request for the `ListTopics` method. +message ListTopicsRequest { + // The name of the project in which to list topics. + // Format is `projects/{project-id}`. + string project = 1; + + // Maximum number of topics to return. 
+ int32 page_size = 2; + + // The value returned by the last `ListTopicsResponse`; indicates that this is + // a continuation of a prior `ListTopics` call, and that the system should + // return the next page of data. + string page_token = 3; +} + +// Response for the `ListTopics` method. +message ListTopicsResponse { + // The resulting topics. + repeated Topic topics = 1; + + // If not empty, indicates that there may be more topics that match the + // request; this value should be passed in a new `ListTopicsRequest`. + string next_page_token = 2; +} + +// Request for the `ListTopicSubscriptions` method. +message ListTopicSubscriptionsRequest { + // The name of the topic that subscriptions are attached to. + // Format is `projects/{project}/topics/{topic}`. + string topic = 1; + + // Maximum number of subscription names to return. + int32 page_size = 2; + + // The value returned by the last `ListTopicSubscriptionsResponse`; indicates + // that this is a continuation of a prior `ListTopicSubscriptions` call, and + // that the system should return the next page of data. + string page_token = 3; +} + +// Response for the `ListTopicSubscriptions` method. +message ListTopicSubscriptionsResponse { + // The names of the subscriptions that match the request. + repeated string subscriptions = 1; + + // If not empty, indicates that there may be more subscriptions that match + // the request; this value should be passed in a new + // `ListTopicSubscriptionsRequest` to get more subscriptions. + string next_page_token = 2; +} + +// Request for the `ListTopicSnapshots` method. +message ListTopicSnapshotsRequest { + // The name of the topic that snapshots are attached to. + // Format is `projects/{project}/topics/{topic}`. + string topic = 1; + + // Maximum number of snapshot names to return. 
+ int32 page_size = 2; + + // The value returned by the last `ListTopicSnapshotsResponse`; indicates + // that this is a continuation of a prior `ListTopicSnapshots` call, and + // that the system should return the next page of data. + string page_token = 3; +} + +// Response for the `ListTopicSnapshots` method. +message ListTopicSnapshotsResponse { + // The names of the snapshots that match the request. + repeated string snapshots = 1; + + // If not empty, indicates that there may be more snapshots that match + // the request; this value should be passed in a new + // `ListTopicSnapshotsRequest` to get more snapshots. + string next_page_token = 2; +} + +// Request for the `DeleteTopic` method. +message DeleteTopicRequest { + // Name of the topic to delete. + // Format is `projects/{project}/topics/{topic}`. + string topic = 1; +} + // The service that an application uses to manipulate subscriptions and to // consume messages from a subscription via the `Pull` method or by // establishing a bi-directional stream using the `StreamingPull` method. @@ -238,10 +437,7 @@ service Subscriber { // Seek // operations, which allow you to manage message acknowledgments in bulk. That // is, you can set the acknowledgment state of messages in an existing - // subscription to the state captured by a snapshot.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. + // subscription to the state captured by a snapshot. rpc GetSnapshot(GetSnapshotRequest) returns (Snapshot) { option (google.api.http) = { get: "/v1/{snapshot=projects/*/snapshots/*}" @@ -253,10 +449,7 @@ service Subscriber { // operations, which allow // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. + // captured by a snapshot. rpc ListSnapshots(ListSnapshotsRequest) returns (ListSnapshotsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/snapshots" @@ -269,11 +462,7 @@ service Subscriber { // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state // captured by a snapshot. - //

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy.

- // If the snapshot already exists, returns `ALREADY_EXISTS`. + //

If the snapshot already exists, returns `ALREADY_EXISTS`. // If the requested subscription doesn't exist, returns `NOT_FOUND`. // If the backlog in the subscription is too old -- and the resulting snapshot // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. @@ -297,11 +486,7 @@ service Subscriber { // operations, which allow // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. - // Note that certain properties of a snapshot are not modifiable. + // captured by a snapshot. rpc UpdateSnapshot(UpdateSnapshotRequest) returns (Snapshot) { option (google.api.http) = { patch: "/v1/{snapshot.name=projects/*/snapshots/*}" @@ -315,9 +500,6 @@ service Subscriber { // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state // captured by a snapshot.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. // When the snapshot is deleted, all messages retained in the snapshot // are immediately dropped. After a snapshot is deleted, a new one may be // created with the same name, but the new one has no association with the old @@ -335,10 +517,7 @@ service Subscriber { // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state // captured by a snapshot. Note that both the subscription and the snapshot - // must be on the same topic.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. + // must be on the same topic. rpc Seek(SeekRequest) returns (SeekResponse) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:seek" @@ -347,204 +526,6 @@ service Subscriber { } } -message MessageStoragePolicy { - // The list of GCP region IDs where messages that are published to the topic - // may be persisted in storage. Messages published by publishers running in - // non-allowed GCP regions (or running outside of GCP altogether) will be - // routed for storage in one of the allowed regions. An empty list indicates a - // misconfiguration at the project or organization level, which will result in - // all Publish operations failing. - repeated string allowed_persistence_regions = 1; -} - -// A topic resource. -message Topic { - // The name of the topic. It must have the format - // `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, - // and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), - // underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent - // signs (`%`). It must be between 3 and 255 characters in length, and it - // must not start with `"goog"`. - string name = 1; - - // See Creating and - // managing labels. - map labels = 2; - - // Policy constraining how messages published to the topic may be stored. It - // is determined when the topic is created based on the policy configured at - // the project level. It must not be set by the caller in the request to - // CreateTopic or to UpdateTopic. This field will be populated in the - // responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the - // response, then no constraints are in effect. 
- MessageStoragePolicy message_storage_policy = 3; -} - -// A message that is published by publishers and consumed by subscribers. The -// message must contain either a non-empty data field or at least one attribute. -// Note that client libraries represent this object differently -// depending on the language. See the corresponding -// client -// library documentation for more information. See -// Quotas and limits -// for more information about message limits. -message PubsubMessage { - // The message data field. If this field is empty, the message must contain - // at least one attribute. - bytes data = 1; - - // Optional attributes for this message. - map attributes = 2; - - // ID of this message, assigned by the server when the message is published. - // Guaranteed to be unique within the topic. This value may be read by a - // subscriber that receives a `PubsubMessage` via a `Pull` call or a push - // delivery. It must not be populated by the publisher in a `Publish` call. - string message_id = 3; - - // The time at which the message was published, populated by the server when - // it receives the `Publish` call. It must not be populated by the - // publisher in a `Publish` call. - google.protobuf.Timestamp publish_time = 4; - - // Identifies related messages for which publish order should be respected. - // If a `Subscription` has `enable_message_ordering` set to `true`, messages - // published with the same `ordering_key` value will be delivered to - // subscribers in the order in which they are received by the Pub/Sub system. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. - string ordering_key = 5; -} - -// Request for the GetTopic method. -message GetTopicRequest { - // The name of the topic to get. - // Format is `projects/{project}/topics/{topic}`. 
- string topic = 1; -} - -// Request for the UpdateTopic method. -message UpdateTopicRequest { - // The updated topic object. - Topic topic = 1; - - // Indicates which fields in the provided topic to update. Must be specified - // and non-empty. Note that if `update_mask` contains - // "message_storage_policy" then the new value will be determined based on the - // policy configured at the project or organization level. The - // `message_storage_policy` must not be set in the `topic` provided above. - google.protobuf.FieldMask update_mask = 2; -} - -// Request for the Publish method. -message PublishRequest { - // The messages in the request will be published on this topic. - // Format is `projects/{project}/topics/{topic}`. - string topic = 1; - - // The messages to publish. - repeated PubsubMessage messages = 2; -} - -// Response for the `Publish` method. -message PublishResponse { - // The server-assigned ID of each published message, in the same order as - // the messages in the request. IDs are guaranteed to be unique within - // the topic. - repeated string message_ids = 1; -} - -// Request for the `ListTopics` method. -message ListTopicsRequest { - // The name of the project in which to list topics. - // Format is `projects/{project-id}`. - string project = 1; - - // Maximum number of topics to return. - int32 page_size = 2; - - // The value returned by the last `ListTopicsResponse`; indicates that this is - // a continuation of a prior `ListTopics` call, and that the system should - // return the next page of data. - string page_token = 3; -} - -// Response for the `ListTopics` method. -message ListTopicsResponse { - // The resulting topics. - repeated Topic topics = 1; - - // If not empty, indicates that there may be more topics that match the - // request; this value should be passed in a new `ListTopicsRequest`. - string next_page_token = 2; -} - -// Request for the `ListTopicSubscriptions` method. 
-message ListTopicSubscriptionsRequest { - // The name of the topic that subscriptions are attached to. - // Format is `projects/{project}/topics/{topic}`. - string topic = 1; - - // Maximum number of subscription names to return. - int32 page_size = 2; - - // The value returned by the last `ListTopicSubscriptionsResponse`; indicates - // that this is a continuation of a prior `ListTopicSubscriptions` call, and - // that the system should return the next page of data. - string page_token = 3; -} - -// Response for the `ListTopicSubscriptions` method. -message ListTopicSubscriptionsResponse { - // The names of the subscriptions that match the request. - repeated string subscriptions = 1; - - // If not empty, indicates that there may be more subscriptions that match - // the request; this value should be passed in a new - // `ListTopicSubscriptionsRequest` to get more subscriptions. - string next_page_token = 2; -} - -// Request for the `ListTopicSnapshots` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. -message ListTopicSnapshotsRequest { - // The name of the topic that snapshots are attached to. - // Format is `projects/{project}/topics/{topic}`. - string topic = 1; - - // Maximum number of snapshot names to return. - int32 page_size = 2; - - // The value returned by the last `ListTopicSnapshotsResponse`; indicates - // that this is a continuation of a prior `ListTopicSnapshots` call, and - // that the system should return the next page of data. - string page_token = 3; -} - -// Response for the `ListTopicSnapshots` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. -message ListTopicSnapshotsResponse { - // The names of the snapshots that match the request. - repeated string snapshots = 1; - - // If not empty, indicates that there may be more snapshots that match - // the request; this value should be passed in a new - // `ListTopicSnapshotsRequest` to get more snapshots. - string next_page_token = 2; -} - -// Request for the `DeleteTopic` method. -message DeleteTopicRequest { - // Name of the topic to delete. - // Format is `projects/{project}/topics/{topic}`. - string topic = 1; -} - // A subscription resource. message Subscription { // The name of the subscription. It must have the format @@ -595,10 +576,6 @@ message Subscription { // // Seek to a timestamp. - //

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. bool retain_acked_messages = 7; // How long to retain unacknowledged messages in the subscription's backlog, @@ -606,10 +583,7 @@ message Subscription { // If `retain_acked_messages` is true, then this also configures the retention // of acknowledged messages, and thus configures how far back in time a `Seek` // can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - // minutes.

- // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. + // minutes. google.protobuf.Duration message_retention_duration = 8; // See Creating and @@ -631,9 +605,6 @@ message Subscription { // operations on the subscription. If `expiration_policy` is not set, a // *default policy* with `ttl` of 31 days will be used. The minimum allowed // value for `expiration_policy.ttl` is 1 day. - // BETA: This feature is part of a beta release. This API might be - // changed in backward-incompatible ways and is not recommended for production - // use. It is not subject to any SLA or deprecation policy. ExpirationPolicy expiration_policy = 11; } @@ -651,6 +622,26 @@ message ExpirationPolicy { // Configuration for a push delivery endpoint. message PushConfig { + // Contains information needed for generating an + // [OpenID Connect + // token](https://developers.google.com/identity/protocols/OpenIDConnect). + message OidcToken { + // [Service account + // email](https://cloud.google.com/iam/docs/service-accounts) + // to be used for generating the OIDC token. The caller (for + // CreateSubscription, UpdateSubscription, and ModifyPushConfig RPCs) must + // have the iam.serviceAccounts.actAs permission for the service account. + string service_account_email = 1; + + // Audience to be used when generating OIDC token. The audience claim + // identifies the recipients that the JWT is intended for. The audience + // value is a single case-sensitive string. Having multiple values (array) + // for the audience field is not supported. More info about the OIDC JWT + // token audience here: https://tools.ietf.org/html/rfc7519#section-4.1.3 + // Note: if not specified, the Push endpoint URL will be used. + string audience = 2; + } + // A URL locating the endpoint to which messages should be pushed. 
// For example, a Webhook endpoint might use "https://example.com/push". string push_endpoint = 1; @@ -683,36 +674,11 @@ message PushConfig { // default to allow requests only from the Cloud Pub/Sub system, for example. // This field is optional and should be set only by users interested in // authenticated push. - // EXPERIMENTAL: This field a part of a closed alpha that may not be - // accessible to all users. It may be changed in backward-incompatible ways - // and is not subject to any SLA or deprecation policy. It is not recommended - // for production use. oneof authentication_method { // If specified, Pub/Sub will generate and attach an OIDC JWT token as an // `Authorization` header in the HTTP request for every pushed message. OidcToken oidc_token = 3; } - - // Contains information needed for generating an - // [OpenID Connect - // token](https://developers.google.com/identity/protocols/OpenIDConnect). - message OidcToken { - // [Service account - // email](https://cloud.google.com/iam/docs/service-accounts) - // to be used for generating the OIDC token. The caller (for - // CreateSubscription, UpdateSubscription, and ModifyPushConfig calls) must - // have the iam.serviceAccounts.actAs permission for the service account. - // See https://cloud.google.com/iam/docs/understanding-roles#service-accounts-roles. - string service_account_email = 1; - - // Audience to be used when generating OIDC token. The audience claim - // identifies the recipients that the JWT is intended for. The audience - // value is a single case-sensitive string. Having multiple values (array) - // for the audience field is not supported. More info about the OIDC JWT - // token audience here: https://tools.ietf.org/html/rfc7519#section-4.1.3 - // Note: if not specified, the Push endpoint URL will be used. - string audience = 2; - } } // A message and its corresponding acknowledgment ID. 
@@ -897,10 +863,7 @@ message StreamingPullResponse { repeated ReceivedMessage received_messages = 1; } -// Request for the `CreateSnapshot` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Request for the `CreateSnapshot` method. message CreateSnapshotRequest { // Optional user-provided name for this snapshot. // If the name is not provided in the request, the server will assign a random @@ -927,10 +890,7 @@ message CreateSnapshotRequest { map labels = 3; } -// Request for the UpdateSnapshot method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Request for the UpdateSnapshot method. message UpdateSnapshotRequest { // The updated snapshot object. Snapshot snapshot = 1; @@ -945,10 +905,7 @@ message UpdateSnapshotRequest { // operations, which allow // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state -// captured by a snapshot.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// captured by a snapshot. message Snapshot { // The name of the snapshot. string name = 1; @@ -973,20 +930,14 @@ message Snapshot { map labels = 4; } -// Request for the GetSnapshot method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Request for the GetSnapshot method. message GetSnapshotRequest { // The name of the snapshot to get. // Format is `projects/{project}/snapshots/{snap}`. string snapshot = 1; } -// Request for the `ListSnapshots` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Request for the `ListSnapshots` method. message ListSnapshotsRequest { // The name of the project in which to list snapshots. // Format is `projects/{project-id}`. @@ -1001,10 +952,7 @@ message ListSnapshotsRequest { string page_token = 3; } -// Response for the `ListSnapshots` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Response for the `ListSnapshots` method. message ListSnapshotsResponse { // The resulting snapshots. repeated Snapshot snapshots = 1; @@ -1014,20 +962,14 @@ message ListSnapshotsResponse { string next_page_token = 2; } -// Request for the `DeleteSnapshot` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Request for the `DeleteSnapshot` method. message DeleteSnapshotRequest { // The name of the snapshot to delete. // Format is `projects/{project}/snapshots/{snap}`. string snapshot = 1; } -// Request for the `Seek` method.

-// BETA: This feature is part of a beta release. This API might be -// changed in backward-incompatible ways and is not recommended for production -// use. It is not subject to any SLA or deprecation policy. +// Request for the `Seek` method. message SeekRequest { // The subscription to affect. string subscription = 1; diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 7b66db7fe659..ace1751bbf46 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -15,6 +15,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 @@ -29,10 +30,11 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xc1\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xa5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 
\x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\tB\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 
\x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x
1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapsh
ots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 
\x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xa5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 
\x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*
\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\x
e4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, @@ -75,8 +77,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=221, - serialized_end=280, + serialized_start=248, + serialized_end=307, ) @@ -132,8 +134,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=431, - serialized_end=476, + serialized_start=480, + serialized_end=525, ) _TOPIC = _descriptor.Descriptor( @@ -197,6 +199,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="kms_key_name", + full_name="google.pubsub.v1.Topic.kms_key_name", + index=3, + number=5, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, 
+ is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[_TOPIC_LABELSENTRY], @@ -206,8 +226,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=283, - serialized_end=476, + serialized_start=310, + serialized_end=525, ) @@ -263,8 +283,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=720, + serialized_start=720, + serialized_end=769, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -373,8 +393,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=479, - serialized_end=720, + serialized_start=528, + serialized_end=769, ) @@ -412,8 +432,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=722, - serialized_end=754, + serialized_start=771, + serialized_end=803, ) @@ -469,8 +489,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=756, - serialized_end=865, + serialized_start=805, + serialized_end=914, ) @@ -526,8 +546,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=867, - serialized_end=949, + serialized_start=916, + serialized_end=998, ) @@ -565,8 +585,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=951, - serialized_end=989, + serialized_start=1000, + serialized_end=1038, ) @@ -640,8 +660,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=991, - serialized_end=1066, + serialized_start=1040, + serialized_end=1115, ) @@ -697,8 +717,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1068, - serialized_end=1154, + serialized_start=1117, + serialized_end=1203, ) @@ -772,8 +792,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1156, - serialized_end=1241, + serialized_start=1205, + serialized_end=1290, ) @@ -829,8 +849,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1243, - serialized_end=1323, + serialized_start=1292, + serialized_end=1372, 
) @@ -904,8 +924,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1325, - serialized_end=1406, + serialized_start=1374, + serialized_end=1455, ) @@ -961,8 +981,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1408, - serialized_end=1480, + serialized_start=1457, + serialized_end=1529, ) @@ -1000,8 +1020,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1482, - serialized_end=1517, + serialized_start=1531, + serialized_end=1566, ) @@ -1057,8 +1077,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=431, - serialized_end=476, + serialized_start=480, + serialized_end=525, ) _SUBSCRIPTION = _descriptor.Descriptor( @@ -1239,8 +1259,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1520, - serialized_end=1941, + serialized_start=1569, + serialized_end=1990, ) @@ -1278,21 +1298,21 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1943, - serialized_end=2001, + serialized_start=1992, + serialized_end=2050, ) -_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PushConfig.AttributesEntry", +_PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( + name="OidcToken", + full_name="google.pubsub.v1.PushConfig.OidcToken", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.key", + name="service_account_email", + full_name="google.pubsub.v1.PushConfig.OidcToken.service_account_email", index=0, number=1, type=9, @@ -1309,8 +1329,8 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.value", + name="audience", + full_name="google.pubsub.v1.PushConfig.OidcToken.audience", index=1, number=2, type=9, @@ -1330,25 +1350,25 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + 
serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=671, - serialized_end=720, + serialized_start=2218, + serialized_end=2278, ) -_PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( - name="OidcToken", - full_name="google.pubsub.v1.PushConfig.OidcToken", +_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( + name="AttributesEntry", + full_name="google.pubsub.v1.PushConfig.AttributesEntry", filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.pubsub.v1.PushConfig.OidcToken.service_account_email", + name="key", + full_name="google.pubsub.v1.PushConfig.AttributesEntry.key", index=0, number=1, type=9, @@ -1365,8 +1385,8 @@ file=DESCRIPTOR, ), _descriptor.FieldDescriptor( - name="audience", - full_name="google.pubsub.v1.PushConfig.OidcToken.audience", + name="value", + full_name="google.pubsub.v1.PushConfig.AttributesEntry.value", index=1, number=2, type=9, @@ -1386,13 +1406,13 @@ extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=_b("8\001"), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2220, - serialized_end=2280, + serialized_start=720, + serialized_end=769, ) _PUSHCONFIG = _descriptor.Descriptor( @@ -1458,7 +1478,7 @@ ), ], extensions=[], - nested_types=[_PUSHCONFIG_ATTRIBUTESENTRY, _PUSHCONFIG_OIDCTOKEN], + nested_types=[_PUSHCONFIG_OIDCTOKEN, _PUSHCONFIG_ATTRIBUTESENTRY], enum_types=[], serialized_options=None, is_extendable=False, @@ -1473,8 +1493,8 @@ fields=[], ) ], - serialized_start=2004, - serialized_end=2305, + serialized_start=2053, + serialized_end=2354, ) @@ -1530,8 +1550,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2307, - serialized_end=2390, + serialized_start=2356, + serialized_end=2439, ) @@ -1569,8 +1589,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2392, - 
serialized_end=2438, + serialized_start=2441, + serialized_end=2487, ) @@ -1626,8 +1646,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2441, - serialized_end=2571, + serialized_start=2490, + serialized_end=2620, ) @@ -1701,8 +1721,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2573, - serialized_end=2655, + serialized_start=2622, + serialized_end=2704, ) @@ -1758,8 +1778,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2657, - serialized_end=2764, + serialized_start=2706, + serialized_end=2813, ) @@ -1797,8 +1817,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2766, - serialized_end=2815, + serialized_start=2815, + serialized_end=2864, ) @@ -1854,8 +1874,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2817, - serialized_end=2915, + serialized_start=2866, + serialized_end=2964, ) @@ -1929,8 +1949,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2917, - serialized_end=3002, + serialized_start=2966, + serialized_end=3051, ) @@ -1968,8 +1988,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3004, - serialized_end=3080, + serialized_start=3053, + serialized_end=3129, ) @@ -2043,8 +2063,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3082, - serialized_end=3177, + serialized_start=3131, + serialized_end=3226, ) @@ -2100,8 +2120,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3179, - serialized_end=3238, + serialized_start=3228, + serialized_end=3287, ) @@ -2211,8 +2231,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3241, - serialized_end=3405, + serialized_start=3290, + serialized_end=3454, ) @@ -2250,8 +2270,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3407, - serialized_end=3492, + serialized_start=3456, + serialized_end=3541, ) @@ -2307,8 +2327,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=431, - serialized_end=476, + serialized_start=480, + serialized_end=525, ) _CREATESNAPSHOTREQUEST = _descriptor.Descriptor( @@ -2381,8 +2401,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3495, - serialized_end=3670, + serialized_start=3544, + serialized_end=3719, ) @@ -2438,8 +2458,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3672, - serialized_end=3790, + serialized_start=3721, + serialized_end=3839, ) @@ -2495,8 +2515,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=431, - serialized_end=476, + serialized_start=480, + serialized_end=525, ) _SNAPSHOT = _descriptor.Descriptor( @@ -2587,8 +2607,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3793, - serialized_end=3984, + serialized_start=3842, + serialized_end=4033, ) @@ -2626,8 +2646,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3986, - serialized_end=4024, + serialized_start=4035, + serialized_end=4073, ) @@ -2701,8 +2721,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4026, - serialized_end=4104, + serialized_start=4075, + serialized_end=4153, ) @@ -2758,8 +2778,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4106, - serialized_end=4201, + serialized_start=4155, + serialized_end=4250, ) @@ -2797,8 +2817,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4203, - serialized_end=4244, + serialized_start=4252, + serialized_end=4293, ) @@ -2880,8 +2900,8 @@ fields=[], ) ], - serialized_start=4246, - serialized_end=4355, + serialized_start=4295, + serialized_end=4404, ) @@ -2900,8 +2920,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4357, - serialized_end=4371, + serialized_start=4406, + serialized_end=4420, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -2930,8 +2950,8 @@ _EXPIRATIONPOLICY.fields_by_name[ "ttl" ].message_type = 
google_dot_protobuf_dot_duration__pb2._DURATION -_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG _PUSHCONFIG_OIDCTOKEN.containing_type = _PUSHCONFIG +_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG _PUSHCONFIG.fields_by_name["attributes"].message_type = _PUSHCONFIG_ATTRIBUTESENTRY _PUSHCONFIG.fields_by_name["oidc_token"].message_type = _PUSHCONFIG_OIDCTOKEN _PUSHCONFIG.oneofs_by_name["authentication_method"].fields.append( @@ -3094,6 +3114,15 @@ This field will be populated in the responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the response, then no constraints are in effect. + kms_key_name: + The resource name of the Cloud KMS CryptoKey to be used to + protect access to messages published on this topic. The + expected format is + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + EXPERIMENTAL: This feature is part of a closed alpha release. + This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any + SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) ), @@ -3345,10 +3374,7 @@ dict( DESCRIPTOR=_LISTTOPICSNAPSHOTSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListTopicSnapshots`` method. BETA: This feature is - part of a beta release. This API might be changed in - backward-incompatible ways and is not recommended for production use. It - is not subject to any SLA or deprecation policy. + __doc__="""Request for the ``ListTopicSnapshots`` method. Attributes: @@ -3374,10 +3400,7 @@ dict( DESCRIPTOR=_LISTTOPICSNAPSHOTSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListTopicSnapshots`` method. BETA: This feature is - part of a beta release. This API might be changed in - backward-incompatible ways and is not recommended for production use. It - is not subject to any SLA or deprecation policy. 
+ __doc__="""Response for the ``ListTopicSnapshots`` method. Attributes: @@ -3472,11 +3495,7 @@ then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the ``message_retention_duration`` window. This must be true - if you would like to Seek to a timestamp. BETA: This feature - is part of a beta release. This API might be changed in - backward-incompatible ways and is not recommended for - production use. It is not subject to any SLA or deprecation - policy. + if you would like to Seek to a timestamp. message_retention_duration: How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is @@ -3484,10 +3503,7 @@ also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. BETA: This feature is part of a beta release. This - API might be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA - or deprecation policy. + minutes. labels: See Creating and managing labels. enable_message_ordering: @@ -3506,10 +3522,7 @@ the subscription or is issuing operations on the subscription. If ``expiration_policy`` is not set, a *default policy* with ``ttl`` of 31 days will be used. The minimum allowed value for - ``expiration_policy.ttl`` is 1 day. BETA: This feature is part - of a beta release. This API might be changed in backward- - incompatible ways and is not recommended for production use. - It is not subject to any SLA or deprecation policy. + ``expiration_policy.ttl`` is 1 day. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) ), @@ -3546,15 +3559,6 @@ "PushConfig", (_message.Message,), dict( - AttributesEntry=_reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - dict( - DESCRIPTOR=_PUSHCONFIG_ATTRIBUTESENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) - ), - ), OidcToken=_reflection.GeneratedProtocolMessageType( "OidcToken", (_message.Message,), @@ -3571,10 +3575,8 @@ `__ to be used for generating the OIDC token. The caller (for CreateSubscription, UpdateSubscription, and ModifyPushConfig - calls) must have the iam.serviceAccounts.actAs permission for - the service account. See - https://cloud.google.com/iam/docs/understanding-roles#service- - accounts-roles. + RPCs) must have the iam.serviceAccounts.actAs permission for + the service account. audience: Audience to be used when generating OIDC token. The audience claim identifies the recipients that the JWT is intended for. @@ -3587,6 +3589,15 @@ # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.OidcToken) ), ), + AttributesEntry=_reflection.GeneratedProtocolMessageType( + "AttributesEntry", + (_message.Message,), + dict( + DESCRIPTOR=_PUSHCONFIG_ATTRIBUTESENTRY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) + ), + ), DESCRIPTOR=_PUSHCONFIG, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Configuration for a push delivery endpoint. @@ -3622,10 +3633,6 @@ that are private by default to allow requests only from the Cloud Pub/Sub system, for example. This field is optional and should be set only by users interested in authenticated push. - EXPERIMENTAL: This field a part of a closed alpha that may not - be accessible to all users. 
It may be changed in backward- - incompatible ways and is not subject to any SLA or deprecation - policy. It is not recommended for production use. oidc_token: If specified, Pub/Sub will generate and attach an OIDC JWT token as an ``Authorization`` header in the HTTP request for @@ -3635,8 +3642,8 @@ ), ) _sym_db.RegisterMessage(PushConfig) -_sym_db.RegisterMessage(PushConfig.AttributesEntry) _sym_db.RegisterMessage(PushConfig.OidcToken) +_sym_db.RegisterMessage(PushConfig.AttributesEntry) ReceivedMessage = _reflection.GeneratedProtocolMessageType( "ReceivedMessage", @@ -3988,10 +3995,7 @@ ), DESCRIPTOR=_CREATESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``CreateSnapshot`` method. BETA: This feature is part of - a beta release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + __doc__="""Request for the ``CreateSnapshot`` method. Attributes: @@ -4027,10 +4031,7 @@ dict( DESCRIPTOR=_UPDATESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the UpdateSnapshot method. BETA: This feature is part of a - beta release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + __doc__="""Request for the UpdateSnapshot method. Attributes: @@ -4063,10 +4064,7 @@ __doc__="""A snapshot resource. Snapshots are used in Seek operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the - state captured by a snapshot. BETA: This feature is part of a beta - release. This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any SLA or - deprecation policy. + state captured by a snapshot. 
Attributes: @@ -4103,10 +4101,7 @@ dict( DESCRIPTOR=_GETSNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the GetSnapshot method. BETA: This feature is part of a beta - release. This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any SLA or - deprecation policy. + __doc__="""Request for the GetSnapshot method. Attributes: @@ -4125,10 +4120,7 @@ dict( DESCRIPTOR=_LISTSNAPSHOTSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListSnapshots`` method. BETA: This feature is part of - a beta release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + __doc__="""Request for the ``ListSnapshots`` method. Attributes: @@ -4154,10 +4146,7 @@ dict( DESCRIPTOR=_LISTSNAPSHOTSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListSnapshots`` method. BETA: This feature is part of - a beta release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + __doc__="""Response for the ``ListSnapshots`` method. Attributes: @@ -4179,10 +4168,7 @@ dict( DESCRIPTOR=_DELETESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``DeleteSnapshot`` method. BETA: This feature is part of - a beta release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. + __doc__="""Request for the ``DeleteSnapshot`` method. Attributes: @@ -4201,10 +4187,7 @@ dict( DESCRIPTOR=_SEEKREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``Seek`` method. BETA: This feature is part of a beta - release. 
This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any SLA or - deprecation policy. + __doc__="""Request for the ``Seek`` method. Attributes: @@ -4261,8 +4244,8 @@ file=DESCRIPTOR, index=0, serialized_options=None, - serialized_start=4374, - serialized_end=5461, + serialized_start=4423, + serialized_end=5510, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4365,8 +4348,8 @@ file=DESCRIPTOR, index=1, serialized_options=None, - serialized_start=5464, - serialized_end=7761, + serialized_start=5513, + serialized_end=7810, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index dfe337e36271..dd1a365877e1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -117,10 +117,7 @@ def ListTopicSnapshots(self, request, context): operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + captured by a snapshot. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -402,10 +399,7 @@ def GetSnapshot(self, request, context):
Seek operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + subscription to the state captured by a snapshot. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -417,10 +411,7 @@ def ListSnapshots(self, request, context): operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + captured by a snapshot. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -433,11 +424,7 @@ def CreateSnapshot(self, request, context): you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. -

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy.

- If the snapshot already exists, returns `ALREADY_EXISTS`. +

If the snapshot already exists, returns `ALREADY_EXISTS`. If the requested subscription doesn't exist, returns `NOT_FOUND`. If the backlog in the subscription is too old -- and the resulting snapshot would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. @@ -460,11 +447,7 @@ def UpdateSnapshot(self, request, context): operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. - Note that certain properties of a snapshot are not modifiable. + captured by a snapshot. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -477,9 +460,6 @@ def DeleteSnapshot(self, request, context): you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. When the snapshot is deleted, all messages retained in the snapshot are immediately dropped. After a snapshot is deleted, a new one may be created with the same name, but the new one has no association with the old @@ -497,10 +477,7 @@ def Seek(self, request, context): you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic.

- BETA: This feature is part of a beta release. This API might be - changed in backward-incompatible ways and is not recommended for production - use. It is not subject to any SLA or deprecation policy. + must be on the same topic. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 8348023941e7..c677890bd9af 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-04-23T12:24:42.913789Z", + "updateTime": "2019-05-10T12:31:34.113225Z", "sources": [ { "generator": { "name": "artman", - "version": "0.17.0", - "dockerImage": "googleapis/artman@sha256:c58f4ec3838eb4e0718eb1bccc6512bd6850feaa85a360a9e38f6f848ec73bc2" + "version": "0.19.0", + "dockerImage": "googleapis/artman@sha256:d3df563538225ac6caac45d8ad86499500211d1bcb2536955a6dbda15e1b368e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "547e19e7df398e9290e8e3674d7351efc500f9b0", - "internalRef": "244712781" + "sha": "07883be5bf3c3233095e99d8e92b8094f5d7084a", + "internalRef": "247530843" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.4.10" + "version": "2019.5.2" } } ], diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index c7dbae01da07..fb6de29fd129 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -66,7 +66,8 @@ class TestPublisherClient(object): def test_create_topic(self): # Setup Expected Response name_2 = "name2-1052831874" - expected_response = {"name": name_2} + kms_key_name = "kmsKeyName2094986649" + expected_response 
= {"name": name_2, "kms_key_name": kms_key_name} expected_response = pubsub_pb2.Topic(**expected_response) # Mock the API response @@ -104,7 +105,8 @@ def test_create_topic_exception(self): def test_update_topic(self): # Setup Expected Response name = "name3373707" - expected_response = {"name": name} + kms_key_name = "kmsKeyName2094986649" + expected_response = {"name": name, "kms_key_name": kms_key_name} expected_response = pubsub_pb2.Topic(**expected_response) # Mock the API response @@ -191,7 +193,8 @@ def test_publish_exception(self): def test_get_topic(self): # Setup Expected Response name = "name3373707" - expected_response = {"name": name} + kms_key_name = "kmsKeyName2094986649" + expected_response = {"name": name, "kms_key_name": kms_key_name} expected_response = pubsub_pb2.Topic(**expected_response) # Mock the API response From 76338d809954a1206c7beb6581a2db1dc8b0a78c Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 15 May 2019 08:52:59 +0200 Subject: [PATCH 0351/1197] Propagate subscribe callback errors to main thread (#7954) The **reCAPTCHA Enterprise** build failure is unrelated to this, thus merging. 
--- .../_protocol/streaming_pull_manager.py | 18 ++++++++---- .../cloud/pubsub_v1/subscriber/client.py | 2 +- packages/google-cloud-pubsub/tests/system.py | 29 +++++++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 18 ++++++++---- .../subscriber/test_subscriber_client.py | 8 +++-- 5 files changed, 60 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index c51914693a94..650e2f661915 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -52,7 +52,7 @@ def _maybe_wrap_exception(exception): return exception -def _wrap_callback_errors(callback, message): +def _wrap_callback_errors(callback, on_callback_error, message): """Wraps a user callback so that if an exception occurs the message is nacked. @@ -62,14 +62,15 @@ def _wrap_callback_errors(callback, message): """ try: callback(message) - except Exception: + except Exception as exc: # Note: the likelihood of this failing is extremely low. This just adds # a message to a queue, so if this doesn't work the world is in an # unrecoverable state and this thread should just bail. _LOGGER.exception( - "Top-level exception occurred in callback while processing a " "message" + "Top-level exception occurred in callback while processing a message" ) message.nack() + on_callback_error(exc) class StreamingPullManager(object): @@ -299,13 +300,16 @@ def heartbeat(self): if self._rpc is not None and self._rpc.is_active: self._rpc.send(types.StreamingPullRequest()) - def open(self, callback): + def open(self, callback, on_callback_error): """Begin consuming messages. 
Args: - callback (Callable[None, google.cloud.pubsub_v1.message.Messages]): + callback (Callable[None, google.cloud.pubsub_v1.message.Message]): A callback that will be called for each message received on the stream. + on_callback_error (Callable[Exception]): + A callable that will be called if an exception is raised in + the provided `callback`. """ if self.is_active: raise ValueError("This manager is already open.") @@ -313,7 +317,9 @@ def open(self, callback): if self._closed: raise ValueError("This manager has been closed and can not be re-used.") - self._callback = functools.partial(_wrap_callback_errors, callback) + self._callback = functools.partial( + _wrap_callback_errors, callback, on_callback_error + ) # Create the RPC self._rpc = bidi.ResumableBidiRpc( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 7294969daf6c..f2e8faa4fcf5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -209,6 +209,6 @@ def callback(message): future = futures.StreamingPullFuture(manager) - manager.open(callback) + manager.open(callback=callback, on_callback_error=future.set_exception) return future diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index e6001f8e7801..e8921e039164 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -18,6 +18,7 @@ import threading import time +import mock import pytest import six @@ -178,6 +179,34 @@ def test_subscribe_to_messages_async_callbacks( future.cancel() +class TestStreamingPull(object): + def test_streaming_pull_callback_error_propagation( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. 
+ cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and subscribe to it + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + # publish a messages and wait until published + future = publisher.publish(topic_path, b"hello!") + future.result(timeout=30) + + # Now subscribe to the topic and verify that an error in the callback + # is propagated through the streaming pull future. + class CallbackError(Exception): + pass + + callback = mock.Mock(side_effect=CallbackError) + future = subscriber.subscribe(subscription_path, callback) + + with pytest.raises(CallbackError): + future.result(timeout=30) + + class AckCallback(object): def __init__(self): self.calls = 0 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 414fc00b96d1..cbd02e28ac6c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -51,20 +51,26 @@ def test__maybe_wrap_exception(exception, expected_cls): def test__wrap_callback_errors_no_error(): msg = mock.create_autospec(message.Message, instance=True) callback = mock.Mock() + on_callback_error = mock.Mock() - streaming_pull_manager._wrap_callback_errors(callback, msg) + streaming_pull_manager._wrap_callback_errors(callback, on_callback_error, msg) callback.assert_called_once_with(msg) msg.nack.assert_not_called() + on_callback_error.assert_not_called() def test__wrap_callback_errors_error(): + callback_error = ValueError("meep") + msg = mock.create_autospec(message.Message, instance=True) - callback = mock.Mock(side_effect=ValueError("meep")) + callback = mock.Mock(side_effect=callback_error) + on_callback_error = 
mock.Mock() - streaming_pull_manager._wrap_callback_errors(callback, msg) + streaming_pull_manager._wrap_callback_errors(callback, on_callback_error, msg) msg.nack.assert_called_once() + on_callback_error.assert_called_once_with(callback_error) def test_constructor_and_default_state(): @@ -319,7 +325,7 @@ def test_heartbeat_inactive(): def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): manager = make_manager() - manager.open(mock.sentinel.callback) + manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) heartbeater.assert_called_once_with(manager) heartbeater.return_value.start.assert_called_once() @@ -357,7 +363,7 @@ def test_open_already_active(): manager._consumer.is_active = True with pytest.raises(ValueError, match="already open"): - manager.open(mock.sentinel.callback) + manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) def test_open_has_been_closed(): @@ -365,7 +371,7 @@ def test_open_has_been_closed(): manager._closed = True with pytest.raises(ValueError, match="closed"): - manager.open(mock.sentinel.callback) + manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) def make_running_manager(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index d4914fee8f5b..8bdb414c6280 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -72,7 +72,9 @@ def test_subscribe(manager_open): assert isinstance(future, futures.StreamingPullFuture) assert future._manager._subscription == "sub_name_a" - manager_open.assert_called_once_with(mock.ANY, mock.sentinel.callback) + manager_open.assert_called_once_with( + mock.ANY, mock.sentinel.callback, future.set_exception + ) @mock.patch( @@ -97,4 +99,6 @@ def 
test_subscribe_options(manager_open): assert future._manager._subscription == "sub_name_a" assert future._manager.flow_control == flow_control assert future._manager._scheduler == scheduler - manager_open.assert_called_once_with(mock.ANY, mock.sentinel.callback) + manager_open.assert_called_once_with( + mock.ANY, mock.sentinel.callback, future.set_exception + ) From 958756ddcf1c98878bae403f452173a513f9e421 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 15 May 2019 18:33:07 -0400 Subject: [PATCH 0352/1197] Release pubsub-0.41.0 (#7991) --- packages/google-cloud-pubsub/CHANGELOG.md | 20 ++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a5497c6f4743..a63bc01d8b3d 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,26 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.41.0 + +05-15-2019 13:57 PDT + + +### New Features +- Add `kms_key_name` arg to `create_topic`; remove BETA warnings (via synth). ([#7936](https://github.com/googleapis/google-cloud-python/pull/7936)) +- Add message ordering (via synth). ([#7551](https://github.com/googleapis/google-cloud-python/pull/7551)) + +### Implementation Changes +- Propagate subscribe callback errors to main thread ([#7954](https://github.com/googleapis/google-cloud-python/pull/7954)) +- Fix pubsub Streaming Pull shutdown on RetryError ([#7863](https://github.com/googleapis/google-cloud-python/pull/7863)) +- Make PubSub subscriber Scheduler inherit from ABC ([#7690](https://github.com/googleapis/google-cloud-python/pull/7690)) +- Add routing header to method metadata (via synth). ([#7623](https://github.com/googleapis/google-cloud-python/pull/7623)) + +### Internal / Testing Changes +- Remove classifier for Python 3.4 for end-of-life. 
([#7535](https://github.com/googleapis/google-cloud-python/pull/7535)) +- Add nox session `docs` (via synth). ([#7778](https://github.com/googleapis/google-cloud-python/pull/7778)) +- Pub/Sub (nit): wrong var name in sample ([#7705](https://github.com/googleapis/google-cloud-python/pull/7705)) + ## 0.40.0 03-15-2019 14:09 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index d9ad1aea8410..86d463ffe754 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.40.0' +version = '0.41.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 716e4c8494879557ebc5fd9c46670ae03a77bafe Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 21 May 2019 15:49:19 -0400 Subject: [PATCH 0353/1197] Drop mention of long-removed 'policy' object. (#8081) Closes #8080. --- packages/google-cloud-pubsub/docs/subscriber/index.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst index 7593f17b711f..1e13e1dc1e79 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -5,9 +5,7 @@ Subscribing to messages is handled through the :class:`~.pubsub_v1.subscriber.client.Client` class (aliased as ``google.cloud.pubsub.SubscriberClient``). This class provides a :meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to -attach to subscriptions on existing topics, and (most importantly) a -:meth:`~.pubsub_v1.subscriber.policy.thread.Policy.open` method that -consumes messages from Pub/Sub. +attach to subscriptions on existing topics. 
Instantiating a subscriber client is straightforward: From 07b3387d77f2e4f66315a5a85cf4dcb0b36ed603 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 22 May 2019 09:18:23 -0700 Subject: [PATCH 0354/1197] Add empty lines (via synth). (#8067) --- packages/google-cloud-pubsub/google/cloud/pubsub.py | 2 ++ .../google/cloud/pubsub_v1/gapic/publisher_client.py | 2 ++ .../google/cloud/pubsub_v1/gapic/subscriber_client.py | 2 ++ .../gapic/transports/publisher_grpc_transport.py | 1 + .../gapic/transports/subscriber_grpc_transport.py | 1 + packages/google-cloud-pubsub/synth.metadata | 10 +++++----- .../tests/unit/gapic/v1/test_publisher_client_v1.py | 1 + .../tests/unit/gapic/v1/test_subscriber_client_v1.py | 1 + 8 files changed, 15 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index 7b2c75158878..b9f92e991aa6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -14,10 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. + from __future__ import absolute_import from google.cloud.pubsub_v1 import PublisherClient from google.cloud.pubsub_v1 import SubscriberClient from google.cloud.pubsub_v1 import types + __all__ = ("types", "PublisherClient", "SubscriberClient") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 3759f783c367..9bfe52ac638d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ """Accesses the google.pubsub.v1 Publisher API.""" import functools @@ -39,6 +40,7 @@ from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 + _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index b94a52c574ee..4aed21df8f27 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Accesses the google.pubsub.v1 Subscriber API.""" import functools @@ -41,6 +42,7 @@ from google.protobuf import field_mask_pb2 from google.protobuf import timestamp_pb2 + _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index d151e086a8b9..ff1878ea8d1d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+ import google.api_core.grpc_helpers from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 66596f2396fb..1975250e96fb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + import google.api_core.grpc_helpers from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index c677890bd9af..683b3b560c59 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-05-10T12:31:34.113225Z", + "updateTime": "2019-05-21T12:27:35.596050Z", "sources": [ { "generator": { "name": "artman", - "version": "0.19.0", - "dockerImage": "googleapis/artman@sha256:d3df563538225ac6caac45d8ad86499500211d1bcb2536955a6dbda15e1b368e" + "version": "0.20.0", + "dockerImage": "googleapis/artman@sha256:3246adac900f4bdbd62920e80de2e5877380e44036b3feae13667ec255ebf5ec" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "07883be5bf3c3233095e99d8e92b8094f5d7084a", - "internalRef": "247530843" + "sha": "32a10f69e2c9ce15bba13ab1ff928bacebb25160", + "internalRef": "249058354" } }, { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index fb6de29fd129..6c64cf5a9ba2 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py 
+++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Unit tests.""" import mock diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index 3e27074bf781..b1d7f52fad5f 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -13,6 +13,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + """Unit tests.""" import mock From 4906219e82c50d7fbd9fd4840ac585484cfc42d3 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 23 May 2019 10:54:32 -0700 Subject: [PATCH 0355/1197] Pub/Sub: staticmethod check (#8091) * Add staticmethod check * Remove unnecessary import * compact if isinstance * add unit test & remove duplicate * run black * use standard test fx name & call out self.api --- .../google/cloud/pubsub_v1/_gapic.py | 28 ++++----- .../tests/unit/pubsub_v1/test__gapic.py | 63 +++++++++++++++++++ 2 files changed, 74 insertions(+), 17 deletions(-) create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index 25cb3e5fa33c..da755dfbca09 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2019, Google LLC All rights reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -25,29 +25,21 @@ def add_methods(source_class, blacklist=()): not added. """ - def wrap(wrapped_fx): + def wrap(wrapped_fx, lookup_fx): """Wrap a GAPIC method; preserve its name and docstring.""" - # If this is a static or class method, then we need to *not* + # If this is a static or class method, then we do *not* # send self as the first argument. # - # Similarly, for instance methods, we need to send self.api rather + # For instance methods, we need to send self.api rather # than self, since that is where the actual methods were declared. - instance_method = True - # If this is a bound method it's a classmethod. - self = getattr(wrapped_fx, "__self__", None) - if issubclass(type(self), type): - instance_method = False - - # Okay, we have figured out what kind of method this is; send - # down the correct wrapper function. - if instance_method: + if isinstance(lookup_fx, (classmethod, staticmethod)): + fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa + return staticmethod(functools.wraps(wrapped_fx)(fx)) + else: fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) - fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa - return staticmethod(functools.wraps(wrapped_fx)(fx)) - def actual_decorator(cls): # Reflectively iterate over most of the methods on the source class # (the GAPIC) and make wrapped versions available on this client. @@ -66,7 +58,9 @@ def actual_decorator(cls): continue # Add a wrapper method to this object. - fx = wrap(getattr(source_class, name)) + lookup_fx = source_class.__dict__[name] + fx = wrap(attr, lookup_fx) + setattr(cls, name, fx) # Return the augmented class. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py new file mode 100644 index 000000000000..5478aee18213 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py @@ -0,0 +1,63 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +from google.cloud.pubsub_v1 import _gapic + + +class SourceClass(object): + def __init__(self): + self.x = "x" + + def method(self): + return "source class instance method" + + @staticmethod + def static_method(): + return "source class static method" + + @classmethod + def class_method(cls): + return "source class class method" + + @classmethod + def blacklisted_method(cls): + return "source class blacklisted method" + + +def test_add_method(): + @_gapic.add_methods(SourceClass, ("blacklisted_method",)) + class Foo(object): + def __init__(self): + self.api = SourceClass() + + def method(self): + return "foo class instance method" + + foo = Foo() + + # Any method that's callable and not blacklisted is "inherited". + assert set(["method", "static_method", "class_method"]) <= set(dir(foo)) + assert "blacklisted_method" not in dir(foo) + + # Source Class's static and class methods become static methods. 
+ assert type(Foo.__dict__["static_method"]) == staticmethod + assert foo.static_method() == "source class static method" + assert type(Foo.__dict__["class_method"]) == staticmethod + assert foo.class_method() == "source class class method" + + # The decorator changes the behavior of instance methods of the wrapped class. + # method() is called on an instance of the Source Class (stored as an + # attribute on the wrapped class). + assert foo.method() == "source class instance method" From b29118aade713ffb464240b91defef65ceb1947c Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 24 May 2019 19:01:46 +0200 Subject: [PATCH 0356/1197] PubSub: Fix streaming pull incorrectly handling FlowControl max_messages setting (#7948) * Allow skipping Pub/Sub Message autolease In certain cases automatically leasing Message instances upon creation might not be desired, thus an optional parameter is added to Message initializer that allows skipping that. The default behavior is not changed, new Message instances *are* automatically leased upon creation. * Directly lease received Messages w/o request queue Leasing messages through a request queue in dispatcher causes a race condition with the ConsumeBidirectionalStream thread. A request to pause the background consumer can arrive when the Bidi consumer is just about to fetch the the next batch of messages, and thus the latter gets paused only *after* fetching those messages. This commit synchronously leases received messages in the streaming pull manager callback. If that hits the lease management load limit, the background consumer is paused synchronously, and will correctly pause *before* pulling another batch of messages. * Add streaming pull message holding buffer If the PubSub backend sends too many messages in a single response that would cause the leaser overload should all these messeges were added to it, the StreamingPullManager now puts excessive messages into an internal holding buffer. 
The messages are released from the buffer when the leaser again has enough capacity (as defined by the FlowControl settings), and the message received callback is invoked then as well. * Make a few streaming pull methods thread-safe With the StreamingPullManager._on_response() callback adding received messages to the leaser synchronously (in the background consumer thread), a race condition can happen with the dispatcher thread that can asynchronously add (remove) messages to (from) lease management, e.g. on ack() and nack() requests. The same is the case with related operations of maybe pausing/resuming the background consumer. This commit thus adds locks in key places, assuring that these operations are atomic, ant not subject to race conditions. * Add system test for PubSub max_messages setting --- .../pubsub_v1/subscriber/_protocol/leaser.py | 52 ++++--- .../_protocol/streaming_pull_manager.py | 125 ++++++++++++--- .../cloud/pubsub_v1/subscriber/message.py | 14 +- packages/google-cloud-pubsub/tests/system.py | 113 ++++++++++++++ .../unit/pubsub_v1/subscriber/test_message.py | 13 +- .../subscriber/test_streaming_pull_manager.py | 144 +++++++++++++++++- 6 files changed, 408 insertions(+), 53 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index bcb73352b537..8a683e4e772d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -36,9 +36,15 @@ class Leaser(object): def __init__(self, manager): self._thread = None - self._operational_lock = threading.Lock() self._manager = manager + # a lock used for start/stop operations, protecting the _thread attribute + self._operational_lock = threading.Lock() + + # A lock ensuring that add/remove operations are atomic and cannot be + # intertwined. 
Protects the _leased_messages and _bytes attributes. + self._add_remove_lock = threading.Lock() + self._leased_messages = {} """dict[str, float]: A mapping of ack IDs to the local time when the ack ID was initially leased in seconds since the epoch.""" @@ -64,30 +70,32 @@ def bytes(self): def add(self, items): """Add messages to be managed by the leaser.""" - for item in items: - # Add the ack ID to the set of managed ack IDs, and increment - # the size counter. - if item.ack_id not in self._leased_messages: - self._leased_messages[item.ack_id] = _LeasedMessage( - added_time=time.time(), size=item.byte_size - ) - self._bytes += item.byte_size - else: - _LOGGER.debug("Message %s is already lease managed", item.ack_id) + with self._add_remove_lock: + for item in items: + # Add the ack ID to the set of managed ack IDs, and increment + # the size counter. + if item.ack_id not in self._leased_messages: + self._leased_messages[item.ack_id] = _LeasedMessage( + added_time=time.time(), size=item.byte_size + ) + self._bytes += item.byte_size + else: + _LOGGER.debug("Message %s is already lease managed", item.ack_id) def remove(self, items): """Remove messages from lease management.""" - # Remove the ack ID from lease management, and decrement the - # byte counter. - for item in items: - if self._leased_messages.pop(item.ack_id, None) is not None: - self._bytes -= item.byte_size - else: - _LOGGER.debug("Item %s was not managed.", item.ack_id) - - if self._bytes < 0: - _LOGGER.debug("Bytes was unexpectedly negative: %d", self._bytes) - self._bytes = 0 + with self._add_remove_lock: + # Remove the ack ID from lease management, and decrement the + # byte counter. 
+ for item in items: + if self._leased_messages.pop(item.ack_id, None) is not None: + self._bytes -= item.byte_size + else: + _LOGGER.debug("Item %s was not managed.", item.ack_id) + + if self._bytes < 0: + _LOGGER.debug("Bytes was unexpectedly negative: %d", self._bytes) + self._bytes = 0 def maintain_leases(self): """Maintain all of the leases being managed. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 650e2f661915..74008bc94fcb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -21,6 +21,7 @@ import grpc import six +from six.moves import queue from google.api_core import bidi from google.api_core import exceptions @@ -116,6 +117,16 @@ def __init__( else: self._scheduler = scheduler + # A FIFO queue for the messages that have been received from the server, + # but not yet added to the lease management (and not sent to user callback), + # because the FlowControl limits have been hit. + self._messages_on_hold = queue.Queue() + + # A lock ensuring that pausing / resuming the consumer are both atomic + # operations that cannot be executed concurrently. Needed for properly + # syncing these operations with the current leaser load. + self._pause_resume_lock = threading.Lock() + # The threads created in ``.open()``. 
self._dispatcher = None self._leaser = None @@ -211,26 +222,72 @@ def add_close_callback(self, callback): def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" - if self.load >= 1.0: - if self._consumer is not None and not self._consumer.is_paused: - _LOGGER.debug("Message backlog over load at %.2f, pausing.", self.load) - self._consumer.pause() + with self._pause_resume_lock: + if self.load >= 1.0: + if self._consumer is not None and not self._consumer.is_paused: + _LOGGER.debug( + "Message backlog over load at %.2f, pausing.", self.load + ) + self._consumer.pause() def maybe_resume_consumer(self): - """Check the current load and resume the consumer if needed.""" - # If we have been paused by flow control, check and see if we are - # back within our limits. - # - # In order to not thrash too much, require us to have passed below - # the resume threshold (80% by default) of each flow control setting - # before restarting. - if self._consumer is None or not self._consumer.is_paused: - return - - if self.load < self.flow_control.resume_threshold: - self._consumer.resume() - else: - _LOGGER.debug("Did not resume, current load is %s", self.load) + """Check the load and held messages and resume the consumer if needed. + + If there are messages held internally, release those messages before + resuming the consumer. That will avoid leaser overload. + """ + with self._pause_resume_lock: + # If we have been paused by flow control, check and see if we are + # back within our limits. + # + # In order to not thrash too much, require us to have passed below + # the resume threshold (80% by default) of each flow control setting + # before restarting. + if self._consumer is None or not self._consumer.is_paused: + return + + _LOGGER.debug("Current load: %.2f", self.load) + + # Before maybe resuming the background consumer, release any messages + # currently on hold, if the current load allows for it. 
+ self._maybe_release_messages() + + if self.load < self.flow_control.resume_threshold: + _LOGGER.debug("Current load is %.2f, resuming consumer.", self.load) + self._consumer.resume() + else: + _LOGGER.debug("Did not resume, current load is %.2f.", self.load) + + def _maybe_release_messages(self): + """Release (some of) the held messages if the current load allows for it. + + The method tries to release as many messages as the current leaser load + would allow. Each released message is added to the lease management, + and the user callback is scheduled for it. + + If there are currently no messageges on hold, or if the leaser is + already overloaded, this method is effectively a no-op. + + The method assumes the caller has acquired the ``_pause_resume_lock``. + """ + while True: + if self.load >= 1.0: + break # already overloaded + + try: + msg = self._messages_on_hold.get_nowait() + except queue.Empty: + break + + self.leaser.add( + [requests.LeaseRequest(ack_id=msg.ack_id, byte_size=msg.size)] + ) + _LOGGER.debug( + "Released held message to leaser, scheduling callback for it, " + "still on hold %s.", + self._messages_on_hold.qsize(), + ) + self._scheduler.schedule(self._callback, msg) def _send_unary_request(self, request): """Send a request using a separate unary request instead of over the @@ -431,9 +488,10 @@ def _on_response(self, response): After the messages have all had their ack deadline updated, execute the callback for each message using the executor. 
""" - _LOGGER.debug( - "Scheduling callbacks for %s messages.", len(response.received_messages) + "Processing %s received message(s), currenty on hold %s.", + len(response.received_messages), + self._messages_on_hold.qsize(), ) # Immediately modack the messages we received, as this tells the server @@ -443,12 +501,33 @@ def _on_response(self, response): for message in response.received_messages ] self._dispatcher.modify_ack_deadline(items) + + invoke_callbacks_for = [] + for received_message in response.received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, received_message.ack_id, self._scheduler.queue + received_message.message, + received_message.ack_id, + self._scheduler.queue, + autolease=False, ) - # TODO: Immediately lease instead of using the callback queue. - self._scheduler.schedule(self._callback, message) + if self.load < 1.0: + req = requests.LeaseRequest( + ack_id=message.ack_id, byte_size=message.size + ) + self.leaser.add([req]) + invoke_callbacks_for.append(message) + self.maybe_pause_consumer() + else: + self._messages_on_hold.put(message) + + _LOGGER.debug( + "Scheduling callbacks for %s new messages, new total on hold %s.", + len(invoke_callbacks_for), + self._messages_on_hold.qsize(), + ) + for msg in invoke_callbacks_for: + self._scheduler.schedule(self._callback, msg) def _should_recover(self, exception): """Determine if an error on the RPC stream should be recovered. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 56dde9a7f6b8..b62a28ff6cb6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -70,7 +70,7 @@ class Message(object): published. 
""" - def __init__(self, message, ack_id, request_queue): + def __init__(self, message, ack_id, request_queue, autolease=True): """Construct the Message. .. note:: @@ -85,6 +85,9 @@ def __init__(self, message, ack_id, request_queue): request_queue (queue.Queue): A queue provided by the policy that can accept requests; the policy is responsible for handling those requests. + autolease (bool): An optional flag determining whether a new Message + instance should automatically lease itself upon creation. + Defaults to :data:`True`. """ self._message = message self._ack_id = ack_id @@ -98,7 +101,8 @@ def __init__(self, message, ack_id, request_queue): # The policy should lease this message, telling PubSub that it has # it until it is acked or otherwise dropped. - self.lease() + if autolease: + self.lease() def __repr__(self): # Get an abbreviated version of the data. @@ -208,8 +212,10 @@ def lease(self): """Inform the policy to lease this message continually. .. note:: - This method is called by the constructor, and you should never - need to call it manually. + By default this method is called by the constructor, and you should + never need to call it manually, unless the + :class:`~.pubsub_v1.subscriber.message.Message` instance was + created with ``autolease=False``. 
""" self._request_queue.put( requests.LeaseRequest(ack_id=self._ack_id, byte_size=self.size) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index e8921e039164..13e81d281f42 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import datetime +import itertools import threading import time @@ -24,6 +25,9 @@ import google.auth from google.cloud import pubsub_v1 +from google.cloud.pubsub_v1 import exceptions +from google.cloud.pubsub_v1 import futures +from google.cloud.pubsub_v1 import types from test_utils.system import unique_resource_id @@ -206,6 +210,85 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + def test_streaming_pull_max_messages( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and subscribe to it + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + batch_sizes = (7, 4, 8, 2, 10, 1, 3, 8, 6, 1) # total: 50 + self._publish_messages(publisher, topic_path, batch_sizes=batch_sizes) + + # now subscribe and do the main part, check for max pending messages + total_messages = sum(batch_sizes) + flow_control = types.FlowControl(max_messages=5) + callback = StreamingPullCallback( + processing_time=1, resolve_at_msg_count=total_messages + ) + + subscription_future = subscriber.subscribe( + subscription_path, callback, flow_control=flow_control + ) + + # Expected time to process all messages in ideal case: + # (total_messages / FlowControl.max_messages) * processing_time + # + # With total=50, max messages=5, and processing_time=1 this amounts to + # 10 seconds (+ overhead), 
thus a full minute should be more than enough + # for the processing to complete. If not, fail the test with a timeout. + try: + callback.done_future.result(timeout=60) + except exceptions.TimeoutError: + pytest.fail( + "Timeout: receiving/processing streamed messages took too long." + ) + + # The callback future gets resolved once total_messages have been processed, + # but we want to wait for just a little bit longer to possibly catch cases + # when the callback gets invoked *more* than total_messages times. + time.sleep(3) + + try: + # All messages should have been processed exactly once, and no more + # than max_messages simultaneously at any time. + assert callback.completed_calls == total_messages + assert sorted(callback.seen_message_ids) == list( + range(1, total_messages + 1) + ) + assert callback.max_pending_ack <= flow_control.max_messages + finally: + subscription_future.cancel() # trigger clean shutdown + + def _publish_messages(self, publisher, topic_path, batch_sizes): + """Publish ``count`` messages in batches and wait until completion.""" + publish_futures = [] + msg_counter = itertools.count(start=1) + + for batch_size in batch_sizes: + msg_batch = self._make_messages(count=batch_size) + for msg in msg_batch: + future = publisher.publish( + topic_path, msg, seq_num=str(next(msg_counter)) + ) + publish_futures.append(future) + time.sleep(0.1) + + # wait untill all messages have been successfully published + for future in publish_futures: + future.result(timeout=30) + + def _make_messages(self, count): + messages = [ + u"message {}/{}".format(i, count).encode("utf-8") + for i in range(1, count + 1) + ] + return messages + class AckCallback(object): def __init__(self): @@ -236,3 +319,33 @@ def __call__(self, message): # ``calls`` is incremented to do it. 
self.call_times.append(now) self.calls += 1 + + +class StreamingPullCallback(object): + def __init__(self, processing_time, resolve_at_msg_count): + self._lock = threading.Lock() + self._processing_time = processing_time + self._pending_ack = 0 + self.max_pending_ack = 0 + self.completed_calls = 0 + self.seen_message_ids = [] + + self._resolve_at_msg_count = resolve_at_msg_count + self.done_future = futures.Future() + + def __call__(self, message): + with self._lock: + self._pending_ack += 1 + self.max_pending_ack = max(self.max_pending_ack, self._pending_ack) + self.seen_message_ids.append(int(message.attributes["seq_num"])) + + time.sleep(self._processing_time) + + with self._lock: + self._pending_ack -= 1 + message.ack() + self.completed_calls += 1 + + if self.completed_calls >= self._resolve_at_msg_count: + if not self.done_future.done(): + self.done_future.set_result(None) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 98a946ae75c6..8c22992f7a2b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -33,7 +33,7 @@ PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id="ACKID", **attrs): +def create_message(data, ack_id="ACKID", autolease=True, **attrs): with mock.patch.object(message.Message, "lease") as lease: with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS @@ -48,8 +48,12 @@ def create_message(data, ack_id="ACKID", **attrs): ), ack_id, queue.Queue(), + autolease=autolease, ) - lease.assert_called_once_with() + if autolease: + lease.assert_called_once_with() + else: + lease.assert_not_called() return msg @@ -79,6 +83,11 @@ def test_publish_time(): assert msg.publish_time == PUBLISHED +def test_disable_autolease_on_creation(): + # 
the create_message() helper does the actual assertion + create_message(b"foo", autolease=False) + + def check_call_types(mock, *args, **kwargs): """Checks a mock's call types. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index cbd02e28ac6c..22585675a324 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -13,9 +13,11 @@ # limitations under the License. import logging +import types as stdlib_types import mock import pytest +from six.moves import queue from google.api_core import bidi from google.api_core import exceptions @@ -113,6 +115,23 @@ def make_manager(**kwargs): ) +def fake_leaser_add(leaser, init_msg_count=0, init_bytes=0): + """Add a simplified fake add() method to a leaser instance. + + The fake add() method actually increases the leaser's internal message count + by one for each message, and the total bytes by 10 for each message (hardcoded, + regardless of the actual message size). 
+ """ + + def fake_add(self, items): + self.message_count += len(items) + self.bytes += len(items) * 10 + + leaser.message_count = init_msg_count + leaser.bytes = init_bytes + leaser.add = stdlib_types.MethodType(fake_add, leaser) + + def test_ack_deadline(): manager = make_manager() assert manager.ack_deadline == 10 @@ -208,6 +227,66 @@ def test_maybe_resume_consumer_wo_consumer_set(): manager.maybe_resume_consumer() # no raise +def test__maybe_release_messages_on_overload(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) + # Ensure load is exactly 1.0 (to verify that >= condition is used) + _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + _leaser.message_count = 10 + _leaser.bytes = 1000 + + msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=11) + manager._messages_on_hold.put(msg) + + manager._maybe_release_messages() + + assert manager._messages_on_hold.qsize() == 1 + manager._leaser.add.assert_not_called() + manager._scheduler.schedule.assert_not_called() + + +def test__maybe_release_messages_below_overload(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) + manager._callback = mock.sentinel.callback + + # init leaser message count to 8 to leave room for 2 more messages + _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + fake_leaser_add(_leaser, init_msg_count=8, init_bytes=200) + _leaser.add = mock.Mock(wraps=_leaser.add) # to spy on calls + + messages = [ + mock.create_autospec(message.Message, instance=True, ack_id="ack_foo", size=11), + mock.create_autospec(message.Message, instance=True, ack_id="ack_bar", size=22), + mock.create_autospec(message.Message, instance=True, ack_id="ack_baz", size=33), + ] + for msg in messages: + manager._messages_on_hold.put(msg) + + # the actual call of MUT + manager._maybe_release_messages() + + assert manager._messages_on_hold.qsize() == 1 + msg = 
manager._messages_on_hold.get_nowait() + assert msg.ack_id == "ack_baz" + + assert len(_leaser.add.mock_calls) == 2 + expected_calls = [ + mock.call([requests.LeaseRequest(ack_id="ack_foo", byte_size=11)]), + mock.call([requests.LeaseRequest(ack_id="ack_bar", byte_size=22)]), + ] + _leaser.add.assert_has_calls(expected_calls) + + schedule_calls = manager._scheduler.schedule.mock_calls + assert len(schedule_calls) == 2 + for _, call_args, _ in schedule_calls: + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].ack_id in ("ack_foo", "ack_bar") + + def test_send_unary(): manager = make_manager() manager._UNARY_REQUESTS = True @@ -470,8 +549,8 @@ def test__get_initial_request_wo_leaser(): assert initial_request.modify_deadline_seconds == [] -def test_on_response(): - manager, _, dispatcher, _, _, scheduler = make_running_manager() +def test__on_response_no_leaser_overload(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback # Set up the messages. @@ -486,6 +565,9 @@ def test_on_response(): ] ) + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, init_bytes=0) + # Actually run the method and prove that modack and schedule # are called in the expected way. manager._on_response(response) @@ -500,6 +582,64 @@ def test_on_response(): assert call[1][0] == mock.sentinel.callback assert isinstance(call[1][1], message.Message) + # the leaser load limit not hit, no messages had to be put on hold + assert manager._messages_on_hold.qsize() == 0 + + +def test__on_response_with_leaser_overload(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + # Set up the messages. 
+ response = types.StreamingPullResponse( + received_messages=[ + types.ReceivedMessage( + ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + ), + types.ReceivedMessage( + ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + ), + types.ReceivedMessage( + ack_id="zack", message=types.PubsubMessage(data=b"baz", message_id="3") + ), + ] + ) + + # Adjust message bookkeeping in leaser. Pick 99 messages, which is just below + # the default FlowControl.max_messages limit. + fake_leaser_add(leaser, init_msg_count=99, init_bytes=990) + + # Actually run the method and prove that modack and schedule + # are called in the expected way. + manager._on_response(response) + + dispatcher.modify_ack_deadline.assert_called_once_with( + [ + requests.ModAckRequest("fack", 10), + requests.ModAckRequest("back", 10), + requests.ModAckRequest("zack", 10), + ] + ) + + # one message should be scheduled, the leaser capacity allows for it + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 1 + call_args = schedule_calls[0][1] + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].message_id == "1" + + # the rest of the messages should have been put on hold + assert manager._messages_on_hold.qsize() == 2 + while True: + try: + msg = manager._messages_on_hold.get_nowait() + except queue.Empty: + break + else: + assert isinstance(msg, message.Message) + assert msg.message_id in ("2", "3") + def test_retryable_stream_errors(): # Make sure the config matches our hard-coded tuple of exceptions. From 22d6a76eceac80f5bcefb0570b3f77fb621deda6 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 24 May 2019 10:02:56 -0700 Subject: [PATCH 0357/1197] Update timeouts, blacken noxfile.py, setup.py (via synth). 
(#8128) --- .../gapic/subscriber_client_config.py | 4 +- packages/google-cloud-pubsub/noxfile.py | 46 +++++++------- packages/google-cloud-pubsub/setup.py | 63 +++++++++---------- packages/google-cloud-pubsub/synth.metadata | 6 +- 4 files changed, 58 insertions(+), 61 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 54a37acc66b9..0aa68315c1c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -20,9 +20,9 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 12000, + "initial_rpc_timeout_millis": 25000, "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 12000, + "max_rpc_timeout_millis": 25000, "total_timeout_millis": 600000, }, "streaming_messaging": { diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 0f528b7f3902..f021e0290c80 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -23,6 +23,12 @@ LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) +BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] + +if os.path.exists("samples"): + BLACK_PATHS.append("samples") + + @nox.session(python="3.7") def lint(session): """Run linters. @@ -31,13 +37,7 @@ def lint(session): serious code quality issues. """ session.install("flake8", "black", *LOCAL_DEPS) - session.run( - "black", - "--check", - "google", - "tests", - "docs", - ) + session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -52,12 +52,7 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" session.install("black") - session.run( - "black", - "google", - "tests", - "docs", - ) + session.run("black", *BLACK_PATHS) @nox.session(python="3.7") @@ -140,21 +135,24 @@ def cover(session): session.run("coverage", "erase") + @nox.session(python="3.7") def docs(session): """Build the docs for this library.""" - session.install('-e', '.') - session.install('sphinx', 'alabaster', 'recommonmark') + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark") - shutil.rmtree(os.path.join('docs', '_build'), ignore_errors=True) + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( - 'sphinx-build', - '-W', # warnings as errors - '-T', # show full traceback on exception - '-N', # no colors - '-b', 'html', - '-d', os.path.join('docs', '_build', 'doctrees', ''), - os.path.join('docs', ''), - os.path.join('docs', '_build', 'html', ''), + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), ) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 86d463ffe754..479f71946619 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -20,41 +20,40 @@ # Package metadata. 
-name = 'google-cloud-pubsub' -description = 'Google Cloud Pub/Sub API client library' -version = '0.41.0' +name = "google-cloud-pubsub" +description = "Google Cloud Pub/Sub API client library" +version = "0.41.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' -release_status = 'Development Status :: 4 - Beta' +release_status = "Development Status :: 4 - Beta" dependencies = [ - 'google-api-core[grpc] >= 1.6.0, < 2.0.0dev', - 'grpc-google-iam-v1 >= 0.11.4, < 0.12dev', + "google-api-core[grpc] >= 1.6.0, < 2.0.0dev", + "grpc-google-iam-v1 >= 0.11.4, < 0.12dev", 'enum34; python_version < "3.4"', ] -extras = { -} +extras = {} # Setup boilerplate below this line. package_root = os.path.abspath(os.path.dirname(__file__)) -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() - if package.startswith('google')] + package for package in setuptools.find_packages() if package.startswith("google") +] # Determine which namespaces are needed. 
-namespaces = ['google'] -if 'google.cloud' in packages: - namespaces.append('google.cloud') +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") setuptools.setup( @@ -62,30 +61,30 @@ version=version, description=description, long_description=readme, - author='Google LLC', - author_email='googleapis-packages@google.com', - license='Apache 2.0', - url='https://github.com/GoogleCloudPlatform/google-cloud-python', + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url="https://github.com/GoogleCloudPlatform/google-cloud-python", classifiers=[ release_status, - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Operating System :: OS Independent', - 'Topic :: Internet', + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 2", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Operating System :: OS Independent", + "Topic :: Internet", ], - platforms='Posix; MacOS X; Windows', + platforms="Posix; MacOS X; Windows", packages=packages, namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 
683b3b560c59..65a9a17abde9 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-05-21T12:27:35.596050Z", + "updateTime": "2019-05-24T12:25:23.530312Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "32a10f69e2c9ce15bba13ab1ff928bacebb25160", - "internalRef": "249058354" + "sha": "0537189470f04f24836d6959821c24197a0ed120", + "internalRef": "249742806" } }, { From a33044b4aa3372df45061c179ef014d47cc4a239 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 28 May 2019 19:58:42 +0200 Subject: [PATCH 0358/1197] Prevent unhandled background error on SPM shutdown (#8111) --- .../_protocol/streaming_pull_manager.py | 13 +++++- .../subscriber/test_streaming_pull_manager.py | 46 +++++++++++++++++++ 2 files changed, 58 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 74008bc94fcb..159bdfd8d9e5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -428,12 +428,23 @@ def close(self, reason=None): _LOGGER.debug("Stopping scheduler.") self._scheduler.shutdown() self._scheduler = None + + # Leaser and dispatcher reference each other through the shared + # StreamingPullManager instance, i.e. "self", thus do not set their + # references to None until both have been shut down. + # + # NOTE: Even if the dispatcher operates on an inactive leaser using + # the latter's add() and remove() methods, these have no impact on + # the stopped leaser (the leaser is never again re-started). 
Ditto + # for the manager's maybe_resume_consumer() / maybe_pause_consumer(), + # because the consumer gets shut down first. _LOGGER.debug("Stopping leaser.") self._leaser.stop() - self._leaser = None _LOGGER.debug("Stopping dispatcher.") self._dispatcher.stop() self._dispatcher = None + # dispatcher terminated, OK to dispose the leaser reference now + self._leaser = None _LOGGER.debug("Stopping heartbeater.") self._heartbeater.stop() self._heartbeater = None diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 22585675a324..849137f7af7a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -13,6 +13,8 @@ # limitations under the License. import logging +import threading +import time import types as stdlib_types import mock @@ -511,6 +513,50 @@ def test_close_idempotent(): assert scheduler.shutdown.call_count == 1 +class FakeDispatcher(object): + def __init__(self, manager, error_callback): + self._manager = manager + self._error_callback = error_callback + self._thread = None + self._stop = False + + def start(self): + self._thread = threading.Thread(target=self._do_work) + self._thread.daemon = True + self._thread.start() + + def stop(self): + self._stop = True + self._thread.join() + self._thread = None + + def _do_work(self): + while not self._stop: + try: + self._manager.leaser.add([mock.Mock()]) + except Exception as exc: + self._error_callback(exc) + time.sleep(0.1) + + # also try to interact with the leaser after the stop flag has been set + try: + self._manager.leaser.remove([mock.Mock()]) + except Exception as exc: + self._error_callback(exc) + + +def test_close_no_dispatcher_error(): + manager, _, _, _, _, _ = make_running_manager() + error_callback = 
mock.Mock(name="error_callback") + dispatcher = FakeDispatcher(manager=manager, error_callback=error_callback) + manager._dispatcher = dispatcher + dispatcher.start() + + manager.close() + + error_callback.assert_not_called() + + def test_close_callbacks(): manager, _, _, _, _, _ = make_running_manager() From 64bc77563ee0a036e815b63dbbf993e1cfb5449d Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 4 Jun 2019 12:52:15 -0700 Subject: [PATCH 0359/1197] [CHANGE ME] Re-generated pubsub to pick up changes in the API or client library generator. (#8219) --- .../cloud/pubsub_v1/gapic/publisher_client_config.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 1aa04e62894f..33f0af827924 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -29,7 +29,7 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 12000, + "initial_rpc_timeout_millis": 25000, "rpc_timeout_multiplier": 1.0, "max_rpc_timeout_millis": 30000, "total_timeout_millis": 600000, diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 65a9a17abde9..32341b4a8d79 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-05-24T12:25:23.530312Z", + "updateTime": "2019-06-04T19:35:17.049372Z", "sources": [ { "generator": { "name": "artman", - "version": "0.20.0", - "dockerImage": "googleapis/artman@sha256:3246adac900f4bdbd62920e80de2e5877380e44036b3feae13667ec255ebf5ec" + "version": "0.23.0", + 
"dockerImage": "googleapis/artman@sha256:846102ebf7ea2239162deea69f64940443b4147f7c2e68d64b332416f74211ba" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0537189470f04f24836d6959821c24197a0ed120", - "internalRef": "249742806" + "sha": "0026f4b890ed9e2388fb0573c0727defa6f5b82e", + "internalRef": "251265049" } }, { From a251e504ad53c79af63aee65225b19e0a9760f49 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 5 Jun 2019 00:39:53 -0700 Subject: [PATCH 0360/1197] Pubsub: Separate subscriber and publish future documentation. (#8205) * separate publish futures from streaming pull futures documentation * remove trailing whitespaces * Add test --- .../google/cloud/pubsub_v1/futures.py | 23 ++++++------ .../cloud/pubsub_v1/publisher/futures.py | 35 +++++++++++++------ .../cloud/pubsub_v1/subscriber/futures.py | 5 ++- .../publisher/test_futures_publisher.py | 32 +++++++++++++++++ 4 files changed, 71 insertions(+), 24 deletions(-) create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 39688f291dbf..21d5d810199f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -87,18 +87,12 @@ def done(self): return self._exception != self._SENTINEL or self._result != self._SENTINEL def result(self, timeout=None): - """Return the message ID, or raise an exception. - - This blocks until the message has successfully been published, and - returns the message ID. + """Resolve the future and return a value where appropriate. Args: timeout (Union[int, float]): The number of seconds before this call times out and raises TimeoutError. - Returns: - str: The message ID. - Raises: ~.pubsub_v1.TimeoutError: If the request times out. 
Exception: For undefined exceptions in the underlying @@ -115,9 +109,6 @@ def result(self, timeout=None): def exception(self, timeout=None): """Return the exception raised by the call, if any. - This blocks until the message has successfully been published, and - returns the exception. If the call succeeded, return None. - Args: timeout (Union[int, float]): The number of seconds before this call times out and raises TimeoutError. @@ -139,15 +130,21 @@ def exception(self, timeout=None): # Okay, this batch had an error; this should return it. return self._exception - def add_done_callback(self, fn): + def add_done_callback(self, callback): """Attach the provided callable to the future. The provided function is called, with this future as its only argument, when the future finishes running. + + Args: + callback (Callable): The function to call. + + Returns: + None """ if self.done(): - return fn(self) - self._callbacks.append(fn) + return callback(self) + self._callbacks.append(callback) def set_result(self, result): """Set the result of the future to the provided result. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index a47f50e00a0d..8fec17d2d64f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -36,13 +36,28 @@ class Future(futures.Future): :class:`threading.Event` will be created and used. """ - # The publishing-side subclass does not need any special behavior - # at this time. - # - # However, there is still a subclass so that if someone attempts - # isinstance checks against a publisher-returned or subscriber-returned - # future, trying either one against the other returns False. - pass - - -__all__ = ("Future",) + def result(self, timeout=None): + """Return the message ID or raise an exception. 
+ + This blocks until the message has been published successfully and + returns the message ID unless an exception is raised. + + Args: + timeout (Union[int, float]): The number of seconds before this call + times out and raises TimeoutError. + + Returns: + str: The message ID. + + Raises: + ~.pubsub_v1.TimeoutError: If the request times out. + Exception: For undefined exceptions in the underlying + call execution. + """ + # Attempt to get the exception if there is one. + # If there is not one, then we know everything worked, and we can + # return an appropriate value. + err = self.exception(timeout=timeout) + if err is None: + return self._result + raise err diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index f3c06416083b..12504c18b5df 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -46,5 +46,8 @@ def cancel(self): return self._manager.close() def cancelled(self): - """bool: True if the subscription has been cancelled.""" + """ + returns: + bool: ``True`` if the subscription has been cancelled. + """ return self._cancelled diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py new file mode 100644 index 000000000000..eb32d05185b6 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py @@ -0,0 +1,32 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import pytest + +from google.cloud.pubsub_v1.publisher import futures + + +class TestFuture(object): + def test_result_on_success(self): + future = futures.Future() + future.set_result("570307942214048") + assert future.result() == "570307942214048" + + def test_result_on_failure(self): + future = futures.Future() + future.set_exception(RuntimeError("Something bad happened.")) + with pytest.raises(RuntimeError): + future.result() From ff44da7edd2292e1a7cb4a71e650e1b7a643e521 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 6 Jun 2019 09:16:34 -0700 Subject: [PATCH 0361/1197] Pub/Sub: Expose publish retry settings (#8231) * Expose publish retry settings * remove backslashes * re-ran synthtool from HEAD --- .../cloud/pubsub_v1/gapic/publisher_client.py | 40 ++++++++++--- packages/google-cloud-pubsub/noxfile.py | 4 +- packages/google-cloud-pubsub/synth.metadata | 10 ++-- packages/google-cloud-pubsub/synth.py | 57 +++++++++++++++++++ 4 files changed, 95 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 9bfe52ac638d..d9698e301b07 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -16,8 +16,11 @@ """Accesses the google.pubsub.v1 Publisher API.""" +import collections +from copy import deepcopy import functools import pkg_resources 
+import six import warnings from google.oauth2 import service_account @@ -44,6 +47,28 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version +# TODO: remove conditional import after Python 2 support is dropped +if six.PY3: + from collections.abc import Mapping +else: + from collections import Mapping + + +def _merge_dict(d1, d2): + # Modifies d1 in-place to take values from d2 + # if the nested keys from d2 are present in d1. + # https://stackoverflow.com/a/10704003/4488789 + for k, v2 in d2.items(): + v1 = d1.get(k) # returns None if v1 has no such key + if v1 is None: + raise Exception("{} is not recognized by client_config".format(k)) + if isinstance(v1, Mapping) and isinstance(v2, Mapping): + _merge_dict(v1, v2) + else: + d1[k] = v2 + return d1 + + class PublisherClient(object): """ The service that an application uses to manipulate topics, and to send @@ -128,7 +153,7 @@ def __init__( This argument is mutually exclusive with providing a transport instance to ``transport``; doing so will raise an exception. - client_config (dict): DEPRECATED. A dictionary of call options for + client_config (dict): A dictionary of call options for each method. If not specified, the default configuration is used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with @@ -136,15 +161,12 @@ def __init__( Generally, you only need to set this if you're developing your own client library. """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) + default_client_config = deepcopy(publisher_client_config.config) + + if client_config is None: + client_config = default_client_config else: - client_config = publisher_client_config.config + client_config = _merge_dict(default_client_config, client_config) if channel: warnings.warn( diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index f021e0290c80..968fb5a09bf1 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -46,7 +46,7 @@ def blacken(session): """Run black. Format code to uniform standard. - + This currently uses Python 3.6 due to the automated Kokoro run of synthtool. That run uses an image that doesn't have 3.6 installed. Before updating this check the state of the `gcp_ubuntu_config` we use for that Kokoro run. @@ -78,7 +78,7 @@ def default(session): "--cov-append", "--cov-config=.coveragerc", "--cov-report=", - "--cov-fail-under=97", + "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, ) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 32341b4a8d79..26a0f82b68e7 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-04T19:35:17.049372Z", + "updateTime": "2019-06-06T00:31:04.007153Z", "sources": [ { "generator": { "name": "artman", - "version": "0.23.0", - "dockerImage": "googleapis/artman@sha256:846102ebf7ea2239162deea69f64940443b4147f7c2e68d64b332416f74211ba" + "version": "0.23.1", + "dockerImage": "googleapis/artman@sha256:9d5cae1454da64ac3a87028f8ef486b04889e351c83bb95e83b8fab3959faed0" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0026f4b890ed9e2388fb0573c0727defa6f5b82e", - 
"internalRef": "251265049" + "sha": "5487c78983f6bd5bbafa69166593826a90778a2f", + "internalRef": "251716150" } }, { diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index b568e148b060..d7b8a460c9bb 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -109,6 +109,63 @@ Format is ``projects/{project}/subscriptions/{sub}``.""", ) +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "import functools\n", + "import collections\n" + "from copy import deepcopy\n\g<0>" +) + +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "import pkg_resources\n", + "\g<0>import six\n" +) + +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "class PublisherClient", + """# TODO: remove conditional import after Python 2 support is dropped +if six.PY3: + from collections.abc import Mapping +else: + from collections import Mapping + + +def _merge_dict(d1, d2): + # Modifies d1 in-place to take values from d2 + # if the nested keys from d2 are present in d1. 
+ # https://stackoverflow.com/a/10704003/4488789 + for k, v2 in d2.items(): + v1 = d1.get(k) # returns None if v1 has no such key + if v1 is None: + raise Exception("{} is not recognized by client_config".format(k)) + if isinstance(v1, Mapping) and isinstance(v2, Mapping): + _merge_dict(v1, v2) + else: + d1[k] = v2 + return d1 + \n\n\g<0>""" +) + +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "client_config \(dict\): DEPRECATED.", + "client_config (dict):" +) + +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "# Raise deprecation warnings .*\n.*\n.*\n.*\n.*\n.*\n", + """default_client_config = deepcopy(publisher_client_config.config) + + if client_config is None: + client_config = default_client_config + else: + client_config = _merge_dict(default_client_config, client_config) + """ +) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From 408d4ba01d15c06b6cd75cb085cfbb323eeec206 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 6 Jun 2019 09:56:22 -0700 Subject: [PATCH 0362/1197] Pub/Sub: surface publish future in documentation (#8229) * surface publish future in documentation * Update class definition --- .../docs/publisher/api/futures.rst | 6 +++++ .../docs/publisher/index.rst | 3 ++- .../cloud/pubsub_v1/publisher/futures.py | 23 +++++-------------- 3 files changed, 14 insertions(+), 18 deletions(-) create mode 100644 packages/google-cloud-pubsub/docs/publisher/api/futures.rst diff --git a/packages/google-cloud-pubsub/docs/publisher/api/futures.rst b/packages/google-cloud-pubsub/docs/publisher/api/futures.rst new file mode 100644 index 000000000000..b02b9bf9039d --- /dev/null +++ b/packages/google-cloud-pubsub/docs/publisher/api/futures.rst @@ -0,0 +1,6 @@ +Futures +======= + +.. 
automodule:: google.cloud.pubsub_v1.publisher.futures + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index 16a869925184..a8485632c6f8 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -95,7 +95,7 @@ Futures ------- Every call to :meth:`~.pubsub_v1.publisher.client.Client.publish` returns -an instance of :class:`google.api_core.future.Future`. +an instance of :class:`~.pubsub_v1.publisher.futures.Future`. .. note:: @@ -135,3 +135,4 @@ API Reference :maxdepth: 2 api/client + api/futures diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index 8fec17d2d64f..ed200041177b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2019, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,22 +18,11 @@ class Future(futures.Future): - """Encapsulation of the asynchronous execution of an action. - - This object is returned from asychronous Pub/Sub publishing calls, and is - the interface to determine the status of those calls. - - This object should not be created directly, but is returned by other - methods in this library. - - Args: - completed (Optional[Any]): An event, with the same interface as - :class:`threading.Event`. This is provided so that callers - with different concurrency models (e.g. ``threading`` or - ``multiprocessing``) can supply an event that is compatible - with that model. The ``wait()`` and ``set()`` methods will be - used. 
If this argument is not provided, then a new - :class:`threading.Event` will be created and used. + """This future object is returned from asychronous Pub/Sub publishing + calls. + + Calling :meth:`result` will resolve the future by returning the message + ID, unless an error occurs. """ def result(self, timeout=None): From 88fbb7521c583f354f46290df38dfa17c1d8e219 Mon Sep 17 00:00:00 2001 From: Antonio Bustos Date: Wed, 12 Jun 2019 15:06:54 +0200 Subject: [PATCH 0363/1197] Replace readthedocs links with links to github docs. (#8291) --- packages/google-cloud-pubsub/README.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 2895b60f761e..96027f9ffad5 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -113,7 +113,7 @@ messages to it To learn more, consult the `publishing documentation`_. -.. _publishing documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/publisher/index.html +.. _publishing documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/publisher/index.html Subscribing @@ -157,4 +157,4 @@ block the current thread until a given condition obtains: To learn more, consult the `subscriber documentation`_. -.. _subscriber documentation: http://google-cloud-python.readthedocs.io/en/latest/pubsub/subscriber/index.html +.. _subscriber documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/subscriber/index.html From c3936d525483b93c36b5b8a647b0d753352ac942 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 13 Jun 2019 09:12:52 +0200 Subject: [PATCH 0364/1197] PubSub: Release the state lock before calling the publish api (#8234) * Release publish batch lock much sooner Once the publish batch transitions to IN_PROGRESS state, any subsequent calls to commit the batch effectively become a no-op. 
The state lock can thus be released immediately after the state change, unblocking other threads that might be waiting to publish another PubSub message. Co-authored by @sayap (GitHub) and Rencana Tarigan rtarigan@bbmtek.com https://github.com/googleapis/google-cloud-python/pull/7686 * Add minor comment improvements to Batch methods --- .../pubsub_v1/publisher/_batch/thread.py | 106 ++++++++++-------- .../pubsub_v1/publisher/batch/test_thread.py | 33 +++++- 2 files changed, 89 insertions(+), 50 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index d3fd0d956a90..117ee12b8463 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -187,56 +187,63 @@ def _commit(self): _LOGGER.debug("Batch is already in progress, exiting commit") return - # Sanity check: If there are no messages, no-op. - if not self._messages: - _LOGGER.debug("No messages to publish, exiting commit") - self._status = base.BatchStatus.SUCCESS - return - - # Begin the request to publish these messages. - # Log how long the underlying request takes. - start = time.time() - - try: - response = self._client.api.publish(self._topic, self._messages) - except google.api_core.exceptions.GoogleAPIError as exc: - # We failed to publish, set the exception on all futures and - # exit. - self._status = base.BatchStatus.ERROR - - for future in self._futures: - future.set_exception(exc) + # Once in the IN_PROGRESS state, no other thread can publish additional + # messages or initiate a commit (those operations become a no-op), thus + # it is safe to release the state lock here. Releasing the lock avoids + # blocking other threads in case api.publish() below takes a long time + # to complete. 
+ # https://github.com/googleapis/google-cloud-python/issues/8036 + + # Sanity check: If there are no messages, no-op. + if not self._messages: + _LOGGER.debug("No messages to publish, exiting commit") + self._status = base.BatchStatus.SUCCESS + return + + # Begin the request to publish these messages. + # Log how long the underlying request takes. + start = time.time() + + try: + response = self._client.api.publish(self._topic, self._messages) + except google.api_core.exceptions.GoogleAPIError as exc: + # We failed to publish, set the exception on all futures and + # exit. + self._status = base.BatchStatus.ERROR + + for future in self._futures: + future.set_exception(exc) + + _LOGGER.exception("Failed to publish %s messages.", len(self._futures)) + return + + end = time.time() + _LOGGER.debug("gRPC Publish took %s seconds.", end - start) + + if len(response.message_ids) == len(self._futures): + # Iterate over the futures on the queue and return the response + # IDs. We are trusting that there is a 1:1 mapping, and raise + # an exception if not. + self._status = base.BatchStatus.SUCCESS + zip_iter = six.moves.zip(response.message_ids, self._futures) + for message_id, future in zip_iter: + future.set_result(message_id) + else: + # Sanity check: If the number of message IDs is not equal to + # the number of futures I have, then something went wrong. + self._status = base.BatchStatus.ERROR + exception = exceptions.PublishError( + "Some messages were not successfully published." + ) - _LOGGER.exception("Failed to publish %s messages.", len(self._futures)) - return + for future in self._futures: + future.set_exception(exception) - end = time.time() - _LOGGER.debug("gRPC Publish took %s seconds.", end - start) - - if len(response.message_ids) == len(self._futures): - # Iterate over the futures on the queue and return the response - # IDs. We are trusting that there is a 1:1 mapping, and raise - # an exception if not. 
- self._status = base.BatchStatus.SUCCESS - zip_iter = six.moves.zip(response.message_ids, self._futures) - for message_id, future in zip_iter: - future.set_result(message_id) - else: - # Sanity check: If the number of message IDs is not equal to - # the number of futures I have, then something went wrong. - self._status = base.BatchStatus.ERROR - exception = exceptions.PublishError( - "Some messages were not successfully published." - ) - - for future in self._futures: - future.set_exception(exception) - - _LOGGER.error( - "Only %s of %s messages were published.", - len(response.message_ids), - len(self._futures), - ) + _LOGGER.error( + "Only %s of %s messages were published.", + len(response.message_ids), + len(self._futures), + ) def monitor(self): """Commit this batch after sufficient time has elapsed. @@ -258,7 +265,8 @@ def publish(self, message): Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient - period of time has elapsed. + period of time has elapsed. If the batch is full or the commit is + already in progress, the method does not do anything. This method is called by :meth:`~.PublisherClient.publish`. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index d323c2ed2d24..60425e748043 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import datetime import threading import time import mock +import pytest import google.api_core.exceptions from google.auth import credentials @@ -39,7 +41,7 @@ def create_batch(autocommit=False, **batch_settings): autocommit (bool): Whether the batch should commit after ``max_latency`` seconds. By default, this is ``False`` for unit testing. - kwargs (dict): Arguments passed on to the + batch_settings (dict): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. Returns: @@ -149,6 +151,35 @@ def test_blocking__commit(): assert futures[1].result() == "b" +def test_client_api_publish_not_blocking_additional_publish_calls(): + batch = create_batch(max_messages=1) + api_publish_called = threading.Event() + + def api_publish_delay(_, messages): + api_publish_called.set() + time.sleep(1.0) + message_ids = [str(i) for i in range(len(messages))] + return types.PublishResponse(message_ids=message_ids) + + api_publish_patch = mock.patch.object( + type(batch.client.api), "publish", side_effect=api_publish_delay + ) + + with api_publish_patch: + batch.publish({"data": b"first message"}) + + start = datetime.datetime.now() + event_set = api_publish_called.wait(timeout=1.0) + if not event_set: + pytest.fail("API publish was not called in time") + batch.publish({"data": b"second message"}) + end = datetime.datetime.now() + + # While a batch commit in progress, waiting for the API publish call to + # complete should not unnecessariliy delay other calls to batch.publish(). 
+ assert (end - start).total_seconds() < 1.0 + + @mock.patch.object(thread, "_LOGGER") def test_blocking__commit_starting(_LOGGER): batch = create_batch() From c6b34aa7e09667dfc9bec6d7aabe1ef94e28f3a3 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 18 Jun 2019 01:26:23 +0200 Subject: [PATCH 0365/1197] Document PubSub FlowControl settings (#8293) --- .../google/cloud/pubsub_v1/types.py | 32 ++++++++++++++ packages/google-cloud-pubsub/synth.metadata | 10 ++--- packages/google-cloud-pubsub/synth.py | 43 +++++++++++++++++++ 3 files changed, 80 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index c2662cf83631..0e73ce6b5588 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -71,6 +71,38 @@ 2 * 60 * 60, # max_lease_duration: 2 hours. ) +if sys.version_info >= (3, 5): + FlowControl.__doc__ = ( + "The settings for controlling the rate at which messages are pulled " + "with an asynchronous subscription." + ) + FlowControl.max_bytes.__doc__ = ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." + ) + FlowControl.max_messages.__doc__ = ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." + ) + FlowControl.resume_threshold.__doc__ = ( + "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " + "below which to resume the message stream. Must be a positive number not " + "greater than ``1.0``." + ) + FlowControl.max_requests.__doc__ = "Currently not in use." + FlowControl.max_request_batch_size.__doc__ = ( + "The maximum number of requests scheduled by callbacks to process and " + "dispatch at a time." 
+ ) + FlowControl.max_request_batch_latency.__doc__ = ( + "The maximum amount of time in seconds to wait for additional request " + "items before processing the next batch of requests." + ) + FlowControl.max_lease_duration.__doc__ = ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." + ) + _shared_modules = [ http_pb2, diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 26a0f82b68e7..0743cddffec8 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-06T00:31:04.007153Z", + "updateTime": "2019-06-12T13:55:40.520528Z", "sources": [ { "generator": { "name": "artman", - "version": "0.23.1", - "dockerImage": "googleapis/artman@sha256:9d5cae1454da64ac3a87028f8ef486b04889e351c83bb95e83b8fab3959faed0" + "version": "0.24.1", + "dockerImage": "googleapis/artman@sha256:6018498e15310260dc9b03c9d576608908ed9fbabe42e1494ff3d827fea27b19" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "5487c78983f6bd5bbafa69166593826a90778a2f", - "internalRef": "251716150" + "sha": "f117dac435e96ebe58d85280a3faf2350c4d4219", + "internalRef": "252714985" } }, { diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index d7b8a460c9bb..e9a54a5da10a 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -15,6 +15,7 @@ """This script is used to synthesize generated parts of this library.""" import re +import textwrap import synthtool as s from synthtool import gcp @@ -166,6 +167,48 @@ def _merge_dict(d1, d2): """ ) +# document FlowControl settings in Python 3.5+ +s.replace( + "google/cloud/pubsub_v1/types.py", + "FlowControl.__new__.__defaults__ = \(.*?\)", + textwrap.dedent("""\ + \g<0> + + if sys.version_info >= (3, 5): + FlowControl.__doc__ 
= ( + "The settings for controlling the rate at which messages are pulled " + "with an asynchronous subscription." + ) + FlowControl.max_bytes.__doc__ = ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." + ) + FlowControl.max_messages.__doc__ = ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." + ) + FlowControl.resume_threshold.__doc__ = ( + "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " + "below which to resume the message stream. Must be a positive number not " + "greater than ``1.0``." + ) + FlowControl.max_requests.__doc__ = "Currently not in use." + FlowControl.max_request_batch_size.__doc__ = ( + "The maximum number of requests scheduled by callbacks to process and " + "dispatch at a time." + ) + FlowControl.max_request_batch_latency.__doc__ = ( + "The maximum amount of time in seconds to wait for additional request " + "items before processing the next batch of requests." + ) + FlowControl.max_lease_duration.__doc__ = ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." + ) + """), + flags=re.DOTALL, +) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From ff1fd9d28bbfdf615c6fa72b5a60fdd9484b1b41 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 18 Jun 2019 20:25:44 +0200 Subject: [PATCH 0366/1197] Core: Mitigate busy reopen loop in ResumableBidiRpc consuming 100% CPU (#8193) * Add bidi._Throttle helper class * Add optional reopen throttling to ResumableBidiRpc * Enable Bidi reopen throttling in SPM * Change bidi._Throttle signature The commit renames the entry_cap parameter to access_limit, and changes the type of the time_window argument from float to timedelta. 
--- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 1 + .../unit/pubsub_v1/subscriber/test_streaming_pull_manager.py | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 159bdfd8d9e5..68ab452fc564 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -383,6 +383,7 @@ def open(self, callback, on_callback_error): start_rpc=self._client.api.streaming_pull, initial_request=self._get_initial_request, should_recover=self._should_recover, + throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 849137f7af7a..877ccf97fd9a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -428,6 +428,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi start_rpc=manager._client.api.streaming_pull, initial_request=manager._get_initial_request, should_recover=manager._should_recover, + throttle_reopen=True, ) resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done From 3b8aa3f0738dbf8473edb4f169234612517f21f1 Mon Sep 17 00:00:00 2001 From: Solomon Duskis Date: Tue, 18 Jun 2019 15:15:02 -0400 Subject: [PATCH 0367/1197] Release pubsub 0.42.0 (#8415) --- packages/google-cloud-pubsub/CHANGELOG.md | 24 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files 
changed, 25 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a63bc01d8b3d..d558f39eabef 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,30 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.42.0 + +06-18-2019 11:32 PDT + +### Implementation Changes +- Core: Mitigate busy reopen loop in ResumableBidiRpc consuming 100% CPU ([#8193](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8193)) +- Pub/Sub: Increase initial_rpc_timeout for messaging (via synth). ([#8219](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8219)) +- PubSub: Release the state lock before calling the publish api ([#8234](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8234)) +- Pub/Sub: Expose publish retry settings ([#8231](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8231)) +- Prevent unhandled background error on SPM shutdown ([#8111](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8111)) +- Update timeouts, blacken noxfile.py, setup.py (via synth). ([#8128](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8128)) +- PubSub: Fix streaming pull incorrectly handling FlowControl max_messages setting ([#7948](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/7948)) + +### Documentation +- Document PubSub FlowControl settings ([#8293](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8293)) +- Replace readthedocs links with links to github docs. ([#8291](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8291)) +- Pub/Sub: surface publish future in documentation ([#8229](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8229)) +- Pubsub: Separate subscriber and publish future documentation. 
([#8205](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8205)) +- Drop mention of long-removed 'policy' object. ([#8081](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8081)) + +### Internal / Testing Changes +- Pub/Sub: staticmethod check ([#8091](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8091)) +- Add empty lines (via synth). ([#8067](https://github.com/GoogleCloudPlatform/google-cloud-python/pull/8067)) + ## 0.41.0 05-15-2019 13:57 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 479f71946619..4cc4225fce7a 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "0.41.0" +version = "0.42.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 12614f97dd48f59a39e363f330839e03528049be Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 18 Jun 2019 12:41:45 -0700 Subject: [PATCH 0368/1197] [CHANGE ME] Re-generated pubsub to pick up changes in the API or client library generator. (#8399) --- packages/google-cloud-pubsub/.coveragerc | 1 + packages/google-cloud-pubsub/.flake8 | 1 + .../transports/publisher_grpc_transport.py | 8 +++-- .../transports/subscriber_grpc_transport.py | 8 +++-- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 1 + .../google/cloud/pubsub_v1/types.py | 33 +++++++++++++++++++ packages/google-cloud-pubsub/noxfile.py | 2 ++ packages/google-cloud-pubsub/setup.cfg | 1 + packages/google-cloud-pubsub/synth.metadata | 10 +++--- 9 files changed, 56 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 6b9ab9da4a1b..b178b094aa1d 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -1,3 +1,4 @@ +# Generated by synthtool. 
DO NOT EDIT! [run] branch = True diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 61766fa84d02..0268ecc9c55c 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 exclude = diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index ff1878ea8d1d..38ecea168903 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -74,7 +74,9 @@ def __init__( } @classmethod - def create_channel(cls, address="pubsub.googleapis.com:443", credentials=None): + def create_channel( + cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs + ): """Create and return a gRPC channel object. Args: @@ -84,12 +86,14 @@ def create_channel(cls, address="pubsub.googleapis.com:443", credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. Returns: grpc.Channel: A gRPC channel object. 
""" return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 1975250e96fb..794575b5551d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -74,7 +74,9 @@ def __init__( } @classmethod - def create_channel(cls, address="pubsub.googleapis.com:443", credentials=None): + def create_channel( + cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs + ): """Create and return a gRPC channel object. Args: @@ -84,12 +86,14 @@ def create_channel(cls, address="pubsub.googleapis.com:443", credentials=None): credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. + kwargs (dict): Keyword arguments, which are passed to the + channel creation. Returns: grpc.Channel: A gRPC channel object. """ return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES + address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) @property diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index ace1751bbf46..62ea3b6d88ee 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/pubsub_v1/proto/pubsub.proto diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 0e73ce6b5588..f818d4aad80f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -104,6 +104,39 @@ ) +if sys.version_info >= (3, 5): + FlowControl.__doc__ = ( + "The settings for controlling the rate at which messages are pulled " + "with an asynchronous subscription." + ) + FlowControl.max_bytes.__doc__ = ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." + ) + FlowControl.max_messages.__doc__ = ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." + ) + FlowControl.resume_threshold.__doc__ = ( + "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " + "below which to resume the message stream. Must be a positive number not " + "greater than ``1.0``." + ) + FlowControl.max_requests.__doc__ = "Currently not in use." + FlowControl.max_request_batch_size.__doc__ = ( + "The maximum number of requests scheduled by callbacks to process and " + "dispatch at a time." + ) + FlowControl.max_request_batch_latency.__doc__ = ( + "The maximum amount of time in seconds to wait for additional request " + "items before processing the next batch of requests." + ) + FlowControl.max_lease_duration.__doc__ = ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." 
+ ) + + _shared_modules = [ http_pb2, iam_policy_pb2, diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 968fb5a09bf1..f6257317fccd 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Generated by synthtool. DO NOT EDIT! + from __future__ import absolute_import import os import shutil diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg index 2a9acf13daa9..3bd555500e37 100644 --- a/packages/google-cloud-pubsub/setup.cfg +++ b/packages/google-cloud-pubsub/setup.cfg @@ -1,2 +1,3 @@ +# Generated by synthtool. DO NOT EDIT! [bdist_wheel] universal = 1 diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 0743cddffec8..09be1deb9f68 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-12T13:55:40.520528Z", + "updateTime": "2019-06-18T12:24:16.486264Z", "sources": [ { "generator": { "name": "artman", - "version": "0.24.1", - "dockerImage": "googleapis/artman@sha256:6018498e15310260dc9b03c9d576608908ed9fbabe42e1494ff3d827fea27b19" + "version": "0.27.0", + "dockerImage": "googleapis/artman@sha256:b036a7f4278d9deb5796f065e5c7f608d47d75369985ca7ab5039998120e972d" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f117dac435e96ebe58d85280a3faf2350c4d4219", - "internalRef": "252714985" + "sha": "384aa843867c4d17756d14a01f047b6368494d32", + "internalRef": "253675319" } }, { From 3b2774e4b31d5fe80a1393a963c514c8d09f7693 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 18 Jun 2019 15:12:55 -0700 Subject: [PATCH 0369/1197] Increase the minimum allowed version for api-core (#8419) --- 
packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 4cc4225fce7a..b8ab933c5167 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 4 - Beta" dependencies = [ - "google-api-core[grpc] >= 1.6.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.12.0, < 2.0.0dev", "grpc-google-iam-v1 >= 0.11.4, < 0.12dev", 'enum34; python_version < "3.4"', ] From 2522f9955578958ff23927c2c6f06ebe4064f461 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 18 Jun 2019 15:41:55 -0700 Subject: [PATCH 0370/1197] Release 0.42.1 (#8421) --- packages/google-cloud-pubsub/CHANGELOG.md | 9 +++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index d558f39eabef..922a22532e8d 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.42.1 + +06-18-2019 15:14 PDT + + +### Implementation Changes +- Increase the minimum allowed version for api core. ([#8419](https://github.com/googleapis/google-cloud-python/pull/8419)) +- Allow kwargs to be passed to create_channel. 
([#8399](https://github.com/googleapis/google-cloud-python/pull/8399)) + ## 0.42.0 06-18-2019 11:32 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b8ab933c5167..6edb02352d35 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "0.42.0" +version = "0.42.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 6f38cb7a4ad05f173b3e3f176b4aff296c29dcd4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 19 Jun 2019 08:13:32 -0700 Subject: [PATCH 0371/1197] Add custom docstrings for FlowControl enum and values (via synth). (#8426) --- .../google/cloud/pubsub_v1/types.py | 33 +++++++++++++++++++ packages/google-cloud-pubsub/synth.metadata | 10 +++--- 2 files changed, 38 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index f818d4aad80f..1b90bbb7dbbb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -137,6 +137,39 @@ ) +if sys.version_info >= (3, 5): + FlowControl.__doc__ = ( + "The settings for controlling the rate at which messages are pulled " + "with an asynchronous subscription." + ) + FlowControl.max_bytes.__doc__ = ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." + ) + FlowControl.max_messages.__doc__ = ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." + ) + FlowControl.resume_threshold.__doc__ = ( + "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " + "below which to resume the message stream. 
Must be a positive number not " + "greater than ``1.0``." + ) + FlowControl.max_requests.__doc__ = "Currently not in use." + FlowControl.max_request_batch_size.__doc__ = ( + "The maximum number of requests scheduled by callbacks to process and " + "dispatch at a time." + ) + FlowControl.max_request_batch_latency.__doc__ = ( + "The maximum amount of time in seconds to wait for additional request " + "items before processing the next batch of requests." + ) + FlowControl.max_lease_duration.__doc__ = ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." + ) + + _shared_modules = [ http_pb2, iam_policy_pb2, diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 09be1deb9f68..3838907bf04b 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-18T12:24:16.486264Z", + "updateTime": "2019-06-19T12:19:44.898368Z", "sources": [ { "generator": { "name": "artman", - "version": "0.27.0", - "dockerImage": "googleapis/artman@sha256:b036a7f4278d9deb5796f065e5c7f608d47d75369985ca7ab5039998120e972d" + "version": "0.28.0", + "dockerImage": "googleapis/artman@sha256:6ced5a36b08b82a328c69844e629300d58c14067f25cadab47f52542bdef7daf" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "384aa843867c4d17756d14a01f047b6368494d32", - "internalRef": "253675319" + "sha": "ac13167e31a20314aa05cc9911c95df250880485", + "internalRef": "253867808" } }, { From 87ffccf672e8eec97bee158dc0eeed6126d3e0e5 Mon Sep 17 00:00:00 2001 From: Daniel Gorelik Date: Wed, 19 Jun 2019 12:09:22 -0400 Subject: [PATCH 0372/1197] Use kwargs in test_subscriber_client (#8414) This prevents test failures in case the ordering of the keyword arguments to the `StreamingPullManager.open` is switched --- .../unit/pubsub_v1/subscriber/test_subscriber_client.py 
| 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 8bdb414c6280..b367733aa705 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -73,7 +73,9 @@ def test_subscribe(manager_open): assert future._manager._subscription == "sub_name_a" manager_open.assert_called_once_with( - mock.ANY, mock.sentinel.callback, future.set_exception + mock.ANY, + callback=mock.sentinel.callback, + on_callback_error=future.set_exception, ) @@ -100,5 +102,7 @@ def test_subscribe_options(manager_open): assert future._manager.flow_control == flow_control assert future._manager._scheduler == scheduler manager_open.assert_called_once_with( - mock.ANY, mock.sentinel.callback, future.set_exception + mock.ANY, + callback=mock.sentinel.callback, + on_callback_error=future.set_exception, ) From c1b2385fe1ea850d3f8b1f914d386d7650900913 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 20 Jun 2019 19:56:39 +0200 Subject: [PATCH 0373/1197] PubSub: Document batch settings, make synth operations idempotent (#8448) * Make synth operations on PubSub types idempotent This commit assures that the code block for injecting FlowControl docs is not added on every synth tool run - it is only added if one does not exist yet. * Document PubSub BatchSettings * Document PubSub types directly (i.e. 
w/o synth) --- .../google/cloud/pubsub_v1/types.py | 82 ++++--------------- packages/google-cloud-pubsub/synth.metadata | 10 +-- packages/google-cloud-pubsub/synth.py | 42 ---------- 3 files changed, 21 insertions(+), 113 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 1b90bbb7dbbb..58b880fb125b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -44,6 +44,22 @@ 1000, # max_messages: 1,000 ) +if sys.version_info >= (3, 5): + BatchSettings.__doc__ = "The settings for batch publishing the messages." + BatchSettings.max_bytes.__doc__ = ( + "The maximum total size of the messages to collect before automatically " + "publishing the batch." + ) + BatchSettings.max_latency.__doc__ = ( + "The maximum number of seconds to wait for additional messages before " + "automatically publishing the batch." + ) + BatchSettings.max_messages.__doc__ = ( + "The maximum number of messages to collect before automatically " + "publishing the batch." + ) + + # Define the type class and default values for flow control settings. # # This class is used when creating a publisher or subscriber client, and @@ -104,72 +120,6 @@ ) -if sys.version_info >= (3, 5): - FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." - ) - FlowControl.max_bytes.__doc__ = ( - "The maximum total size of received - but not yet processed - messages " - "before pausing the message stream." - ) - FlowControl.max_messages.__doc__ = ( - "The maximum number of received - but not yet processed - messages before " - "pausing the message stream." - ) - FlowControl.resume_threshold.__doc__ = ( - "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " - "below which to resume the message stream. 
Must be a positive number not " - "greater than ``1.0``." - ) - FlowControl.max_requests.__doc__ = "Currently not in use." - FlowControl.max_request_batch_size.__doc__ = ( - "The maximum number of requests scheduled by callbacks to process and " - "dispatch at a time." - ) - FlowControl.max_request_batch_latency.__doc__ = ( - "The maximum amount of time in seconds to wait for additional request " - "items before processing the next batch of requests." - ) - FlowControl.max_lease_duration.__doc__ = ( - "The maximum amount of time in seconds to hold a lease on a message " - "before dropping it from the lease management." - ) - - -if sys.version_info >= (3, 5): - FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." - ) - FlowControl.max_bytes.__doc__ = ( - "The maximum total size of received - but not yet processed - messages " - "before pausing the message stream." - ) - FlowControl.max_messages.__doc__ = ( - "The maximum number of received - but not yet processed - messages before " - "pausing the message stream." - ) - FlowControl.resume_threshold.__doc__ = ( - "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " - "below which to resume the message stream. Must be a positive number not " - "greater than ``1.0``." - ) - FlowControl.max_requests.__doc__ = "Currently not in use." - FlowControl.max_request_batch_size.__doc__ = ( - "The maximum number of requests scheduled by callbacks to process and " - "dispatch at a time." - ) - FlowControl.max_request_batch_latency.__doc__ = ( - "The maximum amount of time in seconds to wait for additional request " - "items before processing the next batch of requests." - ) - FlowControl.max_lease_duration.__doc__ = ( - "The maximum amount of time in seconds to hold a lease on a message " - "before dropping it from the lease management." 
- ) - - _shared_modules = [ http_pb2, iam_policy_pb2, diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 3838907bf04b..0f3f84587b5a 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-19T12:19:44.898368Z", + "updateTime": "2019-06-20T16:24:11.087131Z", "sources": [ { "generator": { "name": "artman", - "version": "0.28.0", - "dockerImage": "googleapis/artman@sha256:6ced5a36b08b82a328c69844e629300d58c14067f25cadab47f52542bdef7daf" + "version": "0.29.0", + "dockerImage": "googleapis/artman@sha256:b79c8c20ee51e5302686c9d1294672d59290df1489be93749ef17d0172cc508d" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "ac13167e31a20314aa05cc9911c95df250880485", - "internalRef": "253867808" + "sha": "45e125f9e30dc5d45b52752b3ab78dd4f6084f2d", + "internalRef": "254026509" } }, { diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index e9a54a5da10a..f95da65ede0c 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -167,48 +167,6 @@ def _merge_dict(d1, d2): """ ) -# document FlowControl settings in Python 3.5+ -s.replace( - "google/cloud/pubsub_v1/types.py", - "FlowControl.__new__.__defaults__ = \(.*?\)", - textwrap.dedent("""\ - \g<0> - - if sys.version_info >= (3, 5): - FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." - ) - FlowControl.max_bytes.__doc__ = ( - "The maximum total size of received - but not yet processed - messages " - "before pausing the message stream." - ) - FlowControl.max_messages.__doc__ = ( - "The maximum number of received - but not yet processed - messages before " - "pausing the message stream." 
- ) - FlowControl.resume_threshold.__doc__ = ( - "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " - "below which to resume the message stream. Must be a positive number not " - "greater than ``1.0``." - ) - FlowControl.max_requests.__doc__ = "Currently not in use." - FlowControl.max_request_batch_size.__doc__ = ( - "The maximum number of requests scheduled by callbacks to process and " - "dispatch at a time." - ) - FlowControl.max_request_batch_latency.__doc__ = ( - "The maximum amount of time in seconds to wait for additional request " - "items before processing the next batch of requests." - ) - FlowControl.max_lease_duration.__doc__ = ( - "The maximum amount of time in seconds to hold a lease on a message " - "before dropping it from the lease management." - ) - """), - flags=re.DOTALL, -) - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From 9adc8b4581f79ff466a12dace180c422f7d9461a Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 21 Jun 2019 10:31:29 +0200 Subject: [PATCH 0374/1197] PubSub: Add system tests for PubSub clients (#8277) * Add system tests for listing topics, subscriptions * Add system test for listing topic's subscriptions * Add system test for using PubSub snapshots * Add PubSub system tests for managing IAM policy * Add test for creating non-default subscriptions * Remove flaky PubSub snapshots system test The PubSub backend does not give any guarantees about when a message will be re-delivered after seeking back to a snapshot, it will only be delivered "eventually". That causes flakiness in the snapshots test. Since the test cannot wait for an indefinite amount of time, this commit removes it in order to not randomly break the CI builds. 
--- packages/google-cloud-pubsub/tests/system.py | 171 +++++++++++++++++++ 1 file changed, 171 insertions(+) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 13e81d281f42..7ffb4a580194 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -16,6 +16,7 @@ import datetime import itertools +import operator as op import threading import time @@ -183,6 +184,176 @@ def test_subscribe_to_messages_async_callbacks( future.cancel() +def test_creating_subscriptions_with_non_default_settings( + publisher, subscriber, project, topic_path, subscription_path, cleanup +): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and a subscription, customize the latter's policy + publisher.create_topic(topic_path) + + msg_retention_duration = {"seconds": 911} + expiration_policy = {"ttl": {"seconds": 90210}} + new_subscription = subscriber.create_subscription( + subscription_path, + topic_path, + ack_deadline_seconds=30, + retain_acked_messages=True, + message_retention_duration=msg_retention_duration, + expiration_policy=expiration_policy, + ) + + # fetch the subscription and check its settings + project_path = subscriber.project_path(project) + subscriptions = subscriber.list_subscriptions(project_path) + + subscriptions = [sub for sub in subscriptions if sub.topic == topic_path] + assert len(subscriptions) == 1 + subscription = subscriptions[0] + + assert subscription == new_subscription + assert subscription.ack_deadline_seconds == 30 + assert subscription.retain_acked_messages + assert subscription.message_retention_duration.seconds == 911 + assert subscription.expiration_policy.ttl.seconds == 90210 + + +def test_listing_project_topics(publisher, project, cleanup): + topic_paths = [ + publisher.topic_path(project, 
"topic-{}".format(i) + unique_resource_id(".")) + for i in range(1, 4) + ] + for topic in topic_paths: + cleanup.append((publisher.delete_topic, topic)) + publisher.create_topic(topic) + + project_path = publisher.project_path(project) + project_topics = publisher.list_topics(project_path) + project_topics = set(t.name for t in project_topics) + + # there might be other topics in the project, thus do a "is subset" check + assert set(topic_paths) <= project_topics + + +def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): + # create topics + topic_paths = [ + publisher.topic_path(project, "topic-1" + unique_resource_id(".")), + publisher.topic_path(project, "topic-2" + unique_resource_id(".")), + ] + for topic in topic_paths: + cleanup.append((publisher.delete_topic, topic)) + publisher.create_topic(topic) + + # create subscriptions + subscription_paths = [ + subscriber.subscription_path( + project, "sub-{}".format(i) + unique_resource_id(".") + ) + for i in range(1, 4) + ] + for i, subscription in enumerate(subscription_paths): + topic = topic_paths[i % 2] + cleanup.append((subscriber.delete_subscription, subscription)) + subscriber.create_subscription(subscription, topic) + + # retrieve subscriptions and check that the list matches the expected + project_path = subscriber.project_path(project) + subscriptions = subscriber.list_subscriptions(project_path) + subscriptions = set(s.name for s in subscriptions) + + # there might be other subscriptions in the project, thus do a "is subset" check + assert set(subscription_paths) <= subscriptions + + +def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): + # create topics + topic_paths = [ + publisher.topic_path(project, "topic-1" + unique_resource_id(".")), + publisher.topic_path(project, "topic-2" + unique_resource_id(".")), + ] + for topic in topic_paths: + cleanup.append((publisher.delete_topic, topic)) + publisher.create_topic(topic) + + # create subscriptions + 
subscription_paths = [ + subscriber.subscription_path( + project, "sub-{}".format(i) + unique_resource_id(".") + ) + for i in range(1, 4) + ] + for i, subscription in enumerate(subscription_paths): + topic = topic_paths[i % 2] + cleanup.append((subscriber.delete_subscription, subscription)) + subscriber.create_subscription(subscription, topic) + + # retrieve subscriptions and check that the list matches the expected + subscriptions = publisher.list_topic_subscriptions(topic_paths[0]) + subscriptions = set(subscriptions) + + assert subscriptions == {subscription_paths[0], subscription_paths[2]} + + +def test_managing_topic_iam_policy(publisher, topic_path, cleanup): + cleanup.append((publisher.delete_topic, topic_path)) + + # create a topic and customize its policy + publisher.create_topic(topic_path) + topic_policy = publisher.get_iam_policy(topic_path) + + topic_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) + topic_policy.bindings.add( + role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] + ) + new_policy = publisher.set_iam_policy(topic_path, topic_policy) + + # fetch the topic policy again and check its values + topic_policy = publisher.get_iam_policy(topic_path) + assert topic_policy.bindings == new_policy.bindings + assert len(topic_policy.bindings) == 2 + + bindings = sorted(topic_policy.bindings, key=op.attrgetter("role")) + assert bindings[0].role == "roles/pubsub.editor" + assert bindings[0].members == ["domain:google.com"] + + assert bindings[1].role == "roles/pubsub.viewer" + assert bindings[1].members == ["group:cloud-logs@google.com"] + + +def test_managing_subscription_iam_policy( + publisher, subscriber, topic_path, subscription_path, cleanup +): + # Make sure the topic and subscription get deleted. 
+ cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and a subscription, customize the latter's policy + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + sub_policy = subscriber.get_iam_policy(subscription_path) + + sub_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) + sub_policy.bindings.add( + role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] + ) + new_policy = subscriber.set_iam_policy(subscription_path, sub_policy) + + # fetch the subscription policy again and check its values + sub_policy = subscriber.get_iam_policy(subscription_path) + assert sub_policy.bindings == new_policy.bindings + assert len(sub_policy.bindings) == 2 + + bindings = sorted(sub_policy.bindings, key=op.attrgetter("role")) + assert bindings[0].role == "roles/pubsub.editor" + assert bindings[0].members == ["domain:google.com"] + + assert bindings[1].role == "roles/pubsub.viewer" + assert bindings[1].members == ["group:cloud-logs@google.com"] + + class TestStreamingPull(object): def test_streaming_pull_callback_error_propagation( self, publisher, topic_path, subscriber, subscription_path, cleanup From 889d3e9a091f0e65e9c7097d4d86d0f939ae1839 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 24 Jun 2019 18:45:52 +0200 Subject: [PATCH 0375/1197] Document different PubSub received message types (#8468) --- packages/google-cloud-pubsub/README.rst | 3 +- .../docs/subscriber/index.rst | 72 +++++++++++++------ 2 files changed, 52 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 96027f9ffad5..7e98bc731b5a 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -155,6 +155,7 @@ block the current thread until a given condition obtains: except KeyboardInterrupt: future.cancel() -To
learn more, consult the `subscriber documentation`_. +It is also possible to pull messages in a synchronous (blocking) fashion. To +learn more about subscribing, consult the `subscriber documentation`_. .. _subscriber documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/subscriber/index.html diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst index 1e13e1dc1e79..d3d1cb5415b5 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -34,30 +34,54 @@ to subscribe to, and it must already exist. Once you have that, it is easy: .. code-block:: python - # Substitute {project}, {topic}, and {subscription} with appropriate - # values for your application. - topic_name = 'projects/{project}/topics/{topic}' - sub_name = 'projects/{project}/subscriptions/{subscription}' - subscriber.create_subscription(sub_name, topic_name) + # Substitute PROJECT, SUBSCRIPTION, and TOPIC with appropriate values for + # your application. + sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) + topic_path = subscriber.topic_path(PROJECT, TOPIC) + subscriber.create_subscription(sub_path, topic_path) +Once you have created a subscription (or if you already had one), the next +step is to pull data from it. -Pulling a Subscription ----------------------- -Once you have created a subscription (or if you already had one), the next -step is to pull data from it. The subscriber client uses the +Pulling a Subscription Synchronously +------------------------------------ + +To pull the messages synchronously, use the client's +:meth:`~.pubsub_v1.subscriber.client.Client.pull` method. + +.. code-block:: python + + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your + # application. 
+ subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) + response = subscriber.pull(subscription_path, max_messages=5) + + for msg in response.received_messages: + print("Received message:", msg.message.data) + + ack_ids = [msg.ack_id for msg in response.received_messages] + subscriber.acknowledge(subscription_path, ack_ids) + +The method returns a :class:`~.pubsub_v1.types.PullResponse` instance that +contains a list of received :class:`~.pubsub_v1.types.ReceivedMessage` +instances. + + +Pulling a Subscription Asynchronously +------------------------------------- + +The subscriber client uses the :meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method to start a background thread to receive messages from Pub/Sub and calls a callback with each message received. .. code-block:: python - # As before, substitute {project} and {subscription} with appropriate - # values for your application. - future = subscriber.subscribe( - 'projects/{project}/subscriptions/{subscription}', - callback - ) + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your + # application. + subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) + future = subscriber.subscribe(subscription_path, callback) This will return a :class:`~.pubsub_v1.subscriber.futures.StreamingPullFuture`. This future allows @@ -71,8 +95,11 @@ Messages received from a subscription are processed asynchronously through **callbacks**. The basic idea: Define a function that takes one argument; this argument -will be a :class:`~.pubsub_v1.subscriber.message.Message` instance. This -function should do whatever processing is necessary. At the end, the +will be a :class:`~.pubsub_v1.subscriber.message.Message` instance, which is +a convenience wrapper around the :class:`~.pubsub_v1.types.PubsubMessage` +instance received from the server (and stored under the ``message`` attribute). + +This function should do whatever processing is necessary. 
At the end, the function should either :meth:`~.pubsub_v1.subscriber.message.Message.ack` or :meth:`~.pubsub_v1.subscriber.message.Message.nack` the message. @@ -87,13 +114,14 @@ Here is an example: # Note that the callback is defined *before* the subscription is opened. def callback(message): do_something_with(message) # Replace this with your actual logic. - message.ack() + message.ack() # Asynchronously acknowledge the message. + + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your + # application. + subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) # Open the subscription, passing the callback. - future = subscriber.subscribe( - 'projects/{project}/subscriptions/{subscription}', - callback - ) + future = subscriber.subscribe(subscription_path, callback) The :meth:`~.pubsub_v1.subscriber.client.Client.subscribe` method returns a :class:`~.pubsub_v1.subscriber.futures.StreamingPullFuture`, which is both From 2465c3499fa2ead9492948e569757d8d5e27e938 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 24 Jun 2019 18:46:04 +0200 Subject: [PATCH 0376/1197] PubSub: Document how to choose the PubSub auth method (#8429) * Document how to choose the PubSub auth method * Give more exposure to google-auth in PubSub docs The link to the library is moved to the first paragraph of the Authentication section for better visibility. * Use publisher audience for the publisher client --- packages/google-cloud-pubsub/README.rst | 33 +++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 7e98bc731b5a..ec54a57dfc34 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -159,3 +159,36 @@ It is also possible to pull messages in a synchronous (blocking) fashion. To learn more about subscribing, consult the `subscriber documentation`_. .. 
_subscriber documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/subscriber/index.html + + +Authentication +^^^^^^^^^^^^^^ + +It is possible to specify the authentication method to use with the Pub/Sub +clients. This can be done by providing an explicit `Credentials`_ instance. Support +for various authentication methods is available from the `google-auth`_ library. + +For example, to use JSON Web Tokens, provide a `google.auth.jwt.Credentials`_ instance: + +.. code-block:: python + + import json + from google.auth import jwt + + service_account_info = json.load(open("service-account-info.json")) + audience = "https://pubsub.googleapis.com/google.pubsub.v1.Subscriber" + + credentials = jwt.Credentials.from_service_account_info( + service_account_info, audience=audience + ) + + subscriber = pubsub_v1.SubscriberClient(credentials=credentials) + + # The same for the publisher, except that the "audience" claim needs to be adjusted + publisher_audience = "https://pubsub.googleapis.com/google.pubsub.v1.Publisher" + credentials_pub = credentials.with_claims(audience=publisher_audience) + publisher = pubsub_v1.PublisherClient(credentials=credentials_pub) + +.. _Credentials: https://google-auth.readthedocs.io/en/latest/reference/google.auth.credentials.html#google.auth.credentials.Credentials +.. _google-auth: https://google-auth.readthedocs.io/en/latest/index.html +.. _google.auth.jwt.Credentials: https://google-auth.readthedocs.io/en/latest/reference/google.auth.jwt.html#google.auth.jwt.Credentials From 4679fcee4597bd752976f9b6a9965aeb2c80d43c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 25 Jun 2019 12:44:16 -0700 Subject: [PATCH 0377/1197] All: Add docs job to publish to googleapis.dev. 
(#8464) --- packages/google-cloud-pubsub/.repo-metadata.json | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 packages/google-cloud-pubsub/.repo-metadata.json diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json new file mode 100644 index 000000000000..916011d82ca9 --- /dev/null +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -0,0 +1,13 @@ +{ + "name": "pubsub", + "name_pretty": "Google Cloud Pub/Sub", + "product_documentation": "https://cloud.google.com/pubsub/docs/", + "client_documentation": "https://googleapis.dev/python/pubsub/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", + "release_level": "beta", + "language": "python", + "repo": "googleapis/google-cloud-python", + "distribution_name": "google-cloud-pubsub", + "api_id": "pubsub.googleapis.com", + "requires_billing": true +} \ No newline at end of file From 485e43a1385bea3f1e896f48759e36c26d5c0a68 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 28 Jun 2019 10:05:13 -0700 Subject: [PATCH 0378/1197] Add 'client_options' support, update list method docstrings (via synth). 
(#8518) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 33 ++++++++++++++----- .../pubsub_v1/gapic/subscriber_client.py | 33 ++++++++++++++----- packages/google-cloud-pubsub/synth.metadata | 10 +++--- 3 files changed, 53 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index d9698e301b07..8bd90580a4e5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -24,6 +24,7 @@ import warnings from google.oauth2 import service_account +import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method @@ -130,6 +131,7 @@ def __init__( credentials=None, client_config=None, client_info=None, + client_options=None, ): """Constructor. @@ -160,6 +162,9 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. """ default_client_config = deepcopy(publisher_client_config.config) @@ -175,6 +180,15 @@ def __init__( stacklevel=2, ) + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + # Instantiate the transport. # The transport is responsible for handling serialization and # deserialization and actually sending data to the service. 
@@ -183,6 +197,7 @@ def __init__( self.transport = transport( credentials=credentials, default_class=publisher_grpc_transport.PublisherGrpcTransport, + address=api_endpoint, ) else: if credentials: @@ -193,7 +208,7 @@ def __init__( self.transport = transport else: self.transport = publisher_grpc_transport.PublisherGrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: @@ -607,10 +622,10 @@ def list_topics( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -709,10 +724,10 @@ def list_topic_subscriptions( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`str` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`str` instances. + You can also iterate over the pages of the response + using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 4aed21df8f27..f15442581d0a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -21,6 +21,7 @@ import warnings from google.oauth2 import service_account +import google.api_core.client_options import google.api_core.gapic_v1.client_info import google.api_core.gapic_v1.config import google.api_core.gapic_v1.method @@ -126,6 +127,7 @@ def __init__( credentials=None, client_config=None, client_info=None, + client_options=None, ): """Constructor. @@ -156,6 +158,9 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + client_options (Union[dict, google.api_core.client_options.ClientOptions]): + Client options used to set user options on the client. API Endpoint + should be set through client_options. """ # Raise deprecation warnings for things we want to go away. if client_config is not None: @@ -174,6 +179,15 @@ def __init__( stacklevel=2, ) + api_endpoint = self.SERVICE_ADDRESS + if client_options: + if type(client_options) == dict: + client_options = google.api_core.client_options.from_dict( + client_options + ) + if client_options.api_endpoint: + api_endpoint = client_options.api_endpoint + # Instantiate the transport. # The transport is responsible for handling serialization and # deserialization and actually sending data to the service. 
@@ -182,6 +196,7 @@ def __init__( self.transport = transport( credentials=credentials, default_class=subscriber_grpc_transport.SubscriberGrpcTransport, + address=api_endpoint, ) else: if credentials: @@ -192,7 +207,7 @@ def __init__( self.transport = transport else: self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( - address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials ) if client_info is None: @@ -586,10 +601,10 @@ def list_subscriptions( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. + You can also iterate over the pages of the response + using its `pages` property. Raises: google.api_core.exceptions.GoogleAPICallError: If the request @@ -1185,10 +1200,10 @@ def list_snapshots( that is provided to the method. Returns: - A :class:`~google.gax.PageIterator` instance. By default, this - is an iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. - This object can also be configured to iterate over the pages - of the response through the `options` parameter. + A :class:`~google.api_core.page_iterator.PageIterator` instance. + An iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. + You can also iterate over the pages of the response + using its `pages` property. 
Raises: google.api_core.exceptions.GoogleAPICallError: If the request diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 0f3f84587b5a..329e9b3fe4de 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-20T16:24:11.087131Z", + "updateTime": "2019-06-28T12:30:21.076185Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.0", - "dockerImage": "googleapis/artman@sha256:b79c8c20ee51e5302686c9d1294672d59290df1489be93749ef17d0172cc508d" + "version": "0.29.2", + "dockerImage": "googleapis/artman@sha256:45263333b058a4b3c26a8b7680a2710f43eae3d250f791a6cb66423991dcb2df" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "45e125f9e30dc5d45b52752b3ab78dd4f6084f2d", - "internalRef": "254026509" + "sha": "84c8ad4e52f8eec8f08a60636cfa597b86969b5c", + "internalRef": "255474859" } }, { From 87ea6c1877e6d5f5de2845cc77a3c4cd08d21383 Mon Sep 17 00:00:00 2001 From: Calvin Jeong Date: Wed, 10 Jul 2019 01:16:33 +1000 Subject: [PATCH 0379/1197] Fix typo in publisher index. 
(#8619) --- packages/google-cloud-pubsub/docs/publisher/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index a8485632c6f8..2a785359c443 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -84,7 +84,7 @@ If you need different batching settings, simply provide a from google.cloud.pubsub import types client = pubsub.PublisherClient( - batch_settings=BatchSettings(max_messages=500), + batch_settings=types.BatchSettings(max_messages=500), ) Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a From 3a39a85682631144d4b342d751d8abe2cb4ed0ee Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 11 Jul 2019 14:40:30 -0400 Subject: [PATCH 0380/1197] Update pin for 'grpc-google-iam-v1' to 0.12.3+. (#8647) For pubsub / kms, also update the import of the 'IAMPolicy' stub, which is no longer exported from the same location. 
Supersedes: #8639 Supersedes: #8640 Closes: #8574 Closes: #8576 Closes: #8577 Closes: #8585 Closes: #8587 Closes: #8591 Closes: #8594 Closes: #8595 Closes: #8598 --- .../pubsub_v1/gapic/transports/publisher_grpc_transport.py | 2 +- .../gapic/transports/subscriber_grpc_transport.py | 2 +- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/synth.py | 7 +++++++ 4 files changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 38ecea168903..2a5c38104b3b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -18,7 +18,7 @@ import google.api_core.grpc_helpers from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2 class PublisherGrpcTransport(object): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 794575b5551d..16c1f82d0f30 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -18,7 +18,7 @@ import google.api_core.grpc_helpers from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2 class SubscriberGrpcTransport(object): diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6edb02352d35..df05565bb5e9 100644 --- 
a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,7 +30,7 @@ release_status = "Development Status :: 4 - Beta" dependencies = [ "google-api-core[grpc] >= 1.12.0, < 2.0.0dev", - "grpc-google-iam-v1 >= 0.11.4, < 0.12dev", + "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", 'enum34; python_version < "3.4"', ] extras = {} diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index f95da65ede0c..c4601afbdb51 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -167,6 +167,13 @@ def _merge_dict(d1, d2): """ ) +# Temporary fixup for 'grpc-google-iam-vi 0.12.4' (before generation). +s.replace( + "google/cloud/pubsub_v1/gapic/transports/*_grpc_transport.py", + "from google.iam.v1 import iam_policy_pb2", + "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", +) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- From 273cb8bf51ccc400f2f75bb9439ad5ca52c6b68a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 12 Jul 2019 09:56:42 -0700 Subject: [PATCH 0381/1197] Add 'options_' argument to clients' 'get_iam_policy'; pin black version (via synth). 
(#8657) --- .../google/cloud/pubsub_v1/gapic/publisher_client.py | 11 ++++++++++- .../google/cloud/pubsub_v1/gapic/subscriber_client.py | 11 ++++++++++- packages/google-cloud-pubsub/noxfile.py | 6 +++--- packages/google-cloud-pubsub/synth.metadata | 10 +++++----- 4 files changed, 28 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 8bd90580a4e5..31761b3c47e2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -40,6 +40,7 @@ from google.cloud.pubsub_v1.proto import pubsub_pb2 from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 @@ -935,6 +936,7 @@ def set_iam_policy( def get_iam_policy( self, resource, + options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -956,6 +958,11 @@ def get_iam_policy( Args: resource (str): REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field. + options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to + ``GetIamPolicy``. This field is only used by Cloud IAM. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
@@ -986,7 +993,9 @@ def get_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource, options=options_ + ) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index f15442581d0a..e109a1283777 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -37,6 +37,7 @@ from google.cloud.pubsub_v1.proto import pubsub_pb2 from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import duration_pb2 from google.protobuf import empty_pb2 @@ -1702,6 +1703,7 @@ def set_iam_policy( def get_iam_policy( self, resource, + options_=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -1723,6 +1725,11 @@ def get_iam_policy( Args: resource (str): REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field. + options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to + ``GetIamPolicy``. This field is only used by Cloud IAM. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. 
@@ -1753,7 +1760,9 @@ def get_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) + request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource, options=options_ + ) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index f6257317fccd..a2eefbb6765f 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -24,7 +24,7 @@ LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) - +BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] if os.path.exists("samples"): @@ -38,7 +38,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", "black", *LOCAL_DEPS) + session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -53,7 +53,7 @@ def blacken(session): That run uses an image that doesn't have 3.6 installed. Before updating this check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" - session.install("black") + session.install(BLACK_VERSION) session.run("black", *BLACK_PATHS) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 329e9b3fe4de..46f314fac5df 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-06-28T12:30:21.076185Z", + "updateTime": "2019-07-12T12:30:16.225921Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.2", - "dockerImage": "googleapis/artman@sha256:45263333b058a4b3c26a8b7680a2710f43eae3d250f791a6cb66423991dcb2df" + "version": "0.29.4", + "dockerImage": "googleapis/artman@sha256:63f21e83cb92680b7001dc381069e962c9e6dee314fd8365ac554c07c89221fb" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "84c8ad4e52f8eec8f08a60636cfa597b86969b5c", - "internalRef": "255474859" + "sha": "47bd0c2ba33c28dd624a65dad382e02bb61d1618", + "internalRef": "257690259" } }, { From 452852bb78c7ecfdc13c303a692cf824aa8c8e98 Mon Sep 17 00:00:00 2001 From: ylil93 Date: Mon, 15 Jul 2019 12:12:29 -0700 Subject: [PATCH 0382/1197] Add compatibility check badges to READMEs. (#8288) --- packages/google-cloud-pubsub/README.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index ec54a57dfc34..3c0eb17b8644 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Pub / Sub ======================================== -|beta| |pypi| |versions| +|beta| |pypi| |versions| |compat_check_pypi| |compat_check_github| `Google Cloud Pub / Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. You @@ -25,6 +25,10 @@ independently written applications. 
:target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ +.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=google-cloud-pubsub + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=google-cloud-pubsub +.. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dpubsub + :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dpubsub .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs .. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/ From 17e36dd6f6d25182e543696715cd98250fa3ddbe Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 16 Jul 2019 08:07:20 -0700 Subject: [PATCH 0383/1197] Accomodate new location of 'IAMPolicyStub' (via synth). 
(#8680) --- .../google/cloud/pubsub_v1/gapic/publisher_client.py | 1 + .../google/cloud/pubsub_v1/gapic/subscriber_client.py | 1 + .../gapic/transports/publisher_grpc_transport.py | 4 ++-- .../gapic/transports/subscriber_grpc_transport.py | 4 ++-- packages/google-cloud-pubsub/synth.metadata | 10 +++++----- 5 files changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 31761b3c47e2..2ecfe52dbe7a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -40,6 +40,7 @@ from google.cloud.pubsub_v1.proto import pubsub_pb2 from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2_grpc from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import empty_pb2 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index e109a1283777..48843d4d5cd0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -37,6 +37,7 @@ from google.cloud.pubsub_v1.proto import pubsub_pb2 from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2_grpc from google.iam.v1 import options_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import duration_pb2 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 
2a5c38104b3b..27e7d7527158 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -18,7 +18,7 @@ import google.api_core.grpc_helpers from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc class PublisherGrpcTransport(object): @@ -69,7 +69,7 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. self._stubs = { - "iam_policy_stub": iam_policy_pb2.IAMPolicyStub(channel), + "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), "publisher_stub": pubsub_pb2_grpc.PublisherStub(channel), } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 16c1f82d0f30..ca784e6d25f6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -18,7 +18,7 @@ import google.api_core.grpc_helpers from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc class SubscriberGrpcTransport(object): @@ -69,7 +69,7 @@ def __init__( # gRPC uses objects called "stubs" that are bound to the # channel and provide a basic method for each RPC. 
self._stubs = { - "iam_policy_stub": iam_policy_pb2.IAMPolicyStub(channel), + "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), "subscriber_stub": pubsub_pb2_grpc.SubscriberStub(channel), } diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 46f314fac5df..d1a0b007b1a1 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-07-12T12:30:16.225921Z", + "updateTime": "2019-07-16T12:29:08.120680Z", "sources": [ { "generator": { "name": "artman", - "version": "0.29.4", - "dockerImage": "googleapis/artman@sha256:63f21e83cb92680b7001dc381069e962c9e6dee314fd8365ac554c07c89221fb" + "version": "0.30.0", + "dockerImage": "googleapis/artman@sha256:a44d9fb6fe826ca0ea7d6f7be23c596346bed82ee513a0043f3c068279717439" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "47bd0c2ba33c28dd624a65dad382e02bb61d1618", - "internalRef": "257690259" + "sha": "2c7bc0a10225cc8e74476ce1131ebf670bed6169", + "internalRef": "258244875" } }, { From d94587fe285754af204948950a7adce548ada1e4 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 19 Jul 2019 13:31:47 -0400 Subject: [PATCH 0384/1197] Bump minimum version for google-api-core to 1.14.0. 
(#8709) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index df05565bb5e9..17fad9fe3f96 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 4 - Beta" dependencies = [ - "google-api-core[grpc] >= 1.12.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", 'enum34; python_version < "3.4"', ] From 385cc0460ca7b031a8846c32cfb3a0d632894bca Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 19 Jul 2019 14:45:47 -0700 Subject: [PATCH 0385/1197] Link to googleapis.dev documentation in READMEs. (#8705) --- packages/google-cloud-pubsub/README.rst | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 3c0eb17b8644..f31894deac5b 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -31,7 +31,7 @@ independently written applications. :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dpubsub .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs -.. _Client Library Documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/ +.. _Client Library Documentation: https://googleapis.dev/python/pubsub/latest Quick Start ----------- @@ -46,7 +46,7 @@ In order to use this library, you first need to go through the following steps: .. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project .. 
_Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project .. _Enable the Google Cloud Pub / Sub API.: https://cloud.google.com/pubsub -.. _Setup Authentication.: https://googleapis.github.io/google-cloud-python/latest/core/auth.html +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html Installation ~~~~~~~~~~~~ @@ -117,7 +117,7 @@ messages to it To learn more, consult the `publishing documentation`_. -.. _publishing documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/publisher/index.html +.. _publishing documentation: https://googleapis.dev/python/pubsub/latest Subscribing @@ -162,7 +162,7 @@ block the current thread until a given condition obtains: It is also possible to pull messages in a synchronous (blocking) fashion. To learn more about subscribing, consult the `subscriber documentation`_. -.. _subscriber documentation: https://googleapis.github.io/google-cloud-python/latest/pubsub/subscriber/index.html +.. _subscriber documentation: https://googleapis.dev/python/pubsub/latest Authentication From a194f804c4b16d1399d89a39f81a1f47472bf354 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Thu, 25 Jul 2019 09:41:05 -0700 Subject: [PATCH 0386/1197] Release 0.43.0 (#8761) --- packages/google-cloud-pubsub/CHANGELOG.md | 30 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 922a22532e8d..a354f098bf65 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,36 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.43.0 + +07-24-2019 17:13 PDT + + +### Implementation Changes +- Accomodate new location of 'IAMPolicyStub' (via synth). 
([#8680](https://github.com/googleapis/google-cloud-python/pull/8680)) +- Use kwargs in test_subscriber_client ([#8414](https://github.com/googleapis/google-cloud-python/pull/8414)) + +### New Features +- Add 'options_' argument to clients' 'get_iam_policy'; pin black version (via synth). ([#8657](https://github.com/googleapis/google-cloud-python/pull/8657)) +- Add 'client_options' support, update list method docstrings (via synth). ([#8518](https://github.com/googleapis/google-cloud-python/pull/8518)) + +### Dependencies +- Bump minimum version for google-api-core to 1.14.0. ([#8709](https://github.com/googleapis/google-cloud-python/pull/8709)) +- Update pin for 'grpc-google-iam-v1' to 0.12.3+. ([#8647](https://github.com/googleapis/google-cloud-python/pull/8647)) + +### Documentation +- Link to googleapis.dev documentation in READMEs. ([#8705](https://github.com/googleapis/google-cloud-python/pull/8705)) +- Add compatibility check badges to READMEs. ([#8288](https://github.com/googleapis/google-cloud-python/pull/8288)) +- Fix typo in publisher index. ([#8619](https://github.com/googleapis/google-cloud-python/pull/8619)) +- Document how to choose the PubSub auth method ([#8429](https://github.com/googleapis/google-cloud-python/pull/8429)) +- Document different PuSub received message types ([#8468](https://github.com/googleapis/google-cloud-python/pull/8468)) +- PubSub: Document batch settings, make synth operations idempotent ([#8448](https://github.com/googleapis/google-cloud-python/pull/8448)) +- Add custom docstrings for FlowControl enum and values (via synth). ([#8426](https://github.com/googleapis/google-cloud-python/pull/8426)) + +### Internal / Testing Changes +- Add docs job to publish to googleapis.dev. 
([#8464](https://github.com/googleapis/google-cloud-python/pull/8464)) +- Add system tests for PubSub clients ([#8277](https://github.com/googleapis/google-cloud-python/pull/8277)) + ## 0.42.1 06-18-2019 15:14 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 17fad9fe3f96..6953b8e13f2c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "0.42.1" +version = "0.43.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 16f20da68fd7b93534632c2115f1556fb177de3e Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 26 Jul 2019 10:23:30 -0700 Subject: [PATCH 0387/1197] Pub/Sub: document regional endpoint (#8789) * document client_options * show an example that sets client_options --- .../google/cloud/pubsub_v1/publisher/client.py | 3 +++ .../google/cloud/pubsub_v1/subscriber/client.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index b837de24c6f0..006bf509d24b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -58,6 +58,9 @@ class Client(object): Before being passed along to the GAPIC constructor, a channel may be added if ``credentials`` are passed explicitly or if the Pub / Sub emulator is detected as running. + Regional endpoints can be set via `client_options` that takes a + single key-value pair that defines the endpoint, i.e. 
+ `client_options={"api_endpoint": REGIONAL_ENDPOINT}` """ _batch_class = thread.Batch diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index f2e8faa4fcf5..7dbb425caf66 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -53,6 +53,9 @@ class Client(object): :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. Generally, you should not need to set additional keyword arguments. + Regional endpoints can be set via `client_options` that takes a + single key-value pair that defines the endpoint, i.e. + `client_options={"api_endpoint": REGIONAL_ENDPOINT}`. """ def __init__(self, **kwargs): From 25cd2bdce04fc9d0c1827836198d9984dbe37616 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 29 Jul 2019 12:46:22 +0200 Subject: [PATCH 0388/1197] PubSub: Deprecate several FlowControl settings and things in Message class (#8796) * Deprecate several PubSub FlowControl settings * Deprecate things in PubSub message.Message class * Change deprecation release version to 0.44.0 --- .../cloud/pubsub_v1/subscriber/message.py | 12 +++++ .../google/cloud/pubsub_v1/types.py | 45 ++++++++++++++----- .../unit/pubsub_v1/subscriber/test_message.py | 5 ++- 3 files changed, 50 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index b62a28ff6cb6..db8e650db06c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -18,6 +18,7 @@ import json import math import time +import warnings from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -88,6 +89,10 @@ 
def __init__(self, message, ack_id, request_queue, autolease=True): autolease (bool): An optional flag determining whether a new Message instance should automatically lease itself upon creation. Defaults to :data:`True`. + + .. note:: + .. deprecated:: 0.44.0 + Parameter will be removed in future versions. """ self._message = message self._ack_id = ack_id @@ -216,7 +221,14 @@ def lease(self): never need to call it manually, unless the :class:`~.pubsub_v1.subscriber.message.Message` instance was created with ``autolease=False``. + + .. deprecated:: 0.44.0 + Will be removed in future versions. """ + warnings.warn( + "lease() is deprecated since 0.44.0, and will be removed in future versions.", + category=DeprecationWarning, + ) self._request_queue.put( requests.LeaseRequest(ack_id=self._ack_id, byte_size=self.size) ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 58b880fb125b..9b0d3fef3f64 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import collections import sys +import textwrap from google.api import http_pb2 from google.iam.v1 import iam_policy_pb2 @@ -100,19 +101,41 @@ "The maximum number of received - but not yet processed - messages before " "pausing the message stream." ) - FlowControl.resume_threshold.__doc__ = ( - "The relative threshold of the ``max_bytes`` and ``max_messages`` limits " - "below which to resume the message stream. Must be a positive number not " - "greater than ``1.0``." + FlowControl.resume_threshold.__doc__ = textwrap.dedent( + """ + The relative threshold of the ``max_bytes`` and ``max_messages`` limits + below which to resume the message stream. Must be a positive number not + greater than ``1.0``. + + .. note:: + .. 
deprecated:: 0.44.0 + Will be removed in future versions.""" ) - FlowControl.max_requests.__doc__ = "Currently not in use." - FlowControl.max_request_batch_size.__doc__ = ( - "The maximum number of requests scheduled by callbacks to process and " - "dispatch at a time." + FlowControl.max_requests.__doc__ = textwrap.dedent( + """ + Currently not in use. + + .. note:: + .. deprecated:: 0.44.0 + Will be removed in future versions.""" + ) + FlowControl.max_request_batch_size.__doc__ = textwrap.dedent( + """ + The maximum number of requests scheduled by callbacks to process and + dispatch at a time. + + .. note:: + .. deprecated:: 0.44.0 + Will be removed in future versions.""" ) - FlowControl.max_request_batch_latency.__doc__ = ( - "The maximum amount of time in seconds to wait for additional request " - "items before processing the next batch of requests." + FlowControl.max_request_batch_latency.__doc__ = textwrap.dedent( + """ + The maximum amount of time in seconds to wait for additional request + items before processing the next batch of requests. + + .. note:: + .. 
deprecated:: 0.44.0 + Will be removed in future versions.""" ) FlowControl.max_lease_duration.__doc__ = ( "The maximum amount of time in seconds to hold a lease on a message " diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 8c22992f7a2b..0a7d7fb8c391 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -16,6 +16,7 @@ import time import mock +import pytest import pytz from six.moves import queue from google.protobuf import timestamp_pb2 @@ -135,7 +136,9 @@ def test_drop(): def test_lease(): msg = create_message(b"foo", ack_id="bogus_ack_id") - with mock.patch.object(msg._request_queue, "put") as put: + + pytest_warns = pytest.warns(DeprecationWarning) + with pytest_warns, mock.patch.object(msg._request_queue, "put") as put: msg.lease() put.assert_called_once_with( requests.LeaseRequest(ack_id="bogus_ack_id", byte_size=30) From 548aafa6a029df7cfca77b13fc8faea6b8d6d7cf Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 29 Jul 2019 21:13:04 +0200 Subject: [PATCH 0389/1197] Release 0.44.0 (#8804) --- packages/google-cloud-pubsub/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a354f098bf65..9ade0e92c2bd 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,19 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.44.0 + +07-29-2019 04:28 PDT + + +### Implementation Changes + +- PubSub: Deprecate several FlowControl settings and things in Message class ([#8796](https://github.com/googleapis/google-cloud-python/pull/8796)) + +### Documentation + +- Pub/Sub: document 
regional endpoint ([#8789](https://github.com/googleapis/google-cloud-python/pull/8789)) + ## 0.43.0 07-24-2019 17:13 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6953b8e13f2c..603d98adbff4 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "0.43.0" +version = "0.44.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From fb271e8330d93f970a0480d670d8bf7c13c29684 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 29 Jul 2019 12:53:23 -0700 Subject: [PATCH 0390/1197] Update intersphinx mapping for requests. (#8805) --- packages/google-cloud-pubsub/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 9611fcb59493..af234ffe721b 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -341,7 +341,7 @@ None, ), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://docs.python-requests.org/en/master/", None), + "requests": ("https://2.python-requests.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } From 900c69410ba24103650d37cae75267b7f6a5b1ee Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 30 Jul 2019 11:40:19 -0700 Subject: [PATCH 0391/1197] Use double backticks for ReST correctness. 
(#8829) --- .../google/cloud/pubsub_v1/publisher/client.py | 4 ++-- .../google/cloud/pubsub_v1/subscriber/client.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 006bf509d24b..d0f42e55c50e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -58,9 +58,9 @@ class Client(object): Before being passed along to the GAPIC constructor, a channel may be added if ``credentials`` are passed explicitly or if the Pub / Sub emulator is detected as running. - Regional endpoints can be set via `client_options` that takes a + Regional endpoints can be set via ``client_options`` that takes a single key-value pair that defines the endpoint, i.e. - `client_options={"api_endpoint": REGIONAL_ENDPOINT}` + ``client_options={"api_endpoint": REGIONAL_ENDPOINT}``. """ _batch_class = thread.Batch diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 7dbb425caf66..8c7ce59846c4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -53,9 +53,9 @@ class Client(object): :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. Generally, you should not need to set additional keyword arguments. - Regional endpoints can be set via `client_options` that takes a + Regional endpoints can be set via ``client_options`` that takes a single key-value pair that defines the endpoint, i.e. - `client_options={"api_endpoint": REGIONAL_ENDPOINT}`. + ``client_options={"api_endpoint": REGIONAL_ENDPOINT}``. 
""" def __init__(self, **kwargs): From 6d93b7ac0bda5d1a90cd165467b94e5233857171 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 31 Jul 2019 11:00:58 +0200 Subject: [PATCH 0392/1197] PubSub: Remove deprecated methods and settings (#8836) * Remove Message lease() method and autolease param These two have been deprecated in 0.44.0 and it's time to remove them. * Remove FlowControl.resume_threshold setting * Remove FlowControl.max_requests setting * Remove FlowControl.max_request_batch_size setting * Remove FlowControl.max_request_batch_latency * Promote hardcoded values to module constants --- .../subscriber/_protocol/dispatcher.py | 13 +++-- .../_protocol/streaming_pull_manager.py | 18 ++++--- .../cloud/pubsub_v1/subscriber/message.py | 35 +----------- .../google/cloud/pubsub_v1/types.py | 52 +----------------- .../unit/pubsub_v1/subscriber/test_message.py | 54 ++++++------------- 5 files changed, 37 insertions(+), 135 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index e41341afab3d..2b2574829306 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -27,6 +27,14 @@ _CALLBACK_WORKER_NAME = "Thread-CallbackRequestDispatcher" +_MAX_BATCH_SIZE = 100 +"""The maximum number of requests to process and dispatch at a time.""" + +_MAX_BATCH_LATENCY = 0.01 +"""The maximum amount of time in seconds to wait for additional request items +before processing the next batch of requests.""" + + class Dispatcher(object): def __init__(self, manager, queue): self._manager = manager @@ -42,12 +50,11 @@ def start(self): if self._thread is not None: raise ValueError("Dispatcher is already running.") - flow_control = self._manager.flow_control worker = helper_threads.QueueCallbackWorker( 
self._queue, self.dispatch_callback, - max_items=flow_control.max_request_batch_size, - max_latency=flow_control.max_request_batch_latency, + max_items=_MAX_BATCH_SIZE, + max_latency=_MAX_BATCH_LATENCY, ) # Create and start the helper thread. thread = threading.Thread(name=_CALLBACK_WORKER_NAME, target=worker) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 68ab452fc564..af6883fd067e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -44,6 +44,11 @@ exceptions.GatewayTimeout, exceptions.Aborted, ) +_MAX_LOAD = 1.0 +"""The load threshold above which to pause the incoming message stream.""" + +_RESUME_THRESHOLD = 0.8 +"""The load threshold below which to resume the incoming message stream.""" def _maybe_wrap_exception(exception): @@ -223,7 +228,7 @@ def add_close_callback(self, callback): def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" with self._pause_resume_lock: - if self.load >= 1.0: + if self.load >= _MAX_LOAD: if self._consumer is not None and not self._consumer.is_paused: _LOGGER.debug( "Message backlog over load at %.2f, pausing.", self.load @@ -252,7 +257,7 @@ def maybe_resume_consumer(self): # currently on hold, if the current load allows for it. self._maybe_release_messages() - if self.load < self.flow_control.resume_threshold: + if self.load < _RESUME_THRESHOLD: _LOGGER.debug("Current load is %.2f, resuming consumer.", self.load) self._consumer.resume() else: @@ -271,7 +276,7 @@ def _maybe_release_messages(self): The method assumes the caller has acquired the ``_pause_resume_lock``. 
""" while True: - if self.load >= 1.0: + if self.load >= _MAX_LOAD: break # already overloaded try: @@ -518,12 +523,9 @@ def _on_response(self, response): for received_message in response.received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, - received_message.ack_id, - self._scheduler.queue, - autolease=False, + received_message.message, received_message.ack_id, self._scheduler.queue ) - if self.load < 1.0: + if self.load < _MAX_LOAD: req = requests.LeaseRequest( ack_id=message.ack_id, byte_size=message.size ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index db8e650db06c..41bc42755ad7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -18,7 +18,6 @@ import json import math import time -import warnings from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -71,7 +70,7 @@ class Message(object): published. """ - def __init__(self, message, ack_id, request_queue, autolease=True): + def __init__(self, message, ack_id, request_queue): """Construct the Message. .. note:: @@ -86,13 +85,6 @@ def __init__(self, message, ack_id, request_queue, autolease=True): request_queue (queue.Queue): A queue provided by the policy that can accept requests; the policy is responsible for handling those requests. - autolease (bool): An optional flag determining whether a new Message - instance should automatically lease itself upon creation. - Defaults to :data:`True`. - - .. note:: - .. deprecated:: 0.44.0 - Parameter will be removed in future versions. """ self._message = message self._ack_id = ack_id @@ -104,11 +96,6 @@ def __init__(self, message, ack_id, request_queue, autolease=True): # the default lease deadline. 
self._received_timestamp = time.time() - # The policy should lease this message, telling PubSub that it has - # it until it is acked or otherwise dropped. - if autolease: - self.lease() - def __repr__(self): # Get an abbreviated version of the data. abbv_data = self._message.data @@ -213,26 +200,6 @@ def drop(self): requests.DropRequest(ack_id=self._ack_id, byte_size=self.size) ) - def lease(self): - """Inform the policy to lease this message continually. - - .. note:: - By default this method is called by the constructor, and you should - never need to call it manually, unless the - :class:`~.pubsub_v1.subscriber.message.Message` instance was - created with ``autolease=False``. - - .. deprecated:: 0.44.0 - Will be removed in future versions. - """ - warnings.warn( - "lease() is deprecated since 0.44.0, and will be removed in future versions.", - category=DeprecationWarning, - ) - self._request_queue.put( - requests.LeaseRequest(ack_id=self._ack_id, byte_size=self.size) - ) - def modify_ack_deadline(self, seconds): """Resets the deadline for acknowledgement. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 9b0d3fef3f64..733d3bf97ac0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -15,7 +15,6 @@ from __future__ import absolute_import import collections import sys -import textwrap from google.api import http_pb2 from google.iam.v1 import iam_policy_pb2 @@ -67,24 +66,11 @@ # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. 
FlowControl = collections.namedtuple( - "FlowControl", - [ - "max_bytes", - "max_messages", - "resume_threshold", - "max_requests", - "max_request_batch_size", - "max_request_batch_latency", - "max_lease_duration", - ], + "FlowControl", ["max_bytes", "max_messages", "max_lease_duration"] ) FlowControl.__new__.__defaults__ = ( 100 * 1024 * 1024, # max_bytes: 100mb 100, # max_messages: 100 - 0.8, # resume_threshold: 80% - 100, # max_requests: 100 - 100, # max_request_batch_size: 100 - 0.01, # max_request_batch_latency: 0.01s 2 * 60 * 60, # max_lease_duration: 2 hours. ) @@ -101,42 +87,6 @@ "The maximum number of received - but not yet processed - messages before " "pausing the message stream." ) - FlowControl.resume_threshold.__doc__ = textwrap.dedent( - """ - The relative threshold of the ``max_bytes`` and ``max_messages`` limits - below which to resume the message stream. Must be a positive number not - greater than ``1.0``. - - .. note:: - .. deprecated:: 0.44.0 - Will be removed in future versions.""" - ) - FlowControl.max_requests.__doc__ = textwrap.dedent( - """ - Currently not in use. - - .. note:: - .. deprecated:: 0.44.0 - Will be removed in future versions.""" - ) - FlowControl.max_request_batch_size.__doc__ = textwrap.dedent( - """ - The maximum number of requests scheduled by callbacks to process and - dispatch at a time. - - .. note:: - .. deprecated:: 0.44.0 - Will be removed in future versions.""" - ) - FlowControl.max_request_batch_latency.__doc__ = textwrap.dedent( - """ - The maximum amount of time in seconds to wait for additional request - items before processing the next batch of requests. - - .. note:: - .. deprecated:: 0.44.0 - Will be removed in future versions.""" - ) FlowControl.max_lease_duration.__doc__ = ( "The maximum amount of time in seconds to hold a lease on a message " "before dropping it from the lease management." 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 0a7d7fb8c391..4bb3329a29f0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -16,7 +16,6 @@ import time import mock -import pytest import pytz from six.moves import queue from google.protobuf import timestamp_pb2 @@ -34,28 +33,22 @@ PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id="ACKID", autolease=True, **attrs): - with mock.patch.object(message.Message, "lease") as lease: - with mock.patch.object(time, "time") as time_: - time_.return_value = RECEIVED_SECONDS - msg = message.Message( - types.PubsubMessage( - attributes=attrs, - data=data, - message_id="message_id", - publish_time=timestamp_pb2.Timestamp( - seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 - ), +def create_message(data, ack_id="ACKID", **attrs): + with mock.patch.object(time, "time") as time_: + time_.return_value = RECEIVED_SECONDS + msg = message.Message( + types.PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 ), - ack_id, - queue.Queue(), - autolease=autolease, - ) - if autolease: - lease.assert_called_once_with() - else: - lease.assert_not_called() - return msg + ), + ack_id, + queue.Queue(), + ) + return msg def test_attributes(): @@ -84,11 +77,6 @@ def test_publish_time(): assert msg.publish_time == PUBLISHED -def test_disable_autolease_on_creation(): - # the create_message() helper does the actual assertion - create_message(b"foo", autolease=False) - - def check_call_types(mock, *args, **kwargs): """Checks a mock's call types. 
@@ -134,18 +122,6 @@ def test_drop(): check_call_types(put, requests.DropRequest) -def test_lease(): - msg = create_message(b"foo", ack_id="bogus_ack_id") - - pytest_warns = pytest.warns(DeprecationWarning) - with pytest_warns, mock.patch.object(msg._request_queue, "put") as put: - msg.lease() - put.assert_called_once_with( - requests.LeaseRequest(ack_id="bogus_ack_id", byte_size=30) - ) - check_call_types(put, requests.LeaseRequest) - - def test_modify_ack_deadline(): msg = create_message(b"foo", ack_id="bogus_ack_id") with mock.patch.object(msg._request_queue, "put") as put: From d7749a5d675afd3340d084ab20743f75dbd82f6a Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 31 Jul 2019 19:34:47 +0200 Subject: [PATCH 0393/1197] Release pubsub 0.45.0 (#8839) * Release 0.45.0 * Remove redundant PubSub prefix from changelog item --- packages/google-cloud-pubsub/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 9ade0e92c2bd..05df7382e81e 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.45.0 + +07-31-2019 02:03 PDT + + +### Implementation Changes + +- Remove deprecated methods and settings ([#8836](https://github.com/googleapis/google-cloud-python/pull/8836)) + + +### Documentation + +- Use double backticks for ReST correctness. ([#8829](https://github.com/googleapis/google-cloud-python/pull/8829)) +- Update intersphinx mapping for requests. 
([#8805](https://github.com/googleapis/google-cloud-python/pull/8805)) + ## 0.44.0 07-29-2019 04:28 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 603d98adbff4..ab1d963174a5 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "0.44.0" +version = "0.45.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From e283952de2e3ddd0bbf0a9ba69f3e5c779c3da74 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 6 Aug 2019 10:57:42 -0700 Subject: [PATCH 0394/1197] Remove send/recv msg size limit, update docstrings (via synth). (#8964) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 54 ++-- .../pubsub_v1/gapic/subscriber_client.py | 72 ++--- .../transports/publisher_grpc_transport.py | 9 +- .../transports/subscriber_grpc_transport.py | 9 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 47 ++-- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 247 +++++++++--------- packages/google-cloud-pubsub/synth.metadata | 10 +- 7 files changed, 227 insertions(+), 221 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 2ecfe52dbe7a..6ea062166878 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -268,12 +268,9 @@ def create_topic( must not start with `"goog"`. labels (dict[str -> str]): See Creating and managing labels. - message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining how messages published to the topic may be stored. 
It - is determined when the topic is created based on the policy configured at - the project level. It must not be set by the caller in the request to - CreateTopic or to UpdateTopic. This field will be populated in the - responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the - response, then no constraints are in effect. + message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining the set of Google Cloud Platform regions where messages + published to the topic may be stored. If not present, then no constraints + are in effect. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` @@ -281,13 +278,10 @@ def create_topic( access to messages published on this topic. The expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. EXPERIMENTAL: This - feature is part of a closed alpha release. This API might be changed in - backward-incompatible ways and is not recommended for production use. It - is not subject to any SLA or deprecation policy. + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -378,8 +372,8 @@ def update_topic( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -457,8 +451,8 @@ def publish( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -527,8 +521,8 @@ def get_topic( topic (str): The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -615,8 +609,8 @@ def list_topics( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
@@ -717,8 +711,8 @@ def list_topic_subscriptions( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -808,8 +802,8 @@ def delete_topic( topic (str): Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -887,8 +881,8 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -965,8 +959,8 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1051,8 +1045,8 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 48843d4d5cd0..dd5ef7fbe659 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -341,8 +341,8 @@ def create_subscription( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -421,8 +421,8 @@ def get_subscription( subscription (str): The name of the subscription to get. 
Format is ``projects/{project}/subscriptions/{sub}``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -504,8 +504,8 @@ def update_subscription( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -594,8 +594,8 @@ def list_subscriptions( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -685,8 +685,8 @@ def delete_subscription( subscription (str): The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. 
If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -773,8 +773,8 @@ def modify_ack_deadline( minimum deadline you can specify is 0 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -857,8 +857,8 @@ def acknowledge( returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -939,8 +939,8 @@ def pull( Otherwise, the system may wait (for a bounded amount of time) until at least one message is available, rather than returning no messages. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
@@ -1026,8 +1026,8 @@ def streaming_pull( requests (iterator[dict|google.cloud.pubsub_v1.proto.pubsub_pb2.StreamingPullRequest]): The input objects. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.StreamingPullRequest` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1101,8 +1101,8 @@ def modify_push_config( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1193,8 +1193,8 @@ def list_snapshots( streaming is performed per-page, this determines the maximum number of resources in a page. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1306,8 +1306,8 @@ def create_snapshot( labels (dict[str -> str]): See Creating and managing labels. 
retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1396,8 +1396,8 @@ def update_snapshot( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1477,8 +1477,8 @@ def delete_snapshot( snapshot (str): The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1568,8 +1568,8 @@ def seek( of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1654,8 +1654,8 @@ def set_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Policy` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1732,8 +1732,8 @@ def get_iam_policy( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. @@ -1818,8 +1818,8 @@ def test_iam_permissions( information see `IAM Overview `__. retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will not - be retried. + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 27e7d7527158..aedcc8c465e1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -62,7 +62,14 @@ def __init__( # Create the channel. if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) self._channel = channel diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index ca784e6d25f6..fa8ac9f29329 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -62,7 +62,14 @@ def __init__( # Create the channel. 
if channel is None: - channel = self.create_channel(address=address, credentials=credentials) + channel = self.create_channel( + address=address, + credentials=credentials, + options={ + "grpc.max_send_message_length": -1, + "grpc.max_receive_message_length": -1, + }.items(), + ) self._channel = channel diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 7cea47b1e539..ea0d4c7e53e5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -18,7 +18,7 @@ syntax = "proto3"; package google.pubsub.v1; import "google/api/annotations.proto"; -import "google/api/resource.proto"; +import "google/api/client.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; @@ -36,6 +36,11 @@ option ruby_package = "Google::Cloud::PubSub::V1"; // The service that an application uses to manipulate topics, and to send // messages to a topic. service Publisher { + option (google.api.default_host) = "pubsub.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/pubsub"; + // Creates the given topic with the given name. See the // // resource name rules. @@ -112,12 +117,11 @@ service Publisher { } message MessageStoragePolicy { - // The list of GCP region IDs where messages that are published to the topic + // A list of IDs of GCP regions where messages that are published to the topic // may be persisted in storage. Messages published by publishers running in // non-allowed GCP regions (or running outside of GCP altogether) will be - // routed for storage in one of the allowed regions. 
An empty list indicates a - // misconfiguration at the project or organization level, which will result in - // all Publish operations failing. + // routed for storage in one of the allowed regions. An empty list means that + // no regions are allowed, and is not a valid configuration. repeated string allowed_persistence_regions = 1; } @@ -135,21 +139,15 @@ message Topic { // managing labels. map labels = 2; - // Policy constraining how messages published to the topic may be stored. It - // is determined when the topic is created based on the policy configured at - // the project level. It must not be set by the caller in the request to - // CreateTopic or to UpdateTopic. This field will be populated in the - // responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the - // response, then no constraints are in effect. + // Policy constraining the set of Google Cloud Platform regions where messages + // published to the topic may be stored. If not present, then no constraints + // are in effect. MessageStoragePolicy message_storage_policy = 3; // The resource name of the Cloud KMS CryptoKey to be used to protect access // to messages published on this topic. // // The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*`. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. string kms_key_name = 5; } @@ -316,6 +314,11 @@ message DeleteTopicRequest { // consume messages from a subscription via the `Pull` method or by // establishing a bi-directional stream using the `StreamingPull` method. service Subscriber { + option (google.api.default_host) = "pubsub.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/pubsub"; + // Creates a subscription to a given topic. 
See the // // resource name rules. @@ -646,27 +649,27 @@ message PushConfig { // For example, a Webhook endpoint might use "https://example.com/push". string push_endpoint = 1; - // Endpoint configuration attributes. - // - // Every endpoint has a set of API supported attributes that can be used to - // control different aspects of the message delivery. + // Endpoint configuration attributes that can be used to control different + // aspects of the message delivery. // - // The currently supported attribute is `x-goog-version`, which you can + // The only currently supported attribute is `x-goog-version`, which you can // use to change the format of the pushed message. This attribute // indicates the version of the data expected by the endpoint. This // controls the shape of the pushed message (i.e., its fields and metadata). - // The endpoint version is based on the version of the Pub/Sub API. // // If not present during the `CreateSubscription` call, it will default to - // the version of the API used to make such call. If not present during a + // the version of the Pub/Sub API used to make such call. If not present in a // `ModifyPushConfig` call, its value will not be changed. `GetSubscription` // calls will always return a valid version, even if the subscription was // created without this attribute. // - // The possible values for this attribute are: + // The only supported values for the `x-goog-version` attribute are: // // * `v1beta1`: uses the push format defined in the v1beta1 Pub/Sub API. // * `v1` or `v1beta2`: uses the push format defined in the v1 Pub/Sub API. + // + // For example: + //
attributes { "x-goog-version": "v1" } 
map attributes = 2; // An authentication method used by push endpoints to verify the source of diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 62ea3b6d88ee..2aba365a6da4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -16,7 +16,7 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 @@ -31,11 +31,11 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xa5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 
\x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 
\x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 
\x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xbf\x08\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}2\xf9\x11\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x
1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapsh
ots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*B\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 
\x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xa5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 
\x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.p
ubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*
}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, @@ -78,8 +78,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=248, - serialized_end=307, + serialized_start=246, + serialized_end=305, ) @@ -135,8 +135,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=480, - serialized_end=525, + serialized_start=478, + serialized_end=523, ) _TOPIC = _descriptor.Descriptor( @@ -227,8 +227,8 @@ syntax="proto3", 
extension_ranges=[], oneofs=[], - serialized_start=310, - serialized_end=525, + serialized_start=308, + serialized_end=523, ) @@ -284,8 +284,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=720, - serialized_end=769, + serialized_start=718, + serialized_end=767, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -394,8 +394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=528, - serialized_end=769, + serialized_start=526, + serialized_end=767, ) @@ -433,8 +433,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=771, - serialized_end=803, + serialized_start=769, + serialized_end=801, ) @@ -490,8 +490,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=805, - serialized_end=914, + serialized_start=803, + serialized_end=912, ) @@ -547,8 +547,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=916, - serialized_end=998, + serialized_start=914, + serialized_end=996, ) @@ -586,8 +586,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1000, - serialized_end=1038, + serialized_start=998, + serialized_end=1036, ) @@ -661,8 +661,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1040, - serialized_end=1115, + serialized_start=1038, + serialized_end=1113, ) @@ -718,8 +718,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1117, - serialized_end=1203, + serialized_start=1115, + serialized_end=1201, ) @@ -793,8 +793,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1205, - serialized_end=1290, + serialized_start=1203, + serialized_end=1288, ) @@ -850,8 +850,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1292, - serialized_end=1372, + serialized_start=1290, + serialized_end=1370, ) @@ -925,8 +925,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1374, - serialized_end=1455, + serialized_start=1372, + serialized_end=1453, ) @@ -982,8 +982,8 
@@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1457, - serialized_end=1529, + serialized_start=1455, + serialized_end=1527, ) @@ -1021,8 +1021,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1531, - serialized_end=1566, + serialized_start=1529, + serialized_end=1564, ) @@ -1078,8 +1078,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=480, - serialized_end=525, + serialized_start=478, + serialized_end=523, ) _SUBSCRIPTION = _descriptor.Descriptor( @@ -1260,8 +1260,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1569, - serialized_end=1990, + serialized_start=1567, + serialized_end=1988, ) @@ -1299,8 +1299,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1992, - serialized_end=2050, + serialized_start=1990, + serialized_end=2048, ) @@ -1356,8 +1356,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2218, - serialized_end=2278, + serialized_start=2216, + serialized_end=2276, ) _PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( @@ -1412,8 +1412,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=720, - serialized_end=769, + serialized_start=718, + serialized_end=767, ) _PUSHCONFIG = _descriptor.Descriptor( @@ -1494,8 +1494,8 @@ fields=[], ) ], - serialized_start=2053, - serialized_end=2354, + serialized_start=2051, + serialized_end=2352, ) @@ -1551,8 +1551,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2356, - serialized_end=2439, + serialized_start=2354, + serialized_end=2437, ) @@ -1590,8 +1590,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2441, - serialized_end=2487, + serialized_start=2439, + serialized_end=2485, ) @@ -1647,8 +1647,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2490, - serialized_end=2620, + serialized_start=2488, + serialized_end=2618, ) @@ -1722,8 +1722,8 @@ syntax="proto3", extension_ranges=[], 
oneofs=[], - serialized_start=2622, - serialized_end=2704, + serialized_start=2620, + serialized_end=2702, ) @@ -1779,8 +1779,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2706, - serialized_end=2813, + serialized_start=2704, + serialized_end=2811, ) @@ -1818,8 +1818,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2815, - serialized_end=2864, + serialized_start=2813, + serialized_end=2862, ) @@ -1875,8 +1875,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2866, - serialized_end=2964, + serialized_start=2864, + serialized_end=2962, ) @@ -1950,8 +1950,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2966, - serialized_end=3051, + serialized_start=2964, + serialized_end=3049, ) @@ -1989,8 +1989,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3053, - serialized_end=3129, + serialized_start=3051, + serialized_end=3127, ) @@ -2064,8 +2064,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3131, - serialized_end=3226, + serialized_start=3129, + serialized_end=3224, ) @@ -2121,8 +2121,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3228, - serialized_end=3287, + serialized_start=3226, + serialized_end=3285, ) @@ -2232,8 +2232,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3290, - serialized_end=3454, + serialized_start=3288, + serialized_end=3452, ) @@ -2271,8 +2271,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3456, - serialized_end=3541, + serialized_start=3454, + serialized_end=3539, ) @@ -2328,8 +2328,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=480, - serialized_end=525, + serialized_start=478, + serialized_end=523, ) _CREATESNAPSHOTREQUEST = _descriptor.Descriptor( @@ -2402,8 +2402,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3544, - serialized_end=3719, + serialized_start=3542, + 
serialized_end=3717, ) @@ -2459,8 +2459,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3721, - serialized_end=3839, + serialized_start=3719, + serialized_end=3837, ) @@ -2516,8 +2516,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=480, - serialized_end=525, + serialized_start=478, + serialized_end=523, ) _SNAPSHOT = _descriptor.Descriptor( @@ -2608,8 +2608,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3842, - serialized_end=4033, + serialized_start=3840, + serialized_end=4031, ) @@ -2647,8 +2647,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4035, - serialized_end=4073, + serialized_start=4033, + serialized_end=4071, ) @@ -2722,8 +2722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4075, - serialized_end=4153, + serialized_start=4073, + serialized_end=4151, ) @@ -2779,8 +2779,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4155, - serialized_end=4250, + serialized_start=4153, + serialized_end=4248, ) @@ -2818,8 +2818,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4252, - serialized_end=4293, + serialized_start=4250, + serialized_end=4291, ) @@ -2901,8 +2901,8 @@ fields=[], ) ], - serialized_start=4295, - serialized_end=4404, + serialized_start=4293, + serialized_end=4402, ) @@ -2921,8 +2921,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4406, - serialized_end=4420, + serialized_start=4404, + serialized_end=4418, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3065,13 +3065,12 @@ Attributes: allowed_persistence_regions: - The list of GCP region IDs where messages that are published + A list of IDs of GCP regions where messages that are published to the topic may be persisted in storage. Messages published by publishers running in non-allowed GCP regions (or running outside of GCP altogether) will be routed for storage in one - of the allowed regions. 
An empty list indicates a - misconfiguration at the project or organization level, which - will result in all Publish operations failing. + of the allowed regions. An empty list means that no regions + are allowed, and is not a valid configuration. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) ), @@ -3108,22 +3107,14 @@ labels: See Creating and managing labels. message_storage_policy: - Policy constraining how messages published to the topic may be - stored. It is determined when the topic is created based on - the policy configured at the project level. It must not be set - by the caller in the request to CreateTopic or to UpdateTopic. - This field will be populated in the responses for GetTopic, - CreateTopic, and UpdateTopic: if not present in the response, - then no constraints are in effect. + Policy constraining the set of Google Cloud Platform regions + where messages published to the topic may be stored. If not + present, then no constraints are in effect. kms_key_name: The resource name of the Cloud KMS CryptoKey to be used to protect access to messages published on this topic. The expected format is ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - EXPERIMENTAL: This feature is part of a closed alpha release. - This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any - SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) ), @@ -3610,24 +3601,24 @@ pushed. For example, a Webhook endpoint might use "https://example.com/push". attributes: - Endpoint configuration attributes. Every endpoint has a set - of API supported attributes that can be used to control - different aspects of the message delivery. The currently + Endpoint configuration attributes that can be used to control + different aspects of the message delivery. 
The only currently supported attribute is ``x-goog-version``, which you can use to change the format of the pushed message. This attribute indicates the version of the data expected by the endpoint. This controls the shape of the pushed message (i.e., its - fields and metadata). The endpoint version is based on the - version of the Pub/Sub API. If not present during the + fields and metadata). If not present during the ``CreateSubscription`` call, it will default to the version of - the API used to make such call. If not present during a + the Pub/Sub API used to make such call. If not present in a ``ModifyPushConfig`` call, its value will not be changed. ``GetSubscription`` calls will always return a valid version, even if the subscription was created without this attribute. - The possible values for this attribute are: - ``v1beta1``: - uses the push format defined in the v1beta1 Pub/Sub API. - - ``v1`` or ``v1beta2``: uses the push format defined in the v1 - Pub/Sub API. + The only supported values for the ``x-goog-version`` attribute + are: - ``v1beta1``: uses the push format defined in the + v1beta1 Pub/Sub API. - ``v1`` or ``v1beta2``: uses the push + format defined in the v1 Pub/Sub API. For example: .. + raw:: html
attributes { "x-goog-version": "v1"
+          } 
authentication_method: An authentication method used by push endpoints to verify the source of push requests. This can be used with push endpoints @@ -4244,9 +4235,11 @@ full_name="google.pubsub.v1.Publisher", file=DESCRIPTOR, index=0, - serialized_options=None, - serialized_start=4423, - serialized_end=5510, + serialized_options=_b( + "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" + ), + serialized_start=4421, + serialized_end=5622, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4348,9 +4341,11 @@ full_name="google.pubsub.v1.Subscriber", file=DESCRIPTOR, index=1, - serialized_options=None, - serialized_start=5513, - serialized_end=7810, + serialized_options=_b( + "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" + ), + serialized_start=5625, + serialized_end=8036, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index d1a0b007b1a1..ac0b324745e3 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-07-16T12:29:08.120680Z", + "updateTime": "2019-08-06T12:34:22.031743Z", "sources": [ { "generator": { "name": "artman", - "version": "0.30.0", - "dockerImage": "googleapis/artman@sha256:a44d9fb6fe826ca0ea7d6f7be23c596346bed82ee513a0043f3c068279717439" + "version": "0.32.1", + "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "2c7bc0a10225cc8e74476ce1131ebf670bed6169", - "internalRef": "258244875" + "sha": "e699b0cba64ffddfae39633417180f1f65875896", + "internalRef": "261759677" } }, { From 2e0b1dfa91e8ed45775899c157a2e5e84f85239f Mon 
Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 16 Aug 2019 13:25:32 -0700 Subject: [PATCH 0395/1197] Remove compatability badges from READMEs. (#9035) --- packages/google-cloud-pubsub/README.rst | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index f31894deac5b..3014e1375686 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Pub / Sub ======================================== -|beta| |pypi| |versions| |compat_check_pypi| |compat_check_github| +|beta| |pypi| |versions| `Google Cloud Pub / Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. You @@ -25,10 +25,6 @@ independently written applications. :target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ -.. |compat_check_pypi| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=google-cloud-pubsub - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=google-cloud-pubsub -.. |compat_check_github| image:: https://python-compatibility-tools.appspot.com/one_badge_image?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dpubsub - :target: https://python-compatibility-tools.appspot.com/one_badge_target?package=git%2Bgit%3A//github.com/googleapis/google-cloud-python.git%23subdirectory%3Dpubsub .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs .. 
_Client Library Documentation: https://googleapis.dev/python/pubsub/latest From 57a32fbfb174028225a12d76f4eff84a230ba66e Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Sun, 18 Aug 2019 11:48:23 -0700 Subject: [PATCH 0396/1197] Pub/Sub: update docstrings for client kwargs and fix return types uris (#9037) * Update kwargs docs * lint * Fix return object type * Put back changes in setup.cfg and noxfile.py * fix no new line * fix various return types uris --- .../cloud/pubsub_v1/gapic/publisher_client.py | 4 +- .../pubsub_v1/gapic/subscriber_client.py | 4 +- .../cloud/pubsub_v1/publisher/client.py | 59 +++++++++++++++---- .../cloud/pubsub_v1/subscriber/client.py | 35 +++++++---- packages/google-cloud-pubsub/synth.metadata | 10 ++-- packages/google-cloud-pubsub/synth.py | 12 ++++ 6 files changed, 91 insertions(+), 33 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 6ea062166878..774fda25d622 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -618,7 +618,7 @@ def list_topics( that is provided to the method. Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. + A :class:`~google.api_core.page_iterator.GRPCIterator` instance. An iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. You can also iterate over the pages of the response using its `pages` property. @@ -720,7 +720,7 @@ def list_topic_subscriptions( that is provided to the method. Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. + A :class:`~google.api_core.page_iterator.GRPCIterator` instance. An iterable of :class:`str` instances. You can also iterate over the pages of the response using its `pages` property. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index dd5ef7fbe659..094e0ce8aa23 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -603,7 +603,7 @@ def list_subscriptions( that is provided to the method. Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. + A :class:`~google.api_core.page_iterator.GRPCIterator` instance. An iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. You can also iterate over the pages of the response using its `pages` property. @@ -1202,7 +1202,7 @@ def list_snapshots( that is provided to the method. Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. + A :class:`~google.api_core.page_iterator.GRPCIterator` instance. An iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. You can also iterate over the pages of the response using its `pages` property. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index d0f42e55c50e..05a4161e889a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2019, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -53,14 +53,45 @@ class Client(object): settings for batch publishing. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying - :class:`~.gapic.pubsub.v1.publisher_client.PublisherClient`. 
- Generally, you should not need to set additional keyword arguments. - Before being passed along to the GAPIC constructor, a channel may - be added if ``credentials`` are passed explicitly or if the - Pub / Sub emulator is detected as running. - Regional endpoints can be set via ``client_options`` that takes a - single key-value pair that defines the endpoint, i.e. - ``client_options={"api_endpoint": REGIONAL_ENDPOINT}``. + :class:`~google.cloud.pubsub_v1.gapic.publisher_client.PublisherClient`. + Generally you should not need to set additional keyword + arguments. Optionally, publish retry settings can be set via + ``client_config`` where user-provided retry configurations are + applied to default retry settings. And regional endpoints can be + set via ``client_options`` that takes a single key-value pair that + defines the endpoint. + + Example: + + .. code-block:: python + + from google.cloud import pubsub_v1 + + publisher_client = pubsub_v1.PublisherClient( + # Optional + batch_settings = pubsub_v1.types.BatchSettings( + max_bytes=1024, # One kilobyte + max_latency=1, # One second + ), + + # Optional + client_config = { + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_params": { + "messaging": { + 'total_timeout_millis': 650000, # default: 600000 + } + } + } + } + }, + + # Optional + client_options = { + "api_endpoint": REGIONAL_ENDPOINT + } + ) """ _batch_class = thread.Batch @@ -117,7 +148,8 @@ def from_service_account_file(cls, filename, batch_settings=(), **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - PublisherClient: The constructed client. + A Publisher :class:`~google.cloud.pubsub_v1.publisher.client.Client` + instance that is the constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -206,9 +238,10 @@ def publish(self, topic, data, **attrs): sent as metadata. (These may be text strings or byte strings.) 
Returns: - ~google.api_core.future.Future: An object conforming to the - ``concurrent.futures.Future`` interface (but not an instance - of that class). + A :class:`~google.cloud.pubsub_v1.publisher.futures.Future` + instance that conforms to Python Standard library's + :class:`~concurrent.futures.Future` interface (but not an + instance of that class). """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 8c7ce59846c4..b255fe4767ca 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -1,4 +1,4 @@ -# Copyright 2017, Google LLC All rights reserved. +# Copyright 2019, Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,8 +14,8 @@ from __future__ import absolute_import -import pkg_resources import os +import pkg_resources import grpc @@ -50,12 +50,24 @@ class Client(object): Args: kwargs (dict): Any additional arguments provided are sent as keyword keyword arguments to the underlying - :class:`~.gapic.pubsub.v1.subscriber_client.SubscriberClient`. - Generally, you should not need to set additional keyword - arguments. - Regional endpoints can be set via ``client_options`` that takes a - single key-value pair that defines the endpoint, i.e. - ``client_options={"api_endpoint": REGIONAL_ENDPOINT}``. + :class:`~google.cloud.pubsub_v1.gapic.subscriber_client.SubscriberClient`. + Generally you should not need to set additional keyword + arguments. Optionally, regional endpoints can be set via + ``client_options`` that takes a single key-value pair that + defines the endpoint. + + Example: + + .. 
code-block:: python + + from google.cloud import pubsub_v1 + + subscriber_client = pubsub_v1.SubscriberClient( + # Optional + client_options = { + "api_endpoint": REGIONAL_ENDPOINT + } + ) """ def __init__(self, **kwargs): @@ -105,7 +117,8 @@ def from_service_account_file(cls, filename, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - PublisherClient: The constructed client. + A Subscriber :class:`~google.cloud.pubsub_v1.subscriber.client.Client` + instance that is the constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -201,8 +214,8 @@ def callback(message): how callbacks are executed concurrently. Returns: - google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture: A - Future object that can be used to manage the background stream. + A :class:`~google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture` + instance that can be used to manage the background stream. 
""" flow_control = types.FlowControl(*flow_control) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index ac0b324745e3..798a0aef3a08 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-06T12:34:22.031743Z", + "updateTime": "2019-08-16T22:21:02.392751Z", "sources": [ { "generator": { "name": "artman", - "version": "0.32.1", - "dockerImage": "googleapis/artman@sha256:a684d40ba9a4e15946f5f2ca6b4bd9fe301192f522e9de4fff622118775f309b" + "version": "0.33.0", + "dockerImage": "googleapis/artman@sha256:c6231efb525569736226b1f7af7565dbc84248efafb3692a5bb1d2d8a7975d53" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "e699b0cba64ffddfae39633417180f1f65875896", - "internalRef": "261759677" + "sha": "f6cc01ff6d13bda19ed717dfde6e92c593dfc590", + "internalRef": "263831339" } }, { diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index c4601afbdb51..a65bf2bf4f56 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -167,6 +167,18 @@ def _merge_dict(d1, d2): """ ) +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + "~google.api_core.page_iterator.PageIterator", + "~google.api_core.page_iterator.GRPCIterator" +) + +s.replace( + "google/cloud/pubsub_v1/gapic/subscriber_client.py", + "~google.api_core.page_iterator.PageIterator", + "~google.api_core.page_iterator.GRPCIterator" +) + # Temporary fixup for 'grpc-google-iam-vi 0.12.4' (before generation). s.replace( "google/cloud/pubsub_v1/gapic/transports/*_grpc_transport.py", From bd0281f7ffa542370ccfa1a1b053c92c8fde7381 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 22 Aug 2019 12:06:22 -0700 Subject: [PATCH 0397/1197] [CHANGE ME] Re-generated pubsub to pick up changes in the API or client library generator. 
(#9078) --- .../pubsub_v1/gapic/subscriber_client.py | 21 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 51 +++- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 247 ++++++++++++++---- packages/google-cloud-pubsub/synth.metadata | 10 +- 4 files changed, 262 insertions(+), 67 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 094e0ce8aa23..0ecce7ac9d1c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -246,6 +246,7 @@ def create_subscription( labels=None, enable_message_ordering=None, expiration_policy=None, + dead_letter_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -340,6 +341,20 @@ def create_subscription( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` + dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages in + this subscription. If dead\_letter\_policy is not set, dead lettering is + disabled. + + The Cloud Pub/Sub service account associated with this subscriptions's + parent project (i.e., + service-{project\_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must + have permission to Acknowledge() messages on this subscription. + EXPERIMENTAL: This feature is part of a closed alpha release. This API + might be changed in backward-incompatible ways and is not recommended + for production use. It is not subject to any SLA or deprecation policy. 
+ + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.DeadLetterPolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -380,6 +395,7 @@ def create_subscription( labels=labels, enable_message_ordering=enable_message_ordering, expiration_policy=expiration_policy, + dead_letter_policy=dead_letter_policy, ) if metadata is None: metadata = [] @@ -932,8 +948,9 @@ def pull( Args: subscription (str): The subscription from which messages should be pulled. Format is ``projects/{project}/subscriptions/{sub}``. - max_messages (int): The maximum number of messages returned for this request. The Pub/Sub - system may return fewer than the number specified. + max_messages (int): The maximum number of messages to return for this request. Must be a + positive integer. The Pub/Sub system may return fewer than the number + specified. return_immediately (bool): If this field set to true, the system will respond immediately even if it there are no messages available to return in the ``Pull`` response. Otherwise, the system may wait (for a bounded amount of time) until at diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index ea0d4c7e53e5..8ee03eb17753 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -609,6 +609,52 @@ message Subscription { // *default policy* with `ttl` of 31 days will be used. The minimum allowed // value for `expiration_policy.ttl` is 1 day. ExpirationPolicy expiration_policy = 11; + + // A policy that specifies the conditions for dead lettering messages in + // this subscription. If dead_letter_policy is not set, dead lettering + // is disabled. 
+ // + // The Cloud Pub/Sub service account associated with this subscriptions's + // parent project (i.e., + // service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have + // permission to Acknowledge() messages on this subscription. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + DeadLetterPolicy dead_letter_policy = 13; +} + +// Dead lettering is done on a best effort basis. The same message might be +// dead lettered multiple times. +// +// If validation on any of the fields fails at subscription creation/updation, +// the create/update subscription request will fail. +message DeadLetterPolicy { + // The name of the topic to which dead letter messages should be published. + // Format is `projects/{project}/topics/{topic}`.The Cloud Pub/Sub service + // account associated with the enclosing subscription's parent project (i.e., + // service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have + // permission to Publish() to this topic. + // + // The operation will fail if the topic does not exist. + // Users should ensure that there is a subscription attached to this topic + // since messages published to a topic with no subscriptions are lost. + string dead_letter_topic = 1; + + // The maximum number of delivery attempts for any message. The value must be + // between 5 and 100. + // + // The number of delivery attempts is defined as 1 + (the sum of number of + // NACKs and number of times the acknowledgement deadline has been exceeded + // for the message). + // + // A NACK is any call to ModifyAckDeadline with a 0 deadline. Note that + // client libraries may automatically extend ack_deadlines. + // + // This field will be honored on a best effort basis. + // + // If this parameter is 0, a default value of 5 is used. 
+ int32 max_delivery_attempts = 2; } // A policy that specifies the conditions for resource expiration (i.e., @@ -770,8 +816,9 @@ message PullRequest { // least one message is available, rather than returning no messages. bool return_immediately = 2; - // The maximum number of messages returned for this request. The Pub/Sub - // system may return fewer than the number specified. + // The maximum number of messages to return for this request. Must be a + // positive integer. The Pub/Sub system may return fewer than the number + // specified. int32 max_messages = 3; } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 2aba365a6da4..91390dac3b82 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -31,7 +31,7 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 
\x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xa5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 
\x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.p
ubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*
}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xe5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 
\x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 
\x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 
\x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12
\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x
12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -1251,6 +1251,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="dead_letter_policy", + full_name="google.pubsub.v1.Subscription.dead_letter_policy", + index=9, + number=13, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[_SUBSCRIPTION_LABELSENTRY], @@ -1261,7 +1279,64 @@ extension_ranges=[], oneofs=[], serialized_start=1567, - serialized_end=1988, + serialized_end=2052, +) + + +_DEADLETTERPOLICY = _descriptor.Descriptor( + name="DeadLetterPolicy", + full_name="google.pubsub.v1.DeadLetterPolicy", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="dead_letter_topic", + full_name="google.pubsub.v1.DeadLetterPolicy.dead_letter_topic", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="max_delivery_attempts", + full_name="google.pubsub.v1.DeadLetterPolicy.max_delivery_attempts", + 
index=1, + number=2, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2054, + serialized_end=2130, ) @@ -1299,8 +1374,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1990, - serialized_end=2048, + serialized_start=2132, + serialized_end=2190, ) @@ -1356,8 +1431,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2216, - serialized_end=2276, + serialized_start=2358, + serialized_end=2418, ) _PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( @@ -1494,8 +1569,8 @@ fields=[], ) ], - serialized_start=2051, - serialized_end=2352, + serialized_start=2193, + serialized_end=2494, ) @@ -1551,8 +1626,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2354, - serialized_end=2437, + serialized_start=2496, + serialized_end=2579, ) @@ -1590,8 +1665,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2439, - serialized_end=2485, + serialized_start=2581, + serialized_end=2627, ) @@ -1647,8 +1722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2488, - serialized_end=2618, + serialized_start=2630, + serialized_end=2760, ) @@ -1722,8 +1797,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2620, - serialized_end=2702, + serialized_start=2762, + serialized_end=2844, ) @@ -1779,8 +1854,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2704, - serialized_end=2811, + serialized_start=2846, + serialized_end=2953, ) @@ -1818,8 +1893,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2813, - serialized_end=2862, + serialized_start=2955, + 
serialized_end=3004, ) @@ -1875,8 +1950,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2864, - serialized_end=2962, + serialized_start=3006, + serialized_end=3104, ) @@ -1950,8 +2025,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2964, - serialized_end=3049, + serialized_start=3106, + serialized_end=3191, ) @@ -1989,8 +2064,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3051, - serialized_end=3127, + serialized_start=3193, + serialized_end=3269, ) @@ -2064,8 +2139,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3129, - serialized_end=3224, + serialized_start=3271, + serialized_end=3366, ) @@ -2121,8 +2196,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3226, - serialized_end=3285, + serialized_start=3368, + serialized_end=3427, ) @@ -2232,8 +2307,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3288, - serialized_end=3452, + serialized_start=3430, + serialized_end=3594, ) @@ -2271,8 +2346,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3454, - serialized_end=3539, + serialized_start=3596, + serialized_end=3681, ) @@ -2402,8 +2477,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3542, - serialized_end=3717, + serialized_start=3684, + serialized_end=3859, ) @@ -2459,8 +2534,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3719, - serialized_end=3837, + serialized_start=3861, + serialized_end=3979, ) @@ -2608,8 +2683,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3840, - serialized_end=4031, + serialized_start=3982, + serialized_end=4173, ) @@ -2647,8 +2722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4033, - serialized_end=4071, + serialized_start=4175, + serialized_end=4213, ) @@ -2722,8 +2797,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4073, - 
serialized_end=4151, + serialized_start=4215, + serialized_end=4293, ) @@ -2779,8 +2854,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4153, - serialized_end=4248, + serialized_start=4295, + serialized_end=4390, ) @@ -2818,8 +2893,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4250, - serialized_end=4291, + serialized_start=4392, + serialized_end=4433, ) @@ -2901,8 +2976,8 @@ fields=[], ) ], - serialized_start=4293, - serialized_end=4402, + serialized_start=4435, + serialized_end=4544, ) @@ -2921,8 +2996,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4404, - serialized_end=4418, + serialized_start=4546, + serialized_end=4560, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -2948,6 +3023,7 @@ ].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _SUBSCRIPTION.fields_by_name["labels"].message_type = _SUBSCRIPTION_LABELSENTRY _SUBSCRIPTION.fields_by_name["expiration_policy"].message_type = _EXPIRATIONPOLICY +_SUBSCRIPTION.fields_by_name["dead_letter_policy"].message_type = _DEADLETTERPOLICY _EXPIRATIONPOLICY.fields_by_name[ "ttl" ].message_type = google_dot_protobuf_dot_duration__pb2._DURATION @@ -3022,6 +3098,7 @@ ] = _LISTTOPICSNAPSHOTSRESPONSE DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name["DeadLetterPolicy"] = _DEADLETTERPOLICY DESCRIPTOR.message_types_by_name["ExpirationPolicy"] = _EXPIRATIONPOLICY DESCRIPTOR.message_types_by_name["PushConfig"] = _PUSHCONFIG DESCRIPTOR.message_types_by_name["ReceivedMessage"] = _RECEIVEDMESSAGE @@ -3515,6 +3592,18 @@ If ``expiration_policy`` is not set, a *default policy* with ``ttl`` of 31 days will be used. The minimum allowed value for ``expiration_policy.ttl`` is 1 day. + dead_letter_policy: + A policy that specifies the conditions for dead lettering + messages in this subscription. 
If dead\_letter\_policy is not + set, dead lettering is disabled. The Cloud Pub/Sub service + account associated with this subscriptions's parent project + (i.e., service-{project\_number}@gcp-sa- + pubsub.iam.gserviceaccount.com) must have permission to + Acknowledge() messages on this subscription. EXPERIMENTAL: + This feature is part of a closed alpha release. This API might + be changed in backward-incompatible ways and is not + recommended for production use. It is not subject to any SLA + or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) ), @@ -3522,6 +3611,47 @@ _sym_db.RegisterMessage(Subscription) _sym_db.RegisterMessage(Subscription.LabelsEntry) +DeadLetterPolicy = _reflection.GeneratedProtocolMessageType( + "DeadLetterPolicy", + (_message.Message,), + dict( + DESCRIPTOR=_DEADLETTERPOLICY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""Dead lettering is done on a best effort basis. The same message might be + dead lettered multiple times. + + If validation on any of the fields fails at subscription + creation/updation, the create/update subscription request will fail. + + + Attributes: + dead_letter_topic: + The name of the topic to which dead letter messages should be + published. Format is ``projects/{project}/topics/{topic}``.The + Cloud Pub/Sub service account associated with the enclosing + subscription's parent project (i.e., + service-{project\_number}@gcp-sa- + pubsub.iam.gserviceaccount.com) must have permission to + Publish() to this topic. The operation will fail if the topic + does not exist. Users should ensure that there is a + subscription attached to this topic since messages published + to a topic with no subscriptions are lost. + max_delivery_attempts: + The maximum number of delivery attempts for any message. The + value must be between 5 and 100. 
The number of delivery + attempts is defined as 1 + (the sum of number of NACKs and + number of times the acknowledgement deadline has been exceeded + for the message). A NACK is any call to ModifyAckDeadline + with a 0 deadline. Note that client libraries may + automatically extend ack\_deadlines. This field will be + honored on a best effort basis. If this parameter is 0, a + default value of 5 is used. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeadLetterPolicy) + ), +) +_sym_db.RegisterMessage(DeadLetterPolicy) + ExpirationPolicy = _reflection.GeneratedProtocolMessageType( "ExpirationPolicy", (_message.Message,), @@ -3810,8 +3940,9 @@ bounded amount of time) until at least one message is available, rather than returning no messages. max_messages: - The maximum number of messages returned for this request. The - Pub/Sub system may return fewer than the number specified. + The maximum number of messages to return for this request. + Must be a positive integer. The Pub/Sub system may return + fewer than the number specified. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) ), @@ -4238,8 +4369,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=4421, - serialized_end=5622, + serialized_start=4563, + serialized_end=5764, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4344,8 +4475,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=5625, - serialized_end=8036, + serialized_start=5767, + serialized_end=8178, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 798a0aef3a08..4218661a3122 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-16T22:21:02.392751Z", + "updateTime": "2019-08-22T12:30:33.804189Z", "sources": [ { "generator": { "name": "artman", - "version": "0.33.0", - "dockerImage": "googleapis/artman@sha256:c6231efb525569736226b1f7af7565dbc84248efafb3692a5bb1d2d8a7975d53" + "version": "0.34.0", + "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "f6cc01ff6d13bda19ed717dfde6e92c593dfc590", - "internalRef": "263831339" + "sha": "92bebf78345af8b2d3585220527115bda8bdedf8", + "internalRef": "264715111" } }, { From 7ddaeff503e57e39862bb498537e02c88062f80b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 27 Aug 2019 10:24:48 -0700 Subject: [PATCH 0398/1197] Add 'ReceivedMessage.delivery_attempt' field (via synth). 
(#9098) --- .../google/cloud/pubsub_v1/proto/pubsub.proto | 18 +++ .../cloud/pubsub_v1/proto/pubsub_pb2.py | 129 +++++++++++------- packages/google-cloud-pubsub/synth.metadata | 6 +- 3 files changed, 102 insertions(+), 51 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 8ee03eb17753..3ad6355a8bf6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -737,6 +737,24 @@ message ReceivedMessage { // The message. PubsubMessage message = 2; + + // Delivery attempt counter is 1 + (the sum of number of NACKs and number of + // ack_deadline exceeds) for this message. + // + // A NACK is any call to ModifyAckDeadline with a 0 deadline. An ack_deadline + // exceeds event is whenever a message is not acknowledged within + // ack_deadline. Note that ack_deadline is initially + // Subscription.ackDeadlineSeconds, but may get extended automatically by + // the client library. + // + // The first delivery of a given message will have this value as 1. The value + // is calculated at best effort and is approximate. + // + // If a DeadLetterPolicy is not set on the subscription, this will be 0. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. It is not subject to any SLA or deprecation policy. + int32 delivery_attempt = 3; } // Request for the GetSubscription method. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 91390dac3b82..dd4427f7e9a1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -31,7 +31,7 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xe5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 
\x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"S\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 
\x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription
"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateS
napshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 
\x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xe5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t 
\x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 
\x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\
xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=project
s/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -1617,6 +1617,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="delivery_attempt", + full_name="google.pubsub.v1.ReceivedMessage.delivery_attempt", + index=2, + number=3, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + 
file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -1627,7 +1645,7 @@ extension_ranges=[], oneofs=[], serialized_start=2496, - serialized_end=2579, + serialized_end=2605, ) @@ -1665,8 +1683,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2581, - serialized_end=2627, + serialized_start=2607, + serialized_end=2653, ) @@ -1722,8 +1740,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2630, - serialized_end=2760, + serialized_start=2656, + serialized_end=2786, ) @@ -1797,8 +1815,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2762, - serialized_end=2844, + serialized_start=2788, + serialized_end=2870, ) @@ -1854,8 +1872,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2846, - serialized_end=2953, + serialized_start=2872, + serialized_end=2979, ) @@ -1893,8 +1911,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2955, - serialized_end=3004, + serialized_start=2981, + serialized_end=3030, ) @@ -1950,8 +1968,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3006, - serialized_end=3104, + serialized_start=3032, + serialized_end=3130, ) @@ -2025,8 +2043,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3106, - serialized_end=3191, + serialized_start=3132, + serialized_end=3217, ) @@ -2064,8 +2082,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3193, - serialized_end=3269, + serialized_start=3219, + serialized_end=3295, ) @@ -2139,8 +2157,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3271, - serialized_end=3366, + serialized_start=3297, + serialized_end=3392, ) @@ -2196,8 +2214,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3368, - serialized_end=3427, + serialized_start=3394, + serialized_end=3453, ) @@ -2307,8 +2325,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3430, - serialized_end=3594, + 
serialized_start=3456, + serialized_end=3620, ) @@ -2346,8 +2364,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3596, - serialized_end=3681, + serialized_start=3622, + serialized_end=3707, ) @@ -2477,8 +2495,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3684, - serialized_end=3859, + serialized_start=3710, + serialized_end=3885, ) @@ -2534,8 +2552,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3861, - serialized_end=3979, + serialized_start=3887, + serialized_end=4005, ) @@ -2683,8 +2701,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3982, - serialized_end=4173, + serialized_start=4008, + serialized_end=4199, ) @@ -2722,8 +2740,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4175, - serialized_end=4213, + serialized_start=4201, + serialized_end=4239, ) @@ -2797,8 +2815,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4215, - serialized_end=4293, + serialized_start=4241, + serialized_end=4319, ) @@ -2854,8 +2872,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4295, - serialized_end=4390, + serialized_start=4321, + serialized_end=4416, ) @@ -2893,8 +2911,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4392, - serialized_end=4433, + serialized_start=4418, + serialized_end=4459, ) @@ -2976,8 +2994,8 @@ fields=[], ) ], - serialized_start=4435, - serialized_end=4544, + serialized_start=4461, + serialized_end=4570, ) @@ -2996,8 +3014,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4546, - serialized_end=4560, + serialized_start=4572, + serialized_end=4586, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3781,6 +3799,21 @@ This ID can be used to acknowledge the received message. message: The message. + delivery_attempt: + Delivery attempt counter is 1 + (the sum of number of NACKs + and number of ack\_deadline exceeds) for this message. 
A NACK + is any call to ModifyAckDeadline with a 0 deadline. An + ack\_deadline exceeds event is whenever a message is not + acknowledged within ack\_deadline. Note that ack\_deadline is + initially Subscription.ackDeadlineSeconds, but may get + extended automatically by the client library. The first + delivery of a given message will have this value as 1. The + value is calculated at best effort and is approximate. If a + DeadLetterPolicy is not set on the subscription, this will be + 0. EXPERIMENTAL: This feature is part of a closed alpha + release. This API might be changed in backward-incompatible + ways and is not recommended for production use. It is not + subject to any SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) ), @@ -4369,8 +4402,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=4563, - serialized_end=5764, + serialized_start=4589, + serialized_end=5790, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4475,8 +4508,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=5767, - serialized_end=8178, + serialized_start=5793, + serialized_end=8204, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 4218661a3122..1686bce7e232 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-08-22T12:30:33.804189Z", + "updateTime": "2019-08-24T12:27:35.100766Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"92bebf78345af8b2d3585220527115bda8bdedf8", - "internalRef": "264715111" + "sha": "b97af5f7fea49d533900b62cca171da0e49743de", + "internalRef": "265156479" } }, { From b764ff891050b165727b2ba634ba2140b362c896 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 27 Aug 2019 16:35:22 -0700 Subject: [PATCH 0399/1197] Docs: Remove CI for gh-pages, use googleapis.dev for api_core refs. (#9085) --- packages/google-cloud-pubsub/docs/conf.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index af234ffe721b..cc9198fc4b3b 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -336,10 +336,7 @@ "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ( - "https://googleapis.github.io/google-cloud-python/latest", - None, - ), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), "requests": ("https://2.python-requests.org/en/master/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), From 02c1f4b8ed57ba257c8ce499e607457d3e7da7e5 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Thu, 29 Aug 2019 13:43:00 -0400 Subject: [PATCH 0400/1197] Release PubSub 1.0.0 (#9145) * Release 1.0.0 * Update CPS Python library READMEs to indicate the library is now GA. Also add some extra notes about versioning and contributions. 
* Fix badge image * Fix rst docs * Address comments by Peter and Bu Sun * Update repo metadata * Improve changelog entry --- .../google-cloud-pubsub/.repo-metadata.json | 4 +- packages/google-cloud-pubsub/CHANGELOG.md | 16 ++++++++ packages/google-cloud-pubsub/README.rst | 41 +++++++++++++++++-- packages/google-cloud-pubsub/setup.py | 4 +- 4 files changed, 58 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index 916011d82ca9..e14db4392f0a 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -4,10 +4,10 @@ "product_documentation": "https://cloud.google.com/pubsub/docs/", "client_documentation": "https://googleapis.dev/python/pubsub/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", - "release_level": "beta", + "release_level": "ga", "language": "python", "repo": "googleapis/google-cloud-python", "distribution_name": "google-cloud-pubsub", "api_id": "pubsub.googleapis.com", "requires_billing": true -} \ No newline at end of file +} diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 05df7382e81e..347266bd85dc 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 1.0.0 + +08-29-2019 09:27 PDT + +### Implementation Changes +- Add 'ReceivedMessage.delivery_attempt' field (via synth). ([#9098](https://github.com/googleapis/google-cloud-python/pull/9098)) +- Remove send/recv msg size limit, update docstrings (via synth). 
([#8964](https://github.com/googleapis/google-cloud-python/pull/8964)) + +### Documentation +- Update docstrings for client kwargs and fix return types uris ([#9037](https://github.com/googleapis/google-cloud-python/pull/9037)) +- Remove CI for gh-pages, use googleapis.dev for api_core refs. ([#9085](https://github.com/googleapis/google-cloud-python/pull/9085)) +- Remove compatability badges from READMEs. ([#9035](https://github.com/googleapis/google-cloud-python/pull/9035)) + +### Internal / Testing Changes +- Add dead-letter-policy field in preparation for its implementation (via synth) ([#9078](https://github.com/googleapis/google-cloud-python/pull/9078)) + ## 0.45.0 07-31-2019 02:03 PDT diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 3014e1375686..a92a43087052 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Pub / Sub ======================================== -|beta| |pypi| |versions| +|GA| |pypi| |versions| `Google Cloud Pub / Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. You @@ -19,8 +19,8 @@ independently written applications. - `Product Documentation`_ - `Client Library Documentation`_ -.. |beta| image:: https://img.shields.io/badge/support-beta-silver.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#beta-support +.. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg + :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. 
|versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg @@ -192,3 +192,38 @@ For example, to use JSON Web Tokens, provide a `google.auth.jwt.Credentials`_ in .. _Credentials: https://google-auth.readthedocs.io/en/latest/reference/google.auth.credentials.html#google.auth.credentials.Credentials .. _google-auth: https://google-auth.readthedocs.io/en/latest/index.html .. _google.auth.jwt.Credentials: https://google-auth.readthedocs.io/en/latest/reference/google.auth.jwt.html#google.auth.jwt.Credentials + + +Versioning +---------- + +This library follows `Semantic Versioning`_. + +It is currently in major version one (1.y.z), which means that the public API should be considered stable. + +.. _Semantic Versioning: http://semver.org/ + +Contributing +------------ + +Contributions to this library are always welcome and highly encouraged. + +See the `CONTRIBUTING doc`_ for more information on how to get started. + +.. _CONTRIBUTING doc: https://github.com/googleapis/google-cloud-python/blob/master/CONTRIBUTING.rst + +Community +--------- + +Google Cloud Platform Python developers hang out in `Slack`_ in the ``#python`` +channel, click here to `get an invitation`_. + +.. _Slack: https://googlecloud-community.slack.com +.. _get an invitation: https://gcp-slack.appspot.com/ + +License +------- + +Apache 2.0 - See `the LICENSE`_ for more information. + +.. 
_the LICENSE: https://github.com/googleapis/google-cloud-python/blob/master/LICENSE diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ab1d963174a5..9a13e8ae0f2c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,12 +22,12 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "0.45.0" +version = "1.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 4 - Beta" +release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", From 095b8f57062cbf904ead508da1c6beb6531a71b5 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 12 Sep 2019 17:26:29 +0200 Subject: [PATCH 0401/1197] Link to correct TimeoutError in futures docs (#9216) --- .../google-cloud-pubsub/google/cloud/pubsub_v1/futures.py | 4 ++-- .../google/cloud/pubsub_v1/publisher/futures.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 21d5d810199f..0d7ba7f9bf52 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -94,7 +94,7 @@ def result(self, timeout=None): times out and raises TimeoutError. Raises: - ~.pubsub_v1.TimeoutError: If the request times out. + concurrent.futures.TimeoutError: If the request times out. Exception: For undefined exceptions in the underlying call execution. """ @@ -114,7 +114,7 @@ def exception(self, timeout=None): times out and raises TimeoutError. Raises: - TimeoutError: If the request times out. + concurrent.futures.TimeoutError: If the request times out. 
Returns: Exception: The exception raised by the call, if any. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index ed200041177b..fa8a79998617 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -39,7 +39,7 @@ def result(self, timeout=None): str: The message ID. Raises: - ~.pubsub_v1.TimeoutError: If the request times out. + concurrent.futures.TimeoutError: If the request times out. Exception: For undefined exceptions in the underlying call execution. """ From 5b3b89ba6fc0c5e202d8ea142f8f698fc8fe8cd4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 24 Sep 2019 08:09:04 -0700 Subject: [PATCH 0402/1197] config(pubsub): adjust messaging RPC timeout settings (via synth) --- .../cloud/pubsub_v1/gapic/publisher_client_config.py | 6 +++--- .../cloud/pubsub_v1/gapic/subscriber_client_config.py | 6 +++--- packages/google-cloud-pubsub/synth.metadata | 10 +++++----- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 33f0af827924..1b8982b639b2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -29,9 +29,9 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 30000, + "initial_rpc_timeout_millis": 5000, + "rpc_timeout_multiplier": 1.3, + "max_rpc_timeout_millis": 600000, "total_timeout_millis": 600000, }, }, diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 0aa68315c1c0..083a6c19a440 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -20,9 +20,9 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 25000, + "initial_rpc_timeout_millis": 5000, + "rpc_timeout_multiplier": 1.3, + "max_rpc_timeout_millis": 600000, "total_timeout_millis": 600000, }, "streaming_messaging": { diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 1686bce7e232..9dfcec7b72e4 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-08-24T12:27:35.100766Z", + "updateTime": "2019-09-24T12:27:36.347590Z", "sources": [ { "generator": { "name": "artman", - "version": "0.34.0", - "dockerImage": "googleapis/artman@sha256:38a27ba6245f96c3e86df7acb2ebcc33b4f186d9e475efe2d64303aec3d4e0ea" + "version": "0.37.0", + "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "b97af5f7fea49d533900b62cca171da0e49743de", - "internalRef": "265156479" + "sha": "fe6115fdfae318277426ec0e11b4b05e2b150723", + "internalRef": "270882829" } }, { From 20a11305ad1c36b381845f7f816b773bb370833f Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 25 Sep 2019 12:35:50 -0400 Subject: [PATCH 0403/1197] docs: fix intersphinx reference to requests (#9294) --- 
packages/google-cloud-pubsub/docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index cc9198fc4b3b..472fe878fe8f 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -338,7 +338,7 @@ "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://2.python-requests.org/en/master/", None), + "requests": ("https://requests.kennethreitz.org/en/stable/", None), "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } From d74a597ba63dcee1aa1aa22db073c57f53291ec3 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 27 Sep 2019 10:13:02 +0200 Subject: [PATCH 0404/1197] fix(pubsub): set default stream ACK deadline to subscriptions' (#9268) * feat(pubsub): set default stream ACK deadline to subscriptions' When subscribing, it makes sense to use the configured subscription's maximum ACK deadline for the streaming pull, instead of an optimistic minimum of 10 seconds. Using an optimistic deadline affects messages that are put on hold and are not lease managed, because by the time the client dispatches them to the user's callback, the optimistic ACK deadline could have already been missed, resulting in the backend unnecessary re-sending those messages, even if the subscription's ACK deadline has not been hit yet. 
* Rename sub_future to subscription_future --- .../_protocol/streaming_pull_manager.py | 41 +++++++++++----- packages/google-cloud-pubsub/tests/system.py | 49 +++++++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 27 ++++++---- 3 files changed, 95 insertions(+), 22 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index af6883fd067e..b393cbfd5ec6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -208,7 +208,7 @@ def load(self): float: The load value. """ if self._leaser is None: - return 0 + return 0.0 return max( [ @@ -384,14 +384,26 @@ def open(self, callback, on_callback_error): ) # Create the RPC + subscription = self._client.api.get_subscription(self._subscription) + stream_ack_deadline_seconds = subscription.ack_deadline_seconds + + get_initial_request = functools.partial( + self._get_initial_request, stream_ack_deadline_seconds + ) self._rpc = bidi.ResumableBidiRpc( start_rpc=self._client.api.streaming_pull, - initial_request=self._get_initial_request, + initial_request=get_initial_request, should_recover=self._should_recover, throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) + _LOGGER.debug( + "Creating a stream, default ACK deadline set to {} seconds.".format( + stream_ack_deadline_seconds + ) + ) + # Create references to threads self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) @@ -462,12 +474,16 @@ def close(self, reason=None): for callback in self._close_callbacks: callback(self, reason) - def _get_initial_request(self): + def _get_initial_request(self, stream_ack_deadline_seconds): 
"""Return the initial request for the RPC. This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. + Args: + stream_ack_deadline_seconds (int): + The default message acknowledge deadline for the stream. + Returns: google.cloud.pubsub_v1.types.StreamingPullRequest: A request suitable for being the first request on the stream (and not @@ -486,7 +502,7 @@ def _get_initial_request(self): request = types.StreamingPullRequest( modify_deadline_ack_ids=list(lease_ids), modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), - stream_ack_deadline_seconds=self.ack_histogram.percentile(99), + stream_ack_deadline_seconds=stream_ack_deadline_seconds, subscription=self._subscription, ) @@ -511,14 +527,6 @@ def _on_response(self, response): self._messages_on_hold.qsize(), ) - # Immediately modack the messages we received, as this tells the server - # that we've received them. - items = [ - requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in response.received_messages - ] - self._dispatcher.modify_ack_deadline(items) - invoke_callbacks_for = [] for received_message in response.received_messages: @@ -535,6 +543,15 @@ def _on_response(self, response): else: self._messages_on_hold.put(message) + # Immediately (i.e. without waiting for the auto lease management) + # modack the messages we received and not put on hold, as this tells + # the server that we've received them. 
+ items = [ + requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) + for message in invoke_callbacks_for + ] + self._dispatcher.modify_ack_deadline(items) + _LOGGER.debug( "Scheduling callbacks for %s new messages, new total on hold %s.", len(invoke_callbacks_for), diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 7ffb4a580194..c8030a5773db 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -381,6 +381,55 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + def test_streaming_pull_ack_deadline( + self, publisher, subscriber, project, topic_path, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic and a subscription, then subscribe to the topic. This + # must happen before the messages are published. + publisher.create_topic(topic_path) + + # Subscribe to the topic. This must happen before the messages + # are published. 
+ subscriber.create_subscription( + subscription_path, topic_path, ack_deadline_seconds=60 + ) + + # publish some messages and wait for completion + self._publish_messages(publisher, topic_path, batch_sizes=[2]) + + # subscribe to the topic + callback = StreamingPullCallback( + processing_time=15, # more than the default ACK deadline of 10 seconds + resolve_at_msg_count=3, # one more than the published messages count + ) + flow_control = types.FlowControl(max_messages=1) + subscription_future = subscriber.subscribe( + subscription_path, callback, flow_control=flow_control + ) + + # We expect to process the first two messages in 2 * 15 seconds, and + # any duplicate message that is re-sent by the backend in additional + # 15 seconds, totalling 45 seconds (+ overhead) --> if there have been + # no duplicates in 60 seconds, we can reasonably assume that there + # won't be any. + try: + callback.done_future.result(timeout=60) + except exceptions.TimeoutError: + # future timed out, because we received no excessive messages + assert sorted(callback.seen_message_ids) == [1, 2] + else: + pytest.fail( + "Expected to receive 2 messages, but got at least {}.".format( + len(callback.seen_message_ids) + ) + ) + finally: + subscription_future.cancel() + def test_streaming_pull_max_messages( self, publisher, topic_path, subscriber, subscription_path, cleanup ): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 877ccf97fd9a..352b09ba83fc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -405,6 +405,11 @@ def test_heartbeat_inactive(): ) def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): manager = make_manager() + 
manager._client.api.get_subscription.return_value = types.Subscription( + name="projects/foo/subscriptions/bar", + topic="projects/foo/topics/baz", + ack_deadline_seconds=123, + ) manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) @@ -426,10 +431,14 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi resumable_bidi_rpc.assert_called_once_with( start_rpc=manager._client.api.streaming_pull, - initial_request=manager._get_initial_request, + initial_request=mock.ANY, should_recover=manager._should_recover, throttle_reopen=True, ) + initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] + assert initial_request_arg.func == manager._get_initial_request + assert initial_request_arg.args[0] == 123 + resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done ) @@ -574,11 +583,11 @@ def test__get_initial_request(): manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) manager._leaser.ack_ids = ["1", "2"] - initial_request = manager._get_initial_request() + initial_request = manager._get_initial_request(123) assert isinstance(initial_request, types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" - assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == ["1", "2"] assert initial_request.modify_deadline_seconds == [10, 10] @@ -587,11 +596,11 @@ def test__get_initial_request_wo_leaser(): manager = make_manager() manager._leaser = None - initial_request = manager._get_initial_request() + initial_request = manager._get_initial_request(123) assert isinstance(initial_request, types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" - assert initial_request.stream_ack_deadline_seconds == 10 + assert initial_request.stream_ack_deadline_seconds == 123 assert 
initial_request.modify_deadline_ack_ids == [] assert initial_request.modify_deadline_seconds == [] @@ -660,12 +669,10 @@ def test__on_response_with_leaser_overload(): # are called in the expected way. manager._on_response(response) + # only the messages that are added to the lease management and dispatched to + # callbacks should have their ACK deadline extended dispatcher.modify_ack_deadline.assert_called_once_with( - [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), - ] + [requests.ModAckRequest("fack", 10)] ) # one message should be scheduled, the leaser capacity allows for it From c98d00cf43669d12130f2b9576cd724adfbe9ae8 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Fri, 27 Sep 2019 10:50:05 -0400 Subject: [PATCH 0405/1197] chore(pubsub): release PubSub 1.0.1 (#9331) * Release PubSub 1.0.1 --- packages/google-cloud-pubsub/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 347266bd85dc..17278359d09a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,21 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 1.0.1 + +09-27-2019 07:01 PDT + + +### Implementation Changes +- Set default stream ACK deadline to subscriptions'. ([#9268](https://github.com/googleapis/google-cloud-python/pull/9268)) + +### Documentation +- Fix intersphinx reference to requests. ([#9294](https://github.com/googleapis/google-cloud-python/pull/9294)) +- Link to correct TimeoutError in futures docs. ([#9216](https://github.com/googleapis/google-cloud-python/pull/9216)) + +### Internal / Testing Changes +- Adjust messaging RPC timeout settings (via synth). 
[#9279](https://github.com/googleapis/google-cloud-python/pull/9279) + ## 1.0.0 08-29-2019 09:27 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9a13e8ae0f2c..ed5193ca3b61 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.0.0" +version = "1.0.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From a53e3a9c36446703b5dfb4a98743bd5471a4e130 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 30 Sep 2019 20:19:50 +0200 Subject: [PATCH 0406/1197] fix(pubsub): streaming pull shouldn't need subscriptions.get permission (#9360) Pulling the mesages with the streaming pull should work with the default pubsub.subscriber role. This commit removes the call to fetch a subscription, and replaces the subscription's ACK deadline with a fixed deadline of 60 seconds. That *will* re-introduce the issue #9252, but at least in a less severe manner. 
--- .../_protocol/streaming_pull_manager.py | 20 +++++++++++++++++-- packages/google-cloud-pubsub/tests/system.py | 16 +++++++++------ .../subscriber/test_streaming_pull_manager.py | 10 ++++------ 3 files changed, 32 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index b393cbfd5ec6..e9414247f9cd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -50,6 +50,13 @@ _RESUME_THRESHOLD = 0.8 """The load threshold below which to resume the incoming message stream.""" +_DEFAULT_STREAM_ACK_DEADLINE = 60 +"""The default message acknowledge deadline in seconds for incoming message stream. + +This default deadline is dynamically modified for the messages that are added +to the lease management. +""" + def _maybe_wrap_exception(exception): """Wraps a gRPC exception class, if needed.""" @@ -384,8 +391,17 @@ def open(self, callback, on_callback_error): ) # Create the RPC - subscription = self._client.api.get_subscription(self._subscription) - stream_ack_deadline_seconds = subscription.ack_deadline_seconds + + # We must use a fixed value for the ACK deadline, as we cannot read it + # from the subscription. The latter would require `pubsub.subscriptions.get` + # permission, which is not granted to the default subscriber role + # `roles/pubsub.subscriber`. + # See also https://github.com/googleapis/google-cloud-python/issues/9339 + # + # When dynamic lease management is enabled for the "on hold" messages, + # the default stream ACK deadline should again be set based on the + # historic ACK timing data, i.e. `self.ack_histogram.percentile(99)`. 
+ stream_ack_deadline_seconds = _DEFAULT_STREAM_ACK_DEADLINE get_initial_request = functools.partial( self._get_initial_request, stream_ack_deadline_seconds diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index c8030a5773db..cb00a4b91ecd 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -381,6 +381,10 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + @pytest.mark.xfail( + reason="The default stream ACK deadline is static and received messages " + "exceeding FlowControl.max_messages are currently not lease managed." + ) def test_streaming_pull_ack_deadline( self, publisher, subscriber, project, topic_path, subscription_path, cleanup ): @@ -395,7 +399,7 @@ def test_streaming_pull_ack_deadline( # Subscribe to the topic. This must happen before the messages # are published. subscriber.create_subscription( - subscription_path, topic_path, ack_deadline_seconds=60 + subscription_path, topic_path, ack_deadline_seconds=240 ) # publish some messages and wait for completion @@ -403,7 +407,7 @@ def test_streaming_pull_ack_deadline( # subscribe to the topic callback = StreamingPullCallback( - processing_time=15, # more than the default ACK deadline of 10 seconds + processing_time=70, # more than the default stream ACK deadline (60s) resolve_at_msg_count=3, # one more than the published messages count ) flow_control = types.FlowControl(max_messages=1) @@ -411,13 +415,13 @@ def test_streaming_pull_ack_deadline( subscription_path, callback, flow_control=flow_control ) - # We expect to process the first two messages in 2 * 15 seconds, and + # We expect to process the first two messages in 2 * 70 seconds, and # any duplicate message that is re-sent by the backend in additional - # 15 seconds, totalling 45 seconds (+ overhead) --> if there have been - # no duplicates in 60 seconds, we can reasonably assume that there + 
# 70 seconds, totalling 210 seconds (+ overhead) --> if there have been + # no duplicates in 240 seconds, we can reasonably assume that there # won't be any. try: - callback.done_future.result(timeout=60) + callback.done_future.result(timeout=240) except exceptions.TimeoutError: # future timed out, because we received no excessive messages assert sorted(callback.seen_message_ids) == [1, 2] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 352b09ba83fc..2bd20caa04d1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -404,12 +404,9 @@ def test_heartbeat_inactive(): "google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True ) def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): + stream_ack_deadline = streaming_pull_manager._DEFAULT_STREAM_ACK_DEADLINE + manager = make_manager() - manager._client.api.get_subscription.return_value = types.Subscription( - name="projects/foo/subscriptions/bar", - topic="projects/foo/topics/baz", - ack_deadline_seconds=123, - ) manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) @@ -437,7 +434,8 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi ) initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] assert initial_request_arg.func == manager._get_initial_request - assert initial_request_arg.args[0] == 123 + assert initial_request_arg.args[0] == stream_ack_deadline + assert not manager._client.api.get_subscription.called resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done From 6eece1e1a004cad6e38a799ec642ca1b26ef6667 Mon Sep 17 00:00:00 2001 From: Peter 
Lamut Date: Mon, 30 Sep 2019 23:26:12 +0200 Subject: [PATCH 0407/1197] chore(pubsub): release 1.0.2 (#9362) * chore(pubsub): release 1.0.2 * Adjust release notes wording and grammar. --- packages/google-cloud-pubsub/CHANGELOG.md | 9 +++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 17278359d09a..09716f05a9cf 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 1.0.2 + +09-30-2019 11:57 PDT + + +### Implementation Changes + +- Streaming pull shouldn't need `subscriptions.get` permission ([#9360](https://github.com/googleapis/google-cloud-python/pull/9360)). + ## 1.0.1 09-27-2019 07:01 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ed5193ca3b61..45e2cc04c07d 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.0.1" +version = "1.0.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From a7c08fee0ab2f30c422be6faa84aa61716cc1fff Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 1 Oct 2019 10:45:57 -0400 Subject: [PATCH 0408/1197] fix(pubsub): add 'StreamingPullManager._should_terminate' (#9335) Toward clean shutdown of the subscriber's background thread. See: #8616. 
--- .../_protocol/streaming_pull_manager.py | 22 +++++++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 18 +++++++++++++++ 2 files changed, 40 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e9414247f9cd..d3b1d6f51eb6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -44,6 +44,7 @@ exceptions.GatewayTimeout, exceptions.Aborted, ) +_TERMINATING_STREAM_ERRORS = (exceptions.Cancelled,) _MAX_LOAD = 1.0 """The load threshold above which to pause the incoming message stream.""" @@ -410,6 +411,7 @@ def open(self, callback, on_callback_error): start_rpc=self._client.api.streaming_pull, initial_request=get_initial_request, should_recover=self._should_recover, + should_terminate=self._should_terminate, throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) @@ -598,6 +600,26 @@ def _should_recover(self, exception): _LOGGER.info("Observed non-recoverable stream error %s", exception) return False + def _should_terminate(self, exception): + """Determine if an error on the RPC stream should be terminated. + + If the exception is one of the terminating exceptions, this will signal + to the consumer thread that it should terminate. + + This will cause the stream to exit when it returns :data:`True`. + + Returns: + bool: Indicates if the caller should terminate or attempt recovery. + Will be :data:`True` if the ``exception`` is "acceptable", i.e. + in a list of terminating exceptions. 
+ """ + exception = _maybe_wrap_exception(exception) + if isinstance(exception, _TERMINATING_STREAM_ERRORS): + _LOGGER.info("Observed terminating stream error %s", exception) + return True + _LOGGER.info("Observed non-terminating stream error %s", exception) + return False + def _on_rpc_done(self, future): """Triggered whenever the underlying RPC terminates without recovery. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 2bd20caa04d1..a69ea5ca5268 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -430,6 +430,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi start_rpc=manager._client.api.streaming_pull, initial_request=mock.ANY, should_recover=manager._should_recover, + should_terminate=manager._should_terminate, throttle_reopen=True, ) initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] @@ -724,6 +725,23 @@ def test__should_recover_false(): assert manager._should_recover(exc) is False +def test__should_terminate_true(): + manager = make_manager() + + details = "Cancelled. Go away, before I taunt you a second time." 
+ exc = exceptions.Cancelled(details) + + assert manager._should_terminate(exc) is True + + +def test__should_terminate_false(): + manager = make_manager() + + exc = TypeError("wahhhhhh") + + assert manager._should_terminate(exc) is False + + @mock.patch("threading.Thread", autospec=True) def test__on_rpc_done(thread): manager = make_manager() From 2f321577df87cc1355ad4e9b1fb156eb37aae7a4 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 31 Oct 2019 22:22:49 +0200 Subject: [PATCH 0409/1197] chore(pubsub): add subscriber role test for streaming (#9507) Pulling the messages using a streaming pull should work with accounts having only the pubsub.subscriber role. This commits add a test that covers this aspect. --- packages/google-cloud-pubsub/tests/system.py | 40 ++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index cb00a4b91ecd..fd7473e1e53b 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -17,6 +17,7 @@ import datetime import itertools import operator as op +import os import threading import time @@ -488,6 +489,45 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a limited subscriber service account.", + ) + def test_streaming_pull_subscriber_permissions_sufficient( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # create a topic and subscribe to it + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + # A service account granting only the pubsub.subscriber role must be used. 
+ filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-subscriber-service-account.json" + ) + streaming_pull_subscriber = type(subscriber).from_service_account_file(filename) + + # Subscribe to the topic, publish a message, and verify that subscriber + # successfully pulls and processes it. + callback = StreamingPullCallback(processing_time=0.01, resolve_at_msg_count=1) + future = streaming_pull_subscriber.subscribe(subscription_path, callback) + self._publish_messages(publisher, topic_path, batch_sizes=[1]) + + try: + callback.done_future.result(timeout=10) + except exceptions.TimeoutError: + pytest.fail( + "Timeout: receiving/processing streamed messages took too long." + ) + else: + assert 1 in callback.seen_message_ids + finally: + future.cancel() + def _publish_messages(self, publisher, topic_path, batch_sizes): """Publish ``count`` messages in batches and wait until completion.""" publish_futures = [] From bd92594e4acea0f2c0d7d9693a06cd91e84bbfc1 Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Thu, 7 Nov 2019 21:06:18 +0300 Subject: [PATCH 0410/1197] feat(pubsub): add stop method (#9365) --- .../google/cloud/pubsub_v1/futures.py | 4 +- .../pubsub_v1/publisher/_batch/thread.py | 3 + .../cloud/pubsub_v1/publisher/client.py | 74 +++++++++++++------ .../publisher/test_publisher_client.py | 30 ++++++++ 4 files changed, 87 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 0d7ba7f9bf52..ba861e40c653 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -74,9 +74,7 @@ def running(self): bool: ``True`` if this method has not yet completed, or ``False`` if it has completed. """ - if self.done(): - return False - return True + return not self.done() def done(self): """Return True the future is done, False otherwise. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 117ee12b8463..726e93166cda 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -74,6 +74,9 @@ def __init__(self, client, topic, settings, autocommit=True): self._state_lock = threading.Lock() # These members are all communicated between threads; ensure that # any writes to them use the "state lock" to remain atomic. + # _futures list should remain unchanged after batch + # status changed from ACCEPTING_MESSAGES to any other + # in order to avoid race conditions self._futures = [] self._messages = [] self._size = 0 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 05a4161e889a..60a03bb652ab 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -134,6 +134,7 @@ def __init__(self, batch_settings=(), **kwargs): # messages. One batch exists for each topic. self._batch_lock = self._batch_class.make_lock() self._batches = {} + self._is_stopped = False @classmethod def from_service_account_file(cls, filename, batch_settings=(), **kwargs): @@ -187,20 +188,19 @@ def _batch(self, topic, create=False, autocommit=True): """ # If there is no matching batch yet, then potentially create one # and place it on the batches dictionary. 
- with self._batch_lock: - if not create: - batch = self._batches.get(topic) - if batch is None: - create = True - - if create: - batch = self._batch_class( - autocommit=autocommit, - client=self, - settings=self.batch_settings, - topic=topic, - ) - self._batches[topic] = batch + if not create: + batch = self._batches.get(topic) + if batch is None: + create = True + + if create: + batch = self._batch_class( + autocommit=autocommit, + client=self, + settings=self.batch_settings, + topic=topic, + ) + self._batches[topic] = batch return batch @@ -242,12 +242,17 @@ def publish(self, topic, data, **attrs): instance that conforms to Python Standard library's :class:`~concurrent.futures.Future` interface (but not an instance of that class). + + Raises: + RuntimeError: + If called after publisher has been stopped + by a `stop()` method call. """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. if not isinstance(data, six.binary_type): raise TypeError( - "Data being published to Pub/Sub must be sent " "as a bytestring." + "Data being published to Pub/Sub must be sent as a bytestring." ) # Coerce all attributes to text strings. @@ -266,11 +271,38 @@ def publish(self, topic, data, **attrs): message = types.PubsubMessage(data=data, attributes=attrs) # Delegate the publishing to the batch. - batch = self._batch(topic) - future = None - while future is None: - future = batch.publish(message) - if future is None: - batch = self._batch(topic, create=True) + with self._batch_lock: + if self._is_stopped: + raise RuntimeError("Cannot publish on a stopped publisher.") + + batch = self._batch(topic) + future = None + while future is None: + future = batch.publish(message) + if future is None: + batch = self._batch(topic, create=True) return future + + def stop(self): + """Immediately publish all outstanding messages. + + Asynchronously sends all outstanding messages and + prevents future calls to `publish()`. 
Method should + be invoked prior to deleting this `Client()` object + in order to ensure that no pending messages are lost. + + .. note:: + + This method is non-blocking. Use `Future()` objects + returned by `publish()` to make sure all publish + requests completed, either in success or error. + """ + with self._batch_lock: + if self._is_stopped: + raise RuntimeError("Cannot stop a publisher already stopped.") + + self._is_stopped = True + + for batch in self._batches.values(): + batch.commit() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 05e4c8c67209..6519b2b23149 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -201,6 +201,36 @@ def test_publish_attrs_type_error(): client.publish(topic, b"foo", answer=42) +def test_stop(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + + batch = client._batch("topic1", autocommit=False) + batch2 = client._batch("topic2", autocommit=False) + + pubsub_msg = types.PubsubMessage(data=b"msg") + + patch = mock.patch.object(batch, "commit") + patch2 = mock.patch.object(batch2, "commit") + + with patch as commit_mock, patch2 as commit_mock2: + batch.publish(pubsub_msg) + batch2.publish(pubsub_msg) + + client.stop() + + # check if commit() called + commit_mock.assert_called() + commit_mock2.assert_called() + + # check that closed publisher doesn't accept new messages + with pytest.raises(RuntimeError): + client.publish("topic1", b"msg2") + + with pytest.raises(RuntimeError): + client.stop() + + def test_gapic_instance_method(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) From c5eba0f41e4bfca03a7ec3e473e53e578b1f278a Mon Sep 17 00:00:00 2001 From: Prad 
Nelluru Date: Mon, 11 Nov 2019 02:27:36 -0600 Subject: [PATCH 0411/1197] fix(pubsub): update batching and flow control parameters to be same as the other client libraries (#9597) * Update batching and flow control parameters to be same as the other client libraries. * Fix tests * Update test leaser param --- .../google/cloud/pubsub_v1/types.py | 10 +++++----- .../pubsub_v1/publisher/test_publisher_client.py | 12 ++++++------ .../subscriber/test_streaming_pull_manager.py | 4 ++-- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 733d3bf97ac0..7f833660f6e2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -39,9 +39,9 @@ "BatchSettings", ["max_bytes", "max_latency", "max_messages"] ) BatchSettings.__new__.__defaults__ = ( - 1000 * 1000 * 10, # max_bytes: documented "10 MB", enforced 10000000 - 0.05, # max_latency: 0.05 seconds - 1000, # max_messages: 1,000 + 1 * 1000 * 1000, # max_bytes: 1 MB + 0.01, # max_latency: 10 ms + 100, # max_messages: 100 ) if sys.version_info >= (3, 5): @@ -70,8 +70,8 @@ ) FlowControl.__new__.__defaults__ = ( 100 * 1024 * 1024, # max_bytes: 100mb - 100, # max_messages: 100 - 2 * 60 * 60, # max_lease_duration: 2 hours. + 1000, # max_messages: 1000 + 1 * 60 * 60, # max_lease_duration: 1 hour. 
) if sys.version_info >= (3, 5): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 6519b2b23149..a06d2d0cf697 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -31,9 +31,9 @@ def test_init(): # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. assert isinstance(client.api, publisher_client.PublisherClient) - assert client.batch_settings.max_bytes == 10 * 1000 * 1000 - assert client.batch_settings.max_latency == 0.05 - assert client.batch_settings.max_messages == 1000 + assert client.batch_settings.max_bytes == 1 * 1000 * 1000 + assert client.batch_settings.max_latency == 0.01 + assert client.batch_settings.max_messages == 100 def test_init_w_custom_transport(): @@ -44,9 +44,9 @@ def test_init_w_custom_transport(): # batch settings object, which should have the defaults. 
assert isinstance(client.api, publisher_client.PublisherClient) assert client.api.transport is transport - assert client.batch_settings.max_bytes == 10 * 1000 * 1000 - assert client.batch_settings.max_latency == 0.05 - assert client.batch_settings.max_messages == 1000 + assert client.batch_settings.max_bytes == 1 * 1000 * 1000 + assert client.batch_settings.max_latency == 0.01 + assert client.batch_settings.max_messages == 100 def test_init_emulator(monkeypatch): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index a69ea5ca5268..16d8f7343b02 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -660,9 +660,9 @@ def test__on_response_with_leaser_overload(): ] ) - # Adjust message bookkeeping in leaser. Pick 99 messages, which is just below + # Adjust message bookkeeping in leaser. Pick 999 messages, which is just below # the default FlowControl.max_messages limit. - fake_leaser_add(leaser, init_msg_count=99, init_bytes=990) + fake_leaser_add(leaser, init_msg_count=999, init_bytes=9900) # Actually run the method and prove that modack and schedule # are called in the expected way. 
From 6eca30a38182a423b54f38debd323e067e58c546 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 11 Nov 2019 11:44:50 +0200 Subject: [PATCH 0412/1197] chore(pubsub): refactor fake leaser test helper (#9632) --- .../subscriber/test_streaming_pull_manager.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 16d8f7343b02..114663e7b8e2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -117,20 +117,20 @@ def make_manager(**kwargs): ) -def fake_leaser_add(leaser, init_msg_count=0, init_bytes=0): +def fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10): """Add a simplified fake add() method to a leaser instance. The fake add() method actually increases the leaser's internal message count - by one for each message, and the total bytes by 10 for each message (hardcoded, - regardless of the actual message size). + by one for each message, and the total bytes by ``assumed_msg_size`` for + each message (regardless of the actual message size). 
""" def fake_add(self, items): self.message_count += len(items) - self.bytes += len(items) * 10 + self.bytes += len(items) * assumed_msg_size leaser.message_count = init_msg_count - leaser.bytes = init_bytes + leaser.bytes = init_msg_count * assumed_msg_size leaser.add = stdlib_types.MethodType(fake_add, leaser) @@ -256,7 +256,7 @@ def test__maybe_release_messages_below_overload(): # init leaser message count to 8 to leave room for 2 more messages _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) - fake_leaser_add(_leaser, init_msg_count=8, init_bytes=200) + fake_leaser_add(_leaser, init_msg_count=8, assumed_msg_size=25) _leaser.add = mock.Mock(wraps=_leaser.add) # to spy on calls messages = [ @@ -621,7 +621,7 @@ def test__on_response_no_leaser_overload(): ) # adjust message bookkeeping in leaser - fake_leaser_add(leaser, init_msg_count=0, init_bytes=0) + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) # Actually run the method and prove that modack and schedule # are called in the expected way. @@ -662,7 +662,7 @@ def test__on_response_with_leaser_overload(): # Adjust message bookkeeping in leaser. Pick 999 messages, which is just below # the default FlowControl.max_messages limit. - fake_leaser_add(leaser, init_msg_count=999, init_bytes=9900) + fake_leaser_add(leaser, init_msg_count=999, assumed_msg_size=10) # Actually run the method and prove that modack and schedule # are called in the expected way. 
From e935b7ac4a05bb2b60b37afd5c1418d1ff5fc38c Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 11 Nov 2019 15:15:32 -0800 Subject: [PATCH 0413/1197] docs: add python 2 sunset banner to documentation (#9036) --- .../docs/_static/custom.css | 4 ++ .../docs/_templates/layout.html | 49 +++++++++++++++++++ packages/google-cloud-pubsub/docs/conf.py | 2 +- 3 files changed, 54 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-pubsub/docs/_static/custom.css create mode 100644 packages/google-cloud-pubsub/docs/_templates/layout.html diff --git a/packages/google-cloud-pubsub/docs/_static/custom.css b/packages/google-cloud-pubsub/docs/_static/custom.css new file mode 100644 index 000000000000..9a6f9f8ddc3a --- /dev/null +++ b/packages/google-cloud-pubsub/docs/_static/custom.css @@ -0,0 +1,4 @@ +div#python2-eol { + border-color: red; + border-width: medium; +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/_templates/layout.html b/packages/google-cloud-pubsub/docs/_templates/layout.html new file mode 100644 index 000000000000..de457b2c2767 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/_templates/layout.html @@ -0,0 +1,49 @@ +{% extends "!layout.html" %} +{%- block content %} +{%- if theme_fixed_sidebar|lower == 'true' %} +
+ {{ sidebar() }} + {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} + + {%- block relbar_top %} + {%- if theme_show_relbar_top|tobool %} + + {%- endif %} + {% endblock %} + +
+
+ On January 1, 2020 this library will no longer support Python 2 on the latest released version. + Previously released library versions will continue to be available. For more information please + visit Python 2 support on Google Cloud. +
+ {% block body %} {% endblock %} +
+ + {%- block relbar_bottom %} + {%- if theme_show_relbar_bottom|tobool %} + + {%- endif %} + {% endblock %} + + {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} +
+
+{%- else %} +{{ super() }} +{%- endif %} +{%- endblock %} diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 472fe878fe8f..778fe480b370 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -162,7 +162,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -# html_static_path = [] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied From 2a270c3fe26d6fb526a9697aa0be935c79bb8b92 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 12 Nov 2019 12:15:53 -0800 Subject: [PATCH 0414/1197] chore(pubsub): change spacing in docs templates (via synth) (#9759) --- packages/google-cloud-pubsub/docs/_static/custom.css | 2 +- .../google-cloud-pubsub/docs/_templates/layout.html | 1 + packages/google-cloud-pubsub/synth.metadata | 12 ++++++------ 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/_static/custom.css b/packages/google-cloud-pubsub/docs/_static/custom.css index 9a6f9f8ddc3a..0abaf229fce3 100644 --- a/packages/google-cloud-pubsub/docs/_static/custom.css +++ b/packages/google-cloud-pubsub/docs/_static/custom.css @@ -1,4 +1,4 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/_templates/layout.html b/packages/google-cloud-pubsub/docs/_templates/layout.html index de457b2c2767..228529efe2d2 100644 --- a/packages/google-cloud-pubsub/docs/_templates/layout.html +++ b/packages/google-cloud-pubsub/docs/_templates/layout.html @@ -1,3 +1,4 @@ + {% extends "!layout.html" %} {%- block content %} {%- if 
theme_fixed_sidebar|lower == 'true' %} diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 9dfcec7b72e4..6d615c0f3310 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,26 +1,26 @@ { - "updateTime": "2019-09-24T12:27:36.347590Z", + "updateTime": "2019-11-12T13:34:45.693013Z", "sources": [ { "generator": { "name": "artman", - "version": "0.37.0", - "dockerImage": "googleapis/artman@sha256:0f66008f69061ea6d41499e2a34da3fc64fc7c9798077e3a37158653a135d801" + "version": "0.41.1", + "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "fe6115fdfae318277426ec0e11b4b05e2b150723", - "internalRef": "270882829" + "sha": "f69562be0608904932bdcfbc5ad8b9a22d9dceb8", + "internalRef": "279774957" } }, { "template": { "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.5.2" + "version": "2019.10.17" } } ], From c7bbac0b1a145d3bef390cec977c7a7c090041e0 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 13 Nov 2019 12:20:25 -0800 Subject: [PATCH 0415/1197] chore(pubsub): update client configurations (via synth) (#9784) --- .../cloud/pubsub_v1/gapic/publisher_client_config.py | 8 ++++---- .../cloud/pubsub_v1/gapic/subscriber_client_config.py | 10 ++++++++-- packages/google-cloud-pubsub/synth.metadata | 6 +++--- 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 1b8982b639b2..055b3424e6cd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -31,8 +31,8 @@ 
"max_retry_delay_millis": 60000, "initial_rpc_timeout_millis": 5000, "rpc_timeout_multiplier": 1.3, - "max_rpc_timeout_millis": 600000, - "total_timeout_millis": 600000, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 60000, }, }, "methods": { @@ -51,9 +51,9 @@ "retry_codes_name": "publish", "retry_params_name": "messaging", "bundling": { - "element_count_threshold": 10, + "element_count_threshold": 100, "element_count_limit": 1000, - "request_byte_threshold": 1024, + "request_byte_threshold": 1048576, "request_byte_limit": 10485760, "delay_threshold_millis": 10, }, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 083a6c19a440..3955f2da1dcb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -4,7 +4,13 @@ "retry_codes": { "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], "non_idempotent": ["UNAVAILABLE"], - "none": [], + "streaming_pull": [ + "ABORTED", + "DEADLINE_EXCEEDED", + "INTERNAL", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE", + ], }, "retry_params": { "default": { @@ -78,7 +84,7 @@ }, "StreamingPull": { "timeout_millis": 900000, - "retry_codes_name": "none", + "retry_codes_name": "streaming_pull", "retry_params_name": "streaming_messaging", }, "ModifyPushConfig": { diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 6d615c0f3310..d732bd24e76a 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-11-12T13:34:45.693013Z", + "updateTime": "2019-11-13T13:23:51.614817Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"f69562be0608904932bdcfbc5ad8b9a22d9dceb8", - "internalRef": "279774957" + "sha": "88bbf96b90089994ed16208a0f38cdd07f743742", + "internalRef": "280134477" } }, { From 817cc2a1d196170cc5f738a4a001a97d99536025 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 22 Nov 2019 21:21:16 +0100 Subject: [PATCH 0416/1197] fix(pubsub): fix messages delivered multiple times despite a long ACK deadline (#9525) * fix(pubsub): lease-manage all received messages This is to prevent the messages that are put on hold from unnecessarily timing out too soon, causing the backend to re-send them. * Exclude on hold messages from load calculation Even the messages received that exceed the maximum load (as defined by flow control) must be lease-mananged to avoid unnecessary ACK deadline expirations, but since they are not dispatched (yet) to user callbacks, they should not contribute to the overall load. Without this change, the total load could be overestimated, resulting in an indefinitely paused message stream, and messages not being dispatched to callbacks when they should be. * Use histogram to set default stream ACK deadline With all the messages lease-managed (even those on hold), there is no need to have a fixed default value. * Add warning if internal bytes count is negative This should not happen, but if it does, it is a bug in the StreamingPullManager logic, and we should know about it. 
--- .../_protocol/streaming_pull_manager.py | 101 ++++++++++-------- packages/google-cloud-pubsub/tests/system.py | 16 ++- .../subscriber/test_streaming_pull_manager.py | 75 ++++++++----- 3 files changed, 112 insertions(+), 80 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index d3b1d6f51eb6..f3798c05610e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -51,13 +51,6 @@ _RESUME_THRESHOLD = 0.8 """The load threshold below which to resume the incoming message stream.""" -_DEFAULT_STREAM_ACK_DEADLINE = 60 -"""The default message acknowledge deadline in seconds for incoming message stream. - -This default deadline is dynamically modified for the messages that are added -to the lease management. -""" - def _maybe_wrap_exception(exception): """Wraps a gRPC exception class, if needed.""" @@ -135,9 +128,15 @@ def __init__( # because the FlowControl limits have been hit. self._messages_on_hold = queue.Queue() + # the total number of bytes consumed by the messages currently on hold + self._on_hold_bytes = 0 + # A lock ensuring that pausing / resuming the consumer are both atomic # operations that cannot be executed concurrently. Needed for properly - # syncing these operations with the current leaser load. + # syncing these operations with the current leaser load. Additionally, + # the lock is used to protect modifications of internal data that + # affects the load computation, i.e. the count and size of the messages + # currently on hold. self._pause_resume_lock = threading.Lock() # The threads created in ``.open()``. 
@@ -218,10 +217,18 @@ def load(self): if self._leaser is None: return 0.0 + # Messages that are temporarily put on hold are not being delivered to + # user's callbacks, thus they should not contribute to the flow control + # load calculation. + # However, since these messages must still be lease-managed to avoid + # unnecessary ACK deadline expirations, their count and total size must + # be subtracted from the leaser's values. return max( [ - self._leaser.message_count / self._flow_control.max_messages, - self._leaser.bytes / self._flow_control.max_bytes, + (self._leaser.message_count - self._messages_on_hold.qsize()) + / self._flow_control.max_messages, + (self._leaser.bytes - self._on_hold_bytes) + / self._flow_control.max_bytes, ] ) @@ -292,13 +299,19 @@ def _maybe_release_messages(self): except queue.Empty: break - self.leaser.add( - [requests.LeaseRequest(ack_id=msg.ack_id, byte_size=msg.size)] - ) + self._on_hold_bytes -= msg.size + + if self._on_hold_bytes < 0: + _LOGGER.warning( + "On hold bytes was unexpectedly negative: %s", self._on_hold_bytes + ) + self._on_hold_bytes = 0 + _LOGGER.debug( - "Released held message to leaser, scheduling callback for it, " - "still on hold %s.", + "Released held message, scheduling callback for it, " + "still on hold %s (bytes %s).", self._messages_on_hold.qsize(), + self._on_hold_bytes, ) self._scheduler.schedule(self._callback, msg) @@ -392,17 +405,7 @@ def open(self, callback, on_callback_error): ) # Create the RPC - - # We must use a fixed value for the ACK deadline, as we cannot read it - # from the subscription. The latter would require `pubsub.subscriptions.get` - # permission, which is not granted to the default subscriber role - # `roles/pubsub.subscriber`. - # See also https://github.com/googleapis/google-cloud-python/issues/9339 - # - # When dynamic lease management is enabled for the "on hold" messages, - # the default stream ACK deadline should again be set based on the - # historic ACK timing data, i.e. 
`self.ack_histogram.percentile(99)`. - stream_ack_deadline_seconds = _DEFAULT_STREAM_ACK_DEADLINE + stream_ack_deadline_seconds = self.ack_histogram.percentile(99) get_initial_request = functools.partial( self._get_initial_request, stream_ack_deadline_seconds @@ -540,40 +543,46 @@ def _on_response(self, response): the callback for each message using the executor. """ _LOGGER.debug( - "Processing %s received message(s), currenty on hold %s.", + "Processing %s received message(s), currenty on hold %s (bytes %s).", len(response.received_messages), self._messages_on_hold.qsize(), + self._on_hold_bytes, ) + # Immediately (i.e. without waiting for the auto lease management) + # modack the messages we received, as this tells the server that we've + # received them. + items = [ + requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) + for message in response.received_messages + ] + self._dispatcher.modify_ack_deadline(items) + invoke_callbacks_for = [] for received_message in response.received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( received_message.message, received_message.ack_id, self._scheduler.queue ) - if self.load < _MAX_LOAD: - req = requests.LeaseRequest( - ack_id=message.ack_id, byte_size=message.size - ) - self.leaser.add([req]) - invoke_callbacks_for.append(message) - self.maybe_pause_consumer() - else: - self._messages_on_hold.put(message) - - # Immediately (i.e. without waiting for the auto lease management) - # modack the messages we received and not put on hold, as this tells - # the server that we've received them. - items = [ - requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in invoke_callbacks_for - ] - self._dispatcher.modify_ack_deadline(items) + # Making a decision based on the load, and modifying the data that + # affects the load -> needs a lock, as that state can be modified + # by different threads. 
+ with self._pause_resume_lock: + if self.load < _MAX_LOAD: + invoke_callbacks_for.append(message) + else: + self._messages_on_hold.put(message) + self._on_hold_bytes += message.size + + req = requests.LeaseRequest(ack_id=message.ack_id, byte_size=message.size) + self.leaser.add([req]) + self.maybe_pause_consumer() _LOGGER.debug( - "Scheduling callbacks for %s new messages, new total on hold %s.", + "Scheduling callbacks for %s new messages, new total on hold %s (bytes %s).", len(invoke_callbacks_for), self._messages_on_hold.qsize(), + self._on_hold_bytes, ) for msg in invoke_callbacks_for: self._scheduler.schedule(self._callback, msg) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index fd7473e1e53b..59e5e3fe83a4 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -382,10 +382,6 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) - @pytest.mark.xfail( - reason="The default stream ACK deadline is static and received messages " - "exceeding FlowControl.max_messages are currently not lease managed." - ) def test_streaming_pull_ack_deadline( self, publisher, subscriber, project, topic_path, subscription_path, cleanup ): @@ -400,7 +396,7 @@ def test_streaming_pull_ack_deadline( # Subscribe to the topic. This must happen before the messages # are published. 
subscriber.create_subscription( - subscription_path, topic_path, ack_deadline_seconds=240 + subscription_path, topic_path, ack_deadline_seconds=45 ) # publish some messages and wait for completion @@ -408,7 +404,7 @@ def test_streaming_pull_ack_deadline( # subscribe to the topic callback = StreamingPullCallback( - processing_time=70, # more than the default stream ACK deadline (60s) + processing_time=13, # more than the default stream ACK deadline (10s) resolve_at_msg_count=3, # one more than the published messages count ) flow_control = types.FlowControl(max_messages=1) @@ -416,13 +412,13 @@ def test_streaming_pull_ack_deadline( subscription_path, callback, flow_control=flow_control ) - # We expect to process the first two messages in 2 * 70 seconds, and + # We expect to process the first two messages in 2 * 13 seconds, and # any duplicate message that is re-sent by the backend in additional - # 70 seconds, totalling 210 seconds (+ overhead) --> if there have been - # no duplicates in 240 seconds, we can reasonably assume that there + # 13 seconds, totalling 39 seconds (+ overhead) --> if there have been + # no duplicates in 60 seconds, we can reasonably assume that there # won't be any. 
try: - callback.done_future.result(timeout=240) + callback.done_future.result(timeout=60) except exceptions.TimeoutError: # future timed out, because we received no excessive messages assert sorted(callback.seen_message_ids) == [1, 2] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 114663e7b8e2..1732ec6cd4b3 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -233,13 +233,15 @@ def test__maybe_release_messages_on_overload(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) - # Ensure load is exactly 1.0 (to verify that >= condition is used) - _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) - _leaser.message_count = 10 - _leaser.bytes = 1000 msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=11) manager._messages_on_hold.put(msg) + manager._on_hold_bytes = msg.size + + # Ensure load is exactly 1.0 (to verify that >= condition is used) + _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + _leaser.message_count = 10 + _leaser.bytes = 1000 + msg.size manager._maybe_release_messages() @@ -254,18 +256,20 @@ def test__maybe_release_messages_below_overload(): ) manager._callback = mock.sentinel.callback - # init leaser message count to 8 to leave room for 2 more messages + # Init leaser message count to 11, so that when subtracting the 3 messages + # that are on hold, there is still room for another 2 messages before the + # max load is hit. 
_leaser = manager._leaser = mock.create_autospec(leaser.Leaser) - fake_leaser_add(_leaser, init_msg_count=8, assumed_msg_size=25) - _leaser.add = mock.Mock(wraps=_leaser.add) # to spy on calls + fake_leaser_add(_leaser, init_msg_count=11, assumed_msg_size=10) messages = [ - mock.create_autospec(message.Message, instance=True, ack_id="ack_foo", size=11), - mock.create_autospec(message.Message, instance=True, ack_id="ack_bar", size=22), - mock.create_autospec(message.Message, instance=True, ack_id="ack_baz", size=33), + mock.create_autospec(message.Message, instance=True, ack_id="ack_foo", size=10), + mock.create_autospec(message.Message, instance=True, ack_id="ack_bar", size=10), + mock.create_autospec(message.Message, instance=True, ack_id="ack_baz", size=10), ] for msg in messages: manager._messages_on_hold.put(msg) + manager._on_hold_bytes = 3 * 10 # the actual call of MUT manager._maybe_release_messages() @@ -274,13 +278,6 @@ def test__maybe_release_messages_below_overload(): msg = manager._messages_on_hold.get_nowait() assert msg.ack_id == "ack_baz" - assert len(_leaser.add.mock_calls) == 2 - expected_calls = [ - mock.call([requests.LeaseRequest(ack_id="ack_foo", byte_size=11)]), - mock.call([requests.LeaseRequest(ack_id="ack_bar", byte_size=22)]), - ] - _leaser.add.assert_has_calls(expected_calls) - schedule_calls = manager._scheduler.schedule.mock_calls assert len(schedule_calls) == 2 for _, call_args, _ in schedule_calls: @@ -289,6 +286,34 @@ def test__maybe_release_messages_below_overload(): assert call_args[1].ack_id in ("ack_foo", "ack_bar") +def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) + + msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=17) + manager._messages_on_hold.put(msg) + manager._on_hold_bytes = 5 # too low for some reason + + _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + 
_leaser.message_count = 3 + _leaser.bytes = 150 + + with caplog.at_level(logging.WARNING): + manager._maybe_release_messages() + + expected_warnings = [ + record.message.lower() + for record in caplog.records + if "unexpectedly negative" in record.message + ] + assert len(expected_warnings) == 1 + assert "on hold bytes" in expected_warnings[0] + assert "-12" in expected_warnings[0] + + assert manager._on_hold_bytes == 0 # should be auto-corrected + + def test_send_unary(): manager = make_manager() manager._UNARY_REQUESTS = True @@ -404,8 +429,6 @@ def test_heartbeat_inactive(): "google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True ) def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): - stream_ack_deadline = streaming_pull_manager._DEFAULT_STREAM_ACK_DEADLINE - manager = make_manager() manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) @@ -435,7 +458,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi ) initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] assert initial_request_arg.func == manager._get_initial_request - assert initial_request_arg.args[0] == stream_ack_deadline + assert initial_request_arg.args[0] == 10 # the default stream ACK timeout assert not manager._client.api.get_subscription.called resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( @@ -668,13 +691,17 @@ def test__on_response_with_leaser_overload(): # are called in the expected way. 
manager._on_response(response) - # only the messages that are added to the lease management and dispatched to - # callbacks should have their ACK deadline extended + # all messages should be added to the lease management and have their ACK + # deadline extended, even those not dispatched to callbacks dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest("fack", 10)] + [ + requests.ModAckRequest("fack", 10), + requests.ModAckRequest("back", 10), + requests.ModAckRequest("zack", 10), + ] ) - # one message should be scheduled, the leaser capacity allows for it + # one message should be scheduled, the flow control limits allow for it schedule_calls = scheduler.schedule.mock_calls assert len(schedule_calls) == 1 call_args = schedule_calls[0][1] From 57fae7d6c636c6dea8b1352587fdcc4f009e5658 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 22 Nov 2019 21:23:37 +0100 Subject: [PATCH 0417/1197] fix(pubsub): split large (mod)ACK requests into smaller ones (#9594) * fix(pubsub): split large (mod)ACK requests into smaller ones There is a server-side limit on the maximum size of ACK and modACK requests, which can be hit if the leaser tries to manage too many messages in a single requests. This commit assures that such large requests are split into multiple smaller requests. * Decrease max ACK batch size to 2500 The previous limit of 3000 seems to be too optimistic, and the request size limit is still hit. Reducing the batch size to 2500 fixes the problem. * Add additional test assertions about sent ACK IDs The tests should also check that each message is (MOD)ACK-ed exactly once. 
--- .../subscriber/_protocol/dispatcher.py | 47 +++++++++++---- .../pubsub_v1/subscriber/test_dispatcher.py | 57 +++++++++++++++++++ 2 files changed, 94 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 2b2574829306..b1d8429cba58 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -13,9 +13,12 @@ # limitations under the License. from __future__ import absolute_import +from __future__ import division import collections +import itertools import logging +import math import threading from google.cloud.pubsub_v1 import types @@ -34,6 +37,18 @@ """The maximum amount of time in seconds to wait for additional request items before processing the next batch of requests.""" +_ACK_IDS_BATCH_SIZE = 2500 +"""The maximum number of ACK IDs to send in a single StreamingPullRequest. + +The backend imposes a maximum request size limit of 524288 bytes (512 KiB) per +acknowledge / modifyAckDeadline request. ACK IDs have a maximum size of 176 +bytes, thus we cannot send more than 524288/176 ~= 2979 ACK IDs in a single +StreamingPullRequest message. + +Accounting for some overhead, we should thus only send a maximum of 2500 ACK +IDs at a time. +""" + class Dispatcher(object): def __init__(self, manager, queue): @@ -119,9 +134,16 @@ def ack(self, items): if time_to_ack is not None: self._manager.ack_histogram.add(time_to_ack) - ack_ids = [item.ack_id for item in items] - request = types.StreamingPullRequest(ack_ids=ack_ids) - self._manager.send(request) + # We must potentially split the request into multiple smaller requests + # to avoid the server-side max request size limit.
+ ack_ids = (item.ack_id for item in items) + total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) + + for _ in range(total_chunks): + request = types.StreamingPullRequest( + ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE) + ) + self._manager.send(request) # Remove the message from lease management. self.drop(items) @@ -150,13 +172,18 @@ def modify_ack_deadline(self, items): Args: items(Sequence[ModAckRequest]): The items to modify. """ - ack_ids = [item.ack_id for item in items] - seconds = [item.seconds for item in items] - - request = types.StreamingPullRequest( - modify_deadline_ack_ids=ack_ids, modify_deadline_seconds=seconds - ) - self._manager.send(request) + # We must potentially split the request into multiple smaller requests + # to avoid the server-side max request size limit. + ack_ids = (item.ack_id for item in items) + seconds = (item.seconds for item in items) + total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) + + for _ in range(total_chunks): + request = types.StreamingPullRequest( + modify_deadline_ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE), + modify_deadline_seconds=itertools.islice(seconds, _ACK_IDS_BATCH_SIZE), + ) + self._manager.send(request) def nack(self, items): """Explicitly deny receipt of messages. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 0e1e9744f6d9..592a03c6422c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import collections import threading from google.cloud.pubsub_v1 import types @@ -95,6 +96,34 @@ def test_ack_no_time(): manager.ack_histogram.add.assert_not_called() +def test_ack_splitting_large_payload(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + items = [ + # use realistic lengths for ACK IDs (max 176 bytes) + requests.AckRequest(ack_id=str(i).zfill(176), byte_size=0, time_to_ack=20) + for i in range(5001) + ] + dispatcher_.ack(items) + + calls = manager.send.call_args_list + assert len(calls) == 3 + + all_ack_ids = {item.ack_id for item in items} + sent_ack_ids = collections.Counter() + + for call in calls: + message = call.args[0] + assert message.ByteSize() <= 524288 # server-side limit (2**19) + sent_ack_ids.update(message.ack_ids) + + assert set(sent_ack_ids) == all_ack_ids # all messages should have been ACK-ed + assert sent_ack_ids.most_common(1)[0][1] == 1 # each message ACK-ed exactly once + + def test_lease(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -153,6 +182,34 @@ def test_modify_ack_deadline(): ) +def test_modify_ack_deadline_splitting_large_payload(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + items = [ + # use realistic lengths for ACK IDs (max 176 bytes) + requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60) + for i in range(5001) + ] + dispatcher_.modify_ack_deadline(items) + + calls = manager.send.call_args_list + assert len(calls) == 3 + + all_ack_ids = {item.ack_id for item in items} + sent_ack_ids = collections.Counter() + + for call in calls: + message = call.args[0] + assert message.ByteSize() <= 524288 # server-side limit (2**19) + sent_ack_ids.update(message.modify_deadline_ack_ids) + + assert set(sent_ack_ids) == 
all_ack_ids # all messages should have been MODACK-ed + assert sent_ack_ids.most_common(1)[0][1] == 1 # each message MODACK-ed exactly once + + @mock.patch("threading.Thread", autospec=True) def test_start(thread): manager = mock.create_autospec( From 806fdd13d076dbf70293ce96eb117079663219af Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 5 Dec 2019 08:05:26 +0100 Subject: [PATCH 0418/1197] fix(pubsub): include request overhead when computing publish batch size overflow (#9911) * Clarify the description of BatchSettings.max_bytes * Include overhead in batch overflow calculation The maximum allowed size for a PublishRequest on the backend is lower than a mere sum of the byte sizes of individual messages. This commit adjusts the batch size overflow calculation to account for this overhead. It also caps the effective maximum BatchSetting.max_size value to 10_000_000 bytes (the limit on the backend). (credit also to GitHub @relud for outlining the main idea first in the issue description) * Access settings inside Batch in a consistent way. 
* Cleanup and refactor a few code snippets * Raise more specific error if message too large --- .../cloud/pubsub_v1/publisher/_batch/base.py | 5 +- .../pubsub_v1/publisher/_batch/thread.py | 41 ++++++++--- .../cloud/pubsub_v1/publisher/exceptions.py | 6 +- .../google/cloud/pubsub_v1/types.py | 4 +- packages/google-cloud-pubsub/tests/system.py | 44 +++++++++--- .../pubsub_v1/publisher/batch/test_thread.py | 68 ++++++++++++++++--- 6 files changed, 137 insertions(+), 31 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 4dc6ceec6a80..75f430b09421 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -75,9 +75,12 @@ def messages(self): def size(self): """Return the total size of all of the messages currently in the batch. + The size includes any overhead of the actual ``PublishRequest`` that is + sent to the backend. + Returns: int: The total size of all of the messages currently - in the batch, in bytes. + in the batch (including the request overhead), in bytes. 
""" raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 726e93166cda..4101bc518b0a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -29,6 +29,7 @@ _LOGGER = logging.getLogger(__name__) _CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) +_SERVER_PUBLISH_MAX_BYTES = 10 * 1000 * 1000 # max accepted size of PublishRequest class Batch(base.Batch): @@ -79,13 +80,17 @@ def __init__(self, client, topic, settings, autocommit=True): # in order to avoid race conditions self._futures = [] self._messages = [] - self._size = 0 self._status = base.BatchStatus.ACCEPTING_MESSAGES + # The initial size is not zero, we need to account for the size overhead + # of the PublishRequest message itself. + self._base_request_size = types.PublishRequest(topic=topic).ByteSize() + self._size = self._base_request_size + # If max latency is specified, start a thread to monitor the batch and # commit when the max latency is reached. self._thread = None - if autocommit and self._settings.max_latency < float("inf"): + if autocommit and self.settings.max_latency < float("inf"): self._thread = threading.Thread( name="Thread-MonitorBatchPublisher", target=self.monitor ) @@ -124,9 +129,12 @@ def settings(self): def size(self): """Return the total size of all of the messages currently in the batch. + The size includes any overhead of the actual ``PublishRequest`` that is + sent to the backend. + Returns: int: The total size of all of the messages currently - in the batch, in bytes. + in the batch (including the request overhead), in bytes. """ return self._size @@ -251,14 +259,14 @@ def _commit(self): def monitor(self): """Commit this batch after sufficient time has elapsed. 
- This simply sleeps for ``self._settings.max_latency`` seconds, + This simply sleeps for ``self.settings.max_latency`` seconds, and then calls commit unless the batch has already been committed. """ # NOTE: This blocks; it is up to the calling code to call it # in a separate thread. # Sleep for however long we should be waiting. - time.sleep(self._settings.max_latency) + time.sleep(self.settings.max_latency) _LOGGER.debug("Monitor is waking up") return self._commit() @@ -281,6 +289,10 @@ def publish(self, message): the :class:`~concurrent.futures.Future` interface or :data:`None`. If :data:`None` is returned, that signals that the batch cannot accept a message. + + Raises: + pubsub_v1.publisher.exceptions.MessageTooLargeError: If publishing + the ``message`` would exceed the max size limit on the backend. """ # Coerce the type, just in case. if not isinstance(message, types.PubsubMessage): @@ -292,12 +304,21 @@ def publish(self, message): if not self.will_accept(message): return future - new_size = self._size + message.ByteSize() + size_increase = types.PublishRequest(messages=[message]).ByteSize() + + if (self._base_request_size + size_increase) > _SERVER_PUBLISH_MAX_BYTES: + err_msg = ( + "The message being published would produce too large a publish " + "request that would exceed the maximum allowed size on the " + "backend ({} bytes).".format(_SERVER_PUBLISH_MAX_BYTES) + ) + raise exceptions.MessageTooLargeError(err_msg) + + new_size = self._size + size_increase new_count = len(self._messages) + 1 - overflow = ( - new_size > self.settings.max_bytes - or new_count >= self._settings.max_messages - ) + + size_limit = min(self.settings.max_bytes, _SERVER_PUBLISH_MAX_BYTES) + overflow = new_size > size_limit or new_count >= self.settings.max_messages if not self._messages or not overflow: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index 
adbfaaaa1ee1..be176bac2dba 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -22,4 +22,8 @@ class PublishError(GoogleAPICallError): pass -__all__ = ("PublishError", "TimeoutError") +class MessageTooLargeError(ValueError): + """Attempt to publish a message that would exceed the server max size limit.""" + + +__all__ = ("MessageTooLargeError", "PublishError", "TimeoutError") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 7f833660f6e2..2d238b42f797 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -48,7 +48,9 @@ BatchSettings.__doc__ = "The settings for batch publishing the messages." BatchSettings.max_bytes.__doc__ = ( "The maximum total size of the messages to collect before automatically " - "publishing the batch." + "publishing the batch, including any byte size overhead of the publish " + "request itself. The maximum value is bound by the server-side limit of " + "10_000_000 bytes." ) BatchSettings.max_latency.__doc__ = ( "The maximum number of seconds to wait for additional messages before " diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 59e5e3fe83a4..65baaf016407 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -74,24 +74,52 @@ def cleanup(): def test_publish_messages(publisher, topic_path, cleanup): - futures = [] # Make sure the topic gets deleted. 
cleanup.append((publisher.delete_topic, topic_path)) publisher.create_topic(topic_path) - for index in six.moves.range(500): - futures.append( - publisher.publish( - topic_path, - b"The hail in Wales falls mainly on the snails.", - num=str(index), - ) + + futures = [ + publisher.publish( + topic_path, b"The hail in Wales falls mainly on the snails.", num=str(i) ) + for i in six.moves.range(500) + ] + for future in futures: result = future.result() assert isinstance(result, six.string_types) +def test_publish_large_messages(publisher, topic_path, cleanup): + # Make sure the topic gets deleted. + cleanup.append((publisher.delete_topic, topic_path)) + + # Each message should be smaller than 10**7 bytes (the server side limit for + # PublishRequest), but all messages combined in a PublishRequest should + # slightly exceed that threshold to make sure the publish code handles these + # cases well. + # Mind that the total PublishRequest size must still be smaller than + # 10 * 1024 * 1024 bytes in order to not exceed the max request body size limit. + msg_data = b"x" * (2 * 10 ** 6) + + publisher.batch_settings = types.BatchSettings( + max_bytes=11 * 1000 * 1000, # more than the server limit of 10 ** 7 + max_latency=2.0, # so that autocommit happens after publishing all messages + max_messages=100, + ) + publisher.create_topic(topic_path) + + futures = [publisher.publish(topic_path, msg_data, num=str(i)) for i in range(5)] + + # If the publishing logic correctly split all messages into more than a + # single batch despite a high BatchSettings.max_bytes limit, there should + # be no "InvalidArgument: request_size is too large" error. 
+ for future in futures: + result = future.result(timeout=10) + assert isinstance(result, six.string_types) # the message ID + + def test_subscribe_to_messages( publisher, topic_path, subscriber, subscription_path, cleanup ): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 60425e748043..f51b314af6df 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -34,13 +34,15 @@ def create_client(): return publisher.Client(credentials=creds) -def create_batch(autocommit=False, **batch_settings): +def create_batch(autocommit=False, topic="topic_name", **batch_settings): """Return a batch object suitable for testing. Args: autocommit (bool): Whether the batch should commit after ``max_latency`` seconds. By default, this is ``False`` for unit testing. + topic (str): The name of the topic the batch should publish + the messages to. batch_settings (dict): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. 
@@ -49,7 +51,7 @@ def create_batch(autocommit=False, **batch_settings): """ client = create_client() settings = types.BatchSettings(**batch_settings) - return Batch(client, "topic_name", settings, autocommit=autocommit) + return Batch(client, topic, settings, autocommit=autocommit) def test_init(): @@ -299,8 +301,8 @@ def test_monitor_already_committed(): assert batch._status == status -def test_publish(): - batch = create_batch() +def test_publish_updating_batch_size(): + batch = create_batch(topic="topic_foo") messages = ( types.PubsubMessage(data=b"foobarbaz"), types.PubsubMessage(data=b"spameggs"), @@ -314,22 +316,27 @@ def test_publish(): assert len(batch.messages) == 3 assert batch._futures == futures - # The size should have been incremented by the sum of the size of the - # messages. - expected_size = sum([message_pb.ByteSize() for message_pb in messages]) - assert batch.size == expected_size + # The size should have been incremented by the sum of the size + # contributions of each message to the PublishRequest. + base_request_size = types.PublishRequest(topic="topic_foo").ByteSize() + expected_request_size = base_request_size + sum( + types.PublishRequest(messages=[msg]).ByteSize() for msg in messages + ) + + assert batch.size == expected_request_size assert batch.size > 0 # I do not always trust protobuf. def test_publish_not_will_accept(): - batch = create_batch(max_messages=0) + batch = create_batch(topic="topic_foo", max_messages=0) + base_request_size = types.PublishRequest(topic="topic_foo").ByteSize() # Publish the message. 
message = types.PubsubMessage(data=b"foobarbaz") future = batch.publish(message) assert future is None - assert batch.size == 0 + assert batch.size == base_request_size assert batch.messages == [] assert batch._futures == [] @@ -361,6 +368,47 @@ def test_publish_exceed_max_messages(): assert batch._futures == futures +@mock.patch.object(thread, "_SERVER_PUBLISH_MAX_BYTES", 1000) +def test_publish_single_message_size_exceeds_server_size_limit(): + batch = create_batch( + topic="topic_foo", + max_messages=1000, + max_bytes=1000 * 1000, # way larger than (mocked) server side limit + ) + + big_message = types.PubsubMessage(data=b"x" * 984) + + request_size = types.PublishRequest( + topic="topic_foo", messages=[big_message] + ).ByteSize() + assert request_size == 1001 # sanity check, just above the (mocked) server limit + + with pytest.raises(exceptions.MessageTooLargeError): + batch.publish(big_message) + + +@mock.patch.object(thread, "_SERVER_PUBLISH_MAX_BYTES", 1000) +def test_publish_total_messages_size_exceeds_server_size_limit(): + batch = create_batch(topic="topic_foo", max_messages=10, max_bytes=1500) + + messages = ( + types.PubsubMessage(data=b"x" * 500), + types.PubsubMessage(data=b"x" * 600), + ) + + # Sanity check - request size is still below BatchSettings.max_bytes, + # but it exceeds the server-side size limit. + request_size = types.PublishRequest(topic="topic_foo", messages=messages).ByteSize() + assert 1000 < request_size < 1500 + + with mock.patch.object(batch, "commit") as fake_commit: + batch.publish(messages[0]) + batch.publish(messages[1]) + + # The server side limit should kick in and cause a commit. 
+ fake_commit.assert_called_once() + + def test_publish_dict(): batch = create_batch() future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}}) From fbf9f1caf1ef2b3970f4052a33afae29169dea40 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Tue, 10 Dec 2019 19:09:17 -0300 Subject: [PATCH 0419/1197] chore(pubsub): release 1.1.0 (#9950) * Release 1.1.0 * Fix commit message formatting. Change version from 1.0.3 to 1.1.0 because the stop method was added to the publisher client. --- packages/google-cloud-pubsub/CHANGELOG.md | 25 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 09716f05a9cf..ddca773a09a7 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,31 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 1.1.0 + +12-09-2019 18:51 PST + +### Implementation Changes +- Update client configurations (via synth). ([#9784](https://github.com/googleapis/google-cloud-python/pull/9784)) +- Include request overhead when computing publish batch size overflow. ([#9911](https://github.com/googleapis/google-cloud-python/pull/9911)) +- Split large (mod)ACK requests into smaller ones. ([#9594](https://github.com/googleapis/google-cloud-python/pull/9594)) +- Fix messages delivered multiple times despite a long ACK deadline. ([#9525](https://github.com/googleapis/google-cloud-python/pull/9525)) +- Update batching and flow control parameters to be same as the other client libraries. ([#9597](https://github.com/googleapis/google-cloud-python/pull/9597)) +- Add `StreamingPullManager._should_terminate`. ([#9335](https://github.com/googleapis/google-cloud-python/pull/9335)) + +### New Features +- Add stop method. 
([#9365](https://github.com/googleapis/google-cloud-python/pull/9365)) + +### Dependencies +- Add Python 2 sunset banner to documentation. ([#9036](https://github.com/googleapis/google-cloud-python/pull/9036)) + +### Documentation +- Change spacing in docs templates (via synth). ([#9759](https://github.com/googleapis/google-cloud-python/pull/9759)) + +### Internal / Testing Changes +- Refactor fake leaser test helper. ([#9632](https://github.com/googleapis/google-cloud-python/pull/9632)) +- Add subscriber role test for streaming. ([#9507](https://github.com/googleapis/google-cloud-python/pull/9507)) + ## 1.0.2 09-30-2019 11:57 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 45e2cc04c07d..e26fb4b75778 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.0.2" +version = "1.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 225bcc58e7b830183d96871c6ebb5e34cc5cd5c6 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 16 Dec 2019 23:26:18 +0100 Subject: [PATCH 0420/1197] fix(pubsub): handle None in on response callback (#9982) If the underlying RPC is shut down while pulling the messages with a streaming pull, the StreamingPullManager's _on_response() method is invoked with None (as opposed to a StreamingPullResponse instance). This commit handles this case and prevents an error in a background thread on streaming pull manager shutdown.
--- .../_protocol/streaming_pull_manager.py | 7 +++++++ .../subscriber/test_streaming_pull_manager.py | 15 +++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index f3798c05610e..7a08a63887a2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -542,6 +542,13 @@ def _on_response(self, response): After the messages have all had their ack deadline updated, execute the callback for each message using the executor. """ + if response is None: + _LOGGER.debug( + "Response callback invoked with None, likely due to a " + "transport shutdown." + ) + return + _LOGGER.debug( "Processing %s received message(s), currenty on hold %s (bytes %s).", len(response.received_messages), diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 1732ec6cd4b3..8bb53f15068f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -721,6 +721,21 @@ def test__on_response_with_leaser_overload(): assert msg.message_id in ("2", "3") +def test__on_response_none_data(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10) + + manager._on_response(response=None) + + scheduler.schedule.assert_not_called() + assert "callback 
invoked with None" in caplog.text + + def test_retryable_stream_errors(): # Make sure the config matches our hard-coded tuple of exceptions. interfaces = subscriber_client_config.config["interfaces"] From 5d3289c4079e049c62e279c15d7a7cfd32545004 Mon Sep 17 00:00:00 2001 From: Hugo van Kemenade Date: Wed, 15 Jan 2020 17:09:20 +0200 Subject: [PATCH 0421/1197] fix: replace unsafe six.PY3 with PY2 for better future compatibility with Python 4 (#10081) * fix: fix for Python 4: replace unsafe six.PY3 with PY2 * Fix wording --- packages/google-cloud-pubsub/synth.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index a65bf2bf4f56..88ac4a8d4f7e 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -127,10 +127,10 @@ "google/cloud/pubsub_v1/gapic/publisher_client.py", "class PublisherClient", """# TODO: remove conditional import after Python 2 support is dropped -if six.PY3: - from collections.abc import Mapping -else: +if six.PY2: from collections import Mapping +else: + from collections.abc import Mapping def _merge_dict(d1, d2): From e170be159a94399556fad93cc926b155acca6696 Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Thu, 16 Jan 2020 17:02:55 +0300 Subject: [PATCH 0422/1197] docs(pubsub): tiny spelling mistake fix (#10163) --- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 7a08a63887a2..6cf5b6e8f2d9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -285,7 +285,7 @@ def 
_maybe_release_messages(self): would allow. Each released message is added to the lease management, and the user callback is scheduled for it. - If there are currently no messageges on hold, or if the leaser is + If there are currently no messages on hold, or if the leaser is already overloaded, this method is effectively a no-op. The method assumes the caller has acquired the ``_pause_resume_lock``. From 7ce019480c82b0b56625a16fde8c239499147559 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 16 Jan 2020 21:34:14 +0000 Subject: [PATCH 0423/1197] chore(pubsub): declaratively drop Python 3.4 support (#10168) --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e26fb4b75778..69f19b3db72e 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -84,7 +84,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", include_package_data=True, zip_safe=False, ) From 4153f61c148f9d935aed250b8700dd6ea837375a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 21 Jan 2020 08:04:29 -0800 Subject: [PATCH 0424/1197] chore(pubsub): add Python 3.8 unit tests run (via synth) --- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index a2eefbb6765f..7949a4e3925a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -86,7 +86,7 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7"]) +@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) def unit(session): """Run the 
unit test suite.""" default(session) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index d732bd24e76a..48f9b92bf006 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,19 +1,19 @@ { - "updateTime": "2019-11-13T13:23:51.614817Z", + "updateTime": "2020-01-08T13:27:29.257325Z", "sources": [ { "generator": { "name": "artman", - "version": "0.41.1", - "dockerImage": "googleapis/artman@sha256:545c758c76c3f779037aa259023ec3d1ef2d57d2c8cd00a222cb187d63ceac5e" + "version": "0.43.0", + "dockerImage": "googleapis/artman@sha256:264654a37596a44b0668b8ce6ac41082d713f6ee150b3fc6425fa78cc64e4f20" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "88bbf96b90089994ed16208a0f38cdd07f743742", - "internalRef": "280134477" + "sha": "08b488e0660c59842a7dee0e3e2b65d9e3a514a9", + "internalRef": "288625007" } }, { From 9848ce801ada22aaaef602da0bbac56469ca1da7 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Thu, 30 Jan 2020 17:03:47 -0500 Subject: [PATCH 0425/1197] feat(pubsub): add delivery attempt property to message object received by user code (#10205) - Return None when a DeadLetterPolicy hasn't been set on the subscription. 
--- .../_protocol/streaming_pull_manager.py | 5 ++- .../cloud/pubsub_v1/subscriber/message.py | 30 +++++++++++++++++- .../unit/pubsub_v1/subscriber/test_message.py | 20 +++++++++--- .../subscriber/test_streaming_pull_manager.py | 31 +++++++++++++++++++ 4 files changed, 80 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 6cf5b6e8f2d9..26764b1a9101 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -569,7 +569,10 @@ def _on_response(self, response): for received_message in response.received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, received_message.ack_id, self._scheduler.queue + received_message.message, + received_message.ack_id, + received_message.delivery_attempt, + self._scheduler.queue, ) # Making a decision based on the load, and modifying the data that # affects the load -> needs a lock, as that state can be modified diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 41bc42755ad7..6dc7bc443b59 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -70,7 +70,7 @@ class Message(object): published. """ - def __init__(self, message, ack_id, request_queue): + def __init__(self, message, ack_id, delivery_attempt, request_queue): """Construct the Message. .. note:: @@ -82,12 +82,16 @@ def __init__(self, message, ack_id, request_queue): message (~.pubsub_v1.types.PubsubMessage): The message received from Pub/Sub. 
ack_id (str): The ack_id received from Pub/Sub. + delivery_attempt (int): The delivery attempt counter received + from Pub/Sub if a DeadLetterPolicy is set on the subscription, + and zero otherwise. request_queue (queue.Queue): A queue provided by the policy that can accept requests; the policy is responsible for handling those requests. """ self._message = message self._ack_id = ack_id + self._delivery_attempt = delivery_attempt if delivery_attempt > 0 else None self._request_queue = request_queue self.message_id = message.message_id @@ -162,6 +166,30 @@ def ack_id(self): """str: the ID used to ack the message.""" return self._ack_id + @property + def delivery_attempt(self): + """The delivery attempt counter is 1 + (the sum of number of NACKs + and number of ack_deadline exceeds) for this message. It is set to None + if a DeadLetterPolicy is not set on the subscription. + + A NACK is any call to ModifyAckDeadline with a 0 deadline. An ack_deadline + exceeds event is whenever a message is not acknowledged within + ack_deadline. Note that ack_deadline is initially + Subscription.ackDeadlineSeconds, but may get extended automatically by + the client library. + + The first delivery of a given message will have this value as 1. The value + is calculated at best effort and is approximate. + + EXPERIMENTAL: This feature is part of a closed alpha release. This + API might be changed in backward-incompatible ways and is not recommended + for production use. It is not subject to any SLA or deprecation policy. + + Returns: + Optional[int]: The delivery attempt counter or None. + """ + return self._delivery_attempt + def ack(self): """Acknowledge the given message. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 4bb3329a29f0..fd23deef06c9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -33,11 +33,11 @@ PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id="ACKID", **attrs): +def create_message(data, ack_id="ACKID", delivery_attempt=0, **attrs): with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS msg = message.Message( - types.PubsubMessage( + message=types.PubsubMessage( attributes=attrs, data=data, message_id="message_id", @@ -45,8 +45,9 @@ def create_message(data, ack_id="ACKID", **attrs): seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 ), ), - ack_id, - queue.Queue(), + ack_id=ack_id, + delivery_attempt=delivery_attempt, + request_queue=queue.Queue(), ) return msg @@ -72,6 +73,17 @@ def test_ack_id(): assert msg.ack_id == ack_id +def test_delivery_attempt(): + delivery_attempt = 10 + msg = create_message(b"foo", delivery_attempt=delivery_attempt) + assert msg.delivery_attempt == delivery_attempt + + +def test_delivery_attempt_is_none(): + msg = create_message(b"foo", delivery_attempt=0) + assert msg.delivery_attempt is None + + def test_publish_time(): msg = create_message(b"foo") assert msg.publish_time == PUBLISHED diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 8bb53f15068f..6f8a04ac9935 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -627,6 +627,37 @@ def 
test__get_initial_request_wo_leaser(): assert initial_request.modify_deadline_seconds == [] +def test__on_response_delivery_attempt(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + # Set up the messages. + response = types.StreamingPullResponse( + received_messages=[ + types.ReceivedMessage( + ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + ), + types.ReceivedMessage( + ack_id="back", + message=types.PubsubMessage(data=b"bar", message_id="2"), + delivery_attempt=6, + ), + ] + ) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + manager._on_response(response) + + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 2 + msg1 = schedule_calls[0][1][1] + assert msg1.delivery_attempt is None + msg2 = schedule_calls[1][1][1] + assert msg2.delivery_attempt == 6 + + def test__on_response_no_leaser_overload(): manager, _, dispatcher, leaser, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback From 66bdb5bab0debe2f322ff2494007fc9316a1d6bc Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 3 Feb 2020 20:51:32 +0000 Subject: [PATCH 0426/1197] chore: add split repo templates (#1) * chore: add split repo templates * Reduce unit tests coverage threshold to 99% * Fix system tests by including test_utils directory --- .../.github/CONTRIBUTING.md | 28 ++ .../.github/ISSUE_TEMPLATE/bug_report.md | 44 +++ .../.github/ISSUE_TEMPLATE/feature_request.md | 18 ++ .../.github/ISSUE_TEMPLATE/support_request.md | 7 + .../.github/PULL_REQUEST_TEMPLATE.md | 7 + .../.github/release-please.yml | 1 + packages/google-cloud-pubsub/.gitignore | 58 ++++ packages/google-cloud-pubsub/.kokoro/build.sh | 39 +++ .../.kokoro/continuous/common.cfg | 27 ++ .../.kokoro/continuous/continuous.cfg | 1 + .../.kokoro/docs/common.cfg | 48 +++ .../google-cloud-pubsub/.kokoro/docs/docs.cfg | 1 + 
.../.kokoro/presubmit/common.cfg | 27 ++ .../.kokoro/presubmit/presubmit.cfg | 1 + .../.kokoro/publish-docs.sh | 57 ++++ .../google-cloud-pubsub/.kokoro/release.sh | 34 +++ .../.kokoro/release/common.cfg | 64 ++++ .../.kokoro/release/release.cfg | 1 + .../google-cloud-pubsub/.kokoro/trampoline.sh | 23 ++ .../google-cloud-pubsub/.repo-metadata.json | 2 +- .../google-cloud-pubsub/CODE_OF_CONDUCT.md | 44 +++ packages/google-cloud-pubsub/CONTRIBUTING.rst | 279 ++++++++++++++++++ packages/google-cloud-pubsub/MANIFEST.in | 1 + packages/google-cloud-pubsub/docs/conf.py | 24 +- packages/google-cloud-pubsub/noxfile.py | 12 +- packages/google-cloud-pubsub/renovate.json | 5 + packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 12 +- packages/google-cloud-pubsub/synth.py | 12 +- .../test_utils/credentials.json.enc | 49 +++ .../scripts/circleci/get_tagged_package.py | 64 ++++ .../scripts/circleci/twine_upload.sh | 36 +++ .../test_utils/scripts/get_target_packages.py | 268 +++++++++++++++++ .../scripts/get_target_packages_kokoro.py | 98 ++++++ .../test_utils/scripts/run_emulator.py | 199 +++++++++++++ .../test_utils/scripts/update_docs.sh | 93 ++++++ .../google-cloud-pubsub/test_utils/setup.py | 64 ++++ .../test_utils/test_utils/__init__.py | 0 .../test_utils/test_utils/imports.py | 38 +++ .../test_utils/test_utils/retry.py | 207 +++++++++++++ .../test_utils/test_utils/system.py | 81 +++++ .../test_utils/test_utils/vpcsc_config.py | 118 ++++++++ 42 files changed, 2166 insertions(+), 28 deletions(-) create mode 100644 packages/google-cloud-pubsub/.github/CONTRIBUTING.md create mode 100644 packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md create mode 100644 packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md create mode 100644 packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md create mode 100644 packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md create mode 100644 
packages/google-cloud-pubsub/.github/release-please.yml create mode 100644 packages/google-cloud-pubsub/.gitignore create mode 100755 packages/google-cloud-pubsub/.kokoro/build.sh create mode 100644 packages/google-cloud-pubsub/.kokoro/continuous/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/docs/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/docs/docs.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg create mode 100755 packages/google-cloud-pubsub/.kokoro/publish-docs.sh create mode 100755 packages/google-cloud-pubsub/.kokoro/release.sh create mode 100644 packages/google-cloud-pubsub/.kokoro/release/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/release/release.cfg create mode 100755 packages/google-cloud-pubsub/.kokoro/trampoline.sh create mode 100644 packages/google-cloud-pubsub/CODE_OF_CONDUCT.md create mode 100644 packages/google-cloud-pubsub/CONTRIBUTING.rst create mode 100644 packages/google-cloud-pubsub/renovate.json create mode 100644 packages/google-cloud-pubsub/test_utils/credentials.json.enc create mode 100644 packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py create mode 100755 packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh create mode 100644 packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py create mode 100644 packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py create mode 100644 packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py create mode 100755 packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh create mode 100644 packages/google-cloud-pubsub/test_utils/setup.py create mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/__init__.py create mode 100644 
packages/google-cloud-pubsub/test_utils/test_utils/imports.py create mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/retry.py create mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/system.py create mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py diff --git a/packages/google-cloud-pubsub/.github/CONTRIBUTING.md b/packages/google-cloud-pubsub/.github/CONTRIBUTING.md new file mode 100644 index 000000000000..939e5341e74d --- /dev/null +++ b/packages/google-cloud-pubsub/.github/CONTRIBUTING.md @@ -0,0 +1,28 @@ +# How to Contribute + +We'd love to accept your patches and contributions to this project. There are +just a few small guidelines you need to follow. + +## Contributor License Agreement + +Contributions to this project must be accompanied by a Contributor License +Agreement. You (or your employer) retain the copyright to your contribution; +this simply gives us permission to use and redistribute your contributions as +part of the project. Head over to to see +your current agreements on file or to sign a new one. + +You generally only need to submit a CLA once, so if you've already submitted one +(even if it was for a different project), you probably don't need to do it +again. + +## Code reviews + +All submissions, including submissions by project members, require review. We +use GitHub pull requests for this purpose. Consult +[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more +information on using pull requests. + +## Community Guidelines + +This project follows [Google's Open Source Community +Guidelines](https://opensource.google.com/conduct/). 
diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000000..2c59dab147dc --- /dev/null +++ b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,44 @@ +--- +name: Bug report +about: Create a report to help us improve + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + +Please run down the following list and make sure you've tried the usual "quick fixes": + + - Search the issues already opened: https://github.com/googleapis/python-pubsub/issues + - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python + - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + +If you are still having issues, please be sure to include as much information as possible: + +#### Environment details + + - OS type and version: + - Python version: `python --version` + - pip version: `pip --version` + - `google-cloud-pubsub` version: `pip show google-cloud-pubsub` + +#### Steps to reproduce + + 1. ? + 2. ? + +#### Code example + +```python +# example +``` + +#### Stack trace +``` +# example +``` + +Making sure to follow these steps will guarantee the quickest resolution possible. + +Thanks! 
diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000000..6365857f33c6 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,18 @@ +--- +name: Feature request +about: Suggest an idea for this library + +--- + +Thanks for stopping by to let us know something could be better! + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. + + **Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] + **Describe the solution you'd like** +A clear and concise description of what you want to happen. + **Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + **Additional context** +Add any other context or screenshots about the feature request here. diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md new file mode 100644 index 000000000000..995869032125 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md @@ -0,0 +1,7 @@ +--- +name: Support request +about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. + +--- + +**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000000..89f4a4d1a5c9 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,7 @@ +Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: +- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea +- [ ] Ensure the tests and linter pass +- [ ] Code coverage does not decrease (if any source code was changed) +- [ ] Appropriate docs were updated (if necessary) + +Fixes # 🦕 diff --git a/packages/google-cloud-pubsub/.github/release-please.yml b/packages/google-cloud-pubsub/.github/release-please.yml new file mode 100644 index 000000000000..4507ad0598a5 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/release-please.yml @@ -0,0 +1 @@ +releaseType: python diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore new file mode 100644 index 000000000000..3fb06e09ce74 --- /dev/null +++ b/packages/google-cloud-pubsub/.gitignore @@ -0,0 +1,58 @@ +*.py[cod] +*.sw[op] + +# C extensions +*.so + +# Packages +*.egg +*.egg-info +dist +build +eggs +parts +bin +var +sdist +develop-eggs +.installed.cfg +lib +lib64 +__pycache__ + +# Installer logs +pip-log.txt + +# Unit test / coverage reports +.coverage +.nox +.cache +.pytest_cache + + +# Mac +.DS_Store + +# JetBrains +.idea + +# VS Code +.vscode + +# emacs +*~ + +# Built documentation +docs/_build +bigquery/docs/generated + +# Virtual environment +env/ +coverage.xml + +# System test environment variables. +system_tests/local_test_setup + +# Make sure a generated file isn't accidentally committed. 
+pylintrc +pylintrc.test \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh new file mode 100755 index 000000000000..6a68ebd105f1 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +cd github/python-pubsub + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Setup service account credentials. +export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json + +# Setup project id. +export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +python3.6 -m nox diff --git a/packages/google-cloud-pubsub/.kokoro/continuous/common.cfg b/packages/google-cloud-pubsub/.kokoro/continuous/common.cfg new file mode 100644 index 000000000000..a812af888d39 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/continuous/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. 
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/build.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg new file mode 100644 index 000000000000..b602fa54258d --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg @@ -0,0 +1,48 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/publish-docs.sh" +} + +env_vars: { + key: "STAGING_BUCKET" + value: "docs-staging" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "docuploader_service_account" + } + } +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/docs/docs.cfg b/packages/google-cloud-pubsub/.kokoro/docs/docs.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docs/docs.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg new file mode 100644 index 000000000000..a812af888d39 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg @@ -0,0 +1,27 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Download resources for system tests (service account key, etc.) +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/build.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh new file mode 100755 index 000000000000..6a15192de2ec --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +cd github/python-pubsub + +# Remove old nox +python3.6 -m pip uninstall --yes --quiet nox-automation + +# Install nox +python3.6 -m pip install --upgrade --quiet nox +python3.6 -m nox --version + +# build docs +nox -s docs + +python3 -m pip install gcp-docuploader + +# install a json parser +sudo apt-get update +sudo apt-get -y install software-properties-common +sudo add-apt-repository universe +sudo apt-get update +sudo apt-get -y install jq + +# create metadata +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh new file mode 100755 index 000000000000..9f8df9b3eb74 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -0,0 +1,34 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +#!/bin/bash + +set -eo pipefail + +# Start the releasetool reporter +python3 -m pip install gcp-releasetool +python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Move into the package, build the distribution and upload. +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +cd github/python-pubsub +python3 setup.py sdist bdist_wheel +twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg new file mode 100644 index 000000000000..d9c893b597ee --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -0,0 +1,64 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" + +# Configure the docker image for kokoro-trampoline. 
+env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" +} +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/release.sh" +} + +# Fetch the token needed for reporting release status to GitHub +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "yoshi-automation-github-key" + } + } +} + +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } +} + +# Fetch magictoken to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "releasetool-magictoken" + } + } +} + +# Fetch api key to use with Magic Github Proxy +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "magic-github-proxy-api-key" + } + } +} diff --git a/packages/google-cloud-pubsub/.kokoro/release/release.cfg b/packages/google-cloud-pubsub/.kokoro/release/release.cfg new file mode 100644 index 000000000000..8f43917d92fe --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/release/release.cfg @@ -0,0 +1 @@ +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline.sh b/packages/google-cloud-pubsub/.kokoro/trampoline.sh new file mode 100755 index 000000000000..e8c4251f3ed4 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/trampoline.sh @@ -0,0 +1,23 @@ +#!/bin/bash +# Copyright 2017 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? + +chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh +${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true + +exit ${ret_code} diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index e14db4392f0a..b21c198b1485 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -6,7 +6,7 @@ "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", "release_level": "ga", "language": "python", - "repo": "googleapis/google-cloud-python", + "repo": "googleapis/python-pubsub", "distribution_name": "google-cloud-pubsub", "api_id": "pubsub.googleapis.com", "requires_billing": true diff --git a/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md b/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..b3d1f6029849 --- /dev/null +++ b/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md @@ -0,0 +1,44 @@ + +# Contributor Code of Conduct + +As contributors and maintainers of this project, +and in the interest of fostering an open and welcoming community, +we pledge to respect all people who contribute through reporting issues, +posting feature requests, updating documentation, +submitting pull requests or patches, and other activities. 
+ +We are committed to making participation in this project +a harassment-free experience for everyone, +regardless of level of experience, gender, gender identity and expression, +sexual orientation, disability, personal appearance, +body size, race, ethnicity, age, religion, or nationality. + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery +* Personal attacks +* Trolling or insulting/derogatory comments +* Public or private harassment +* Publishing other's private information, +such as physical or electronic +addresses, without explicit permission +* Other unethical or unprofessional conduct. + +Project maintainers have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct. +By adopting this Code of Conduct, +project maintainers commit themselves to fairly and consistently +applying these principles to every aspect of managing this project. +Project maintainers who do not follow or enforce the Code of Conduct +may be permanently removed from the project team. + +This code of conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. + +Instances of abusive, harassing, or otherwise unacceptable behavior +may be reported by opening an issue +or contacting one or more of the project maintainers. + +This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, +available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst new file mode 100644 index 000000000000..4f6294209cd3 --- /dev/null +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -0,0 +1,279 @@ +.. Generated by synthtool. DO NOT EDIT! 
+############ +Contributing +############ + +#. **Please sign one of the contributor license agreements below.** +#. Fork the repo, develop and test your code changes, add docs. +#. Make sure that your commit messages clearly describe the changes. +#. Send a pull request. (Please Read: `Faster Pull Request Reviews`_) + +.. _Faster Pull Request Reviews: https://github.com/kubernetes/community/blob/master/contributors/guide/pull-requests.md#best-practices-for-faster-reviews + +.. contents:: Here are some guidelines for hacking on the Google Cloud Client libraries. + +*************** +Adding Features +*************** + +In order to add a feature: + +- The feature must be documented in both the API and narrative + documentation. + +- The feature must work fully on the following CPython versions: 2.7, + 3.5, 3.6, and 3.7 on both UNIX and Windows. + +- The feature must not add unnecessary dependencies (where + "unnecessary" is of course subjective, but new dependencies should + be discussed). + +**************************** +Using a Development Checkout +**************************** + +You'll have to create a development environment using a Git checkout: + +- While logged into your GitHub account, navigate to the + ``python-pubsub`` `repo`_ on GitHub. + +- Fork and clone the ``python-pubsub`` repository to your GitHub account by + clicking the "Fork" button. + +- Clone your fork of ``python-pubsub`` from your GitHub account to your local + computer, substituting your account username and specifying the destination + as ``hack-on-python-pubsub``. E.g.:: + + $ cd ${HOME} + $ git clone git@github.com:USERNAME/python-pubsub.git hack-on-python-pubsub + $ cd hack-on-python-pubsub + # Configure remotes such that you can pull changes from the googleapis/python-pubsub + # repository into your local repository. 
+ $ git remote add upstream git@github.com:googleapis/python-pubsub.git + # fetch and merge changes from upstream into master + $ git fetch upstream + $ git merge upstream/master + +Now your local repo is set up such that you will push changes to your GitHub +repo, from which you can submit a pull request. + +To work on the codebase and run the tests, we recommend using ``nox``, +but you can also use a ``virtualenv`` of your own creation. + +.. _repo: https://github.com/googleapis/python-pubsub + +Using ``nox`` +============= + +We use `nox `__ to instrument our tests. + +- To test your changes, run unit tests with ``nox``:: + + $ nox -s unit-2.7 + $ nox -s unit-3.7 + $ ... + + .. note:: + + The unit tests and system tests are described in the + ``noxfile.py`` files in each directory. + +.. nox: https://pypi.org/project/nox/ + +Note on Editable Installs / Develop Mode +======================================== + +- As mentioned previously, using ``setuptools`` in `develop mode`_ + or a ``pip`` `editable install`_ is not possible with this + library. This is because this library uses `namespace packages`_. + For context see `Issue #2316`_ and the relevant `PyPA issue`_. + + Since ``editable`` / ``develop`` mode can't be used, packages + need to be installed directly. Hence your changes to the source + tree don't get incorporated into the **already installed** + package. + +.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ +.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 +.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 +.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode +.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs + +***************************************** +I'm getting weird errors... Can you help? 
+***************************************** + +If the error mentions ``Python.h`` not being found, +install ``python-dev`` and try again. +On Debian/Ubuntu:: + + $ sudo apt-get install python-dev + +************ +Coding Style +************ + +- PEP8 compliance, with exceptions defined in the linter configuration. + If you have ``nox`` installed, you can test that you have not introduced + any non-compliant code via:: + + $ nox -s lint + +- In order to make ``nox -s lint`` run faster, you can set some environment + variables:: + + export GOOGLE_CLOUD_TESTING_REMOTE="upstream" + export GOOGLE_CLOUD_TESTING_BRANCH="master" + + By doing this, you are specifying the location of the most up-to-date + version of ``python-pubsub``. The the suggested remote name ``upstream`` + should point to the official ``googleapis`` checkout and the + the branch should be the main branch on that remote (``master``). + +Exceptions to PEP8: + +- Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for + "Function-Under-Test"), which is PEP8-incompliant, but more readable. + Some also use a local variable, ``MUT`` (short for "Module-Under-Test"). + +******************** +Running System Tests +******************** + +- To run system tests, you can execute:: + + $ nox -s system-3.7 + $ nox -s system-2.7 + + .. note:: + + System tests are only configured to run under Python 2.7 and + Python 3.7. For expediency, we do not run them in older versions + of Python 3. + + This alone will not run the tests. You'll need to change some local + auth settings and change some configuration in your project to + run all the tests. + +- System tests will be run against an actual project and + so you'll need to provide some environment variables to facilitate + authentication to your project: + + - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; + Such a file can be downloaded directly from the developer's console by clicking + "Generate new JSON key". 
See private key + `docs `__ + for more details. + +- Once you have downloaded your json keys, set the environment variable + ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: + + $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" + + +************* +Test Coverage +************* + +- The codebase *must* have 100% test statement coverage after each commit. + You can test coverage via ``nox -s cover``. + +****************************************************** +Documentation Coverage and Building HTML Documentation +****************************************************** + +If you fix a bug, and the bug requires an API or behavior modification, all +documentation in this package which references that API or behavior must be +changed to reflect the bug fix, ideally in the same commit that fixes the bug +or adds the feature. + +Build the docs via: + + $ nox -s docs + +******************************************** +Note About ``README`` as it pertains to PyPI +******************************************** + +The `description on PyPI`_ for the project comes directly from the +``README``. Due to the reStructuredText (``rst``) parser used by +PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` +instead of +``https://github.com/googleapis/python-pubsub/blob/master/CONTRIBUTING.rst``) +may cause problems creating links or rendering the description. + +.. _description on PyPI: https://pypi.org/project/google-cloud-pubsub + + +************************* +Supported Python Versions +************************* + +We support: + +- `Python 3.5`_ +- `Python 3.6`_ +- `Python 3.7`_ + +.. _Python 3.5: https://docs.python.org/3.5/ +.. _Python 3.6: https://docs.python.org/3.6/ +.. _Python 3.7: https://docs.python.org/3.7/ + + +Supported versions can be found in our ``noxfile.py`` `config`_. + +.. 
_config: https://github.com/googleapis/python-pubsub/blob/master/noxfile.py + +We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ +and lack of continuous integration `support`_. + +.. _Python 2.5: https://docs.python.org/2.5/ +.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ +.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ + +We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no +longer supported by the core development team. + +Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. + +We also explicitly decided to support Python 3 beginning with version +3.5. Reasons for this include: + +- Encouraging use of newest versions of Python 3 +- Taking the lead of `prominent`_ open-source `projects`_ +- `Unicode literal support`_ which allows for a cleaner codebase that + works in both Python 2 and Python 3 + +.. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django +.. _projects: http://flask.pocoo.org/docs/0.10/python3/ +.. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ +.. _dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 + +********** +Versioning +********** + +This library follows `Semantic Versioning`_. + +.. _Semantic Versioning: http://semver.org/ + +Some packages are currently in major version zero (``0.y.z``), which means that +anything may change at any time and the public API should not be considered +stable. + +****************************** +Contributor License Agreements +****************************** + +Before we can accept your pull requests you'll need to sign a Contributor +License Agreement (CLA): + +- **If you are an individual writing original source code** and **you own the + intellectual property**, then you'll need to sign an + `individual CLA `__. 
+- **If you work for a company that wants to allow you to contribute your work**, + then you'll need to sign a + `corporate CLA `__. + +You can sign these electronically (just scroll to the bottom). After that, +we'll be able to accept your pull requests. diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index 9cbf175afe6b..cd011be27a0e 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,3 +1,4 @@ +# Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 778fe480b370..20c1b57fe653 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -20,7 +20,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) -__version__ = "0.1.0" +__version__ = "" # -- General configuration ------------------------------------------------ @@ -45,6 +45,7 @@ autodoc_default_flags = ["members"] autosummary_generate = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -65,7 +66,7 @@ # General information about the project. project = u"google-cloud-pubsub" -copyright = u"2017, Google" +copyright = u"2019, Google" author = u"Google APIs" # The version info for the project you're documenting, acts as replacement for @@ -121,6 +122,7 @@ # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True + # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -131,9 +133,9 @@ # further. For a list of options available for each theme, see the # documentation. 
html_theme_options = { - "description": "Google Cloud Client Libraries for Python", + "description": "Google Cloud Client Libraries for google-cloud-pubsub", "github_user": "googleapis", - "github_repo": "google-cloud-python", + "github_repo": "python-pubsub", "github_banner": True, "font_family": "'Roboto', Georgia, sans", "head_font_family": "'Roboto', Georgia, serif", @@ -229,6 +231,7 @@ # -- Options for warnings ------------------------------------------------------ + suppress_warnings = [ # Temporarily suppress this to avoid "more than one target found for # cross-reference" warning, which are intractable for us to avoid while in @@ -284,6 +287,7 @@ # If false, no module index is generated. # latex_domain_indices = True + # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples @@ -301,6 +305,7 @@ # If true, show URL addresses after external links. # man_show_urls = False + # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples @@ -313,7 +318,7 @@ u"google-cloud-pubsub Documentation", author, "google-cloud-pubsub", - "GAPIC library for the {metadata.shortName} service", + "google-cloud-pubsub Library", "APIs", ) ] @@ -330,19 +335,16 @@ # If true, do not generate a @detailmenu in the "Top" node's menu. # texinfo_no_detailmenu = False + # Example configuration for intersphinx: refer to the Python standard library. 
intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("https://requests.kennethreitz.org/en/stable/", None), - "fastavro": ("https://fastavro.readthedocs.io/en/stable/", None), - "pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None), } + # Napoleon settings napoleon_google_docstring = True napoleon_numpy_docstring = True diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 7949a4e3925a..0b8e845fc8c3 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -23,7 +23,6 @@ import nox -LOCAL_DEPS = (os.path.join("..", "api_core"), os.path.join("..", "core")) BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -38,7 +37,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. """ - session.install("flake8", BLACK_VERSION, *LOCAL_DEPS) + session.install("flake8", BLACK_VERSION) session.run("black", "--check", *BLACK_PATHS) session.run("flake8", "google", "tests") @@ -67,8 +66,6 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. session.install("mock", "pytest", "pytest-cov") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) session.install("-e", ".") # Run py.test against the unit tests. 
@@ -113,9 +110,8 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest") - for local_dep in LOCAL_DEPS: - session.install("-e", local_dep) - session.install("-e", "../test_utils/") + # session.install("-e", "../test_utils/") + session.install("-e", "test_utils") session.install("-e", ".") # Run py.test against the system tests. @@ -133,7 +129,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json new file mode 100644 index 000000000000..4fa949311b20 --- /dev/null +++ b/packages/google-cloud-pubsub/renovate.json @@ -0,0 +1,5 @@ +{ + "extends": [ + "config:base", ":preserveSemverRanges" + ] +} diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 69f19b3db72e..44559aeaf754 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -64,7 +64,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/GoogleCloudPlatform/google-cloud-python", + url="https://github.com/googleapis/python-pubsub", classifiers=[ release_status, "Intended Audience :: Developers", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 48f9b92bf006..0ad4372bdd7c 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,24 +1,24 @@ { - "updateTime": "2020-01-08T13:27:29.257325Z", + "updateTime": "2020-01-31T14:49:10.048857Z", "sources": [ { "generator": { "name": "artman", - "version": 
"0.43.0", - "dockerImage": "googleapis/artman@sha256:264654a37596a44b0668b8ce6ac41082d713f6ee150b3fc6425fa78cc64e4f20" + "version": "0.44.4", + "dockerImage": "googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "08b488e0660c59842a7dee0e3e2b65d9e3a514a9", - "internalRef": "288625007" + "sha": "7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7", + "internalRef": "292450564" } }, { "template": { - "name": "python_library", + "name": "python_split_library", "origin": "synthtool.gcp", "version": "2019.10.17" } diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 88ac4a8d4f7e..a92290e21089 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -186,10 +186,20 @@ def _merge_dict(d1, d2): "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", ) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = gcp.CommonTemplates().py_library(unit_cov_level=97, cov_level=100) +templated_files = gcp.CommonTemplates().py_library( + unit_cov_level=97, cov_level=99, system_test_dependencies=["test_utils"] +) s.move(templated_files) +# Temporary fix for the generated synth file (the test_utils path) +s.replace( + "noxfile.py", + r'session\.install\("-e", "\.\./test_utils/"\)', + '# \g<0>', +) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-pubsub/test_utils/credentials.json.enc b/packages/google-cloud-pubsub/test_utils/credentials.json.enc new file mode 100644 index 000000000000..f073c7e4f774 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/credentials.json.enc @@ -0,0 +1,49 @@ +U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA 
+UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU +aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj +HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV +V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus +J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8 +Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He +/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv +ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT +6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq +NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8 +j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF +41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM +IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g +x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/ +vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy +ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At +CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD +j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK +jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z +cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO +LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso +Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d +XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/ +MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP ++dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4 +kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU +5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr +E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29 +D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT +tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX 
+XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6 +J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB +jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM +td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg +twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC +mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU +aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6 +uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK +n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ +bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX +ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H +NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w +1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE +8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL +qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv +tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4 +iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l +bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD diff --git a/packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py b/packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py new file mode 100644 index 000000000000..c148b9dc2370 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py @@ -0,0 +1,64 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper to determine package from tag. +Get the current package directory corresponding to the Circle Tag. +""" + +from __future__ import print_function + +import os +import re +import sys + + +TAG_RE = re.compile(r""" + ^ + (?P + (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed) + ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) + $ +""", re.VERBOSE) +TAG_ENV = 'CIRCLE_TAG' +ERROR_MSG = '%s env. var. not set' % (TAG_ENV,) +BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z' +CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__) +ROOT_DIR = os.path.realpath( + os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..')) + + +def main(): + """Get the current package directory. + Prints the package directory out so callers can consume it. 
+ """ + if TAG_ENV not in os.environ: + print(ERROR_MSG, file=sys.stderr) + sys.exit(1) + + tag_name = os.environ[TAG_ENV] + match = TAG_RE.match(tag_name) + if match is None: + print(BAD_TAG_MSG % (tag_name,), file=sys.stderr) + sys.exit(1) + + pkg_name = match.group('pkg') + if pkg_name is None: + print(ROOT_DIR) + else: + pkg_dir = pkg_name.rstrip('-').replace('-', '_') + print(os.path.join(ROOT_DIR, pkg_dir)) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh b/packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh new file mode 100755 index 000000000000..23a4738e90b9 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh @@ -0,0 +1,36 @@ +#!/bin/bash + +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ev + +# If this is not a CircleCI tag, no-op. +if [[ -z "$CIRCLE_TAG" ]]; then + echo "This is not a release tag. Doing nothing." + exit 0 +fi + +# H/T: http://stackoverflow.com/a/246128/1068170 +SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py" +# Determine the package directory being deploying on this tag. +PKG_DIR="$(python ${SCRIPT})" + +# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. +python3 -m pip install --upgrade twine wheel setuptools + +# Move into the package, build the distribution and upload. 
+cd ${PKG_DIR} +python3 setup.py sdist bdist_wheel +twine upload dist/* diff --git a/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py b/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py new file mode 100644 index 000000000000..1d51830cc23a --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py @@ -0,0 +1,268 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Print a list of packages which require testing.""" + +import os +import re +import subprocess +import warnings + + +CURRENT_DIR = os.path.realpath(os.path.dirname(__file__)) +BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..')) +GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python') +CI = os.environ.get('CI', '') +CI_BRANCH = os.environ.get('CIRCLE_BRANCH') +CI_PR = os.environ.get('CIRCLE_PR_NUMBER') +CIRCLE_TAG = os.environ.get('CIRCLE_TAG') +head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD'] +).strip().decode('ascii').split() +rev_parse = subprocess.check_output( + ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] +).strip().decode('ascii') +MAJOR_DIV = '#' * 78 +MINOR_DIV = '#' + '-' * 77 + +# NOTE: This reg-ex is copied from ``get_tagged_packages``. 
+TAG_RE = re.compile(r""" + ^ + (?P + (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) + ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) + $ +""", re.VERBOSE) + +# This is the current set of dependencies by package. +# As of this writing, the only "real" dependency is that of error_reporting +# (on logging), the rest are just system test dependencies. +PKG_DEPENDENCIES = { + 'logging': {'pubsub'}, +} + + +def get_baseline(): + """Return the baseline commit. + + On a pull request, or on a branch, return the common parent revision + with the master branch. + + Locally, return a value pulled from environment variables, or None if + the environment variables are not set. + + On a push to master, return None. This will effectively cause everything + to be considered to be affected. + """ + + # If this is a pull request or branch, return the tip for master. + # We will test only packages which have changed since that point. + ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR]) + + if ci_non_master: + + repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO) + subprocess.run(['git', 'remote', 'add', 'baseline', repo_url], + stderr=subprocess.DEVNULL) + subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL) + + if CI_PR is None and CI_BRANCH is not None: + output = subprocess.check_output([ + 'git', 'merge-base', '--fork-point', + 'baseline/master', CI_BRANCH]) + return output.strip().decode('ascii') + + return 'baseline/master' + + # If environment variables are set identifying what the master tip is, + # use that. + if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''): + remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE'] + branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master') + return '%s/%s' % (remote, branch) + + # If we are not in CI and we got this far, issue a warning. + if not CI: + warnings.warn('No baseline could be determined; this means tests ' + 'will run for every package. 
+ If this is local ' + 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE ' + 'environment variable.') + + # That is all we can do; return None. + return None + + +def get_changed_files(): + """Return a list of files that have been changed since the baseline. + + If there is no base, return None. + """ + # Get the baseline, and fail quickly if there is no baseline. + baseline = get_baseline() + print('# Baseline commit: {}'.format(baseline)) + if not baseline: + return None + + # Return a list of altered files. + try: + return subprocess.check_output([ + 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline), + ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') + except subprocess.CalledProcessError: + warnings.warn('Unable to perform git diff; falling back to assuming ' + 'all packages have changed.') + return None + + +def reverse_map(dict_of_sets): + """Reverse a map of one-to-many. + + So the map:: + + { + 'A': {'B', 'C'}, + 'B': {'C'}, + } + + becomes + + { + 'B': {'A'}, + 'C': {'A', 'B'}, + } + + Args: + dict_of_sets (dict[set]): A dictionary of sets, mapping + one value to many. + + Returns: + dict[set]: The reversed map. + """ + result = {} + for key, values in dict_of_sets.items(): + for value in values: + result.setdefault(value, set()).add(key) + + return result + +def get_changed_packages(file_list): + """Return a list of changed packages based on the provided file list. + + If the file list is None, then all packages should be considered to be + altered. + """ + # Determine a complete list of packages. + all_packages = set() + for file_ in os.listdir(BASE_DIR): + abs_file = os.path.realpath(os.path.join(BASE_DIR, file_)) + nox_file = os.path.join(abs_file, 'nox.py') + if os.path.isdir(abs_file) and os.path.isfile(nox_file): + all_packages.add(file_) + + # If there is no file list, send down the full package set. + if file_list is None: + return all_packages + + # Create a set based on the list of changed files.
+ answer = set() + reverse_deps = reverse_map(PKG_DEPENDENCIES) + for file_ in file_list: + # Ignore root directory changes (setup.py, .gitignore, etc.). + if os.path.sep not in file_: + continue + + # Ignore changes that are not in a package (usually this will be docs). + package = file_.split(os.path.sep, 1)[0] + if package not in all_packages: + continue + + # If there is a change in core, short-circuit now and return + # everything. + if package in ('core',): + return all_packages + + # Add the package, as well as any dependencies this package has. + # NOTE: For now, dependencies only go down one level. + answer.add(package) + answer = answer.union(reverse_deps.get(package, set())) + + # We got this far without being short-circuited; return the final answer. + return answer + + +def get_tagged_package(): + """Return the package corresponding to the current tag. + + If there is no tag, will return :data:`None`. + """ + if CIRCLE_TAG is None: + return + + match = TAG_RE.match(CIRCLE_TAG) + if match is None: + return + + pkg_name = match.group('pkg') + if pkg_name == '': + # NOTE: This corresponds to the "umbrella" tag. + return + + return pkg_name.rstrip('-').replace('-', '_') + + +def get_target_packages(): + """Return a list of target packages to be run in the current build. + + If in a tag build, will run only the package(s) that are tagged, otherwise + will run the packages that have file changes in them (or packages that + depend on those).
+ """ + tagged_package = get_tagged_package() + if tagged_package is None: + file_list = get_changed_files() + print(MAJOR_DIV) + print('# Changed files:') + print(MINOR_DIV) + for file_ in file_list or (): + print('# {}'.format(file_)) + for package in sorted(get_changed_packages(file_list)): + yield package + else: + yield tagged_package + + +def main(): + print(MAJOR_DIV) + print('# Environment') + print(MINOR_DIV) + print('# CircleCI: {}'.format(CI)) + print('# CircleCI branch: {}'.format(CI_BRANCH)) + print('# CircleCI pr: {}'.format(CI_PR)) + print('# CircleCI tag: {}'.format(CIRCLE_TAG)) + print('# HEAD ref: {}'.format(head_hash)) + print('# {}'.format(head_name)) + print('# Git branch: {}'.format(rev_parse)) + print(MAJOR_DIV) + + packages = list(get_target_packages()) + + print(MAJOR_DIV) + print('# Target packages:') + print(MINOR_DIV) + for package in packages: + print(package) + print(MAJOR_DIV) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py b/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py new file mode 100644 index 000000000000..27d3a0c940ea --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py @@ -0,0 +1,98 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Print a list of packages which require testing.""" + +import pathlib +import subprocess + +import ci_diff_helper +import requests + + +def print_environment(environment): + print("-> CI environment:") + print('Branch', environment.branch) + print('PR', environment.pr) + print('In PR', environment.in_pr) + print('Repo URL', environment.repo_url) + if environment.in_pr: + print('PR Base', environment.base) + + +def get_base(environment): + if environment.in_pr: + return environment.base + else: + # If we're not in a PR, just calculate the changes between this commit + # and its parent. + return 'HEAD~1' + + +def get_changed_files_from_base(base): + return subprocess.check_output([ + 'git', 'diff', '--name-only', f'{base}..HEAD', + ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') + + +_URL_TEMPLATE = ( + 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/' + '{}/files' +) + + +def get_changed_files_from_pr(pr): + url = _URL_TEMPLATE.format(pr) + while url is not None: + response = requests.get(url) + for info in response.json(): + yield info['filename'] + url = response.links.get('next', {}).get('url') + + +def determine_changed_packages(changed_files): + packages = [ + path.parent for path in pathlib.Path('.').glob('*/noxfile.py') + ] + + changed_packages = set() + for file in changed_files: + file = pathlib.Path(file) + for package in packages: + if package in file.parents: + changed_packages.add(package) + + return changed_packages + + +def main(): + environment = ci_diff_helper.get_config() + print_environment(environment) + base = get_base(environment) + + if environment.in_pr: + changed_files = list(get_changed_files_from_pr(environment.pr)) + else: + changed_files = get_changed_files_from_base(base) + + packages = determine_changed_packages(changed_files) + + print(f"Comparing against {base}.") + print("-> Changed packages:") + + for package in packages: + print(package) + + +main() diff --git 
a/packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py b/packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py new file mode 100644 index 000000000000..287b08640691 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py @@ -0,0 +1,199 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Run system tests locally with the emulator. + +First makes system calls to spawn the emulator and get the local environment +variable needed for it. Then calls the system tests. +""" + + +import argparse +import os +import subprocess + +import psutil + +from google.cloud.environment_vars import BIGTABLE_EMULATOR +from google.cloud.environment_vars import GCD_DATASET +from google.cloud.environment_vars import GCD_HOST +from google.cloud.environment_vars import PUBSUB_EMULATOR +from run_system_test import run_module_tests + + +BIGTABLE = 'bigtable' +DATASTORE = 'datastore' +PUBSUB = 'pubsub' +PACKAGE_INFO = { + BIGTABLE: (BIGTABLE_EMULATOR,), + DATASTORE: (GCD_DATASET, GCD_HOST), + PUBSUB: (PUBSUB_EMULATOR,), +} +EXTRA = { + DATASTORE: ('--no-legacy',), +} +_DS_READY_LINE = '[datastore] Dev App Server is now running.\n' +_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on ' +_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on ' + + +def get_parser(): + """Get simple ``argparse`` parser to determine package. 
+ + :rtype: :class:`argparse.ArgumentParser` + :returns: The parser for this script. + """ + parser = argparse.ArgumentParser( + description='Run google-cloud system tests against local emulator.') + parser.add_argument('--package', dest='package', + choices=sorted(PACKAGE_INFO.keys()), + default=DATASTORE, help='Package to be tested.') + return parser + + +def get_start_command(package): + """Get command line arguments for starting emulator. + + :type package: str + :param package: The package to start an emulator for. + + :rtype: tuple + :returns: The arguments to be used, in a tuple. + """ + result = ('gcloud', 'beta', 'emulators', package, 'start') + extra = EXTRA.get(package, ()) + return result + extra + + +def get_env_init_command(package): + """Get command line arguments for getting emulator env. info. + + :type package: str + :param package: The package to get environment info for. + + :rtype: tuple + :returns: The arguments to be used, in a tuple. + """ + result = ('gcloud', 'beta', 'emulators', package, 'env-init') + extra = EXTRA.get(package, ()) + return result + extra + + +def datastore_wait_ready(popen): + """Wait until the datastore emulator is ready to use. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + """ + emulator_ready = False + while not emulator_ready: + emulator_ready = popen.stderr.readline() == _DS_READY_LINE + + +def wait_ready_prefix(popen, prefix): + """Wait until a process encounters a line with matching prefix. + + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + + :type prefix: str + :param prefix: The prefix to match. + """ + emulator_ready = False + while not emulator_ready: + emulator_ready = popen.stderr.readline().startswith(prefix) + + +def wait_ready(package, popen): + """Wait until the emulator is ready to use. + + :type package: str + :param package: The package to check if ready.
+ + :type popen: :class:`subprocess.Popen` + :param popen: An open subprocess to interact with. + + :raises: :class:`KeyError` if the ``package`` is not among + ``datastore``, ``pubsub`` or ``bigtable``. + """ + if package == DATASTORE: + datastore_wait_ready(popen) + elif package == PUBSUB: + wait_ready_prefix(popen, _PS_READY_LINE_PREFIX) + elif package == BIGTABLE: + wait_ready_prefix(popen, _BT_READY_LINE_PREFIX) + else: + raise KeyError('Package not supported', package) + + +def cleanup(pid): + """Cleanup a process (including all of its children). + + :type pid: int + :param pid: Process ID. + """ + proc = psutil.Process(pid) + for child_proc in proc.children(recursive=True): + try: + child_proc.kill() + child_proc.terminate() + except psutil.NoSuchProcess: + pass + proc.terminate() + proc.kill() + + +def run_tests_in_emulator(package): + """Spawn an emulator instance and run the system tests. + + :type package: str + :param package: The package to run system tests against. + """ + # Make sure this package has environment vars to replace. + env_vars = PACKAGE_INFO[package] + + start_command = get_start_command(package) + # Ignore stdin and stdout, don't pollute the user's output with them. + proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + try: + wait_ready(package, proc_start) + env_init_command = get_env_init_command(package) + proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + env_status = proc_env.wait() + if env_status != 0: + raise RuntimeError(env_status, proc_env.stderr.read()) + env_lines = proc_env.stdout.read().strip().split('\n') + # Set environment variables before running the system tests. 
+ for env_var in env_vars: + line_prefix = 'export ' + env_var + '=' + value, = [line.split(line_prefix, 1)[1] for line in env_lines + if line.startswith(line_prefix)] + os.environ[env_var] = value + run_module_tests(package, + ignore_requirements=True) + finally: + cleanup(proc_start.pid) + + +def main(): + """Main method to run this script.""" + parser = get_parser() + args = parser.parse_args() + run_tests_in_emulator(args.package) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh b/packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh new file mode 100755 index 000000000000..8cbab9f0dad0 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh @@ -0,0 +1,93 @@ +#!/bin/bash + +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -ev + +GH_OWNER='GoogleCloudPlatform' +GH_PROJECT_NAME='google-cloud-python' + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +# Function to build the docs. +function build_docs { + rm -rf docs/_build/ + rm -f docs/bigquery/generated/*.rst + # -W -> warnings as errors + # -T -> show full traceback on exception + # -N -> no color + sphinx-build \ + -W -T -N \ + -b html \ + -d docs/_build/doctrees \ + docs/ \ + docs/_build/html/ + return $? +} + +# Only update docs if we are on CircleCI. 
+if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then + echo "Building new docs on a merged commit." +elif [[ "$1" == "kokoro" ]]; then + echo "Building and publishing docs on Kokoro." +elif [[ -n "${CIRCLE_TAG}" ]]; then + echo "Building new docs on a tag (but will not deploy)." + build_docs + exit $? +else + echo "Not on master nor a release tag." + echo "Building new docs for testing purposes, but not deploying." + build_docs + exit $? +fi + +# Adding GitHub pages branch. `git submodule add` checks it +# out at HEAD. +GH_PAGES_DIR='ghpages' +git submodule add -q -b gh-pages \ + "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR} + +# Determine if we are building a new tag or are building docs +# for master. Then build new docs in docs/_build from master. +if [[ -n "${CIRCLE_TAG}" ]]; then + # Sphinx will use the package version by default. + build_docs +else + SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs +fi + +# Update gh-pages with the created docs. +cd ${GH_PAGES_DIR} +git rm -fr latest/ +cp -R ../docs/_build/html/ latest/ + +# Update the files push to gh-pages. +git add . +git status + +# If there are no changes, just exit cleanly. +if [[ -z "$(git status --porcelain)" ]]; then + echo "Nothing to commit. Exiting without pushing changes." + exit +fi + +# Commit to gh-pages branch to apply changes. +git config --global user.email "dpebot@google.com" +git config --global user.name "dpebot" +git commit -m "Update docs after merge to master." + +# NOTE: This may fail if two docs updates (on merges to master) +# happen in close proximity. 
+git push -q origin HEAD:gh-pages diff --git a/packages/google-cloud-pubsub/test_utils/setup.py b/packages/google-cloud-pubsub/test_utils/setup.py new file mode 100644 index 000000000000..8e9222a7f862 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/setup.py @@ -0,0 +1,64 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from setuptools import find_packages +from setuptools import setup + + +PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) + + +# NOTE: This is duplicated throughout and we should try to +# consolidate. 
+SETUP_BASE = { + 'author': 'Google Cloud Platform', + 'author_email': 'googleapis-publisher@google.com', + 'scripts': [], + 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', + 'license': 'Apache 2.0', + 'platforms': 'Posix; MacOS X; Windows', + 'include_package_data': True, + 'zip_safe': False, + 'classifiers': [ + 'Development Status :: 4 - Beta', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Topic :: Internet', + ], +} + + +REQUIREMENTS = [ + 'google-auth >= 0.4.0', + 'six', +] + +setup( + name='google-cloud-testutils', + version='0.24.0', + description='System test utilities for google-cloud-python', + packages=find_packages(), + install_requires=REQUIREMENTS, + python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', + **SETUP_BASE +) diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/__init__.py b/packages/google-cloud-pubsub/test_utils/test_utils/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/imports.py b/packages/google-cloud-pubsub/test_utils/test_utils/imports.py new file mode 100644 index 000000000000..5991af7fc465 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/test_utils/imports.py @@ -0,0 +1,38 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import mock +import six + + +def maybe_fail_import(predicate): + """Create and return a patcher that conditionally makes an import fail. + + Args: + predicate (Callable[[...], bool]): A callable that, if it returns `True`, + triggers an `ImportError`. It must accept the same arguments as the + built-in `__import__` function. + https://docs.python.org/3/library/functions.html#__import__ + + Returns: + A mock patcher object that can be used to enable patched import behavior. + """ + orig_import = six.moves.builtins.__import__ + + def custom_import(name, globals=None, locals=None, fromlist=(), level=0): + if predicate(name, globals, locals, fromlist, level): + raise ImportError + return orig_import(name, globals, locals, fromlist, level) + + return mock.patch.object(six.moves.builtins, "__import__", new=custom_import) diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/retry.py b/packages/google-cloud-pubsub/test_utils/test_utils/retry.py new file mode 100644 index 000000000000..e61c001a03e1 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/test_utils/retry.py @@ -0,0 +1,207 @@ +# Copyright 2016 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +from functools import wraps + +import six + +MAX_TRIES = 4 +DELAY = 1 +BACKOFF = 2 + + +def _retry_all(_): + """Retry all caught exceptions.""" + return True + + +class BackoffFailed(Exception): + """Retry w/ backoffs did not complete successfully.""" + + +class RetryBase(object): + """Base for retrying calling a decorated function w/ exponential backoff. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. + """ + def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + self.max_tries = max_tries + self.delay = delay + self.backoff = backoff + self.logger = logger.warning if logger else six.print_ + + +class RetryErrors(RetryBase): + """Decorator for retrying given exceptions in testing. + + :type exception: Exception or tuple of Exceptions + :param exception: The exception to check or may be a tuple of + exceptions to check. + + :type error_predicate: function, takes caught exception, returns bool + :param error_predicate: Predicate evaluating whether to retry after a + caught exception. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. 
+ + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. + """ + def __init__(self, exception, error_predicate=_retry_all, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryErrors, self).__init__(max_tries, delay, backoff, logger) + self.exception = exception + self.error_predicate = error_predicate + + def __call__(self, to_wrap): + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + try: + return to_wrap(*args, **kwargs) + except self.exception as caught_exception: + + if not self.error_predicate(caught_exception): + raise + + delay = self.delay * self.backoff**tries + msg = ("%s, Trying again in %d seconds..." % + (caught_exception, delay)) + self.logger(msg) + + time.sleep(delay) + tries += 1 + return to_wrap(*args, **kwargs) + + return wrapped_function + + +class RetryResult(RetryBase): + """Decorator for retrying based on non-error result. + + :type result_predicate: function, takes result, returns bool + :param result_predicate: Predicate evaluating whether to retry after a + result is returned. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. 
+ """ + def __init__(self, result_predicate, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryResult, self).__init__(max_tries, delay, backoff, logger) + self.result_predicate = result_predicate + + def __call__(self, to_wrap): + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + result = to_wrap(*args, **kwargs) + if self.result_predicate(result): + return result + + delay = self.delay * self.backoff**tries + msg = "%s. Trying again in %d seconds..." % ( + self.result_predicate.__name__, delay,) + self.logger(msg) + + time.sleep(delay) + tries += 1 + raise BackoffFailed() + + return wrapped_function + + +class RetryInstanceState(RetryBase): + """Decorator for retrying based on instance state. + + :type instance_predicate: function, takes instance, returns bool + :param instance_predicate: Predicate evaluating whether to retry after an + API-invoking method is called. + + :type max_tries: int + :param max_tries: Number of times to try (not retry) before giving up. + + :type delay: int + :param delay: Initial delay between retries in seconds. + + :type backoff: int + :param backoff: Backoff multiplier e.g. value of 2 will double the + delay each retry. + + :type logger: logging.Logger instance + :param logger: Logger to use. If None, print. + """ + def __init__(self, instance_predicate, + max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, + logger=None): + super(RetryInstanceState, self).__init__( + max_tries, delay, backoff, logger) + self.instance_predicate = instance_predicate + + def __call__(self, to_wrap): + instance = to_wrap.__self__ # only instance methods allowed + + @wraps(to_wrap) + def wrapped_function(*args, **kwargs): + tries = 0 + while tries < self.max_tries: + result = to_wrap(*args, **kwargs) + if self.instance_predicate(instance): + return result + + delay = self.delay * self.backoff**tries + msg = "%s. Trying again in %d seconds..." 
% ( + self.instance_predicate.__name__, delay,) + self.logger(msg) + + time.sleep(delay) + tries += 1 + raise BackoffFailed() + + return wrapped_function diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/system.py b/packages/google-cloud-pubsub/test_utils/test_utils/system.py new file mode 100644 index 000000000000..590dc62a06e6 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/test_utils/system.py @@ -0,0 +1,81 @@ +# Copyright 2014 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function +import os +import sys +import time + +import google.auth.credentials +from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS + + +# From shell environ. May be None. +CREDENTIALS = os.getenv(TEST_CREDENTIALS) + +ENVIRON_ERROR_MSG = """\ +To run the system tests, you need to set some environment variables. +Please check the CONTRIBUTING guide for instructions. +""" + + +class EmulatorCreds(google.auth.credentials.Credentials): + """A mock credential object. + + Used to avoid unnecessary token refreshing or reliance on the network + while an emulator is running. + """ + + def __init__(self): # pylint: disable=super-init-not-called + self.token = b'seekrit' + self.expiry = None + + @property + def valid(self): + """Would-be validity check of the credentials. + + Always is :data:`True`. 
+ """ + return True + + def refresh(self, unused_request): # pylint: disable=unused-argument + """Off-limits implementation for abstract method.""" + raise RuntimeError('Should never be refreshed.') + + +def check_environ(): + err_msg = None + if CREDENTIALS is None: + err_msg = '\nMissing variables: ' + TEST_CREDENTIALS + elif not os.path.isfile(CREDENTIALS): + err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS, + CREDENTIALS) + + if err_msg is not None: + msg = ENVIRON_ERROR_MSG + err_msg + print(msg, file=sys.stderr) + sys.exit(1) + + +def unique_resource_id(delimiter='_'): + """A unique identifier for a resource. + + Intended to help locate resources created in particular + testing environments and at particular times. + """ + build_id = os.getenv('CIRCLE_BUILD_NUM', '') + if build_id == '': + return '%s%d' % (delimiter, 1000 * time.time()) + else: + return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time()) diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py b/packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py new file mode 100644 index 000000000000..36b15d6be991 --- /dev/null +++ b/packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os + +import pytest + + +INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" +PROJECT_INSIDE_ENVVAR = "PROJECT_ID" +PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" +BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" + + +class VPCSCTestConfig(object): + """System test utility for VPCSC detection. + + See: https://cloud.google.com/vpc-service-controls/docs/ + """ + + @property + def inside_vpcsc(self): + """Test whether the test environment is configured to run inside VPCSC. + + Returns: + bool: + true if the environment is configured to run inside VPCSC, + else false. + """ + return INSIDE_VPCSC_ENVVAR in os.environ + + @property + def project_inside(self): + """Project ID for testing outside access. + + Returns: + str: project ID used for testing outside access; None if undefined. + """ + return os.environ.get(PROJECT_INSIDE_ENVVAR, None) + + @property + def project_outside(self): + """Project ID for testing inside access. + + Returns: + str: project ID used for testing inside access; None if undefined. + """ + return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) + + @property + def bucket_outside(self): + """GCS bucket for testing inside access. + + Returns: + str: bucket ID used for testing inside access; None if undefined. + """ + return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) + + def skip_if_inside_vpcsc(self, testcase): + """Test decorator: skip if running inside VPCSC.""" + reason = ( + "Running inside VPCSC. " + "Unset the {} environment variable to enable this test." + ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_vpcsc(self, testcase): + """Test decorator: skip if running outside VPCSC.""" + reason = ( + "Running outside VPCSC. " + "Set the {} environment variable to enable this test." 
+ ).format(INSIDE_VPCSC_ENVVAR) + skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) + return skip(testcase) + + def skip_unless_inside_project(self, testcase): + """Test decorator: skip if inside project env var not set.""" + reason = ( + "Project ID for running inside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(PROJECT_INSIDE_ENVVAR) + skip = pytest.mark.skipif(self.project_inside is None, reason=reason) + return skip(testcase) + + def skip_unless_outside_project(self, testcase): + """Test decorator: skip if outside project env var not set.""" + reason = ( + "Project ID for running outside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(PROJECT_OUTSIDE_ENVVAR) + skip = pytest.mark.skipif(self.project_outside is None, reason=reason) + return skip(testcase) + + def skip_unless_outside_bucket(self, testcase): + """Test decorator: skip if outside bucket env var not set.""" + reason = ( + "Bucket ID for running outside VPCSC not set. " + "Set the {} environment variable to enable this test." + ).format(BUCKET_OUTSIDE_ENVVAR) + skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason) + return skip(testcase) + + +vpcsc_config = VPCSCTestConfig() From a715b756c8bccb4e93ba182c9ccf3763d2601655 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 4 Feb 2020 23:54:50 -0800 Subject: [PATCH 0427/1197] feat: add `StreamingPullRequest.client_id` field (via synth) Additionally, the copyright year is bumped to 2020 in some of the files. 
--- .../google-cloud-pubsub/google/__init__.py | 2 +- .../google/cloud/__init__.py | 2 +- .../google/cloud/pubsub.py | 2 +- .../cloud/pubsub_v1/gapic/publisher_client.py | 8 +- .../pubsub_v1/gapic/subscriber_client.py | 12 +- .../transports/publisher_grpc_transport.py | 2 +- .../transports/subscriber_grpc_transport.py | 2 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 34 ++-- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 156 +++++++++++------- packages/google-cloud-pubsub/noxfile.py | 3 +- packages/google-cloud-pubsub/synth.metadata | 7 +- .../gapic/v1/test_system_publisher_v1.py | 2 +- .../unit/gapic/v1/test_publisher_client_v1.py | 2 +- .../gapic/v1/test_subscriber_client_v1.py | 2 +- 14 files changed, 139 insertions(+), 97 deletions(-) diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py index 8fcc60e2b9c6..9a1b64a6d586 100644 --- a/packages/google-cloud-pubsub/google/__init__.py +++ b/packages/google-cloud-pubsub/google/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index 8fcc60e2b9c6..9a1b64a6d586 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index b9f92e991aa6..e7006048ba82 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 774fda25d622..defa110384aa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -51,10 +51,10 @@ # TODO: remove conditional import after Python 2 support is dropped -if six.PY3: - from collections.abc import Mapping -else: +if six.PY2: from collections import Mapping +else: + from collections.abc import Mapping def _merge_dict(d1, d2): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 0ecce7ac9d1c..22e955322fb6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -1307,11 +1307,11 @@ def create_snapshot( >>> response = client.create_snapshot(name, subscription) Args: - name (str): Optional user-provided name for this snapshot. If the name is not - provided in the request, the server will assign a random name for this - snapshot on the same project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name rules. Format - is ``projects/{project}/snapshots/{snap}``. + name (str): User-provided name for this snapshot. If the name is not provided in the + request, the server will assign a random name for this snapshot on the + same project as the subscription. Note that for REST API requests, you + must specify a name. See the resource name rules. Format is + ``projects/{project}/snapshots/{snap}``. subscription (str): The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: (a) The existing backlog on the subscription. 
More precisely, this is defined as the messages in the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index aedcc8c465e1..918224ebd61f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index fa8ac9f29329..2c86fc778887 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 3ad6355a8bf6..da3801dfecb7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -164,7 +164,8 @@ message PubsubMessage { // at least one attribute. bytes data = 1; - // Optional attributes for this message. + // Attributes for this message. 
If this field is empty, the message must + // contain non-empty data. map attributes = 2; // ID of this message, assigned by the server when the message is published. @@ -178,10 +179,12 @@ message PubsubMessage { // publisher in a `Publish` call. google.protobuf.Timestamp publish_time = 4; - // Identifies related messages for which publish order should be respected. - // If a `Subscription` has `enable_message_ordering` set to `true`, messages - // published with the same `ordering_key` value will be delivered to - // subscribers in the order in which they are received by the Pub/Sub system. + // If non-empty, identifies related messages for which publish order should be + // respected. If a `Subscription` has `enable_message_ordering` set to `true`, + // messages published with the same non-empty `ordering_key` value will be + // delivered to subscribers in the order in which they are received by the + // Pub/Sub system. All `PubsubMessage`s published in a given `PublishRequest` + // must specify the same `ordering_key` value. // EXPERIMENTAL: This feature is part of a closed alpha release. This // API might be changed in backward-incompatible ways and is not recommended // for production use. It is not subject to any SLA or deprecation policy. @@ -922,6 +925,14 @@ message StreamingPullRequest { // requests from client to server. The minimum deadline you can specify is 10 // seconds. The maximum deadline you can specify is 600 seconds (10 minutes). int32 stream_ack_deadline_seconds = 5; + + // A unique identifier that is used to distinguish client instances from each + // other. Only needs to be provided on the initial request. When a stream + // disconnects and reconnects for the same stream, the client_id should be set + // to the same value so that state associated with the old stream can be + // transferred to the new stream. The same client_id should not be used for + // different client instances. 
+ string client_id = 6; } // Response for the `StreamingPull` method. This response is used to stream @@ -933,13 +944,12 @@ message StreamingPullResponse { // Request for the `CreateSnapshot` method. message CreateSnapshotRequest { - // Optional user-provided name for this snapshot. - // If the name is not provided in the request, the server will assign a random - // name for this snapshot on the same project as the subscription. - // Note that for REST API requests, you must specify a name. See the - // - // resource name rules. - // Format is `projects/{project}/snapshots/{snap}`. + // User-provided name for this snapshot. If the name is not provided in the + // request, the server will assign a random name for this snapshot on the same + // project as the subscription. Note that for REST API requests, you must + // specify a name. See the resource + // name rules. Format is `projects/{project}/snapshots/{snap}`. string name = 1; // The subscription whose backlog the snapshot retains. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index dd4427f7e9a1..58a14a866526 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -31,7 +31,7 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xe5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 
\x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xa4\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription
"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateS
napshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 
\x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xe5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t 
\x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 
\x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xb7\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12
\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x
12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, @@ -2316,6 +2316,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="client_id", + full_name="google.pubsub.v1.StreamingPullRequest.client_id", + index=5, + number=6, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[], @@ -2326,7 +2344,7 @@ extension_ranges=[], oneofs=[], serialized_start=3456, - serialized_end=3620, + serialized_end=3639, ) @@ -2364,8 +2382,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3622, - serialized_end=3707, + serialized_start=3641, + serialized_end=3726, ) @@ -2495,8 +2513,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3710, - serialized_end=3885, + serialized_start=3729, + serialized_end=3904, ) @@ -2552,8 +2570,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3887, - serialized_end=4005, + serialized_start=3906, + serialized_end=4024, ) @@ -2701,8 +2719,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4008, - serialized_end=4199, + serialized_start=4027, + serialized_end=4218, ) @@ -2740,8 +2758,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=4201, - serialized_end=4239, + serialized_start=4220, + serialized_end=4258, ) @@ -2815,8 +2833,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4241, - serialized_end=4319, + serialized_start=4260, + serialized_end=4338, ) @@ -2872,8 +2890,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4321, - serialized_end=4416, + serialized_start=4340, + serialized_end=4435, ) @@ -2911,8 +2929,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4418, - serialized_end=4459, + serialized_start=4437, + serialized_end=4478, ) @@ -2994,8 +3012,8 @@ fields=[], ) ], - serialized_start=4461, - serialized_end=4570, + serialized_start=4480, + serialized_end=4589, ) @@ -3014,8 +3032,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4572, - serialized_end=4586, + serialized_start=4591, + serialized_end=4605, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3232,12 +3250,12 @@ ), DESCRIPTOR=_PUBSUBMESSAGE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A message that is published by publishers and consumed by subscribers. - The message must contain either a non-empty data field or at least one - attribute. Note that client libraries represent this object differently - depending on the language. See the corresponding client library - documentation for more information. See Quotas and limits for more - information about message limits. + __doc__="""A message that is published by publishers and consumed by + subscribers. The message must contain either a non-empty data field or + at least one attribute. Note that client libraries represent this object + differently depending on the language. See the corresponding client + library documentation for more information. See Quotas and limits for + more information about message limits. Attributes: @@ -3245,7 +3263,8 @@ The message data field. If this field is empty, the message must contain at least one attribute. 
attributes: - Optional attributes for this message. + Attributes for this message. If this field is empty, the + message must contain non-empty data. message_id: ID of this message, assigned by the server when the message is published. Guaranteed to be unique within the topic. This @@ -3257,16 +3276,17 @@ server when it receives the ``Publish`` call. It must not be populated by the publisher in a ``Publish`` call. ordering_key: - Identifies related messages for which publish order should be - respected. If a ``Subscription`` has + If non-empty, identifies related messages for which publish + order should be respected. If a ``Subscription`` has ``enable_message_ordering`` set to ``true``, messages - published with the same ``ordering_key`` value will be - delivered to subscribers in the order in which they are - received by the Pub/Sub system. EXPERIMENTAL: This feature is - part of a closed alpha release. This API might be changed in - backward-incompatible ways and is not recommended for - production use. It is not subject to any SLA or deprecation - policy. + published with the same non-empty ``ordering_key`` value will + be delivered to subscribers in the order in which they are + received by the Pub/Sub system. All ``PubsubMessage``\ s + published in a given ``PublishRequest`` must specify the same + ``ordering_key`` value. EXPERIMENTAL: This feature is part of + a closed alpha release. This API might be changed in backward- + incompatible ways and is not recommended for production use. + It is not subject to any SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) ), @@ -3635,8 +3655,8 @@ dict( DESCRIPTOR=_DEADLETTERPOLICY, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Dead lettering is done on a best effort basis. The same message might be - dead lettered multiple times. + __doc__="""Dead lettering is done on a best effort basis. The same + message might be dead lettered multiple times. 
If validation on any of the fields fails at subscription creation/updation, the create/update subscription request will fail. @@ -3676,8 +3696,8 @@ dict( DESCRIPTOR=_EXPIRATIONPOLICY, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A policy that specifies the conditions for resource expiration (i.e., - automatic resource deletion). + __doc__="""A policy that specifies the conditions for resource + expiration (i.e., automatic resource deletion). Attributes: @@ -3705,13 +3725,14 @@ dict( DESCRIPTOR=_PUSHCONFIG_OIDCTOKEN, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Contains information needed for generating an `OpenID Connect + __doc__="""Contains information needed for generating an `OpenID + Connect token `__. Attributes: service_account_email: - `Service account email + \ `Service account email `__ to be used for generating the OIDC token. The caller (for CreateSubscription, UpdateSubscription, and ModifyPushConfig @@ -4064,10 +4085,10 @@ dict( DESCRIPTOR=_STREAMINGPULLREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``StreamingPull`` streaming RPC method. This request is - used to establish the initial stream as well as to stream - acknowledgements and ack deadline modifications from the client to the - server. + __doc__="""Request for the ``StreamingPull`` streaming RPC method. + This request is used to establish the initial stream as well as to + stream acknowledgements and ack deadline modifications from the client + to the server. Attributes: @@ -4111,6 +4132,14 @@ on subsequent requests from client to server. The minimum deadline you can specify is 10 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). + client_id: + A unique identifier that is used to distinguish client + instances from each other. Only needs to be provided on the + initial request. 
When a stream disconnects and reconnects for + the same stream, the client\_id should be set to the same + value so that state associated with the old stream can be + transferred to the new stream. The same client\_id should not + be used for different client instances. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) ), @@ -4123,8 +4152,8 @@ dict( DESCRIPTOR=_STREAMINGPULLRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``StreamingPull`` method. This response is used to - stream messages from the server to the client. + __doc__="""Response for the ``StreamingPull`` method. This response + is used to stream messages from the server to the client. Attributes: @@ -4156,11 +4185,11 @@ Attributes: name: - Optional user-provided name for this snapshot. If the name is - not provided in the request, the server will assign a random - name for this snapshot on the same project as the - subscription. Note that for REST API requests, you must - specify a name. See the resource name rules. Format is + User-provided name for this snapshot. If the name is not + provided in the request, the server will assign a random name + for this snapshot on the same project as the subscription. + Note that for REST API requests, you must specify a name. See + the resource name rules. Format is ``projects/{project}/snapshots/{snap}``. subscription: The subscription whose backlog the snapshot retains. @@ -4217,10 +4246,10 @@ ), DESCRIPTOR=_SNAPSHOT, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A snapshot resource. Snapshots are used in Seek operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the - state captured by a snapshot. + __doc__="""A snapshot resource. Snapshots are used in Seek + operations, which allow you to manage message acknowledgments in bulk. 
+ That is, you can set the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. Attributes: @@ -4379,6 +4408,7 @@ DESCRIPTOR=_SEEKRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``Seek`` method (this response is empty). + """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) ), @@ -4402,8 +4432,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=4589, - serialized_end=5790, + serialized_start=4608, + serialized_end=5809, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4508,8 +4538,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=5793, - serialized_end=8204, + serialized_start=5812, + serialized_end=8223, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 0b8e845fc8c3..99d73295c127 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -72,6 +72,7 @@ def default(session): session.run( "py.test", "--quiet", + "--cov=google.cloud.pubsub", "--cov=google.cloud", "--cov=tests.unit", "--cov-append", @@ -110,7 +111,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
session.install("mock", "pytest") - # session.install("-e", "../test_utils/") + session.install("-e", "test_utils") session.install("-e", ".") diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 0ad4372bdd7c..6c204cd636f9 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2020-01-31T14:49:10.048857Z", + "updateTime": "2020-02-04T13:19:45.479344Z", "sources": [ { "generator": { @@ -12,8 +12,9 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7", - "internalRef": "292450564" + "sha": "69d9945330a5721cd679f17331a78850e2618226", + "internalRef": "293080182", + "log": "69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 
292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n" } }, { diff --git a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py index 9c0d2b33ad11..2ccebf07f0de 100644 --- a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index 6c64cf5a9ba2..1d2cb7522be5 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index b1d7f52fad5f..db9e77b2adda 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From ec18fe856e002f56782667ada0595e1a6c7e179d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 5 Feb 2020 14:53:47 -0500 Subject: [PATCH 0428/1197] chore: release 1.2.0 (#24) * updated CHANGELOG.md [ci skip] * updated setup.py [ci skip] --- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index ddca773a09a7..e2ce6f105720 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [1.2.0](https://www.github.com/googleapis/python-pubsub/compare/v1.1.0...v1.2.0) (2020-02-05) + + +### Features + +* **pubsub:** add delivery attempt property to message object received by user code ([#10205](https://www.github.com/googleapis/python-pubsub/issues/10205)) ([a0937c1](https://www.github.com/googleapis/python-pubsub/commit/a0937c13107b92271913de579b60f24b2aaac177)) +* add `StreamingPullRequest.client_id` field (via synth) ([199d56a](https://www.github.com/googleapis/python-pubsub/commit/199d56a939bb6244f67138f843dafdd80721f0d3)) + + +### Bug Fixes + +* **pubsub:** handle None in on 
response callback ([#9982](https://www.github.com/googleapis/python-pubsub/issues/9982)) ([6596c4b](https://www.github.com/googleapis/python-pubsub/commit/6596c4bae5526d82f5c1b5e0c243b2883404d51f)) +* replace unsafe six.PY3 with PY2 for better future compatibility with Python 4 ([#10081](https://www.github.com/googleapis/python-pubsub/issues/10081)) ([975c1ac](https://www.github.com/googleapis/python-pubsub/commit/975c1ac2cfdac0ce4403c0b56ad19f2ee7241f1a)) + ## 1.1.0 12-09-2019 18:51 PST diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 44559aeaf754..3144e258f124 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.1.0" +version = "1.2.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 9f077e81ac043c9779729104c33bef6c408d083c Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Wed, 5 Feb 2020 17:06:10 -0500 Subject: [PATCH 0429/1197] feat(pubsub): ordering keys (#26) --- .../cloud/pubsub_v1/publisher/_batch/base.py | 32 +- .../pubsub_v1/publisher/_batch/thread.py | 96 ++++-- .../publisher/_sequencer/__init__.py | 0 .../pubsub_v1/publisher/_sequencer/base.py | 70 ++++ .../publisher/_sequencer/ordered_sequencer.py | 303 +++++++++++++++++ .../_sequencer/unordered_sequencer.py | 124 +++++++ .../cloud/pubsub_v1/publisher/client.py | 250 +++++++++++--- .../cloud/pubsub_v1/publisher/exceptions.py | 19 +- .../subscriber/_protocol/dispatcher.py | 2 + .../pubsub_v1/subscriber/_protocol/leaser.py | 36 ++- .../subscriber/_protocol/messages_on_hold.py | 162 ++++++++++ .../subscriber/_protocol/requests.py | 14 +- .../_protocol/streaming_pull_manager.py | 133 ++++---- .../cloud/pubsub_v1/subscriber/message.py | 27 +- .../google/cloud/pubsub_v1/types.py | 19 ++ .../pubsub_v1/publisher/batch/test_base.py | 2 +- 
.../pubsub_v1/publisher/batch/test_thread.py | 207 ++++++++---- .../sequencer/test_ordered_sequencer.py | 305 ++++++++++++++++++ .../sequencer/test_unordered_sequencer.py | 104 ++++++ .../publisher/test_publisher_client.py | 277 +++++++++++++--- .../pubsub_v1/subscriber/test_dispatcher.py | 59 +++- .../unit/pubsub_v1/subscriber/test_leaser.py | 64 ++-- .../unit/pubsub_v1/subscriber/test_message.py | 22 +- .../subscriber/test_messages_on_hold.py | 274 ++++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 132 +++++++- 25 files changed, 2406 insertions(+), 327 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 75f430b09421..53d3dee5be21 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import abc +import enum import six @@ -134,6 +135,18 @@ def will_accept(self, message): # Okay, 
everything is good. return True + def cancel(self, cancellation_reason): + """Complete pending futures with an exception. + + This method must be called before publishing starts (ie: while the + batch is still accepting messages.) + + Args: + cancellation_reason (BatchCancellationReason): The reason why this + batch has been cancelled. + """ + raise NotImplementedError + @abc.abstractmethod def publish(self, message): """Publish a single message. @@ -154,16 +167,21 @@ def publish(self, message): raise NotImplementedError -class BatchStatus(object): - """An enum-like class representing valid statuses for a batch. - - It is acceptable for a class to use a status that is not on this - class; this represents the list of statuses where the existing - library hooks in functionality. - """ +class BatchStatus(str, enum.Enum): + """An enum-like class representing valid statuses for a batch.""" ACCEPTING_MESSAGES = "accepting messages" STARTING = "starting" IN_PROGRESS = "in progress" ERROR = "error" SUCCESS = "success" + + +class BatchCancellationReason(str, enum.Enum): + """An enum-like class representing reasons why a batch was cancelled.""" + + PRIOR_ORDERED_MESSAGE_FAILED = ( + "Batch cancelled because prior ordered message for the same key has " + "failed. This batch has been cancelled to avoid out-of-order publish." + ) + CLIENT_STOPPED = "Batch cancelled because the publisher client has been stopped." diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 4101bc518b0a..cdd913db459c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -62,15 +62,23 @@ class Batch(base.Batch): settings (~.pubsub_v1.types.BatchSettings): The settings for batch publishing. These should be considered immutable once the batch has been opened. 
- autocommit (bool): Whether to autocommit the batch when the time - has elapsed. Defaults to True unless ``settings.max_latency`` is - inf. + batch_done_callback (Callable[[bool], Any]): Callback called when the + response for a batch publish has been received. Called with one + boolean argument: successfully published or a permanent error + occurred. Temporary errors are not surfaced because they are retried + at a lower level. + commit_when_full (bool): Whether to commit the batch when the batch + is full. """ - def __init__(self, client, topic, settings, autocommit=True): + def __init__( + self, client, topic, settings, batch_done_callback=None, commit_when_full=True + ): self._client = client self._topic = topic self._settings = settings + self._batch_done_callback = batch_done_callback + self._commit_when_full = commit_when_full self._state_lock = threading.Lock() # These members are all communicated between threads; ensure that @@ -87,15 +95,6 @@ def __init__(self, client, topic, settings, autocommit=True): self._base_request_size = types.PublishRequest(topic=topic).ByteSize() self._size = self._base_request_size - # If max latency is specified, start a thread to monitor the batch and - # commit when the max latency is reached. - self._thread = None - if autocommit and self.settings.max_latency < float("inf"): - self._thread = threading.Thread( - name="Thread-MonitorBatchPublisher", target=self.monitor - ) - self._thread.start() - @staticmethod def make_lock(): """Return a threading lock. @@ -148,6 +147,27 @@ def status(self): """ return self._status + def cancel(self, cancellation_reason): + """Complete pending futures with an exception. + + This method must be called before publishing starts (ie: while the + batch is still accepting messages.) + + Args: + cancellation_reason (BatchCancellationReason): The reason why this + batch has been cancelled. 
+ """ + + with self._state_lock: + assert ( + self._status == base.BatchStatus.ACCEPTING_MESSAGES + ), "Cancel should not be called after sending has started." + + exc = RuntimeError(cancellation_reason.value) + for future in self._futures: + future.set_exception(exc) + self._status = base.BatchStatus.ERROR + def commit(self): """Actually publish all of the messages on the active batch. @@ -162,6 +182,7 @@ def commit(self): If the current batch is **not** accepting messages, this method does nothing. """ + # Set the status to "starting" synchronously, to ensure that # this batch will necessarily not accept new messages. with self._state_lock: @@ -170,7 +191,11 @@ def commit(self): else: return - # Start a new thread to actually handle the commit. + self._start_commit_thread() + + def _start_commit_thread(self): + """Start a new thread to actually handle the commit.""" + commit_thread = threading.Thread( name="Thread-CommitBatchPublisher", target=self._commit ) @@ -195,7 +220,10 @@ def _commit(self): # If, in the intervening period between when this method was # called and now, the batch started to be committed, or # completed a commit, then no-op at this point. - _LOGGER.debug("Batch is already in progress, exiting commit") + _LOGGER.debug( + "Batch is already in progress or has been cancelled, " + "exiting commit" + ) return # Once in the IN_PROGRESS state, no other thread can publish additional @@ -215,16 +243,24 @@ def _commit(self): # Log how long the underlying request takes. start = time.time() + batch_transport_succeeded = True try: + # Performs retries for errors defined in retry_codes.publish in the + # publisher_client_config.py file. response = self._client.api.publish(self._topic, self._messages) except google.api_core.exceptions.GoogleAPIError as exc: - # We failed to publish, set the exception on all futures and - # exit. + # We failed to publish, even after retries, so set the exception on + # all futures and exit. 
self._status = base.BatchStatus.ERROR for future in self._futures: future.set_exception(exc) + batch_transport_succeeded = False + if self._batch_done_callback is not None: + # Failed to publish batch. + self._batch_done_callback(batch_transport_succeeded) + _LOGGER.exception("Failed to publish %s messages.", len(self._futures)) return @@ -250,26 +286,17 @@ def _commit(self): for future in self._futures: future.set_exception(exception) + # Unknown error -> batch failed to be correctly transported/ + batch_transport_succeeded = False + _LOGGER.error( "Only %s of %s messages were published.", len(response.message_ids), len(self._futures), ) - def monitor(self): - """Commit this batch after sufficient time has elapsed. - - This simply sleeps for ``self.settings.max_latency`` seconds, - and then calls commit unless the batch has already been committed. - """ - # NOTE: This blocks; it is up to the calling code to call it - # in a separate thread. - - # Sleep for however long we should be waiting. - time.sleep(self.settings.max_latency) - - _LOGGER.debug("Monitor is waking up") - return self._commit() + if self._batch_done_callback is not None: + self._batch_done_callback(batch_transport_succeeded) def publish(self, message): """Publish a single message. @@ -294,6 +321,7 @@ def publish(self, message): pubsub_v1.publisher.exceptions.MessageTooLargeError: If publishing the ``message`` would exceed the max size limit on the backend. """ + # Coerce the type, just in case. if not isinstance(message, types.PubsubMessage): message = types.PubsubMessage(**message) @@ -301,6 +329,10 @@ def publish(self, message): future = None with self._state_lock: + assert ( + self._status != base.BatchStatus.ERROR + ), "Publish after stop() or publish error." + if not self.will_accept(message): return future @@ -333,7 +365,7 @@ def publish(self, message): # Try to commit, but it must be **without** the lock held, since # ``commit()`` will try to obtain the lock. 
- if overflow: + if self._commit_when_full and overflow: self.commit() return future diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py new file mode 100644 index 000000000000..fda5c1ee96cd --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -0,0 +1,70 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class Sequencer(object): + """The base class for sequencers for Pub/Sub publishing. A sequencer + sequences messages to be published. + """ + + @staticmethod + @abc.abstractmethod + def is_finished(self): + """ Whether the sequencer is finished and should be cleaned up. + + Returns: + bool: Whether the sequencer is finished and should be cleaned up. + """ + raise NotImplementedError + + @staticmethod + @abc.abstractmethod + def unpause(self, message): + """ Unpauses this sequencer. + + Raises: + RuntimeError: + If called when the sequencer has not been paused. 
+ """ + raise NotImplementedError + + @staticmethod + @abc.abstractmethod + def publish(self, message): + """ Publish message for this ordering key. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + A class instance that conforms to Python Standard library's + :class:`~concurrent.futures.Future` interface (but not an + instance of that class). The future might return immediately with a + `pubsub_v1.publisher.exceptions.PublishToPausedOrderingKeyException` + if the ordering key is paused. Otherwise, the future tracks the + lifetime of the message publish. + + Raises: + RuntimeError: + If called after this sequencer has been stopped, either by + a call to stop() or after all batches have been published. + """ + raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py new file mode 100644 index 000000000000..d8ddb3f8f6eb --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -0,0 +1,303 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import enum +import collections +import concurrent.futures as futures +import threading + +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base +from google.cloud.pubsub_v1.publisher._batch import base as batch_base + + +class _OrderedSequencerStatus(str, enum.Enum): + """An enum-like class representing valid statuses for an OrderedSequencer. + + Starting state: ACCEPTING_MESSAGES + Valid transitions: + ACCEPTING_MESSAGES -> PAUSED (on permanent error) + ACCEPTING_MESSAGES -> STOPPED (when user calls stop() explicitly) + ACCEPTING_MESSAGES -> FINISHED (all batch publishes finish normally) + + PAUSED -> ACCEPTING_MESSAGES (when user unpauses) + PAUSED -> STOPPED (when user calls stop() explicitly) + + STOPPED -> FINISHED (user stops client and the one remaining batch finishes + publish) + STOPPED -> PAUSED (stop() commits one batch, which fails permanently) + + FINISHED -> ACCEPTING_MESSAGES (publish happens while waiting for cleanup) + FINISHED -> STOPPED (when user calls stop() explicitly) + Illegal transitions: + PAUSED -> FINISHED (since all batches are cancelled on pause, there should + not be any that finish normally. paused sequencers + should not be cleaned up because their presence + indicates that the ordering key needs to be resumed) + STOPPED -> ACCEPTING_MESSAGES (no way to make a user-stopped sequencer + accept messages again. this is okay since + stop() should only be called on shutdown.) + FINISHED -> PAUSED (no messages remain in flight, so they can't cause a + permanent error and pause the sequencer) + """ + + # Accepting publishes and/or waiting for result of batch publish + ACCEPTING_MESSAGES = "accepting messages" + # Permanent error occurred. User must unpause this sequencer to resume + # publishing. This is done to maintain ordering. + PAUSED = "paused" + # No more publishes allowed. 
There may be an outstanding batch that will + # call the _batch_done_callback when it's done (success or error.) + STOPPED = "stopped" + # No more work to do. Waiting to be cleaned-up. A publish will transform + # this sequencer back into the normal accepting-messages state. + FINISHED = "finished" + + +class OrderedSequencer(sequencer_base.Sequencer): + """ Sequences messages into batches ordered by an ordering key for one topic. + + A sequencer always has at least one batch in it, unless paused or stopped. + When no batches remain, the |publishes_done_callback| is called so the + client can perform cleanup. + + Public methods are thread-safe. + + Args: + client (~.pubsub_v1.PublisherClient): The publisher client used to + create this sequencer. + topic (str): The topic. The format for this is + ``projects/{project}/topics/{topic}``. + ordering_key (str): The ordering key for this sequencer. + """ + + def __init__(self, client, topic, ordering_key): + self._client = client + self._topic = topic + self._ordering_key = ordering_key + # Guards the variables below + self._state_lock = threading.Lock() + # Batches ordered from first (head/left) to last (right/tail). + # Invariant: always has at least one batch after the first publish, + # unless paused or stopped. + self._ordered_batches = collections.deque() + # See _OrderedSequencerStatus for valid state transitions. + self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES + + def is_finished(self): + """ Whether the sequencer is finished and should be cleaned up. + + Returns: + bool: Whether the sequencer is finished and should be cleaned up. + """ + with self._state_lock: + return self._state == _OrderedSequencerStatus.FINISHED + + def stop(self): + """ Permanently stop this sequencer. + + This differs from pausing, which may be resumed. Immediately commits + the first batch and cancels the rest. + + Raises: + RuntimeError: + If called after stop() has already been called. 
+ """ + with self._state_lock: + if self._state == _OrderedSequencerStatus.STOPPED: + raise RuntimeError("Ordered sequencer already stopped.") + + self._state = _OrderedSequencerStatus.STOPPED + if self._ordered_batches: + # Give only the first batch the chance to finish. + self._ordered_batches[0].commit() + + # Cancel the rest of the batches and remove them from the deque + # of batches. + while len(self._ordered_batches) > 1: + # Pops from the tail until it leaves only the head in the + # deque. + batch = self._ordered_batches.pop() + batch.cancel(batch_base.BatchCancellationReason.CLIENT_STOPPED) + + def commit(self): + """ Commit the first batch, if unpaused. + + If paused or no batches exist, this method does nothing. + + Raises: + RuntimeError: + If called after stop() has already been called. + """ + with self._state_lock: + if self._state == _OrderedSequencerStatus.STOPPED: + raise RuntimeError("Ordered sequencer already stopped.") + + if self._state != _OrderedSequencerStatus.PAUSED and self._ordered_batches: + # It's okay to commit the same batch more than once. The + # operation is idempotent. + self._ordered_batches[0].commit() + + def _batch_done_callback(self, success): + """ Deal with completion of a batch. + + Called when a batch has finished publishing, with either a success + or a failure. (Temporary failures are retried infinitely when + ordering keys are enabled.) + """ + ensure_cleanup_and_commit_timer_runs = False + with self._state_lock: + assert self._state != _OrderedSequencerStatus.PAUSED, ( + "This method should not be called after pause() because " + "pause() should have cancelled all of the batches." + ) + assert self._state != _OrderedSequencerStatus.FINISHED, ( + "This method should not be called after all batches have been " + "finished." + ) + + # Message futures for the batch have been completed (either with a + # result or an exception) already, so remove the batch. 
+ self._ordered_batches.popleft() + + if success: + if len(self._ordered_batches) == 0: + # Mark this sequencer as finished. + # If new messages come in for this ordering key and this + # sequencer hasn't been cleaned up yet, it will go back + # into accepting-messages state. Otherwise, the client + # must create a new OrderedSequencer. + self._state = _OrderedSequencerStatus.FINISHED + # Ensure cleanup thread runs at some point. + ensure_cleanup_and_commit_timer_runs = True + elif len(self._ordered_batches) == 1: + # Wait for messages and/or commit timeout + # Ensure there's actually a commit timer thread that'll commit + # after a delay. + ensure_cleanup_and_commit_timer_runs = True + else: + # If there is more than one batch, we know that the next batch + # must be full and, therefore, ready to be committed. + self._ordered_batches[0].commit() + else: + # Unrecoverable error detected + self._pause() + + if ensure_cleanup_and_commit_timer_runs: + self._client.ensure_cleanup_and_commit_timer_runs() + + def _pause(self): + """ Pause this sequencer: set state to paused, cancel all batches, and + clear the list of ordered batches. + + _state_lock must be taken before calling this method. + """ + assert ( + self._state != _OrderedSequencerStatus.FINISHED + ), "Pause should not be called after all batches have finished." + self._state = _OrderedSequencerStatus.PAUSED + for batch in self._ordered_batches: + batch.cancel( + batch_base.BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED + ) + self._ordered_batches.clear() + + def unpause(self): + """ Unpause this sequencer. + + Raises: + RuntimeError: + If called when the ordering key has not been paused. + """ + with self._state_lock: + if self._state != _OrderedSequencerStatus.PAUSED: + raise RuntimeError("Ordering key is not paused.") + self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES + + def _create_batch(self): + """ Create a new batch using the client's batch class and other stored + settings. 
+ """ + return self._client._batch_class( + client=self._client, + topic=self._topic, + settings=self._client.batch_settings, + batch_done_callback=self._batch_done_callback, + commit_when_full=False, + ) + + def publish(self, message): + """ Publish message for this ordering key. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + A class instance that conforms to Python Standard library's + :class:`~concurrent.futures.Future` interface (but not an + instance of that class). The future might return immediately with a + PublishToPausedOrderingKeyException if the ordering key is paused. + Otherwise, the future tracks the lifetime of the message publish. + + Raises: + RuntimeError: + If called after this sequencer has been stopped, either by + a call to stop() or after all batches have been published. + """ + with self._state_lock: + if self._state == _OrderedSequencerStatus.PAUSED: + future = futures.Future() + exception = exceptions.PublishToPausedOrderingKeyException( + self._ordering_key + ) + future.set_exception(exception) + return future + + # If waiting to be cleaned-up, convert to accepting messages to + # prevent this sequencer from being cleaned-up only to have another + # one with the same ordering key created immediately afterward. + if self._state == _OrderedSequencerStatus.FINISHED: + self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES + + if self._state == _OrderedSequencerStatus.STOPPED: + raise RuntimeError("Cannot publish on a stopped sequencer.") + + assert ( + self._state == _OrderedSequencerStatus.ACCEPTING_MESSAGES + ), "Publish is only allowed in accepting-messages state." 
+ + if not self._ordered_batches: + new_batch = self._create_batch() + self._ordered_batches.append(new_batch) + + batch = self._ordered_batches[-1] + future = batch.publish(message) + while future is None: + batch = self._create_batch() + self._ordered_batches.append(batch) + future = batch.publish(message) + + return future + + # Used only for testing. + def _set_batch(self, batch): + self._ordered_batches = collections.deque([batch]) + + # Used only for testing. + def _set_batches(self, batches): + self._ordered_batches = collections.deque(batches) + + # Used only for testing. + def _get_batches(self): + return self._ordered_batches diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py new file mode 100644 index 000000000000..426bbded7ba1 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -0,0 +1,124 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from google.cloud.pubsub_v1.publisher._sequencer import base + + +class UnorderedSequencer(base.Sequencer): + """ Sequences messages into batches for one topic without any ordering. + + Public methods are NOT thread-safe. 
+ """ + + def __init__(self, client, topic): + self._client = client + self._topic = topic + self._current_batch = None + self._stopped = False + + def is_finished(self): + """ Whether the sequencer is finished and should be cleaned up. + + Returns: + bool: Whether the sequencer is finished and should be cleaned up. + """ + # TODO: Implement. Not implementing yet because of possible performance + # impact due to extra locking required. This does mean that + # UnorderedSequencers don't get cleaned up, but this is the same as + # previously existing behavior. + return False + + def stop(self): + """ Stop the sequencer. + + Subsequent publishes will fail. + + Raises: + RuntimeError: + If called after stop() has already been called. + """ + if self._stopped: + raise RuntimeError("Unordered sequencer already stopped.") + self.commit() + self._stopped = True + + def commit(self): + """ Commit the batch. + + Raises: + RuntimeError: + If called after stop() has already been called. + """ + if self._stopped: + raise RuntimeError("Unordered sequencer already stopped.") + if self._current_batch: + self._current_batch.commit() + + def unpause(self): + """ Not relevant for this class. """ + raise NotImplementedError + + def _create_batch(self): + """ Create a new batch using the client's batch class and other stored + settings. + """ + return self._client._batch_class( + client=self._client, + topic=self._topic, + settings=self._client.batch_settings, + batch_done_callback=None, + commit_when_full=True, + ) + + def publish(self, message): + """ Batch message into existing or new batch. + + Args: + message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + + Returns: + ~google.api_core.future.Future: An object conforming to + the :class:`~concurrent.futures.Future` interface. The future tracks + the publishing status of the message. + + Raises: + RuntimeError: + If called after stop() has already been called. 
+ + pubsub_v1.publisher.exceptions.MessageTooLargeError: If publishing + the ``message`` would exceed the max size limit on the backend. + """ + if self._stopped: + raise RuntimeError("Unordered sequencer already stopped.") + + if not self._current_batch: + newbatch = self._create_batch() + self._current_batch = newbatch + + batch = self._current_batch + future = None + while future is None: + # Might throw MessageTooLargeError + future = batch.publish(message) + # batch is full, triggering commit_when_full + if future is None: + batch = self._create_batch() + # At this point, we lose track of the old batch, but we don't + # care since it's already committed (because it was full.) + self._current_batch = batch + return future + + # Used only for testing. + def _set_batch(self, batch): + self._current_batch = batch diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 60a03bb652ab..9284420f5c0e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -15,8 +15,11 @@ from __future__ import absolute_import import copy +import logging import os import pkg_resources +import threading +import time import grpc import six @@ -29,10 +32,13 @@ from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport from google.cloud.pubsub_v1.publisher._batch import thread - +from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version +_LOGGER = logging.getLogger(__name__) + _BLACKLISTED_METHODS = ( "publish", "from_service_account_file", @@ -40,6 +46,14 @@ ) +def _set_nested_value(container, value, keys): + current = container + for 
key in keys[:-1]: + current = current.setdefault(key, {}) + current[keys[-1]] = value + return container + + @_gapic.add_methods(publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) class Client(object): """A publisher client for Google Cloud Pub/Sub. @@ -51,6 +65,9 @@ class Client(object): Args: batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The settings for batch publishing. + publisher_options (~google.cloud.pubsub_v1.types.PublisherOptions): The + options for the publisher client. Note that enabling message ordering will + override the publish retry timeout to be infinite. kwargs (dict): Any additional arguments provided are sent as keyword arguments to the underlying :class:`~google.cloud.pubsub_v1.gapic.publisher_client.PublisherClient`. @@ -74,6 +91,11 @@ class Client(object): max_latency=1, # One second ), + # Optional + publisher_options = pubsub_v1.types.PublisherOptions( + enable_message_ordering=False + ), + # Optional client_config = { "interfaces": { @@ -94,9 +116,15 @@ class Client(object): ) """ - _batch_class = thread.Batch + def __init__(self, batch_settings=(), publisher_options=(), **kwargs): + assert ( + type(batch_settings) is types.BatchSettings or len(batch_settings) == 0 + ), "batch_settings must be of type BatchSettings or an empty tuple." + assert ( + type(publisher_options) is types.PublisherOptions + or len(publisher_options) == 0 + ), "publisher_options must be of type PublisherOptions or an empty tuple." - def __init__(self, batch_settings=(), **kwargs): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. @@ -125,16 +153,40 @@ def __init__(self, batch_settings=(), **kwargs): transport = publisher_grpc_transport.PublisherGrpcTransport(channel=channel) kwargs["transport"] = transport + # For a transient failure, retry publishing the message infinitely. 
+ self.publisher_options = types.PublisherOptions(*publisher_options) + self._enable_message_ordering = self.publisher_options[0] + if self._enable_message_ordering: + # Set retry timeout to "infinite" when message ordering is enabled. + # Note that this then also impacts messages added with an empty ordering + # key. + client_config = _set_nested_value( + kwargs.pop("client_config", {}), + 2 ** 32, + [ + "interfaces", + "google.pubsub.v1.Publisher", + "retry_params", + "messaging", + "total_timeout_millis", + ], + ) + kwargs["client_config"] = client_config + # Add the metrics headers, and instantiate the underlying GAPIC # client. self.api = publisher_client.PublisherClient(**kwargs) + self._batch_class = thread.Batch self.batch_settings = types.BatchSettings(*batch_settings) # The batches on the publisher client are responsible for holding # messages. One batch exists for each topic. self._batch_lock = self._batch_class.make_lock() - self._batches = {} + # (topic, ordering_key) => sequencers object + self._sequencers = {} self._is_stopped = False + # Thread created to commit all sequencers after a timeout. + self._commit_thread = None @classmethod def from_service_account_file(cls, filename, batch_settings=(), **kwargs): @@ -167,44 +219,60 @@ def target(self): """ return publisher_client.PublisherClient.SERVICE_ADDRESS - def _batch(self, topic, create=False, autocommit=True): - """Return the current batch for the provided topic. + def _get_or_create_sequencer(self, topic, ordering_key): + """ Get an existing sequencer or create a new one given the (topic, + ordering_key) pair. 
+ """ + sequencer_key = (topic, ordering_key) + sequencer = self._sequencers.get(sequencer_key) + if sequencer is None: + if ordering_key == "": + sequencer = unordered_sequencer.UnorderedSequencer(self, topic) + else: + sequencer = ordered_sequencer.OrderedSequencer( + self, topic, ordering_key + ) + self._sequencers[sequencer_key] = sequencer + + return sequencer - This will create a new batch if ``create=True`` or if no batch - currently exists. + def resume_publish(self, topic, ordering_key): + """ Resume publish on an ordering key that has had unrecoverable errors. Args: - topic (str): A string representing the topic. - create (bool): Whether to create a new batch. Defaults to - :data:`False`. If :data:`True`, this will create a new batch - even if one already exists. - autocommit (bool): Whether to autocommit this batch. This is - primarily useful for debugging and testing, since it allows - the caller to avoid some side effects that batch creation - might have (e.g. spawning a worker to publish a batch). + topic (str): The topic to publish messages to. + ordering_key: A string that identifies related messages for which + publish order should be respected. - Returns: - ~.pubsub_v1._batch.Batch: The batch object. + Raises: + RuntimeError: + If called after publisher has been stopped by a `stop()` method + call. + ValueError: + If the topic/ordering key combination has not been seen before + by this client. """ - # If there is no matching batch yet, then potentially create one - # and place it on the batches dictionary. 
- if not create: - batch = self._batches.get(topic) - if batch is None: - create = True - - if create: - batch = self._batch_class( - autocommit=autocommit, - client=self, - settings=self.batch_settings, - topic=topic, - ) - self._batches[topic] = batch + with self._batch_lock: + if self._is_stopped: + raise RuntimeError("Cannot resume publish on a stopped publisher.") + + if not self._enable_message_ordering: + raise ValueError( + "Cannot resume publish on a topic/ordering key if ordering " + "is not enabled." + ) - return batch + sequencer_key = (topic, ordering_key) + sequencer = self._sequencers.get(sequencer_key) + if sequencer is None: + _LOGGER.debug( + "Error: The topic/ordering key combination has not " + "been seen before." + ) + else: + sequencer.unpause() - def publish(self, topic, data, **attrs): + def publish(self, topic, data, ordering_key="", **attrs): """Publish a single message. .. note:: @@ -234,6 +302,11 @@ def publish(self, topic, data, **attrs): topic (str): The topic to publish messages to. data (bytes): A bytestring representing the message body. This must be a bytestring. + ordering_key: A string that identifies related messages for which + publish order should be respected. Message ordering must be + enabled for this client to use this feature. + EXPERIMENTAL: This feature is currently available in a closed + alpha. Please contact the Cloud Pub/Sub team to use it. attrs (Mapping[str, str]): A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) @@ -245,8 +318,11 @@ def publish(self, topic, data, **attrs): Raises: RuntimeError: - If called after publisher has been stopped - by a `stop()` method call. + If called after publisher has been stopped by a `stop()` method + call. + + pubsub_v1.publisher.exceptions.MessageTooLargeError: If publishing + the ``message`` would exceed the max size limit on the backend. """ # Sanity check: Is the data being sent as a bytestring? 
# If it is literally anything else, complain loudly about it. @@ -255,6 +331,12 @@ def publish(self, topic, data, **attrs): "Data being published to Pub/Sub must be sent as a bytestring." ) + if not self._enable_message_ordering and ordering_key != "": + raise ValueError( + "Cannot publish a message with an ordering key when message " + "ordering is not enabled." + ) + # Coerce all attributes to text strings. for k, v in copy.copy(attrs).items(): if isinstance(v, six.text_type): @@ -268,21 +350,74 @@ def publish(self, topic, data, **attrs): ) # Create the Pub/Sub message object. - message = types.PubsubMessage(data=data, attributes=attrs) + message = types.PubsubMessage( + data=data, ordering_key=ordering_key, attributes=attrs + ) - # Delegate the publishing to the batch. with self._batch_lock: if self._is_stopped: raise RuntimeError("Cannot publish on a stopped publisher.") - batch = self._batch(topic) - future = None - while future is None: - future = batch.publish(message) - if future is None: - batch = self._batch(topic, create=True) + sequencer = self._get_or_create_sequencer(topic, ordering_key) + + # Delegate the publishing to the sequencer. + future = sequencer.publish(message) + + # Create a timer thread if necessary to enforce the batching + # timeout. + self._ensure_commit_timer_runs_no_lock() + + return future + + def ensure_cleanup_and_commit_timer_runs(self): + """ Ensure a cleanup/commit timer thread is running. + + If a cleanup/commit timer thread is already running, this does nothing. + """ + with self._batch_lock: + self._ensure_commit_timer_runs_no_lock() + + def _ensure_commit_timer_runs_no_lock(self): + """ Ensure a commit timer thread is running, without taking + _batch_lock. + + _batch_lock must be held before calling this method. 
+ """ + if not self._commit_thread and self.batch_settings.max_latency < float("inf"): + self._start_commit_thread() + + def _start_commit_thread(self): + """Start a new thread to actually wait and commit the sequencers.""" + self._commit_thread = threading.Thread( + name="Thread-PubSubBatchCommitter", target=self._wait_and_commit_sequencers + ) + self._commit_thread.start() + + def _wait_and_commit_sequencers(self): + """ Wait up to the batching timeout, and commit all sequencers. + """ + # Sleep for however long we should be waiting. + time.sleep(self.batch_settings.max_latency) + _LOGGER.debug("Commit thread is waking up") - return future + with self._batch_lock: + if self._is_stopped: + return + self._commit_sequencers() + self._commit_thread = None + + def _commit_sequencers(self): + """ Clean up finished sequencers and commit the rest. """ + finished_sequencer_keys = [ + key + for key, sequencer in self._sequencers.items() + if sequencer.is_finished() + ] + for sequencer_key in finished_sequencer_keys: + del self._sequencers[sequencer_key] + + for sequencer in self._sequencers.values(): + sequencer.commit() def stop(self): """Immediately publish all outstanding messages. @@ -297,6 +432,11 @@ def stop(self): This method is non-blocking. Use `Future()` objects returned by `publish()` to make sure all publish requests completed, either in success or error. + + Raises: + RuntimeError: + If called after publisher has been stopped by a `stop()` method + call. """ with self._batch_lock: if self._is_stopped: @@ -304,5 +444,19 @@ def stop(self): self._is_stopped = True - for batch in self._batches.values(): - batch.commit() + for sequencer in self._sequencers.values(): + sequencer.stop() + + # Used only for testing. + def _set_batch(self, topic, batch, ordering_key=""): + sequencer = self._get_or_create_sequencer(topic, ordering_key) + sequencer._set_batch(batch) + + # Used only for testing. 
+ def _set_batch_class(self, batch_class): + self._batch_class = batch_class + + # Used only for testing. + def _set_sequencer(self, topic, sequencer, ordering_key=""): + sequencer_key = (topic, ordering_key) + self._sequencers[sequencer_key] = sequencer diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index be176bac2dba..856be955a179 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -26,4 +26,21 @@ class MessageTooLargeError(ValueError): """Attempt to publish a message that would exceed the server max size limit.""" -__all__ = ("MessageTooLargeError", "PublishError", "TimeoutError") +class PublishToPausedOrderingKeyException(Exception): + """ Publish attempted to paused ordering key. To resume publishing, call + the resumePublish method on the publisher Client object with this + ordering key. Ordering keys are paused if an unrecoverable error + occurred during publish of a batch for that key. + """ + + def __init__(self, ordering_key): + self.ordering_key = ordering_key + super(PublishToPausedOrderingKeyException, self).__init__() + + +__all__ = ( + "MessageTooLargeError", + "PublishError", + "TimeoutError", + "PublishToPausedOrderingKeyException", +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index b1d8429cba58..6a82ba0469c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -155,6 +155,8 @@ def drop(self, items): items(Sequence[DropRequest]): The items to drop. 
""" self._manager.leaser.remove(items) + ordering_keys = (k.ordering_key for k in items if k.ordering_key) + self._manager.activate_ordering_keys(ordering_keys) self._manager.maybe_resume_consumer() def lease(self, items): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 8a683e4e772d..b60379444a44 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -30,7 +30,9 @@ _LEASE_WORKER_NAME = "Thread-LeaseMaintainer" -_LeasedMessage = collections.namedtuple("_LeasedMessage", ["added_time", "size"]) +_LeasedMessage = collections.namedtuple( + "_LeasedMessage", ["sent_time", "size", "ordering_key"] +) class Leaser(object): @@ -45,6 +47,7 @@ def __init__(self, manager): # intertwined. Protects the _leased_messages and _bytes attributes. self._add_remove_lock = threading.Lock() + # Dict of ack_id -> _LeasedMessage self._leased_messages = {} """dict[str, float]: A mapping of ack IDs to the local time when the ack ID was initially leased in seconds since the epoch.""" @@ -76,12 +79,31 @@ def add(self, items): # the size counter. if item.ack_id not in self._leased_messages: self._leased_messages[item.ack_id] = _LeasedMessage( - added_time=time.time(), size=item.byte_size + sent_time=float("inf"), + size=item.byte_size, + ordering_key=item.ordering_key, ) self._bytes += item.byte_size else: _LOGGER.debug("Message %s is already lease managed", item.ack_id) + def start_lease_expiry_timer(self, ack_ids): + """Start the lease expiry timer for `items`. + + Args: + items (Sequence[str]): Sequence of ack-ids for which to start + lease expiry timers. 
+ """ + with self._add_remove_lock: + for ack_id in ack_ids: + lease_info = self._leased_messages.get(ack_id) + # Lease info might not exist for this ack_id because it has already + # been removed by remove(). + if lease_info: + self._leased_messages[ack_id] = lease_info._replace( + sent_time=time.time() + ) + def remove(self, items): """Remove messages from lease management.""" with self._add_remove_lock: @@ -116,14 +138,14 @@ def maintain_leases(self): # we're iterating over it. leased_messages = copy.copy(self._leased_messages) - # Drop any leases that are well beyond max lease time. This - # ensures that in the event of a badly behaving actor, we can - # drop messages and allow Pub/Sub to resend them. + # Drop any leases that are beyond the max lease time. This ensures + # that in the event of a badly behaving actor, we can drop messages + # and allow the Pub/Sub server to resend them. cutoff = time.time() - self._manager.flow_control.max_lease_duration to_drop = [ - requests.DropRequest(ack_id, item.size) + requests.DropRequest(ack_id, item.size, item.ordering_key) for ack_id, item in six.iteritems(leased_messages) - if item.added_time < cutoff + if item.sent_time < cutoff ] if to_drop: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py new file mode 100644 index 000000000000..bab15f2182b7 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py @@ -0,0 +1,162 @@ +# Copyright 2020, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import collections + + +class MessagesOnHold(object): + """Tracks messages on hold by ordering key. Not thread-safe. + """ + + def __init__(self): + self._size = 0 + + # A FIFO queue for the messages that have been received from the server, + # but not yet sent to the user callback. + # Both ordered and unordered messages may be in this queue. Ordered + # message state tracked in _pending_ordered_messages once ordered + # messages are taken off this queue. + # The tail of the queue is to the right side of the deque; the head is + # to the left side. + self._messages_on_hold = collections.deque() + + # Dict of ordering_key -> queue of ordered messages that have not been + # delivered to the user. + # All ordering keys in this collection have a message in flight. Once + # that one is acked or nacked, the next message in the queue for that + # ordering key will be sent. + # If the queue is empty, it means there's a message for that key in + # flight, but there are no pending messages. + self._pending_ordered_messages = {} + + @property + def size(self): + """Return the number of messages on hold across ordered and unordered + messages. + + Note that this object may still store information about ordered messages + in flight even if size is zero. + + Returns: + int: The size value. + """ + return self._size + + def get(self): + """ Gets a message from the on-hold queue. A message with an ordering + key wont be returned if there's another message with the same key in + flight. 
+ + Returns: + Optional[google.cloud.pubsub_v1.subscriber.message.Message]: A message + that hasn't been sent to the user yet or None if there are no + messages available. + """ + while self._messages_on_hold: + msg = self._messages_on_hold.popleft() + + if msg.ordering_key: + pending_queue = self._pending_ordered_messages.get(msg.ordering_key) + if pending_queue is None: + # Create empty queue to indicate a message with the + # ordering key is in flight. + self._pending_ordered_messages[ + msg.ordering_key + ] = collections.deque() + self._size = self._size - 1 + return msg + else: + # Another message is in flight so add message to end of + # queue for this ordering key. + pending_queue.append(msg) + else: + # Unordered messages can be returned without any + # restrictions. + self._size = self._size - 1 + return msg + + return None + + def put(self, message): + """Put a message on hold. + + Args: + message (google.cloud.pubsub_v1.subscriber.message.Message): The + message to put on hold. + """ + self._messages_on_hold.append(message) + self._size = self._size + 1 + + def activate_ordering_keys(self, ordering_keys, schedule_message_callback): + """Send the next message in the queue for each of the passed-in + ordering keys, if they exist. Clean up state for keys that no longer + have any queued messages. + + See comment at streaming_pull_manager.activate_ordering_keys() for more + detail about the impact of this method on load. + + Args: + ordering_keys(Sequence[str]): A sequence of ordering keys to + activate. May be empty. + schedule_message_callback(Callable[google.cloud.pubsub_v1.subscriber.message.Message]): + The callback to call to schedule a message to be sent to the user. + """ + for key in ordering_keys: + assert ( + self._pending_ordered_messages.get(key) is not None + ), "A message queue should exist for every ordered message in flight." 
+ next_msg = self._get_next_for_ordering_key(key) + if next_msg: + # Schedule the next message because the previous was dropped. + # Note that this may overload the user's `max_bytes` limit, but + # not their `max_messages` limit. + schedule_message_callback(next_msg) + else: + # No more messages for this ordering key, so do clean-up. + self._clean_up_ordering_key(key) + + def _get_next_for_ordering_key(self, ordering_key): + """Get next message for ordering key. + + The client should call clean_up_ordering_key() if this method returns + None. + + Args: + ordering_key (str): Ordering key for which to get the next message. + + Returns: + google.cloud.pubsub_v1.subscriber.message.Message|None: The + next message for this ordering key or None if there aren't any. + """ + queue_for_key = self._pending_ordered_messages.get(ordering_key) + if queue_for_key: + self._size = self._size - 1 + return queue_for_key.popleft() + return None + + def _clean_up_ordering_key(self, ordering_key): + """Clean up state for an ordering key with no pending messages. + + Args: + ordering_key (str): The ordering key to clean up. + """ + message_queue = self._pending_ordered_messages.get(ordering_key) + assert ( + message_queue is not None + ), "Cleaning up ordering key that does not exist." + assert not len(message_queue), ( + "Ordering key must only be removed if there are no messages " + "left for that key." + ) + del self._pending_ordered_messages[ordering_key] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index ac1df0af8eff..58d53a61da9c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -21,13 +21,19 @@ # Namedtuples for management requests. 
Used by the Message class to communicate # items of work back to the policy. AckRequest = collections.namedtuple( - "AckRequest", ["ack_id", "byte_size", "time_to_ack"] + "AckRequest", ["ack_id", "byte_size", "time_to_ack", "ordering_key"] ) -DropRequest = collections.namedtuple("DropRequest", ["ack_id", "byte_size"]) +DropRequest = collections.namedtuple( + "DropRequest", ["ack_id", "byte_size", "ordering_key"] +) -LeaseRequest = collections.namedtuple("LeaseRequest", ["ack_id", "byte_size"]) +LeaseRequest = collections.namedtuple( + "LeaseRequest", ["ack_id", "byte_size", "ordering_key"] +) ModAckRequest = collections.namedtuple("ModAckRequest", ["ack_id", "seconds"]) -NackRequest = collections.namedtuple("NackRequest", ["ack_id", "byte_size"]) +NackRequest = collections.namedtuple( + "NackRequest", ["ack_id", "byte_size", "ordering_key"] +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 26764b1a9101..0a3d9141fef0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -21,7 +21,6 @@ import grpc import six -from six.moves import queue from google.api_core import bidi from google.api_core import exceptions @@ -30,6 +29,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import histogram from google.cloud.pubsub_v1.subscriber._protocol import leaser +from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests import google.cloud.pubsub_v1.subscriber.message import google.cloud.pubsub_v1.subscriber.scheduler @@ -123,12 +123,11 @@ def __init__( else: self._scheduler = scheduler - # A FIFO 
queue for the messages that have been received from the server, - # but not yet added to the lease management (and not sent to user callback), - # because the FlowControl limits have been hit. - self._messages_on_hold = queue.Queue() + # A collection for the messages that have been received from the server, + # but not yet sent to the user callback. + self._messages_on_hold = messages_on_hold.MessagesOnHold() - # the total number of bytes consumed by the messages currently on hold + # The total number of bytes consumed by the messages currently on hold self._on_hold_bytes = 0 # A lock ensuring that pausing / resuming the consumer are both atomic @@ -225,7 +224,7 @@ def load(self): # be subtracted from the leaser's values. return max( [ - (self._leaser.message_count - self._messages_on_hold.qsize()) + (self._leaser.message_count - self._messages_on_hold.size) / self._flow_control.max_messages, (self._leaser.bytes - self._on_hold_bytes) / self._flow_control.max_bytes, @@ -240,6 +239,25 @@ def add_close_callback(self, callback): """ self._close_callbacks.append(callback) + def activate_ordering_keys(self, ordering_keys): + """Send the next message in the queue for each of the passed-in + ordering keys, if they exist. Clean up state for keys that no longer + have any queued messages. + + Since the load went down by one message, it's probably safe to send the + user another message for the same key. Since the released message may be + bigger than the previous one, this may increase the load above the maximum. + This decision is by design because it simplifies MessagesOnHold. + + Args: + ordering_keys(Sequence[str]): A sequence of ordering keys to + activate. May be empty. 
+ """ + with self._pause_resume_lock: + self._messages_on_hold.activate_ordering_keys( + ordering_keys, self._schedule_message_on_hold + ) + def maybe_pause_consumer(self): """Check the current load and pause the consumer if needed.""" with self._pause_resume_lock: @@ -290,30 +308,44 @@ def _maybe_release_messages(self): The method assumes the caller has acquired the ``_pause_resume_lock``. """ - while True: - if self.load >= _MAX_LOAD: - break # already overloaded - - try: - msg = self._messages_on_hold.get_nowait() - except queue.Empty: + released_ack_ids = [] + while self.load < _MAX_LOAD: + msg = self._messages_on_hold.get() + if not msg: break - self._on_hold_bytes -= msg.size + self._schedule_message_on_hold(msg) + released_ack_ids.append(msg.ack_id) + self._leaser.start_lease_expiry_timer(released_ack_ids) - if self._on_hold_bytes < 0: - _LOGGER.warning( - "On hold bytes was unexpectedly negative: %s", self._on_hold_bytes - ) - self._on_hold_bytes = 0 + def _schedule_message_on_hold(self, msg): + """Schedule a message on hold to be sent to the user and change + on-hold-bytes. - _LOGGER.debug( - "Released held message, scheduling callback for it, " - "still on hold %s (bytes %s).", - self._messages_on_hold.qsize(), - self._on_hold_bytes, + The method assumes the caller has acquired the ``_pause_resume_lock``. + + Args: + msg (google.cloud.pubsub_v1.message.Message): The message to + schedule to be sent to the user. + """ + assert msg, "Message must not be None." + + # On-hold bytes goes down, increasing load. 
+ self._on_hold_bytes -= msg.size + + if self._on_hold_bytes < 0: + _LOGGER.warning( + "On hold bytes was unexpectedly negative: %s", self._on_hold_bytes ) - self._scheduler.schedule(self._callback, msg) + self._on_hold_bytes = 0 + + _LOGGER.debug( + "Released held message, scheduling callback for it, " + "still on hold %s (bytes %s).", + self._messages_on_hold.size, + self._on_hold_bytes, + ) + self._scheduler.schedule(self._callback, msg) def _send_unary_request(self, request): """Send a request using a separate unary request instead of over the @@ -552,7 +584,7 @@ def _on_response(self, response): _LOGGER.debug( "Processing %s received message(s), currenty on hold %s (bytes %s).", len(response.received_messages), - self._messages_on_hold.qsize(), + self._messages_on_hold.size, self._on_hold_bytes, ) @@ -565,37 +597,26 @@ def _on_response(self, response): ] self._dispatcher.modify_ack_deadline(items) - invoke_callbacks_for = [] + with self._pause_resume_lock: + for received_message in response.received_messages: + message = google.cloud.pubsub_v1.subscriber.message.Message( + received_message.message, + received_message.ack_id, + received_message.delivery_attempt, + self._scheduler.queue, + ) + self._messages_on_hold.put(message) + self._on_hold_bytes += message.size + req = requests.LeaseRequest( + ack_id=message.ack_id, + byte_size=message.size, + ordering_key=message.ordering_key, + ) + self.leaser.add([req]) - for received_message in response.received_messages: - message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, - received_message.ack_id, - received_message.delivery_attempt, - self._scheduler.queue, - ) - # Making a decision based on the load, and modifying the data that - # affects the load -> needs a lock, as that state can be modified - # by different threads. 
- with self._pause_resume_lock: - if self.load < _MAX_LOAD: - invoke_callbacks_for.append(message) - else: - self._messages_on_hold.put(message) - self._on_hold_bytes += message.size - - req = requests.LeaseRequest(ack_id=message.ack_id, byte_size=message.size) - self.leaser.add([req]) - self.maybe_pause_consumer() + self._maybe_release_messages() - _LOGGER.debug( - "Scheduling callbacks for %s new messages, new total on hold %s (bytes %s).", - len(invoke_callbacks_for), - self._messages_on_hold.qsize(), - self._on_hold_bytes, - ) - for msg in invoke_callbacks_for: - self._scheduler.schedule(self._callback, msg) + self.maybe_pause_consumer() def _should_recover(self, exception): """Determine if an error on the RPC stream should be recovered. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 6dc7bc443b59..cafc34b80f2f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -26,6 +26,7 @@ _MESSAGE_REPR = """\ Message {{ data: {!r} + ordering_key: {!r} attributes: {} }}""" @@ -112,7 +113,7 @@ def __repr__(self): pretty_attrs = _indent(pretty_attrs) # We don't actually want the first line indented. 
pretty_attrs = pretty_attrs.lstrip() - return _MESSAGE_REPR.format(abbv_data, pretty_attrs) + return _MESSAGE_REPR.format(abbv_data, str(self.ordering_key), pretty_attrs) @property def attributes(self): @@ -156,6 +157,11 @@ def publish_time(self): ) return datetime_helpers._UTC_EPOCH + delta + @property + def ordering_key(self): + """str: the ordering key used to publish the message.""" + return self._message.ordering_key + @property def size(self): """Return the size of the underlying message, in bytes.""" @@ -207,7 +213,10 @@ def ack(self): time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( requests.AckRequest( - ack_id=self._ack_id, byte_size=self.size, time_to_ack=time_to_ack + ack_id=self._ack_id, + byte_size=self.size, + time_to_ack=time_to_ack, + ordering_key=self.ordering_key, ) ) @@ -220,12 +229,14 @@ def drop(self): .. warning:: For most use cases, the only reason to drop a message from - lease management is on :meth:`ack` or :meth:`nack`; these methods - both call this one. You probably do not want to call this method - directly. + lease management is on `ack` or `nack`; this library + automatically drop()s the message on `ack` or `nack`. You probably + do not want to call this method directly. """ self._request_queue.put( - requests.DropRequest(ack_id=self._ack_id, byte_size=self.size) + requests.DropRequest( + ack_id=self._ack_id, byte_size=self.size, ordering_key=self.ordering_key + ) ) def modify_ack_deadline(self, seconds): @@ -253,5 +264,7 @@ def nack(self): This will cause the message to be re-delivered to the subscription. 
""" self._request_queue.put( - requests.NackRequest(ack_id=self._ack_id, byte_size=self.size) + requests.NackRequest( + ack_id=self._ack_id, byte_size=self.size, ordering_key=self.ordering_key + ) ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 2d238b42f797..28019f47801f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -30,6 +30,25 @@ from google.cloud.pubsub_v1.proto import pubsub_pb2 +# Define the default publisher options. +# +# This class is used when creating a publisher client to pass in options +# to enable/disable features. +PublisherOptions = collections.namedtuple( + "PublisherConfig", ["enable_message_ordering"] +) +PublisherOptions.__new__.__defaults__ = (False,) # enable_message_ordering: False + +if sys.version_info >= (3, 5): + PublisherOptions.__doc__ = "The options for the publisher client." + PublisherOptions.enable_message_ordering.__doc__ = ( + "Whether to order messages in a batch by a supplied ordering key." + "EXPERIMENTAL: Message ordering is an alpha feature that requires " + "special permissions to use. Please contact the Cloud Pub/Sub team for " + "more information." + ) + + # Define the default values for batching. 
# # This class is used when creating a publisher or subscriber client, and diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index b19a5a1f1504..96f18451d8ee 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -35,7 +35,7 @@ def create_batch(status=None, settings=types.BatchSettings()): """ creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - batch = Batch(client, "topic_name", settings, autocommit=False) + batch = Batch(client, "topic_name", settings) if status: batch._status = status return batch diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index f51b314af6df..ce288a48e41c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -25,6 +25,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus +from google.cloud.pubsub_v1.publisher._batch.base import BatchCancellationReason from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._batch.thread import Batch @@ -34,16 +35,21 @@ def create_client(): return publisher.Client(credentials=creds) -def create_batch(autocommit=False, topic="topic_name", **batch_settings): +def create_batch( + topic="topic_name", + batch_done_callback=None, + commit_when_full=True, + **batch_settings +): """Return a batch object suitable for testing. Args: - autocommit (bool): Whether the batch should commit after - ``max_latency`` seconds. 
By default, this is ``False`` - for unit testing. - topic (str): The name of the topic the batch should publish - the messages to. - batch_settings (dict): Arguments passed on to the + topic (str): Topic name. + batch_done_callback (Callable[bool]): A callable that is called when + the batch is done, either with a success or a failure flag. + commit_when_full (bool): Whether to commit the batch when the batch + has reached byte-size or number-of-messages limits. + batch_settings (Mapping[str, str]): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. Returns: @@ -51,29 +57,13 @@ def create_batch(autocommit=False, topic="topic_name", **batch_settings): """ client = create_client() settings = types.BatchSettings(**batch_settings) - return Batch(client, topic, settings, autocommit=autocommit) - - -def test_init(): - """Establish that a monitor thread is usually created on init.""" - client = create_client() - - # Do not actually create a thread, but do verify that one was created; - # it should be running the batch's "monitor" method (which commits the - # batch once time elapses). - with mock.patch.object(threading, "Thread", autospec=True) as Thread: - batch = Batch(client, "topic_name", types.BatchSettings()) - Thread.assert_called_once_with( - name="Thread-MonitorBatchPublisher", target=batch.monitor - ) - - # New batches start able to accept messages by default. 
- assert batch.status == BatchStatus.ACCEPTING_MESSAGES - - -def test_init_infinite_latency(): - batch = create_batch(max_latency=float("inf")) - assert batch._thread is None + return Batch( + client, + topic, + settings, + batch_done_callback=batch_done_callback, + commit_when_full=commit_when_full, + ) @mock.patch.object(threading, "Lock") @@ -86,20 +76,18 @@ def test_make_lock(Lock): def test_client(): client = create_client() settings = types.BatchSettings() - batch = Batch(client, "topic_name", settings, autocommit=False) + batch = Batch(client, "topic_name", settings) assert batch.client is client def test_commit(): batch = create_batch() - with mock.patch.object(threading, "Thread", autospec=True) as Thread: - batch.commit() - # A thread should have been created to do the actual commit. - Thread.assert_called_once_with( - name="Thread-CommitBatchPublisher", target=batch._commit - ) - Thread.return_value.start.assert_called_once_with() + with mock.patch.object( + Batch, "_start_commit_thread", autospec=True + ) as _start_commit_thread: + batch.commit() + _start_commit_thread.assert_called_once() # The batch's status needs to be something other than "accepting messages", # since the commit started. @@ -202,7 +190,7 @@ def test_blocking__commit_already_started(_LOGGER): assert batch._status == BatchStatus.IN_PROGRESS _LOGGER.debug.assert_called_once_with( - "Batch is already in progress, exiting commit" + "Batch is already in progress or has been cancelled, exiting commit" ) @@ -273,34 +261,6 @@ def test_block__commmit_retry_error(): assert future.exception() == error -def test_monitor(): - batch = create_batch(max_latency=5.0) - with mock.patch.object(time, "sleep") as sleep: - with mock.patch.object(type(batch), "_commit") as _commit: - batch.monitor() - - # The monitor should have waited the given latency. - sleep.assert_called_once_with(5.0) - - # Since `monitor` runs in its own thread, it should call - # the blocking commit implementation. 
- _commit.assert_called_once_with() - - -def test_monitor_already_committed(): - batch = create_batch(max_latency=5.0) - status = "something else" - batch._status = status - with mock.patch.object(time, "sleep") as sleep: - batch.monitor() - - # The monitor should have waited the given latency. - sleep.assert_called_once_with(5.0) - - # The status should not have changed. - assert batch._status == status - - def test_publish_updating_batch_size(): batch = create_batch(topic="topic_foo") messages = ( @@ -419,3 +379,116 @@ def test_publish_dict(): ) assert batch.messages == [expected_message] assert batch._futures == [future] + + +def test_cancel(): + batch = create_batch() + futures = ( + batch.publish({"data": b"This is my message."}), + batch.publish({"data": b"This is another message."}), + ) + + batch.cancel(BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED) + + # Assert all futures are cancelled with an error. + for future in futures: + exc = future.exception() + assert type(exc) is RuntimeError + assert exc.args[0] == BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED.value + + +def test_do_not_commit_when_full_when_flag_is_off(): + max_messages = 4 + # Set commit_when_full flag to False + batch = create_batch(max_messages=max_messages, commit_when_full=False) + messages = ( + types.PubsubMessage(data=b"foobarbaz"), + types.PubsubMessage(data=b"spameggs"), + types.PubsubMessage(data=b"1335020400"), + ) + + with mock.patch.object(batch, "commit") as commit: + # Publish 3 messages. + futures = [batch.publish(message) for message in messages] + assert len(futures) == 3 + + # When a fourth message is published, commit should not be called. 
+ future = batch.publish(types.PubsubMessage(data=b"last one")) + assert commit.call_count == 0 + assert future is None + + +class BatchDoneCallbackTracker(object): + def __init__(self): + self.called = False + self.success = None + + def __call__(self, success): + self.called = True + self.success = success + + +def test_batch_done_callback_called_on_success(): + batch_done_callback_tracker = BatchDoneCallbackTracker() + batch = create_batch(batch_done_callback=batch_done_callback_tracker) + + # Ensure messages exist. + message = types.PubsubMessage(data=b"foobarbaz") + batch.publish(message) + + # One response for one published message. + publish_response = types.PublishResponse(message_ids=["a"]) + + with mock.patch.object( + type(batch.client.api), "publish", return_value=publish_response + ): + batch._commit() + + assert batch_done_callback_tracker.called + assert batch_done_callback_tracker.success + + +def test_batch_done_callback_called_on_publish_failure(): + batch_done_callback_tracker = BatchDoneCallbackTracker() + batch = create_batch(batch_done_callback=batch_done_callback_tracker) + + # Ensure messages exist. + message = types.PubsubMessage(data=b"foobarbaz") + batch.publish(message) + + # One response for one published message. + publish_response = types.PublishResponse(message_ids=["a"]) + + # Induce publish error. + error = google.api_core.exceptions.InternalServerError("uh oh") + + with mock.patch.object( + type(batch.client.api), + "publish", + return_value=publish_response, + side_effect=error, + ): + batch._commit() + + assert batch_done_callback_tracker.called + assert not batch_done_callback_tracker.success + + +def test_batch_done_callback_called_on_publish_response_invalid(): + batch_done_callback_tracker = BatchDoneCallbackTracker() + batch = create_batch(batch_done_callback=batch_done_callback_tracker) + + # Ensure messages exist. 
+ message = types.PubsubMessage(data=b"foobarbaz") + batch.publish(message) + + # No message ids returned in successful publish response -> invalid. + publish_response = types.PublishResponse(message_ids=[]) + + with mock.patch.object( + type(batch.client.api), "publish", return_value=publish_response + ): + batch._commit() + + assert batch_done_callback_tracker.called + assert not batch_done_callback_tracker.success diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py new file mode 100644 index 000000000000..08e1954e6de3 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -0,0 +1,305 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import concurrent.futures as futures +import mock +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer + +_ORDERING_KEY = "ordering_key_1" + + +def create_message(): + return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def create_ordered_sequencer(client): + return ordered_sequencer.OrderedSequencer(client, "topic_name", _ORDERING_KEY) + + +def test_stop_makes_sequencer_invalid(): + client = create_client() + message = create_message() + + sequencer = create_ordered_sequencer(client) + + sequencer.stop() + + # Publish after stop() throws + with pytest.raises(RuntimeError): + sequencer.publish(message) + + # Commit after stop() throws + with pytest.raises(RuntimeError): + sequencer.commit() + + # Stop after stop() throws + with pytest.raises(RuntimeError): + sequencer.stop() + + +def test_stop_no_batches(): + client = create_client() + + sequencer = create_ordered_sequencer(client) + + # No exceptions thrown if there are no batches. + sequencer.stop() + + +def test_stop_one_batch(): + client = create_client() + + sequencer = create_ordered_sequencer(client) + + batch1 = mock.Mock(spec=client._batch_class) + sequencer._set_batches([batch1]) + + sequencer.stop() + + # Assert that the first batch is committed. + assert batch1.commit.call_count == 1 + assert batch1.cancel.call_count == 0 + + +def test_stop_many_batches(): + client = create_client() + + sequencer = create_ordered_sequencer(client) + + batch1 = mock.Mock(spec=client._batch_class) + batch2 = mock.Mock(spec=client._batch_class) + sequencer._set_batches([batch1, batch2]) + + sequencer.stop() + + # Assert that the first batch is committed and the rest cancelled. 
+ assert batch1.commit.call_count == 1 + assert batch1.cancel.call_count == 0 + assert batch2.commit.call_count == 0 + assert batch2.cancel.call_count == 1 + + +def test_commit(): + client = create_client() + + sequencer = create_ordered_sequencer(client) + + batch1 = mock.Mock(spec=client._batch_class) + batch2 = mock.Mock(spec=client._batch_class) + sequencer._set_batches([batch1, batch2]) + + sequencer.commit() + # Only commit the first batch. + assert batch1.commit.call_count == 1 + assert batch2.commit.call_count == 0 + + +def test_commit_empty_batch_list(): + client = create_client() + + sequencer = create_ordered_sequencer(client) + + # Test nothing bad happens. + sequencer.commit() + + +def test_no_commit_when_paused(): + client = create_client() + batch = mock.Mock(spec=client._batch_class) + + sequencer = create_ordered_sequencer(client) + sequencer._set_batch(batch) + + sequencer._pause() + + sequencer.commit() + assert batch.commit.call_count == 0 + + +def test_pause_and_unpause(): + client = create_client() + message = create_message() + sequencer = create_ordered_sequencer(client) + + # Unpausing without pausing throws. + with pytest.raises(RuntimeError): + sequencer.unpause() + + sequencer._pause() + + # Publishing while paused returns a future with an exception. + future = sequencer.publish(message) + assert future.exception().ordering_key == _ORDERING_KEY + + sequencer.unpause() + + # Assert publish does not set exception after unpause(). 
+ future = sequencer.publish(message) + with pytest.raises(futures._base.TimeoutError): + future.exception(timeout=0) + + +def test_basic_publish(): + client = create_client() + message = create_message() + batch = mock.Mock(spec=client._batch_class) + + sequencer = create_ordered_sequencer(client) + sequencer._set_batch(batch) + + sequencer.publish(message) + batch.publish.assert_called_once_with(message) + + +def test_publish_batch_full(): + client = create_client() + message = create_message() + batch = mock.Mock(spec=client._batch_class) + # Make batch full. + batch.publish.return_value = None + + sequencer = create_ordered_sequencer(client) + sequencer._set_batch(batch) + + # Will create a new batch since the old one is full, and return a future. + future = sequencer.publish(message) + batch.publish.assert_called_once_with(message) + assert future is not None + + # There's now the old and the new batches. + assert len(sequencer._get_batches()) == 2 + + +def test_batch_done_successfully(): + client = create_client() + batch = mock.Mock(spec=client._batch_class) + + sequencer = ordered_sequencer.OrderedSequencer(client, "topic_name", _ORDERING_KEY) + sequencer._set_batch(batch) + + sequencer._batch_done_callback(success=True) + + # One batch is done, so the OrderedSequencer has no more work, and should + # return true for is_finished(). + assert sequencer.is_finished() + + # No batches remain in the batches list. + assert len(sequencer._get_batches()) == 0 + + +def test_batch_done_successfully_one_batch_remains(): + client = create_client() + batch1 = mock.Mock(spec=client._batch_class) + batch2 = mock.Mock(spec=client._batch_class) + + sequencer = ordered_sequencer.OrderedSequencer(client, "topic_name", _ORDERING_KEY) + sequencer._set_batches([batch1, batch2]) + + sequencer._batch_done_callback(success=True) + + # One batch is done, but the OrderedSequencer has more work, so is_finished() + # should return false. 
+ assert not sequencer.is_finished() + + # Second batch should be not be committed since the it may still be able to + # accept messages. + assert batch2.commit.call_count == 0 + + # Only the second batch remains in the batches list. + assert len(sequencer._get_batches()) == 1 + + +def test_batch_done_successfully_many_batches_remain(): + client = create_client() + batch1 = mock.Mock(spec=client._batch_class) + batch2 = mock.Mock(spec=client._batch_class) + batch3 = mock.Mock(spec=client._batch_class) + + sequencer = ordered_sequencer.OrderedSequencer(client, "topic_name", _ORDERING_KEY) + sequencer._set_batches([batch1, batch2, batch3]) + + sequencer._batch_done_callback(success=True) + + # One batch is done, but the OrderedSequencer has more work, so DO NOT + # return true for is_finished(). + assert not sequencer.is_finished() + + # Second batch should be committed since it is full. We know it's full + # because there exists a third batch. Batches are created only if the + # previous one can't accept messages any more / is full. + assert batch2.commit.call_count == 1 + + # Both the second and third batches remain in the batches list. + assert len(sequencer._get_batches()) == 2 + + +def test_batch_done_unsuccessfully(): + client = create_client() + message = create_message() + batch1 = mock.Mock(spec=client._batch_class) + batch2 = mock.Mock(spec=client._batch_class) + batch3 = mock.Mock(spec=client._batch_class) + + sequencer = ordered_sequencer.OrderedSequencer(client, "topic_name", _ORDERING_KEY) + sequencer._set_batches([batch1, batch2, batch3]) + + # Make the batch fail. + sequencer._batch_done_callback(success=False) + + # Sequencer should remain as a sentinel to indicate this ordering key is + # paused. Therefore, don't call the cleanup callback. + assert not sequencer.is_finished() + + # Cancel the remaining batches. + assert batch2.cancel.call_count == 1 + assert batch3.cancel.call_count == 1 + + # Remove all the batches. 
+ assert len(sequencer._get_batches()) == 0 + + # Verify that the sequencer is paused. Publishing while paused returns a + # future with an exception. + future = sequencer.publish(message) + assert future.exception().ordering_key == _ORDERING_KEY + + +def test_publish_after_finish(): + client = create_client() + batch = mock.Mock(spec=client._batch_class) + + sequencer = ordered_sequencer.OrderedSequencer(client, "topic_name", _ORDERING_KEY) + sequencer._set_batch(batch) + + sequencer._batch_done_callback(success=True) + + # One batch is done, so the OrderedSequencer has no more work, and should + # return true for is_finished(). + assert sequencer.is_finished() + + message = create_message() + # It's legal to publish after being finished. + sequencer.publish(message) + + # Go back to accepting-messages mode. + assert not sequencer.is_finished() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py new file mode 100644 index 000000000000..21887e297c11 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -0,0 +1,104 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import mock +import pytest + +from google.auth import credentials +from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer + + +def create_message(): + return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + + +def create_client(): + creds = mock.Mock(spec=credentials.Credentials) + return publisher.Client(credentials=creds) + + +def test_stop(): + client = create_client() + message = create_message() + + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + + sequencer.publish(message) + sequencer.stop() + + # Publish after stop() throws + with pytest.raises(RuntimeError): + sequencer.publish(message) + + # Commit after stop() throws + with pytest.raises(RuntimeError): + sequencer.commit() + + # Stop after stop() throws + with pytest.raises(RuntimeError): + sequencer.stop() + + +def test_commit(): + client = create_client() + batch = mock.Mock(spec=client._batch_class) + + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + sequencer._set_batch(batch) + + sequencer.commit() + batch.commit.assert_called_once() + + +def test_commit_no_batch(): + client = create_client() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + sequencer.commit() + + +def test_unpause(): + client = create_client() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + with pytest.raises(NotImplementedError): + sequencer.unpause() + + +def test_basic_publish(): + client = create_client() + message = create_message() + batch = mock.Mock(spec=client._batch_class) + + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + sequencer._set_batch(batch) + + sequencer.publish(message) + batch.publish.assert_called_once_with(message) + + +def test_publish_batch_full(): + client = create_client() + message = create_message() + batch = 
mock.Mock(spec=client._batch_class) + # Make batch full. + batch.publish.return_value = None + + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + sequencer._set_batch(batch) + + # Will create a new batch since the old one is full, and return a future. + future = sequencer.publish(message) + batch.publish.assert_called_once_with(message) + assert future is not None diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index a06d2d0cf697..69c854b47cf6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -13,16 +13,20 @@ # limitations under the License. from __future__ import absolute_import +from __future__ import division from google.auth import credentials import mock import pytest +import time from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer + def test_init(): creds = mock.Mock(spec=credentials.Credentials) @@ -63,40 +67,29 @@ def test_init_emulator(monkeypatch): assert channel.target().decode("utf8") == "/foo/bar/" -def test_batch_create(): +def test_message_ordering_enabled(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) + assert not client._enable_message_ordering - assert len(client._batches) == 0 - topic = "topic/path" - batch = client._batch(topic, autocommit=False) - assert client._batches == {topic: batch} + client = publisher.Client( + publisher_options=types.PublisherOptions(enable_message_ordering=True), + credentials=creds, + ) + assert client._enable_message_ordering -def test_batch_exists(): +def test_message_ordering_changes_retry_deadline(): 
creds = mock.Mock(spec=credentials.Credentials) - client = publisher.Client(credentials=creds) - - topic = "topic/path" - client._batches[topic] = mock.sentinel.batch - # A subsequent request should return the same batch. - batch = client._batch(topic, autocommit=False) - assert batch is mock.sentinel.batch - assert client._batches == {topic: batch} - - -def test_batch_create_and_exists(): - creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) + assert client.api._method_configs["Publish"].retry._deadline == 60 - topic = "topic/path" - client._batches[topic] = mock.sentinel.batch - - # A subsequent request should return the same batch. - batch = client._batch(topic, create=True, autocommit=False) - assert batch is not mock.sentinel.batch - assert client._batches == {topic: batch} + client = publisher.Client( + publisher_options=types.PublisherOptions(enable_message_ordering=True), + credentials=creds, + ) + assert client.api._method_configs["Publish"].retry._deadline == 2 ** 32 / 1000 def test_publish(): @@ -110,7 +103,7 @@ def test_publish(): batch.publish.side_effect = (mock.sentinel.future1, mock.sentinel.future2) topic = "topic/path" - client._batches[topic] = batch + client._set_batch(topic, batch) # Begin publishing. 
future1 = client.publish(topic, b"spam") @@ -138,6 +131,24 @@ def test_publish_data_not_bytestring_error(): client.publish(topic, 42) +def test_publish_message_ordering_not_enabled_error(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + topic = "topic/path" + with pytest.raises(ValueError): + client.publish(topic, b"bytestring body", ordering_key="ABC") + + +def test_publish_empty_ordering_key_when_message_ordering_enabled(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client( + publisher_options=types.PublisherOptions(enable_message_ordering=True), + credentials=creds, + ) + topic = "topic/path" + assert client.publish(topic, b"bytestring body", ordering_key="") is not None + + def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) @@ -148,7 +159,7 @@ def test_publish_attrs_bytestring(): batch.will_accept.return_value = True topic = "topic/path" - client._batches[topic] = batch + client._set_batch(topic, batch) # Begin publishing. future = client.publish(topic, b"foo", bar=b"baz") @@ -174,11 +185,11 @@ def test_publish_new_batch_needed(): batch2.publish.return_value = mock.sentinel.future topic = "topic/path" - client._batches[topic] = batch1 + client._set_batch(topic, batch1) # Actually mock the batch class now. batch_class = mock.Mock(spec=(), return_value=batch2) - client._batch_class = batch_class + client._set_batch_class(batch_class) # Publish a message. future = client.publish(topic, b"foo", bar=b"baz") @@ -186,7 +197,11 @@ def test_publish_new_batch_needed(): # Check the mocks. 
batch_class.assert_called_once_with( - autocommit=True, client=client, settings=client.batch_settings, topic=topic + client=mock.ANY, + topic=topic, + settings=client.batch_settings, + batch_done_callback=None, + commit_when_full=True, ) message_pb = types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) batch1.publish.assert_called_once_with(message_pb) @@ -205,28 +220,20 @@ def test_stop(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - batch = client._batch("topic1", autocommit=False) - batch2 = client._batch("topic2", autocommit=False) - - pubsub_msg = types.PubsubMessage(data=b"msg") - - patch = mock.patch.object(batch, "commit") - patch2 = mock.patch.object(batch2, "commit") + batch1 = mock.Mock(spec=client._batch_class) + topic = "topic/path" + client._set_batch(topic, batch1) - with patch as commit_mock, patch2 as commit_mock2: - batch.publish(pubsub_msg) - batch2.publish(pubsub_msg) + client.stop() - client.stop() + assert batch1.commit.call_count == 1 - # check if commit() called - commit_mock.assert_called() - commit_mock2.assert_called() - - # check that closed publisher doesn't accept new messages with pytest.raises(RuntimeError): client.publish("topic1", b"msg2") + with pytest.raises(RuntimeError): + client.resume_publish("topic", "ord_key") + with pytest.raises(RuntimeError): client.stop() @@ -265,3 +272,181 @@ def test_gapic_class_method_on_instance(): client = publisher.Client(credentials=creds) answer = client.topic_path("foo", "bar") assert answer == "projects/foo/topics/bar" + + +def test_commit_thread_created_on_publish(): + creds = mock.Mock(spec=credentials.Credentials) + # Max latency is not infinite so a commit thread is created. 
+ batch_settings = types.BatchSettings(max_latency=600) + client = publisher.Client(batch_settings=batch_settings, credentials=creds) + + with mock.patch.object( + client, "_start_commit_thread", autospec=True + ) as _start_commit_thread: + # First publish should create a commit thread. + assert client.publish("topic", b"bytestring body", ordering_key="") is not None + _start_commit_thread.assert_called_once() + + # Since _start_commit_thread is a mock, no actual thread has been + # created, so let's put a sentinel there to mimic real behavior. + client._commit_thread = mock.Mock() + + # Second publish should not create a commit thread since one (the mock) + # already exists. + assert client.publish("topic", b"bytestring body", ordering_key="") is not None + # Call count should remain 1. + _start_commit_thread.assert_called_once() + + +def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(): + creds = mock.Mock(spec=credentials.Credentials) + # Max latency is infinite so a commit thread is not created. + batch_settings = types.BatchSettings(max_latency=float("inf")) + client = publisher.Client(batch_settings=batch_settings, credentials=creds) + + assert client.publish("topic", b"bytestring body", ordering_key="") is not None + assert client._commit_thread is None + + +def test_wait_and_commit_sequencers(): + creds = mock.Mock(spec=credentials.Credentials) + # Max latency is infinite so a commit thread is not created. + # We don't want a commit thread to interfere with this test. + batch_settings = types.BatchSettings(max_latency=float("inf")) + client = publisher.Client(batch_settings=batch_settings, credentials=creds) + + # Mock out time so no sleep is actually done. 
+ with mock.patch.object(time, "sleep"): + with mock.patch.object( + publisher.Client, "_commit_sequencers" + ) as _commit_sequencers: + assert ( + client.publish("topic", b"bytestring body", ordering_key="") is not None + ) + # Call _wait_and_commit_sequencers to simulate what would happen if a + # commit thread actually ran. + client._wait_and_commit_sequencers() + assert _commit_sequencers.call_count == 1 + + +def test_stopped_client_does_not_commit_sequencers(): + creds = mock.Mock(spec=credentials.Credentials) + # Max latency is infinite so a commit thread is not created. + # We don't want a commit thread to interfere with this test. + batch_settings = types.BatchSettings(max_latency=float("inf")) + client = publisher.Client(batch_settings=batch_settings, credentials=creds) + + # Mock out time so no sleep is actually done. + with mock.patch.object(time, "sleep"): + with mock.patch.object( + publisher.Client, "_commit_sequencers" + ) as _commit_sequencers: + assert ( + client.publish("topic", b"bytestring body", ordering_key="") is not None + ) + + client.stop() + + # Call _wait_and_commit_sequencers to simulate what would happen if a + # commit thread actually ran after the client was stopped. + client._wait_and_commit_sequencers() + # Should not be called since Client is stopped. + assert _commit_sequencers.call_count == 0 + + +def test_publish_with_ordering_key(): + creds = mock.Mock(spec=credentials.Credentials) + publisher_options = types.PublisherOptions(enable_message_ordering=True) + client = publisher.Client(publisher_options=publisher_options, credentials=creds) + + # Use a mock in lieu of the actual batch class. + batch = mock.Mock(spec=client._batch_class) + # Set the mock up to claim indiscriminately that it accepts all messages. 
+ batch.will_accept.return_value = True + batch.publish.side_effect = (mock.sentinel.future1, mock.sentinel.future2) + + topic = "topic/path" + ordering_key = "k1" + client._set_batch(topic, batch, ordering_key=ordering_key) + + # Begin publishing. + future1 = client.publish(topic, b"spam", ordering_key=ordering_key) + future2 = client.publish(topic, b"foo", bar="baz", ordering_key=ordering_key) + + assert future1 is mock.sentinel.future1 + assert future2 is mock.sentinel.future2 + + # Check mock. + batch.publish.assert_has_calls( + [ + mock.call(types.PubsubMessage(data=b"spam", ordering_key="k1")), + mock.call( + types.PubsubMessage( + data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" + ) + ), + ] + ) + + +def test_ordered_sequencer_cleaned_up(): + creds = mock.Mock(spec=credentials.Credentials) + # Max latency is infinite so a commit thread is not created. + # We don't want a commit thread to interfere with this test. + batch_settings = types.BatchSettings(max_latency=float("inf")) + publisher_options = types.PublisherOptions(enable_message_ordering=True) + client = publisher.Client( + batch_settings=batch_settings, + publisher_options=publisher_options, + credentials=creds, + ) + + topic = "topic" + ordering_key = "ord_key" + sequencer = mock.Mock(spec=ordered_sequencer.OrderedSequencer) + sequencer.is_finished.return_value = False + client._set_sequencer(topic=topic, sequencer=sequencer, ordering_key=ordering_key) + + assert len(client._sequencers) == 1 + # 'sequencer' is not finished yet so don't remove it. + client._commit_sequencers() + assert len(client._sequencers) == 1 + + sequencer.is_finished.return_value = True + # 'sequencer' is finished so remove it. 
+ client._commit_sequencers() + assert len(client._sequencers) == 0 + + +def test_resume_publish(): + creds = mock.Mock(spec=credentials.Credentials) + publisher_options = types.PublisherOptions(enable_message_ordering=True) + client = publisher.Client(publisher_options=publisher_options, credentials=creds) + + topic = "topic" + ordering_key = "ord_key" + sequencer = mock.Mock(spec=ordered_sequencer.OrderedSequencer) + client._set_sequencer(topic=topic, sequencer=sequencer, ordering_key=ordering_key) + + client.resume_publish(topic, ordering_key) + assert sequencer.unpause.called_once() + + +def test_resume_publish_no_sequencer_found(): + creds = mock.Mock(spec=credentials.Credentials) + publisher_options = types.PublisherOptions(enable_message_ordering=True) + client = publisher.Client(publisher_options=publisher_options, credentials=creds) + + # Check no exception is thrown if a sequencer with the (topic, ordering_key) + # pair does not exist. + client.resume_publish("topic", "ord_key") + + +def test_resume_publish_ordering_keys_not_enabled(): + creds = mock.Mock(spec=credentials.Credentials) + publisher_options = types.PublisherOptions(enable_message_ordering=False) + client = publisher.Client(publisher_options=publisher_options, credentials=creds) + + # Throw on calling resume_publish() when enable_message_ordering is False. 
+ with pytest.raises(ValueError): + client.resume_publish("topic", "ord_key") diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 592a03c6422c..43822e96e88c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -29,11 +29,11 @@ @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest(0, 0, 0), "ack"), - (requests.DropRequest(0, 0), "drop"), - (requests.LeaseRequest(0, 0), "lease"), + (requests.AckRequest(0, 0, 0, ""), "ack"), + (requests.DropRequest(0, 0, ""), "drop"), + (requests.LeaseRequest(0, 0, ""), "lease"), (requests.ModAckRequest(0, 0), "modify_ack_deadline"), - (requests.NackRequest(0, 0), "nack"), + (requests.NackRequest(0, 0, ""), "nack"), ], ) def test_dispatch_callback(item, method_name): @@ -57,7 +57,7 @@ def test_dispatch_callback_inactive(): manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0)]) + dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0, "")]) manager.send.assert_not_called() @@ -68,7 +68,11 @@ def test_ack(): ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.AckRequest(ack_id="ack_id_string", byte_size=0, time_to_ack=20)] + items = [ + requests.AckRequest( + ack_id="ack_id_string", byte_size=0, time_to_ack=20, ordering_key="" + ) + ] dispatcher_.ack(items) manager.send.assert_called_once_with( @@ -86,7 +90,11 @@ def test_ack_no_time(): ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.AckRequest(ack_id="ack_id_string", byte_size=0, time_to_ack=None)] + items = [ + requests.AckRequest( + ack_id="ack_id_string", byte_size=0, time_to_ack=None, ordering_key="" + ) + ] dispatcher_.ack(items) 
manager.send.assert_called_once_with( @@ -104,7 +112,9 @@ def test_ack_splitting_large_payload(): items = [ # use realistic lengths for ACK IDs (max 176 bytes) - requests.AckRequest(ack_id=str(i).zfill(176), byte_size=0, time_to_ack=20) + requests.AckRequest( + ack_id=str(i).zfill(176), byte_size=0, time_to_ack=20, ordering_key="" + ) for i in range(5001) ] dispatcher_.ack(items) @@ -130,23 +140,46 @@ def test_lease(): ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.LeaseRequest(ack_id="ack_id_string", byte_size=10)] + items = [ + requests.LeaseRequest(ack_id="ack_id_string", byte_size=10, ordering_key="") + ] dispatcher_.lease(items) manager.leaser.add.assert_called_once_with(items) manager.maybe_pause_consumer.assert_called_once() -def test_drop(): +def test_drop_unordered_messages(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + items = [ + requests.DropRequest(ack_id="ack_id_string", byte_size=10, ordering_key="") + ] + dispatcher_.drop(items) + + manager.leaser.remove.assert_called_once_with(items) + assert list(manager.activate_ordering_keys.call_args.args[0]) == [] + manager.maybe_resume_consumer.assert_called_once() + + +def test_drop_ordered_messages(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.DropRequest(ack_id="ack_id_string", byte_size=10)] + items = [ + requests.DropRequest(ack_id="ack_id_string", byte_size=10, ordering_key=""), + requests.DropRequest(ack_id="ack_id_string", byte_size=10, ordering_key="key1"), + requests.DropRequest(ack_id="ack_id_string", byte_size=10, ordering_key="key2"), + ] dispatcher_.drop(items) manager.leaser.remove.assert_called_once_with(items) + assert list(manager.activate_ordering_keys.call_args.args[0]) == ["key1", "key2"] 
manager.maybe_resume_consumer.assert_called_once() @@ -156,7 +189,9 @@ def test_nack(): ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.NackRequest(ack_id="ack_id_string", byte_size=10)] + items = [ + requests.NackRequest(ack_id="ack_id_string", byte_size=10, ordering_key="") + ] dispatcher_.nack(items) manager.send.assert_called_once_with( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index c8b217473d40..ec954b89d101 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -29,14 +29,14 @@ def test_add_and_remove(): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) - leaser_.add([requests.LeaseRequest(ack_id="ack2", byte_size=25)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50, ordering_key="")]) + leaser_.add([requests.LeaseRequest(ack_id="ack2", byte_size=25, ordering_key="")]) assert leaser_.message_count == 2 assert set(leaser_.ack_ids) == set(["ack1", "ack2"]) assert leaser_.bytes == 75 - leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=50)]) + leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=50, ordering_key="")]) assert leaser_.message_count == 1 assert set(leaser_.ack_ids) == set(["ack2"]) @@ -48,8 +48,8 @@ def test_add_already_managed(caplog): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) - leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50, ordering_key="")]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50, ordering_key="")]) assert "already lease managed" in caplog.text @@ -59,7 +59,7 @@ def 
test_remove_not_managed(caplog): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=50)]) + leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=50, ordering_key="")]) assert "not managed" in caplog.text @@ -69,8 +69,8 @@ def test_remove_negative_bytes(caplog): leaser_ = leaser.Leaser(mock.sentinel.manager) - leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) - leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=75)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50, ordering_key="")]) + leaser_.remove([requests.DropRequest(ack_id="ack1", byte_size=75, ordering_key="")]) assert leaser_.bytes == 0 assert "unexpectedly negative" in caplog.text @@ -125,7 +125,9 @@ def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) make_sleep_mark_manager_as_inactive(leaser_) - leaser_.add([requests.LeaseRequest(ack_id="my ack id", byte_size=50)]) + leaser_.add( + [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] + ) leaser_.maintain_leases() @@ -150,28 +152,52 @@ def test_maintain_leases_outdated_items(time): leaser_ = leaser.Leaser(manager) make_sleep_mark_manager_as_inactive(leaser_) - # Add these items at the beginning of the timeline + # Add and start expiry timer at the beginning of the timeline. time.return_value = 0 - leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack1", byte_size=50, ordering_key="")]) + leaser_.start_lease_expiry_timer(["ack1"]) + + # Add a message but don't start the lease expiry timer. + leaser_.add([requests.LeaseRequest(ack_id="ack2", byte_size=50, ordering_key="")]) - # Add another item at towards end of the timeline + # Add a message and start expiry timer towards the end of the timeline. 
time.return_value = manager.flow_control.max_lease_duration - 1 - leaser_.add([requests.LeaseRequest(ack_id="ack2", byte_size=50)]) + leaser_.add([requests.LeaseRequest(ack_id="ack3", byte_size=50, ordering_key="")]) + leaser_.start_lease_expiry_timer(["ack3"]) + + # Add a message towards the end of the timeline, but DO NOT start expiry + # timer. + leaser_.add([requests.LeaseRequest(ack_id="ack4", byte_size=50, ordering_key="")]) - # Now make sure time reports that we are at the end of our timeline. + # Now make sure time reports that we are past the end of our timeline. time.return_value = manager.flow_control.max_lease_duration + 1 leaser_.maintain_leases() - # Only ack2 should be renewed. ack1 should've been dropped - manager.dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest(ack_id="ack2", seconds=10)] - ) + # ack2, ack3, and ack4 should be renewed. ack1 should've been dropped + modacks = manager.dispatcher.modify_ack_deadline.call_args.args[0] + expected = [ + requests.ModAckRequest(ack_id="ack2", seconds=10), + requests.ModAckRequest(ack_id="ack3", seconds=10), + requests.ModAckRequest(ack_id="ack4", seconds=10), + ] + # Use sorting to allow for ordering variance. + assert sorted(modacks) == sorted(expected) + manager.dispatcher.drop.assert_called_once_with( - [requests.DropRequest(ack_id="ack1", byte_size=50)] + [requests.DropRequest(ack_id="ack1", byte_size=50, ordering_key="")] ) +def test_start_lease_expiry_timer_unknown_ack_id(): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + + # Nothing happens when this method is called with an ack-id that hasn't been + # added yet. 
+ leaser_.start_lease_expiry_timer(["ack1"]) + + @mock.patch("threading.Thread", autospec=True) def test_start(thread): manager = mock.create_autospec( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index fd23deef06c9..0c8a6d181a59 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -33,7 +33,7 @@ PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id="ACKID", delivery_attempt=0, **attrs): +def create_message(data, ack_id="ACKID", delivery_attempt=0, ordering_key="", **attrs): with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS msg = message.Message( @@ -44,6 +44,7 @@ def create_message(data, ack_id="ACKID", delivery_attempt=0, **attrs): publish_time=timestamp_pb2.Timestamp( seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 ), + ordering_key=ordering_key, ), ack_id=ack_id, delivery_attempt=delivery_attempt, @@ -89,6 +90,11 @@ def test_publish_time(): assert msg.publish_time == PUBLISHED +def test_ordering_key(): + msg = create_message(b"foo", ordering_key="key1") + assert msg.ordering_key == "key1" + + def check_call_types(mock, *args, **kwargs): """Checks a mock's call types. 
@@ -118,7 +124,10 @@ def test_ack(): msg.ack() put.assert_called_once_with( requests.AckRequest( - ack_id="bogus_ack_id", byte_size=30, time_to_ack=mock.ANY + ack_id="bogus_ack_id", + byte_size=30, + time_to_ack=mock.ANY, + ordering_key="", ) ) check_call_types(put, requests.AckRequest) @@ -129,7 +138,7 @@ def test_drop(): with mock.patch.object(msg._request_queue, "put") as put: msg.drop() put.assert_called_once_with( - requests.DropRequest(ack_id="bogus_ack_id", byte_size=30) + requests.DropRequest(ack_id="bogus_ack_id", byte_size=30, ordering_key="") ) check_call_types(put, requests.DropRequest) @@ -149,19 +158,22 @@ def test_nack(): with mock.patch.object(msg._request_queue, "put") as put: msg.nack() put.assert_called_once_with( - requests.NackRequest(ack_id="bogus_ack_id", byte_size=30) + requests.NackRequest(ack_id="bogus_ack_id", byte_size=30, ordering_key="") ) check_call_types(put, requests.NackRequest) def test_repr(): data = b"foo" - msg = create_message(data, snow="cones", orange="juice") + ordering_key = "ord_key" + msg = create_message(data, ordering_key=ordering_key, snow="cones", orange="juice") data_line = " data: {!r}".format(data) + ordering_key_line = " ordering_key: {!r}".format(ordering_key) expected_repr = "\n".join( ( "Message {", data_line, + ordering_key_line, " attributes: {", ' "orange": "juice",', ' "snow": "cones"', diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py new file mode 100644 index 000000000000..946e2598ac8f --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -0,0 +1,274 @@ +# Copyright 2020, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from six.moves import queue + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.subscriber import message +from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold + + +def make_message(ack_id, ordering_key): + proto_msg = types.PubsubMessage(data=b"Q", ordering_key=ordering_key) + return message.Message(proto_msg, ack_id, 0, queue.Queue()) + + +def test_init(): + moh = messages_on_hold.MessagesOnHold() + + assert moh.size == 0 + assert moh.get() is None + + +def test_put_and_get_unordered_messages(): + moh = messages_on_hold.MessagesOnHold() + + msg1 = make_message(ack_id="ack1", ordering_key="") + moh.put(msg1) + assert moh.size == 1 + + msg2 = make_message(ack_id="ack2", ordering_key="") + moh.put(msg2) + assert moh.size == 2 + + assert moh.get() == msg1 + assert moh.size == 1 + assert moh.get() == msg2 + assert moh.size == 0 + assert moh.get() is None + + +class ScheduleMessageCallbackTracker(object): + def __init__(self): + self.called = False + self.message = "" + + def __call__(self, message): + self.called = True + self.message = message + + +def test_ordered_messages_one_key(): + moh = messages_on_hold.MessagesOnHold() + + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 1 + + # Still waiting on the previously-sent message for "key1", and there are no + # other messages, so return 
None. + assert moh.get() is None + assert moh.size == 1 + + # Activate "key1" to release the second message with that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert callback_tracker.called + assert callback_tracker.message == msg2 + assert moh.size == 0 + assert len(moh._pending_ordered_messages) == 1 + + # Activate "key1" again. There are no other messages for that key, so clean + # up state for that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Check that clean-up happened. + assert moh.size == 0 + assert len(moh._messages_on_hold) == 0 + assert len(moh._pending_ordered_messages) == 0 + + # No messages left. + assert moh.get() is None + assert moh.size == 0 + + +def test_ordered_messages_two_keys(): + moh = messages_on_hold.MessagesOnHold() + + # Put message with "key1". + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + # Put second message with "key1". + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Put message with another key: "key2". + msg3 = make_message(ack_id="ack3", ordering_key="key2") + moh.put(msg3) + assert moh.size == 3 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 2 + + # Get another message. Still waiting on the previously-sent message for + # "key1", so release msg3 with key "key2". + assert moh.get() is msg3 + assert moh.size == 1 + + # Activate "key1" to release the second message with that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert callback_tracker.called + assert callback_tracker.message == msg2 + assert moh.size == 0 + + # Activate "key2" and release no messages because there are none left for + # that key. 
State for "key2" should be cleaned up. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key2"], callback_tracker) + assert not callback_tracker.called + assert moh.size == 0 + + # Activate "key1" again to mark msg2 as complete. Since there are no other + # messages for that key, clean up state for both keys. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Check that clean-up happened. + assert moh.size == 0 + assert len(moh._messages_on_hold) == 0 + assert len(moh._pending_ordered_messages) == 0 + + # No messages left. + assert moh.get() is None + assert moh.size == 0 + + +def test_ordered_messages_two_keys_interleaved(): + moh = messages_on_hold.MessagesOnHold() + + # Put message with "key1". + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + # Put message with another key: "key2". + msg2 = make_message(ack_id="ack2", ordering_key="key2") + moh.put(msg2) + assert moh.size == 2 + + # Put second message with "key1". + msg3 = make_message(ack_id="ack3", ordering_key="key1") + moh.put(msg3) + assert moh.size == 3 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 2 + + # Get another message. msg2 with "key2" is next in line in the queue. + assert moh.get() is msg2 + assert moh.size == 1 + + # Activate "key1". Clean up state for "key1" because another message with + # the same key (msg3) hasn't been sorted into _pending_ordered_messages yet + # through a call to get(). + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + assert moh.size == 1 + + # Get another message. msg3 is next in line in the queue. + assert moh.get() is msg3 + assert moh.size == 0 + + # Activate "key2" to mark msg2 as complete. 
Release no messages because + # there are none left for that key. State for "key2" should be cleaned up. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key2"], callback_tracker) + assert not callback_tracker.called + assert moh.size == 0 + + # Activate "key1" to mark msg3 as complete. Release no messages because + # there are none left for that key. State for "key1" should be cleaned up. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Check that clean-up happened. + assert moh.size == 0 + assert len(moh._messages_on_hold) == 0 + assert len(moh._pending_ordered_messages) == 0 + + # No messages left. + assert moh.get() is None + assert moh.size == 0 + + +def test_ordered_and_unordered_messages_interleaved(): + moh = messages_on_hold.MessagesOnHold() + + # Put message with "key1". + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + # Put another message "key1" + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Put a message with no ordering key. + msg3 = make_message(ack_id="ack3", ordering_key="") + moh.put(msg3) + assert moh.size == 3 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 2 + + # Get another message. msg2 will be skipped because another message with the + # same key (msg1) is in flight. + assert moh.get() is msg3 + assert moh.size == 1 + + # Activate "key1". Send msg2, the next in line for the same ordering key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert callback_tracker.called + assert callback_tracker.message == msg2 + assert moh.size == 0 + + # No more messages left. + assert moh.get() is None + + # Activate "key1" to mark msg2 as complete. 
Release no messages because + # there are none left for that key. State for "key1" should be cleaned up. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Check that clean-up happened. + assert moh.size == 0 + assert len(moh._messages_on_hold) == 0 + assert len(moh._pending_ordered_messages) == 0 + + # No messages left. + assert moh.get() is None + assert moh.size == 0 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 6f8a04ac9935..0886a45087de 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -19,7 +19,6 @@ import mock import pytest -from six.moves import queue from google.api_core import bidi from google.api_core import exceptions @@ -31,6 +30,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import leaser +from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager import grpc @@ -95,6 +95,7 @@ def test_constructor_and_default_state(): assert manager._client == mock.sentinel.client assert manager._subscription == mock.sentinel.subscription assert manager._scheduler is not None + assert manager._messages_on_hold is not None def test_constructor_with_options(): @@ -166,18 +167,24 @@ def test_lease_load_and_pause(): # This should mean that our messages count is at 10%, and our bytes # are at 15%; load should return the higher (0.15), and shouldn't cause # the consumer 
to pause. - manager.leaser.add([requests.LeaseRequest(ack_id="one", byte_size=150)]) + manager.leaser.add( + [requests.LeaseRequest(ack_id="one", byte_size=150, ordering_key="")] + ) assert manager.load == 0.15 manager.maybe_pause_consumer() manager._consumer.pause.assert_not_called() # After this message is added, the messages should be higher at 20% # (versus 16% for bytes). - manager.leaser.add([requests.LeaseRequest(ack_id="two", byte_size=10)]) + manager.leaser.add( + [requests.LeaseRequest(ack_id="two", byte_size=10, ordering_key="")] + ) assert manager.load == 0.2 # Returning a number above 100% is fine, and it should cause this to pause. - manager.leaser.add([requests.LeaseRequest(ack_id="three", byte_size=1000)]) + manager.leaser.add( + [requests.LeaseRequest(ack_id="three", byte_size=1000, ordering_key="")] + ) assert manager.load == 1.16 manager.maybe_pause_consumer() manager._consumer.pause.assert_called_once() @@ -194,8 +201,8 @@ def test_drop_and_resume(): # Add several messages until we're over the load threshold. manager.leaser.add( [ - requests.LeaseRequest(ack_id="one", byte_size=750), - requests.LeaseRequest(ack_id="two", byte_size=250), + requests.LeaseRequest(ack_id="one", byte_size=750, ordering_key=""), + requests.LeaseRequest(ack_id="two", byte_size=250, ordering_key=""), ] ) @@ -207,7 +214,9 @@ def test_drop_and_resume(): # Drop the 200 byte message, which should put us under the resume # threshold. 
- manager.leaser.remove([requests.DropRequest(ack_id="two", byte_size=250)]) + manager.leaser.remove( + [requests.DropRequest(ack_id="two", byte_size=250, ordering_key="")] + ) manager.maybe_resume_consumer() manager._consumer.resume.assert_called_once() @@ -245,7 +254,7 @@ def test__maybe_release_messages_on_overload(): manager._maybe_release_messages() - assert manager._messages_on_hold.qsize() == 1 + assert manager._messages_on_hold.size == 1 manager._leaser.add.assert_not_called() manager._scheduler.schedule.assert_not_called() @@ -274,8 +283,8 @@ def test__maybe_release_messages_below_overload(): # the actual call of MUT manager._maybe_release_messages() - assert manager._messages_on_hold.qsize() == 1 - msg = manager._messages_on_hold.get_nowait() + assert manager._messages_on_hold.size == 1 + msg = manager._messages_on_hold.get() assert msg.ack_id == "ack_baz" schedule_calls = manager._scheduler.schedule.mock_calls @@ -692,7 +701,7 @@ def test__on_response_no_leaser_overload(): assert isinstance(call[1][1], message.Message) # the leaser load limit not hit, no messages had to be put on hold - assert manager._messages_on_hold.qsize() == 0 + assert manager._messages_on_hold.size == 0 def test__on_response_with_leaser_overload(): @@ -741,11 +750,10 @@ def test__on_response_with_leaser_overload(): assert call_args[1].message_id == "1" # the rest of the messages should have been put on hold - assert manager._messages_on_hold.qsize() == 2 + assert manager._messages_on_hold.size == 2 while True: - try: - msg = manager._messages_on_hold.get_nowait() - except queue.Empty: + msg = manager._messages_on_hold.get() + if msg is None: break else: assert isinstance(msg, message.Message) @@ -767,6 +775,87 @@ def test__on_response_none_data(caplog): assert "callback invoked with None" in caplog.text +def test__on_response_with_ordering_keys(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + # Set up the 
messages. + response = types.StreamingPullResponse( + received_messages=[ + types.ReceivedMessage( + ack_id="fack", + message=types.PubsubMessage( + data=b"foo", message_id="1", ordering_key="" + ), + ), + types.ReceivedMessage( + ack_id="back", + message=types.PubsubMessage( + data=b"bar", message_id="2", ordering_key="key1" + ), + ), + types.ReceivedMessage( + ack_id="zack", + message=types.PubsubMessage( + data=b"baz", message_id="3", ordering_key="key1" + ), + ), + ] + ) + + # Make leaser with zero initial messages, so we don't test lease management + # behavior. + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10) + + # Actually run the method and prove that modack and schedule are called in + # the expected way. + manager._on_response(response) + + # All messages should be added to the lease management and have their ACK + # deadline extended, even those not dispatched to callbacks. + dispatcher.modify_ack_deadline.assert_called_once_with( + [ + requests.ModAckRequest("fack", 10), + requests.ModAckRequest("back", 10), + requests.ModAckRequest("zack", 10), + ] + ) + + # The first two messages should be scheduled, The third should be put on + # hold because it's blocked by the completion of the second, which has the + # same ordering key. + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 2 + call_args = schedule_calls[0][1] + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].message_id == "1" + + call_args = schedule_calls[1][1] + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].message_id == "2" + + # Message 3 should have been put on hold. + assert manager._messages_on_hold.size == 1 + # No messages available because message 2 (with "key1") has not completed yet. + assert manager._messages_on_hold.get() is None + + # Complete message 2 (with "key1"). 
+ manager.activate_ordering_keys(["key1"]) + + # Completing message 2 should release message 3. + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 3 + call_args = schedule_calls[2][1] + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].message_id == "3" + + # No messages available in the queue. + assert manager._messages_on_hold.get() is None + + def test_retryable_stream_errors(): # Make sure the config matches our hard-coded tuple of exceptions. interfaces = subscriber_client_config.config["interfaces"] @@ -824,3 +913,16 @@ def test__on_rpc_done(thread): thread.assert_called_once_with( name=mock.ANY, target=manager.close, kwargs={"reason": mock.sentinel.error} ) + + +def test_activate_ordering_keys(): + manager = make_manager() + manager._messages_on_hold = mock.create_autospec( + messages_on_hold.MessagesOnHold, instance=True + ) + + manager.activate_ordering_keys(["key1", "key2"]) + + manager._messages_on_hold.activate_ordering_keys.assert_called_once_with( + ["key1", "key2"], mock.ANY + ) From 080f980447ec8632d1d3b75f54bc5607390ab0e8 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 14 Feb 2020 12:11:22 -0800 Subject: [PATCH 0430/1197] nit: typo (#31) --- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 0a3d9141fef0..9b3f8d5fe06a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -582,7 +582,7 @@ def _on_response(self, response): return _LOGGER.debug( - "Processing %s received message(s), currenty on hold %s (bytes 
%s).", + "Processing %s received message(s), currently on hold %s (bytes %s).", len(response.received_messages), self._messages_on_hold.size, self._on_hold_bytes, From f87dd39b9ffd060e7bc8d4968f3adb16c71af113 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 20 Feb 2020 07:52:08 +0000 Subject: [PATCH 0431/1197] feat: add context manager capability to subscriber (#32) * feat: add context manager capability to subscriber * Do not use shared subscriber in socket leak test --- .../cloud/pubsub_v1/subscriber/client.py | 16 ++++++ packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/tests/system.py | 51 ++++++++++++++++++- .../subscriber/test_subscriber_client.py | 19 +++++++ 4 files changed, 86 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index b255fe4767ca..00d97231e0e7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -228,3 +228,19 @@ def callback(message): manager.open(callback=callback, on_callback_error=future.set_exception) return future + + def close(self): + """Close the underlying channel to release socket resources. + + After a channel has been closed, the client instance cannot be used + anymore. + + This method is idempotent. + """ + self.api.transport.channel.close() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 99d73295c127..03c570f571a6 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -110,7 +110,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
- session.install("mock", "pytest") + session.install("mock", "pytest", "psutil") session.install("-e", "test_utils") session.install("-e", ".") diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 65baaf016407..37a39766ae07 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -18,6 +18,7 @@ import itertools import operator as op import os +import psutil import threading import time @@ -46,7 +47,7 @@ def publisher(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def subscriber(): yield pubsub_v1.SubscriberClient() @@ -383,6 +384,54 @@ def test_managing_subscription_iam_policy( assert bindings[1].members == ["group:cloud-logs@google.com"] +def test_subscriber_not_leaking_open_sockets( + publisher, topic_path, subscription_path, cleanup +): + # Make sure the topic and the supscription get deleted. + # NOTE: Since subscriber client will be closed in the test, we should not + # use the shared `subscriber` fixture, but instead construct a new client + # in this test. + # Also, since the client will get closed, we need another subscriber client + # to clean up the subscription. We also need to make sure that auxiliary + # subscriber releases the sockets, too. + subscriber = pubsub_v1.SubscriberClient() + subscriber_2 = pubsub_v1.SubscriberClient() + cleanup.append((subscriber_2.delete_subscription, subscription_path)) + + def one_arg_close(subscriber): # the cleanup helper expects exactly one argument + subscriber.close() + + cleanup.append((one_arg_close, subscriber_2)) + cleanup.append((publisher.delete_topic, topic_path)) + + # Create topic before starting to track connection count (any sockets opened + # by the publisher client are not counted by this test). 
+ publisher.create_topic(topic_path) + + current_process = psutil.Process() + conn_count_start = len(current_process.connections()) + + # Publish a few messages, then synchronously pull them and check that + # no sockets are leaked. + with subscriber: + subscriber.create_subscription(name=subscription_path, topic=topic_path) + + # Publish a few messages, wait for the publish to succeed. + publish_futures = [ + publisher.publish(topic_path, u"message {}".format(i).encode()) + for i in range(1, 4) + ] + for future in publish_futures: + future.result() + + # Synchronously pull messages. + response = subscriber.pull(subscription_path, max_messages=3) + assert len(response.received_messages) == 3 + + conn_count_end = len(current_process.connections()) + assert conn_count_end == conn_count_start + + class TestStreamingPull(object): def test_streaming_pull_callback_error_propagation( self, publisher, topic_path, subscriber, subscription_path, cleanup diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index b367733aa705..19ec194ce1e6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -106,3 +106,22 @@ def test_subscribe_options(manager_open): callback=mock.sentinel.callback, on_callback_error=future.set_exception, ) + + +def test_close(): + mock_transport = mock.NonCallableMock() + client = subscriber.Client(transport=mock_transport) + + client.close() + + mock_transport.channel.close.assert_called() + + +def test_closes_channel_as_context_manager(): + mock_transport = mock.NonCallableMock() + client = subscriber.Client(transport=mock_transport) + + with client: + pass + + mock_transport.channel.close.assert_called() From db8e7b91a2882fd7e910d9705fc957003f64f919 Mon Sep 17 00:00:00 2001 From: 
"release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 20 Feb 2020 12:56:44 -0500 Subject: [PATCH 0432/1197] chore: release 1.3.0 (#27) * updated CHANGELOG.md [ci skip] * updated setup.py [ci skip] --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index e2ce6f105720..eb067ef88c11 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [1.3.0](https://www.github.com/googleapis/python-pubsub/compare/v1.2.0...v1.3.0) (2020-02-20) + + +### Features + +* **pubsub:** ordering keys ([#26](https://www.github.com/googleapis/python-pubsub/issues/26)) ([cc3093a](https://www.github.com/googleapis/python-pubsub/commit/cc3093a2c0304259bc374bc2eeec9630e4a11a5e)) +* add context manager capability to subscriber ([#32](https://www.github.com/googleapis/python-pubsub/issues/32)) ([b58d0d8](https://www.github.com/googleapis/python-pubsub/commit/b58d0d8e404c0a085b89d3407e6640651e81568c)) + ## [1.2.0](https://www.github.com/googleapis/python-pubsub/compare/v1.1.0...v1.2.0) (2020-02-05) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3144e258f124..34055dbc012d 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.2.0" +version = "1.3.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 76aa475cf7c780441ba3aa2e2f161617769ed5b7 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 21 Feb 2020 19:13:06 +0000 Subject: [PATCH 0433/1197] test: add system tests for common RBAC operations 
(#34) * test: add system tests for common RBAC operations * Disable flaky snapshot system test * Capitalize comment --- packages/google-cloud-pubsub/tests/system.py | 132 ++++++++++++++++++- 1 file changed, 131 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 37a39766ae07..2eccad8ed4a6 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -564,7 +564,7 @@ def test_streaming_pull_max_messages( @pytest.mark.skipif( "KOKORO_GFILE_DIR" not in os.environ, - reason="Requires Kokoro environment with a limited subscriber service account.", + reason="Requires Kokoro environment with a service account with limited role.", ) def test_streaming_pull_subscriber_permissions_sufficient( self, publisher, topic_path, subscriber, subscription_path, cleanup @@ -601,6 +601,136 @@ def test_streaming_pull_subscriber_permissions_sufficient( finally: future.cancel() + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a service account with limited role.", + ) + def test_publisher_role_can_publish_messages( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic and subscribe to it. + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + # Create a publisher client with only the publisher role only. 
+ filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-publisher-service-account.json" + ) + publisher_only_client = type(publisher).from_service_account_file(filename) + + self._publish_messages(publisher_only_client, topic_path, batch_sizes=[2]) + + response = subscriber.pull(subscription_path, max_messages=2) + assert len(response.received_messages) == 2 + + @pytest.mark.skip( + "Snapshot creation is not instant on the backend, causing test falkiness." + ) + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a service account with limited role.", + ) + def test_snapshot_seek_subscriber_permissions_sufficient( + self, project, publisher, topic_path, subscriber, subscription_path, cleanup + ): + snapshot_name = "snap" + unique_resource_id("-") + snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) + + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((subscriber.delete_snapshot, snapshot_path)) + + # Create a topic and subscribe to it. + publisher.create_topic(topic_path) + subscriber.create_subscription( + subscription_path, topic_path, retain_acked_messages=True + ) + + # A service account granting only the pubsub.subscriber role must be used. + filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-subscriber-service-account.json" + ) + subscriber_only_client = type(subscriber).from_service_account_file(filename) + + # Publish two messages and create a snapshot inbetween. 
+ self._publish_messages(publisher, topic_path, batch_sizes=[1]) + response = subscriber.pull(subscription_path, max_messages=10) + assert len(response.received_messages) == 1 + + subscriber.create_snapshot(snapshot_path, subscription_path) + + self._publish_messages(publisher, topic_path, batch_sizes=[1]) + response = subscriber.pull(subscription_path, max_messages=10) + assert len(response.received_messages) == 1 + + # A subscriber-only client should be allowed to seek to a snapshot. + subscriber_only_client.seek(subscription_path, snapshot=snapshot_path) + + # We should receive one message again, since we sought back to a snapshot. + response = subscriber.pull(subscription_path, max_messages=10) + assert len(response.received_messages) == 1 + + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a service account with limited role.", + ) + def test_viewer_role_can_list_resources( + self, project, publisher, topic_path, subscriber, cleanup + ): + project_path = "projects/" + project + + # Make sure the created topic gets deleted. + cleanup.append((publisher.delete_topic, topic_path)) + + publisher.create_topic(topic_path) + + # A service account granting only the pubsub.viewer role must be used. + filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-viewer-service-account.json" + ) + viewer_only_subscriber = type(subscriber).from_service_account_file(filename) + viewer_only_publisher = type(publisher).from_service_account_file(filename) + + # The following operations should not raise permission denied errors. + # NOTE: At least one topic exists. 
+ topic = next(iter(viewer_only_publisher.list_topics(project_path))) + next(iter(viewer_only_publisher.list_topic_subscriptions(topic.name)), None) + next(iter(viewer_only_subscriber.list_subscriptions(project_path)), None) + next(iter(viewer_only_subscriber.list_snapshots(project_path)), None) + + @pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a service account with limited role.", + ) + def test_editor_role_can_create_resources( + self, project, publisher, topic_path, subscriber, subscription_path, cleanup + ): + snapshot_name = "snap" + unique_resource_id("-") + snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) + + # Make sure the created resources get deleted. + cleanup.append((subscriber.delete_snapshot, snapshot_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, topic_path)) + + # A service account granting only the pubsub.editor role must be used. + filename = os.path.join( + os.environ["KOKORO_GFILE_DIR"], "pubsub-editor-service-account.json" + ) + editor_subscriber = type(subscriber).from_service_account_file(filename) + editor_publisher = type(publisher).from_service_account_file(filename) + + # The following operations should not raise permission denied errors. 
+ editor_publisher.create_topic(topic_path) + editor_subscriber.create_subscription(subscription_path, topic_path) + editor_subscriber.create_snapshot(snapshot_path, subscription_path) + def _publish_messages(self, publisher, topic_path, batch_sizes): """Publish ``count`` messages in batches and wait until completion.""" publish_futures = [] From 3baf5ea99f82195315faf6e7d25bfed6f211d0da Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Fri, 21 Feb 2020 17:09:03 -0500 Subject: [PATCH 0434/1197] Fix typo in dispatcher.py comment (#37) --- .../google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 6a82ba0469c0..dd324fe21aa4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -114,7 +114,7 @@ def dispatch_callback(self, items): if batched_commands[requests.ModAckRequest]: self.modify_ack_deadline(batched_commands.pop(requests.ModAckRequest)) # Note: Drop and ack *must* be after lease. It's possible to get both - # the lease the and ack/drop request in the same batch. + # the lease and the ack/drop request in the same batch. 
if batched_commands[requests.AckRequest]: self.ack(batched_commands.pop(requests.AckRequest)) if batched_commands[requests.NackRequest]: From bfbd83d8242995c3a023b0d00fe052bf0b777170 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 28 Feb 2020 16:21:57 +0000 Subject: [PATCH 0435/1197] fix: shutdown error on streaming pull callback error (#40) * fix: shutdown error on streaming pull callback error * fix incorrect comment --- .../google/cloud/pubsub_v1/subscriber/futures.py | 5 +++++ .../pubsub_v1/subscriber/test_futures_subscriber.py | 12 ++++++++++++ 2 files changed, 17 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 12504c18b5df..f9fdd76abc87 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -33,6 +33,11 @@ def __init__(self, manager): self._cancelled = False def _on_close_callback(self, manager, result): + if self.done(): + # The future has already been resolved in a different thread, + # nothing to do on the streaming pull manager shutdown. 
+ return + if result is None: self.set_result(True) else: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 2b4566018f7f..909337cc88c7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -57,6 +57,18 @@ def test__on_close_callback_failure(self): assert not future.running() + def test__on_close_callback_future_already_done(self): + future = self.make_future() + + future.set_result("foo") + assert future.done() + + # invoking on close callback should not result in an error + future._on_close_callback(mock.sentinel.manager, "bar") + + result = future.result() + assert result == "foo" # on close callback was a no-op + def test_cancel(self): future = self.make_future() From 28f10944ad5d1f726e040282ac1be1728416c40b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 28 Feb 2020 16:38:03 +0000 Subject: [PATCH 0436/1197] chore: release 1.3.1 (#41) :robot: I have created a release \*beep\* \*boop\* --- ### [1.3.1](https://www.github.com/googleapis/python-pubsub/compare/v1.3.0...v1.3.1) (2020-02-28) ### Bug Fixes * shutdown error on streaming pull callback error ([#40](https://www.github.com/googleapis/python-pubsub/issues/40)) ([552539e](https://www.github.com/googleapis/python-pubsub/commit/552539e7beb30833c39dd29bfcb0183a07895f97)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). 
--- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index eb067ef88c11..982ab894d221 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### [1.3.1](https://www.github.com/googleapis/python-pubsub/compare/v1.3.0...v1.3.1) (2020-02-28) + + +### Bug Fixes + +* shutdown error on streaming pull callback error ([#40](https://www.github.com/googleapis/python-pubsub/issues/40)) ([552539e](https://www.github.com/googleapis/python-pubsub/commit/552539e7beb30833c39dd29bfcb0183a07895f97)) + ## [1.3.0](https://www.github.com/googleapis/python-pubsub/compare/v1.2.0...v1.3.0) (2020-02-20) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 34055dbc012d..f78b1dc51970 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.3.0" +version = "1.3.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 36d0bccc084e831a4acd7e34b3c27cd793f1ca65 Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Tue, 3 Mar 2020 18:05:16 +0300 Subject: [PATCH 0437/1197] feat(pubsub): implement max_duration_per_lease_extension option (#38) * feat(pubsub): implement max_duration_per_lease_extension * comment change * simplify ack_deadline() --- .../cloud/pubsub_v1/subscriber/_protocol/leaser.py | 8 ++++---- .../subscriber/_protocol/streaming_pull_manager.py | 14 ++++++++++++-- .../google/cloud/pubsub_v1/types.py | 14 +++++++++++++- .../tests/unit/pubsub_v1/subscriber/test_leaser.py | 1 + .../subscriber/test_streaming_pull_manager.py | 11 +++++++++++ 5 
files changed, 41 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index b60379444a44..5830680da8eb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -130,8 +130,8 @@ def maintain_leases(self): # Determine the appropriate duration for the lease. This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. - p99 = self._manager.ack_histogram.percentile(99) - _LOGGER.debug("The current p99 value is %d seconds.", p99) + deadline = self._manager.ack_deadline + _LOGGER.debug("The current deadline value is %d seconds.", deadline) # Make a copy of the leased messages. This is needed because it's # possible for another thread to modify the dictionary while @@ -173,7 +173,7 @@ def maintain_leases(self): # way for ``send_request`` to fail when the consumer # is inactive. self._manager.dispatcher.modify_ack_deadline( - [requests.ModAckRequest(ack_id, p99) for ack_id in ack_ids] + [requests.ModAckRequest(ack_id, deadline) for ack_id in ack_ids] ) # Now wait an appropriate period of time and do this again. @@ -182,7 +182,7 @@ def maintain_leases(self): # period between 0 seconds and 90% of the lease. This use of # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. 
- snooze = random.uniform(0.0, p99 * 0.9) + snooze = random.uniform(0.0, deadline * 0.9) _LOGGER.debug("Snoozing lease management for %f seconds.", snooze) self._stop_event.wait(timeout=snooze) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 9b3f8d5fe06a..0a25d4625a19 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -191,9 +191,19 @@ def ack_deadline(self): Returns: int: The ack deadline. """ - target = min([self._last_histogram_size * 2, self._last_histogram_size + 100]) - if len(self.ack_histogram) > target: + target_size = min( + self._last_histogram_size * 2, self._last_histogram_size + 100 + ) + hist_size = len(self.ack_histogram) + + if hist_size > target_size: + self._last_histogram_size = hist_size self._ack_deadline = self.ack_histogram.percentile(percent=99) + + if self.flow_control.max_duration_per_lease_extension > 0: + self._ack_deadline = min( + self._ack_deadline, self.flow_control.max_duration_per_lease_extension + ) return self._ack_deadline @property diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 28019f47801f..eb4f00681cfa 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -87,12 +87,19 @@ # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. 
FlowControl = collections.namedtuple( - "FlowControl", ["max_bytes", "max_messages", "max_lease_duration"] + "FlowControl", + [ + "max_bytes", + "max_messages", + "max_lease_duration", + "max_duration_per_lease_extension", + ], ) FlowControl.__new__.__defaults__ = ( 100 * 1024 * 1024, # max_bytes: 100mb 1000, # max_messages: 1000 1 * 60 * 60, # max_lease_duration: 1 hour. + 0, # max_duration_per_lease_extension: disabled ) if sys.version_info >= (3, 5): @@ -112,6 +119,11 @@ "The maximum amount of time in seconds to hold a lease on a message " "before dropping it from the lease management." ) + FlowControl.max_duration_per_lease_extension.__doc__ = ( + "The max amount of time in seconds for a single lease extension attempt. " + "Bounds the delay before a message redelivery if the subscriber " + "fails to extend the deadline." + ) _shared_modules = [ diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index ec954b89d101..17409cb3fdb8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -84,6 +84,7 @@ def create_manager(flow_control=types.FlowControl()): manager.is_active = True manager.flow_control = flow_control manager.ack_histogram = histogram.Histogram() + manager.ack_deadline = 10 return manager diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 0886a45087de..70f320fcc53d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -144,6 +144,17 @@ def test_ack_deadline(): assert manager.ack_deadline == 20 +def 
test_ack_deadline_with_max_duration_per_lease_extension(): + manager = make_manager() + manager._flow_control = types.FlowControl(max_duration_per_lease_extension=5) + + assert manager.ack_deadline == 5 + for _ in range(5): + manager.ack_histogram.add(20) + + assert manager.ack_deadline == 5 + + def test_maybe_pause_consumer_wo_consumer_set(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) From 3257fbda1439f067bf007a8454f90b9200d5b776 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Fri, 6 Mar 2020 07:34:31 -0500 Subject: [PATCH 0438/1197] docs: fix CHANGELOG links to point to issues in the old repo. (#44) --- packages/google-cloud-pubsub/CHANGELOG.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 982ab894d221..f7699905e910 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -24,14 +24,14 @@ ### Features -* **pubsub:** add delivery attempt property to message object received by user code ([#10205](https://www.github.com/googleapis/python-pubsub/issues/10205)) ([a0937c1](https://www.github.com/googleapis/python-pubsub/commit/a0937c13107b92271913de579b60f24b2aaac177)) +* **pubsub:** add delivery attempt property to message object received by user code ([#10205](https://www.github.com/googleapis/google-cloud-python/issues/10205)) ([a0937c1](https://www.github.com/googleapis/python-pubsub/commit/a0937c13107b92271913de579b60f24b2aaac177)) * add `StreamingPullRequest.client_id` field (via synth) ([199d56a](https://www.github.com/googleapis/python-pubsub/commit/199d56a939bb6244f67138f843dafdd80721f0d3)) ### Bug Fixes -* **pubsub:** handle None in on response callback ([#9982](https://www.github.com/googleapis/python-pubsub/issues/9982)) ([6596c4b](https://www.github.com/googleapis/python-pubsub/commit/6596c4bae5526d82f5c1b5e0c243b2883404d51f)) -* replace unsafe 
six.PY3 with PY2 for better future compatibility with Python 4 ([#10081](https://www.github.com/googleapis/python-pubsub/issues/10081)) ([975c1ac](https://www.github.com/googleapis/python-pubsub/commit/975c1ac2cfdac0ce4403c0b56ad19f2ee7241f1a)) +* **pubsub:** handle None in on response callback ([#9982](https://www.github.com/googleapis/google-cloud-python/issues/9982)) ([6596c4b](https://www.github.com/googleapis/python-pubsub/commit/6596c4bae5526d82f5c1b5e0c243b2883404d51f)) +* replace unsafe six.PY3 with PY2 for better future compatibility with Python 4 ([#10081](https://www.github.com/googleapis/google-cloud-python/issues/10081)) ([975c1ac](https://www.github.com/googleapis/python-pubsub/commit/975c1ac2cfdac0ce4403c0b56ad19f2ee7241f1a)) ## 1.1.0 From 53cf4fd9b4b22858836256f08fe4a0d859f914d1 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2020 10:16:31 -0700 Subject: [PATCH 0439/1197] chore: release 1.4.0 (#42) * updated CHANGELOG.md [ci skip] * updated setup.py [ci skip] Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index f7699905e910..efadf4672d3e 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [1.4.0](https://www.github.com/googleapis/python-pubsub/compare/v1.3.1...v1.4.0) (2020-03-06) + + +### Features + +* **pubsub:** implement max_duration_per_lease_extension option ([#38](https://www.github.com/googleapis/python-pubsub/issues/38)) ([d911a2d](https://www.github.com/googleapis/python-pubsub/commit/d911a2dc8edf3c348ad3f128368b30e32dbc449e)) + ### 
[1.3.1](https://www.github.com/googleapis/python-pubsub/compare/v1.3.0...v1.3.1) (2020-02-28) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index f78b1dc51970..9cec9209e4f4 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.3.1" +version = "1.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 0e0a1851c960ebcfad4a09f4108c47aadbc46f47 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Mon, 23 Mar 2020 14:16:22 -0400 Subject: [PATCH 0440/1197] fix: don't assert on unordered publishes after publish error (#49) * fix: allow unordered publishes after publish error * Clarify comment / retrigger checks --- .../pubsub_v1/publisher/_batch/thread.py | 3 +++ .../_sequencer/unordered_sequencer.py | 6 +++++ .../sequencer/test_unordered_sequencer.py | 22 +++++++++++++++++++ 3 files changed, 31 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index cdd913db459c..73210011d0d8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -369,3 +369,6 @@ def publish(self, message): self.commit() return future + + def _set_status(self, status): + self._status = status diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index 426bbded7ba1..dff114630b48 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -65,6 +65,12 @@ def commit(self): if self._current_batch: self._current_batch.commit() + # At this point, we lose track of the old batch, but we don't + # care since we just committed it. + # Setting this to None guarantees the next publish() creates a new + # batch. + self._current_batch = None + def unpause(self): """ Not relevant for this class. """ raise NotImplementedError diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 21887e297c11..22e24ed064b0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -18,6 +18,7 @@ from google.auth import credentials from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher._batch import base from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer @@ -102,3 +103,24 @@ def test_publish_batch_full(): future = sequencer.publish(message) batch.publish.assert_called_once_with(message) assert future is not None + + +def test_publish_after_batch_error(): + client = create_client() + message = create_message() + batch = mock.Mock(spec=client._batch_class) + + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + sequencer._set_batch(batch) + + sequencer.commit() + batch.commit.assert_called_once() + + # Simulate publish RPC failing. + batch._set_status(base.BatchStatus.ERROR) + + # Will create a new batch since the old one has been committed. The fact + # that the old batch errored should not matter in the publish of the next + # message. 
+ future = sequencer.publish(message) + assert future is not None From 3547eafa687590a564023f5e42388434a732dc11 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Mar 2020 17:09:40 -0400 Subject: [PATCH 0441/1197] chore: release 1.4.1 (#54) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py [ci skip] * Update CHANGELOG.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Prad Nelluru --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index efadf4672d3e..b1ea6bf3b1cd 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### [1.4.1](https://www.github.com/googleapis/python-pubsub/compare/v1.4.0...v1.4.1) (2020-03-23) + + +### Bug Fixes + +* Don't assert on unordered publishes after publish error. 
([#49](https://www.github.com/googleapis/python-pubsub/issues/49)) ([ea19ce6](https://www.github.com/googleapis/python-pubsub/commit/ea19ce616f6961e8993b72cd2921f7f3e61541f9)) + ## [1.4.0](https://www.github.com/googleapis/python-pubsub/compare/v1.3.1...v1.4.0) (2020-03-06) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9cec9209e4f4..01758dc482c7 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.4.0" +version = "1.4.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From a6af59230be7676e360cc4f87669a288d3283df6 Mon Sep 17 00:00:00 2001 From: Alex Hong <9397363+hongalex@users.noreply.github.com> Date: Mon, 23 Mar 2020 14:58:50 -0700 Subject: [PATCH 0442/1197] chore: add psutil to synth.py (#55) * add psutil to synth.py * chore: add psutil to synth.py --- packages/google-cloud-pubsub/synth.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index a92290e21089..cc45712add21 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -191,7 +191,7 @@ def _merge_dict(d1, d2): # Add templated files # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( - unit_cov_level=97, cov_level=99, system_test_dependencies=["test_utils"] + unit_cov_level=97, cov_level=99, system_test_dependencies=["test_utils", "psutil"] ) s.move(templated_files) From 3ceb8cbc0412220cb1a584bb0212f8dd2bf9977a Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 23 Mar 2020 15:49:14 -0700 Subject: [PATCH 0443/1197] fix: update generated retry timings for publish and pull rpcs via synth (#43) * fix: update generated retry settings for 
pull and publish rpcs via synth Co-authored-by: Alex Hong Co-authored-by: Alex Hong <9397363+hongalex@users.noreply.github.com> --- .../cloud/pubsub_v1/gapic/publisher_client.py | 50 +- .../gapic/publisher_client_config.py | 2 +- .../pubsub_v1/gapic/subscriber_client.py | 110 ++-- .../gapic/subscriber_client_config.py | 6 +- .../transports/publisher_grpc_transport.py | 16 +- .../transports/subscriber_grpc_transport.py | 16 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 326 ++++++++--- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 554 ++++++++++-------- packages/google-cloud-pubsub/synth.metadata | 16 +- 9 files changed, 667 insertions(+), 429 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index defa110384aa..589d64508bbd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -260,12 +260,12 @@ def create_topic( >>> response = client.create_topic(name) Args: - name (str): The name of the topic. It must have the format - `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, - and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), - underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent - signs (`%`). It must be between 3 and 255 characters in length, and it - must not start with `"goog"`. + name (str): Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must start with a + letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), + dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be between 3 and 255 + characters in length, and it must not start with ``"goog"``. 
labels (dict[str -> str]): See Creating and managing labels. message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining the set of Google Cloud Platform regions where messages @@ -358,12 +358,12 @@ def update_topic( >>> response = client.update_topic(topic, update_mask) Args: - topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): The updated topic object. + topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): Required. The updated topic object. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. Must be - specified and non-empty. Note that if ``update_mask`` contains + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. Must + be specified and non-empty. Note that if ``update_mask`` contains "message\_storage\_policy" then the new value will be determined based on the policy configured at the project or organization level. The ``message_storage_policy`` must not be set in the ``topic`` provided @@ -444,9 +444,9 @@ def publish( >>> response = client.publish(topic, messages) Args: - topic (str): The messages in the request will be published on this topic. Format is - ``projects/{project}/topics/{topic}``. - messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): The messages to publish. + topic (str): Required. The messages in the request will be published on this topic. + Format is ``projects/{project}/topics/{topic}``. + messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): Required. The messages to publish. 
If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` @@ -518,7 +518,7 @@ def get_topic( >>> response = client.get_topic(topic) Args: - topic (str): The name of the topic to get. Format is + topic (str): Required. The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -601,7 +601,7 @@ def list_topics( ... pass Args: - project (str): The name of the project in which to list topics. Format is + project (str): Required. The name of the project in which to list topics. Format is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -703,8 +703,8 @@ def list_topic_subscriptions( ... pass Args: - topic (str): The name of the topic that subscriptions are attached to. Format is - ``projects/{project}/topics/{topic}``. + topic (str): Required. The name of the topic that subscriptions are attached to. + Format is ``projects/{project}/topics/{topic}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -799,7 +799,7 @@ def delete_topic( >>> client.delete_topic(topic) Args: - topic (str): Name of the topic to delete. Format is + topic (str): Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -855,8 +855,11 @@ def set_iam_policy( metadata=None, ): """ - Sets the access control policy on the specified resource. Replaces any - existing policy. + Sets the access control policy on the specified resource. Replaces + any existing policy. 
+ + Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and + PERMISSION_DENIED Example: >>> from google.cloud import pubsub_v1 @@ -937,9 +940,8 @@ def get_iam_policy( metadata=None, ): """ - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does not have a policy - set. + Gets the access control policy for a resource. Returns an empty policy + if the resource exists and does not have a policy set. Example: >>> from google.cloud import pubsub_v1 @@ -1018,8 +1020,8 @@ def test_iam_permissions( ): """ Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of permissions, - not a NOT\_FOUND error. + resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index 055b3424e6cd..dceb04a3ccc2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -47,7 +47,7 @@ "retry_params_name": "default", }, "Publish": { - "timeout_millis": 60000, + "timeout_millis": 20000, "retry_codes_name": "publish", "retry_params_name": "messaging", "bundling": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 22e955322fb6..6516c22644e3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -274,15 +274,17 @@ def create_subscription( >>> response 
= client.create_subscription(name, topic) Args: - name (str): The name of the subscription. It must have the format - `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must - start with a letter, and contain only letters (`[A-Za-z]`), numbers - (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), - plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters - in length, and it must not start with `"goog"` - topic (str): The name of the topic from which this subscription is receiving - messages. Format is ``projects/{project}/topics/{topic}``. The value of - this field will be ``_deleted-topic_`` if the topic has been deleted. + name (str): Required. The name of the subscription. It must have the format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores + (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs + (``%``). It must be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + topic (str): Required. The name of the topic from which this subscription is + receiving messages. Format is ``projects/{project}/topics/{topic}``. The + value of this field will be ``_deleted-topic_`` if the topic has been + deleted. push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to configure it. An empty ``pushConfig`` signifies that the subscriber will pull and ack messages using API methods. @@ -434,7 +436,7 @@ def get_subscription( >>> response = client.get_subscription(subscription) Args: - subscription (str): The name of the subscription to get. Format is + subscription (str): Required. The name of the subscription to get. Format is ``projects/{project}/subscriptions/{sub}``. 
retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -510,11 +512,11 @@ def update_subscription( >>> response = client.update_subscription(subscription, update_mask) Args: - subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): The updated subscription object. + subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): Required. The updated subscription object. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Subscription` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided subscription to update. + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided subscription to update. Must be specified and non-empty. If a dict is provided, it must be of the same form as the protobuf @@ -602,8 +604,8 @@ def list_subscriptions( ... pass Args: - project (str): The name of the project in which to list subscriptions. Format is - ``projects/{project-id}``. + project (str): Required. The name of the project in which to list subscriptions. Format + is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -698,7 +700,7 @@ def delete_subscription( >>> client.delete_subscription(subscription) Args: - subscription (str): The subscription to delete. Format is + subscription (str): Required. The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will @@ -777,11 +779,11 @@ def modify_ack_deadline( >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) Args: - subscription (str): The name of the subscription. Format is + subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): List of acknowledgment IDs. - ack_deadline_seconds (int): The new ack deadline with respect to the time this request was sent to - the Pub/Sub system. For example, if the value is 10, the new ack + ack_ids (list[str]): Required. List of acknowledgment IDs. + ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request was + sent to the Pub/Sub system. For example, if the value is 10, the new ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call was made. Specifying zero might immediately make the message available for delivery to another subscriber client. This typically results in an @@ -867,10 +869,10 @@ def acknowledge( >>> client.acknowledge(subscription, ack_ids) Args: - subscription (str): The subscription whose message is being acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): The acknowledgment ID for the messages being acknowledged that was - returned by the Pub/Sub system in the ``Pull`` response. Must not be + subscription (str): Required. The subscription whose message is being acknowledged. Format + is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[str]): Required. The acknowledgment ID for the messages being acknowledged that + was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. 
If ``None`` is specified, requests will @@ -946,9 +948,9 @@ def pull( >>> response = client.pull(subscription, max_messages) Args: - subscription (str): The subscription from which messages should be pulled. Format is - ``projects/{project}/subscriptions/{sub}``. - max_messages (int): The maximum number of messages to return for this request. Must be a + subscription (str): Required. The subscription from which messages should be pulled. Format + is ``projects/{project}/subscriptions/{sub}``. + max_messages (int): Required. The maximum number of messages to return for this request. Must be a positive integer. The Pub/Sub system may return fewer than the number specified. return_immediately (bool): If this field set to true, the system will respond immediately even if @@ -1021,8 +1023,6 @@ def streaming_pull( the client should re-establish the stream. Flow control can be achieved by configuring the underlying RPC channel. - EXPERIMENTAL: This method interface might change in the future. - Example: >>> from google.cloud import pubsub_v1 >>> @@ -1106,9 +1106,9 @@ def modify_push_config( >>> client.modify_push_config(subscription, push_config) Args: - subscription (str): The name of the subscription. Format is + subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): The push configuration for future deliveries. + push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): Required. The push configuration for future deliveries. An empty ``pushConfig`` indicates that the Pub/Sub system should stop pushing messages from the given subscription and allow messages to be @@ -1202,7 +1202,7 @@ def list_snapshots( ... pass Args: - project (str): The name of the project in which to list snapshots. Format is + project (str): Required. The name of the project in which to list snapshots. Format is ``projects/{project-id}``. 
page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -1307,18 +1307,18 @@ def create_snapshot( >>> response = client.create_snapshot(name, subscription) Args: - name (str): User-provided name for this snapshot. If the name is not provided in the - request, the server will assign a random name for this snapshot on the - same project as the subscription. Note that for REST API requests, you - must specify a name. See the resource name rules. Format is - ``projects/{project}/snapshots/{snap}``. - subscription (str): The subscription whose backlog the snapshot retains. Specifically, the - created snapshot is guaranteed to retain: (a) The existing backlog on - the subscription. More precisely, this is defined as the messages in the - subscription's backlog that are unacknowledged upon the successful - completion of the ``CreateSnapshot`` request; as well as: (b) Any - messages published to the subscription's topic following the successful - completion of the CreateSnapshot request. Format is + name (str): Required. User-provided name for this snapshot. If the name is not + provided in the request, the server will assign a random name for this + snapshot on the same project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name rules. Format + is ``projects/{project}/snapshots/{snap}``. + subscription (str): Required. The subscription whose backlog the snapshot retains. + Specifically, the created snapshot is guaranteed to retain: (a) The + existing backlog on the subscription. More precisely, this is defined as + the messages in the subscription's backlog that are unacknowledged upon + the successful completion of the ``CreateSnapshot`` request; as well as: + (b) Any messages published to the subscription's topic following the + successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. 
labels (dict[str -> str]): See Creating and managing labels. @@ -1403,11 +1403,11 @@ def update_snapshot( >>> response = client.update_snapshot(snapshot, update_mask) Args: - snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): The updated snapshot object. + snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): Required. The updated snapshot object. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Snapshot` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided snapshot to update. + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided snapshot to update. Must be specified and non-empty. If a dict is provided, it must be of the same form as the protobuf @@ -1491,7 +1491,7 @@ def delete_snapshot( >>> client.delete_snapshot(snapshot) Args: - snapshot (str): The name of the snapshot to delete. Format is + snapshot (str): Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1567,7 +1567,7 @@ def seek( >>> response = client.seek(subscription) Args: - subscription (str): The subscription to affect. + subscription (str): Required. The subscription to affect. time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. Messages retained in the subscription that were published before this time are marked as acknowledged, and messages retained in the subscription that were published after this time are @@ -1645,8 +1645,11 @@ def set_iam_policy( metadata=None, ): """ - Sets the access control policy on the specified resource. Replaces any - existing policy. + Sets the access control policy on the specified resource. Replaces + any existing policy. 
+ + Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and + PERMISSION_DENIED Example: >>> from google.cloud import pubsub_v1 @@ -1727,9 +1730,8 @@ def get_iam_policy( metadata=None, ): """ - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does not have a policy - set. + Gets the access control policy for a resource. Returns an empty policy + if the resource exists and does not have a policy set. Example: >>> from google.cloud import pubsub_v1 @@ -1808,8 +1810,8 @@ def test_iam_permissions( ): """ Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of permissions, - not a NOT\_FOUND error. + resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 3955f2da1dcb..2c8e64b51a1b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -26,9 +26,9 @@ "initial_retry_delay_millis": 100, "retry_delay_multiplier": 1.3, "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 5000, - "rpc_timeout_multiplier": 1.3, - "max_rpc_timeout_millis": 600000, + "initial_rpc_timeout_millis": 25000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 25000, "total_timeout_millis": 600000, }, "streaming_messaging": { diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 918224ebd61f..90c5726ff106 
100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -215,8 +215,11 @@ def delete_topic(self): def set_iam_policy(self): """Return the gRPC stub for :meth:`PublisherClient.set_iam_policy`. - Sets the access control policy on the specified resource. Replaces any - existing policy. + Sets the access control policy on the specified resource. Replaces + any existing policy. + + Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and + PERMISSION_DENIED Returns: Callable: A callable which accepts the appropriate @@ -229,9 +232,8 @@ def set_iam_policy(self): def get_iam_policy(self): """Return the gRPC stub for :meth:`PublisherClient.get_iam_policy`. - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does not have a policy - set. + Gets the access control policy for a resource. Returns an empty policy + if the resource exists and does not have a policy set. Returns: Callable: A callable which accepts the appropriate @@ -245,8 +247,8 @@ def test_iam_permissions(self): """Return the gRPC stub for :meth:`PublisherClient.test_iam_permissions`. Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of permissions, - not a NOT\_FOUND error. + resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. 
Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 2c86fc778887..82af562aef1d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -389,8 +389,11 @@ def seek(self): def set_iam_policy(self): """Return the gRPC stub for :meth:`SubscriberClient.set_iam_policy`. - Sets the access control policy on the specified resource. Replaces any - existing policy. + Sets the access control policy on the specified resource. Replaces + any existing policy. + + Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and + PERMISSION_DENIED Returns: Callable: A callable which accepts the appropriate @@ -403,9 +406,8 @@ def set_iam_policy(self): def get_iam_policy(self): """Return the gRPC stub for :meth:`SubscriberClient.get_iam_policy`. - Gets the access control policy for a resource. - Returns an empty policy if the resource exists and does not have a policy - set. + Gets the access control policy for a resource. Returns an empty policy + if the resource exists and does not have a policy set. Returns: Callable: A callable which accepts the appropriate @@ -419,8 +421,8 @@ def test_iam_permissions(self): """Return the gRPC stub for :meth:`SubscriberClient.test_iam_permissions`. Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of permissions, - not a NOT\_FOUND error. + resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. 
Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index da3801dfecb7..adaf8c6fc3bd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,18 +11,19 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; package google.pubsub.v1; -import "google/api/annotations.proto"; import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.PubSub.V1"; @@ -49,6 +50,7 @@ service Publisher { put: "/v1/{name=projects/*/topics/*}" body: "*" }; + option (google.api.method_signature) = "name"; } // Updates an existing topic. Note that certain properties of a @@ -67,6 +69,7 @@ service Publisher { post: "/v1/{topic=projects/*/topics/*}:publish" body: "*" }; + option (google.api.method_signature) = "topic,messages"; } // Gets the configuration of a topic. @@ -74,6 +77,7 @@ service Publisher { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}" }; + option (google.api.method_signature) = "topic"; } // Lists matching topics. 
@@ -81,14 +85,15 @@ service Publisher { option (google.api.http) = { get: "/v1/{project=projects/*}/topics" }; + option (google.api.method_signature) = "project"; } // Lists the names of the subscriptions on this topic. - rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) - returns (ListTopicSubscriptionsResponse) { + rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse) { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}/subscriptions" }; + option (google.api.method_signature) = "topic"; } // Lists the names of the snapshots on this topic. Snapshots are used in @@ -97,8 +102,7 @@ service Publisher { // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state // captured by a snapshot. - rpc ListTopicSnapshots(ListTopicSnapshotsRequest) - returns (ListTopicSnapshotsResponse) { + rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}/snapshots" }; @@ -113,6 +117,7 @@ service Publisher { option (google.api.http) = { delete: "/v1/{topic=projects/*/topics/*}" }; + option (google.api.method_signature) = "topic"; } } @@ -127,13 +132,19 @@ message MessageStoragePolicy { // A topic resource. message Topic { - // The name of the topic. It must have the format + option (google.api.resource) = { + type: "pubsub.googleapis.com/Topic" + pattern: "projects/{project}/topics/{topic}" + pattern: "_deleted-topic_" + }; + + // Required. The name of the topic. It must have the format // `"projects/{project}/topics/{topic}"`. `{topic}` must start with a letter, // and contain only letters (`[A-Za-z]`), numbers (`[0-9]`), dashes (`-`), // underscores (`_`), periods (`.`), tildes (`~`), plus (`+`) or percent // signs (`%`). It must be between 3 and 255 characters in length, and it // must not start with `"goog"`. 
- string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; // See Creating and // managing labels. @@ -193,32 +204,42 @@ message PubsubMessage { // Request for the GetTopic method. message GetTopicRequest { - // The name of the topic to get. + // Required. The name of the topic to get. // Format is `projects/{project}/topics/{topic}`. - string topic = 1; + string topic = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + } + ]; } // Request for the UpdateTopic method. message UpdateTopicRequest { - // The updated topic object. - Topic topic = 1; + // Required. The updated topic object. + Topic topic = 1 [(google.api.field_behavior) = REQUIRED]; - // Indicates which fields in the provided topic to update. Must be specified + // Required. Indicates which fields in the provided topic to update. Must be specified // and non-empty. Note that if `update_mask` contains // "message_storage_policy" then the new value will be determined based on the // policy configured at the project or organization level. The // `message_storage_policy` must not be set in the `topic` provided above. - google.protobuf.FieldMask update_mask = 2; + google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for the Publish method. message PublishRequest { - // The messages in the request will be published on this topic. + // Required. The messages in the request will be published on this topic. // Format is `projects/{project}/topics/{topic}`. - string topic = 1; - - // The messages to publish. - repeated PubsubMessage messages = 2; + string topic = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + } + ]; + + // Required. The messages to publish. + repeated PubsubMessage messages = 2 [(google.api.field_behavior) = REQUIRED]; } // Response for the `Publish` method. 
@@ -231,9 +252,14 @@ message PublishResponse { // Request for the `ListTopics` method. message ListTopicsRequest { - // The name of the project in which to list topics. + // Required. The name of the project in which to list topics. // Format is `projects/{project-id}`. - string project = 1; + string project = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Maximum number of topics to return. int32 page_size = 2; @@ -256,9 +282,14 @@ message ListTopicsResponse { // Request for the `ListTopicSubscriptions` method. message ListTopicSubscriptionsRequest { - // The name of the topic that subscriptions are attached to. + // Required. The name of the topic that subscriptions are attached to. // Format is `projects/{project}/topics/{topic}`. - string topic = 1; + string topic = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + } + ]; // Maximum number of subscription names to return. int32 page_size = 2; @@ -272,7 +303,9 @@ message ListTopicSubscriptionsRequest { // Response for the `ListTopicSubscriptions` method. message ListTopicSubscriptionsResponse { // The names of the subscriptions that match the request. - repeated string subscriptions = 1; + repeated string subscriptions = 1 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + }]; // If not empty, indicates that there may be more subscriptions that match // the request; this value should be passed in a new @@ -308,9 +341,14 @@ message ListTopicSnapshotsResponse { // Request for the `DeleteTopic` method. message DeleteTopicRequest { - // Name of the topic to delete. + // Required. Name of the topic to delete. // Format is `projects/{project}/topics/{topic}`. 
- string topic = 1; + string topic = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + } + ]; } // The service that an application uses to manipulate subscriptions and to @@ -340,6 +378,7 @@ service Subscriber { put: "/v1/{name=projects/*/subscriptions/*}" body: "*" }; + option (google.api.method_signature) = "name,topic,push_config,ack_deadline_seconds"; } // Gets the configuration details of a subscription. @@ -347,6 +386,7 @@ service Subscriber { option (google.api.http) = { get: "/v1/{subscription=projects/*/subscriptions/*}" }; + option (google.api.method_signature) = "subscription"; } // Updates an existing subscription. Note that certain properties of a @@ -359,11 +399,11 @@ service Subscriber { } // Lists matching subscriptions. - rpc ListSubscriptions(ListSubscriptionsRequest) - returns (ListSubscriptionsResponse) { + rpc ListSubscriptions(ListSubscriptionsRequest) returns (ListSubscriptionsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/subscriptions" }; + option (google.api.method_signature) = "project"; } // Deletes an existing subscription. All messages retained in the subscription @@ -371,11 +411,11 @@ service Subscriber { // `NOT_FOUND`. After a subscription is deleted, a new one may be created with // the same name, but the new one has no association with the old // subscription or its topic unless the same topic is specified. - rpc DeleteSubscription(DeleteSubscriptionRequest) - returns (google.protobuf.Empty) { + rpc DeleteSubscription(DeleteSubscriptionRequest) returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{subscription=projects/*/subscriptions/*}" }; + option (google.api.method_signature) = "subscription"; } // Modifies the ack deadline for a specific message. 
This method is useful @@ -383,12 +423,12 @@ service Subscriber { // subscriber, or to make the message available for redelivery if the // processing was interrupted. Note that this does not modify the // subscription-level `ackDeadlineSeconds` used for subsequent messages. - rpc ModifyAckDeadline(ModifyAckDeadlineRequest) - returns (google.protobuf.Empty) { + rpc ModifyAckDeadline(ModifyAckDeadlineRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline" body: "*" }; + option (google.api.method_signature) = "subscription,ack_ids,ack_deadline_seconds"; } // Acknowledges the messages associated with the `ack_ids` in the @@ -403,6 +443,7 @@ service Subscriber { post: "/v1/{subscription=projects/*/subscriptions/*}:acknowledge" body: "*" }; + option (google.api.method_signature) = "subscription,ack_ids"; } // Pulls messages from the server. The server may return `UNAVAILABLE` if @@ -413,6 +454,7 @@ service Subscriber { post: "/v1/{subscription=projects/*/subscriptions/*}:pull" body: "*" }; + option (google.api.method_signature) = "subscription,return_immediately,max_messages"; } // Establishes a stream with the server, which sends messages down to the @@ -422,8 +464,8 @@ service Subscriber { // reassign server-side resources, in which case, the client should // re-establish the stream. Flow control can be achieved by configuring the // underlying RPC channel. - rpc StreamingPull(stream StreamingPullRequest) - returns (stream StreamingPullResponse) {} + rpc StreamingPull(stream StreamingPullRequest) returns (stream StreamingPullResponse) { + } // Modifies the `PushConfig` for a specified subscription. // @@ -431,12 +473,12 @@ service Subscriber { // an empty `PushConfig`) or vice versa, or change the endpoint URL and other // attributes of a push subscription. Messages will accumulate for delivery // continuously through the call regardless of changes to the `PushConfig`. 
- rpc ModifyPushConfig(ModifyPushConfigRequest) - returns (google.protobuf.Empty) { + rpc ModifyPushConfig(ModifyPushConfigRequest) returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig" body: "*" }; + option (google.api.method_signature) = "subscription,push_config"; } // Gets the configuration details of a snapshot. Snapshots are used in @@ -460,6 +502,7 @@ service Subscriber { option (google.api.http) = { get: "/v1/{project=projects/*}/snapshots" }; + option (google.api.method_signature) = "project"; } // Creates a snapshot from the requested subscription. Snapshots are used in @@ -485,6 +528,7 @@ service Subscriber { put: "/v1/{name=projects/*/snapshots/*}" body: "*" }; + option (google.api.method_signature) = "name,subscription"; } // Updates an existing snapshot. Snapshots are used in @@ -514,6 +558,7 @@ service Subscriber { option (google.api.http) = { delete: "/v1/{snapshot=projects/*/snapshots/*}" }; + option (google.api.method_signature) = "snapshot"; } // Seeks an existing subscription to a point in time or to a given snapshot, @@ -534,19 +579,29 @@ service Subscriber { // A subscription resource. message Subscription { - // The name of the subscription. It must have the format + option (google.api.resource) = { + type: "pubsub.googleapis.com/Subscription" + pattern: "projects/{project}/subscriptions/{subscription}" + }; + + // Required. The name of the subscription. It must have the format // `"projects/{project}/subscriptions/{subscription}"`. `{subscription}` must // start with a letter, and contain only letters (`[A-Za-z]`), numbers // (`[0-9]`), dashes (`-`), underscores (`_`), periods (`.`), tildes (`~`), // plus (`+`) or percent signs (`%`). It must be between 3 and 255 characters // in length, and it must not start with `"goog"`. 
- string name = 1; + string name = 1 [(google.api.field_behavior) = REQUIRED]; - // The name of the topic from which this subscription is receiving messages. + // Required. The name of the topic from which this subscription is receiving messages. // Format is `projects/{project}/topics/{topic}`. // The value of this field will be `_deleted-topic_` if the topic has been // deleted. - string topic = 2; + string topic = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + } + ]; // If push delivery is used with this subscription, this field is // used to configure it. An empty `pushConfig` signifies that the subscriber @@ -762,26 +817,36 @@ message ReceivedMessage { // Request for the GetSubscription method. message GetSubscriptionRequest { - // The name of the subscription to get. + // Required. The name of the subscription to get. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 1; + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; } // Request for the UpdateSubscription method. message UpdateSubscriptionRequest { - // The updated subscription object. - Subscription subscription = 1; + // Required. The updated subscription object. + Subscription subscription = 1 [(google.api.field_behavior) = REQUIRED]; - // Indicates which fields in the provided subscription to update. + // Required. Indicates which fields in the provided subscription to update. // Must be specified and non-empty. - google.protobuf.FieldMask update_mask = 2; + google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for the `ListSubscriptions` method. message ListSubscriptionsRequest { - // The name of the project in which to list subscriptions. + // Required. The name of the project in which to list subscriptions. 
// Format is `projects/{project-id}`. - string project = 1; + string project = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Maximum number of subscriptions to return. int32 page_size = 2; @@ -805,31 +870,46 @@ message ListSubscriptionsResponse { // Request for the DeleteSubscription method. message DeleteSubscriptionRequest { - // The subscription to delete. + // Required. The subscription to delete. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 1; + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; } // Request for the ModifyPushConfig method. message ModifyPushConfigRequest { - // The name of the subscription. + // Required. The name of the subscription. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 1; - - // The push configuration for future deliveries. + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; + + // Required. The push configuration for future deliveries. // // An empty `pushConfig` indicates that the Pub/Sub system should // stop pushing messages from the given subscription and allow // messages to be pulled and acknowledged - effectively pausing // the subscription if `Pull` or `StreamingPull` is not called. - PushConfig push_config = 2; + PushConfig push_config = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for the `Pull` method. message PullRequest { - // The subscription from which messages should be pulled. + // Required. The subscription from which messages should be pulled. // Format is `projects/{project}/subscriptions/{sub}`. 
- string subscription = 1; + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; // If this field set to true, the system will respond immediately even if // it there are no messages available to return in the `Pull` response. @@ -837,10 +917,10 @@ message PullRequest { // least one message is available, rather than returning no messages. bool return_immediately = 2; - // The maximum number of messages to return for this request. Must be a + // Required. The maximum number of messages to return for this request. Must be a // positive integer. The Pub/Sub system may return fewer than the number // specified. - int32 max_messages = 3; + int32 max_messages = 3 [(google.api.field_behavior) = REQUIRED]; } // Response for the `Pull` method. @@ -854,14 +934,19 @@ message PullResponse { // Request for the ModifyAckDeadline method. message ModifyAckDeadlineRequest { - // The name of the subscription. + // Required. The name of the subscription. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 1; + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; - // List of acknowledgment IDs. - repeated string ack_ids = 4; + // Required. List of acknowledgment IDs. + repeated string ack_ids = 4 [(google.api.field_behavior) = REQUIRED]; - // The new ack deadline with respect to the time this request was sent to + // Required. The new ack deadline with respect to the time this request was sent to // the Pub/Sub system. For example, if the value is 10, the new // ack deadline will expire 10 seconds after the `ModifyAckDeadline` call // was made. Specifying zero might immediately make the message available for @@ -869,29 +954,39 @@ message ModifyAckDeadlineRequest { // increase in the rate of message redeliveries (that is, duplicates). 
// The minimum deadline you can specify is 0 seconds. // The maximum deadline you can specify is 600 seconds (10 minutes). - int32 ack_deadline_seconds = 3; + int32 ack_deadline_seconds = 3 [(google.api.field_behavior) = REQUIRED]; } // Request for the Acknowledge method. message AcknowledgeRequest { - // The subscription whose message is being acknowledged. + // Required. The subscription whose message is being acknowledged. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 1; - - // The acknowledgment ID for the messages being acknowledged that was returned + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; + + // Required. The acknowledgment ID for the messages being acknowledged that was returned // by the Pub/Sub system in the `Pull` response. Must not be empty. - repeated string ack_ids = 2; + repeated string ack_ids = 2 [(google.api.field_behavior) = REQUIRED]; } // Request for the `StreamingPull` streaming RPC method. This request is used to // establish the initial stream as well as to stream acknowledgements and ack // deadline modifications from the client to the server. message StreamingPullRequest { - // The subscription for which to initialize the new stream. This must be + // Required. The subscription for which to initialize the new stream. This must be // provided in the first request on the stream, and must not be set in // subsequent requests from client to server. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 1; + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; // List of acknowledgement IDs for acknowledging previously received messages // (received on this stream or a different stream). 
If an ack ID has expired, @@ -920,11 +1015,11 @@ message StreamingPullRequest { // processing was interrupted. repeated string modify_deadline_ack_ids = 4; - // The ack deadline to use for the stream. This must be provided in the + // Required. The ack deadline to use for the stream. This must be provided in the // first request on the stream, but it can also be updated on subsequent // requests from client to server. The minimum deadline you can specify is 10 // seconds. The maximum deadline you can specify is 600 seconds (10 minutes). - int32 stream_ack_deadline_seconds = 5; + int32 stream_ack_deadline_seconds = 5 [(google.api.field_behavior) = REQUIRED]; // A unique identifier that is used to distinguish client instances from each // other. Only needs to be provided on the initial request. When a stream @@ -944,15 +1039,20 @@ message StreamingPullResponse { // Request for the `CreateSnapshot` method. message CreateSnapshotRequest { - // User-provided name for this snapshot. If the name is not provided in the + // Required. User-provided name for this snapshot. If the name is not provided in the // request, the server will assign a random name for this snapshot on the same // project as the subscription. Note that for REST API requests, you must // specify a name. See the resource // name rules. Format is `projects/{project}/snapshots/{snap}`. - string name = 1; - - // The subscription whose backlog the snapshot retains. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Snapshot" + } + ]; + + // Required. The subscription whose backlog the snapshot retains. // Specifically, the created snapshot is guaranteed to retain: // (a) The existing backlog on the subscription. 
More precisely, this is // defined as the messages in the subscription's backlog that are @@ -961,7 +1061,12 @@ message CreateSnapshotRequest { // (b) Any messages published to the subscription's topic following the // successful completion of the CreateSnapshot request. // Format is `projects/{project}/subscriptions/{sub}`. - string subscription = 2; + string subscription = 2 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; // See Creating and // managing labels. @@ -970,12 +1075,12 @@ message CreateSnapshotRequest { // Request for the UpdateSnapshot method. message UpdateSnapshotRequest { - // The updated snapshot object. - Snapshot snapshot = 1; + // Required. The updated snapshot object. + Snapshot snapshot = 1 [(google.api.field_behavior) = REQUIRED]; - // Indicates which fields in the provided snapshot to update. + // Required. Indicates which fields in the provided snapshot to update. // Must be specified and non-empty. - google.protobuf.FieldMask update_mask = 2; + google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; } // A snapshot resource. Snapshots are used in @@ -985,11 +1090,18 @@ message UpdateSnapshotRequest { // acknowledgment state of messages in an existing subscription to the state // captured by a snapshot. message Snapshot { + option (google.api.resource) = { + type: "pubsub.googleapis.com/Snapshot" + pattern: "projects/{project}/snapshots/{snapshot}" + }; + // The name of the snapshot. string name = 1; // The name of the topic from which this snapshot is retaining messages. - string topic = 2; + string topic = 2 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Topic" + }]; // The snapshot is guaranteed to exist up until this time. // A newly-created snapshot expires no later than 7 days from the time of its @@ -1010,16 +1122,26 @@ message Snapshot { // Request for the GetSnapshot method. 
message GetSnapshotRequest { - // The name of the snapshot to get. + // Required. The name of the snapshot to get. // Format is `projects/{project}/snapshots/{snap}`. - string snapshot = 1; + string snapshot = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Snapshot" + } + ]; } // Request for the `ListSnapshots` method. message ListSnapshotsRequest { - // The name of the project in which to list snapshots. + // Required. The name of the project in which to list snapshots. // Format is `projects/{project-id}`. - string project = 1; + string project = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; // Maximum number of snapshots to return. int32 page_size = 2; @@ -1042,15 +1164,25 @@ message ListSnapshotsResponse { // Request for the `DeleteSnapshot` method. message DeleteSnapshotRequest { - // The name of the snapshot to delete. + // Required. The name of the snapshot to delete. // Format is `projects/{project}/snapshots/{snap}`. - string snapshot = 1; + string snapshot = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Snapshot" + } + ]; } // Request for the `Seek` method. message SeekRequest { - // The subscription to affect. - string subscription = 1; + // Required. The subscription to affect. + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; oneof target { // The time to seek to. @@ -1069,9 +1201,13 @@ message SeekRequest { // The snapshot to seek to. The snapshot's topic must be the same as that of // the provided subscription. // Format is `projects/{project}/snapshots/{snap}`. 
- string snapshot = 3; + string snapshot = 3 [(google.api.resource_reference) = { + type: "pubsub.googleapis.com/Snapshot" + }]; } } // Response for the `Seek` method (this response is empty). -message SeekResponse {} +message SeekResponse { + +} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 58a14a866526..4843bbdbeb09 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -15,12 +15,14 @@ _sym_db = _symbol_database.Default() -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -31,15 +33,17 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a 
google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xd7\x01\n\x05Topic\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01" \n\x0fGetTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"m\n\x12UpdateTopicRequest\x12&\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.Topic\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x0ePublishRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x31\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"K\n\x11ListTopicsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"U\n\x1dListTopicSubscriptionsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"P\n\x1eListTopicSubscriptionsResponse\x12\x15\n\rsubscriptions\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 
\x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"#\n\x12\x44\x65leteTopicRequest\x12\r\n\x05topic\x18\x01 \x01(\t"\xe5\x03\n\x0cSubscription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 
\x01(\x05".\n\x16GetSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"\x82\x01\n\x19UpdateSubscriptionRequest\x12\x34\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"R\n\x18ListSubscriptionsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"1\n\x19\x44\x65leteSubscriptionRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t"b\n\x17ModifyPushConfigRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x31\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig"U\n\x0bPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x14\n\x0cmax_messages\x18\x03 \x01(\x05"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"_\n\x18ModifyAckDeadlineRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x04 \x03(\t\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05";\n\x12\x41\x63knowledgeRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t"\xb7\x01\n\x14StreamingPullRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12#\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\xaf\x01\n\x15\x43reateSnapshotRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0csubscription\x18\x02 \x01(\t\x12\x43\n\x06labels\x18\x03 
\x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"v\n\x15UpdateSnapshotRequest\x12,\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"\xbf\x01\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05topic\x18\x02 \x01(\t\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"&\n\x12GetSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"N\n\x14ListSnapshotsRequest\x12\x0f\n\x07project\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t")\n\x15\x44\x65leteSnapshotRequest\x12\x10\n\x08snapshot\x18\x01 \x01(\t"m\n\x0bSeekRequest\x12\x14\n\x0csubscription\x18\x01 \x01(\t\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x12\n\x08snapshot\x18\x03 \x01(\tH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xb1\t\n\tPublisher\x12j\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic")\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x82\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"2\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\x12o\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\x12\x80\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"\'\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\x12\xb2\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"5\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12t\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"\'\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xeb\x12\n\nSubscriber\x12\x86\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"0\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\x12\x92\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"5\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\x9c\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse".\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\x12\x90\x01\n\x12
\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"5\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\x12\xa3\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"J\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\x12\x91\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\x12\x84\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xa0\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"I\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x8c\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\x12\x83\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot",\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x80\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"-\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\x12\x84\x01\n\x04Seek\x
12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 
\x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"\xe9\x04\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 
\x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x86\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xe8\x01\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 
\x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 
\x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xeb\t\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xf7\x14\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.
pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\
x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, 
google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -78,8 +82,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=246, - serialized_end=305, + serialized_start=306, + serialized_end=365, ) @@ -135,8 +139,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=478, - serialized_end=523, + serialized_start=543, + serialized_end=588, ) _TOPIC = _descriptor.Descriptor( @@ -161,7 +165,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -222,13 +226,15 @@ extensions=[], nested_types=[_TOPIC_LABELSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=308, - serialized_end=523, + serialized_start=368, + serialized_end=674, ) @@ -284,8 +290,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=718, - serialized_end=767, + serialized_start=869, + serialized_end=918, ) _PUBSUBMESSAGE = _descriptor.Descriptor( @@ -394,8 +400,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=526, - serialized_end=767, + serialized_start=677, + serialized_end=918, ) @@ -421,7 +427,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" + ), file=DESCRIPTOR, ) ], @@ -433,8 +441,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=769, - serialized_end=801, + serialized_start=920, + serialized_end=989, ) @@ -460,7 +468,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -478,7 +486,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -490,8 +498,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=803, - serialized_end=912, + serialized_start=991, + serialized_end=1110, ) @@ -517,7 +525,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -535,7 +545,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -547,8 +557,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=914, - serialized_end=996, + serialized_start=1112, + serialized_end=1236, ) @@ -586,8 +596,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=998, - serialized_end=1036, + serialized_start=1238, + serialized_end=1276, ) @@ -613,7 +623,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -661,8 +673,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1038, - serialized_end=1113, + serialized_start=1279, + serialized_end=1407, ) @@ -718,8 +730,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1115, - serialized_end=1201, + serialized_start=1409, + serialized_end=1495, ) @@ -745,7 +757,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" + ), file=DESCRIPTOR, ), 
_descriptor.FieldDescriptor( @@ -793,8 +807,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1203, - serialized_end=1288, + serialized_start=1497, + serialized_end=1619, ) @@ -820,7 +834,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b('\372A$\n"pubsub.googleapis.com/Subscription'), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -850,8 +864,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1290, - serialized_end=1370, + serialized_start=1621, + serialized_end=1742, ) @@ -925,8 +939,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1372, - serialized_end=1453, + serialized_start=1744, + serialized_end=1825, ) @@ -982,8 +996,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1455, - serialized_end=1527, + serialized_start=1827, + serialized_end=1899, ) @@ -1009,7 +1023,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" + ), file=DESCRIPTOR, ) ], @@ -1021,8 +1037,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1529, - serialized_end=1564, + serialized_start=1901, + serialized_end=1973, ) @@ -1078,8 +1094,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=478, - serialized_end=523, + serialized_start=543, + serialized_end=588, ) _SUBSCRIPTION = _descriptor.Descriptor( @@ -1104,7 +1120,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1122,7 +1138,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1273,13 +1291,15 @@ 
extensions=[], nested_types=[_SUBSCRIPTION_LABELSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + '\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}' + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1567, - serialized_end=2052, + serialized_start=1976, + serialized_end=2593, ) @@ -1335,8 +1355,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2054, - serialized_end=2130, + serialized_start=2595, + serialized_end=2671, ) @@ -1374,8 +1394,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2132, - serialized_end=2190, + serialized_start=2673, + serialized_end=2731, ) @@ -1431,8 +1451,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2358, - serialized_end=2418, + serialized_start=2899, + serialized_end=2959, ) _PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( @@ -1487,8 +1507,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=718, - serialized_end=767, + serialized_start=869, + serialized_end=918, ) _PUSHCONFIG = _descriptor.Descriptor( @@ -1569,8 +1589,8 @@ fields=[], ) ], - serialized_start=2193, - serialized_end=2494, + serialized_start=2734, + serialized_end=3035, ) @@ -1644,8 +1664,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2496, - serialized_end=2605, + serialized_start=3037, + serialized_end=3146, ) @@ -1671,7 +1691,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ) ], @@ -1683,8 +1705,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2607, - serialized_end=2653, + serialized_start=3148, + serialized_end=3238, ) @@ -1710,7 +1732,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + 
serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1728,7 +1750,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1740,8 +1762,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2656, - serialized_end=2786, + serialized_start=3241, + serialized_end=3381, ) @@ -1767,7 +1789,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1815,8 +1839,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2788, - serialized_end=2870, + serialized_start=3384, + serialized_end=3519, ) @@ -1872,8 +1896,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2872, - serialized_end=2979, + serialized_start=3521, + serialized_end=3628, ) @@ -1899,7 +1923,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ) ], @@ -1911,8 +1937,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2981, - serialized_end=3030, + serialized_start=3630, + serialized_end=3723, ) @@ -1938,7 +1964,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -1956,7 +1984,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -1968,8 +1996,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3032, - serialized_end=3130, + serialized_start=3726, + 
serialized_end=3873, ) @@ -1995,7 +2023,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2031,7 +2061,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2043,8 +2073,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3132, - serialized_end=3217, + serialized_start=3876, + serialized_end=4010, ) @@ -2082,8 +2112,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3219, - serialized_end=3295, + serialized_start=4012, + serialized_end=4088, ) @@ -2109,7 +2139,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2127,7 +2159,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2145,7 +2177,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2157,8 +2189,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3297, - serialized_end=3392, + serialized_start=4091, + serialized_end=4240, ) @@ -2184,7 +2216,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2202,7 +2236,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], 
@@ -2214,8 +2248,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3394, - serialized_end=3453, + serialized_start=4242, + serialized_end=4350, ) @@ -2241,7 +2275,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2313,7 +2349,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2343,8 +2379,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3456, - serialized_end=3639, + serialized_start=4353, + serialized_end=4585, ) @@ -2382,8 +2418,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3641, - serialized_end=3726, + serialized_start=4587, + serialized_end=4672, ) @@ -2439,8 +2475,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=478, - serialized_end=523, + serialized_start=543, + serialized_end=588, ) _CREATESNAPSHOTREQUEST = _descriptor.Descriptor( @@ -2465,7 +2501,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \n\036pubsub.googleapis.com/Snapshot" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2483,7 +2521,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2513,8 +2553,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3729, - serialized_end=3904, + serialized_start=4675, + serialized_end=4934, ) @@ -2540,7 +2580,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), 
file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2558,7 +2598,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\340A\002"), file=DESCRIPTOR, ), ], @@ -2570,8 +2610,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3906, - serialized_end=4024, + serialized_start=4937, + serialized_end=5065, ) @@ -2627,8 +2667,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=478, - serialized_end=523, + serialized_start=543, + serialized_end=588, ) _SNAPSHOT = _descriptor.Descriptor( @@ -2671,7 +2711,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A\035\n\033pubsub.googleapis.com/Topic"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2714,13 +2754,15 @@ extensions=[], nested_types=[_SNAPSHOT_LABELSENTRY], enum_types=[], - serialized_options=None, + serialized_options=_b( + "\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}" + ), is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4027, - serialized_end=4218, + serialized_start=5068, + serialized_end=5371, ) @@ -2746,7 +2788,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \n\036pubsub.googleapis.com/Snapshot" + ), file=DESCRIPTOR, ) ], @@ -2758,8 +2802,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4220, - serialized_end=4258, + serialized_start=5373, + serialized_end=5451, ) @@ -2785,7 +2829,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2833,8 +2879,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4260, - 
serialized_end=4338, + serialized_start=5454, + serialized_end=5585, ) @@ -2890,8 +2936,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4340, - serialized_end=4435, + serialized_start=5587, + serialized_end=5682, ) @@ -2917,7 +2963,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A \n\036pubsub.googleapis.com/Snapshot" + ), file=DESCRIPTOR, ) ], @@ -2929,8 +2977,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4437, - serialized_end=4478, + serialized_start=5684, + serialized_end=5765, ) @@ -2956,7 +3004,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2992,7 +3042,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\372A \n\036pubsub.googleapis.com/Snapshot"), file=DESCRIPTOR, ), ], @@ -3012,8 +3062,8 @@ fields=[], ) ], - serialized_start=4480, - serialized_end=4589, + serialized_start=5768, + serialized_end=5958, ) @@ -3032,8 +3082,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4591, - serialized_end=4605, + serialized_start=5960, + serialized_end=5974, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3210,7 +3260,7 @@ Attributes: name: - The name of the topic. It must have the format + Required. The name of the topic. It must have the format ``"projects/{project}/topics/{topic}"``. ``{topic}`` must start with a letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores (``_``), @@ -3305,7 +3355,7 @@ Attributes: topic: - The name of the topic to get. Format is + Required. The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) @@ -3324,14 +3374,15 @@ Attributes: topic: - The updated topic object. + Required. The updated topic object. update_mask: - Indicates which fields in the provided topic to update. Must - be specified and non-empty. Note that if ``update_mask`` - contains "message\_storage\_policy" then the new value will be - determined based on the policy configured at the project or - organization level. The ``message_storage_policy`` must not be - set in the ``topic`` provided above. + Required. Indicates which fields in the provided topic to + update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message\_storage\_policy" then the + new value will be determined based on the policy configured at + the project or organization level. The + ``message_storage_policy`` must not be set in the ``topic`` + provided above. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) ), @@ -3349,10 +3400,10 @@ Attributes: topic: - The messages in the request will be published on this topic. - Format is ``projects/{project}/topics/{topic}``. + Required. The messages in the request will be published on + this topic. Format is ``projects/{project}/topics/{topic}``. messages: - The messages to publish. + Required. The messages to publish. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) ), @@ -3390,8 +3441,8 @@ Attributes: project: - The name of the project in which to list topics. Format is - ``projects/{project-id}``. + Required. The name of the project in which to list topics. + Format is ``projects/{project-id}``. page_size: Maximum number of topics to return. page_token: @@ -3438,8 +3489,8 @@ Attributes: topic: - The name of the topic that subscriptions are attached to. - Format is ``projects/{project}/topics/{topic}``. + Required. The name of the topic that subscriptions are + attached to. 
Format is ``projects/{project}/topics/{topic}``. page_size: Maximum number of subscription names to return. page_token: @@ -3534,7 +3585,7 @@ Attributes: topic: - Name of the topic to delete. Format is + Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) @@ -3562,16 +3613,16 @@ Attributes: name: - The name of the subscription. It must have the format - ``"projects/{project}/subscriptions/{subscription}"``. + Required. The name of the subscription. It must have the + format ``"projects/{project}/subscriptions/{subscription}"``. ``{subscription}`` must start with a letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. topic: - The name of the topic from which this subscription is - receiving messages. Format is + Required. The name of the topic from which this subscription + is receiving messages. Format is ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been deleted. push_config: @@ -3852,7 +3903,7 @@ Attributes: subscription: - The name of the subscription to get. Format is + Required. The name of the subscription to get. Format is ``projects/{project}/subscriptions/{sub}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) @@ -3871,10 +3922,10 @@ Attributes: subscription: - The updated subscription object. + Required. The updated subscription object. update_mask: - Indicates which fields in the provided subscription to update. - Must be specified and non-empty. + Required. Indicates which fields in the provided subscription + to update. Must be specified and non-empty. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) ), @@ -3892,8 +3943,8 @@ Attributes: project: - The name of the project in which to list subscriptions. Format - is ``projects/{project-id}``. + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. page_size: Maximum number of subscriptions to return. page_token: @@ -3940,7 +3991,7 @@ Attributes: subscription: - The subscription to delete. Format is + Required. The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) @@ -3959,12 +4010,12 @@ Attributes: subscription: - The name of the subscription. Format is + Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. push_config: - The push configuration for future deliveries. An empty - ``pushConfig`` indicates that the Pub/Sub system should stop - pushing messages from the given subscription and allow + Required. The push configuration for future deliveries. An + empty ``pushConfig`` indicates that the Pub/Sub system should + stop pushing messages from the given subscription and allow messages to be pulled and acknowledged - effectively pausing the subscription if ``Pull`` or ``StreamingPull`` is not called. @@ -3985,8 +4036,8 @@ Attributes: subscription: - The subscription from which messages should be pulled. Format - is ``projects/{project}/subscriptions/{sub}``. + Required. The subscription from which messages should be + pulled. Format is ``projects/{project}/subscriptions/{sub}``. return_immediately: If this field set to true, the system will respond immediately even if it there are no messages available to return in the @@ -3994,9 +4045,9 @@ bounded amount of time) until at least one message is available, rather than returning no messages. max_messages: - The maximum number of messages to return for this request. 
- Must be a positive integer. The Pub/Sub system may return - fewer than the number specified. + Required. The maximum number of messages to return for this + request. Must be a positive integer. The Pub/Sub system may + return fewer than the number specified. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) ), @@ -4036,15 +4087,15 @@ Attributes: subscription: - The name of the subscription. Format is + Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. ack_ids: - List of acknowledgment IDs. + Required. List of acknowledgment IDs. ack_deadline_seconds: - The new ack deadline with respect to the time this request was - sent to the Pub/Sub system. For example, if the value is 10, - the new ack deadline will expire 10 seconds after the - ``ModifyAckDeadline`` call was made. Specifying zero might + Required. The new ack deadline with respect to the time this + request was sent to the Pub/Sub system. For example, if the + value is 10, the new ack deadline will expire 10 seconds after + the ``ModifyAckDeadline`` call was made. Specifying zero might immediately make the message available for delivery to another subscriber client. This typically results in an increase in the rate of message redeliveries (that is, duplicates). The @@ -4067,12 +4118,13 @@ Attributes: subscription: - The subscription whose message is being acknowledged. Format - is ``projects/{project}/subscriptions/{sub}``. + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. ack_ids: - The acknowledgment ID for the messages being acknowledged that - was returned by the Pub/Sub system in the ``Pull`` response. - Must not be empty. + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in the + ``Pull`` response. Must not be empty. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) ), @@ -4093,10 +4145,11 @@ Attributes: subscription: - The subscription for which to initialize the new stream. This - must be provided in the first request on the stream, and must - not be set in subsequent requests from client to server. - Format is ``projects/{project}/subscriptions/{sub}``. + Required. The subscription for which to initialize the new + stream. This must be provided in the first request on the + stream, and must not be set in subsequent requests from client + to server. Format is + ``projects/{project}/subscriptions/{sub}``. ack_ids: List of acknowledgement IDs for acknowledging previously received messages (received on this stream or a different @@ -4127,11 +4180,11 @@ subscriber, or to make the message available for redelivery if the processing was interrupted. stream_ack_deadline_seconds: - The ack deadline to use for the stream. This must be provided - in the first request on the stream, but it can also be updated - on subsequent requests from client to server. The minimum - deadline you can specify is 10 seconds. The maximum deadline - you can specify is 600 seconds (10 minutes). + Required. The ack deadline to use for the stream. This must be + provided in the first request on the stream, but it can also + be updated on subsequent requests from client to server. The + minimum deadline you can specify is 10 seconds. The maximum + deadline you can specify is 600 seconds (10 minutes). client_id: A unique identifier that is used to distinguish client instances from each other. Only needs to be provided on the @@ -4185,14 +4238,14 @@ Attributes: name: - User-provided name for this snapshot. If the name is not - provided in the request, the server will assign a random name - for this snapshot on the same project as the subscription. - Note that for REST API requests, you must specify a name. See - the resource name rules. Format is + Required. 
User-provided name for this snapshot. If the name is + not provided in the request, the server will assign a random + name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. See the resource name rules. Format is ``projects/{project}/snapshots/{snap}``. subscription: - The subscription whose backlog the snapshot retains. + Required. The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: (a) The existing backlog on the subscription. More precisely, this is defined as the messages in the subscription's backlog @@ -4221,10 +4274,10 @@ Attributes: snapshot: - The updated snapshot object. + Required. The updated snapshot object. update_mask: - Indicates which fields in the provided snapshot to update. - Must be specified and non-empty. + Required. Indicates which fields in the provided snapshot to + update. Must be specified and non-empty. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) ), @@ -4291,7 +4344,7 @@ Attributes: snapshot: - The name of the snapshot to get. Format is + Required. The name of the snapshot to get. Format is ``projects/{project}/snapshots/{snap}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) @@ -4310,8 +4363,8 @@ Attributes: project: - The name of the project in which to list snapshots. Format is - ``projects/{project-id}``. + Required. The name of the project in which to list snapshots. + Format is ``projects/{project-id}``. page_size: Maximum number of snapshots to return. page_token: @@ -4358,7 +4411,7 @@ Attributes: snapshot: - The name of the snapshot to delete. Format is + Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) @@ -4377,7 +4430,7 @@ Attributes: subscription: - The subscription to affect. + Required. 
The subscription to affect. time: The time to seek to. Messages retained in the subscription that were published before this time are marked as @@ -4418,11 +4471,52 @@ DESCRIPTOR._options = None _TOPIC_LABELSENTRY._options = None +_TOPIC.fields_by_name["name"]._options = None +_TOPIC._options = None _PUBSUBMESSAGE_ATTRIBUTESENTRY._options = None +_GETTOPICREQUEST.fields_by_name["topic"]._options = None +_UPDATETOPICREQUEST.fields_by_name["topic"]._options = None +_UPDATETOPICREQUEST.fields_by_name["update_mask"]._options = None +_PUBLISHREQUEST.fields_by_name["topic"]._options = None +_PUBLISHREQUEST.fields_by_name["messages"]._options = None +_LISTTOPICSREQUEST.fields_by_name["project"]._options = None +_LISTTOPICSUBSCRIPTIONSREQUEST.fields_by_name["topic"]._options = None +_LISTTOPICSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"]._options = None +_DELETETOPICREQUEST.fields_by_name["topic"]._options = None _SUBSCRIPTION_LABELSENTRY._options = None +_SUBSCRIPTION.fields_by_name["name"]._options = None +_SUBSCRIPTION.fields_by_name["topic"]._options = None +_SUBSCRIPTION._options = None _PUSHCONFIG_ATTRIBUTESENTRY._options = None +_GETSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None +_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None +_UPDATESUBSCRIPTIONREQUEST.fields_by_name["update_mask"]._options = None +_LISTSUBSCRIPTIONSREQUEST.fields_by_name["project"]._options = None +_DELETESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None +_MODIFYPUSHCONFIGREQUEST.fields_by_name["subscription"]._options = None +_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"]._options = None +_PULLREQUEST.fields_by_name["subscription"]._options = None +_PULLREQUEST.fields_by_name["max_messages"]._options = None +_MODIFYACKDEADLINEREQUEST.fields_by_name["subscription"]._options = None +_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_ids"]._options = None 
+_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_deadline_seconds"]._options = None +_ACKNOWLEDGEREQUEST.fields_by_name["subscription"]._options = None +_ACKNOWLEDGEREQUEST.fields_by_name["ack_ids"]._options = None +_STREAMINGPULLREQUEST.fields_by_name["subscription"]._options = None +_STREAMINGPULLREQUEST.fields_by_name["stream_ack_deadline_seconds"]._options = None _CREATESNAPSHOTREQUEST_LABELSENTRY._options = None +_CREATESNAPSHOTREQUEST.fields_by_name["name"]._options = None +_CREATESNAPSHOTREQUEST.fields_by_name["subscription"]._options = None +_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None +_UPDATESNAPSHOTREQUEST.fields_by_name["update_mask"]._options = None _SNAPSHOT_LABELSENTRY._options = None +_SNAPSHOT.fields_by_name["topic"]._options = None +_SNAPSHOT._options = None +_GETSNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None +_LISTSNAPSHOTSREQUEST.fields_by_name["project"]._options = None +_DELETESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None +_SEEKREQUEST.fields_by_name["subscription"]._options = None +_SEEKREQUEST.fields_by_name["snapshot"]._options = None _PUBLISHER = _descriptor.ServiceDescriptor( name="Publisher", @@ -4432,8 +4526,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=4608, - serialized_end=5809, + serialized_start=5977, + serialized_end=7236, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4443,7 +4537,7 @@ input_type=_TOPIC, output_type=_TOPIC, serialized_options=_b( - "\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*" + "\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*\332A\004name" ), ), _descriptor.MethodDescriptor( @@ -4465,7 +4559,7 @@ input_type=_PUBLISHREQUEST, output_type=_PUBLISHRESPONSE, serialized_options=_b( - "\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*" + 
"\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*\332A\016topic,messages" ), ), _descriptor.MethodDescriptor( @@ -4476,7 +4570,7 @@ input_type=_GETTOPICREQUEST, output_type=_TOPIC, serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}" + "\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\332A\005topic" ), ), _descriptor.MethodDescriptor( @@ -4487,7 +4581,7 @@ input_type=_LISTTOPICSREQUEST, output_type=_LISTTOPICSRESPONSE, serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics" + "\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics\332A\007project" ), ), _descriptor.MethodDescriptor( @@ -4498,7 +4592,7 @@ input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, serialized_options=_b( - "\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions" + "\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions\332A\005topic" ), ), _descriptor.MethodDescriptor( @@ -4520,7 +4614,7 @@ input_type=_DELETETOPICREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}" + "\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}\332A\005topic" ), ), ], @@ -4538,8 +4632,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=5812, - serialized_end=8223, + serialized_start=7239, + serialized_end=9918, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", @@ -4549,7 +4643,7 @@ input_type=_SUBSCRIPTION, output_type=_SUBSCRIPTION, serialized_options=_b( - "\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*" + "\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*\332A+name,topic,push_config,ack_deadline_seconds" ), ), _descriptor.MethodDescriptor( @@ -4560,7 +4654,7 @@ 
input_type=_GETSUBSCRIPTIONREQUEST, output_type=_SUBSCRIPTION, serialized_options=_b( - "\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}" + "\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription" ), ), _descriptor.MethodDescriptor( @@ -4582,7 +4676,7 @@ input_type=_LISTSUBSCRIPTIONSREQUEST, output_type=_LISTSUBSCRIPTIONSRESPONSE, serialized_options=_b( - "\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions" + "\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions\332A\007project" ), ), _descriptor.MethodDescriptor( @@ -4593,7 +4687,7 @@ input_type=_DELETESUBSCRIPTIONREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}" + "\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription" ), ), _descriptor.MethodDescriptor( @@ -4604,7 +4698,7 @@ input_type=_MODIFYACKDEADLINEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*' + '\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*\332A)subscription,ack_ids,ack_deadline_seconds' ), ), _descriptor.MethodDescriptor( @@ -4615,7 +4709,7 @@ input_type=_ACKNOWLEDGEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*' + '\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*\332A\024subscription,ack_ids' ), ), _descriptor.MethodDescriptor( @@ -4626,7 +4720,7 @@ input_type=_PULLREQUEST, output_type=_PULLRESPONSE, serialized_options=_b( - '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*' + 
'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*\332A,subscription,return_immediately,max_messages' ), ), _descriptor.MethodDescriptor( @@ -4646,7 +4740,7 @@ input_type=_MODIFYPUSHCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - '\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*' + '\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*\332A\030subscription,push_config' ), ), _descriptor.MethodDescriptor( @@ -4668,7 +4762,7 @@ input_type=_LISTSNAPSHOTSREQUEST, output_type=_LISTSNAPSHOTSRESPONSE, serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots' + '\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots\332A\007project' ), ), _descriptor.MethodDescriptor( @@ -4679,7 +4773,7 @@ input_type=_CREATESNAPSHOTREQUEST, output_type=_SNAPSHOT, serialized_options=_b( - "\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*" + "\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*\332A\021name,subscription" ), ), _descriptor.MethodDescriptor( @@ -4701,7 +4795,7 @@ input_type=_DELETESNAPSHOTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, serialized_options=_b( - "\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}" + "\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot" ), ), _descriptor.MethodDescriptor( diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 6c204cd636f9..05b0eeac24a1 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,27 +1,27 @@ { - "updateTime": "2020-02-04T13:19:45.479344Z", + "updateTime": "2020-03-04T13:26:32.035995Z", "sources": [ { "generator": { "name": "artman", - "version": "0.44.4", - "dockerImage": 
"googleapis/artman@sha256:19e945954fc960a4bdfee6cb34695898ab21a8cf0bac063ee39b91f00a1faec8" + "version": "1.0.0", + "dockerImage": "googleapis/artman@sha256:f37f2464788cb551299209b4fcab4eb323533154488c2ef9ec0c75d7c2b4b482" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "69d9945330a5721cd679f17331a78850e2618226", - "internalRef": "293080182", - "log": "69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n" + 
"sha": "541b1ded4abadcc38e8178680b0677f65594ea6f", + "internalRef": "298686266", + "log": "541b1ded4abadcc38e8178680b0677f65594ea6f\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 298686266\n\nc0d171acecb4f5b0bfd2c4ca34fc54716574e300\n Updated to include the Notification v1 API.\n\nPiperOrigin-RevId: 298652775\n\n2346a9186c0bff2c9cc439f2459d558068637e05\nAdd Service Directory v1beta1 protos and configs\n\nPiperOrigin-RevId: 298625638\n\na78ed801b82a5c6d9c5368e24b1412212e541bb7\nPublishing v3 protos and configs.\n\nPiperOrigin-RevId: 298607357\n\n4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n83c6f84035ee0f80eaa44d8b688a010461cc4080\nUpdate google/api/auth.proto to make AuthProvider to have JwtLocation\n\nPiperOrigin-RevId: 297918498\n\ne9e90a787703ec5d388902e2cb796aaed3a385b4\nDialogflow weekly v2/v2beta1 library update:\n - adding get validation result\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297671458\n\n1a2b05cc3541a5f7714529c665aecc3ea042c646\nAdding .yaml and .json config files.\n\nPiperOrigin-RevId: 297570622\n\ndfe1cf7be44dee31d78f78e485d8c95430981d6e\nPublish `QueryOptions` proto.\n\nIntroduced a `query_options` input in `ExecuteSqlRequest`.\n\nPiperOrigin-RevId: 297497710\n\ndafc905f71e5d46f500b41ed715aad585be062c3\npubsub: revert pull init_rpc_timeout & max_rpc_timeout back to 25 seconds and reset multiplier to 1.0\n\nPiperOrigin-RevId: 297486523\n\nf077632ba7fee588922d9e8717ee272039be126d\nfirestore: add update_transform\n\nPiperOrigin-RevId: 
297405063\n\n0aba1900ffef672ec5f0da677cf590ee5686e13b\ncluster: use square brace for cross-reference\n\nPiperOrigin-RevId: 297204568\n\n5dac2da18f6325cbaed54603c43f0667ecd50247\nRestore retry params in gapic config because securitycenter has non-standard default retry params.\nRestore a few retry codes for some idempotent methods.\n\nPiperOrigin-RevId: 297196720\n\n1eb61455530252bba8b2c8d4bc9832960e5a56f6\npubsub: v1 replace IAM HTTP rules\n\nPiperOrigin-RevId: 297188590\n\n80b2d25f8d43d9d47024ff06ead7f7166548a7ba\nDialogflow weekly v2/v2beta1 library update:\n - updates to mega agent api\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297187629\n\n0b1876b35e98f560f9c9ca9797955f020238a092\nUse an older version of protoc-docs-plugin that is compatible with the specified gapic-generator and protobuf versions.\n\nprotoc-docs-plugin >=0.4.0 (see commit https://github.com/googleapis/protoc-docs-plugin/commit/979f03ede6678c487337f3d7e88bae58df5207af) is incompatible with protobuf 3.9.1.\n\nPiperOrigin-RevId: 296986742\n\n1e47e676cddbbd8d93f19ba0665af15b5532417e\nFix: Restore a method signature for UpdateCluster\n\nPiperOrigin-RevId: 296901854\n\n7f910bcc4fc4704947ccfd3ceed015d16b9e00c2\nUpdate Dataproc v1beta2 client.\n\nPiperOrigin-RevId: 296451205\n\nde287524405a3dce124d301634731584fc0432d7\nFix: Reinstate method signatures that had been missed off some RPCs\nFix: Correct resource types for two fields\n\nPiperOrigin-RevId: 296435091\n\ne5bc9566ae057fb4c92f8b7e047f1c8958235b53\nDeprecate the endpoint_uris field, as it is unused.\n\nPiperOrigin-RevId: 296357191\n\n8c12e2b4dca94e12bff9f538bdac29524ff7ef7a\nUpdate Dataproc v1 client.\n\nPiperOrigin-RevId: 296336662\n\n17567c4a1ef0a9b50faa87024d66f8acbb561089\nRemoving erroneous comment, a la https://github.com/googleapis/java-speech/pull/103\n\nPiperOrigin-RevId: 
296332968\n\n3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\nce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get ride of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry 
config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 
294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. (Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old an very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for 
managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix 
indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\na8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n" } }, { "template": { - "name": "python_split_library", + "name": "python_library", "origin": "synthtool.gcp", - "version": "2019.10.17" + "version": "2020.2.4" } } ], From e9d1ab6de406e8c709ac0f401e65cda4d24651f6 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 24 Mar 2020 18:25:00 +0100 Subject: [PATCH 0444/1197] test: cover the fix for sync pull with no messages (#53) * test: cover the fix for sync pull with no messages * Fix typo in test failure message --- packages/google-cloud-pubsub/tests/system.py | 23 ++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git 
a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 2eccad8ed4a6..81c196ccc1fc 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -27,6 +27,7 @@ import six import google.auth +from google.api_core import exceptions as core_exceptions from google.cloud import pubsub_v1 from google.cloud.pubsub_v1 import exceptions from google.cloud.pubsub_v1 import futures @@ -432,6 +433,28 @@ def one_arg_close(subscriber): # the cleanup helper expects exactly one argumen assert conn_count_end == conn_count_start +def test_synchronous_pull_no_deadline_error_if_no_messages( + publisher, topic_path, subscriber, subscription_path, cleanup +): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_subscription, subscription_path)) + + # Create a topic and subscribe to it. + publisher.create_topic(topic_path) + subscriber.create_subscription(subscription_path, topic_path) + + try: + response = subscriber.pull(subscription_path, max_messages=2) + except core_exceptions.DeadlineExceeded: + pytest.fail( + "Unexpected DeadlineExceeded error on synchronous pull when no " + "messages published to the topic." 
+ ) + else: + assert list(response.received_messages) == [] + + class TestStreamingPull(object): def test_streaming_pull_callback_error_propagation( self, publisher, topic_path, subscriber, subscription_path, cleanup From 9b747e368edde95bdf79938f4f331f3caa658617 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 24 Mar 2020 18:57:57 +0100 Subject: [PATCH 0445/1197] test: put RBAC tests into their own suite (#51) Co-authored-by: Prad Nelluru --- packages/google-cloud-pubsub/tests/system.py | 79 ++++++++------------ 1 file changed, 32 insertions(+), 47 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 81c196ccc1fc..1694c50229f1 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -500,7 +500,7 @@ def test_streaming_pull_ack_deadline( ) # publish some messages and wait for completion - self._publish_messages(publisher, topic_path, batch_sizes=[2]) + _publish_messages(publisher, topic_path, batch_sizes=[2]) # subscribe to the topic callback = StreamingPullCallback( @@ -543,7 +543,7 @@ def test_streaming_pull_max_messages( subscriber.create_subscription(subscription_path, topic_path) batch_sizes = (7, 4, 8, 2, 10, 1, 3, 8, 6, 1) # total: 50 - self._publish_messages(publisher, topic_path, batch_sizes=batch_sizes) + _publish_messages(publisher, topic_path, batch_sizes=batch_sizes) # now subscribe and do the main part, check for max pending messages total_messages = sum(batch_sizes) @@ -585,10 +585,12 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown - @pytest.mark.skipif( - "KOKORO_GFILE_DIR" not in os.environ, - reason="Requires Kokoro environment with a service account with limited role.", - ) + +@pytest.mark.skipif( + "KOKORO_GFILE_DIR" not in os.environ, + reason="Requires Kokoro environment with a service account with limited role.", +) +class TestBasicRBAC(object): def 
test_streaming_pull_subscriber_permissions_sufficient( self, publisher, topic_path, subscriber, subscription_path, cleanup ): @@ -611,7 +613,7 @@ def test_streaming_pull_subscriber_permissions_sufficient( # successfully pulls and processes it. callback = StreamingPullCallback(processing_time=0.01, resolve_at_msg_count=1) future = streaming_pull_subscriber.subscribe(subscription_path, callback) - self._publish_messages(publisher, topic_path, batch_sizes=[1]) + _publish_messages(publisher, topic_path, batch_sizes=[1]) try: callback.done_future.result(timeout=10) @@ -624,10 +626,6 @@ def test_streaming_pull_subscriber_permissions_sufficient( finally: future.cancel() - @pytest.mark.skipif( - "KOKORO_GFILE_DIR" not in os.environ, - reason="Requires Kokoro environment with a service account with limited role.", - ) def test_publisher_role_can_publish_messages( self, publisher, topic_path, subscriber, subscription_path, cleanup ): @@ -646,7 +644,7 @@ def test_publisher_role_can_publish_messages( ) publisher_only_client = type(publisher).from_service_account_file(filename) - self._publish_messages(publisher_only_client, topic_path, batch_sizes=[2]) + _publish_messages(publisher_only_client, topic_path, batch_sizes=[2]) response = subscriber.pull(subscription_path, max_messages=2) assert len(response.received_messages) == 2 @@ -654,10 +652,6 @@ def test_publisher_role_can_publish_messages( @pytest.mark.skip( "Snapshot creation is not instant on the backend, causing test falkiness." 
) - @pytest.mark.skipif( - "KOKORO_GFILE_DIR" not in os.environ, - reason="Requires Kokoro environment with a service account with limited role.", - ) def test_snapshot_seek_subscriber_permissions_sufficient( self, project, publisher, topic_path, subscriber, subscription_path, cleanup ): @@ -682,13 +676,13 @@ def test_snapshot_seek_subscriber_permissions_sufficient( subscriber_only_client = type(subscriber).from_service_account_file(filename) # Publish two messages and create a snapshot inbetween. - self._publish_messages(publisher, topic_path, batch_sizes=[1]) + _publish_messages(publisher, topic_path, batch_sizes=[1]) response = subscriber.pull(subscription_path, max_messages=10) assert len(response.received_messages) == 1 subscriber.create_snapshot(snapshot_path, subscription_path) - self._publish_messages(publisher, topic_path, batch_sizes=[1]) + _publish_messages(publisher, topic_path, batch_sizes=[1]) response = subscriber.pull(subscription_path, max_messages=10) assert len(response.received_messages) == 1 @@ -699,10 +693,6 @@ def test_snapshot_seek_subscriber_permissions_sufficient( response = subscriber.pull(subscription_path, max_messages=10) assert len(response.received_messages) == 1 - @pytest.mark.skipif( - "KOKORO_GFILE_DIR" not in os.environ, - reason="Requires Kokoro environment with a service account with limited role.", - ) def test_viewer_role_can_list_resources( self, project, publisher, topic_path, subscriber, cleanup ): @@ -727,10 +717,6 @@ def test_viewer_role_can_list_resources( next(iter(viewer_only_subscriber.list_subscriptions(project_path)), None) next(iter(viewer_only_subscriber.list_snapshots(project_path)), None) - @pytest.mark.skipif( - "KOKORO_GFILE_DIR" not in os.environ, - reason="Requires Kokoro environment with a service account with limited role.", - ) def test_editor_role_can_create_resources( self, project, publisher, topic_path, subscriber, subscription_path, cleanup ): @@ -754,30 +740,29 @@ def 
test_editor_role_can_create_resources( editor_subscriber.create_subscription(subscription_path, topic_path) editor_subscriber.create_snapshot(snapshot_path, subscription_path) - def _publish_messages(self, publisher, topic_path, batch_sizes): - """Publish ``count`` messages in batches and wait until completion.""" - publish_futures = [] - msg_counter = itertools.count(start=1) - for batch_size in batch_sizes: - msg_batch = self._make_messages(count=batch_size) - for msg in msg_batch: - future = publisher.publish( - topic_path, msg, seq_num=str(next(msg_counter)) - ) - publish_futures.append(future) - time.sleep(0.1) +def _publish_messages(publisher, topic_path, batch_sizes): + """Publish ``count`` messages in batches and wait until completion.""" + publish_futures = [] + msg_counter = itertools.count(start=1) - # wait untill all messages have been successfully published - for future in publish_futures: - future.result(timeout=30) + for batch_size in batch_sizes: + msg_batch = _make_messages(count=batch_size) + for msg in msg_batch: + future = publisher.publish(topic_path, msg, seq_num=str(next(msg_counter))) + publish_futures.append(future) + time.sleep(0.1) - def _make_messages(self, count): - messages = [ - u"message {}/{}".format(i, count).encode("utf-8") - for i in range(1, count + 1) - ] - return messages + # wait untill all messages have been successfully published + for future in publish_futures: + future.result(timeout=30) + + +def _make_messages(count): + messages = [ + u"message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) + ] + return messages class AckCallback(object): From 9e305ff9ad7a9569acc6456ca1ed9fbf6d6cad9d Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Wed, 25 Mar 2020 18:08:05 -0400 Subject: [PATCH 0446/1197] fix: use client_options.api_endpoint parameter instead of ignoring it (#59) - The client_options.api-endpoint parameter was being ignored. This bug prevented users from using regional endpoints. 
It also made it harder to test code against nonprod endpoints. Fixes #61 --- .../cloud/pubsub_v1/publisher/client.py | 12 +++++++- .../cloud/pubsub_v1/subscriber/client.py | 14 ++++++++- .../publisher/test_publisher_client.py | 29 +++++++++++++++++++ .../subscriber/test_subscriber_client.py | 29 +++++++++++++++++++ 4 files changed, 82 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 9284420f5c0e..caa7844071d2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -133,6 +133,16 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): target=os.environ.get("PUBSUB_EMULATOR_HOST") ) + client_options = kwargs.pop("client_options", None) + if ( + client_options + and "api_endpoint" in client_options + and isinstance(client_options["api_endpoint"], six.string_types) + ): + self._target = client_options["api_endpoint"] + else: + self._target = publisher_client.PublisherClient.SERVICE_ADDRESS + # Use a custom channel. # We need this in order to set appropriate default message size and # keepalive options. @@ -217,7 +227,7 @@ def target(self): Returns: str: The location of the API. 
""" - return publisher_client.PublisherClient.SERVICE_ADDRESS + return self._target def _get_or_create_sequencer(self, topic, ordering_key): """ Get an existing sequencer or create a new one given the (topic, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 00d97231e0e7..718e69083fcf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -16,6 +16,7 @@ import os import pkg_resources +import six import grpc @@ -79,6 +80,17 @@ def __init__(self, **kwargs): target=os.environ.get("PUBSUB_EMULATOR_HOST") ) + # api_endpoint wont be applied if 'transport' is passed in. + client_options = kwargs.pop("client_options", None) + if ( + client_options + and "api_endpoint" in client_options + and isinstance(client_options["api_endpoint"], six.string_types) + ): + self._target = client_options["api_endpoint"] + else: + self._target = subscriber_client.SubscriberClient.SERVICE_ADDRESS + # Use a custom channel. # We need this in order to set appropriate default message size and # keepalive options. @@ -133,7 +145,7 @@ def target(self): Returns: str: The location of the API. 
""" - return subscriber_client.SubscriberClient.SERVICE_ADDRESS + return self._target @property def api(self): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 69c854b47cf6..4ca979892b9d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -53,6 +53,35 @@ def test_init_w_custom_transport(): assert client.batch_settings.max_messages == 100 +def test_init_w_api_endpoint(): + client_options = {"api_endpoint": "testendpoint.google.com"} + client = publisher.Client(client_options=client_options) + + assert isinstance(client.api, publisher_client.PublisherClient) + assert (client.api.transport._channel._channel.target()).decode( + "utf-8" + ) == "testendpoint.google.com" + + +def test_init_w_unicode_api_endpoint(): + client_options = {"api_endpoint": u"testendpoint.google.com"} + client = publisher.Client(client_options=client_options) + + assert isinstance(client.api, publisher_client.PublisherClient) + assert (client.api.transport._channel._channel.target()).decode( + "utf-8" + ) == "testendpoint.google.com" + + +def test_init_w_empty_client_options(): + client = publisher.Client(client_options={}) + + assert isinstance(client.api, publisher_client.PublisherClient) + assert (client.api.transport._channel._channel.target()).decode( + "utf-8" + ) == publisher_client.PublisherClient.SERVICE_ADDRESS + + def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar/") # NOTE: When the emulator host is set, a custom channel will be used, so diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 19ec194ce1e6..d8f671157297 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -34,6 +34,35 @@ def test_init_w_custom_transport(): assert client.api.transport is transport +def test_init_w_api_endpoint(): + client_options = {"api_endpoint": "testendpoint.google.com"} + client = subscriber.Client(client_options=client_options) + + assert isinstance(client.api, subscriber_client.SubscriberClient) + assert (client.api.transport._channel._channel.target()).decode( + "utf-8" + ) == "testendpoint.google.com" + + +def test_init_w_unicode_api_endpoint(): + client_options = {"api_endpoint": u"testendpoint.google.com"} + client = subscriber.Client(client_options=client_options) + + assert isinstance(client.api, subscriber_client.SubscriberClient) + assert (client.api.transport._channel._channel.target()).decode( + "utf-8" + ) == "testendpoint.google.com" + + +def test_init_w_empty_client_options(): + client = subscriber.Client(client_options={}) + + assert isinstance(client.api, subscriber_client.SubscriberClient) + assert (client.api.transport._channel._channel.target()).decode( + "utf-8" + ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS + + def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon/") # NOTE: When the emulator host is set, a custom channel will be used, so From 0ee27b3a4be2b9d97d617ced929afcf2d5bf4c7a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 26 Mar 2020 14:32:07 -0400 Subject: [PATCH 0447/1197] chore: release 1.4.2 (#56) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py [ci skip] Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 
insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index b1ea6bf3b1cd..3e65d0a6d446 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### [1.4.2](https://www.github.com/googleapis/python-pubsub/compare/v1.4.1...v1.4.2) (2020-03-25) + + +### Bug Fixes + +* update generated retry timings for publish and pull rpcs via synth ([#43](https://www.github.com/googleapis/python-pubsub/issues/43)) ([4f7fe85](https://www.github.com/googleapis/python-pubsub/commit/4f7fe85618d811fea94cb46b5dc758aa78c328a8)) +* use client_options.api_endpoint parameter instead of ignoring it ([#59](https://www.github.com/googleapis/python-pubsub/issues/59)) ([56b8d7b](https://www.github.com/googleapis/python-pubsub/commit/56b8d7b046a495ce2ce59bebdd354385147a5013)), closes [#61](https://www.github.com/googleapis/python-pubsub/issues/61) + ### [1.4.1](https://www.github.com/googleapis/python-pubsub/compare/v1.4.0...v1.4.1) (2020-03-23) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 01758dc482c7..99e009844290 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.4.1" +version = "1.4.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 50715a1e477670c80cb2257a22831696a6af4296 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 9 Apr 2020 18:32:46 +0200 Subject: [PATCH 0448/1197] fix: fix docs warnings in Sphinx 3.0+ (#70) * docs: fix Sphinx auto-gen warnings * Fix docs warning in changelog --- packages/google-cloud-pubsub/CHANGELOG.md | 23 ++++++++----------- packages/google-cloud-pubsub/docs/conf.py | 5 +--- .../docs/subscriber/api/message.rst | 
1 + packages/google-cloud-pubsub/docs/types.rst | 1 + 4 files changed, 12 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 3e65d0a6d446..074f33decc3a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,16 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history -### [1.4.2](https://www.github.com/googleapis/python-pubsub/compare/v1.4.1...v1.4.2) (2020-03-25) - +## [1.4.2](https://www.github.com/googleapis/python-pubsub/compare/v1.4.1...v1.4.2) (2020-03-25) ### Bug Fixes * update generated retry timings for publish and pull rpcs via synth ([#43](https://www.github.com/googleapis/python-pubsub/issues/43)) ([4f7fe85](https://www.github.com/googleapis/python-pubsub/commit/4f7fe85618d811fea94cb46b5dc758aa78c328a8)) * use client_options.api_endpoint parameter instead of ignoring it ([#59](https://www.github.com/googleapis/python-pubsub/issues/59)) ([56b8d7b](https://www.github.com/googleapis/python-pubsub/commit/56b8d7b046a495ce2ce59bebdd354385147a5013)), closes [#61](https://www.github.com/googleapis/python-pubsub/issues/61) -### [1.4.1](https://www.github.com/googleapis/python-pubsub/compare/v1.4.0...v1.4.1) (2020-03-23) - +## [1.4.1](https://www.github.com/googleapis/python-pubsub/compare/v1.4.0...v1.4.1) (2020-03-23) ### Bug Fixes @@ -21,13 +19,11 @@ ## [1.4.0](https://www.github.com/googleapis/python-pubsub/compare/v1.3.1...v1.4.0) (2020-03-06) - ### Features * **pubsub:** implement max_duration_per_lease_extension option ([#38](https://www.github.com/googleapis/python-pubsub/issues/38)) ([d911a2d](https://www.github.com/googleapis/python-pubsub/commit/d911a2dc8edf3c348ad3f128368b30e32dbc449e)) -### [1.3.1](https://www.github.com/googleapis/python-pubsub/compare/v1.3.0...v1.3.1) (2020-02-28) - +## [1.3.1](https://www.github.com/googleapis/python-pubsub/compare/v1.3.0...v1.3.1) (2020-02-28) ### Bug Fixes @@ -35,7 
+31,6 @@ ## [1.3.0](https://www.github.com/googleapis/python-pubsub/compare/v1.2.0...v1.3.0) (2020-02-20) - ### Features * **pubsub:** ordering keys ([#26](https://www.github.com/googleapis/python-pubsub/issues/26)) ([cc3093a](https://www.github.com/googleapis/python-pubsub/commit/cc3093a2c0304259bc374bc2eeec9630e4a11a5e)) @@ -43,13 +38,11 @@ ## [1.2.0](https://www.github.com/googleapis/python-pubsub/compare/v1.1.0...v1.2.0) (2020-02-05) - ### Features * **pubsub:** add delivery attempt property to message object received by user code ([#10205](https://www.github.com/googleapis/google-cloud-python/issues/10205)) ([a0937c1](https://www.github.com/googleapis/python-pubsub/commit/a0937c13107b92271913de579b60f24b2aaac177)) * add `StreamingPullRequest.client_id` field (via synth) ([199d56a](https://www.github.com/googleapis/python-pubsub/commit/199d56a939bb6244f67138f843dafdd80721f0d3)) - ### Bug Fixes * **pubsub:** handle None in on response callback ([#9982](https://www.github.com/googleapis/google-cloud-python/issues/9982)) ([6596c4b](https://www.github.com/googleapis/python-pubsub/commit/6596c4bae5526d82f5c1b5e0c243b2883404d51f)) @@ -158,7 +151,7 @@ - Use kwargs in test_subscriber_client ([#8414](https://github.com/googleapis/google-cloud-python/pull/8414)) ### New Features -- Add 'options_' argument to clients' 'get_iam_policy'; pin black version (via synth). ([#8657](https://github.com/googleapis/google-cloud-python/pull/8657)) +- Add `options_` argument to clients' `get_iam_policy`; pin black version (via synth). ([#8657](https://github.com/googleapis/google-cloud-python/pull/8657)) - Add 'client_options' support, update list method docstrings (via synth). ([#8518](https://github.com/googleapis/google-cloud-python/pull/8518)) ### Dependencies @@ -420,8 +413,7 @@ ### New Features -- A new implementation of the subscriber has been added. This is available as `SubscriberClient.subscribe_experimental`. 
In the next release, this will be replace the current `subscribe` method. If you use this, please report your -findings to us on GitHub. (#5189, #5201, #5210, #5229, #5230, #5237, #5256) +- A new implementation of the subscriber has been added. This is available as `SubscriberClient.subscribe_experimental`. In the next release, this will be replace the current `subscribe` method. If you use this, please report your findings to us on GitHub. (#5189, #5201, #5210, #5229, #5230, #5237, #5256) ### Dependencies @@ -524,6 +516,7 @@ PyPI: https://pypi.org/project/google-cloud-pubsub/0.30.1/ when a "monitor" worker calls `commit()` after `max_latency` seconds, a failure can occur if a new message is added to the batch **during** the commit. To fix, the following changes were implemented: + - Adding a "STARTING" status for `Batch.commit()` (#4614). This fixes the issue when the batch exceeds `max_messages`. - Adding extra check in `Batch.will_accept` for the number of @@ -601,10 +594,12 @@ PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.3/ running after encountering an exception (#4472, #4498). This bug is the **only** reason for the `0.29.2` release. - Thread Changes + - Added names to all threads created directly by Pub / Sub (#4474, #4476, #4480). Also removing spaces and colons from thread names (#4476). - Logging changes + - Adding debug logs when lease management exits (#4484) - Adding debug logs when `QueueCallbackThread` exits (#4494). 
Instances handle the processing of messages in a @@ -656,6 +651,6 @@ PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.1/ - Upgrading to `google-cloud-core >= 0.28.0` and adding dependency on `google-api-core` (#4221, #4280) - Deferring to `google-api-core` for `grpcio` and - `googleapis-common-protos`dependencies (#4096, #4098) + `googleapis-common-protos` dependencies (#4096, #4098) PyPI: https://pypi.org/project/google-cloud-pubsub/0.29.0/ diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 20c1b57fe653..3e4bdfa1d7ad 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -38,6 +38,7 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags @@ -49,10 +50,6 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/message.rst b/packages/google-cloud-pubsub/docs/subscriber/api/message.rst index cc5b0e37c9b4..6e7a55ded6c7 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/api/message.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/api/message.rst @@ -3,3 +3,4 @@ Messages .. autoclass:: google.cloud.pubsub_v1.subscriber.message.Message :members: + :noindex: diff --git a/packages/google-cloud-pubsub/docs/types.rst b/packages/google-cloud-pubsub/docs/types.rst index 87c987571766..308f66971e05 100644 --- a/packages/google-cloud-pubsub/docs/types.rst +++ b/packages/google-cloud-pubsub/docs/types.rst @@ -3,3 +3,4 @@ Pub/Sub Client Types .. 
automodule:: google.cloud.pubsub_v1.types :members: + :noindex: From e95dbd9f5ab05aa994dcdfa90f55d3cb3ab1e6d0 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 16 Apr 2020 19:18:05 +0200 Subject: [PATCH 0449/1197] fix: restrict api-core dependency to < 1.17.0 (#76) Version 1.17.0 causes problems on re-establishing streams on retryable errors, thus restricting it. --- packages/google-cloud-pubsub/setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 99e009844290..ca24d2ad194f 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,10 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", + # google-api-core[grpc] 1.17.0 causes problems, thus restricting its + # version until the issue gets fixed. + # https://github.com/googleapis/python-pubsub/issues/74 + "google-api-core[grpc] >= 1.14.0, < 1.17.0", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", 'enum34; python_version < "3.4"', ] From e7cf06125f4a3a5fcebfaf937e391d978ae147f5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 16 Apr 2020 13:40:49 -0400 Subject: [PATCH 0450/1197] chore: release 1.4.3 (#71) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py [ci skip] Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 074f33decc3a..7cd23c1736e8 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ 
-4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### [1.4.3](https://www.github.com/googleapis/python-pubsub/compare/v1.4.2...v1.4.3) (2020-04-16) + + +### Bug Fixes + +* fix docs warnings in Sphinx 3.0+ ([#70](https://www.github.com/googleapis/python-pubsub/issues/70)) ([21e761e](https://www.github.com/googleapis/python-pubsub/commit/21e761ee89a4c03e105dc9cddbab0a34be9a9fda)) +* restrict api-core dependency to < 1.17.0 ([#76](https://www.github.com/googleapis/python-pubsub/issues/76)) ([191b051](https://www.github.com/googleapis/python-pubsub/commit/191b0516335f5c60828a818ba79e99d6c68aa7bd)) + ## [1.4.2](https://www.github.com/googleapis/python-pubsub/compare/v1.4.1...v1.4.2) (2020-03-25) ### Bug Fixes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ca24d2ad194f..8f5b8755af22 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.4.2" +version = "1.4.3" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d235cf742033ac90032a3e399330709f20795664 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 21 Apr 2020 01:49:28 -0700 Subject: [PATCH 0451/1197] feat: add methods for listing snapshots (via synth) (#66) * feat: add methods for listing snapshots (via synth) (#66) * chore: manual regen Co-authored-by: Bu Sun Kim Co-authored-by: Peter Lamut --- packages/google-cloud-pubsub/.coveragerc | 16 + packages/google-cloud-pubsub/.flake8 | 16 + .../.github/ISSUE_TEMPLATE/bug_report.md | 3 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 15 +- packages/google-cloud-pubsub/MANIFEST.in | 16 + .../cloud/pubsub_v1/gapic/publisher_client.py | 129 ++++- .../gapic/publisher_client_config.py | 6 + .../pubsub_v1/gapic/subscriber_client.py | 131 ++++- .../gapic/subscriber_client_config.py | 6 + 
.../transports/publisher_grpc_transport.py | 24 +- .../transports/subscriber_grpc_transport.py | 23 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 217 +++++---- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 455 ++++++++++-------- packages/google-cloud-pubsub/noxfile.py | 5 +- packages/google-cloud-pubsub/setup.cfg | 16 + packages/google-cloud-pubsub/synth.metadata | 32 +- packages/google-cloud-pubsub/synth.py | 2 +- .../unit/gapic/v1/test_publisher_client_v1.py | 55 ++- .../gapic/v1/test_subscriber_client_v1.py | 61 ++- 19 files changed, 874 insertions(+), 354 deletions(-) diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index b178b094aa1d..dd39c8546c41 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [run] branch = True diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 0268ecc9c55c..20fe9bda2ee4 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md index 2c59dab147dc..27057e60ffd0 100644 --- a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md +++ b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md @@ -11,8 +11,7 @@ Thanks for stopping by to let us know something could be better! Please run down the following list and make sure you've tried the usual "quick fixes": - Search the issues already opened: https://github.com/googleapis/python-pubsub/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python If you are still having issues, please be sure to include as much information as possible: diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 4f6294209cd3..7eb8028d56b0 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, and 3.7 on both UNIX and Windows. + 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. 
- The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -214,26 +214,18 @@ We support: - `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ +- `Python 3.8`_ .. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-pubsub/blob/master/noxfile.py -We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ -and lack of continuous integration `support`_. - -.. _Python 2.5: https://docs.python.org/2.5/ -.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ -.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ - -We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no -longer supported by the core development team. - Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version @@ -247,7 +239,6 @@ We also explicitly decided to support Python 3 beginning with version .. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django .. _projects: http://flask.pocoo.org/docs/0.10/python3/ .. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ -.. 
_dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 ********** Versioning diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index cd011be27a0e..68855abc3f02 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 589d64508bbd..1a981fefac1f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -364,10 +364,9 @@ def update_topic( message :class:`~google.cloud.pubsub_v1.types.Topic` update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. Must be specified and non-empty. Note that if ``update_mask`` contains - "message\_storage\_policy" then the new value will be determined based - on the policy configured at the project or organization level. The - ``message_storage_policy`` must not be set in the ``topic`` provided - above. 
+ "message\_storage\_policy" but the ``message_storage_policy`` is not set + in the ``topic`` provided above, then the updated value is determined by + the policy configured at the project or organization level. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.FieldMask` @@ -774,6 +773,113 @@ def list_topic_subscriptions( ) return iterator + def list_topic_snapshots( + self, + topic, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Lists the names of the snapshots on this topic. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') + >>> + >>> # Iterate over all results + >>> for element in client.list_topic_snapshots(topic): + ... # process element + ... pass + >>> + >>> + >>> # Alternatively: + >>> + >>> # Iterate over results one page at a time + >>> for page in client.list_topic_snapshots(topic).pages: + ... for element in page: + ... # process element + ... pass + + Args: + topic (str): Required. The name of the topic that snapshots are attached to. Format + is ``projects/{project}/topics/{topic}``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. 
+ timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.api_core.page_iterator.GRPCIterator` instance. + An iterable of :class:`str` instances. + You can also iterate over the pages of the response + using its `pages` property. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "list_topic_snapshots" not in self._inner_api_calls: + self._inner_api_calls[ + "list_topic_snapshots" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.list_topic_snapshots, + default_retry=self._method_configs["ListTopicSnapshots"].retry, + default_timeout=self._method_configs["ListTopicSnapshots"].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.ListTopicSnapshotsRequest(topic=topic, page_size=page_size) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("topic", topic)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls["list_topic_snapshots"], + retry=retry, + timeout=timeout, + metadata=metadata, + ), + request=request, + items_field="snapshots", + request_token_field="page_token", + response_token_field="next_page_token", + ) + return iterator + def delete_topic( self, topic, @@ -858,15 +964,16 @@ def 
set_iam_policy( Sets the access control policy on the specified resource. Replaces any existing policy. - Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and - PERMISSION_DENIED + Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` + errors. Example: >>> from google.cloud import pubsub_v1 >>> >>> client = pubsub_v1.PublisherClient() >>> - >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> # TODO: Initialize `resource`: + >>> resource = '' >>> >>> # TODO: Initialize `policy`: >>> policy = {} @@ -948,7 +1055,8 @@ def get_iam_policy( >>> >>> client = pubsub_v1.PublisherClient() >>> - >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> # TODO: Initialize `resource`: + >>> resource = '' >>> >>> response = client.get_iam_policy(resource) @@ -1021,7 +1129,7 @@ def test_iam_permissions( """ Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. + permissions, not a `NOT_FOUND` error. 
Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization @@ -1032,7 +1140,8 @@ def test_iam_permissions( >>> >>> client = pubsub_v1.PublisherClient() >>> - >>> resource = client.topic_path('[PROJECT]', '[TOPIC]') + >>> # TODO: Initialize `resource`: + >>> resource = '' >>> >>> # TODO: Initialize `permissions`: >>> permissions = [] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index dceb04a3ccc2..ec8f8baec789 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -4,6 +4,7 @@ "retry_codes": { "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], "non_idempotent": ["UNAVAILABLE"], + "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "none": [], "publish": [ "ABORTED", @@ -73,6 +74,11 @@ "retry_codes_name": "idempotent", "retry_params_name": "default", }, + "ListTopicSnapshots": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent2", + "retry_params_name": "default", + }, "DeleteTopic": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 6516c22644e3..e98a686bf3ae 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -246,7 +246,9 @@ def create_subscription( labels=None, enable_message_ordering=None, expiration_policy=None, + filter_=None, dead_letter_policy=None, + retry_policy=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, @@ -343,6 
+345,13 @@ def create_subscription( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` + filter_ (str): An expression written in the Cloud Pub/Sub filter language. If + non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field + matches the filter are delivered on this subscription. If empty, then no + messages are filtered out. EXPERIMENTAL: This feature is part of a + closed alpha release. This API might be changed in backward-incompatible + ways and is not recommended for production use. It is not subject to any + SLA or deprecation policy. dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages in this subscription. If dead\_letter\_policy is not set, dead lettering is disabled. @@ -351,12 +360,22 @@ def create_subscription( parent project (i.e., service-{project\_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription. - EXPERIMENTAL: This feature is part of a closed alpha release. This API - might be changed in backward-incompatible ways and is not recommended - for production use. It is not subject to any SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.DeadLetterPolicy` + retry_policy (Union[dict, ~google.cloud.pubsub_v1.types.RetryPolicy]): A policy that specifies how Cloud Pub/Sub retries message delivery for this + subscription. + + If not set, the default retry policy is applied. This generally implies + that messages will be retried as soon as possible for healthy subscribers. + RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. + EXPERIMENTAL: This API might be changed in backward-incompatible + ways and is not recommended for production use. 
It is not subject to any + SLA or deprecation policy. + + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.pubsub_v1.types.RetryPolicy` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -397,7 +416,9 @@ def create_subscription( labels=labels, enable_message_ordering=enable_message_ordering, expiration_policy=expiration_policy, + filter=filter_, dead_letter_policy=dead_letter_policy, + retry_policy=retry_policy, ) if metadata is None: metadata = [] @@ -747,6 +768,80 @@ def delete_subscription( request, retry=retry, timeout=timeout, metadata=metadata ) + def get_snapshot( + self, + snapshot, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Gets the configuration details of a snapshot. Snapshots are used in + Seek + operations, which allow you to manage message acknowledgments in bulk. That + is, you can set the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.SubscriberClient() + >>> + >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') + >>> + >>> response = client.get_snapshot(snapshot) + + Args: + snapshot (str): Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. 
+ + Returns: + A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. + if "get_snapshot" not in self._inner_api_calls: + self._inner_api_calls[ + "get_snapshot" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.get_snapshot, + default_retry=self._method_configs["GetSnapshot"].retry, + default_timeout=self._method_configs["GetSnapshot"].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("snapshot", snapshot)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["get_snapshot"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) + def modify_ack_deadline( self, subscription, @@ -950,13 +1045,16 @@ def pull( Args: subscription (str): Required. The subscription from which messages should be pulled. Format is ``projects/{project}/subscriptions/{sub}``. - max_messages (int): Required. The maximum number of messages to return for this request. Must be a - positive integer. The Pub/Sub system may return fewer than the number + max_messages (int): Required. The maximum number of messages to return for this request. Must + be a positive integer. The Pub/Sub system may return fewer than the number specified. - return_immediately (bool): If this field set to true, the system will respond immediately even if - it there are no messages available to return in the ``Pull`` response. 
- Otherwise, the system may wait (for a bounded amount of time) until at - least one message is available, rather than returning no messages. + return_immediately (bool): Optional. If this field set to true, the system will respond immediately + even if it there are no messages available to return in the ``Pull`` + response. Otherwise, the system may wait (for a bounded amount of time) + until at least one message is available, rather than returning no + messages. Warning: setting this field to ``true`` is discouraged because + it adversely impacts the performance of ``Pull`` operations. We + recommend that users do not set this field. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -1648,15 +1746,16 @@ def set_iam_policy( Sets the access control policy on the specified resource. Replaces any existing policy. - Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and - PERMISSION_DENIED + Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` + errors. Example: >>> from google.cloud import pubsub_v1 >>> >>> client = pubsub_v1.SubscriberClient() >>> - >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> # TODO: Initialize `resource`: + >>> resource = '' >>> >>> # TODO: Initialize `policy`: >>> policy = {} @@ -1738,7 +1837,8 @@ def get_iam_policy( >>> >>> client = pubsub_v1.SubscriberClient() >>> - >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> # TODO: Initialize `resource`: + >>> resource = '' >>> >>> response = client.get_iam_policy(resource) @@ -1811,7 +1911,7 @@ def test_iam_permissions( """ Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. + permissions, not a `NOT_FOUND` error. 
Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization @@ -1822,7 +1922,8 @@ def test_iam_permissions( >>> >>> client = pubsub_v1.SubscriberClient() >>> - >>> resource = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> # TODO: Initialize `resource`: + >>> resource = '' >>> >>> # TODO: Initialize `permissions`: >>> permissions = [] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py index 2c8e64b51a1b..fc3254975dae 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py @@ -4,6 +4,7 @@ "retry_codes": { "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], "non_idempotent": ["UNAVAILABLE"], + "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "streaming_pull": [ "ABORTED", "DEADLINE_EXCEEDED", @@ -67,6 +68,11 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "GetSnapshot": { + "timeout_millis": 60000, + "retry_codes_name": "idempotent2", + "retry_params_name": "default", + }, "ModifyAckDeadline": { "timeout_millis": 60000, "retry_codes_name": "non_idempotent", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index 90c5726ff106..a32aa8f7e3ce 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -193,6 +193,24 @@ def list_topic_subscriptions(self): """ return self._stubs["publisher_stub"].ListTopicSubscriptions + @property + def list_topic_snapshots(self): + """Return the gRPC 
stub for :meth:`PublisherClient.list_topic_snapshots`. + + Lists the names of the snapshots on this topic. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["publisher_stub"].ListTopicSnapshots + @property def delete_topic(self): """Return the gRPC stub for :meth:`PublisherClient.delete_topic`. @@ -218,8 +236,8 @@ def set_iam_policy(self): Sets the access control policy on the specified resource. Replaces any existing policy. - Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and - PERMISSION_DENIED + Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` + errors. Returns: Callable: A callable which accepts the appropriate @@ -248,7 +266,7 @@ def test_iam_permissions(self): Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. + permissions, not a `NOT_FOUND` error. 
Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 82af562aef1d..c569051b3975 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -192,6 +192,23 @@ def delete_subscription(self): """ return self._stubs["subscriber_stub"].DeleteSubscription + @property + def get_snapshot(self): + """Return the gRPC stub for :meth:`SubscriberClient.get_snapshot`. + + Gets the configuration details of a snapshot. Snapshots are used in + Seek + operations, which allow you to manage message acknowledgments in bulk. That + is, you can set the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. + """ + return self._stubs["subscriber_stub"].GetSnapshot + @property def modify_ack_deadline(self): """Return the gRPC stub for :meth:`SubscriberClient.modify_ack_deadline`. @@ -392,8 +409,8 @@ def set_iam_policy(self): Sets the access control policy on the specified resource. Replaces any existing policy. - Can return Public Errors: NOT_FOUND, INVALID_ARGUMENT and - PERMISSION_DENIED + Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` + errors. Returns: Callable: A callable which accepts the appropriate @@ -422,7 +439,7 @@ def test_iam_permissions(self): Returns permissions that a caller has on the specified resource. If the resource does not exist, this will return an empty set of - permissions, not a NOT_FOUND error. 
+ permissions, not a `NOT_FOUND` error. Note: This operation is designed to be used for building permission-aware UIs and command-line tools, not for authorization diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index adaf8c6fc3bd..832e0649c9c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -16,6 +16,7 @@ syntax = "proto3"; package google.pubsub.v1; +import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; @@ -23,7 +24,6 @@ import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; -import "google/api/annotations.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.PubSub.V1"; @@ -89,7 +89,8 @@ service Publisher { } // Lists the names of the subscriptions on this topic. - rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse) { + rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) + returns (ListTopicSubscriptionsResponse) { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}/subscriptions" }; @@ -102,10 +103,12 @@ service Publisher { // you to manage message acknowledgments in bulk. That is, you can set the // acknowledgment state of messages in an existing subscription to the state // captured by a snapshot. 
- rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { + rpc ListTopicSnapshots(ListTopicSnapshotsRequest) + returns (ListTopicSnapshotsResponse) { option (google.api.http) = { get: "/v1/{topic=projects/*/topics/*}/snapshots" }; + option (google.api.method_signature) = "topic"; } // Deletes the topic with the given name. Returns `NOT_FOUND` if the topic @@ -208,9 +211,7 @@ message GetTopicRequest { // Format is `projects/{project}/topics/{topic}`. string topic = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Topic" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } ]; } @@ -219,12 +220,13 @@ message UpdateTopicRequest { // Required. The updated topic object. Topic topic = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. Indicates which fields in the provided topic to update. Must be specified - // and non-empty. Note that if `update_mask` contains - // "message_storage_policy" then the new value will be determined based on the - // policy configured at the project or organization level. The - // `message_storage_policy` must not be set in the `topic` provided above. - google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; + // Required. Indicates which fields in the provided topic to update. Must be + // specified and non-empty. Note that if `update_mask` contains + // "message_storage_policy" but the `message_storage_policy` is not set in + // the `topic` provided above, then the updated value is determined by the + // policy configured at the project or organization level. + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = REQUIRED]; } // Request for the Publish method. @@ -233,9 +235,7 @@ message PublishRequest { // Format is `projects/{project}/topics/{topic}`. 
string topic = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Topic" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } ]; // Required. The messages to publish. @@ -286,9 +286,7 @@ message ListTopicSubscriptionsRequest { // Format is `projects/{project}/topics/{topic}`. string topic = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Topic" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } ]; // Maximum number of subscription names to return. @@ -304,8 +302,8 @@ message ListTopicSubscriptionsRequest { message ListTopicSubscriptionsResponse { // The names of the subscriptions that match the request. repeated string subscriptions = 1 [(google.api.resource_reference) = { - type: "pubsub.googleapis.com/Subscription" - }]; + type: "pubsub.googleapis.com/Subscription" + }]; // If not empty, indicates that there may be more subscriptions that match // the request; this value should be passed in a new @@ -315,9 +313,12 @@ message ListTopicSubscriptionsResponse { // Request for the `ListTopicSnapshots` method. message ListTopicSnapshotsRequest { - // The name of the topic that snapshots are attached to. + // Required. The name of the topic that snapshots are attached to. // Format is `projects/{project}/topics/{topic}`. - string topic = 1; + string topic = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } + ]; // Maximum number of snapshot names to return. int32 page_size = 2; @@ -345,9 +346,7 @@ message DeleteTopicRequest { // Format is `projects/{project}/topics/{topic}`. 
string topic = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Topic" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } ]; } @@ -378,7 +377,8 @@ service Subscriber { put: "/v1/{name=projects/*/subscriptions/*}" body: "*" }; - option (google.api.method_signature) = "name,topic,push_config,ack_deadline_seconds"; + option (google.api.method_signature) = + "name,topic,push_config,ack_deadline_seconds"; } // Gets the configuration details of a subscription. @@ -399,7 +399,8 @@ service Subscriber { } // Lists matching subscriptions. - rpc ListSubscriptions(ListSubscriptionsRequest) returns (ListSubscriptionsResponse) { + rpc ListSubscriptions(ListSubscriptionsRequest) + returns (ListSubscriptionsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/subscriptions" }; @@ -411,7 +412,8 @@ service Subscriber { // `NOT_FOUND`. After a subscription is deleted, a new one may be created with // the same name, but the new one has no association with the old // subscription or its topic unless the same topic is specified. - rpc DeleteSubscription(DeleteSubscriptionRequest) returns (google.protobuf.Empty) { + rpc DeleteSubscription(DeleteSubscriptionRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { delete: "/v1/{subscription=projects/*/subscriptions/*}" }; @@ -423,12 +425,14 @@ service Subscriber { // subscriber, or to make the message available for redelivery if the // processing was interrupted. Note that this does not modify the // subscription-level `ackDeadlineSeconds` used for subsequent messages. 
- rpc ModifyAckDeadline(ModifyAckDeadlineRequest) returns (google.protobuf.Empty) { + rpc ModifyAckDeadline(ModifyAckDeadlineRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline" body: "*" }; - option (google.api.method_signature) = "subscription,ack_ids,ack_deadline_seconds"; + option (google.api.method_signature) = + "subscription,ack_ids,ack_deadline_seconds"; } // Acknowledges the messages associated with the `ack_ids` in the @@ -454,7 +458,8 @@ service Subscriber { post: "/v1/{subscription=projects/*/subscriptions/*}:pull" body: "*" }; - option (google.api.method_signature) = "subscription,return_immediately,max_messages"; + option (google.api.method_signature) = + "subscription,return_immediately,max_messages"; } // Establishes a stream with the server, which sends messages down to the @@ -464,8 +469,8 @@ service Subscriber { // reassign server-side resources, in which case, the client should // re-establish the stream. Flow control can be achieved by configuring the // underlying RPC channel. - rpc StreamingPull(stream StreamingPullRequest) returns (stream StreamingPullResponse) { - } + rpc StreamingPull(stream StreamingPullRequest) + returns (stream StreamingPullResponse) {} // Modifies the `PushConfig` for a specified subscription. // @@ -473,7 +478,8 @@ service Subscriber { // an empty `PushConfig`) or vice versa, or change the endpoint URL and other // attributes of a push subscription. Messages will accumulate for delivery // continuously through the call regardless of changes to the `PushConfig`. 
- rpc ModifyPushConfig(ModifyPushConfigRequest) returns (google.protobuf.Empty) { + rpc ModifyPushConfig(ModifyPushConfigRequest) + returns (google.protobuf.Empty) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig" body: "*" @@ -490,6 +496,7 @@ service Subscriber { option (google.api.http) = { get: "/v1/{snapshot=projects/*/snapshots/*}" }; + option (google.api.method_signature) = "snapshot"; } // Lists the existing snapshots. Snapshots are used in @@ -592,15 +599,12 @@ message Subscription { // in length, and it must not start with `"goog"`. string name = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The name of the topic from which this subscription is receiving messages. - // Format is `projects/{project}/topics/{topic}`. - // The value of this field will be `_deleted-topic_` if the topic has been - // deleted. + // Required. The name of the topic from which this subscription is receiving + // messages. Format is `projects/{project}/topics/{topic}`. The value of this + // field will be `_deleted-topic_` if the topic has been deleted. string topic = 2 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Topic" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } ]; // If push delivery is used with this subscription, this field is @@ -668,6 +672,15 @@ message Subscription { // value for `expiration_policy.ttl` is 1 day. ExpirationPolicy expiration_policy = 11; + // An expression written in the Cloud Pub/Sub filter language. If non-empty, + // then only `PubsubMessage`s whose `attributes` field matches the filter are + // delivered on this subscription. If empty, then no messages are filtered + // out. + // EXPERIMENTAL: This feature is part of a closed alpha release. This + // API might be changed in backward-incompatible ways and is not recommended + // for production use. 
It is not subject to any SLA or deprecation policy. + string filter = 12; + // A policy that specifies the conditions for dead lettering messages in // this subscription. If dead_letter_policy is not set, dead lettering // is disabled. @@ -676,10 +689,40 @@ message Subscription { // parent project (i.e., // service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have // permission to Acknowledge() messages on this subscription. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. DeadLetterPolicy dead_letter_policy = 13; + + // A policy that specifies how Cloud Pub/Sub retries message delivery for this + // subscription. + // + // If not set, the default retry policy is applied. This generally implies + // that messages will be retried as soon as possible for healthy subscribers. + // RetryPolicy will be triggered on NACKs or acknowledgement deadline + // exceeded events for a given message. + // EXPERIMENTAL: This API might be changed in backward-incompatible + // ways and is not recommended for production use. It is not subject to any + // SLA or deprecation policy. + RetryPolicy retry_policy = 14; +} + +// A policy that specifies how Cloud Pub/Sub retries message delivery. +// +// Retry delay will be exponential based on provided minimum and maximum +// backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. +// +// RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded +// events for a given message. +// +// Retry Policy is implemented on a best effort basis. At times, the delay +// between consecutive deliveries may not match the configuration. That is, +// delay can be more or less than configured backoff. +message RetryPolicy { + // The minimum delay between consecutive deliveries of a given message. + // Value should be between 0 and 600 seconds. 
Defaults to 10 seconds. + google.protobuf.Duration minimum_backoff = 1; + + // The maximum delay between consecutive deliveries of a given message. + // Value should be between 0 and 600 seconds. Defaults to 600 seconds. + google.protobuf.Duration maximum_backoff = 2; } // Dead lettering is done on a best effort basis. The same message might be @@ -750,7 +793,7 @@ message PushConfig { } // A URL locating the endpoint to which messages should be pushed. - // For example, a Webhook endpoint might use "https://example.com/push". + // For example, a Webhook endpoint might use `https://example.com/push`. string push_endpoint = 1; // Endpoint configuration attributes that can be used to control different @@ -834,7 +877,8 @@ message UpdateSubscriptionRequest { // Required. Indicates which fields in the provided subscription to update. // Must be specified and non-empty. - google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = REQUIRED]; } // Request for the `ListSubscriptions` method. @@ -911,14 +955,18 @@ message PullRequest { } ]; - // If this field set to true, the system will respond immediately even if - // it there are no messages available to return in the `Pull` response. - // Otherwise, the system may wait (for a bounded amount of time) until at - // least one message is available, rather than returning no messages. - bool return_immediately = 2; - - // Required. The maximum number of messages to return for this request. Must be a - // positive integer. The Pub/Sub system may return fewer than the number + // Optional. If this field set to true, the system will respond immediately + // even if it there are no messages available to return in the `Pull` + // response. Otherwise, the system may wait (for a bounded amount of time) + // until at least one message is available, rather than returning no messages. 
+ // Warning: setting this field to `true` is discouraged because it adversely + // impacts the performance of `Pull` operations. We recommend that users do + // not set this field. + bool return_immediately = 2 + [deprecated = true, (google.api.field_behavior) = OPTIONAL]; + + // Required. The maximum number of messages to return for this request. Must + // be a positive integer. The Pub/Sub system may return fewer than the number // specified. int32 max_messages = 3 [(google.api.field_behavior) = REQUIRED]; } @@ -946,10 +994,10 @@ message ModifyAckDeadlineRequest { // Required. List of acknowledgment IDs. repeated string ack_ids = 4 [(google.api.field_behavior) = REQUIRED]; - // Required. The new ack deadline with respect to the time this request was sent to - // the Pub/Sub system. For example, if the value is 10, the new - // ack deadline will expire 10 seconds after the `ModifyAckDeadline` call - // was made. Specifying zero might immediately make the message available for + // Required. The new ack deadline with respect to the time this request was + // sent to the Pub/Sub system. For example, if the value is 10, the new ack + // deadline will expire 10 seconds after the `ModifyAckDeadline` call was + // made. Specifying zero might immediately make the message available for // delivery to another subscriber client. This typically results in an // increase in the rate of message redeliveries (that is, duplicates). // The minimum deadline you can specify is 0 seconds. @@ -968,8 +1016,9 @@ message AcknowledgeRequest { } ]; - // Required. The acknowledgment ID for the messages being acknowledged that was returned - // by the Pub/Sub system in the `Pull` response. Must not be empty. + // Required. The acknowledgment ID for the messages being acknowledged that + // was returned by the Pub/Sub system in the `Pull` response. Must not be + // empty. 
repeated string ack_ids = 2 [(google.api.field_behavior) = REQUIRED]; } @@ -977,8 +1026,8 @@ message AcknowledgeRequest { // establish the initial stream as well as to stream acknowledgements and ack // deadline modifications from the client to the server. message StreamingPullRequest { - // Required. The subscription for which to initialize the new stream. This must be - // provided in the first request on the stream, and must not be set in + // Required. The subscription for which to initialize the new stream. This + // must be provided in the first request on the stream, and must not be set in // subsequent requests from client to server. // Format is `projects/{project}/subscriptions/{sub}`. string subscription = 1 [ @@ -1015,11 +1064,12 @@ message StreamingPullRequest { // processing was interrupted. repeated string modify_deadline_ack_ids = 4; - // Required. The ack deadline to use for the stream. This must be provided in the - // first request on the stream, but it can also be updated on subsequent + // Required. The ack deadline to use for the stream. This must be provided in + // the first request on the stream, but it can also be updated on subsequent // requests from client to server. The minimum deadline you can specify is 10 // seconds. The maximum deadline you can specify is 600 seconds (10 minutes). - int32 stream_ack_deadline_seconds = 5 [(google.api.field_behavior) = REQUIRED]; + int32 stream_ack_deadline_seconds = 5 + [(google.api.field_behavior) = REQUIRED]; // A unique identifier that is used to distinguish client instances from each // other. Only needs to be provided on the initial request. When a stream @@ -1039,17 +1089,15 @@ message StreamingPullResponse { // Request for the `CreateSnapshot` method. message CreateSnapshotRequest { - // Required. User-provided name for this snapshot. If the name is not provided in the - // request, the server will assign a random name for this snapshot on the same - // project as the subscription. 
Note that for REST API requests, you must - // specify a name. See the resource // name rules. Format is `projects/{project}/snapshots/{snap}`. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Snapshot" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Snapshot" } ]; // Required. The subscription whose backlog the snapshot retains. @@ -1080,7 +1128,8 @@ message UpdateSnapshotRequest { // Required. Indicates which fields in the provided snapshot to update. // Must be specified and non-empty. - google.protobuf.FieldMask update_mask = 2 [(google.api.field_behavior) = REQUIRED]; + google.protobuf.FieldMask update_mask = 2 + [(google.api.field_behavior) = REQUIRED]; } // A snapshot resource. Snapshots are used in @@ -1099,9 +1148,9 @@ message Snapshot { string name = 1; // The name of the topic from which this snapshot is retaining messages. - string topic = 2 [(google.api.resource_reference) = { - type: "pubsub.googleapis.com/Topic" - }]; + string topic = 2 [ + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Topic" } + ]; // The snapshot is guaranteed to exist up until this time. // A newly-created snapshot expires no later than 7 days from the time of its @@ -1126,9 +1175,7 @@ message GetSnapshotRequest { // Format is `projects/{project}/snapshots/{snap}`. string snapshot = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Snapshot" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Snapshot" } ]; } @@ -1168,9 +1215,7 @@ message DeleteSnapshotRequest { // Format is `projects/{project}/snapshots/{snap}`. 
string snapshot = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "pubsub.googleapis.com/Snapshot" - } + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Snapshot" } ]; } @@ -1202,12 +1247,10 @@ message SeekRequest { // the provided subscription. // Format is `projects/{project}/snapshots/{snap}`. string snapshot = 3 [(google.api.resource_reference) = { - type: "pubsub.googleapis.com/Snapshot" - }]; + type: "pubsub.googleapis.com/Snapshot" + }]; } } // Response for the `Seek` method (this response is empty). -message SeekResponse { - -} +message SeekResponse {} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 4843bbdbeb09..4729f11ee306 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -15,6 +15,7 @@ _sym_db = _symbol_database.Default() +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import client_pb2 as google_dot_api_dot_client__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 @@ -22,7 +23,6 @@ from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -33,9 +33,10 @@ "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" ), serialized_pb=_b( - 
'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 
\x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x19ListTopicSnapshotsRequest\x12\r\n\x05topic\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"\xe9\x04\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 
\x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x86\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x1a\n\x12return_immediately\x18\x02 \x01(\x08\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xe8\x01\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 
\n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xeb\t\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xa2\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"1\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\xf7\x14\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\
xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12~\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotReq
uest\x1a\x1a.google.pubsub.v1.Snapshot"-\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' + '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 
\x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 
\x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"\xae\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 
\x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 
\x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xe8\x01\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xf3\t\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscr
iptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x8
2\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' ), dependencies=[ + google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, google_dot_api_dot_resource__pb2.DESCRIPTOR, @@ -43,7 +44,6 @@ google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_api_dot_annotations__pb2.DESCRIPTOR, ], ) @@ -891,7 +891,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b( + "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" + ), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -940,7 +942,7 @@ extension_ranges=[], oneofs=[], serialized_start=1744, - serialized_end=1825, + serialized_end=1862, ) @@ -996,8 +998,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1827, - serialized_end=1899, + serialized_start=1864, + serialized_end=1936, ) 
@@ -1037,8 +1039,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1901, - serialized_end=1973, + serialized_start=1938, + serialized_end=2010, ) @@ -1269,10 +1271,28 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="filter", + full_name="google.pubsub.v1.Subscription.filter", + index=9, + number=12, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=_b("").decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), _descriptor.FieldDescriptor( name="dead_letter_policy", full_name="google.pubsub.v1.Subscription.dead_letter_policy", - index=9, + index=10, number=13, type=11, cpp_type=10, @@ -1287,6 +1307,24 @@ serialized_options=None, file=DESCRIPTOR, ), + _descriptor.FieldDescriptor( + name="retry_policy", + full_name="google.pubsub.v1.Subscription.retry_policy", + index=11, + number=14, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), ], extensions=[], nested_types=[_SUBSCRIPTION_LABELSENTRY], @@ -1298,8 +1336,65 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1976, - serialized_end=2593, + serialized_start=2013, + serialized_end=2699, +) + + +_RETRYPOLICY = _descriptor.Descriptor( + name="RetryPolicy", + full_name="google.pubsub.v1.RetryPolicy", + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name="minimum_backoff", + full_name="google.pubsub.v1.RetryPolicy.minimum_backoff", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + 
serialized_options=None, + file=DESCRIPTOR, + ), + _descriptor.FieldDescriptor( + name="maximum_backoff", + full_name="google.pubsub.v1.RetryPolicy.maximum_backoff", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2701, + serialized_end=2818, ) @@ -1355,8 +1450,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2595, - serialized_end=2671, + serialized_start=2820, + serialized_end=2896, ) @@ -1394,8 +1489,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2673, - serialized_end=2731, + serialized_start=2898, + serialized_end=2956, ) @@ -1451,8 +1546,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2899, - serialized_end=2959, + serialized_start=3124, + serialized_end=3184, ) _PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( @@ -1589,8 +1684,8 @@ fields=[], ) ], - serialized_start=2734, - serialized_end=3035, + serialized_start=2959, + serialized_end=3260, ) @@ -1664,8 +1759,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3037, - serialized_end=3146, + serialized_start=3262, + serialized_end=3371, ) @@ -1705,8 +1800,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3148, - serialized_end=3238, + serialized_start=3373, + serialized_end=3463, ) @@ -1762,8 +1857,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3241, - serialized_end=3381, + serialized_start=3466, + serialized_end=3606, ) @@ -1839,8 +1934,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3384, - serialized_end=3519, + serialized_start=3609, + 
serialized_end=3744, ) @@ -1896,8 +1991,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3521, - serialized_end=3628, + serialized_start=3746, + serialized_end=3853, ) @@ -1937,8 +2032,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3630, - serialized_end=3723, + serialized_start=3855, + serialized_end=3948, ) @@ -1996,8 +2091,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3726, - serialized_end=3873, + serialized_start=3951, + serialized_end=4098, ) @@ -2043,7 +2138,7 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=_b("\030\001\340A\001"), file=DESCRIPTOR, ), _descriptor.FieldDescriptor( @@ -2073,8 +2168,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3876, - serialized_end=4010, + serialized_start=4101, + serialized_end=4242, ) @@ -2112,8 +2207,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4012, - serialized_end=4088, + serialized_start=4244, + serialized_end=4320, ) @@ -2189,8 +2284,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4091, - serialized_end=4240, + serialized_start=4323, + serialized_end=4472, ) @@ -2248,8 +2343,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4242, - serialized_end=4350, + serialized_start=4474, + serialized_end=4582, ) @@ -2379,8 +2474,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4353, - serialized_end=4585, + serialized_start=4585, + serialized_end=4817, ) @@ -2418,8 +2513,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4587, - serialized_end=4672, + serialized_start=4819, + serialized_end=4904, ) @@ -2553,8 +2648,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4675, - serialized_end=4934, + serialized_start=4907, + serialized_end=5166, ) @@ -2610,8 +2705,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - 
serialized_start=4937, - serialized_end=5065, + serialized_start=5169, + serialized_end=5297, ) @@ -2761,8 +2856,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5068, - serialized_end=5371, + serialized_start=5300, + serialized_end=5603, ) @@ -2802,8 +2897,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5373, - serialized_end=5451, + serialized_start=5605, + serialized_end=5683, ) @@ -2879,8 +2974,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5454, - serialized_end=5585, + serialized_start=5686, + serialized_end=5817, ) @@ -2936,8 +3031,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5587, - serialized_end=5682, + serialized_start=5819, + serialized_end=5914, ) @@ -2977,8 +3072,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5684, - serialized_end=5765, + serialized_start=5916, + serialized_end=5997, ) @@ -3062,8 +3157,8 @@ fields=[], ) ], - serialized_start=5768, - serialized_end=5958, + serialized_start=6000, + serialized_end=6190, ) @@ -3082,8 +3177,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5960, - serialized_end=5974, + serialized_start=6192, + serialized_end=6206, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3110,6 +3205,13 @@ _SUBSCRIPTION.fields_by_name["labels"].message_type = _SUBSCRIPTION_LABELSENTRY _SUBSCRIPTION.fields_by_name["expiration_policy"].message_type = _EXPIRATIONPOLICY _SUBSCRIPTION.fields_by_name["dead_letter_policy"].message_type = _DEADLETTERPOLICY +_SUBSCRIPTION.fields_by_name["retry_policy"].message_type = _RETRYPOLICY +_RETRYPOLICY.fields_by_name[ + "minimum_backoff" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_RETRYPOLICY.fields_by_name[ + "maximum_backoff" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION _EXPIRATIONPOLICY.fields_by_name[ "ttl" ].message_type = google_dot_protobuf_dot_duration__pb2._DURATION @@ -3184,6 +3286,7 @@ ] = 
_LISTTOPICSNAPSHOTSRESPONSE DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION +DESCRIPTOR.message_types_by_name["RetryPolicy"] = _RETRYPOLICY DESCRIPTOR.message_types_by_name["DeadLetterPolicy"] = _DEADLETTERPOLICY DESCRIPTOR.message_types_by_name["ExpirationPolicy"] = _EXPIRATIONPOLICY DESCRIPTOR.message_types_by_name["PushConfig"] = _PUSHCONFIG @@ -3256,8 +3359,6 @@ DESCRIPTOR=_TOPIC, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""A topic resource. - - Attributes: name: Required. The name of the topic. It must have the format @@ -3300,14 +3401,12 @@ ), DESCRIPTOR=_PUBSUBMESSAGE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A message that is published by publishers and consumed by - subscribers. The message must contain either a non-empty data field or - at least one attribute. Note that client libraries represent this object + __doc__="""A message that is published by publishers and consumed by subscribers. + The message must contain either a non-empty data field or at least one + attribute. Note that client libraries represent this object differently depending on the language. See the corresponding client library documentation for more information. See Quotas and limits for more information about message limits. - - Attributes: data: The message data field. If this field is empty, the message @@ -3351,8 +3450,6 @@ DESCRIPTOR=_GETTOPICREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the GetTopic method. - - Attributes: topic: Required. The name of the topic to get. Format is @@ -3370,19 +3467,16 @@ DESCRIPTOR=_UPDATETOPICREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the UpdateTopic method. - - Attributes: topic: Required. The updated topic object. update_mask: Required. Indicates which fields in the provided topic to update. Must be specified and non-empty. 
Note that if - ``update_mask`` contains "message\_storage\_policy" then the - new value will be determined based on the policy configured at - the project or organization level. The - ``message_storage_policy`` must not be set in the ``topic`` - provided above. + ``update_mask`` contains "message\_storage\_policy" but the + ``message_storage_policy`` is not set in the ``topic`` + provided above, then the updated value is determined by the + policy configured at the project or organization level. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) ), @@ -3396,8 +3490,6 @@ DESCRIPTOR=_PUBLISHREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the Publish method. - - Attributes: topic: Required. The messages in the request will be published on @@ -3417,8 +3509,6 @@ DESCRIPTOR=_PUBLISHRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``Publish`` method. - - Attributes: message_ids: The server-assigned ID of each published message, in the same @@ -3437,8 +3527,6 @@ DESCRIPTOR=_LISTTOPICSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``ListTopics`` method. - - Attributes: project: Required. The name of the project in which to list topics. @@ -3463,8 +3551,6 @@ DESCRIPTOR=_LISTTOPICSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``ListTopics`` method. - - Attributes: topics: The resulting topics. @@ -3485,8 +3571,6 @@ DESCRIPTOR=_LISTTOPICSUBSCRIPTIONSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``ListTopicSubscriptions`` method. - - Attributes: topic: Required. The name of the topic that subscriptions are @@ -3511,8 +3595,6 @@ DESCRIPTOR=_LISTTOPICSUBSCRIPTIONSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``ListTopicSubscriptions`` method. 
- - Attributes: subscriptions: The names of the subscriptions that match the request. @@ -3533,12 +3615,10 @@ DESCRIPTOR=_LISTTOPICSNAPSHOTSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``ListTopicSnapshots`` method. - - Attributes: topic: - The name of the topic that snapshots are attached to. Format - is ``projects/{project}/topics/{topic}``. + Required. The name of the topic that snapshots are attached + to. Format is ``projects/{project}/topics/{topic}``. page_size: Maximum number of snapshot names to return. page_token: @@ -3559,8 +3639,6 @@ DESCRIPTOR=_LISTTOPICSNAPSHOTSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``ListTopicSnapshots`` method. - - Attributes: snapshots: The names of the snapshots that match the request. @@ -3581,8 +3659,6 @@ DESCRIPTOR=_DELETETOPICREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``DeleteTopic`` method. - - Attributes: topic: Required. Name of the topic to delete. Format is @@ -3609,8 +3685,6 @@ DESCRIPTOR=_SUBSCRIPTION, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""A subscription resource. - - Attributes: name: Required. The name of the subscription. It must have the @@ -3681,6 +3755,15 @@ If ``expiration_policy`` is not set, a *default policy* with ``ttl`` of 31 days will be used. The minimum allowed value for ``expiration_policy.ttl`` is 1 day. + filter: + An expression written in the Cloud Pub/Sub filter language. If + non-empty, then only ``PubsubMessage``\ s whose ``attributes`` + field matches the filter are delivered on this subscription. + If empty, then no messages are filtered out. EXPERIMENTAL: + This feature is part of a closed alpha release. This API might + be changed in backward-incompatible ways and is not + recommended for production use. It is not subject to any SLA + or deprecation policy. 
dead_letter_policy: A policy that specifies the conditions for dead lettering messages in this subscription. If dead\_letter\_policy is not @@ -3688,11 +3771,17 @@ account associated with this subscriptions's parent project (i.e., service-{project\_number}@gcp-sa- pubsub.iam.gserviceaccount.com) must have permission to - Acknowledge() messages on this subscription. EXPERIMENTAL: - This feature is part of a closed alpha release. This API might - be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA - or deprecation policy. + Acknowledge() messages on this subscription. + retry_policy: + A policy that specifies how Cloud Pub/Sub retries message + delivery for this subscription. If not set, the default retry + policy is applied. This generally implies that messages will + be retried as soon as possible for healthy subscribers. + RetryPolicy will be triggered on NACKs or acknowledgement + deadline exceeded events for a given message. EXPERIMENTAL: + This API might be changed in backward-incompatible ways and is + not recommended for production use. It is not subject to any + SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) ), @@ -3700,19 +3789,45 @@ _sym_db.RegisterMessage(Subscription) _sym_db.RegisterMessage(Subscription.LabelsEntry) +RetryPolicy = _reflection.GeneratedProtocolMessageType( + "RetryPolicy", + (_message.Message,), + dict( + DESCRIPTOR=_RETRYPOLICY, + __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", + __doc__="""A policy that specifies how Cloud Pub/Sub retries message delivery. + Retry delay will be exponential based on provided minimum and maximum + backoffs. https://en.wikipedia.org/wiki/Exponential\_backoff. + RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. Retry Policy is implemented on a + best effort basis. 
At times, the delay between consecutive deliveries + may not match the configuration. That is, delay can be more or less + than configured backoff. + Attributes: + minimum_backoff: + The minimum delay between consecutive deliveries of a given + message. Value should be between 0 and 600 seconds. Defaults + to 10 seconds. + maximum_backoff: + The maximum delay between consecutive deliveries of a given + message. Value should be between 0 and 600 seconds. Defaults + to 600 seconds. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.RetryPolicy) + ), +) +_sym_db.RegisterMessage(RetryPolicy) + DeadLetterPolicy = _reflection.GeneratedProtocolMessageType( "DeadLetterPolicy", (_message.Message,), dict( DESCRIPTOR=_DEADLETTERPOLICY, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Dead lettering is done on a best effort basis. The same - message might be dead lettered multiple times. - - If validation on any of the fields fails at subscription - creation/updation, the create/update subscription request will fail. - - + __doc__="""Dead lettering is done on a best effort basis. The same message might + be dead lettered multiple times. If validation on any of the fields + fails at subscription creation/updation, the create/update + subscription request will fail. Attributes: dead_letter_topic: The name of the topic to which dead letter messages should be @@ -3747,10 +3862,8 @@ dict( DESCRIPTOR=_EXPIRATIONPOLICY, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A policy that specifies the conditions for resource - expiration (i.e., automatic resource deletion). - - + __doc__="""A policy that specifies the conditions for resource expiration (i.e., + automatic resource deletion). 
Attributes: ttl: Specifies the "time-to-live" duration for an associated @@ -3776,11 +3889,8 @@ dict( DESCRIPTOR=_PUSHCONFIG_OIDCTOKEN, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Contains information needed for generating an `OpenID - Connect - token `__. - - + __doc__="""Contains information needed for generating an `OpenID Connect token + `__. Attributes: service_account_email: \ `Service account email @@ -3813,13 +3923,11 @@ DESCRIPTOR=_PUSHCONFIG, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Configuration for a push delivery endpoint. - - Attributes: push_endpoint: A URL locating the endpoint to which messages should be pushed. For example, a Webhook endpoint might use - "https://example.com/push". + ``https://example.com/push``. attributes: Endpoint configuration attributes that can be used to control different aspects of the message delivery. The only currently @@ -3864,8 +3972,6 @@ DESCRIPTOR=_RECEIVEDMESSAGE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""A message and its corresponding acknowledgment ID. - - Attributes: ack_id: This ID can be used to acknowledge the received message. @@ -3899,8 +4005,6 @@ DESCRIPTOR=_GETSUBSCRIPTIONREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the GetSubscription method. - - Attributes: subscription: Required. The name of the subscription to get. Format is @@ -3918,8 +4022,6 @@ DESCRIPTOR=_UPDATESUBSCRIPTIONREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the UpdateSubscription method. - - Attributes: subscription: Required. The updated subscription object. @@ -3939,8 +4041,6 @@ DESCRIPTOR=_LISTSUBSCRIPTIONSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``ListSubscriptions`` method. - - Attributes: project: Required. 
The name of the project in which to list @@ -3965,8 +4065,6 @@ DESCRIPTOR=_LISTSUBSCRIPTIONSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``ListSubscriptions`` method. - - Attributes: subscriptions: The subscriptions that match the request. @@ -3987,8 +4085,6 @@ DESCRIPTOR=_DELETESUBSCRIPTIONREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the DeleteSubscription method. - - Attributes: subscription: Required. The subscription to delete. Format is @@ -4006,8 +4102,6 @@ DESCRIPTOR=_MODIFYPUSHCONFIGREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ModifyPushConfig method. - - Attributes: subscription: Required. The name of the subscription. Format is @@ -4032,18 +4126,19 @@ DESCRIPTOR=_PULLREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``Pull`` method. - - Attributes: subscription: Required. The subscription from which messages should be pulled. Format is ``projects/{project}/subscriptions/{sub}``. return_immediately: - If this field set to true, the system will respond immediately - even if it there are no messages available to return in the - ``Pull`` response. Otherwise, the system may wait (for a - bounded amount of time) until at least one message is - available, rather than returning no messages. + Optional. If this field set to true, the system will respond + immediately even if it there are no messages available to + return in the ``Pull`` response. Otherwise, the system may + wait (for a bounded amount of time) until at least one message + is available, rather than returning no messages. Warning: + setting this field to ``true`` is discouraged because it + adversely impacts the performance of ``Pull`` operations. We + recommend that users do not set this field. max_messages: Required. The maximum number of messages to return for this request. Must be a positive integer. 
The Pub/Sub system may @@ -4061,8 +4156,6 @@ DESCRIPTOR=_PULLRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``Pull`` method. - - Attributes: received_messages: Received Pub/Sub messages. The list will be empty if there are @@ -4083,8 +4176,6 @@ DESCRIPTOR=_MODIFYACKDEADLINEREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ModifyAckDeadline method. - - Attributes: subscription: Required. The name of the subscription. Format is @@ -4114,8 +4205,6 @@ DESCRIPTOR=_ACKNOWLEDGEREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the Acknowledge method. - - Attributes: subscription: Required. The subscription whose message is being @@ -4137,12 +4226,10 @@ dict( DESCRIPTOR=_STREAMINGPULLREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``StreamingPull`` streaming RPC method. - This request is used to establish the initial stream as well as to - stream acknowledgements and ack deadline modifications from the client - to the server. - - + __doc__="""Request for the ``StreamingPull`` streaming RPC method. This request + is used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to the + server. Attributes: subscription: Required. The subscription for which to initialize the new @@ -4205,10 +4292,8 @@ dict( DESCRIPTOR=_STREAMINGPULLRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``StreamingPull`` method. This response - is used to stream messages from the server to the client. - - + __doc__="""Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. Attributes: received_messages: Received Pub/Sub messages. This will not be empty. 
@@ -4234,8 +4319,6 @@ DESCRIPTOR=_CREATESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``CreateSnapshot`` method. - - Attributes: name: Required. User-provided name for this snapshot. If the name is @@ -4270,8 +4353,6 @@ DESCRIPTOR=_UPDATESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the UpdateSnapshot method. - - Attributes: snapshot: Required. The updated snapshot object. @@ -4299,12 +4380,10 @@ ), DESCRIPTOR=_SNAPSHOT, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A snapshot resource. Snapshots are used in Seek - operations, which allow you to manage message acknowledgments in bulk. - That is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - + __doc__="""A snapshot resource. Snapshots are used in Seek operations, which + allow you to manage message acknowledgments in bulk. That is, you can + set the acknowledgment state of messages in an existing subscription + to the state captured by a snapshot. Attributes: name: The name of the snapshot. @@ -4340,8 +4419,6 @@ DESCRIPTOR=_GETSNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the GetSnapshot method. - - Attributes: snapshot: Required. The name of the snapshot to get. Format is @@ -4359,8 +4436,6 @@ DESCRIPTOR=_LISTSNAPSHOTSREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``ListSnapshots`` method. - - Attributes: project: Required. The name of the project in which to list snapshots. @@ -4385,8 +4460,6 @@ DESCRIPTOR=_LISTSNAPSHOTSRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Response for the ``ListSnapshots`` method. - - Attributes: snapshots: The resulting snapshots. 
@@ -4407,8 +4480,6 @@ DESCRIPTOR=_DELETESNAPSHOTREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``DeleteSnapshot`` method. - - Attributes: snapshot: Required. The name of the snapshot to delete. Format is @@ -4426,8 +4497,6 @@ DESCRIPTOR=_SEEKREQUEST, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", __doc__="""Request for the ``Seek`` method. - - Attributes: subscription: Required. The subscription to affect. @@ -4460,9 +4529,7 @@ dict( DESCRIPTOR=_SEEKRESPONSE, __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``Seek`` method (this response is empty). - - """, + __doc__="""Response for the ``Seek`` method (this response is empty).""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) ), ) @@ -4482,6 +4549,7 @@ _LISTTOPICSREQUEST.fields_by_name["project"]._options = None _LISTTOPICSUBSCRIPTIONSREQUEST.fields_by_name["topic"]._options = None _LISTTOPICSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"]._options = None +_LISTTOPICSNAPSHOTSREQUEST.fields_by_name["topic"]._options = None _DELETETOPICREQUEST.fields_by_name["topic"]._options = None _SUBSCRIPTION_LABELSENTRY._options = None _SUBSCRIPTION.fields_by_name["name"]._options = None @@ -4496,6 +4564,7 @@ _MODIFYPUSHCONFIGREQUEST.fields_by_name["subscription"]._options = None _MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"]._options = None _PULLREQUEST.fields_by_name["subscription"]._options = None +_PULLREQUEST.fields_by_name["return_immediately"]._options = None _PULLREQUEST.fields_by_name["max_messages"]._options = None _MODIFYACKDEADLINEREQUEST.fields_by_name["subscription"]._options = None _MODIFYACKDEADLINEREQUEST.fields_by_name["ack_ids"]._options = None @@ -4526,8 +4595,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=5977, - serialized_end=7236, + serialized_start=6209, + 
serialized_end=7476, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4603,7 +4672,7 @@ input_type=_LISTTOPICSNAPSHOTSREQUEST, output_type=_LISTTOPICSNAPSHOTSRESPONSE, serialized_options=_b( - "\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots" + "\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots\332A\005topic" ), ), _descriptor.MethodDescriptor( @@ -4632,8 +4701,8 @@ serialized_options=_b( "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" ), - serialized_start=7239, - serialized_end=9918, + serialized_start=7479, + serialized_end=10170, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", @@ -4751,7 +4820,7 @@ input_type=_GETSNAPSHOTREQUEST, output_type=_SNAPSHOT, serialized_options=_b( - "\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}" + "\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot" ), ), _descriptor.MethodDescriptor( diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 03c570f571a6..e48f0680a6b7 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -111,8 +111,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install("mock", "pytest", "psutil") - - session.install("-e", "test_utils") + session.install("git+https://github.com/googleapis/python-test-utils") session.install("-e", ".") # Run py.test against the system tests. 
@@ -140,7 +139,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg index 3bd555500e37..c3a2b39f6528 100644 --- a/packages/google-cloud-pubsub/setup.cfg +++ b/packages/google-cloud-pubsub/setup.cfg @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! 
[bdist_wheel] universal = 1 diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 05b0eeac24a1..c38b8778df06 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,27 +1,39 @@ { - "updateTime": "2020-03-04T13:26:32.035995Z", "sources": [ { "generator": { "name": "artman", - "version": "1.0.0", - "dockerImage": "googleapis/artman@sha256:f37f2464788cb551299209b4fcab4eb323533154488c2ef9ec0c75d7c2b4b482" + "version": "2.0.0", + "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" + } + }, + { + "git": { + "name": ".", + "remote": "git@github.com:googleapis/python-pubsub", + "sha": "96f4f820801dc83a04aed992af14e52f8e5bb710" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "541b1ded4abadcc38e8178680b0677f65594ea6f", - "internalRef": "298686266", - "log": "541b1ded4abadcc38e8178680b0677f65594ea6f\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 298686266\n\nc0d171acecb4f5b0bfd2c4ca34fc54716574e300\n Updated to include the Notification v1 API.\n\nPiperOrigin-RevId: 298652775\n\n2346a9186c0bff2c9cc439f2459d558068637e05\nAdd Service Directory v1beta1 protos and configs\n\nPiperOrigin-RevId: 298625638\n\na78ed801b82a5c6d9c5368e24b1412212e541bb7\nPublishing v3 protos and configs.\n\nPiperOrigin-RevId: 298607357\n\n4a180bfff8a21645b3a935c2756e8d6ab18a74e0\nautoml/v1beta1 publish proto updates\n\nPiperOrigin-RevId: 298484782\n\n6de6e938b7df1cd62396563a067334abeedb9676\nchore: use the latest gapic-generator and protoc-java-resource-name-plugin in Bazel workspace.\n\nPiperOrigin-RevId: 298474513\n\n244ab2b83a82076a1fa7be63b7e0671af73f5c02\nAdds service config definition for bigqueryreservation v1\n\nPiperOrigin-RevId: 298455048\n\n83c6f84035ee0f80eaa44d8b688a010461cc4080\nUpdate google/api/auth.proto to make AuthProvider to have 
JwtLocation\n\nPiperOrigin-RevId: 297918498\n\ne9e90a787703ec5d388902e2cb796aaed3a385b4\nDialogflow weekly v2/v2beta1 library update:\n - adding get validation result\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297671458\n\n1a2b05cc3541a5f7714529c665aecc3ea042c646\nAdding .yaml and .json config files.\n\nPiperOrigin-RevId: 297570622\n\ndfe1cf7be44dee31d78f78e485d8c95430981d6e\nPublish `QueryOptions` proto.\n\nIntroduced a `query_options` input in `ExecuteSqlRequest`.\n\nPiperOrigin-RevId: 297497710\n\ndafc905f71e5d46f500b41ed715aad585be062c3\npubsub: revert pull init_rpc_timeout & max_rpc_timeout back to 25 seconds and reset multiplier to 1.0\n\nPiperOrigin-RevId: 297486523\n\nf077632ba7fee588922d9e8717ee272039be126d\nfirestore: add update_transform\n\nPiperOrigin-RevId: 297405063\n\n0aba1900ffef672ec5f0da677cf590ee5686e13b\ncluster: use square brace for cross-reference\n\nPiperOrigin-RevId: 297204568\n\n5dac2da18f6325cbaed54603c43f0667ecd50247\nRestore retry params in gapic config because securitycenter has non-standard default retry params.\nRestore a few retry codes for some idempotent methods.\n\nPiperOrigin-RevId: 297196720\n\n1eb61455530252bba8b2c8d4bc9832960e5a56f6\npubsub: v1 replace IAM HTTP rules\n\nPiperOrigin-RevId: 297188590\n\n80b2d25f8d43d9d47024ff06ead7f7166548a7ba\nDialogflow weekly v2/v2beta1 library update:\n - updates to mega agent api\n - adding field mask override control for output audio config\nImportant updates are also posted at:\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 297187629\n\n0b1876b35e98f560f9c9ca9797955f020238a092\nUse an older version of protoc-docs-plugin that is compatible with the specified gapic-generator and protobuf versions.\n\nprotoc-docs-plugin >=0.4.0 (see commit 
https://github.com/googleapis/protoc-docs-plugin/commit/979f03ede6678c487337f3d7e88bae58df5207af) is incompatible with protobuf 3.9.1.\n\nPiperOrigin-RevId: 296986742\n\n1e47e676cddbbd8d93f19ba0665af15b5532417e\nFix: Restore a method signature for UpdateCluster\n\nPiperOrigin-RevId: 296901854\n\n7f910bcc4fc4704947ccfd3ceed015d16b9e00c2\nUpdate Dataproc v1beta2 client.\n\nPiperOrigin-RevId: 296451205\n\nde287524405a3dce124d301634731584fc0432d7\nFix: Reinstate method signatures that had been missed off some RPCs\nFix: Correct resource types for two fields\n\nPiperOrigin-RevId: 296435091\n\ne5bc9566ae057fb4c92f8b7e047f1c8958235b53\nDeprecate the endpoint_uris field, as it is unused.\n\nPiperOrigin-RevId: 296357191\n\n8c12e2b4dca94e12bff9f538bdac29524ff7ef7a\nUpdate Dataproc v1 client.\n\nPiperOrigin-RevId: 296336662\n\n17567c4a1ef0a9b50faa87024d66f8acbb561089\nRemoving erroneous comment, a la https://github.com/googleapis/java-speech/pull/103\n\nPiperOrigin-RevId: 296332968\n\n3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\nce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - 
AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get ride of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. 
Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. 
(Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old an very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 
config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 
293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\na8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n" + "sha": "42ee97c1b93a0e3759bbba3013da309f670a90ab", + "internalRef": "307114445" } }, { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2020.2.4" + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "f5e4c17dc78a966dbf29961dd01f9bbd63e20a04" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "f5e4c17dc78a966dbf29961dd01f9bbd63e20a04" } } ], diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index cc45712add21..8c957d73291d 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -191,7 +191,7 @@ def _merge_dict(d1, d2): # Add templated files # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( - unit_cov_level=97, cov_level=99, 
system_test_dependencies=["test_utils", "psutil"] + unit_cov_level=97, cov_level=99, system_test_external_dependencies=["psutil"], ) s.move(templated_files) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index 1d2cb7522be5..4fad76b78518 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -321,6 +321,49 @@ def test_list_topic_subscriptions_exception(self): with pytest.raises(CustomException): list(paged_list_response) + def test_list_topic_snapshots(self): + # Setup Expected Response + next_page_token = "" + snapshots_element = "snapshotsElement1339034092" + snapshots = [snapshots_element] + expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} + expected_response = pubsub_pb2.ListTopicSnapshotsResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() + + # Setup Request + topic = client.topic_path("[PROJECT]", "[TOPIC]") + + paged_list_response = client.list_topic_snapshots(topic) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.snapshots[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.ListTopicSnapshotsRequest(topic=topic) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_topic_snapshots_exception(self): + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = 
publisher_client.PublisherClient() + + # Setup request + topic = client.topic_path("[PROJECT]", "[TOPIC]") + + paged_list_response = client.list_topic_snapshots(topic) + with pytest.raises(CustomException): + list(paged_list_response) + def test_delete_topic(self): channel = ChannelStub() patch = mock.patch("google.api_core.grpc_helpers.create_channel") @@ -367,7 +410,7 @@ def test_set_iam_policy(self): client = publisher_client.PublisherClient() # Setup Request - resource = client.topic_path("[PROJECT]", "[TOPIC]") + resource = "resource-341064690" policy = {} response = client.set_iam_policy(resource, policy) @@ -389,7 +432,7 @@ def test_set_iam_policy_exception(self): client = publisher_client.PublisherClient() # Setup request - resource = client.topic_path("[PROJECT]", "[TOPIC]") + resource = "resource-341064690" policy = {} with pytest.raises(CustomException): @@ -410,7 +453,7 @@ def test_get_iam_policy(self): client = publisher_client.PublisherClient() # Setup Request - resource = client.topic_path("[PROJECT]", "[TOPIC]") + resource = "resource-341064690" response = client.get_iam_policy(resource) assert expected_response == response @@ -429,7 +472,7 @@ def test_get_iam_policy_exception(self): client = publisher_client.PublisherClient() # Setup request - resource = client.topic_path("[PROJECT]", "[TOPIC]") + resource = "resource-341064690" with pytest.raises(CustomException): client.get_iam_policy(resource) @@ -449,7 +492,7 @@ def test_test_iam_permissions(self): client = publisher_client.PublisherClient() # Setup Request - resource = client.topic_path("[PROJECT]", "[TOPIC]") + resource = "resource-341064690" permissions = [] response = client.test_iam_permissions(resource, permissions) @@ -471,7 +514,7 @@ def test_test_iam_permissions_exception(self): client = publisher_client.PublisherClient() # Setup request - resource = client.topic_path("[PROJECT]", "[TOPIC]") + resource = "resource-341064690" permissions = [] with pytest.raises(CustomException): diff 
--git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index db9e77b2adda..4e34e19057fe 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -77,12 +77,14 @@ def test_create_subscription(self): ack_deadline_seconds = 2135351438 retain_acked_messages = False enable_message_ordering = True + filter_ = "filter-1274492040" expected_response = { "name": name_2, "topic": topic_2, "ack_deadline_seconds": ack_deadline_seconds, "retain_acked_messages": retain_acked_messages, "enable_message_ordering": enable_message_ordering, + "filter": filter_, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -127,12 +129,14 @@ def test_get_subscription(self): ack_deadline_seconds = 2135351438 retain_acked_messages = False enable_message_ordering = True + filter_ = "filter-1274492040" expected_response = { "name": name, "topic": topic, "ack_deadline_seconds": ack_deadline_seconds, "retain_acked_messages": retain_acked_messages, "enable_message_ordering": enable_message_ordering, + "filter": filter_, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -175,12 +179,14 @@ def test_update_subscription(self): ack_deadline_seconds_2 = 921632575 retain_acked_messages = False enable_message_ordering = True + filter_ = "filter-1274492040" expected_response = { "name": name, "topic": topic, "ack_deadline_seconds": ack_deadline_seconds_2, "retain_acked_messages": retain_acked_messages, "enable_message_ordering": enable_message_ordering, + "filter": filter_, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -305,6 +311,45 @@ def test_delete_subscription_exception(self): with pytest.raises(CustomException): client.delete_subscription(subscription) + def test_get_snapshot(self): + # Setup Expected Response + name 
= "name3373707" + topic = "topic110546223" + expected_response = {"name": name, "topic": topic} + expected_response = pubsub_pb2.Snapshot(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() + + # Setup Request + snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") + + response = client.get_snapshot(snapshot) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_snapshot_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = subscriber_client.SubscriberClient() + + # Setup request + snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") + + with pytest.raises(CustomException): + client.get_snapshot(snapshot) + def test_modify_ack_deadline(self): channel = ChannelStub() patch = mock.patch("google.api_core.grpc_helpers.create_channel") @@ -422,9 +467,7 @@ def test_pull_exception(self): def test_streaming_pull(self): # Setup Expected Response - received_messages_element = {} - received_messages = [received_messages_element] - expected_response = {"received_messages": received_messages} + expected_response = {} expected_response = pubsub_pb2.StreamingPullResponse(**expected_response) # Mock the API response @@ -732,7 +775,7 @@ def test_set_iam_policy(self): client = subscriber_client.SubscriberClient() # Setup Request - resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + resource = "resource-341064690" policy = {} response = 
client.set_iam_policy(resource, policy) @@ -754,7 +797,7 @@ def test_set_iam_policy_exception(self): client = subscriber_client.SubscriberClient() # Setup request - resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + resource = "resource-341064690" policy = {} with pytest.raises(CustomException): @@ -775,7 +818,7 @@ def test_get_iam_policy(self): client = subscriber_client.SubscriberClient() # Setup Request - resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + resource = "resource-341064690" response = client.get_iam_policy(resource) assert expected_response == response @@ -794,7 +837,7 @@ def test_get_iam_policy_exception(self): client = subscriber_client.SubscriberClient() # Setup request - resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + resource = "resource-341064690" with pytest.raises(CustomException): client.get_iam_policy(resource) @@ -814,7 +857,7 @@ def test_test_iam_permissions(self): client = subscriber_client.SubscriberClient() # Setup Request - resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + resource = "resource-341064690" permissions = [] response = client.test_iam_permissions(resource, permissions) @@ -836,7 +879,7 @@ def test_test_iam_permissions_exception(self): client = subscriber_client.SubscriberClient() # Setup request - resource = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + resource = "resource-341064690" permissions = [] with pytest.raises(CustomException): From e4f221d042c738479b6609d96f708e41580b2f83 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 21 Apr 2020 18:52:48 +0200 Subject: [PATCH 0452/1197] docs: fix release heading in CHANGELOG (#83) --- packages/google-cloud-pubsub/CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 7cd23c1736e8..181a8ee64a55 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ 
b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,7 +4,7 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history -### [1.4.3](https://www.github.com/googleapis/python-pubsub/compare/v1.4.2...v1.4.3) (2020-04-16) +## [1.4.3](https://www.github.com/googleapis/python-pubsub/compare/v1.4.2...v1.4.3) (2020-04-16) ### Bug Fixes From 5e4f5637a3936088f3c77407a171878e39d4557e Mon Sep 17 00:00:00 2001 From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com> Date: Tue, 21 Apr 2020 16:48:05 -0700 Subject: [PATCH 0453/1197] chore: Migrate python-pubsub synth.py from artman to bazel (#80) --- packages/google-cloud-pubsub/synth.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 8c957d73291d..90d15a59e61f 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -14,13 +14,10 @@ """This script is used to synthesize generated parts of this library.""" -import re -import textwrap - import synthtool as s from synthtool import gcp -gapic = gcp.GAPICGenerator() +gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() version = "v1" @@ -28,9 +25,9 @@ # Generate pubsub GAPIC layer # ---------------------------------------------------------------------------- library = gapic.py_library( - "pubsub", - version, - config_path="/google/pubsub/artman_pubsub.yaml", + service="pubsub", + version=version, + bazel_target="//google/pubsub/v1:pubsub-v1-py", include_protos=True, ) s.move( From 1e544b4ac0dedf874785b0e0c72c0eec2538bc87 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Wed, 22 Apr 2020 07:02:05 -0400 Subject: [PATCH 0454/1197] feat: send client id with StreamingPullRequest (#58) - The client id is created randomly for each StreamingPullManager and is used to establish affinity across stream disconnections/retries. 
- Server-client affinity is important for ordering keys, where the backend tries to send the same keys to the same client. Fixes #62 --- .../_protocol/streaming_pull_manager.py | 8 ++++++++ .../subscriber/test_streaming_pull_manager.py | 15 +++++++++++++++ 2 files changed, 23 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 0a25d4625a19..2c3e51fee241 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -18,6 +18,7 @@ import functools import logging import threading +import uuid import grpc import six @@ -116,6 +117,12 @@ def __init__( self._closed = False self._close_callbacks = [] + # Generate a random client id tied to this object. All streaming pull + # connections (initial and re-connects) will then use the same client + # id. Doing so lets the server establish affinity even across stream + # disconncetions. + self._client_id = str(uuid.uuid4()) + if scheduler is None: self._scheduler = ( google.cloud.pubsub_v1.subscriber.scheduler.ThreadScheduler() @@ -567,6 +574,7 @@ def _get_initial_request(self, stream_ack_deadline_seconds): modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), stream_ack_deadline_seconds=stream_ack_deadline_seconds, subscription=self._subscription, + client_id=self._client_id, ) # Return the initial request. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 70f320fcc53d..0475aaf6ea38 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -96,6 +96,7 @@ def test_constructor_and_default_state(): assert manager._subscription == mock.sentinel.subscription assert manager._scheduler is not None assert manager._messages_on_hold is not None + assert manager._client_id is not None def test_constructor_with_options(): @@ -144,6 +145,20 @@ def test_ack_deadline(): assert manager.ack_deadline == 20 +def test_client_id(): + manager1 = make_manager() + request1 = manager1._get_initial_request(stream_ack_deadline_seconds=10) + client_id_1 = request1.client_id + assert client_id_1 + + manager2 = make_manager() + request2 = manager2._get_initial_request(stream_ack_deadline_seconds=10) + client_id_2 = request2.client_id + assert client_id_2 + + assert client_id_1 != client_id_2 + + def test_ack_deadline_with_max_duration_per_lease_extension(): manager = make_manager() manager._flow_control = types.FlowControl(max_duration_per_lease_extension=5) From 2b0ceef809f52e9c04867140804b4af1c227f826 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Wed, 22 Apr 2020 16:05:28 -0400 Subject: [PATCH 0455/1197] Remove EXPERIMENTAL notice for DLQ b/c it's GA now (#87) --- .../google/cloud/pubsub_v1/subscriber/message.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index cafc34b80f2f..864d697e0375 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -187,10 +187,6 @@ def delivery_attempt(self): The first delivery of a given message will have this value as 1. The value is calculated at best effort and is approximate. - EXPERIMENTAL: This feature is part of a closed alpha release. This - API might be changed in backward-incompatible ways and is not recommended - for production use. It is not subject to any SLA or deprecation policy. - Returns: Optional[int]: The delivery attempt counter or None. """ From 02dbbda4f26910a8a4f8cdcdf11381177afb9fce Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 23 Apr 2020 11:22:17 -0700 Subject: [PATCH 0456/1197] [CHANGE ME] Re-generated to pick up changes in the API or client library generator. (#84) Co-authored-by: Prad Nelluru --- packages/google-cloud-pubsub/docs/conf.py | 5 ++++- packages/google-cloud-pubsub/synth.metadata | 4 ++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 3e4bdfa1d7ad..20c1b57fe653 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -38,7 +38,6 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", - "recommonmark", ] # autodoc/autosummary flags @@ -50,6 +49,10 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index c38b8778df06..f48ab46269f3 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -10,8 +10,8 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-pubsub", - "sha": "96f4f820801dc83a04aed992af14e52f8e5bb710" + "remote": "https://github.com/googleapis/python-pubsub.git", + "sha": "4ce898e80eeb16b18d1ee29c678ade149804d186" } }, { From 9610b5fd5ab494673a61d83a65da1bb241aab568 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 30 Apr 2020 19:55:03 +0200 Subject: [PATCH 0457/1197] chore: remove local test_utils directory (#92) --- packages/google-cloud-pubsub/synth.py | 7 - .../test_utils/credentials.json.enc | 49 ---- .../scripts/circleci/get_tagged_package.py | 64 ----- .../scripts/circleci/twine_upload.sh | 36 --- .../test_utils/scripts/get_target_packages.py | 268 ------------------ .../scripts/get_target_packages_kokoro.py | 98 ------- .../test_utils/scripts/run_emulator.py | 199 ------------- .../test_utils/scripts/update_docs.sh | 93 ------ .../google-cloud-pubsub/test_utils/setup.py | 64 ----- .../test_utils/test_utils/__init__.py | 0 .../test_utils/test_utils/imports.py | 38 --- .../test_utils/test_utils/retry.py | 207 -------------- .../test_utils/test_utils/system.py | 81 ------ .../test_utils/test_utils/vpcsc_config.py | 118 -------- 14 files changed, 1322 deletions(-) delete mode 100644 packages/google-cloud-pubsub/test_utils/credentials.json.enc delete mode 100644 packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py delete mode 100755 packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh delete mode 100644 packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py delete mode 100644 
packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py delete mode 100644 packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py delete mode 100755 packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh delete mode 100644 packages/google-cloud-pubsub/test_utils/setup.py delete mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/__init__.py delete mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/imports.py delete mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/retry.py delete mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/system.py delete mode 100644 packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 90d15a59e61f..645b7955b702 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -192,11 +192,4 @@ def _merge_dict(d1, d2): ) s.move(templated_files) -# Temporary fix for the generated synth file (the test_utils path) -s.replace( - "noxfile.py", - r'session\.install\("-e", "\.\./test_utils/"\)', - '# \g<0>', -) - s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-pubsub/test_utils/credentials.json.enc b/packages/google-cloud-pubsub/test_utils/credentials.json.enc deleted file mode 100644 index f073c7e4f774..000000000000 --- a/packages/google-cloud-pubsub/test_utils/credentials.json.enc +++ /dev/null @@ -1,49 +0,0 @@ -U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA -UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU -aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj -HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV -V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus -J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8 -Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He 
-/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv -ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT -6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq -NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8 -j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF -41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM -IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g -x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/ -vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy -ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At -CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD -j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK -jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z -cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO -LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso -Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d -XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/ -MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP -+dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4 -kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU -5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr -E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29 -D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT -tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX -XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6 -J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB -jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM -td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg -twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC -mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU 
-aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6 -uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK -n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ -bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX -ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H -NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w -1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE -8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL -qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv -tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4 -iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l -bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD diff --git a/packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py b/packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py deleted file mode 100644 index c148b9dc2370..000000000000 --- a/packages/google-cloud-pubsub/test_utils/scripts/circleci/get_tagged_package.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Helper to determine package from tag. -Get the current package directory corresponding to the Circle Tag. 
-""" - -from __future__ import print_function - -import os -import re -import sys - - -TAG_RE = re.compile(r""" - ^ - (?P - (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed) - ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) - $ -""", re.VERBOSE) -TAG_ENV = 'CIRCLE_TAG' -ERROR_MSG = '%s env. var. not set' % (TAG_ENV,) -BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z' -CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__) -ROOT_DIR = os.path.realpath( - os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..')) - - -def main(): - """Get the current package directory. - Prints the package directory out so callers can consume it. - """ - if TAG_ENV not in os.environ: - print(ERROR_MSG, file=sys.stderr) - sys.exit(1) - - tag_name = os.environ[TAG_ENV] - match = TAG_RE.match(tag_name) - if match is None: - print(BAD_TAG_MSG % (tag_name,), file=sys.stderr) - sys.exit(1) - - pkg_name = match.group('pkg') - if pkg_name is None: - print(ROOT_DIR) - else: - pkg_dir = pkg_name.rstrip('-').replace('-', '_') - print(os.path.join(ROOT_DIR, pkg_dir)) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh b/packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh deleted file mode 100755 index 23a4738e90b9..000000000000 --- a/packages/google-cloud-pubsub/test_utils/scripts/circleci/twine_upload.sh +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash - -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -set -ev - -# If this is not a CircleCI tag, no-op. -if [[ -z "$CIRCLE_TAG" ]]; then - echo "This is not a release tag. Doing nothing." - exit 0 -fi - -# H/T: http://stackoverflow.com/a/246128/1068170 -SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py" -# Determine the package directory being deploying on this tag. -PKG_DIR="$(python ${SCRIPT})" - -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - -# Move into the package, build the distribution and upload. -cd ${PKG_DIR} -python3 setup.py sdist bdist_wheel -twine upload dist/* diff --git a/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py b/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py deleted file mode 100644 index 1d51830cc23a..000000000000 --- a/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages.py +++ /dev/null @@ -1,268 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Print a list of packages which require testing.""" - -import os -import re -import subprocess -import warnings - - -CURRENT_DIR = os.path.realpath(os.path.dirname(__file__)) -BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..')) -GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python') -CI = os.environ.get('CI', '') -CI_BRANCH = os.environ.get('CIRCLE_BRANCH') -CI_PR = os.environ.get('CIRCLE_PR_NUMBER') -CIRCLE_TAG = os.environ.get('CIRCLE_TAG') -head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD'] -).strip().decode('ascii').split() -rev_parse = subprocess.check_output( - ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] -).strip().decode('ascii') -MAJOR_DIV = '#' * 78 -MINOR_DIV = '#' + '-' * 77 - -# NOTE: This reg-ex is copied from ``get_tagged_packages``. -TAG_RE = re.compile(r""" - ^ - (?P - (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed) - ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints) - $ -""", re.VERBOSE) - -# This is the current set of dependencies by package. -# As of this writing, the only "real" dependency is that of error_reporting -# (on logging), the rest are just system test dependencies. -PKG_DEPENDENCIES = { - 'logging': {'pubsub'}, -} - - -def get_baseline(): - """Return the baseline commit. - - On a pull request, or on a branch, return the common parent revision - with the master branch. - - Locally, return a value pulled from environment variables, or None if - the environment variables are not set. - - On a push to master, return None. This will effectively cause everything - to be considered to be affected. - """ - - # If this is a pull request or branch, return the tip for master. - # We will test only packages which have changed since that point. 
- ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR]) - - if ci_non_master: - - repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO) - subprocess.run(['git', 'remote', 'add', 'baseline', repo_url], - stderr=subprocess.DEVNULL) - subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL) - - if CI_PR is None and CI_BRANCH is not None: - output = subprocess.check_output([ - 'git', 'merge-base', '--fork-point', - 'baseline/master', CI_BRANCH]) - return output.strip().decode('ascii') - - return 'baseline/master' - - # If environment variables are set identifying what the master tip is, - # use that. - if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''): - remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE'] - branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master') - return '%s/%s' % (remote, branch) - - # If we are not in CI and we got this far, issue a warning. - if not CI: - warnings.warn('No baseline could be determined; this means tests ' - 'will run for every package. If this is local ' - 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE ' - 'environment variable.') - - # That is all we can do; return None. - return None - - -def get_changed_files(): - """Return a list of files that have been changed since the baseline. - - If there is no base, return None. - """ - # Get the baseline, and fail quickly if there is no baseline. - baseline = get_baseline() - print('# Baseline commit: {}'.format(baseline)) - if not baseline: - return None - - # Return a list of altered files. - try: - return subprocess.check_output([ - 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline), - ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') - except subprocess.CalledProcessError: - warnings.warn('Unable to perform git diff; falling back to assuming ' - 'all packages have changed.') - return None - - -def reverse_map(dict_of_sets): - """Reverse a map of one-to-many. 
- - So the map:: - - { - 'A': {'B', 'C'}, - 'B': {'C'}, - } - - becomes - - { - 'B': {'A'}, - 'C': {'A', 'B'}, - } - - Args: - dict_of_sets (dict[set]): A dictionary of sets, mapping - one value to many. - - Returns: - dict[set]: The reversed map. - """ - result = {} - for key, values in dict_of_sets.items(): - for value in values: - result.setdefault(value, set()).add(key) - - return result - -def get_changed_packages(file_list): - """Return a list of changed packages based on the provided file list. - - If the file list is None, then all packages should be considered to be - altered. - """ - # Determine a complete list of packages. - all_packages = set() - for file_ in os.listdir(BASE_DIR): - abs_file = os.path.realpath(os.path.join(BASE_DIR, file_)) - nox_file = os.path.join(abs_file, 'nox.py') - if os.path.isdir(abs_file) and os.path.isfile(nox_file): - all_packages.add(file_) - - # If ther is no file list, send down the full package set. - if file_list is None: - return all_packages - - # Create a set based on the list of changed files. - answer = set() - reverse_deps = reverse_map(PKG_DEPENDENCIES) - for file_ in file_list: - # Ignore root directory changes (setup.py, .gitignore, etc.). - if os.path.sep not in file_: - continue - - # Ignore changes that are not in a package (usually this will be docs). - package = file_.split(os.path.sep, 1)[0] - if package not in all_packages: - continue - - # If there is a change in core, short-circuit now and return - # everything. - if package in ('core',): - return all_packages - - # Add the package, as well as any dependencies this package has. - # NOTE: For now, dependencies only go down one level. - answer.add(package) - answer = answer.union(reverse_deps.get(package, set())) - - # We got this far without being short-circuited; return the final answer. - return answer - - -def get_tagged_package(): - """Return the package corresponding to the current tag. - - If there is not tag, will return :data:`None`. 
- """ - if CIRCLE_TAG is None: - return - - match = TAG_RE.match(CIRCLE_TAG) - if match is None: - return - - pkg_name = match.group('pkg') - if pkg_name == '': - # NOTE: This corresponds to the "umbrella" tag. - return - - return pkg_name.rstrip('-').replace('-', '_') - - -def get_target_packages(): - """Return a list of target packages to be run in the current build. - - If in a tag build, will run only the package(s) that are tagged, otherwise - will run the packages that have file changes in them (or packages that - depend on those). - """ - tagged_package = get_tagged_package() - if tagged_package is None: - file_list = get_changed_files() - print(MAJOR_DIV) - print('# Changed files:') - print(MINOR_DIV) - for file_ in file_list or (): - print('# {}'.format(file_)) - for package in sorted(get_changed_packages(file_list)): - yield package - else: - yield tagged_package - - -def main(): - print(MAJOR_DIV) - print('# Environment') - print(MINOR_DIV) - print('# CircleCI: {}'.format(CI)) - print('# CircleCI branch: {}'.format(CI_BRANCH)) - print('# CircleCI pr: {}'.format(CI_PR)) - print('# CircleCI tag: {}'.format(CIRCLE_TAG)) - print('# HEAD ref: {}'.format(head_hash)) - print('# {}'.format(head_name)) - print('# Git branch: {}'.format(rev_parse)) - print(MAJOR_DIV) - - packages = list(get_target_packages()) - - print(MAJOR_DIV) - print('# Target packages:') - print(MINOR_DIV) - for package in packages: - print(package) - print(MAJOR_DIV) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py b/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py deleted file mode 100644 index 27d3a0c940ea..000000000000 --- a/packages/google-cloud-pubsub/test_utils/scripts/get_target_packages_kokoro.py +++ /dev/null @@ -1,98 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance 
with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Print a list of packages which require testing.""" - -import pathlib -import subprocess - -import ci_diff_helper -import requests - - -def print_environment(environment): - print("-> CI environment:") - print('Branch', environment.branch) - print('PR', environment.pr) - print('In PR', environment.in_pr) - print('Repo URL', environment.repo_url) - if environment.in_pr: - print('PR Base', environment.base) - - -def get_base(environment): - if environment.in_pr: - return environment.base - else: - # If we're not in a PR, just calculate the changes between this commit - # and its parent. 
- return 'HEAD~1' - - -def get_changed_files_from_base(base): - return subprocess.check_output([ - 'git', 'diff', '--name-only', f'{base}..HEAD', - ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n') - - -_URL_TEMPLATE = ( - 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/' - '{}/files' -) - - -def get_changed_files_from_pr(pr): - url = _URL_TEMPLATE.format(pr) - while url is not None: - response = requests.get(url) - for info in response.json(): - yield info['filename'] - url = response.links.get('next', {}).get('url') - - -def determine_changed_packages(changed_files): - packages = [ - path.parent for path in pathlib.Path('.').glob('*/noxfile.py') - ] - - changed_packages = set() - for file in changed_files: - file = pathlib.Path(file) - for package in packages: - if package in file.parents: - changed_packages.add(package) - - return changed_packages - - -def main(): - environment = ci_diff_helper.get_config() - print_environment(environment) - base = get_base(environment) - - if environment.in_pr: - changed_files = list(get_changed_files_from_pr(environment.pr)) - else: - changed_files = get_changed_files_from_base(base) - - packages = determine_changed_packages(changed_files) - - print(f"Comparing against {base}.") - print("-> Changed packages:") - - for package in packages: - print(package) - - -main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py b/packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py deleted file mode 100644 index 287b08640691..000000000000 --- a/packages/google-cloud-pubsub/test_utils/scripts/run_emulator.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Run system tests locally with the emulator. - -First makes system calls to spawn the emulator and get the local environment -variable needed for it. Then calls the system tests. -""" - - -import argparse -import os -import subprocess - -import psutil - -from google.cloud.environment_vars import BIGTABLE_EMULATOR -from google.cloud.environment_vars import GCD_DATASET -from google.cloud.environment_vars import GCD_HOST -from google.cloud.environment_vars import PUBSUB_EMULATOR -from run_system_test import run_module_tests - - -BIGTABLE = 'bigtable' -DATASTORE = 'datastore' -PUBSUB = 'pubsub' -PACKAGE_INFO = { - BIGTABLE: (BIGTABLE_EMULATOR,), - DATASTORE: (GCD_DATASET, GCD_HOST), - PUBSUB: (PUBSUB_EMULATOR,), -} -EXTRA = { - DATASTORE: ('--no-legacy',), -} -_DS_READY_LINE = '[datastore] Dev App Server is now running.\n' -_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on ' -_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on ' - - -def get_parser(): - """Get simple ``argparse`` parser to determine package. - - :rtype: :class:`argparse.ArgumentParser` - :returns: The parser for this script. - """ - parser = argparse.ArgumentParser( - description='Run google-cloud system tests against local emulator.') - parser.add_argument('--package', dest='package', - choices=sorted(PACKAGE_INFO.keys()), - default=DATASTORE, help='Package to be tested.') - return parser - - -def get_start_command(package): - """Get command line arguments for starting emulator. 
- - :type package: str - :param package: The package to start an emulator for. - - :rtype: tuple - :returns: The arguments to be used, in a tuple. - """ - result = ('gcloud', 'beta', 'emulators', package, 'start') - extra = EXTRA.get(package, ()) - return result + extra - - -def get_env_init_command(package): - """Get command line arguments for getting emulator env. info. - - :type package: str - :param package: The package to get environment info for. - - :rtype: tuple - :returns: The arguments to be used, in a tuple. - """ - result = ('gcloud', 'beta', 'emulators', package, 'env-init') - extra = EXTRA.get(package, ()) - return result + extra - - -def datastore_wait_ready(popen): - """Wait until the datastore emulator is ready to use. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - """ - emulator_ready = False - while not emulator_ready: - emulator_ready = popen.stderr.readline() == _DS_READY_LINE - - -def wait_ready_prefix(popen, prefix): - """Wait until the a process encounters a line with matching prefix. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - - :type prefix: str - :param prefix: The prefix to match - """ - emulator_ready = False - while not emulator_ready: - emulator_ready = popen.stderr.readline().startswith(prefix) - - -def wait_ready(package, popen): - """Wait until the emulator is ready to use. - - :type package: str - :param package: The package to check if ready. - - :type popen: :class:`subprocess.Popen` - :param popen: An open subprocess to interact with. - - :raises: :class:`KeyError` if the ``package`` is not among - ``datastore``, ``pubsub`` or ``bigtable``. 
- """ - if package == DATASTORE: - datastore_wait_ready(popen) - elif package == PUBSUB: - wait_ready_prefix(popen, _PS_READY_LINE_PREFIX) - elif package == BIGTABLE: - wait_ready_prefix(popen, _BT_READY_LINE_PREFIX) - else: - raise KeyError('Package not supported', package) - - -def cleanup(pid): - """Cleanup a process (including all of its children). - - :type pid: int - :param pid: Process ID. - """ - proc = psutil.Process(pid) - for child_proc in proc.children(recursive=True): - try: - child_proc.kill() - child_proc.terminate() - except psutil.NoSuchProcess: - pass - proc.terminate() - proc.kill() - - -def run_tests_in_emulator(package): - """Spawn an emulator instance and run the system tests. - - :type package: str - :param package: The package to run system tests against. - """ - # Make sure this package has environment vars to replace. - env_vars = PACKAGE_INFO[package] - - start_command = get_start_command(package) - # Ignore stdin and stdout, don't pollute the user's output with them. - proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - try: - wait_ready(package, proc_start) - env_init_command = get_env_init_command(package) - proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE, - stderr=subprocess.PIPE) - env_status = proc_env.wait() - if env_status != 0: - raise RuntimeError(env_status, proc_env.stderr.read()) - env_lines = proc_env.stdout.read().strip().split('\n') - # Set environment variables before running the system tests. 
- for env_var in env_vars: - line_prefix = 'export ' + env_var + '=' - value, = [line.split(line_prefix, 1)[1] for line in env_lines - if line.startswith(line_prefix)] - os.environ[env_var] = value - run_module_tests(package, - ignore_requirements=True) - finally: - cleanup(proc_start.pid) - - -def main(): - """Main method to run this script.""" - parser = get_parser() - args = parser.parse_args() - run_tests_in_emulator(args.package) - - -if __name__ == '__main__': - main() diff --git a/packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh b/packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh deleted file mode 100755 index 8cbab9f0dad0..000000000000 --- a/packages/google-cloud-pubsub/test_utils/scripts/update_docs.sh +++ /dev/null @@ -1,93 +0,0 @@ -#!/bin/bash - -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -ev - -GH_OWNER='GoogleCloudPlatform' -GH_PROJECT_NAME='google-cloud-python' - -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" - -# Function to build the docs. -function build_docs { - rm -rf docs/_build/ - rm -f docs/bigquery/generated/*.rst - # -W -> warnings as errors - # -T -> show full traceback on exception - # -N -> no color - sphinx-build \ - -W -T -N \ - -b html \ - -d docs/_build/doctrees \ - docs/ \ - docs/_build/html/ - return $? -} - -# Only update docs if we are on CircleCI. 
-if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then - echo "Building new docs on a merged commit." -elif [[ "$1" == "kokoro" ]]; then - echo "Building and publishing docs on Kokoro." -elif [[ -n "${CIRCLE_TAG}" ]]; then - echo "Building new docs on a tag (but will not deploy)." - build_docs - exit $? -else - echo "Not on master nor a release tag." - echo "Building new docs for testing purposes, but not deploying." - build_docs - exit $? -fi - -# Adding GitHub pages branch. `git submodule add` checks it -# out at HEAD. -GH_PAGES_DIR='ghpages' -git submodule add -q -b gh-pages \ - "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR} - -# Determine if we are building a new tag or are building docs -# for master. Then build new docs in docs/_build from master. -if [[ -n "${CIRCLE_TAG}" ]]; then - # Sphinx will use the package version by default. - build_docs -else - SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs -fi - -# Update gh-pages with the created docs. -cd ${GH_PAGES_DIR} -git rm -fr latest/ -cp -R ../docs/_build/html/ latest/ - -# Update the files push to gh-pages. -git add . -git status - -# If there are no changes, just exit cleanly. -if [[ -z "$(git status --porcelain)" ]]; then - echo "Nothing to commit. Exiting without pushing changes." - exit -fi - -# Commit to gh-pages branch to apply changes. -git config --global user.email "dpebot@google.com" -git config --global user.name "dpebot" -git commit -m "Update docs after merge to master." - -# NOTE: This may fail if two docs updates (on merges to master) -# happen in close proximity. 
-git push -q origin HEAD:gh-pages diff --git a/packages/google-cloud-pubsub/test_utils/setup.py b/packages/google-cloud-pubsub/test_utils/setup.py deleted file mode 100644 index 8e9222a7f862..000000000000 --- a/packages/google-cloud-pubsub/test_utils/setup.py +++ /dev/null @@ -1,64 +0,0 @@ -# Copyright 2017 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os - -from setuptools import find_packages -from setuptools import setup - - -PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) - - -# NOTE: This is duplicated throughout and we should try to -# consolidate. 
-SETUP_BASE = { - 'author': 'Google Cloud Platform', - 'author_email': 'googleapis-publisher@google.com', - 'scripts': [], - 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', - 'license': 'Apache 2.0', - 'platforms': 'Posix; MacOS X; Windows', - 'include_package_data': True, - 'zip_safe': False, - 'classifiers': [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Topic :: Internet', - ], -} - - -REQUIREMENTS = [ - 'google-auth >= 0.4.0', - 'six', -] - -setup( - name='google-cloud-testutils', - version='0.24.0', - description='System test utilities for google-cloud-python', - packages=find_packages(), - install_requires=REQUIREMENTS, - python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*', - **SETUP_BASE -) diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/__init__.py b/packages/google-cloud-pubsub/test_utils/test_utils/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/imports.py b/packages/google-cloud-pubsub/test_utils/test_utils/imports.py deleted file mode 100644 index 5991af7fc465..000000000000 --- a/packages/google-cloud-pubsub/test_utils/test_utils/imports.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import mock -import six - - -def maybe_fail_import(predicate): - """Create and return a patcher that conditionally makes an import fail. - - Args: - predicate (Callable[[...], bool]): A callable that, if it returns `True`, - triggers an `ImportError`. It must accept the same arguments as the - built-in `__import__` function. - https://docs.python.org/3/library/functions.html#__import__ - - Returns: - A mock patcher object that can be used to enable patched import behavior. - """ - orig_import = six.moves.builtins.__import__ - - def custom_import(name, globals=None, locals=None, fromlist=(), level=0): - if predicate(name, globals, locals, fromlist, level): - raise ImportError - return orig_import(name, globals, locals, fromlist, level) - - return mock.patch.object(six.moves.builtins, "__import__", new=custom_import) diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/retry.py b/packages/google-cloud-pubsub/test_utils/test_utils/retry.py deleted file mode 100644 index e61c001a03e1..000000000000 --- a/packages/google-cloud-pubsub/test_utils/test_utils/retry.py +++ /dev/null @@ -1,207 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import time -from functools import wraps - -import six - -MAX_TRIES = 4 -DELAY = 1 -BACKOFF = 2 - - -def _retry_all(_): - """Retry all caught exceptions.""" - return True - - -class BackoffFailed(Exception): - """Retry w/ backoffs did not complete successfully.""" - - -class RetryBase(object): - """Base for retrying calling a decorated function w/ exponential backoff. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - self.max_tries = max_tries - self.delay = delay - self.backoff = backoff - self.logger = logger.warning if logger else six.print_ - - -class RetryErrors(RetryBase): - """Decorator for retrying given exceptions in testing. - - :type exception: Exception or tuple of Exceptions - :param exception: The exception to check or may be a tuple of - exceptions to check. - - :type error_predicate: function, takes caught exception, returns bool - :param error_predicate: Predicate evaluating whether to retry after a - caught exception. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. 
- - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, exception, error_predicate=_retry_all, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryErrors, self).__init__(max_tries, delay, backoff, logger) - self.exception = exception - self.error_predicate = error_predicate - - def __call__(self, to_wrap): - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - try: - return to_wrap(*args, **kwargs) - except self.exception as caught_exception: - - if not self.error_predicate(caught_exception): - raise - - delay = self.delay * self.backoff**tries - msg = ("%s, Trying again in %d seconds..." % - (caught_exception, delay)) - self.logger(msg) - - time.sleep(delay) - tries += 1 - return to_wrap(*args, **kwargs) - - return wrapped_function - - -class RetryResult(RetryBase): - """Decorator for retrying based on non-error result. - - :type result_predicate: function, takes result, returns bool - :param result_predicate: Predicate evaluating whether to retry after a - result is returned. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. 
- """ - def __init__(self, result_predicate, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryResult, self).__init__(max_tries, delay, backoff, logger) - self.result_predicate = result_predicate - - def __call__(self, to_wrap): - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - result = to_wrap(*args, **kwargs) - if self.result_predicate(result): - return result - - delay = self.delay * self.backoff**tries - msg = "%s. Trying again in %d seconds..." % ( - self.result_predicate.__name__, delay,) - self.logger(msg) - - time.sleep(delay) - tries += 1 - raise BackoffFailed() - - return wrapped_function - - -class RetryInstanceState(RetryBase): - """Decorator for retrying based on instance state. - - :type instance_predicate: function, takes instance, returns bool - :param instance_predicate: Predicate evaluating whether to retry after an - API-invoking method is called. - - :type max_tries: int - :param max_tries: Number of times to try (not retry) before giving up. - - :type delay: int - :param delay: Initial delay between retries in seconds. - - :type backoff: int - :param backoff: Backoff multiplier e.g. value of 2 will double the - delay each retry. - - :type logger: logging.Logger instance - :param logger: Logger to use. If None, print. - """ - def __init__(self, instance_predicate, - max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF, - logger=None): - super(RetryInstanceState, self).__init__( - max_tries, delay, backoff, logger) - self.instance_predicate = instance_predicate - - def __call__(self, to_wrap): - instance = to_wrap.__self__ # only instance methods allowed - - @wraps(to_wrap) - def wrapped_function(*args, **kwargs): - tries = 0 - while tries < self.max_tries: - result = to_wrap(*args, **kwargs) - if self.instance_predicate(instance): - return result - - delay = self.delay * self.backoff**tries - msg = "%s. Trying again in %d seconds..." 
% ( - self.instance_predicate.__name__, delay,) - self.logger(msg) - - time.sleep(delay) - tries += 1 - raise BackoffFailed() - - return wrapped_function diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/system.py b/packages/google-cloud-pubsub/test_utils/test_utils/system.py deleted file mode 100644 index 590dc62a06e6..000000000000 --- a/packages/google-cloud-pubsub/test_utils/test_utils/system.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright 2014 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function -import os -import sys -import time - -import google.auth.credentials -from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS - - -# From shell environ. May be None. -CREDENTIALS = os.getenv(TEST_CREDENTIALS) - -ENVIRON_ERROR_MSG = """\ -To run the system tests, you need to set some environment variables. -Please check the CONTRIBUTING guide for instructions. -""" - - -class EmulatorCreds(google.auth.credentials.Credentials): - """A mock credential object. - - Used to avoid unnecessary token refreshing or reliance on the network - while an emulator is running. - """ - - def __init__(self): # pylint: disable=super-init-not-called - self.token = b'seekrit' - self.expiry = None - - @property - def valid(self): - """Would-be validity check of the credentials. - - Always is :data:`True`. 
- """ - return True - - def refresh(self, unused_request): # pylint: disable=unused-argument - """Off-limits implementation for abstract method.""" - raise RuntimeError('Should never be refreshed.') - - -def check_environ(): - err_msg = None - if CREDENTIALS is None: - err_msg = '\nMissing variables: ' + TEST_CREDENTIALS - elif not os.path.isfile(CREDENTIALS): - err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS, - CREDENTIALS) - - if err_msg is not None: - msg = ENVIRON_ERROR_MSG + err_msg - print(msg, file=sys.stderr) - sys.exit(1) - - -def unique_resource_id(delimiter='_'): - """A unique identifier for a resource. - - Intended to help locate resources created in particular - testing environments and at particular times. - """ - build_id = os.getenv('CIRCLE_BUILD_NUM', '') - if build_id == '': - return '%s%d' % (delimiter, 1000 * time.time()) - else: - return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time()) diff --git a/packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py b/packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py deleted file mode 100644 index 36b15d6be991..000000000000 --- a/packages/google-cloud-pubsub/test_utils/test_utils/vpcsc_config.py +++ /dev/null @@ -1,118 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os - -import pytest - - -INSIDE_VPCSC_ENVVAR = "GOOGLE_CLOUD_TESTS_IN_VPCSC" -PROJECT_INSIDE_ENVVAR = "PROJECT_ID" -PROJECT_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_PROJECT" -BUCKET_OUTSIDE_ENVVAR = "GOOGLE_CLOUD_TESTS_VPCSC_OUTSIDE_PERIMETER_BUCKET" - - -class VPCSCTestConfig(object): - """System test utility for VPCSC detection. - - See: https://cloud.google.com/vpc-service-controls/docs/ - """ - - @property - def inside_vpcsc(self): - """Test whether the test environment is configured to run inside VPCSC. - - Returns: - bool: - true if the environment is configured to run inside VPCSC, - else false. - """ - return INSIDE_VPCSC_ENVVAR in os.environ - - @property - def project_inside(self): - """Project ID for testing outside access. - - Returns: - str: project ID used for testing outside access; None if undefined. - """ - return os.environ.get(PROJECT_INSIDE_ENVVAR, None) - - @property - def project_outside(self): - """Project ID for testing inside access. - - Returns: - str: project ID used for testing inside access; None if undefined. - """ - return os.environ.get(PROJECT_OUTSIDE_ENVVAR, None) - - @property - def bucket_outside(self): - """GCS bucket for testing inside access. - - Returns: - str: bucket ID used for testing inside access; None if undefined. - """ - return os.environ.get(BUCKET_OUTSIDE_ENVVAR, None) - - def skip_if_inside_vpcsc(self, testcase): - """Test decorator: skip if running inside VPCSC.""" - reason = ( - "Running inside VPCSC. " - "Unset the {} environment variable to enable this test." - ).format(INSIDE_VPCSC_ENVVAR) - skip = pytest.mark.skipif(self.inside_vpcsc, reason=reason) - return skip(testcase) - - def skip_unless_inside_vpcsc(self, testcase): - """Test decorator: skip if running outside VPCSC.""" - reason = ( - "Running outside VPCSC. " - "Set the {} environment variable to enable this test." 
- ).format(INSIDE_VPCSC_ENVVAR) - skip = pytest.mark.skipif(not self.inside_vpcsc, reason=reason) - return skip(testcase) - - def skip_unless_inside_project(self, testcase): - """Test decorator: skip if inside project env var not set.""" - reason = ( - "Project ID for running inside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(PROJECT_INSIDE_ENVVAR) - skip = pytest.mark.skipif(self.project_inside is None, reason=reason) - return skip(testcase) - - def skip_unless_outside_project(self, testcase): - """Test decorator: skip if outside project env var not set.""" - reason = ( - "Project ID for running outside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(PROJECT_OUTSIDE_ENVVAR) - skip = pytest.mark.skipif(self.project_outside is None, reason=reason) - return skip(testcase) - - def skip_unless_outside_bucket(self, testcase): - """Test decorator: skip if outside bucket env var not set.""" - reason = ( - "Bucket ID for running outside VPCSC not set. " - "Set the {} environment variable to enable this test." - ).format(BUCKET_OUTSIDE_ENVVAR) - skip = pytest.mark.skipif(self.bucket_outside is None, reason=reason) - return skip(testcase) - - -vpcsc_config = VPCSCTestConfig() From c5bcd4c3c81741bde05fefe48cbfff95530cf691 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 4 May 2020 02:30:05 -0700 Subject: [PATCH 0458/1197] chore: install google-cloud-testutils by package name (#94) VPCSC is currently unable to run these tests because testutils is being installed through VCS. 
--- packages/google-cloud-pubsub/noxfile.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index e48f0680a6b7..751b0e04452a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -110,8 +110,7 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest", "psutil") - session.install("git+https://github.com/googleapis/python-test-utils") + session.install("mock", "pytest", "google-cloud-testutils", "psutil") session.install("-e", ".") # Run py.test against the system tests. From 7c626f7fbe40253185da5a979d5511fa489c7eeb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 12 May 2020 12:13:28 -0400 Subject: [PATCH 0459/1197] chore: release 1.5.0 (#88) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py [ci skip] Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 181a8ee64a55..e3d380fe92fa 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [1.5.0](https://www.github.com/googleapis/python-pubsub/compare/v1.4.3...v1.5.0) (2020-05-04) + + +### Features + +* add methods for listing snapshots (via synth) ([#66](https://www.github.com/googleapis/python-pubsub/issues/66)) ([4ce898e](https://www.github.com/googleapis/python-pubsub/commit/4ce898e80eeb16b18d1ee29c678ade149804d186)) +* send client id with 
StreamingPullRequest ([#58](https://www.github.com/googleapis/python-pubsub/issues/58)) ([9f8acfa](https://www.github.com/googleapis/python-pubsub/commit/9f8acfacfbe93224f59439bb51a17fc28b06c22a)), closes [#62](https://www.github.com/googleapis/python-pubsub/issues/62) + ## [1.4.3](https://www.github.com/googleapis/python-pubsub/compare/v1.4.2...v1.4.3) (2020-04-16) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8f5b8755af22..414e7620b284 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.4.3" +version = "1.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 794825d9a7b172e293829bcdb736b6f630d41bc5 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Fri, 15 May 2020 04:13:56 -0400 Subject: [PATCH 0460/1197] docs: clarify that Schedulers shouldn't be used with multiple SubscriberClients (#100) --- .../google/cloud/pubsub_v1/subscriber/client.py | 3 ++- .../google/cloud/pubsub_v1/subscriber/scheduler.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 718e69083fcf..00c8f2498893 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -223,7 +223,8 @@ def callback(message): inundated with too many messages at once. scheduler (~google.cloud.pubsub_v1.subscriber.scheduler.Scheduler): An optional *scheduler* to use when executing the callback. This controls - how callbacks are executed concurrently. + how callbacks are executed concurrently. This object must not be shared + across multiple SubscriberClients. 
Returns: A :class:`~google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture` diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 42674c824c64..ef2ef59cb6bf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -74,7 +74,8 @@ def _make_default_thread_pool_executor(): class ThreadScheduler(Scheduler): - """A thread pool-based scheduler. + """A thread pool-based scheduler. It must not be shared across + SubscriberClients. This scheduler is useful in typical I/O-bound message processing. From 1a5342234298d8ed0036c9d4e7505570bc1a33b7 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 2 Jun 2020 21:09:11 +0200 Subject: [PATCH 0461/1197] feat: add flow control for message publishing (#96) * feat: add publish flow control settings * Add flow control logic to publisher client * Add flow control support for multiple add() threads * Raise publish flow control errors through futures * Include load info in debug log messages * Remove incorrect comment in a test * Remove comment about an error not directly raised * Remove redundant check for reservation exsistence * Change exception for publishing too large a message * Add internal sanity check for byte reservations * Reword the docstring on flow control limits error --- .../cloud/pubsub_v1/publisher/client.py | 25 +- .../cloud/pubsub_v1/publisher/exceptions.py | 5 + .../pubsub_v1/publisher/flow_controller.py | 297 +++++++++++++ .../google/cloud/pubsub_v1/types.py | 78 +++- .../publisher/test_flow_controller.py | 409 ++++++++++++++++++ .../publisher/test_publisher_client.py | 46 +- 6 files changed, 837 insertions(+), 23 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py create mode 100644 
packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index caa7844071d2..8dbbea6348a8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -31,9 +31,12 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.gapic import publisher_client from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer +from google.cloud.pubsub_v1.publisher.flow_controller import FlowController __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ -93,7 +96,11 @@ class Client(object): # Optional publisher_options = pubsub_v1.types.PublisherOptions( - enable_message_ordering=False + enable_message_ordering=False, + flow_control=pubsub_v1.types.PublishFlowControl( + message_limit=2000, + limit_exceeded_behavior=pubsub_v1.types.LimitExceededBehavior.BLOCK, + ), ), # Optional @@ -198,6 +205,9 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): # Thread created to commit all sequencers after a timeout. 
self._commit_thread = None + # The object controlling the message publishing flow + self._flow_controller = FlowController(self.publisher_options.flow_control) + @classmethod def from_service_account_file(cls, filename, batch_settings=(), **kwargs): """Creates an instance of this client using the provided credentials @@ -364,6 +374,18 @@ def publish(self, topic, data, ordering_key="", **attrs): data=data, ordering_key=ordering_key, attributes=attrs ) + # Messages should go through flow control to prevent excessive + # queuing on the client side (depending on the settings). + try: + self._flow_controller.add(message) + except exceptions.FlowControlLimitError as exc: + future = futures.Future() + future.set_exception(exc) + return future + + def on_publish_done(future): + self._flow_controller.release(message) + with self._batch_lock: if self._is_stopped: raise RuntimeError("Cannot publish on a stopped publisher.") @@ -372,6 +394,7 @@ def publish(self, topic, data, ordering_key="", **attrs): # Delegate the publishing to the sequencer. future = sequencer.publish(message) + future.add_done_callback(on_publish_done) # Create a timer thread if necessary to enforce the batching # timeout. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index 856be955a179..89b3790a0a18 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -38,7 +38,12 @@ def __init__(self, ordering_key): super(PublishToPausedOrderingKeyException, self).__init__() +class FlowControlLimitError(Exception): + """An action resulted in exceeding the flow control limits.""" + + __all__ = ( + "FlowControlLimitError", "MessageTooLargeError", "PublishError", "TimeoutError", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py new file mode 100644 index 000000000000..c10fadcef9f0 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -0,0 +1,297 @@ +# Copyright 2020, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque +import logging +import threading +import warnings + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions + + +_LOGGER = logging.getLogger(__name__) + + +class _QuantityReservation(object): + """A (partial) reservation of a quantifiable resource.""" + + def __init__(self, reserved, needed): + self.reserved = reserved + self.needed = needed + + +class FlowController(object): + """A class used to control the flow of messages passing through it. + + Args: + settings (~google.cloud.pubsub_v1.types.PublishFlowControl): + Desired flow control configuration. + """ + + def __init__(self, settings): + self._settings = settings + + # Load statistics. They represent the number of messages added, but not + # yet released (and their total size). + self._message_count = 0 + self._total_bytes = 0 + + # A FIFO queue of threads blocked on adding a message, from first to last. + # Only relevant if the configured limit exceeded behavior is BLOCK. + self._waiting = deque() + + # Reservations of available flow control bytes by the waiting threads. + # Each value is a _QuantityReservation instance. + self._byte_reservations = dict() + self._reserved_bytes = 0 + + # The lock is used to protect all internal state (message and byte count, + # waiting threads to add, etc.). + self._operational_lock = threading.Lock() + + # The condition for blocking the flow if capacity is exceeded. + self._has_capacity = threading.Condition(lock=self._operational_lock) + + def add(self, message): + """Add a message to flow control. + + Adding a message updates the internal load statistics, and an action is + taken if these limits are exceeded (depending on the flow control settings). + + Args: + message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): + The message entering the flow control. 
+ + Raises: + :exception:`~pubsub_v1.publisher.exceptions.FlowControlLimitError`: + Raised when the desired action is + :attr:`~google.cloud.pubsub_v1.types.LimitExceededBehavior.ERROR` and + the message would exceed flow control limits, or when the desired action + is :attr:`~google.cloud.pubsub_v1.types.LimitExceededBehavior.BLOCK` and + the message would block forever against the flow control limits. + """ + if self._settings.limit_exceeded_behavior == types.LimitExceededBehavior.IGNORE: + return + + with self._operational_lock: + if not self._would_overflow(message): + self._message_count += 1 + self._total_bytes += message.ByteSize() + return + + # Adding a message would overflow, react. + if ( + self._settings.limit_exceeded_behavior + == types.LimitExceededBehavior.ERROR + ): + # Raising an error means rejecting a message, thus we do not + # add anything to the existing load, but we do report the would-be + # load if we accepted the message. + load_info = self._load_info( + message_count=self._message_count + 1, + total_bytes=self._total_bytes + message.ByteSize(), + ) + error_msg = "Flow control limits would be exceeded - {}.".format( + load_info + ) + raise exceptions.FlowControlLimitError(error_msg) + + assert ( + self._settings.limit_exceeded_behavior + == types.LimitExceededBehavior.BLOCK + ) + + # Sanity check - if a message exceeds total flow control limits all + # by itself, it would block forever, thus raise error. 
+        if (
+            message.ByteSize() > self._settings.byte_limit
+            or self._settings.message_limit < 1
+        ):
+            load_info = self._load_info(
+                message_count=1, total_bytes=message.ByteSize()
+            )
+            error_msg = (
+                "Total flow control limits too low for the message, "
+                "would block forever - {}.".format(load_info)
+            )
+            raise exceptions.FlowControlLimitError(error_msg)
+
+        current_thread = threading.current_thread()
+
+        while self._would_overflow(message):
+            if current_thread not in self._byte_reservations:
+                self._waiting.append(current_thread)
+                self._byte_reservations[current_thread] = _QuantityReservation(
+                    reserved=0, needed=message.ByteSize()
+                )
+
+            _LOGGER.debug(
+                "Blocking until there is enough free capacity in the flow - "
+                "{}.".format(self._load_info())
+            )
+
+            self._has_capacity.wait()
+
+            _LOGGER.debug(
+                "Woke up from waiting on free capacity in the flow - "
+                "{}.".format(self._load_info())
+            )
+
+        # Message accepted, increase the load and remove thread stats.
+        self._message_count += 1
+        self._total_bytes += message.ByteSize()
+        self._reserved_bytes -= self._byte_reservations[current_thread].reserved
+        del self._byte_reservations[current_thread]
+        self._waiting.remove(current_thread)
+
+    def release(self, message):
+        """Release a message from flow control.
+
+        Args:
+            message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`):
+                The message entering the flow control.
+        """
+        if self._settings.limit_exceeded_behavior == types.LimitExceededBehavior.IGNORE:
+            return
+
+        with self._operational_lock:
+            # Releasing a message decreases the load.
+            self._message_count -= 1
+            self._total_bytes -= message.ByteSize()
+
+            if self._message_count < 0 or self._total_bytes < 0:
+                warnings.warn(
+                    "Releasing a message that was never added or already released.",
+                    category=RuntimeWarning,
+                    stacklevel=2,
+                )
+                self._message_count = max(0, self._message_count)
+                self._total_bytes = max(0, self._total_bytes)
+
+            self._distribute_available_bytes()
+
+            # If at least one thread waiting to add() can be unblocked, wake them up.
+            if self._ready_to_unblock():
+                _LOGGER.debug("Notifying threads waiting to add messages to flow.")
+                self._has_capacity.notify_all()
+
+    def _distribute_available_bytes(self):
+        """Distribute available free capacity among the waiting threads in FIFO order.
+
+        The method assumes that the caller has obtained ``_operational_lock``.
+        """
+        available = self._settings.byte_limit - self._total_bytes - self._reserved_bytes
+
+        for thread in self._waiting:
+            if available <= 0:
+                break
+
+            reservation = self._byte_reservations[thread]
+            still_needed = reservation.needed - reservation.reserved
+
+            # Sanity check for any internal inconsistencies.
+            if still_needed < 0:
+                msg = "Too many bytes reserved: {} / {}".format(
+                    reservation.reserved, reservation.needed
+                )
+                warnings.warn(msg, category=RuntimeWarning)
+                still_needed = 0
+
+            can_give = min(still_needed, available)
+            reservation.reserved += can_give
+            self._reserved_bytes += can_give
+            available -= can_give
+
+    def _ready_to_unblock(self):
+        """Determine if any of the threads waiting to add a message can proceed.
+
+        The method assumes that the caller has obtained ``_operational_lock``.
+
+        Returns:
+            bool
+        """
+        if self._waiting:
+            # It's enough to only check the head of the queue, because of the FIFO
+            # distribution of any free capacity.
+ reservation = self._byte_reservations[self._waiting[0]] + return ( + reservation.reserved >= reservation.needed + and self._message_count < self._settings.message_limit + ) + + return False + + def _would_overflow(self, message): + """Determine if accepting a message would exceed flow control limits. + + The method assumes that the caller has obtained ``_operational_lock``. + + Args: + message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): + The message entering the flow control. + + Returns: + bool + """ + reservation = self._byte_reservations.get(threading.current_thread()) + + if reservation: + enough_reserved = reservation.reserved >= reservation.needed + else: + enough_reserved = False + + bytes_taken = self._total_bytes + self._reserved_bytes + message.ByteSize() + size_overflow = bytes_taken > self._settings.byte_limit and not enough_reserved + msg_count_overflow = self._message_count + 1 > self._settings.message_limit + + return size_overflow or msg_count_overflow + + def _load_info(self, message_count=None, total_bytes=None, reserved_bytes=None): + """Return the current flow control load information. + + The caller can optionally adjust some of the values to fit its reporting + needs. + + The method assumes that the caller has obtained ``_operational_lock``. + + Args: + message_count (Optional[int]): + The value to override the current message count with. + total_bytes (Optional[int]): + The value to override the current total bytes with. + reserved_bytes (Optional[int]): + The value to override the current number of reserved bytes with. 
+ + Returns: + str + """ + msg = "messages: {} / {}, bytes: {} / {} (reserved: {})" + + if message_count is None: + message_count = self._message_count + + if total_bytes is None: + total_bytes = self._total_bytes + + if reserved_bytes is None: + reserved_bytes = self._reserved_bytes + + return msg.format( + message_count, + self._settings.message_limit, + total_bytes, + self._settings.byte_limit, + reserved_bytes, + ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index eb4f00681cfa..b52b3ea60049 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -13,7 +13,9 @@ # limitations under the License. from __future__ import absolute_import + import collections +import enum import sys from google.api import http_pb2 @@ -30,25 +32,6 @@ from google.cloud.pubsub_v1.proto import pubsub_pb2 -# Define the default publisher options. -# -# This class is used when creating a publisher client to pass in options -# to enable/disable features. -PublisherOptions = collections.namedtuple( - "PublisherConfig", ["enable_message_ordering"] -) -PublisherOptions.__new__.__defaults__ = (False,) # enable_message_ordering: False - -if sys.version_info >= (3, 5): - PublisherOptions.__doc__ = "The options for the publisher client." - PublisherOptions.enable_message_ordering.__doc__ = ( - "Whether to order messages in a batch by a supplied ordering key." - "EXPERIMENTAL: Message ordering is an alpha feature that requires " - "special permissions to use. Please contact the Cloud Pub/Sub team for " - "more information." - ) - - # Define the default values for batching. 
# # This class is used when creating a publisher or subscriber client, and @@ -81,6 +64,63 @@ ) +class LimitExceededBehavior(str, enum.Enum): + """The possible actions when exceeding the publish flow control limits.""" + + IGNORE = "ignore" + BLOCK = "block" + ERROR = "error" + + +PublishFlowControl = collections.namedtuple( + "PublishFlowControl", ["message_limit", "byte_limit", "limit_exceeded_behavior"] +) +PublishFlowControl.__new__.__defaults__ = ( + 10 * BatchSettings.__new__.__defaults__[2], # message limit + 10 * BatchSettings.__new__.__defaults__[0], # byte limit + LimitExceededBehavior.IGNORE, # desired behavior +) + +if sys.version_info >= (3, 5): + PublishFlowControl.__doc__ = ( + "The client flow control settings for message publishing." + ) + PublishFlowControl.message_limit.__doc__ = ( + "The maximum number of messages awaiting to be published." + ) + PublishFlowControl.byte_limit.__doc__ = ( + "The maximum total size of messages awaiting to be published." + ) + PublishFlowControl.limit_exceeded_behavior.__doc__ = ( + "The action to take when publish flow control limits are exceeded." + ) + +# Define the default publisher options. +# +# This class is used when creating a publisher client to pass in options +# to enable/disable features. +PublisherOptions = collections.namedtuple( + "PublisherConfig", ["enable_message_ordering", "flow_control"] +) +PublisherOptions.__new__.__defaults__ = ( + False, # enable_message_ordering: False + PublishFlowControl(), # default flow control settings +) + +if sys.version_info >= (3, 5): + PublisherOptions.__doc__ = "The options for the publisher client." + PublisherOptions.enable_message_ordering.__doc__ = ( + "Whether to order messages in a batch by a supplied ordering key." + "EXPERIMENTAL: Message ordering is an alpha feature that requires " + "special permissions to use. Please contact the Cloud Pub/Sub team for " + "more information." 
+ ) + PublisherOptions.flow_control.__doc__ = ( + "Flow control settings for message publishing by the client. By default " + "the publisher client does not do any throttling." + ) + + # Define the type class and default values for flow control settings. # # This class is used when creating a publisher or subscriber client, and diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py new file mode 100644 index 000000000000..26a61663b948 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -0,0 +1,409 @@ +# Copyright 2020, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import threading +import time +import warnings + +import pytest + +from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions +from google.cloud.pubsub_v1.publisher.flow_controller import FlowController + + +def _run_in_daemon( + flow_controller, + action, + messages, + all_done_event, + error_event=None, + action_pause=None, +): + """Run flow controller action (add or remove messages) in a daemon thread. 
+ """ + assert action in ("add", "release") + + def run_me(): + method = getattr(flow_controller, action) + + try: + for msg in messages: + if action_pause is not None: + time.sleep(action_pause) + method(msg) + except Exception: + if error_event is not None: + error_event.set() + else: + all_done_event.set() + + thread = threading.Thread(target=run_me) + thread.daemon = True + thread.start() + + +def test_no_overflow_no_error(): + settings = types.PublishFlowControl( + message_limit=100, + byte_limit=10000, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + flow_controller = FlowController(settings) + + # there should be no errors + for data in (b"foo", b"bar", b"baz"): + msg = types.PubsubMessage(data=data) + flow_controller.add(msg) + + +def test_overflow_no_error_on_ignore(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=2, + limit_exceeded_behavior=types.LimitExceededBehavior.IGNORE, + ) + flow_controller = FlowController(settings) + + # there should be no overflow errors + flow_controller.add(types.PubsubMessage(data=b"foo")) + flow_controller.add(types.PubsubMessage(data=b"bar")) + + +def test_message_count_overflow_error(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=10000, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + flow_controller = FlowController(settings) + + flow_controller.add(types.PubsubMessage(data=b"foo")) + with pytest.raises(exceptions.FlowControlLimitError) as error: + flow_controller.add(types.PubsubMessage(data=b"bar")) + + assert "messages: 2 / 1" in str(error.value) + + +def test_byte_size_overflow_error(): + settings = types.PublishFlowControl( + message_limit=10000, + byte_limit=199, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + flow_controller = FlowController(settings) + + # Since the message data itself occupies 100 bytes, it means that both + # messages combined will exceed the imposed byte limit of 199, but a single + # 
message will not (the message size overhead is way lower than data size). + msg1 = types.PubsubMessage(data=b"x" * 100) + msg2 = types.PubsubMessage(data=b"y" * 100) + + flow_controller.add(msg1) + with pytest.raises(exceptions.FlowControlLimitError) as error: + flow_controller.add(msg2) + + total_size = msg1.ByteSize() + msg2.ByteSize() + expected_info = "bytes: {} / 199".format(total_size) + assert expected_info in str(error.value) + + +def test_no_error_on_moderate_message_flow(): + settings = types.PublishFlowControl( + message_limit=2, + byte_limit=250, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + flow_controller = FlowController(settings) + + msg1 = types.PubsubMessage(data=b"x" * 100) + msg2 = types.PubsubMessage(data=b"y" * 100) + msg3 = types.PubsubMessage(data=b"z" * 100) + + # The flow control settings will accept two in-flight messages, but not three. + # If releasing messages works correctly, the sequence below will not raise errors. + flow_controller.add(msg1) + flow_controller.add(msg2) + flow_controller.release(msg1) + flow_controller.add(msg3) + flow_controller.release(msg2) + flow_controller.release(msg3) + + +def test_rejected_messages_do_not_increase_total_load(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + flow_controller = FlowController(settings) + + msg1 = types.PubsubMessage(data=b"x" * 100) + msg2 = types.PubsubMessage(data=b"y" * 100) + + flow_controller.add(msg1) + + for _ in range(5): + with pytest.raises(exceptions.FlowControlLimitError): + flow_controller.add(types.PubsubMessage(data=b"z" * 100)) + + # After releasing a message we should again be able to add another one, despite + # previously trying to add a lot of other messages. 
+ flow_controller.release(msg1) + flow_controller.add(msg2) + + +def test_incorrectly_releasing_too_many_messages(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + flow_controller = FlowController(settings) + + msg1 = types.PubsubMessage(data=b"x" * 100) + msg2 = types.PubsubMessage(data=b"y" * 100) + msg3 = types.PubsubMessage(data=b"z" * 100) + + # Releasing a message that would make the load negative should result in a warning. + with warnings.catch_warnings(record=True) as warned: + flow_controller.release(msg1) + + assert len(warned) == 1 + assert issubclass(warned[0].category, RuntimeWarning) + warning_msg = str(warned[0].message) + assert "never added or already released" in warning_msg + + # Incorrectly removing a message does not mess up internal stats, we can + # still only add a single message at a time to this flow. + flow_controller.add(msg2) + + with pytest.raises(exceptions.FlowControlLimitError) as error: + flow_controller.add(msg3) + + error_msg = str(error.value) + assert "messages: 2 / 1" in error_msg + total_size = msg2.ByteSize() + msg3.ByteSize() + expected_size_info = "bytes: {} / 150".format(total_size) + assert expected_size_info in error_msg + + +def test_blocking_on_overflow_until_free_capacity(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.BLOCK, + ) + flow_controller = FlowController(settings) + + msg1 = types.PubsubMessage(data=b"x" * 100) + msg2 = types.PubsubMessage(data=b"y" * 100) + msg3 = types.PubsubMessage(data=b"z" * 100) + msg4 = types.PubsubMessage(data=b"w" * 100) + + # If there is a concurrency bug in FlowController, we do not want to block + # the main thread running the tests, thus we delegate all add/release + # operations to daemon threads and check the outcome (blocked/not blocked) + # through Events. 
+ adding_1_done = threading.Event() + adding_2_done = threading.Event() + adding_3_done = threading.Event() + adding_4_done = threading.Event() + releasing_1_done = threading.Event() + releasing_x_done = threading.Event() + + # Adding a message with free capacity should not block. + _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) + if not adding_1_done.wait(timeout=0.1): + pytest.fail("Adding a message with enough flow capacity blocked or errored.") + + # Adding messages when there is not enough capacity should block, even if + # added through multiple threads. + _run_in_daemon(flow_controller, "add", [msg2], adding_2_done) + if adding_2_done.wait(timeout=0.1): + pytest.fail("Adding a message on overflow did not block.") + + _run_in_daemon(flow_controller, "add", [msg3], adding_3_done) + if adding_3_done.wait(timeout=0.1): + pytest.fail("Adding a message on overflow did not block.") + + _run_in_daemon(flow_controller, "add", [msg4], adding_4_done) + if adding_4_done.wait(timeout=0.1): + pytest.fail("Adding a message on overflow did not block.") + + # After releasing one message, there should be room for a new message, which + # should result in unblocking one of the waiting threads. + _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) + if not releasing_1_done.wait(timeout=0.1): + pytest.fail("Releasing a message blocked or errored.") + + done_status = [ + adding_2_done.wait(timeout=0.1), + adding_3_done.wait(timeout=0.1), + adding_4_done.wait(timeout=0.1), + ] + + # In sum() we use the fact that True==1 and False==0, and that Event.wait() + # returns False only if it times out, i.e. its internal flag has not been set. + done_count = sum(done_status) + assert done_count == 1, "Exactly one thread should have been unblocked." + + # Release another message and verify that yet another thread gets unblocked. 
+ added_msg = [msg2, msg3, msg4][done_status.index(True)] + _run_in_daemon(flow_controller, "release", [added_msg], releasing_x_done) + + if not releasing_x_done.wait(timeout=0.1): + pytest.fail("Releasing messages blocked or errored.") + + released_count = sum( + ( + adding_2_done.wait(timeout=0.1), + adding_3_done.wait(timeout=0.1), + adding_4_done.wait(timeout=0.1), + ) + ) + assert released_count == 2, "Exactly two threads should have been unblocked." + + +def test_error_if_mesage_would_block_indefinitely(): + settings = types.PublishFlowControl( + message_limit=0, # simulate non-sane settings + byte_limit=1, + limit_exceeded_behavior=types.LimitExceededBehavior.BLOCK, + ) + flow_controller = FlowController(settings) + + msg = types.PubsubMessage(data=b"xyz") + adding_done = threading.Event() + error_event = threading.Event() + + _run_in_daemon(flow_controller, "add", [msg], adding_done, error_event=error_event) + + assert error_event.wait(timeout=0.1), "No error on adding too large a message." + + # Now that we know that an error occurs, we can check its type directly + # without the fear of blocking indefinitely. 
+    flow_controller = FlowController(settings)  # we want a fresh controller
+    with pytest.raises(exceptions.FlowControlLimitError) as error_info:
+        flow_controller.add(msg)
+
+    error_msg = str(error_info.value)
+    assert "would block forever" in error_msg
+    assert "messages: 1 / 0" in error_msg
+    assert "bytes: {} / 1".format(msg.ByteSize()) in error_msg
+
+
+def test_threads_posting_large_messages_do_not_starve():
+    settings = types.PublishFlowControl(
+        message_limit=100,
+        byte_limit=110,
+        limit_exceeded_behavior=types.LimitExceededBehavior.BLOCK,
+    )
+    flow_controller = FlowController(settings)
+
+    large_msg = types.PubsubMessage(data=b"x" * 100)  # close to entire byte limit
+
+    adding_initial_done = threading.Event()
+    adding_large_done = threading.Event()
+    adding_busy_done = threading.Event()
+    releasing_busy_done = threading.Event()
+    releasing_large_done = threading.Event()
+
+    # Occupy some of the flow capacity, then try to add a large message. Releasing
+    # enough messages should eventually allow the large message to come through, even
+    # if more messages are added after it (those should wait for the large message).
+    initial_messages = [types.PubsubMessage(data=b"x" * 10)] * 5
+    _run_in_daemon(flow_controller, "add", initial_messages, adding_initial_done)
+    assert adding_initial_done.wait(timeout=0.1)
+
+    _run_in_daemon(flow_controller, "add", [large_msg], adding_large_done)
+
+    # Continuously keep adding more messages after the large one.
+    messages = [types.PubsubMessage(data=b"x" * 10)] * 10
+    _run_in_daemon(flow_controller, "add", messages, adding_busy_done, action_pause=0.1)
+
+    # At the same time, gradually keep releasing the messages - the freed-up
+    # capacity should be consumed by the large message, not the other small messages
+    # being added after it.
+    _run_in_daemon(
+        flow_controller, "release", messages, releasing_busy_done, action_pause=0.1
+    )
+
+    # Sanity check - releasing should have completed by now.
+ if not releasing_busy_done.wait(timeout=1.1): + pytest.fail("Releasing messages blocked or errored.") + + # Enough messages released, the large message should have come through in + # the meantime. + if not adding_large_done.wait(timeout=0.1): + pytest.fail("A thread adding a large message starved.") + + if adding_busy_done.wait(timeout=0.1): + pytest.fail("Adding multiple small messages did not block.") + + # Releasing the large message should unblock adding the remaining "busy" messages + # that have not been added yet. + _run_in_daemon(flow_controller, "release", [large_msg], releasing_large_done) + if not releasing_large_done.wait(timeout=0.1): + pytest.fail("Releasing a message blocked or errored.") + + if not adding_busy_done.wait(timeout=1.0): + pytest.fail("Adding messages blocked or errored.") + + +def test_warning_on_internal_reservation_stats_error_when_unblocking(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.BLOCK, + ) + flow_controller = FlowController(settings) + + msg1 = types.PubsubMessage(data=b"x" * 100) + msg2 = types.PubsubMessage(data=b"y" * 100) + + # If there is a concurrency bug in FlowController, we do not want to block + # the main thread running the tests, thus we delegate all add/release + # operations to daemon threads and check the outcome (blocked/not blocked) + # through Events. + adding_1_done = threading.Event() + adding_2_done = threading.Event() + releasing_1_done = threading.Event() + + # Adding a message with free capacity should not block. + _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) + if not adding_1_done.wait(timeout=0.1): + pytest.fail("Adding a message with enough flow capacity blocked or errored.") + + # Adding messages when there is not enough capacity should block, even if + # added through multiple threads. 
+ _run_in_daemon(flow_controller, "add", [msg2], adding_2_done) + if adding_2_done.wait(timeout=0.1): + pytest.fail("Adding a message on overflow did not block.") + + # Intentionally corrupt internal stats + reservation = next(iter(flow_controller._byte_reservations.values()), None) + assert reservation is not None, "No messages blocked by flow controller." + reservation.reserved = reservation.needed + 1 + + with warnings.catch_warnings(record=True) as warned: + _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) + if not releasing_1_done.wait(timeout=0.1): + pytest.fail("Releasing a message blocked or errored.") + + matches = [warning for warning in warned if warning.category is RuntimeWarning] + assert len(matches) == 1 + assert "too many bytes reserved" in str(matches[0].message).lower() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 4ca979892b9d..4e3a3870f74d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -25,6 +25,7 @@ from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types +from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer @@ -125,11 +126,17 @@ def test_publish(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) + future1 = mock.sentinel.future1 + future2 = mock.sentinel.future2 + future1.add_done_callback = mock.Mock(spec=["__call__"]) + future2.add_done_callback = mock.Mock(spec=["__call__"]) + # Use a mock in lieu of the actual batch class. batch = mock.Mock(spec=client._batch_class) + # Set the mock up to claim indiscriminately that it accepts all messages. 
batch.will_accept.return_value = True - batch.publish.side_effect = (mock.sentinel.future1, mock.sentinel.future2) + batch.publish.side_effect = (future1, future2) topic = "topic/path" client._set_batch(topic, batch) @@ -150,6 +157,30 @@ def test_publish(): ) +def test_publish_error_exceeding_flow_control_limits(): + creds = mock.Mock(spec=credentials.Credentials) + publisher_options = types.PublisherOptions( + flow_control=types.PublishFlowControl( + message_limit=10, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ) + ) + client = publisher.Client(credentials=creds, publisher_options=publisher_options) + + mock_batch = mock.Mock(spec=client._batch_class) + mock_batch.will_accept.return_value = True + topic = "topic/path" + client._set_batch(topic, mock_batch) + + future1 = client.publish(topic, b"a" * 100) + future2 = client.publish(topic, b"b" * 100) + + future1.result() # no error, still within flow control limits + with pytest.raises(exceptions.FlowControlLimitError): + future2.result() + + def test_publish_data_not_bytestring_error(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) @@ -208,10 +239,13 @@ def test_publish_new_batch_needed(): # Use mocks in lieu of the actual batch class. batch1 = mock.Mock(spec=client._batch_class) batch2 = mock.Mock(spec=client._batch_class) + # Set the first mock up to claim indiscriminately that it rejects all # messages and the second accepts all. + future = mock.sentinel.future + future.add_done_callback = mock.Mock(spec=["__call__"]) batch1.publish.return_value = None - batch2.publish.return_value = mock.sentinel.future + batch2.publish.return_value = future topic = "topic/path" client._set_batch(topic, batch1) @@ -390,9 +424,15 @@ def test_publish_with_ordering_key(): # Use a mock in lieu of the actual batch class. batch = mock.Mock(spec=client._batch_class) + # Set the mock up to claim indiscriminately that it accepts all messages. 
+ future1 = mock.sentinel.future1 + future2 = mock.sentinel.future2 + future1.add_done_callback = mock.Mock(spec=["__call__"]) + future2.add_done_callback = mock.Mock(spec=["__call__"]) + batch.will_accept.return_value = True - batch.publish.side_effect = (mock.sentinel.future1, mock.sentinel.future2) + batch.publish.side_effect = (future1, future2) topic = "topic/path" ordering_key = "k1" From ae51ddfb82a6432a4f56497d37c0e32ba7f87897 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Sat, 6 Jun 2020 15:11:53 +0200 Subject: [PATCH 0462/1197] docs: fix update subscription/snapshot/topic samples (#113) --- .../cloud/pubsub_v1/gapic/publisher_client.py | 10 ++-- .../pubsub_v1/gapic/subscriber_client.py | 12 +++- packages/google-cloud-pubsub/synth.py | 56 +++++++++++++++++++ 3 files changed, 72 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 1a981fefac1f..d59c247af2cf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -349,11 +349,13 @@ def update_topic( >>> >>> client = pubsub_v1.PublisherClient() >>> - >>> # TODO: Initialize `topic`: - >>> topic = {} + >>> topic_name = 'projects/my-project/topics/my-topic' + >>> topic_labels = {'source': 'external'} + >>> topic = {'name': topic_name, 'labels': topic_labels} >>> - >>> # TODO: Initialize `update_mask`: - >>> update_mask = {} + >>> paths_element = 'labels' + >>> paths = [paths_element] + >>> update_mask = {'paths': paths} >>> >>> response = client.update_topic(topic, update_mask) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index e98a686bf3ae..502f11a7f2aa 100644 --- 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -525,7 +525,11 @@ def update_subscription( >>> client = pubsub_v1.SubscriberClient() >>> >>> ack_deadline_seconds = 42 - >>> subscription = {'ack_deadline_seconds': ack_deadline_seconds} + >>> subscription_name = 'projects/my-project/subscriptions/my-subscription' + >>> subscription = { + ... 'name': subscription_name, + ... 'ack_deadline_seconds': ack_deadline_seconds, + ... } >>> paths_element = 'ack_deadline_seconds' >>> paths = [paths_element] >>> update_mask = {'paths': paths} @@ -1493,7 +1497,11 @@ def update_snapshot( >>> >>> seconds = 123456 >>> expire_time = {'seconds': seconds} - >>> snapshot = {'expire_time': expire_time} + >>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' + >>> snapshot = { + ... 'name': snapshot_name, + ... 'expire_time': expire_time, + ... } >>> paths_element = 'expire_time' >>> paths = [paths_element] >>> update_mask = {'paths': paths} diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 645b7955b702..9e475e781d2c 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -14,6 +14,8 @@ """This script is used to synthesize generated parts of this library.""" +import textwrap + import synthtool as s from synthtool import gcp @@ -183,6 +185,60 @@ def _merge_dict(d1, d2): "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", ) +# Fix incomplete docstring examples. +s.replace( + "google/cloud/pubsub_v1/gapic/subscriber_client.py", + r"\s+>>> subscription = \{'ack_deadline_seconds': ack_deadline_seconds\}", + textwrap.indent( + """ +>>> subscription_name = 'projects/my-project/subscriptions/my-subscription' +>>> subscription = { +... 'name': subscription_name, +... 'ack_deadline_seconds': ack_deadline_seconds, +... 
}""", + prefix=" " * 12, + ) +) + +s.replace( + "google/cloud/pubsub_v1/gapic/subscriber_client.py", + r"\s+>>> snapshot = \{'expire_time': expire_time\}", + textwrap.indent( + """ +>>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' +>>> snapshot = { +... 'name': snapshot_name, +... 'expire_time': expire_time, +... }""", + prefix=" " * 12, + ) +) + +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + r"\s+>>> # TODO: Initialize `topic`:\n\s+>>> topic = \{\}\n", + textwrap.indent( + """ +>>> topic_name = 'projects/my-project/topics/my-topic' +>>> topic_labels = {'source': 'external'} +>>> topic = {'name': topic_name, 'labels': topic_labels} +""", + prefix=" " * 12, + ), +) + +s.replace( + "google/cloud/pubsub_v1/gapic/publisher_client.py", + r"\s+>>> # TODO: Initialize `update_mask`:\n\s+>>> update_mask = \{\}\n", + textwrap.indent( + """ +>>> paths_element = 'labels' +>>> paths = [paths_element] +>>> update_mask = {'paths': paths} +""", + prefix=" " * 12, + ), +) # ---------------------------------------------------------------------------- # Add templated files From 9d91ab2b827b72b3d6f2132e8031ac686574ca88 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 9 Jun 2020 10:22:11 -0700 Subject: [PATCH 0463/1197] chore: re-generated to pick up changes in the API or client library generator (#114) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/99f854b3-bcbd-4ca9-a44c-2a6dea51ca52/targets - [ ] To automatically regenerate this PR, check this box. 
PiperOrigin-RevId: 314795690 Source-Link: https://github.com/googleapis/googleapis/commit/86285bbd54fbf9708838219e3422aa47fb8fc0b0 PiperOrigin-RevId: 313812131 Source-Link: https://github.com/googleapis/googleapis/commit/fb21971becb6c0441efc855dabc371825732872c PiperOrigin-RevId: 313795529 Source-Link: https://github.com/googleapis/googleapis/commit/f4bd71cc173d848ec24d28eede271cca9abc411f Source-Link: https://github.com/googleapis/synthtool/commit/4e1d2cb79b02d7496b1452f91c518630c207145e Source-Link: https://github.com/googleapis/synthtool/commit/e99975b6b49827b8720f0a885e218dbdb67849ca Source-Link: https://github.com/googleapis/synthtool/commit/ffe10407ee2f261c799fb0d01bf32a8abc67ed1e Source-Link: https://github.com/googleapis/synthtool/commit/71b8a272549c06b5768d00fa48d3ae990e871bec PiperOrigin-RevId: 313488995 Source-Link: https://github.com/googleapis/googleapis/commit/eafa840ceec23b44a5c21670288107c661252711 PiperOrigin-RevId: 313460921 Source-Link: https://github.com/googleapis/googleapis/commit/c4e37010d74071851ff24121f522e802231ac86e PiperOrigin-RevId: 313276022 Source-Link: https://github.com/googleapis/googleapis/commit/f5f268f5293e60143ac742a4eeb7dd6306ebf505 PiperOrigin-RevId: 312689208 Source-Link: https://github.com/googleapis/googleapis/commit/dec3204175104cef49bf21d685d5517caaf0058f Source-Link: https://github.com/googleapis/synthtool/commit/d2364eb80b840a36136c8ce12f1c6efabcc9600e Source-Link: https://github.com/googleapis/synthtool/commit/7482e79a82e353248769d819788adc1213e8c207 PiperOrigin-RevId: 310595049 Source-Link: https://github.com/googleapis/googleapis/commit/73d4b5d9a791f8b1ee63d439ffe909bb8ffa07f7 Source-Link: https://github.com/googleapis/synthtool/commit/09c48461232ce929c34386259eb59018ad2d8eef PiperOrigin-RevId: 309824146 Source-Link: https://github.com/googleapis/googleapis/commit/e0f9d9e1f9de890db765be46f45ca8490723e3eb --- packages/google-cloud-pubsub/.flake8 | 2 + packages/google-cloud-pubsub/.gitignore | 2 + 
.../.kokoro/publish-docs.sh | 2 - .../google-cloud-pubsub/.kokoro/release.sh | 2 - .../.kokoro/samples/lint/common.cfg | 34 + .../.kokoro/samples/lint/continuous.cfg | 6 + .../.kokoro/samples/lint/periodic.cfg | 6 + .../.kokoro/samples/lint/presubmit.cfg | 6 + .../.kokoro/samples/python3.6/common.cfg | 34 + .../.kokoro/samples/python3.6/continuous.cfg | 7 + .../.kokoro/samples/python3.6/periodic.cfg | 6 + .../.kokoro/samples/python3.6/presubmit.cfg | 6 + .../.kokoro/samples/python3.7/common.cfg | 34 + .../.kokoro/samples/python3.7/continuous.cfg | 6 + .../.kokoro/samples/python3.7/periodic.cfg | 6 + .../.kokoro/samples/python3.7/presubmit.cfg | 6 + .../.kokoro/samples/python3.8/common.cfg | 34 + .../.kokoro/samples/python3.8/continuous.cfg | 6 + .../.kokoro/samples/python3.8/periodic.cfg | 6 + .../.kokoro/samples/python3.8/presubmit.cfg | 6 + .../.kokoro/test-samples.sh | 104 ++ packages/google-cloud-pubsub/MANIFEST.in | 3 + packages/google-cloud-pubsub/docs/conf.py | 5 +- packages/google-cloud-pubsub/docs/index.rst | 2 + .../docs/multiprocessing.rst | 7 + .../cloud/pubsub_v1/gapic/publisher_client.py | 102 +- .../gapic/publisher_client_config.py | 6 + .../pubsub_v1/gapic/subscriber_client.py | 136 +- .../transports/publisher_grpc_transport.py | 18 +- .../transports/subscriber_grpc_transport.py | 43 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 61 +- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 1440 ++++++++++------- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 22 +- packages/google-cloud-pubsub/noxfile.py | 19 +- .../scripts/decrypt-secrets.sh | 33 + .../scripts/readme-gen/readme_gen.py | 66 + .../readme-gen/templates/README.tmpl.rst | 87 + .../readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 + .../templates/install_portaudio.tmpl.rst | 35 + packages/google-cloud-pubsub/synth.metadata | 20 +- packages/google-cloud-pubsub/synth.py | 8 + .../google-cloud-pubsub/testing/.gitignore | 3 + 
.../unit/gapic/v1/test_publisher_client_v1.py | 39 + .../gapic/v1/test_subscriber_client_v1.py | 6 + 46 files changed, 1794 insertions(+), 740 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg create mode 100755 packages/google-cloud-pubsub/.kokoro/test-samples.sh create mode 100644 packages/google-cloud-pubsub/docs/multiprocessing.rst create mode 100755 packages/google-cloud-pubsub/scripts/decrypt-secrets.sh create mode 100644 packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py create mode 100644 packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst create mode 100644 
packages/google-cloud-pubsub/scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 packages/google-cloud-pubsub/scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 packages/google-cloud-pubsub/scripts/readme-gen/templates/install_portaudio.tmpl.rst create mode 100644 packages/google-cloud-pubsub/testing/.gitignore diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 20fe9bda2ee4..ed9316381c9c 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -21,6 +21,8 @@ exclude = # Exclude generated code. **/proto/** **/gapic/** + **/services/** + **/types/** *_pb2.py # Standard linting exemptions. diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore index 3fb06e09ce74..b87e1ed580d9 100644 --- a/packages/google-cloud-pubsub/.gitignore +++ b/packages/google-cloud-pubsub/.gitignore @@ -10,6 +10,7 @@ dist build eggs +.eggs parts bin var @@ -49,6 +50,7 @@ bigquery/docs/generated # Virtual environment env/ coverage.xml +sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index 6a15192de2ec..f462c727b504 100755 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Disable buffering, so that the logs stream through. 
diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index 9f8df9b3eb74..321ef575ce7a 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Start the releasetool reporter diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg new file mode 100644 index 000000000000..7dcd141937cd --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 000000000000..354ad19efb4c --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.6" +} + +env_vars: { + key: 
"TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 000000000000..7218af1499e5 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git 
a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 000000000000..becd0399dfe1 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 000000000000..685dfdc590c9 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: 
"RUN_TESTS_SESSION" + value: "py-3.8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: 
"INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh new file mode 100755 index 000000000000..98851b56ba8f --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-pubsub + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE +fi + +# Disable buffering, so that the logs stream through. 
+export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. 
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index 68855abc3f02..e9e29d12033d 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -20,3 +20,6 @@ recursive-include google *.json *.proto recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 20c1b57fe653..3e4bdfa1d7ad 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -38,6 +38,7 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags @@ -49,10 +50,6 @@ # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst index 5f6f8d4d26d1..ae3053625489 100644 --- a/packages/google-cloud-pubsub/docs/index.rst +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -1,5 +1,7 @@ .. include:: README.rst +.. include:: multiprocessing.rst + API Documentation ----------------- diff --git a/packages/google-cloud-pubsub/docs/multiprocessing.rst b/packages/google-cloud-pubsub/docs/multiprocessing.rst new file mode 100644 index 000000000000..1cb29d4ca967 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpcio` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.Pool` or + :class:`multiprocessing.Process`. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index d59c247af2cf..6a60d775b8c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -119,6 +119,15 @@ def project_path(cls, project): "projects/{project}", project=project ) + @classmethod + def subscription_path(cls, project, subscription): + """Return a fully-qualified subscription string.""" + return google.api_core.path_template.expand( + "projects/{project}/subscriptions/{subscription}", + project=project, + subscription=subscription, + ) + @classmethod def topic_path(cls, project, topic): """Return a fully-qualified topic string.""" @@ -364,9 +373,9 @@ def update_topic( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. Must - be specified and non-empty. Note that if ``update_mask`` contains - "message\_storage\_policy" but the ``message_storage_policy`` is not set + update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. + Must be specified and non-empty. Note that if ``update_mask`` contains + "message_storage_policy" but the ``message_storage_policy`` is not set in the ``topic`` provided above, then the updated value is determined by the policy configured at the project or organization level. @@ -445,8 +454,8 @@ def publish( >>> response = client.publish(topic, messages) Args: - topic (str): Required. The messages in the request will be published on this topic. - Format is ``projects/{project}/topics/{topic}``. + topic (str): Required. 
The messages in the request will be published on this + topic. Format is ``projects/{project}/topics/{topic}``. messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): Required. The messages to publish. If a dict is provided, it must be of the same form as the protobuf @@ -680,7 +689,7 @@ def list_topic_subscriptions( metadata=None, ): """ - Lists the names of the subscriptions on this topic. + Lists the names of the attached subscriptions on this topic. Example: >>> from google.cloud import pubsub_v1 @@ -813,8 +822,8 @@ def list_topic_snapshots( ... pass Args: - topic (str): Required. The name of the topic that snapshots are attached to. Format - is ``projects/{project}/topics/{topic}``. + topic (str): Required. The name of the topic that snapshots are attached to. + Format is ``projects/{project}/topics/{topic}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1153,8 +1162,8 @@ def test_iam_permissions( Args: resource (str): REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with - wildcards (such as '*' or 'storage.*') are not allowed. For more + permissions (list[str]): The set of permissions to check for the ``resource``. Permissions + with wildcards (such as '*' or 'storage.*') are not allowed. For more information see `IAM Overview `__. 
retry (Optional[google.api_core.retry.Retry]): A retry object used @@ -1206,3 +1215,76 @@ def test_iam_permissions( return self._inner_api_calls["test_iam_permissions"]( request, retry=retry, timeout=timeout, metadata=metadata ) + + def detach_subscription( + self, + subscription, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): + """ + Detaches a subscription from this topic. All messages retained in + the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` + requests will return FAILED_PRECONDITION. If the subscription is a push + subscription, pushes to the endpoint will stop. + + Example: + >>> from google.cloud import pubsub_v1 + >>> + >>> client = pubsub_v1.PublisherClient() + >>> + >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') + >>> + >>> response = client.detach_subscription(subscription) + + Args: + subscription (str): Required. The subscription to detach. Format is + ``projects/{project}/subscriptions/{subscription}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will + be retried using a default configuration. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.pubsub_v1.types.DetachSubscriptionResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + # Wrap the transport method to add retry and timeout logic. 
+ if "detach_subscription" not in self._inner_api_calls: + self._inner_api_calls[ + "detach_subscription" + ] = google.api_core.gapic_v1.method.wrap_method( + self.transport.detach_subscription, + default_retry=self._method_configs["DetachSubscription"].retry, + default_timeout=self._method_configs["DetachSubscription"].timeout, + client_info=self._client_info, + ) + + request = pubsub_pb2.DetachSubscriptionRequest(subscription=subscription) + if metadata is None: + metadata = [] + metadata = list(metadata) + try: + routing_header = [("subscription", subscription)] + except AttributeError: + pass + else: + routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + routing_header + ) + metadata.append(routing_metadata) + + return self._inner_api_calls["detach_subscription"]( + request, retry=retry, timeout=timeout, metadata=metadata + ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py index ec8f8baec789..8c96fd10b066 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py @@ -3,6 +3,7 @@ "google.pubsub.v1.Publisher": { "retry_codes": { "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], + "non_idempotent2": [], "non_idempotent": ["UNAVAILABLE"], "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], "none": [], @@ -99,6 +100,11 @@ "retry_codes_name": "non_idempotent", "retry_params_name": "default", }, + "DetachSubscription": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent2", + "retry_params_name": "default", + }, }, } } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 502f11a7f2aa..400bdc3da638 100644 --- 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -51,8 +51,8 @@ class SubscriberClient(object): """ - The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the ``Pull`` method or by + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or by establishing a bi-directional stream using the ``StreamingPull`` method. """ @@ -249,14 +249,15 @@ def create_subscription( filter_=None, dead_letter_policy=None, retry_policy=None, + detached=None, retry=google.api_core.gapic_v1.method.DEFAULT, timeout=google.api_core.gapic_v1.method.DEFAULT, metadata=None, ): """ - Creates a subscription to a given topic. See the resource name rules. If - the subscription already exists, returns ``ALREADY_EXISTS``. If the - corresponding topic doesn't exist, returns ``NOT_FOUND``. + Creates a subscription to a given topic. See the resource name + rules. If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, @@ -287,15 +288,15 @@ def create_subscription( receiving messages. Format is ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been deleted. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used to - configure it. An empty ``pushConfig`` signifies that the subscriber will - pull and ack messages using API methods. + push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used + to configure it. 
An empty ``pushConfig`` signifies that the subscriber + will pull and ack messages using API methods. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.PushConfig` - ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits - for the subscriber to acknowledge receipt before resending the message. - In the interval after the message is delivered and before it is + ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub + waits for the subscriber to acknowledge receipt before resending the + message. In the interval after the message is delivered and before it is acknowledged, it is considered to be outstanding. During that time period, the message will not be redelivered (on a best-effort basis). @@ -348,34 +349,33 @@ def create_subscription( filter_ (str): An expression written in the Cloud Pub/Sub filter language. If non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field matches the filter are delivered on this subscription. If empty, then no - messages are filtered out. EXPERIMENTAL: This feature is part of a - closed alpha release. This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not subject to any - SLA or deprecation policy. - dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages in - this subscription. If dead\_letter\_policy is not set, dead lettering is - disabled. + messages are filtered out. + dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages + in this subscription. If dead_letter_policy is not set, dead lettering + is disabled. 
The Cloud Pub/Sub service account associated with this subscriptions's parent project (i.e., - service-{project\_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.DeadLetterPolicy` - retry_policy (Union[dict, ~google.cloud.pubsub_v1.types.RetryPolicy]): A policy that specifies how Cloud Pub/Sub retries message delivery for this + retry_policy (Union[dict, ~google.cloud.pubsub_v1.types.RetryPolicy]): A policy that specifies how Pub/Sub retries message delivery for this subscription. If not set, the default retry policy is applied. This generally implies that messages will be retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message. - EXPERIMENTAL: This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not subject to any - SLA or deprecation policy. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.RetryPolicy` + detached (bool): Indicates whether the subscription is detached from its topic. + Detached subscriptions don't receive messages from their topic and don't + retain any backlog. ``Pull`` and ``StreamingPull`` requests will return + FAILED_PRECONDITION. If the subscription is a push subscription, pushes + to the endpoint will not be made. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -419,6 +419,7 @@ def create_subscription( filter=filter_, dead_letter_policy=dead_letter_policy, retry_policy=retry_policy, + detached=detached, ) if metadata is None: metadata = [] @@ -629,8 +630,8 @@ def list_subscriptions( ... pass Args: - project (str): Required. The name of the project in which to list subscriptions. Format - is ``projects/{project-id}``. + project (str): Required. The name of the project in which to list subscriptions. + Format is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -856,8 +857,8 @@ def modify_ack_deadline( metadata=None, ): """ - Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message by the subscriber, or to make the message available for redelivery if the processing was interrupted. Note that this does not modify the subscription-level ``ackDeadlineSeconds`` used for subsequent messages. @@ -881,11 +882,11 @@ def modify_ack_deadline( subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. ack_ids (list[str]): Required. List of acknowledgment IDs. - ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request was - sent to the Pub/Sub system. For example, if the value is 10, the new ack - deadline will expire 10 seconds after the ``ModifyAckDeadline`` call was - made. Specifying zero might immediately make the message available for - delivery to another subscriber client. This typically results in an + ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request + was sent to the Pub/Sub system. 
For example, if the value is 10, the new + ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the message available + for delivery to another subscriber client. This typically results in an increase in the rate of message redeliveries (that is, duplicates). The minimum deadline you can specify is 0 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). @@ -968,11 +969,11 @@ def acknowledge( >>> client.acknowledge(subscription, ack_ids) Args: - subscription (str): Required. The subscription whose message is being acknowledged. Format - is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. The acknowledgment ID for the messages being acknowledged that - was returned by the Pub/Sub system in the ``Pull`` response. Must not be - empty. + subscription (str): Required. The subscription whose message is being acknowledged. + Format is ``projects/{project}/subscriptions/{sub}``. + ack_ids (list[str]): Required. The acknowledgment ID for the messages being acknowledged + that was returned by the Pub/Sub system in the ``Pull`` response. Must + not be empty. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -1030,9 +1031,9 @@ def pull( metadata=None, ): """ - Pulls messages from the server. The server may return ``UNAVAILABLE`` if - there are too many concurrent pull requests pending for the given - subscription. + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests pending + for the given subscription. Example: >>> from google.cloud import pubsub_v1 @@ -1047,17 +1048,17 @@ def pull( >>> response = client.pull(subscription, max_messages) Args: - subscription (str): Required. The subscription from which messages should be pulled. 
Format - is ``projects/{project}/subscriptions/{sub}``. + subscription (str): Required. The subscription from which messages should be pulled. + Format is ``projects/{project}/subscriptions/{sub}``. max_messages (int): Required. The maximum number of messages to return for this request. Must be a positive integer. The Pub/Sub system may return fewer than the number specified. - return_immediately (bool): Optional. If this field set to true, the system will respond immediately - even if it there are no messages available to return in the ``Pull`` - response. Otherwise, the system may wait (for a bounded amount of time) - until at least one message is available, rather than returning no - messages. Warning: setting this field to ``true`` is discouraged because - it adversely impacts the performance of ``Pull`` operations. We + return_immediately (bool): Optional. If this field set to true, the system will respond + immediately even if it there are no messages available to return in the + ``Pull`` response. Otherwise, the system may wait (for a bounded amount + of time) until at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is discouraged + because it adversely impacts the performance of ``Pull`` operations. We recommend that users do not set this field. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1117,8 +1118,8 @@ def streaming_pull( metadata=None, ): """ - Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline + Establishes a stream with the server, which sends messages down to + the client. The client streams acknowledgements and ack deadline modifications back to the server. The server will close the stream and return the status on any error. 
The server may close the stream with status ``UNAVAILABLE`` to reassign server-side resources, in which case, @@ -1304,8 +1305,8 @@ def list_snapshots( ... pass Args: - project (str): Required. The name of the project in which to list snapshots. Format is - ``projects/{project-id}``. + project (str): Required. The name of the project in which to list snapshots. Format + is ``projects/{project-id}``. page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -1383,17 +1384,18 @@ def create_snapshot( metadata=None, ): """ - Creates a snapshot from the requested subscription. Snapshots are used - in Seek operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages in an - existing subscription to the state captured by a snapshot. If the - snapshot already exists, returns ``ALREADY_EXISTS``. If the requested - subscription doesn't exist, returns ``NOT_FOUND``. If the backlog in the - subscription is too old -- and the resulting snapshot would expire in - less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. See also - the ``Snapshot.expire_time`` field. If the name is not provided in the - request, the server will assign a random name for this snapshot on the - same project as the subscription, conforming to the `resource name + Creates a snapshot from the requested subscription. Snapshots are + used in Seek operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the acknowledgment state + of messages in an existing subscription to the state captured by a + snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. 
If the + backlog in the subscription is too old -- and the resulting snapshot + would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is + returned. See also the ``Snapshot.expire_time`` field. If the name is + not provided in the request, the server will assign a random name for + this snapshot on the same project as the subscription, conforming to the + `resource name format `__. The generated name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. @@ -1687,8 +1689,8 @@ def seek( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.Timestamp` - snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as that - of the provided subscription. Format is + snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as + that of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will @@ -1941,8 +1943,8 @@ def test_iam_permissions( Args: resource (str): REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with - wildcards (such as '*' or 'storage.*') are not allowed. For more + permissions (list[str]): The set of permissions to check for the ``resource``. Permissions + with wildcards (such as '*' or 'storage.*') are not allowed. For more information see `IAM Overview `__. 
retry (Optional[google.api_core.retry.Retry]): A retry object used diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index a32aa8f7e3ce..c874e78d3c49 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -184,7 +184,7 @@ def list_topics(self): def list_topic_subscriptions(self): """Return the gRPC stub for :meth:`PublisherClient.list_topic_subscriptions`. - Lists the names of the subscriptions on this topic. + Lists the names of the attached subscriptions on this topic. Returns: Callable: A callable which accepts the appropriate @@ -278,3 +278,19 @@ def test_iam_permissions(self): deserialized response object. """ return self._stubs["iam_policy_stub"].TestIamPermissions + + @property + def detach_subscription(self): + """Return the gRPC stub for :meth:`PublisherClient.detach_subscription`. + + Detaches a subscription from this topic. All messages retained in + the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` + requests will return FAILED_PRECONDITION. If the subscription is a push + subscription, pushes to the endpoint will stop. + + Returns: + Callable: A callable which accepts the appropriate + deserialized request object and returns a + deserialized response object. 
+ """ + return self._stubs["publisher_stub"].DetachSubscription diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index c569051b3975..4a48b9317dee 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -116,9 +116,9 @@ def channel(self): def create_subscription(self): """Return the gRPC stub for :meth:`SubscriberClient.create_subscription`. - Creates a subscription to a given topic. See the resource name rules. If - the subscription already exists, returns ``ALREADY_EXISTS``. If the - corresponding topic doesn't exist, returns ``NOT_FOUND``. + Creates a subscription to a given topic. See the resource name + rules. If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, @@ -213,8 +213,8 @@ def get_snapshot(self): def modify_ack_deadline(self): """Return the gRPC stub for :meth:`SubscriberClient.modify_ack_deadline`. - Modifies the ack deadline for a specific message. This method is useful - to indicate that more time is needed to process a message by the + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message by the subscriber, or to make the message available for redelivery if the processing was interrupted. Note that this does not modify the subscription-level ``ackDeadlineSeconds`` used for subsequent messages. @@ -249,9 +249,9 @@ def acknowledge(self): def pull(self): """Return the gRPC stub for :meth:`SubscriberClient.pull`. - Pulls messages from the server. 
The server may return ``UNAVAILABLE`` if - there are too many concurrent pull requests pending for the given - subscription. + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests pending + for the given subscription. Returns: Callable: A callable which accepts the appropriate @@ -264,8 +264,8 @@ def pull(self): def streaming_pull(self): """Return the gRPC stub for :meth:`SubscriberClient.streaming_pull`. - Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline + Establishes a stream with the server, which sends messages down to + the client. The client streams acknowledgements and ack deadline modifications back to the server. The server will close the stream and return the status on any error. The server may close the stream with status ``UNAVAILABLE`` to reassign server-side resources, in which case, @@ -320,17 +320,18 @@ def list_snapshots(self): def create_snapshot(self): """Return the gRPC stub for :meth:`SubscriberClient.create_snapshot`. - Creates a snapshot from the requested subscription. Snapshots are used - in Seek operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages in an - existing subscription to the state captured by a snapshot. If the - snapshot already exists, returns ``ALREADY_EXISTS``. If the requested - subscription doesn't exist, returns ``NOT_FOUND``. If the backlog in the - subscription is too old -- and the resulting snapshot would expire in - less than 1 hour -- then ``FAILED_PRECONDITION`` is returned. See also - the ``Snapshot.expire_time`` field. If the name is not provided in the - request, the server will assign a random name for this snapshot on the - same project as the subscription, conforming to the `resource name + Creates a snapshot from the requested subscription. 
Snapshots are + used in Seek operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the acknowledgment state + of messages in an existing subscription to the state captured by a + snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. If the + backlog in the subscription is too old -- and the resulting snapshot + would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is + returned. See also the ``Snapshot.expire_time`` field. If the name is + not provided in the request, the server will assign a random name for + this snapshot on the same project as the subscription, conforming to the + `resource name format `__. The generated name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 832e0649c9c0..d85f2734e686 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -88,7 +88,7 @@ service Publisher { option (google.api.method_signature) = "project"; } - // Lists the names of the subscriptions on this topic. + // Lists the names of the attached subscriptions on this topic. rpc ListTopicSubscriptions(ListTopicSubscriptionsRequest) returns (ListTopicSubscriptionsResponse) { option (google.api.http) = { @@ -122,8 +122,20 @@ service Publisher { }; option (google.api.method_signature) = "topic"; } + + // Detaches a subscription from this topic. All messages retained in the + // subscription are dropped. Subsequent `Pull` and `StreamingPull` requests + // will return FAILED_PRECONDITION. If the subscription is a push + // subscription, pushes to the endpoint will stop. 
+ rpc DetachSubscription(DetachSubscriptionRequest) + returns (DetachSubscriptionResponse) { + option (google.api.http) = { + post: "/v1/{subscription=projects/*/subscriptions/*}:detach" + }; + } } +// A policy constraining the storage of messages published to the topic. message MessageStoragePolicy { // A list of IDs of GCP regions where messages that are published to the topic // may be persisted in storage. Messages published by publishers running in @@ -300,7 +312,7 @@ message ListTopicSubscriptionsRequest { // Response for the `ListTopicSubscriptions` method. message ListTopicSubscriptionsResponse { - // The names of the subscriptions that match the request. + // The names of subscriptions attached to the topic specified in the request. repeated string subscriptions = 1 [(google.api.resource_reference) = { type: "pubsub.googleapis.com/Subscription" }]; @@ -350,6 +362,22 @@ message DeleteTopicRequest { ]; } +// Request for the DetachSubscription method. +message DetachSubscriptionRequest { + // Required. The subscription to detach. + // Format is `projects/{project}/subscriptions/{subscription}`. + string subscription = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "pubsub.googleapis.com/Subscription" + } + ]; +} + +// Response for the DetachSubscription method. +// Reserved for future use. +message DetachSubscriptionResponse {} + // The service that an application uses to manipulate subscriptions and to // consume messages from a subscription via the `Pull` method or by // establishing a bi-directional stream using the `StreamingPull` method. @@ -676,9 +704,6 @@ message Subscription { // then only `PubsubMessage`s whose `attributes` field matches the filter are // delivered on this subscription. If empty, then no messages are filtered // out. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. 
It is not subject to any SLA or deprecation policy. string filter = 12; // A policy that specifies the conditions for dead lettering messages in @@ -691,17 +716,21 @@ message Subscription { // permission to Acknowledge() messages on this subscription. DeadLetterPolicy dead_letter_policy = 13; - // A policy that specifies how Cloud Pub/Sub retries message delivery for this + // A policy that specifies how Pub/Sub retries message delivery for this // subscription. // // If not set, the default retry policy is applied. This generally implies // that messages will be retried as soon as possible for healthy subscribers. // RetryPolicy will be triggered on NACKs or acknowledgement deadline // exceeded events for a given message. - // EXPERIMENTAL: This API might be changed in backward-incompatible - // ways and is not recommended for production use. It is not subject to any - // SLA or deprecation policy. RetryPolicy retry_policy = 14; + + // Indicates whether the subscription is detached from its topic. Detached + // subscriptions don't receive messages from their topic and don't retain any + // backlog. `Pull` and `StreamingPull` requests will return + // FAILED_PRECONDITION. If the subscription is a push subscription, pushes to + // the endpoint will not be made. + bool detached = 15; } // A policy that specifies how Cloud Pub/Sub retries message delivery. @@ -839,8 +868,11 @@ message ReceivedMessage { // The message. PubsubMessage message = 2; - // Delivery attempt counter is 1 + (the sum of number of NACKs and number of - // ack_deadline exceeds) for this message. + // The approximate number of times that Cloud Pub/Sub has attempted to deliver + // the associated message to a subscriber. + // + // More precisely, this is 1 + (number of NACKs) + + // (number of ack_deadline exceeds) for this message. // // A NACK is any call to ModifyAckDeadline with a 0 deadline. 
An ack_deadline // exceeds event is whenever a message is not acknowledged within @@ -848,13 +880,10 @@ message ReceivedMessage { // Subscription.ackDeadlineSeconds, but may get extended automatically by // the client library. // - // The first delivery of a given message will have this value as 1. The value - // is calculated at best effort and is approximate. + // Upon the first delivery of a given message, `delivery_attempt` will have a + // value of 1. The value is calculated at best effort and is approximate. // // If a DeadLetterPolicy is not set on the subscription, this will be 0. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. int32 delivery_attempt = 3; } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 4729f11ee306..6ba73883d3f3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -2,9 +2,6 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: google/cloud/pubsub_v1/proto/pubsub.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -29,12 +26,9 @@ name="google/cloud/pubsub_v1/proto/pubsub.proto", package="google.pubsub.v1", syntax="proto3", - serialized_options=_b( - "\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1" - ), - serialized_pb=_b( - '\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 
\x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"\xae\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 
\x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 
\x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 
\x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xe8\x01\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 
\x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xf3\t\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 
.google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscr
iptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x8
2\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3' - ), + serialized_options=b"\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 
\x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 
\x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"]\n\x19\x44\x65tachSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x1c\n\x1a\x44\x65tachSubscriptionResponse"\xc0\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x12\x10\n\x08\x64\x65tached\x18\x0f \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 
\x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xe8\x01\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 
\x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 
\n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xa3\x0b\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\xad\x01\n\x12\x44\x65tachSubscription\x12+.google.pubsub.v1.DetachSubscriptionRequest\x1a,.google.pubsub.v1.DetachSubscriptionResponse"<\x82\xd3\xe4\x93\x02\x36"4/v1/{subscription=projects/*/subscriptions/*}:detach\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\
xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.Modify
PushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3', dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, @@ -54,6 +48,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( 
name="allowed_persistence_regions", @@ -72,6 +67,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -93,6 +89,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="key", @@ -103,7 +100,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -111,6 +108,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -121,7 +119,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -129,12 +127,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + serialized_options=b"8\001", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -149,6 +148,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -159,14 +159,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="labels", @@ -185,6 +186,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), 
_descriptor.FieldDescriptor( name="message_storage_policy", @@ -203,6 +205,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="kms_key_name", @@ -213,7 +216,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -221,14 +224,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[_TOPIC_LABELSENTRY], enum_types=[], - serialized_options=_b( - "\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_" - ), + serialized_options=b"\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -244,6 +246,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="key", @@ -254,7 +257,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -262,6 +265,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -272,7 +276,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -280,12 +284,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + serialized_options=b"8\001", is_extendable=False, 
syntax="proto3", extension_ranges=[], @@ -300,6 +305,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="data", @@ -310,7 +316,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b(""), + default_value=b"", message_type=None, enum_type=None, containing_type=None, @@ -318,6 +324,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="attributes", @@ -336,6 +343,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="message_id", @@ -346,7 +354,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -354,6 +362,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="publish_time", @@ -372,6 +381,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="ordering_key", @@ -382,7 +392,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -390,6 +400,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -411,6 +422,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topic", @@ -421,16 +433,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" - ), + serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -452,6 +463,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topic", @@ -468,8 +480,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_mask", @@ -486,8 +499,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -509,6 +523,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topic", @@ -519,16 +534,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" - ), + serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="messages", @@ -545,8 +559,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -568,6 +583,7 @@ filename=None, 
file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="message_ids", @@ -586,6 +602,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -607,6 +624,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="project", @@ -617,16 +635,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" - ), + serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -645,6 +662,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -655,7 +673,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -663,6 +681,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -684,6 +703,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topics", @@ -702,6 +722,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -712,7 +733,7 @@ cpp_type=9, label=1, has_default_value=False, - 
default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -720,6 +741,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -741,6 +763,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topic", @@ -751,16 +774,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" - ), + serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -779,6 +801,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -789,7 +812,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -797,6 +820,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -818,6 +842,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscriptions", @@ -834,8 +859,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b('\372A$\n"pubsub.googleapis.com/Subscription'), + serialized_options=b'\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), 
_descriptor.FieldDescriptor( name="next_page_token", @@ -846,7 +872,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -854,6 +880,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -875,6 +902,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topic", @@ -885,16 +913,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" - ), + serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -913,6 +940,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -923,7 +951,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -931,6 +959,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -952,6 +981,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="snapshots", @@ -970,6 +1000,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", 
@@ -980,7 +1011,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -988,6 +1019,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1009,6 +1041,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="topic", @@ -1019,16 +1052,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" - ), + serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1044,12 +1076,75 @@ ) +_DETACHSUBSCRIPTIONREQUEST = _descriptor.Descriptor( + name="DetachSubscriptionRequest", + full_name="google.pubsub.v1.DetachSubscriptionRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="subscription", + full_name="google.pubsub.v1.DetachSubscriptionRequest.subscription", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ) + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + 
serialized_start=2012, + serialized_end=2105, +) + + +_DETACHSUBSCRIPTIONRESPONSE = _descriptor.Descriptor( + name="DetachSubscriptionResponse", + full_name="google.pubsub.v1.DetachSubscriptionResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=2107, + serialized_end=2135, +) + + _SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( name="LabelsEntry", full_name="google.pubsub.v1.Subscription.LabelsEntry", filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="key", @@ -1060,7 +1155,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1068,6 +1163,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -1078,7 +1174,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1086,12 +1182,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + serialized_options=b"8\001", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1106,6 +1203,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -1116,14 +1214,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + 
default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="topic", @@ -1134,16 +1233,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A\035\n\033pubsub.googleapis.com/Topic" - ), + serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="push_config", @@ -1162,6 +1260,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="ack_deadline_seconds", @@ -1180,6 +1279,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="retain_acked_messages", @@ -1198,6 +1298,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="message_retention_duration", @@ -1216,6 +1317,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="labels", @@ -1234,6 +1336,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="enable_message_ordering", @@ -1252,6 +1355,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="expiration_policy", 
@@ -1270,6 +1374,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="filter", @@ -1280,7 +1385,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1288,6 +1393,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="dead_letter_policy", @@ -1306,6 +1412,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="retry_policy", @@ -1324,20 +1431,38 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="detached", + full_name="google.pubsub.v1.Subscription.detached", + index=12, + number=15, + type=8, + cpp_type=7, + label=1, + has_default_value=False, + default_value=False, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[_SUBSCRIPTION_LABELSENTRY], enum_types=[], - serialized_options=_b( - '\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}' - ), + serialized_options=b'\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}', is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2013, - serialized_end=2699, + serialized_start=2138, + serialized_end=2842, ) @@ -1347,6 +1472,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="minimum_backoff", 
@@ -1365,6 +1491,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="maximum_backoff", @@ -1383,6 +1510,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1393,8 +1521,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2701, - serialized_end=2818, + serialized_start=2844, + serialized_end=2961, ) @@ -1404,6 +1532,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="dead_letter_topic", @@ -1414,7 +1543,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1422,6 +1551,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="max_delivery_attempts", @@ -1440,6 +1570,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1450,8 +1581,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2820, - serialized_end=2896, + serialized_start=2963, + serialized_end=3039, ) @@ -1461,6 +1592,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="ttl", @@ -1479,6 +1611,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1489,8 +1622,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=2898, - serialized_end=2956, + serialized_start=3041, + serialized_end=3099, ) @@ -1500,6 +1633,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="service_account_email", @@ -1510,7 +1644,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1518,6 +1652,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="audience", @@ -1528,7 +1663,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1536,6 +1671,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1546,8 +1682,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3124, - serialized_end=3184, + serialized_start=3267, + serialized_end=3327, ) _PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( @@ -1556,6 +1692,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="key", @@ -1566,7 +1703,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1574,6 +1711,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -1584,7 +1722,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1592,12 +1730,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], 
nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + serialized_options=b"8\001", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -1612,6 +1751,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="push_endpoint", @@ -1622,7 +1762,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1630,6 +1770,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="attributes", @@ -1648,6 +1789,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="oidc_token", @@ -1666,6 +1808,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1681,11 +1824,12 @@ full_name="google.pubsub.v1.PushConfig.authentication_method", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], ) ], - serialized_start=2959, - serialized_end=3260, + serialized_start=3102, + serialized_end=3403, ) @@ -1695,6 +1839,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="ack_id", @@ -1705,7 +1850,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1713,6 +1858,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="message", @@ -1731,6 +1877,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="delivery_attempt", @@ -1749,6 +1896,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1759,8 +1907,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3262, - serialized_end=3371, + serialized_start=3405, + serialized_end=3514, ) @@ -1770,6 +1918,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -1780,16 +1929,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -1800,8 +1948,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3373, - serialized_end=3463, + serialized_start=3516, + serialized_end=3606, ) @@ -1811,6 +1959,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -1827,8 +1976,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_mask", @@ -1845,8 +1995,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1857,8 +2008,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3466, - serialized_end=3606, + serialized_start=3609, + serialized_end=3749, ) @@ -1868,6 +2019,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="project", @@ -1878,16 +2030,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" - ), + serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -1906,6 +2057,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -1916,7 +2068,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1924,6 +2076,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1934,8 +2087,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3609, - serialized_end=3744, + serialized_start=3752, + serialized_end=3887, ) @@ -1945,6 +2098,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscriptions", @@ -1963,6 +2117,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -1973,7 +2128,7 @@ cpp_type=9, label=1, 
has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -1981,6 +2136,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -1991,8 +2147,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3746, - serialized_end=3853, + serialized_start=3889, + serialized_end=3996, ) @@ -2002,6 +2158,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -2012,16 +2169,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -2032,8 +2188,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3855, - serialized_end=3948, + serialized_start=3998, + serialized_end=4091, ) @@ -2043,6 +2199,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -2053,16 +2210,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), 
_descriptor.FieldDescriptor( name="push_config", @@ -2079,8 +2235,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2091,8 +2248,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=3951, - serialized_end=4098, + serialized_start=4094, + serialized_end=4241, ) @@ -2102,6 +2259,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -2112,16 +2270,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="return_immediately", @@ -2138,8 +2295,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\030\001\340A\001"), + serialized_options=b"\030\001\340A\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="max_messages", @@ -2156,8 +2314,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2168,8 +2327,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4101, - serialized_end=4242, + serialized_start=4244, + serialized_end=4385, ) @@ -2179,6 +2338,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="received_messages", @@ -2197,6 +2357,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -2207,8 +2368,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4244, - serialized_end=4320, + serialized_start=4387, + serialized_end=4463, ) @@ -2218,6 +2379,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -2228,16 +2390,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="ack_ids", @@ -2254,8 +2415,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="ack_deadline_seconds", @@ -2272,8 +2434,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2284,8 +2447,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4323, - serialized_end=4472, + serialized_start=4466, + serialized_end=4615, ) @@ -2295,6 +2458,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( 
name="subscription", @@ -2305,16 +2469,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="ack_ids", @@ -2331,8 +2494,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2343,8 +2507,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4474, - serialized_end=4582, + serialized_start=4617, + serialized_end=4725, ) @@ -2354,6 +2518,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -2364,16 +2529,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="ack_ids", @@ -2392,6 +2556,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="modify_deadline_seconds", @@ -2410,6 +2575,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), 
_descriptor.FieldDescriptor( name="modify_deadline_ack_ids", @@ -2428,6 +2594,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="stream_ack_deadline_seconds", @@ -2444,8 +2611,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="client_id", @@ -2456,7 +2624,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2464,6 +2632,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2474,8 +2643,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4585, - serialized_end=4817, + serialized_start=4728, + serialized_end=4960, ) @@ -2485,6 +2654,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="received_messages", @@ -2503,6 +2673,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -2513,8 +2684,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4819, - serialized_end=4904, + serialized_start=4962, + serialized_end=5047, ) @@ -2524,6 +2695,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="key", @@ -2534,7 +2706,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2542,6 +2714,7 @@ extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -2552,7 +2725,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2560,12 +2733,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + serialized_options=b"8\001", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2580,6 +2754,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -2590,16 +2765,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A \n\036pubsub.googleapis.com/Snapshot" - ), + serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="subscription", @@ -2610,16 +2784,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="labels", @@ -2638,6 +2811,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2648,8 +2822,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4907, - serialized_end=5166, + serialized_start=5050, + serialized_end=5309, ) @@ -2659,6 +2833,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="snapshot", @@ -2675,8 +2850,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="update_mask", @@ -2693,8 +2869,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2705,8 +2882,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5169, - serialized_end=5297, + serialized_start=5312, + serialized_end=5440, ) @@ -2716,6 +2893,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="key", @@ -2726,7 +2904,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2734,6 +2912,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -2744,7 +2923,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2752,12 +2931,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], 
extensions=[], nested_types=[], enum_types=[], - serialized_options=_b("8\001"), + serialized_options=b"8\001", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -2772,6 +2952,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -2782,7 +2963,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2790,6 +2971,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="topic", @@ -2800,14 +2982,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\372A\035\n\033pubsub.googleapis.com/Topic"), + serialized_options=b"\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="expire_time", @@ -2826,6 +3009,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="labels", @@ -2844,20 +3028,19 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[_SNAPSHOT_LABELSENTRY], enum_types=[], - serialized_options=_b( - "\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}" - ), + serialized_options=b"\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}", is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5300, - serialized_end=5603, + serialized_start=5443, + 
serialized_end=5746, ) @@ -2867,6 +3050,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="snapshot", @@ -2877,16 +3061,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A \n\036pubsub.googleapis.com/Snapshot" - ), + serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -2897,8 +3080,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5605, - serialized_end=5683, + serialized_start=5748, + serialized_end=5826, ) @@ -2908,6 +3091,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="project", @@ -2918,16 +3102,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project" - ), + serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -2946,6 +3129,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -2956,7 +3140,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -2964,6 +3148,7 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -2974,8 +3159,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5686, - serialized_end=5817, + serialized_start=5829, + serialized_end=5960, ) @@ -2985,6 +3170,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="snapshots", @@ -3003,6 +3189,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -3013,7 +3200,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -3021,6 +3208,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -3031,8 +3219,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5819, - serialized_end=5914, + serialized_start=5962, + serialized_end=6057, ) @@ -3042,6 +3230,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="snapshot", @@ -3052,16 +3241,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - "\340A\002\372A \n\036pubsub.googleapis.com/Snapshot" - ), + serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ) ], extensions=[], @@ -3072,8 +3260,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5916, - serialized_end=5997, + 
serialized_start=6059, + serialized_end=6140, ) @@ -3083,6 +3271,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="subscription", @@ -3093,16 +3282,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b( - '\340A\002\372A$\n"pubsub.googleapis.com/Subscription' - ), + serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="time", @@ -3121,6 +3309,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="snapshot", @@ -3131,14 +3320,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\372A \n\036pubsub.googleapis.com/Snapshot"), + serialized_options=b"\372A \n\036pubsub.googleapis.com/Snapshot", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -3154,11 +3344,12 @@ full_name="google.pubsub.v1.SeekRequest.target", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], ) ], - serialized_start=6000, - serialized_end=6190, + serialized_start=6143, + serialized_end=6333, ) @@ -3168,6 +3359,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], extensions=[], nested_types=[], @@ -3177,8 +3369,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6192, - serialized_end=6206, + serialized_start=6335, + serialized_end=6349, 
) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3285,6 +3477,12 @@ "ListTopicSnapshotsResponse" ] = _LISTTOPICSNAPSHOTSRESPONSE DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST +DESCRIPTOR.message_types_by_name[ + "DetachSubscriptionRequest" +] = _DETACHSUBSCRIPTIONREQUEST +DESCRIPTOR.message_types_by_name[ + "DetachSubscriptionResponse" +] = _DETACHSUBSCRIPTIONRESPONSE DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION DESCRIPTOR.message_types_by_name["RetryPolicy"] = _RETRYPOLICY DESCRIPTOR.message_types_by_name["DeadLetterPolicy"] = _DEADLETTERPOLICY @@ -3323,13 +3521,12 @@ MessageStoragePolicy = _reflection.GeneratedProtocolMessageType( "MessageStoragePolicy", (_message.Message,), - dict( - DESCRIPTOR=_MESSAGESTORAGEPOLICY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A message storage policy. - + { + "DESCRIPTOR": _MESSAGESTORAGEPOLICY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A policy constraining the storage of messages published to the topic. - Attributes: + Attributes: allowed_persistence_regions: A list of IDs of GCP regions where messages that are published to the topic may be persisted in storage. Messages published @@ -3339,26 +3536,27 @@ are allowed, and is not a valid configuration. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) - ), + }, ) _sym_db.RegisterMessage(MessageStoragePolicy) Topic = _reflection.GeneratedProtocolMessageType( "Topic", (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( "LabelsEntry", (_message.Message,), - dict( - DESCRIPTOR=_TOPIC_LABELSENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + { + "DESCRIPTOR": _TOPIC_LABELSENTRY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) - ), + }, ), - DESCRIPTOR=_TOPIC, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A topic resource. + "DESCRIPTOR": _TOPIC, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A topic resource. + Attributes: name: Required. The name of the topic. It must have the format @@ -3381,7 +3579,7 @@ ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) - ), + }, ) _sym_db.RegisterMessage(Topic) _sym_db.RegisterMessage(Topic.LabelsEntry) @@ -3389,24 +3587,25 @@ PubsubMessage = _reflection.GeneratedProtocolMessageType( "PubsubMessage", (_message.Message,), - dict( - AttributesEntry=_reflection.GeneratedProtocolMessageType( + { + "AttributesEntry": _reflection.GeneratedProtocolMessageType( "AttributesEntry", (_message.Message,), - dict( - DESCRIPTOR=_PUBSUBMESSAGE_ATTRIBUTESENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + { + "DESCRIPTOR": _PUBSUBMESSAGE_ATTRIBUTESENTRY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) - ), + }, ), - DESCRIPTOR=_PUBSUBMESSAGE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A message that is published by publishers and consumed by subscribers. 
+ "DESCRIPTOR": _PUBSUBMESSAGE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A message that is published by publishers and consumed by subscribers. The message must contain either a non-empty data field or at least one attribute. Note that client libraries represent this object differently depending on the language. See the corresponding client library documentation for more information. See Quotas and limits for more information about message limits. + Attributes: data: The message data field. If this field is empty, the message @@ -3438,7 +3637,7 @@ It is not subject to any SLA or deprecation policy. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) - ), + }, ) _sym_db.RegisterMessage(PubsubMessage) _sym_db.RegisterMessage(PubsubMessage.AttributesEntry) @@ -3446,50 +3645,53 @@ GetTopicRequest = _reflection.GeneratedProtocolMessageType( "GetTopicRequest", (_message.Message,), - dict( - DESCRIPTOR=_GETTOPICREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the GetTopic method. + { + "DESCRIPTOR": _GETTOPICREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the GetTopic method. + Attributes: topic: Required. The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) - ), + }, ) _sym_db.RegisterMessage(GetTopicRequest) UpdateTopicRequest = _reflection.GeneratedProtocolMessageType( "UpdateTopicRequest", (_message.Message,), - dict( - DESCRIPTOR=_UPDATETOPICREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the UpdateTopic method. + { + "DESCRIPTOR": _UPDATETOPICREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the UpdateTopic method. + Attributes: topic: Required. The updated topic object. update_mask: Required. 
Indicates which fields in the provided topic to update. Must be specified and non-empty. Note that if - ``update_mask`` contains "message\_storage\_policy" but the + ``update_mask`` contains “message_storage_policy” but the ``message_storage_policy`` is not set in the ``topic`` provided above, then the updated value is determined by the policy configured at the project or organization level. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) - ), + }, ) _sym_db.RegisterMessage(UpdateTopicRequest) PublishRequest = _reflection.GeneratedProtocolMessageType( "PublishRequest", (_message.Message,), - dict( - DESCRIPTOR=_PUBLISHREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the Publish method. + { + "DESCRIPTOR": _PUBLISHREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the Publish method. + Attributes: topic: Required. The messages in the request will be published on @@ -3498,17 +3700,18 @@ Required. The messages to publish. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) - ), + }, ) _sym_db.RegisterMessage(PublishRequest) PublishResponse = _reflection.GeneratedProtocolMessageType( "PublishResponse", (_message.Message,), - dict( - DESCRIPTOR=_PUBLISHRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``Publish`` method. + { + "DESCRIPTOR": _PUBLISHRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``Publish`` method. + Attributes: message_ids: The server-assigned ID of each published message, in the same @@ -3516,17 +3719,18 @@ unique within the topic. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) - ), + }, ) _sym_db.RegisterMessage(PublishResponse) ListTopicsRequest = _reflection.GeneratedProtocolMessageType( "ListTopicsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTTOPICSREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListTopics`` method. + { + "DESCRIPTOR": _LISTTOPICSREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``ListTopics`` method. + Attributes: project: Required. The name of the project in which to list topics. @@ -3540,17 +3744,18 @@ next page of data. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) - ), + }, ) _sym_db.RegisterMessage(ListTopicsRequest) ListTopicsResponse = _reflection.GeneratedProtocolMessageType( "ListTopicsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTTOPICSRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListTopics`` method. + { + "DESCRIPTOR": _LISTTOPICSRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``ListTopics`` method. + Attributes: topics: The resulting topics. @@ -3560,17 +3765,18 @@ ``ListTopicsRequest``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) - ), + }, ) _sym_db.RegisterMessage(ListTopicsResponse) ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( "ListTopicSubscriptionsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTTOPICSUBSCRIPTIONSREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListTopicSubscriptions`` method. + { + "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``ListTopicSubscriptions`` method. + Attributes: topic: Required. 
The name of the topic that subscriptions are @@ -3584,37 +3790,40 @@ that the system should return the next page of data. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) - ), + }, ) _sym_db.RegisterMessage(ListTopicSubscriptionsRequest) ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( "ListTopicSubscriptionsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTTOPICSUBSCRIPTIONSRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListTopicSubscriptions`` method. + { + "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``ListTopicSubscriptions`` method. + Attributes: subscriptions: - The names of the subscriptions that match the request. + The names of subscriptions attached to the topic specified in + the request. next_page_token: If not empty, indicates that there may be more subscriptions that match the request; this value should be passed in a new ``ListTopicSubscriptionsRequest`` to get more subscriptions. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) - ), + }, ) _sym_db.RegisterMessage(ListTopicSubscriptionsResponse) ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType( "ListTopicSnapshotsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTTOPICSNAPSHOTSREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListTopicSnapshots`` method. + { + "DESCRIPTOR": _LISTTOPICSNAPSHOTSREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``ListTopicSnapshots`` method. + Attributes: topic: Required. The name of the topic that snapshots are attached @@ -3628,17 +3837,18 @@ the next page of data. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) - ), + }, ) _sym_db.RegisterMessage(ListTopicSnapshotsRequest) ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType( "ListTopicSnapshotsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTTOPICSNAPSHOTSRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListTopicSnapshots`` method. + { + "DESCRIPTOR": _LISTTOPICSNAPSHOTSRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``ListTopicSnapshots`` method. + Attributes: snapshots: The names of the snapshots that match the request. @@ -3648,43 +3858,75 @@ ``ListTopicSnapshotsRequest`` to get more snapshots. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) - ), + }, ) _sym_db.RegisterMessage(ListTopicSnapshotsResponse) DeleteTopicRequest = _reflection.GeneratedProtocolMessageType( "DeleteTopicRequest", (_message.Message,), - dict( - DESCRIPTOR=_DELETETOPICREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``DeleteTopic`` method. + { + "DESCRIPTOR": _DELETETOPICREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``DeleteTopic`` method. + Attributes: topic: Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) - ), + }, ) _sym_db.RegisterMessage(DeleteTopicRequest) +DetachSubscriptionRequest = _reflection.GeneratedProtocolMessageType( + "DetachSubscriptionRequest", + (_message.Message,), + { + "DESCRIPTOR": _DETACHSUBSCRIPTIONREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the DetachSubscription method. + + Attributes: + subscription: + Required. The subscription to detach. 
Format is + ``projects/{project}/subscriptions/{subscription}``. + """, + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionRequest) + }, +) +_sym_db.RegisterMessage(DetachSubscriptionRequest) + +DetachSubscriptionResponse = _reflection.GeneratedProtocolMessageType( + "DetachSubscriptionResponse", + (_message.Message,), + { + "DESCRIPTOR": _DETACHSUBSCRIPTIONRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the DetachSubscription method. Reserved for future use.""", + # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionResponse) + }, +) +_sym_db.RegisterMessage(DetachSubscriptionResponse) + Subscription = _reflection.GeneratedProtocolMessageType( "Subscription", (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( "LabelsEntry", (_message.Message,), - dict( - DESCRIPTOR=_SUBSCRIPTION_LABELSENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + { + "DESCRIPTOR": _SUBSCRIPTION_LABELSENTRY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) - ), + }, ), - DESCRIPTOR=_SUBSCRIPTION, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A subscription resource. + "DESCRIPTOR": _SUBSCRIPTION, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A subscription resource. + Attributes: name: Required. The name of the subscription. It must have the @@ -3724,13 +3966,13 @@ system will eventually redeliver the message. retain_acked_messages: Indicates whether to retain acknowledged messages. If true, - then messages are not expunged from the subscription's + then messages are not expunged from the subscription’s backlog, even if they are acknowledged, until they fall out of the ``message_retention_duration`` window. 
This must be true if you would like to Seek to a timestamp. message_retention_duration: How long to retain unacknowledged messages in the - subscription's backlog, from the moment a message is + subscription’s backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. @@ -3748,7 +3990,7 @@ recommended for production use. It is not subject to any SLA or deprecation policy. expiration_policy: - A policy that specifies the conditions for this subscription's + A policy that specifies the conditions for this subscription’s expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing operations on the subscription. @@ -3759,32 +4001,32 @@ An expression written in the Cloud Pub/Sub filter language. If non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field matches the filter are delivered on this subscription. - If empty, then no messages are filtered out. EXPERIMENTAL: - This feature is part of a closed alpha release. This API might - be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA - or deprecation policy. + If empty, then no messages are filtered out. dead_letter_policy: A policy that specifies the conditions for dead lettering - messages in this subscription. If dead\_letter\_policy is not + messages in this subscription. If dead_letter_policy is not set, dead lettering is disabled. The Cloud Pub/Sub service - account associated with this subscriptions's parent project - (i.e., service-{project\_number}@gcp-sa- + account associated with this subscriptions’s parent project + (i.e., service-{project_number}@gcp-sa- pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription. 
retry_policy: - A policy that specifies how Cloud Pub/Sub retries message - delivery for this subscription. If not set, the default retry - policy is applied. This generally implies that messages will - be retried as soon as possible for healthy subscribers. + A policy that specifies how Pub/Sub retries message delivery + for this subscription. If not set, the default retry policy + is applied. This generally implies that messages will be + retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on NACKs or acknowledgement - deadline exceeded events for a given message. EXPERIMENTAL: - This API might be changed in backward-incompatible ways and is - not recommended for production use. It is not subject to any - SLA or deprecation policy. + deadline exceeded events for a given message. + detached: + Indicates whether the subscription is detached from its topic. + Detached subscriptions don’t receive messages from their topic + and don’t retain any backlog. ``Pull`` and ``StreamingPull`` + requests will return FAILED_PRECONDITION. If the subscription + is a push subscription, pushes to the endpoint will not be + made. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) - ), + }, ) _sym_db.RegisterMessage(Subscription) _sym_db.RegisterMessage(Subscription.LabelsEntry) @@ -3792,17 +4034,18 @@ RetryPolicy = _reflection.GeneratedProtocolMessageType( "RetryPolicy", (_message.Message,), - dict( - DESCRIPTOR=_RETRYPOLICY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A policy that specifies how Cloud Pub/Sub retries message delivery. + { + "DESCRIPTOR": _RETRYPOLICY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A policy that specifies how Cloud Pub/Sub retries message delivery. Retry delay will be exponential based on provided minimum and maximum - backoffs. https://en.wikipedia.org/wiki/Exponential\_backoff. + backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. 
RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message. Retry Policy is implemented on a best effort basis. At times, the delay between consecutive deliveries may not match the configuration. That is, delay can be more or less than configured backoff. + Attributes: minimum_backoff: The minimum delay between consecutive deliveries of a given @@ -3814,27 +4057,28 @@ to 600 seconds. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.RetryPolicy) - ), + }, ) _sym_db.RegisterMessage(RetryPolicy) DeadLetterPolicy = _reflection.GeneratedProtocolMessageType( "DeadLetterPolicy", (_message.Message,), - dict( - DESCRIPTOR=_DEADLETTERPOLICY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Dead lettering is done on a best effort basis. The same message might + { + "DESCRIPTOR": _DEADLETTERPOLICY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Dead lettering is done on a best effort basis. The same message might be dead lettered multiple times. If validation on any of the fields fails at subscription creation/updation, the create/update subscription request will fail. + Attributes: dead_letter_topic: The name of the topic to which dead letter messages should be published. Format is ``projects/{project}/topics/{topic}``.The Cloud Pub/Sub service account associated with the enclosing - subscription's parent project (i.e., - service-{project\_number}@gcp-sa- + subscription’s parent project (i.e., + service-{project_number}@gcp-sa- pubsub.iam.gserviceaccount.com) must have permission to Publish() to this topic. The operation will fail if the topic does not exist. Users should ensure that there is a @@ -3847,50 +4091,52 @@ number of times the acknowledgement deadline has been exceeded for the message). A NACK is any call to ModifyAckDeadline with a 0 deadline. Note that client libraries may - automatically extend ack\_deadlines. 
This field will be + automatically extend ack_deadlines. This field will be honored on a best effort basis. If this parameter is 0, a default value of 5 is used. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeadLetterPolicy) - ), + }, ) _sym_db.RegisterMessage(DeadLetterPolicy) ExpirationPolicy = _reflection.GeneratedProtocolMessageType( "ExpirationPolicy", (_message.Message,), - dict( - DESCRIPTOR=_EXPIRATIONPOLICY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A policy that specifies the conditions for resource expiration (i.e., + { + "DESCRIPTOR": _EXPIRATIONPOLICY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A policy that specifies the conditions for resource expiration (i.e., automatic resource deletion). + Attributes: ttl: - Specifies the "time-to-live" duration for an associated + Specifies the “time-to-live” duration for an associated resource. The resource expires if it is not active for a - period of ``ttl``. The definition of "activity" depends on the + period of ``ttl``. The definition of “activity” depends on the type of the associated resource. The minimum and maximum allowed values for ``ttl`` depend on the type of the associated resource, as well. If ``ttl`` is not set, the associated resource never expires. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) - ), + }, ) _sym_db.RegisterMessage(ExpirationPolicy) PushConfig = _reflection.GeneratedProtocolMessageType( "PushConfig", (_message.Message,), - dict( - OidcToken=_reflection.GeneratedProtocolMessageType( + { + "OidcToken": _reflection.GeneratedProtocolMessageType( "OidcToken", (_message.Message,), - dict( - DESCRIPTOR=_PUSHCONFIG_OIDCTOKEN, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Contains information needed for generating an `OpenID Connect token + { + "DESCRIPTOR": _PUSHCONFIG_OIDCTOKEN, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Contains information needed for generating an `OpenID Connect token `__. + Attributes: service_account_email: \ `Service account email @@ -3909,20 +4155,21 @@ specified, the Push endpoint URL will be used. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.OidcToken) - ), + }, ), - AttributesEntry=_reflection.GeneratedProtocolMessageType( + "AttributesEntry": _reflection.GeneratedProtocolMessageType( "AttributesEntry", (_message.Message,), - dict( - DESCRIPTOR=_PUSHCONFIG_ATTRIBUTESENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + { + "DESCRIPTOR": _PUSHCONFIG_ATTRIBUTESENTRY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) - ), + }, ), - DESCRIPTOR=_PUSHCONFIG, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Configuration for a push delivery endpoint. + "DESCRIPTOR": _PUSHCONFIG, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Configuration for a push delivery endpoint. + Attributes: push_endpoint: A URL locating the endpoint to which messages should be @@ -3959,7 +4206,7 @@ every pushed message. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) - ), + }, ) _sym_db.RegisterMessage(PushConfig) _sym_db.RegisterMessage(PushConfig.OidcToken) @@ -3968,60 +4215,62 @@ ReceivedMessage = _reflection.GeneratedProtocolMessageType( "ReceivedMessage", (_message.Message,), - dict( - DESCRIPTOR=_RECEIVEDMESSAGE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A message and its corresponding acknowledgment ID. + { + "DESCRIPTOR": _RECEIVEDMESSAGE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A message and its corresponding acknowledgment ID. + Attributes: ack_id: This ID can be used to acknowledge the received message. message: The message. delivery_attempt: - Delivery attempt counter is 1 + (the sum of number of NACKs - and number of ack\_deadline exceeds) for this message. A NACK - is any call to ModifyAckDeadline with a 0 deadline. An - ack\_deadline exceeds event is whenever a message is not - acknowledged within ack\_deadline. Note that ack\_deadline is - initially Subscription.ackDeadlineSeconds, but may get - extended automatically by the client library. The first - delivery of a given message will have this value as 1. The - value is calculated at best effort and is approximate. If a - DeadLetterPolicy is not set on the subscription, this will be - 0. EXPERIMENTAL: This feature is part of a closed alpha - release. This API might be changed in backward-incompatible - ways and is not recommended for production use. It is not - subject to any SLA or deprecation policy. + The approximate number of times that Cloud Pub/Sub has + attempted to deliver the associated message to a subscriber. + More precisely, this is 1 + (number of NACKs) + (number of + ack_deadline exceeds) for this message. A NACK is any call to + ModifyAckDeadline with a 0 deadline. An ack_deadline exceeds + event is whenever a message is not acknowledged within + ack_deadline. 
Note that ack_deadline is initially + Subscription.ackDeadlineSeconds, but may get extended + automatically by the client library. Upon the first delivery + of a given message, ``delivery_attempt`` will have a value of + 1. The value is calculated at best effort and is approximate. + If a DeadLetterPolicy is not set on the subscription, this + will be 0. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) - ), + }, ) _sym_db.RegisterMessage(ReceivedMessage) GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType( "GetSubscriptionRequest", (_message.Message,), - dict( - DESCRIPTOR=_GETSUBSCRIPTIONREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the GetSubscription method. + { + "DESCRIPTOR": _GETSUBSCRIPTIONREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the GetSubscription method. + Attributes: subscription: Required. The name of the subscription to get. Format is ``projects/{project}/subscriptions/{sub}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) - ), + }, ) _sym_db.RegisterMessage(GetSubscriptionRequest) UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType( "UpdateSubscriptionRequest", (_message.Message,), - dict( - DESCRIPTOR=_UPDATESUBSCRIPTIONREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the UpdateSubscription method. + { + "DESCRIPTOR": _UPDATESUBSCRIPTIONREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the UpdateSubscription method. + Attributes: subscription: Required. The updated subscription object. @@ -4030,17 +4279,18 @@ to update. Must be specified and non-empty. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) - ), + }, ) _sym_db.RegisterMessage(UpdateSubscriptionRequest) ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( "ListSubscriptionsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTSUBSCRIPTIONSREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListSubscriptions`` method. + { + "DESCRIPTOR": _LISTSUBSCRIPTIONSREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``ListSubscriptions`` method. + Attributes: project: Required. The name of the project in which to list @@ -4054,17 +4304,18 @@ the next page of data. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) - ), + }, ) _sym_db.RegisterMessage(ListSubscriptionsRequest) ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( "ListSubscriptionsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTSUBSCRIPTIONSRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListSubscriptions`` method. + { + "DESCRIPTOR": _LISTSUBSCRIPTIONSRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``ListSubscriptions`` method. + Attributes: subscriptions: The subscriptions that match the request. @@ -4074,34 +4325,36 @@ ``ListSubscriptionsRequest`` to get more subscriptions. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) - ), + }, ) _sym_db.RegisterMessage(ListSubscriptionsResponse) DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType( "DeleteSubscriptionRequest", (_message.Message,), - dict( - DESCRIPTOR=_DELETESUBSCRIPTIONREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the DeleteSubscription method. 
+ { + "DESCRIPTOR": _DELETESUBSCRIPTIONREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the DeleteSubscription method. + Attributes: subscription: Required. The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) - ), + }, ) _sym_db.RegisterMessage(DeleteSubscriptionRequest) ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType( "ModifyPushConfigRequest", (_message.Message,), - dict( - DESCRIPTOR=_MODIFYPUSHCONFIGREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ModifyPushConfig method. + { + "DESCRIPTOR": _MODIFYPUSHCONFIGREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ModifyPushConfig method. + Attributes: subscription: Required. The name of the subscription. Format is @@ -4115,17 +4368,18 @@ called. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) - ), + }, ) _sym_db.RegisterMessage(ModifyPushConfigRequest) PullRequest = _reflection.GeneratedProtocolMessageType( "PullRequest", (_message.Message,), - dict( - DESCRIPTOR=_PULLREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``Pull`` method. + { + "DESCRIPTOR": _PULLREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``Pull`` method. + Attributes: subscription: Required. The subscription from which messages should be @@ -4145,17 +4399,18 @@ return fewer than the number specified. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) - ), + }, ) _sym_db.RegisterMessage(PullRequest) PullResponse = _reflection.GeneratedProtocolMessageType( "PullResponse", (_message.Message,), - dict( - DESCRIPTOR=_PULLRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``Pull`` method. + { + "DESCRIPTOR": _PULLRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``Pull`` method. + Attributes: received_messages: Received Pub/Sub messages. The list will be empty if there are @@ -4165,17 +4420,18 @@ more messages available in the backlog. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) - ), + }, ) _sym_db.RegisterMessage(PullResponse) ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType( "ModifyAckDeadlineRequest", (_message.Message,), - dict( - DESCRIPTOR=_MODIFYACKDEADLINEREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ModifyAckDeadline method. + { + "DESCRIPTOR": _MODIFYACKDEADLINEREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ModifyAckDeadline method. + Attributes: subscription: Required. The name of the subscription. Format is @@ -4194,17 +4450,18 @@ deadline you can specify is 600 seconds (10 minutes). """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) - ), + }, ) _sym_db.RegisterMessage(ModifyAckDeadlineRequest) AcknowledgeRequest = _reflection.GeneratedProtocolMessageType( "AcknowledgeRequest", (_message.Message,), - dict( - DESCRIPTOR=_ACKNOWLEDGEREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the Acknowledge method. + { + "DESCRIPTOR": _ACKNOWLEDGEREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the Acknowledge method. + Attributes: subscription: Required. 
The subscription whose message is being @@ -4216,20 +4473,21 @@ ``Pull`` response. Must not be empty. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) - ), + }, ) _sym_db.RegisterMessage(AcknowledgeRequest) StreamingPullRequest = _reflection.GeneratedProtocolMessageType( "StreamingPullRequest", (_message.Message,), - dict( - DESCRIPTOR=_STREAMINGPULLREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``StreamingPull`` streaming RPC method. This request + { + "DESCRIPTOR": _STREAMINGPULLREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``StreamingPull`` streaming RPC method. This request is used to establish the initial stream as well as to stream acknowledgements and ack deadline modifications from the client to the server. + Attributes: subscription: Required. The subscription for which to initialize the new @@ -4276,49 +4534,51 @@ A unique identifier that is used to distinguish client instances from each other. Only needs to be provided on the initial request. When a stream disconnects and reconnects for - the same stream, the client\_id should be set to the same - value so that state associated with the old stream can be - transferred to the new stream. The same client\_id should not + the same stream, the client_id should be set to the same value + so that state associated with the old stream can be + transferred to the new stream. The same client_id should not be used for different client instances. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) - ), + }, ) _sym_db.RegisterMessage(StreamingPullRequest) StreamingPullResponse = _reflection.GeneratedProtocolMessageType( "StreamingPullResponse", (_message.Message,), - dict( - DESCRIPTOR=_STREAMINGPULLRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``StreamingPull`` method. 
This response is used to + { + "DESCRIPTOR": _STREAMINGPULLRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``StreamingPull`` method. This response is used to stream messages from the server to the client. + Attributes: received_messages: Received Pub/Sub messages. This will not be empty. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) - ), + }, ) _sym_db.RegisterMessage(StreamingPullResponse) CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType( "CreateSnapshotRequest", (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( "LabelsEntry", (_message.Message,), - dict( - DESCRIPTOR=_CREATESNAPSHOTREQUEST_LABELSENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + { + "DESCRIPTOR": _CREATESNAPSHOTREQUEST_LABELSENTRY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) - ), + }, ), - DESCRIPTOR=_CREATESNAPSHOTREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``CreateSnapshot`` method. + "DESCRIPTOR": _CREATESNAPSHOTREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``CreateSnapshot`` method. + Attributes: name: Required. User-provided name for this snapshot. If the name is @@ -4331,17 +4591,17 @@ Required. The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: (a) The existing backlog on the subscription. 
More precisely, - this is defined as the messages in the subscription's backlog + this is defined as the messages in the subscription’s backlog that are unacknowledged upon the successful completion of the ``CreateSnapshot`` request; as well as: (b) Any messages - published to the subscription's topic following the successful + published to the subscription’s topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. labels: See Creating and managing labels. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) - ), + }, ) _sym_db.RegisterMessage(CreateSnapshotRequest) _sym_db.RegisterMessage(CreateSnapshotRequest.LabelsEntry) @@ -4349,10 +4609,11 @@ UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType( "UpdateSnapshotRequest", (_message.Message,), - dict( - DESCRIPTOR=_UPDATESNAPSHOTREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the UpdateSnapshot method. + { + "DESCRIPTOR": _UPDATESNAPSHOTREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the UpdateSnapshot method. + Attributes: snapshot: Required. The updated snapshot object. @@ -4361,29 +4622,30 @@ update. Must be specified and non-empty. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) - ), + }, ) _sym_db.RegisterMessage(UpdateSnapshotRequest) Snapshot = _reflection.GeneratedProtocolMessageType( "Snapshot", (_message.Message,), - dict( - LabelsEntry=_reflection.GeneratedProtocolMessageType( + { + "LabelsEntry": _reflection.GeneratedProtocolMessageType( "LabelsEntry", (_message.Message,), - dict( - DESCRIPTOR=_SNAPSHOT_LABELSENTRY, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2" + { + "DESCRIPTOR": _SNAPSHOT_LABELSENTRY, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) - ), + }, ), - DESCRIPTOR=_SNAPSHOT, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""A snapshot resource. Snapshots are used in Seek operations, which + "DESCRIPTOR": _SNAPSHOT, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """A snapshot resource. Snapshots are used in Seek operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. + Attributes: name: The name of the snapshot. @@ -4399,15 +4661,15 @@ of oldest unacked message in the subscription)``. For example, consider a subscription whose oldest unacked message is 3 days old. If a snapshot is created from this subscription, the - snapshot -- which will always capture this 3-day-old backlog - as long as the snapshot exists -- will expire in 4 days. The + snapshot – which will always capture this 3-day-old backlog as + long as the snapshot exists – will expire in 4 days. The service will refuse to create a snapshot that would expire in less than 1 hour after creation. labels: See Creating and managing labels. 
""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) - ), + }, ) _sym_db.RegisterMessage(Snapshot) _sym_db.RegisterMessage(Snapshot.LabelsEntry) @@ -4415,27 +4677,29 @@ GetSnapshotRequest = _reflection.GeneratedProtocolMessageType( "GetSnapshotRequest", (_message.Message,), - dict( - DESCRIPTOR=_GETSNAPSHOTREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the GetSnapshot method. + { + "DESCRIPTOR": _GETSNAPSHOTREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the GetSnapshot method. + Attributes: snapshot: Required. The name of the snapshot to get. Format is ``projects/{project}/snapshots/{snap}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) - ), + }, ) _sym_db.RegisterMessage(GetSnapshotRequest) ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType( "ListSnapshotsRequest", (_message.Message,), - dict( - DESCRIPTOR=_LISTSNAPSHOTSREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``ListSnapshots`` method. + { + "DESCRIPTOR": _LISTSNAPSHOTSREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``ListSnapshots`` method. + Attributes: project: Required. The name of the project in which to list snapshots. @@ -4449,17 +4713,18 @@ next page of data. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) - ), + }, ) _sym_db.RegisterMessage(ListSnapshotsRequest) ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType( "ListSnapshotsResponse", (_message.Message,), - dict( - DESCRIPTOR=_LISTSNAPSHOTSRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``ListSnapshots`` method. + { + "DESCRIPTOR": _LISTSNAPSHOTSRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``ListSnapshots`` method. 
+ Attributes: snapshots: The resulting snapshots. @@ -4469,34 +4734,36 @@ ``ListSnapshotsRequest``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) - ), + }, ) _sym_db.RegisterMessage(ListSnapshotsResponse) DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType( "DeleteSnapshotRequest", (_message.Message,), - dict( - DESCRIPTOR=_DELETESNAPSHOTREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``DeleteSnapshot`` method. + { + "DESCRIPTOR": _DELETESNAPSHOTREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``DeleteSnapshot`` method. + Attributes: snapshot: Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) - ), + }, ) _sym_db.RegisterMessage(DeleteSnapshotRequest) SeekRequest = _reflection.GeneratedProtocolMessageType( "SeekRequest", (_message.Message,), - dict( - DESCRIPTOR=_SEEKREQUEST, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Request for the ``Seek`` method. + { + "DESCRIPTOR": _SEEKREQUEST, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Request for the ``Seek`` method. + Attributes: subscription: Required. The subscription to affect. @@ -4509,29 +4776,29 @@ in the subscription (configured by the combination of ``message_retention_duration`` and ``retain_acked_messages``). For example, if ``time`` corresponds to a point before the - message retention window (or to a point before the system's + message retention window (or to a point before the system’s notion of the subscription creation time), only retained messages will be marked as unacknowledged, and already- expunged messages will not be restored. snapshot: - The snapshot to seek to. The snapshot's topic must be the same + The snapshot to seek to. 
The snapshot’s topic must be the same as that of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) - ), + }, ) _sym_db.RegisterMessage(SeekRequest) SeekResponse = _reflection.GeneratedProtocolMessageType( "SeekResponse", (_message.Message,), - dict( - DESCRIPTOR=_SEEKRESPONSE, - __module__="google.cloud.pubsub_v1.proto.pubsub_pb2", - __doc__="""Response for the ``Seek`` method (this response is empty).""", + { + "DESCRIPTOR": _SEEKRESPONSE, + "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", + "__doc__": """Response for the ``Seek`` method (this response is empty).""", # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) - ), + }, ) _sym_db.RegisterMessage(SeekResponse) @@ -4551,6 +4818,7 @@ _LISTTOPICSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"]._options = None _LISTTOPICSNAPSHOTSREQUEST.fields_by_name["topic"]._options = None _DELETETOPICREQUEST.fields_by_name["topic"]._options = None +_DETACHSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None _SUBSCRIPTION_LABELSENTRY._options = None _SUBSCRIPTION.fields_by_name["name"]._options = None _SUBSCRIPTION.fields_by_name["topic"]._options = None @@ -4592,11 +4860,10 @@ full_name="google.pubsub.v1.Publisher", file=DESCRIPTOR, index=0, - serialized_options=_b( - "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" - ), - serialized_start=6209, - serialized_end=7476, + serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", + create_key=_descriptor._internal_create_key, + serialized_start=6352, + serialized_end=7795, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4605,9 +4872,8 @@ containing_service=None, input_type=_TOPIC, output_type=_TOPIC, - serialized_options=_b( - 
"\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*\332A\004name" - ), + serialized_options=b"\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*\332A\004name", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="UpdateTopic", @@ -4616,9 +4882,8 @@ containing_service=None, input_type=_UPDATETOPICREQUEST, output_type=_TOPIC, - serialized_options=_b( - "\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*" - ), + serialized_options=b"\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="Publish", @@ -4627,9 +4892,8 @@ containing_service=None, input_type=_PUBLISHREQUEST, output_type=_PUBLISHRESPONSE, - serialized_options=_b( - "\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*\332A\016topic,messages" - ), + serialized_options=b"\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*\332A\016topic,messages", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="GetTopic", @@ -4638,9 +4902,8 @@ containing_service=None, input_type=_GETTOPICREQUEST, output_type=_TOPIC, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\332A\005topic" - ), + serialized_options=b"\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\332A\005topic", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListTopics", @@ -4649,9 +4912,8 @@ containing_service=None, input_type=_LISTTOPICSREQUEST, output_type=_LISTTOPICSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics\332A\007project" - ), + serialized_options=b"\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics\332A\007project", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListTopicSubscriptions", @@ -4660,9 +4922,8 @@ 
containing_service=None, input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions\332A\005topic" - ), + serialized_options=b"\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions\332A\005topic", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListTopicSnapshots", @@ -4671,9 +4932,8 @@ containing_service=None, input_type=_LISTTOPICSNAPSHOTSREQUEST, output_type=_LISTTOPICSNAPSHOTSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots\332A\005topic" - ), + serialized_options=b"\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots\332A\005topic", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="DeleteTopic", @@ -4682,9 +4942,18 @@ containing_service=None, input_type=_DELETETOPICREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}\332A\005topic" - ), + serialized_options=b"\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}\332A\005topic", + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="DetachSubscription", + full_name="google.pubsub.v1.Publisher.DetachSubscription", + index=8, + containing_service=None, + input_type=_DETACHSUBSCRIPTIONREQUEST, + output_type=_DETACHSUBSCRIPTIONRESPONSE, + serialized_options=b'\202\323\344\223\0026"4/v1/{subscription=projects/*/subscriptions/*}:detach', + create_key=_descriptor._internal_create_key, ), ], ) @@ -4698,11 +4967,10 @@ full_name="google.pubsub.v1.Subscriber", file=DESCRIPTOR, index=1, - serialized_options=_b( - "\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub" - ), - serialized_start=7479, - serialized_end=10170, + 
serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", + create_key=_descriptor._internal_create_key, + serialized_start=7798, + serialized_end=10489, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", @@ -4711,9 +4979,8 @@ containing_service=None, input_type=_SUBSCRIPTION, output_type=_SUBSCRIPTION, - serialized_options=_b( - "\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*\332A+name,topic,push_config,ack_deadline_seconds" - ), + serialized_options=b"\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*\332A+name,topic,push_config,ack_deadline_seconds", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="GetSubscription", @@ -4722,9 +4989,8 @@ containing_service=None, input_type=_GETSUBSCRIPTIONREQUEST, output_type=_SUBSCRIPTION, - serialized_options=_b( - "\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription" - ), + serialized_options=b"\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="UpdateSubscription", @@ -4733,9 +4999,8 @@ containing_service=None, input_type=_UPDATESUBSCRIPTIONREQUEST, output_type=_SUBSCRIPTION, - serialized_options=_b( - "\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*" - ), + serialized_options=b"\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListSubscriptions", @@ -4744,9 +5009,8 @@ containing_service=None, input_type=_LISTSUBSCRIPTIONSREQUEST, output_type=_LISTSUBSCRIPTIONSRESPONSE, - serialized_options=_b( - "\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions\332A\007project" - ), + 
serialized_options=b"\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions\332A\007project", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="DeleteSubscription", @@ -4755,9 +5019,8 @@ containing_service=None, input_type=_DELETESUBSCRIPTIONREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription" - ), + serialized_options=b"\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ModifyAckDeadline", @@ -4766,9 +5029,8 @@ containing_service=None, input_type=_MODIFYACKDEADLINEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*\332A)subscription,ack_ids,ack_deadline_seconds' - ), + serialized_options=b'\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*\332A)subscription,ack_ids,ack_deadline_seconds', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="Acknowledge", @@ -4777,9 +5039,8 @@ containing_service=None, input_type=_ACKNOWLEDGEREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*\332A\024subscription,ack_ids' - ), + serialized_options=b'\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*\332A\024subscription,ack_ids', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="Pull", @@ -4788,9 +5049,8 @@ containing_service=None, input_type=_PULLREQUEST, output_type=_PULLRESPONSE, - serialized_options=_b( - 
'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*\332A,subscription,return_immediately,max_messages' - ), + serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*\332A,subscription,return_immediately,max_messages', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="StreamingPull", @@ -4800,6 +5060,7 @@ input_type=_STREAMINGPULLREQUEST, output_type=_STREAMINGPULLRESPONSE, serialized_options=None, + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ModifyPushConfig", @@ -4808,9 +5069,8 @@ containing_service=None, input_type=_MODIFYPUSHCONFIGREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*\332A\030subscription,push_config' - ), + serialized_options=b'\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*\332A\030subscription,push_config', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="GetSnapshot", @@ -4819,9 +5079,8 @@ containing_service=None, input_type=_GETSNAPSHOTREQUEST, output_type=_SNAPSHOT, - serialized_options=_b( - "\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot" - ), + serialized_options=b"\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListSnapshots", @@ -4830,9 +5089,8 @@ containing_service=None, input_type=_LISTSNAPSHOTSREQUEST, output_type=_LISTSNAPSHOTSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots\332A\007project' - ), + serialized_options=b'\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots\332A\007project', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( 
name="CreateSnapshot", @@ -4841,9 +5099,8 @@ containing_service=None, input_type=_CREATESNAPSHOTREQUEST, output_type=_SNAPSHOT, - serialized_options=_b( - "\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*\332A\021name,subscription" - ), + serialized_options=b"\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*\332A\021name,subscription", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="UpdateSnapshot", @@ -4852,9 +5109,8 @@ containing_service=None, input_type=_UPDATESNAPSHOTREQUEST, output_type=_SNAPSHOT, - serialized_options=_b( - "\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*" - ), + serialized_options=b"\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="DeleteSnapshot", @@ -4863,9 +5119,8 @@ containing_service=None, input_type=_DELETESNAPSHOTREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - "\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot" - ), + serialized_options=b"\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="Seek", @@ -4874,9 +5129,8 @@ containing_service=None, input_type=_SEEKREQUEST, output_type=_SEEKRESPONSE, - serialized_options=_b( - '\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*' - ), + serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*', + create_key=_descriptor._internal_create_key, ), ], ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index dd1a365877e1..5e99bf2dedc5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -58,6 +58,11 @@ def __init__(self, channel): request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) + self.DetachSubscription = channel.unary_unary( + "/google.pubsub.v1.Publisher/DetachSubscription", + request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, + ) class PublisherServicer(object): @@ -105,7 +110,7 @@ def ListTopics(self, request, context): raise NotImplementedError("Method not implemented!") def ListTopicSubscriptions(self, request, context): - """Lists the names of the subscriptions on this topic. + """Lists the names of the attached subscriptions on this topic. """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") @@ -134,6 +139,16 @@ def DeleteTopic(self, request, context): context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") + def DetachSubscription(self, request, context): + """Detaches a subscription from this topic. All messages retained in the + subscription are dropped. Subsequent `Pull` and `StreamingPull` requests + will return FAILED_PRECONDITION. If the subscription is a push + subscription, pushes to the endpoint will stop. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + def add_PublisherServicer_to_server(servicer, server): rpc_method_handlers = { @@ -177,6 +192,11 @@ def add_PublisherServicer_to_server(servicer, server): request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), + "DetachSubscription": grpc.unary_unary_rpc_method_handler( + servicer.DetachSubscription, + request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.FromString, + response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.SerializeToString, + ), } generic_handler = grpc.method_handlers_generic_handler( "google.pubsub.v1.Publisher", rpc_method_handlers diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 751b0e04452a..cf7a6e9782d0 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -26,11 +26,12 @@ BLACK_VERSION = "black==19.3b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.7" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.7"] +UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. 
@@ -56,7 +57,7 @@ def blacken(session): session.run("black", *BLACK_PATHS) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -84,13 +85,13 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") @@ -120,7 +121,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -133,12 +134,12 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh new file mode 100755 index 000000000000..ff599eb2af25 --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000000..d309d6e97518 --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000000..4fd239765b0a --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. 
_Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/auth.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000000..1446b94a5e3a --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000000..11957ce2714a --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. 
_Cloud Console: https://console.cloud.google.com/project?_ diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000000..a0406dba8c84 --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000000..5ea33d18c00c --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. 
+ +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index f48ab46269f3..da86b45aab8e 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -1,39 +1,32 @@ { "sources": [ - { - "generator": { - "name": "artman", - "version": "2.0.0", - "dockerImage": "googleapis/artman@sha256:b3b47805231a305d0f40c4bf069df20f6a2635574e6d4259fac651d3f9f6e098" - } - }, { "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "4ce898e80eeb16b18d1ee29c678ade149804d186" + "sha": "06085c4083b9dccdd50383257799904510bbf3a0" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "42ee97c1b93a0e3759bbba3013da309f670a90ab", - "internalRef": "307114445" + "sha": "86285bbd54fbf9708838219e3422aa47fb8fc0b0", + "internalRef": "314795690" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f5e4c17dc78a966dbf29961dd01f9bbd63e20a04" + "sha": "973fec811e9203b4c147121a26f1484841c465fd" } }, { "git": { "name": "synthtool", "remote": 
"https://github.com/googleapis/synthtool.git", - "sha": "f5e4c17dc78a966dbf29961dd01f9bbd63e20a04" + "sha": "973fec811e9203b4c147121a26f1484841c465fd" } } ], @@ -44,8 +37,7 @@ "apiName": "pubsub", "apiVersion": "v1", "language": "python", - "generator": "gapic", - "config": "google/pubsub/artman_pubsub.yaml" + "generator": "bazel" } } ] diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 9e475e781d2c..e9547c834af2 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -185,6 +185,14 @@ def _merge_dict(d1, d2): "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", ) +# Add missing blank line before Attributes: in generated docstrings +# https://github.com/googleapis/protoc-docs-plugin/pull/31 +s.replace( + "google/cloud/pubsub_v1/proto/pubsub_pb2.py", + "(\s+)Attributes:", + "\n\g<1>Attributes:" +) + # Fix incomplete docstring examples. s.replace( "google/cloud/pubsub_v1/gapic/subscriber_client.py", diff --git a/packages/google-cloud-pubsub/testing/.gitignore b/packages/google-cloud-pubsub/testing/.gitignore new file mode 100644 index 000000000000..b05fbd630881 --- /dev/null +++ b/packages/google-cloud-pubsub/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py index 4fad76b78518..ad4f38d8b9de 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py @@ -519,3 +519,42 @@ def test_test_iam_permissions_exception(self): with pytest.raises(CustomException): client.test_iam_permissions(resource, permissions) + + def test_detach_subscription(self): + # Setup Expected Response + expected_response = {} + expected_response = 
pubsub_pb2.DetachSubscriptionResponse(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() + + # Setup Request + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + + response = client.detach_subscription(subscription) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = pubsub_pb2.DetachSubscriptionRequest( + subscription=subscription + ) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_detach_subscription_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + patch = mock.patch("google.api_core.grpc_helpers.create_channel") + with patch as create_channel: + create_channel.return_value = channel + client = publisher_client.PublisherClient() + + # Setup request + subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") + + with pytest.raises(CustomException): + client.detach_subscription(subscription) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py index 4e34e19057fe..b059214d7f5c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py @@ -78,6 +78,7 @@ def test_create_subscription(self): retain_acked_messages = False enable_message_ordering = True filter_ = "filter-1274492040" + detached = True expected_response = { "name": name_2, "topic": topic_2, @@ -85,6 +86,7 @@ def test_create_subscription(self): "retain_acked_messages": retain_acked_messages, "enable_message_ordering": enable_message_ordering, "filter": filter_, + 
"detached": detached, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -130,6 +132,7 @@ def test_get_subscription(self): retain_acked_messages = False enable_message_ordering = True filter_ = "filter-1274492040" + detached = True expected_response = { "name": name, "topic": topic, @@ -137,6 +140,7 @@ def test_get_subscription(self): "retain_acked_messages": retain_acked_messages, "enable_message_ordering": enable_message_ordering, "filter": filter_, + "detached": detached, } expected_response = pubsub_pb2.Subscription(**expected_response) @@ -180,6 +184,7 @@ def test_update_subscription(self): retain_acked_messages = False enable_message_ordering = True filter_ = "filter-1274492040" + detached = True expected_response = { "name": name, "topic": topic, @@ -187,6 +192,7 @@ def test_update_subscription(self): "retain_acked_messages": retain_acked_messages, "enable_message_ordering": enable_message_ordering, "filter": filter_, + "detached": detached, } expected_response = pubsub_pb2.Subscription(**expected_response) From 1741af1f2a01d3f08068cbeb4df7bae4845422e9 Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Wed, 10 Jun 2020 00:13:52 +0300 Subject: [PATCH 0464/1197] refactor: incorporate will_accept() checks into publish() (#108) --- .../cloud/pubsub_v1/publisher/_batch/base.py | 26 ---------- .../pubsub_v1/publisher/_batch/thread.py | 4 +- .../pubsub_v1/publisher/batch/test_base.py | 30 ----------- .../pubsub_v1/publisher/batch/test_thread.py | 52 ++++++++++++++++--- .../publisher/test_publisher_client.py | 4 -- 5 files changed, 47 insertions(+), 69 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 53d3dee5be21..212a4b2774c0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -109,32 +109,6 
@@ def status(self): """ raise NotImplementedError - def will_accept(self, message): - """Return True if the batch is able to accept the message. - - In concurrent implementations, the attributes on the current batch - may be modified by other workers. With this in mind, the caller will - likely want to hold a lock that will make sure the state remains - the same after the "will accept?" question is answered. - - Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. - - Returns: - bool: Whether this batch can accept the message. - """ - # If this batch is not accepting messages generally, return False. - if self.status != BatchStatus.ACCEPTING_MESSAGES: - return False - - # If this message will make the batch exceed the ``max_messages`` - # setting, return False. - if len(self.messages) >= self.settings.max_messages: - return False - - # Okay, everything is good. - return True - def cancel(self, cancellation_reason): """Complete pending futures with an exception. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 73210011d0d8..67c9f2de3a3c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -333,8 +333,8 @@ def publish(self, message): self._status != base.BatchStatus.ERROR ), "Publish after stop() or publish error." 
- if not self.will_accept(message): - return future + if self.status != base.BatchStatus.ACCEPTING_MESSAGES: + return size_increase = types.PublishRequest(messages=[message]).ByteSize() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 96f18451d8ee..f10b54ee5f79 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -46,33 +46,3 @@ def test_len(): assert len(batch) == 0 batch.publish(types.PubsubMessage(data=b"foo")) assert len(batch) == 1 - - -def test_will_accept(): - batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) - message = types.PubsubMessage() - assert batch.will_accept(message) is True - - -def test_will_accept_oversize(): - batch = create_batch( - settings=types.BatchSettings(max_bytes=10), - status=BatchStatus.ACCEPTING_MESSAGES, - ) - message = types.PubsubMessage(data=b"abcdefghijklmnopqrstuvwxyz") - assert batch.will_accept(message) is True - - -def test_will_not_accept_status(): - batch = create_batch(status="talk to the hand") - message = types.PubsubMessage() - assert batch.will_accept(message) is False - - -def test_will_not_accept_number(): - batch = create_batch( - settings=types.BatchSettings(max_messages=-1), - status=BatchStatus.ACCEPTING_MESSAGES, - ) - message = types.PubsubMessage(data=b"abc") - assert batch.will_accept(message) is False diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index ce288a48e41c..e9d2b09c0761 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -287,18 +287,56 @@ def 
test_publish_updating_batch_size(): assert batch.size > 0 # I do not always trust protobuf. -def test_publish_not_will_accept(): +def test_publish(): + batch = create_batch() + message = types.PubsubMessage() + future = batch.publish(message) + + assert len(batch.messages) == 1 + assert batch._futures == [future] + + +def test_publish_max_messages_zero(): batch = create_batch(topic="topic_foo", max_messages=0) - base_request_size = types.PublishRequest(topic="topic_foo").ByteSize() - # Publish the message. message = types.PubsubMessage(data=b"foobarbaz") + with mock.patch.object(batch, "commit") as commit: + future = batch.publish(message) + + assert future is not None + assert len(batch.messages) == 1 + assert batch._futures == [future] + commit.assert_called_once() + + +def test_publish_max_messages_enforced(): + batch = create_batch(topic="topic_foo", max_messages=1) + + message = types.PubsubMessage(data=b"foobarbaz") + message2 = types.PubsubMessage(data=b"foobarbaz2") + + future = batch.publish(message) + future2 = batch.publish(message2) + + assert future is not None + assert future2 is None + assert len(batch.messages) == 1 + assert len(batch._futures) == 1 + + +def test_publish_max_bytes_enforced(): + batch = create_batch(topic="topic_foo", max_bytes=15) + + message = types.PubsubMessage(data=b"foobarbaz") + message2 = types.PubsubMessage(data=b"foobarbaz2") + future = batch.publish(message) + future2 = batch.publish(message2) - assert future is None - assert batch.size == base_request_size - assert batch.messages == [] - assert batch._futures == [] + assert future is not None + assert future2 is None + assert len(batch.messages) == 1 + assert len(batch._futures) == 1 def test_publish_exceed_max_messages(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 4e3a3870f74d..b58ed133f8bf 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -135,7 +135,6 @@ def test_publish(): batch = mock.Mock(spec=client._batch_class) # Set the mock up to claim indiscriminately that it accepts all messages. - batch.will_accept.return_value = True batch.publish.side_effect = (future1, future2) topic = "topic/path" @@ -169,7 +168,6 @@ def test_publish_error_exceeding_flow_control_limits(): client = publisher.Client(credentials=creds, publisher_options=publisher_options) mock_batch = mock.Mock(spec=client._batch_class) - mock_batch.will_accept.return_value = True topic = "topic/path" client._set_batch(topic, mock_batch) @@ -216,7 +214,6 @@ def test_publish_attrs_bytestring(): # Use a mock in lieu of the actual batch class. batch = mock.Mock(spec=client._batch_class) # Set the mock up to claim indiscriminately that it accepts all messages. - batch.will_accept.return_value = True topic = "topic/path" client._set_batch(topic, batch) @@ -431,7 +428,6 @@ def test_publish_with_ordering_key(): future1.add_done_callback = mock.Mock(spec=["__call__"]) future2.add_done_callback = mock.Mock(spec=["__call__"]) - batch.will_accept.return_value = True batch.publish.side_effect = (future1, future2) topic = "topic/path" From 318dad84f275affe17edb5261ccf03910fa238d0 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 10 Jun 2020 00:24:56 +0200 Subject: [PATCH 0465/1197] fix: PubSub incompatibility with api-core 1.17.0+ (#103) * fix: disable pre-fetching first streaming pull item * Remove api-core version cap, but ban 1.17.0 release * Regenerate gapic layer with synth * Revert "Regenerate gapic layer with synth" This reverts commit 1d24853f1f255d207a9fcf30180290871235bca4. 
* Retain only the relevant fix in generated code * Exclude multiple incompatible api-core versions * Fix syntax error in synth.py * Ban all bugfix versions of problematic api-core minor versions --- .../cloud/pubsub_v1/gapic/subscriber_client.py | 5 +++++ packages/google-cloud-pubsub/setup.py | 6 +++--- packages/google-cloud-pubsub/synth.py | 14 ++++++++++++++ .../pubsub_v1/subscriber/test_subscriber_client.py | 12 ++++++++++++ 4 files changed, 34 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 400bdc3da638..12c2a780d4ab 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -1175,6 +1175,11 @@ def streaming_pull( client_info=self._client_info, ) + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self.transport.streaming_pull._prefetch_first_result_ = False + return self._inner_api_calls["streaming_pull"]( requests, retry=retry, timeout=timeout, metadata=metadata ) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 414e7620b284..25ce7085b137 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,10 +29,10 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # google-api-core[grpc] 1.17.0 causes problems, thus restricting its - # version until the issue gets fixed. + # google-api-core[grpc] 1.17.0 up to 1.19.1 causes problems with stream + # recovery, thus those versions should not be used. 
# https://github.com/googleapis/python-pubsub/issues/74 - "google-api-core[grpc] >= 1.14.0, < 1.17.0", + "google-api-core[grpc] >= 1.14.0, != 1.17.*, != 1.18.*, != 1.19.*", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", 'enum34; python_version < "3.4"', ] diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index e9547c834af2..b44cc0acff57 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -185,6 +185,20 @@ def _merge_dict(d1, d2): "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", ) +# Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream +# results. +s.replace( + "google/cloud/pubsub_v1/gapic/subscriber_client.py", + r"return self\._inner_api_calls\['streaming_pull'\]\(.*", + """ + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self.transport.streaming_pull._prefetch_first_result_ = False + + \g<0>""" +) + # Add missing blank line before Attributes: in generated docstrings # https://github.com/googleapis/protoc-docs-plugin/pull/31 s.replace( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index d8f671157297..310485279d9e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -154,3 +154,15 @@ def test_closes_channel_as_context_manager(): pass mock_transport.channel.close.assert_called() + + +def test_streaming_pull_gapic_monkeypatch(): + transport = mock.NonCallableMock(spec=["streaming_pull"]) + transport.streaming_pull = mock.Mock(spec=[]) + client = subscriber.Client(transport=transport) + + 
client.streaming_pull(requests=iter([])) + + assert client.api.transport is transport + assert hasattr(transport.streaming_pull, "_prefetch_first_result_") + assert not transport.streaming_pull._prefetch_first_result_ From 61e2b02c52db881c96f5ae00610e80d4425519f4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 9 Jun 2020 23:26:04 +0000 Subject: [PATCH 0466/1197] chore: release 1.6.0 (#117) ## [1.6.0](https://www.github.com/googleapis/python-pubsub/compare/v1.5.0...v1.6.0) (2020-06-09) ### Features * Add flow control for message publishing ([#96](https://www.github.com/googleapis/python-pubsub/issues/96)) ([06085c4](https://www.github.com/googleapis/python-pubsub/commit/06085c4083b9dccdd50383257799904510bbf3a0)) ### Bug Fixes * Fix PubSub incompatibility with api-core 1.17.0+ ([#103](https://www.github.com/googleapis/python-pubsub/issues/103)) ([c02060f](https://www.github.com/googleapis/python-pubsub/commit/c02060fbbe6e2ca4664bee08d2de10665d41dc0b)) ### Documentation - Clarify that Schedulers shouldn't be used with multiple SubscriberClients ([#100](https://github.com/googleapis/python-pubsub/pull/100)) ([cf9e87c](https://github.com/googleapis/python-pubsub/commit/cf9e87c80c0771f3fa6ef784a8d76cb760ad37ef)) - Fix update subscription/snapshot/topic samples ([#113](https://github.com/googleapis/python-pubsub/pull/113)) ([e62c38b](https://github.com/googleapis/python-pubsub/commit/e62c38bb33de2434e32f866979de769382dea34a)) ### Internal / Testing Changes - Re-generated service implementation using synth: removed experimental notes from the RetryPolicy and filtering features in anticipation of GA, added DetachSubscription (experimental) ([#114](https://github.com/googleapis/python-pubsub/pull/114)) ([0132a46](https://github.com/googleapis/python-pubsub/commit/0132a4680e0727ce45d5e27d98ffc9f3541a0962)) - Incorporate will_accept() checks into publish()
([#108](https://github.com/googleapis/python-pubsub/pull/108)) ([6c7677e](https://github.com/googleapis/python-pubsub/commit/6c7677ecb259672bbb9b6f7646919e602c698570)) --- packages/google-cloud-pubsub/CHANGELOG.md | 22 ++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index e3d380fe92fa..3541a3e0d071 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,28 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [1.6.0](https://www.github.com/googleapis/python-pubsub/compare/v1.5.0...v1.6.0) (2020-06-09) + + +### Features + +* Add flow control for message publishing ([#96](https://www.github.com/googleapis/python-pubsub/issues/96)) ([06085c4](https://www.github.com/googleapis/python-pubsub/commit/06085c4083b9dccdd50383257799904510bbf3a0)) + + +### Bug Fixes + +* Fix PubSub incompatibility with api-core 1.17.0+ ([#103](https://www.github.com/googleapis/python-pubsub/issues/103)) ([c02060f](https://www.github.com/googleapis/python-pubsub/commit/c02060fbbe6e2ca4664bee08d2de10665d41dc0b)) + + +### Documentation +- Clarify that Schedulers shouldn't be used with multiple SubscriberClients ([#100](https://github.com/googleapis/python-pubsub/pull/100)) ([cf9e87c](https://github.com/googleapis/python-pubsub/commit/cf9e87c80c0771f3fa6ef784a8d76cb760ad37ef)) +- Fix update subscription/snapshot/topic samples ([#113](https://github.com/googleapis/python-pubsub/pull/113)) ([e62c38b](https://github.com/googleapis/python-pubsub/commit/e62c38bb33de2434e32f866979de769382dea34a)) + + +### Internal / Testing Changes +- Re-generated service implementation using synth: removed experimental notes from the RetryPolicy and filtering features in anticipation of GA, added DetachSubscription (experimental)
([#114](https://github.com/googleapis/python-pubsub/pull/114)) ([0132a46](https://github.com/googleapis/python-pubsub/commit/0132a4680e0727ce45d5e27d98ffc9f3541a0962)) +- Incorporate will_accept() checks into publish() ([#108](https://github.com/googleapis/python-pubsub/pull/108)) ([6c7677e](https://github.com/googleapis/python-pubsub/commit/6c7677ecb259672bbb9b6f7646919e602c698570)) + ## [1.5.0](https://www.github.com/googleapis/python-pubsub/compare/v1.4.3...v1.5.0) (2020-05-04) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 25ce7085b137..83fa16560931 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.5.0" +version = "1.6.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 3e83617babfd763a9d97c70d59eca6abd4b544a5 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 18 Jun 2020 20:15:55 +0200 Subject: [PATCH 0467/1197] docs: explain how to nack a sync pull message (#123) --- .../google-cloud-pubsub/docs/subscriber/index.rst | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst index d3d1cb5415b5..ed99566cd3bd 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -64,9 +64,20 @@ To pull the messages synchronously, use the client's subscriber.acknowledge(subscription_path, ack_ids) The method returns a :class:`~.pubsub_v1.types.PullResponse` instance that -cointains a list of received :class:`~.pubsub_v1.types.ReceivedMessage` +contains a list of received :class:`~.pubsub_v1.types.ReceivedMessage` instances. 
+If you want to **nack** some of the received messages (see :ref:`explaining-ack` below), +you can use the :meth:`~.pubsub_v1.subscriber.client.Client.modify_ack_deadline` +method and set their acknowledge deadlines to zero. This will cause them to +be dropped by this client and the backend will try to re-deliver them. + +.. code-block:: python + + ack_ids = [] # TODO: populate with `ack_ids` of the messages to NACK + ack_deadline_seconds = 0 + subscriber.modify_ack_deadline(subscription_path, ack_ids, ack_deadline_seconds) + Pulling a Subscription Asynchronously ------------------------------------- @@ -159,6 +170,8 @@ receiving messages. future.cancel() +.. _explaining-ack: + Explaining Ack -------------- From 6d3687f4caa32dbaea0ef00622bf44b85849265c Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Fri, 19 Jun 2020 18:54:27 -0700 Subject: [PATCH 0468/1197] chore(python): change autodoc_default_flags to autodoc_default_options (#127) Source-Author: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Source-Date: Thu Jun 18 22:50:02 2020 +0530 Source-Repo: googleapis/synthtool Source-Sha: cd522c3b4dde821766d95c80ae5aeb43d7a41170 Source-Link: https://github.com/googleapis/synthtool/commit/cd522c3b4dde821766d95c80ae5aeb43d7a41170 --- packages/google-cloud-pubsub/docs/conf.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 3e4bdfa1d7ad..899dac0e74d3 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -43,7 +43,7 @@ # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index da86b45aab8e..3e3c4f0cd852 100644 --- 
a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "06085c4083b9dccdd50383257799904510bbf3a0" + "sha": "f2eec65cec43066ba7a2d1d45efa979e6b7add4f" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "973fec811e9203b4c147121a26f1484841c465fd" + "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "973fec811e9203b4c147121a26f1484841c465fd" + "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" } } ], From df2c25ad9f360a4dcbf1d95ec8e90d081a67d1ee Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 25 Jun 2020 22:50:40 -0700 Subject: [PATCH 0469/1197] test: use 3.8 for system tests (#133) --- packages/google-cloud-pubsub/docs/conf.py | 2 +- .../google/cloud/pubsub.py | 6 ++- .../cloud/pubsub_v1/gapic/publisher_client.py | 36 ++++++++------- .../pubsub_v1/gapic/subscriber_client.py | 46 ++++++++++--------- .../transports/publisher_grpc_transport.py | 2 +- .../transports/subscriber_grpc_transport.py | 2 +- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 40 ++++++++-------- packages/google-cloud-pubsub/noxfile.py | 14 ++++-- packages/google-cloud-pubsub/synth.metadata | 6 +-- .../subscriber/test_streaming_pull_manager.py | 22 ++++++--- 10 files changed, 99 insertions(+), 77 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 899dac0e74d3..fb9f1ca32a08 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -337,7 +337,7 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": 
("https://googleapis.dev/python/google-api-core/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub.py index e7006048ba82..3dc5fea84f8f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub.py @@ -22,4 +22,8 @@ from google.cloud.pubsub_v1 import types -__all__ = ("types", "PublisherClient", "SubscriberClient") +__all__ = ( + "types", + "PublisherClient", + "SubscriberClient", +) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py index 6a60d775b8c0..e8853d841841 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py @@ -47,7 +47,7 @@ from google.protobuf import field_mask_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version # TODO: remove conditional import after Python 2 support is dropped @@ -116,7 +116,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod @@ -132,7 +132,7 @@ def subscription_path(cls, project, subscription): def topic_path(cls, project, topic): """Return a fully-qualified topic string.""" return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic + "projects/{project}/topics/{topic}", project=project, topic=topic, ) def 
__init__( @@ -219,12 +219,12 @@ def __init__( self.transport = transport else: self.transport = publisher_grpc_transport.PublisherGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -235,7 +235,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -411,7 +411,7 @@ def update_topic( client_info=self._client_info, ) - request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask) + request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask,) if metadata is None: metadata = [] metadata = list(metadata) @@ -490,7 +490,7 @@ def publish( client_info=self._client_info, ) - request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) + request = pubsub_pb2.PublishRequest(topic=topic, messages=messages,) if metadata is None: metadata = [] metadata = list(metadata) @@ -560,7 +560,7 @@ def get_topic( client_info=self._client_info, ) - request = pubsub_pb2.GetTopicRequest(topic=topic) + request = pubsub_pb2.GetTopicRequest(topic=topic,) if metadata is None: metadata = [] metadata = list(metadata) @@ -651,7 +651,7 @@ def list_topics( client_info=self._client_info, ) - request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size) + request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size,) if metadata is None: metadata = [] metadata = list(metadata) @@ -754,7 +754,7 @@ def 
list_topic_subscriptions( ) request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, page_size=page_size + topic=topic, page_size=page_size, ) if metadata is None: metadata = [] @@ -862,7 +862,9 @@ def list_topic_snapshots( client_info=self._client_info, ) - request = pubsub_pb2.ListTopicSnapshotsRequest(topic=topic, page_size=page_size) + request = pubsub_pb2.ListTopicSnapshotsRequest( + topic=topic, page_size=page_size, + ) if metadata is None: metadata = [] metadata = list(metadata) @@ -945,7 +947,7 @@ def delete_topic( client_info=self._client_info, ) - request = pubsub_pb2.DeleteTopicRequest(topic=topic) + request = pubsub_pb2.DeleteTopicRequest(topic=topic,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1031,7 +1033,7 @@ def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1110,7 +1112,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -1197,7 +1199,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] @@ -1271,7 +1273,7 @@ def detach_subscription( client_info=self._client_info, ) - request = pubsub_pb2.DetachSubscriptionRequest(subscription=subscription) + request = pubsub_pb2.DetachSubscriptionRequest(subscription=subscription,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 12c2a780d4ab..4d1d9111d2ee 100644 --- 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -46,7 +46,7 @@ from google.protobuf import timestamp_pb2 -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub").version +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version class SubscriberClient(object): @@ -94,7 +94,7 @@ def from_service_account_file(cls, filename, *args, **kwargs): def project_path(cls, project): """Return a fully-qualified project string.""" return google.api_core.path_template.expand( - "projects/{project}", project=project + "projects/{project}", project=project, ) @classmethod @@ -119,7 +119,7 @@ def subscription_path(cls, project, subscription): def topic_path(cls, project, topic): """Return a fully-qualified topic string.""" return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic + "projects/{project}/topics/{topic}", project=project, topic=topic, ) def __init__( @@ -209,12 +209,12 @@ def __init__( self.transport = transport else: self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -225,7 +225,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. 
@@ -490,7 +490,7 @@ def get_subscription( client_info=self._client_info, ) - request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) + request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription,) if metadata is None: metadata = [] metadata = list(metadata) @@ -578,7 +578,7 @@ def update_subscription( ) request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask + subscription=subscription, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -671,7 +671,7 @@ def list_subscriptions( ) request = pubsub_pb2.ListSubscriptionsRequest( - project=project, page_size=page_size + project=project, page_size=page_size, ) if metadata is None: metadata = [] @@ -755,7 +755,7 @@ def delete_subscription( client_info=self._client_info, ) - request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription) + request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription,) if metadata is None: metadata = [] metadata = list(metadata) @@ -829,7 +829,7 @@ def get_snapshot( client_info=self._client_info, ) - request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot) + request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1002,7 +1002,7 @@ def acknowledge( ) request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids + subscription=subscription, ack_ids=ack_ids, ) if metadata is None: metadata = [] @@ -1253,7 +1253,7 @@ def modify_push_config( ) request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config + subscription=subscription, push_config=push_config, ) if metadata is None: metadata = [] @@ -1350,7 +1350,7 @@ def list_snapshots( client_info=self._client_info, ) - request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size) + request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size,) if metadata is None: metadata = 
[] metadata = list(metadata) @@ -1462,7 +1462,7 @@ def create_snapshot( ) request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription, labels=labels + name=name, subscription=subscription, labels=labels, ) if metadata is None: metadata = [] @@ -1556,7 +1556,7 @@ def update_snapshot( ) request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask + snapshot=snapshot, update_mask=update_mask, ) if metadata is None: metadata = [] @@ -1633,7 +1633,7 @@ def delete_snapshot( client_info=self._client_info, ) - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) + request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1727,10 +1727,12 @@ def seek( # Sanity check: We have some fields which are mutually exclusive; # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof(time=time, snapshot=snapshot) + google.api_core.protobuf_helpers.check_oneof( + time=time, snapshot=snapshot, + ) request = pubsub_pb2.SeekRequest( - subscription=subscription, time=time, snapshot=snapshot + subscription=subscription, time=time, snapshot=snapshot, ) if metadata is None: metadata = [] @@ -1817,7 +1819,7 @@ def set_iam_policy( client_info=self._client_info, ) - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy) + request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) if metadata is None: metadata = [] metadata = list(metadata) @@ -1896,7 +1898,7 @@ def get_iam_policy( ) request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_ + resource=resource, options=options_, ) if metadata is None: metadata = [] @@ -1983,7 +1985,7 @@ def test_iam_permissions( ) request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions + resource=resource, permissions=permissions, ) if metadata is None: metadata = [] diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py index c874e78d3c49..bdba635553f5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py index 4a48b9317dee..cd7a19bbe55f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py @@ -57,7 +57,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index 6ba73883d3f3..a53a7551344e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -68,7 +68,7 @@ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -228,7 +228,7 @@ ), ], extensions=[], - nested_types=[_TOPIC_LABELSENTRY], + nested_types=[_TOPIC_LABELSENTRY,], enum_types=[], serialized_options=b"\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_", is_extendable=False, @@ -404,7 +404,7 @@ ), ], extensions=[], - nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY], + nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -442,7 +442,7 @@ serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -603,7 +603,7 @@ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -1061,7 +1061,7 @@ serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -1102,7 +1102,7 @@ serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -1454,7 +1454,7 @@ ), ], extensions=[], - nested_types=[_SUBSCRIPTION_LABELSENTRY], + nested_types=[_SUBSCRIPTION_LABELSENTRY,], enum_types=[], 
serialized_options=b'\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}', is_extendable=False, @@ -1612,7 +1612,7 @@ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -1812,7 +1812,7 @@ ), ], extensions=[], - nested_types=[_PUSHCONFIG_OIDCTOKEN, _PUSHCONFIG_ATTRIBUTESENTRY], + nested_types=[_PUSHCONFIG_OIDCTOKEN, _PUSHCONFIG_ATTRIBUTESENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -1826,7 +1826,7 @@ containing_type=None, create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], serialized_start=3102, serialized_end=3403, @@ -1938,7 +1938,7 @@ serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -2178,7 +2178,7 @@ serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -2358,7 +2358,7 @@ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -2674,7 +2674,7 @@ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -2815,7 +2815,7 @@ ), ], extensions=[], - nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY], + nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY,], enum_types=[], serialized_options=None, is_extendable=False, @@ -3032,7 +3032,7 @@ ), ], extensions=[], - nested_types=[_SNAPSHOT_LABELSENTRY], + nested_types=[_SNAPSHOT_LABELSENTRY,], enum_types=[], serialized_options=b"\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}", is_extendable=False, @@ -3070,7 +3070,7 @@ serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", 
file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -3250,7 +3250,7 @@ serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", file=DESCRIPTOR, create_key=_descriptor._internal_create_key, - ) + ), ], extensions=[], nested_types=[], @@ -3346,7 +3346,7 @@ containing_type=None, create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], serialized_start=6143, serialized_end=6333, diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index cf7a6e9782d0..615358c2e4b0 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -23,11 +23,11 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.7" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.7"] +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] @@ -39,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -54,7 +56,9 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 3e3c4f0cd852..fe6ccf394dd7 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "f2eec65cec43066ba7a2d1d45efa979e6b7add4f" + "sha": "71d70822c816062ef10d6d7584c4f8ed038d923f" } }, { @@ -19,14 +19,14 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" + "sha": "cf2eff09d0f5319a4dc5cdce2b6356d85af4a798" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cd522c3b4dde821766d95c80ae5aeb43d7a41170" + "sha": "cf2eff09d0f5319a4dc5cdce2b6356d85af4a798" } } ], diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 0475aaf6ea38..dd103599157a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -541,9 +541,14 @@ def make_running_manager(): def test_close(): - manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( - make_running_manager() - ) + ( + manager, + consumer, + dispatcher, + leaser, + heartbeater, + scheduler, + ) = make_running_manager() manager.close() @@ -557,9 +562,14 @@ def test_close(): def test_close_inactive_consumer(): - manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( - make_running_manager() - ) + ( + manager, + consumer, + dispatcher, + leaser, + heartbeater, + 
scheduler, + ) = make_running_manager() consumer.is_active = False manager.close() From 8c90f22b209dce7d8b8f0aeff22a6c799e7da81c Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 26 Jun 2020 19:01:00 +0200 Subject: [PATCH 0470/1197] samples: add samples from pubsub/cloud-client (#134) * Add pubsub publisher and subscriber samples Change-Id: I38b90c10aef72c37188c4520897302933b9d2ea7 * Update readme Change-Id: Ie95e2e1556a8d97b5321dc86bf8de431aa36a2d5 * Add pubsub iam samples Change-Id: I12c407d3cdf4a3f9736dfaeca6f20b31df6d310a * Fix lint issue Change-Id: Ifebdab0b974cc3d3fe8900a23ca7416fed9e026a * Auto-update dependencies. [(#540)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/540) * Auto-update dependencies. [(#542)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/542) * Move to google-cloud [(#544)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/544) * Add new "quickstart" samples [(#547)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/547) * Quickstart tests [(#569)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/569) * Add tests for quickstarts * Update secrets * Generate readmes for most service samples [(#599)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/599) * Update samples to support latest Google Cloud Python [(#656)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/656) * Auto-update dependencies. [(#715)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/715) * Fix pubusb tests Change-Id: I7dfe60b0f1240dc58a664968fd97ca5a8fa1109d * Auto-update dependencies. [(#825)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/825) * Auto-update dependencies. [(#876)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/876) * Fix reference to our testing tools * Re-generate all readmes * Auto-update dependencies. 
[(#922)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/922) * Auto-update dependencies. * Fix pubsub iam samples * Fix README rst links [(#962)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/962) * Fix README rst links * Update all READMEs * Auto-update dependencies. [(#1004)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1004) * Auto-update dependencies. * Fix natural language samples * Fix pubsub iam samples * Fix language samples * Fix bigquery samples * Auto-update dependencies. [(#1055)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1055) * Auto-update dependencies. * Explicitly use latest bigtable client Change-Id: Id71e9e768f020730e4ca9514a0d7ebaa794e7d9e * Revert language update for now Change-Id: I8867f154e9a5aae00d0047c9caf880e5e8f50c53 * Remove pdb. smh Change-Id: I5ff905fadc026eebbcd45512d4e76e003e3b2b43 * Update pubsub samples [(#1092)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1092) * Fix argpraser for pubsub subscriber Change-Id: I776863091846ee8ff8a70078c8b8d5498cf81ed6 * Add comment about result blocking in pubsub samples Change-Id: I149fc1242ceb6b2cff8eae7ef18b364dd5c26566 * Auto-update dependencies. [(#1097)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1097) * Update all generated readme auth instructions [(#1121)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1121) Change-Id: I03b5eaef8b17ac3dc3c0339fd2c7447bd3e11bd2 * Added Link to Python Setup Guide [(#1158)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1158) * Update Readme.rst to add Python setup guide As requested in b/64770713. This sample is linked in documentation https://cloud.google.com/bigtable/docs/scaling, and it would make more sense to update the guide here than in the documentation. 
* Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update install_deps.tmpl.rst * Updated readmegen scripts and re-generated related README files * Fixed the lint error * Auto-update dependencies. [(#1138)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1138) * Fix a few more lint issues Change-Id: I0d420f3053f391fa225e4b8179e45fd1138f5c65 * Add Snippet for Listing All Subscriptions in a Project [(#1169)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1169) * Auto-update dependencies. [(#1186)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1186) * Auto-update dependencies. [(#1234)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1234) * Auto-update dependencies. * Drop pytest-logcapture as it's no longer needed Change-Id: Ia8b9e8aaf248e9770db6bc4842a4532df8383893 * Auto-update dependencies. [(#1239)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1239) * Added "Open in Cloud Shell" buttons to README files [(#1254)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1254) * Auto-update dependencies. [(#1263)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1263) * Auto-update dependencies. [(#1272)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1272) * Auto-update dependencies. * Update requirements.txt * Auto-update dependencies. [(#1282)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1282) * Auto-update dependencies. * Fix storage acl sample Change-Id: I413bea899fdde4c4859e4070a9da25845b81f7cf * Add listen for errors sample. [(#1306)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1306) * Add listen for errors sample. 
* Update subscriber.py * Update subscriber.py * Fix subscription.open get called twice in the client libraries [(#1321)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1321) * Add tests for creating push subscription. [(#1332)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1332) This is a separate PR from actually adding the sample, which is in https://github.com/GoogleCloudPlatform/python-docs-samples/pull/1331. * Add create push subscription sample. [(#1331)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1331) * Update API version and body. [(#1326)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1326) The API version should be v1, not v1beta1. Also remove the unnecessary 'data' field from the body and just use 'binary_data'. * Add sample for updating a subscription. [(#1335)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1335) * Change update_subscription to change endpoint URL. [(#1344)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1344) The documentation specifies that the update subscription commands show how to update an endpoint URL: https://cloud.google.com/pubsub/docs/admin#update_a_subscription. * Auto-update dependencies. [(#1359)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1359) * Auto-update dependencies. [(#1389)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1389) * Added sample for publishing/receiving messages with custom attributes [(#1409)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1409) * Auto-update dependencies. 
* Regenerate the README files and fix the Open in Cloud Shell link for some samples [(#1441)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1441) * Update READMEs to fix numbering and add git clone [(#1464)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1464) * PubSub: adds region tags and updates existing to standard [(#1491)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1491) * Pubsub: Add missing region tag [(#1498)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1498) * Add the Pub/Sub handle_publisher_error sample [(#1440)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1440) * Add the Pub/Sub handle_publisher_error sample * Update requirements.txt * Update publisher.py * Update publisher.py * Added region tag * Modified publisher with error handling [(#1568)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1568) * Updated google-cloud-pubsub to version 0.35 [(#1624)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1624) * Updated library version * Rewrote test for publish with error handler * Custom _publish function in test prints no 'Attributes' * Added timeout in error handling [(#1636)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1636) * Auto-update dependencies. [(#1658)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1658) * Auto-update dependencies. * Rollback appengine/standard/bigquery/. * Rollback appengine/standard/iap/. * Rollback bigtable/metricscaler. * Rolledback appengine/flexible/datastore. * Rollback dataproc/ * Rollback jobs/api_client * Rollback vision/cloud-client. * Rollback functions/ocr/app. * Rollback iot/api-client/end_to_end_example. * Rollback storage/cloud-client. * Rollback kms/api-client. * Rollback dlp/ * Rollback bigquery/cloud-client. * Rollback iot/api-client/manager. * Rollback appengine/flexible/cloudsql_postgresql. 
* Added sample for Pub/Sub synchronous pull subscriber [(#1673)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1673) * Added sample for synchronous pull * Updated variable name [(#1680)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1680) * Fixed return object from `subscriber.subscribe()` [(#1685)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1685) * Pub/Sub: synchronous pull with lease management [(#1701)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1701) * Synchronous pull with lease management * Updated library version * Pub/Sub: moved import statements inside region tags [(#1753)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1753) * Moved import stataments inside region tags * Explained topic and subscription path methods * Pub/Sub end-to-end sample [(#1800)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1800) * Created new end-to-end sample, moved old sample * Add space around operator * Add test for updating a subscription. [(#1336)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1336) Tests for https://github.com/GoogleCloudPlatform/python-docs-samples/pull/1335. Using ack_deadline_seconds as the example. * Fix update test to use new endpoint [(#1925)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1925) * Fix update test to use new endpoint * Handle subscription already exists Previous deletions don't always succeed * Use a new endpoint for update * Auto-update dependencies. [(#1980)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1980) * Auto-update dependencies. 
* Update requirements.txt * Update requirements.txt * Cloud Pub/Sub Quickstart V2 [(#2004)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2004) * Quickstart V2 * Adopts Kir's suggestions * Adopted Tim's suggestions * proper resource deletion during teardown * Pub/Sub: publish with error-handling comments [(#2222)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2222) * Resolve all futures [(#2231)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2231) * Pub/Sub: add publish retry sample [(#2273)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2273) * Publish retry sample * double to single quotes * double to single quotes * license year * Fix a TODO comment on pubsub/cloud-client/subscriber.py [(#2302)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2302) * Print actual number of messages pulled [(#2078)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2078) * Print actual number of messages pulled * Pub/Sub: fix subscriber async region tag mistake [(#2334)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2334) * Pub/Sub: update retry settings in sample [(#2395)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2395) * Pub/Sub: improve pub.py [(#2403)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2403) * print number of messages published * two nit's * Adds updates for samples profiler ... 
vision [(#2439)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2439) * Pub/Sub: update how subscriber client listens to StreamingPullFuture [(#2475)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2475) * update sub.py & requirements.txt * fix flaky subscriber test with separate subscriptions * Pub/Sub: update how to test with mock [(#2555)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2555) * Update test with mock * Clean up resources after tests * Use unique resource names avoid test failures * Delete subscriptions in cleanup phase * Ensure unique topic name * Update assert to remove bytestring notation * Rewrite PubSubToGCS test using dataflow testing module * Pub/Sub: remove infinite while loops in subscriber examples [(#2604)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2604) * use result() on streaming pull futures instead of infinite while * remove unused imports * Pub/Sub: add timeout in argparse [(#2637)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2637) * Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. 
* revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh * remove publish concurrency control sample [(#2960)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2960) * Pub/Sub: remove unreferenced samples [(#2986)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2986) * remove qs samples * update README * Pub/Sub: add SubscriberClient.close() to examples [(#3118)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3118) * Add SubscriberClient.close() to examples. Co-authored-by: Prad Nelluru Co-authored-by: Prad Nelluru * Pub/Sub: update publish with batch settings sample [(#3137)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3137) * non-blocking publish * remove unused lib * lint * add defaults * Simplify noxfile setup. 
[(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. * Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. 
Co-authored-by: Renovate Bot * chore: remove gcp-devrel-py-tools from iot and pubsub [(#3470)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3470) * [iot] chore: remove unused dependency * [pubsub] chore: remove gcp-devrel-py-tools * Update dependency google-cloud-pubsub to v1.4.2 in Storage and Pub/Sub [(#3343)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3343) * chore: some lint fixes [(#3748)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3748) * chore(deps): update dependency google-cloud-pubsub to v1.4.3 [(#3725)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3725) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Takashi Matsuo * chore(deps): update dependency google-cloud-pubsub to v1.5.0 [(#3781)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3781) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> * samples: add Pub/Sub dead letter queue samples [(#3904)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3904) * fix: make timeout an optional positional arg [(#3938)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3938) * fix: make timeout an optional positional arg * place `none` back in function signature Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> * fix: replace name with id in samples [(#3953)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3953) * Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. [(#4022)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4022) * nit: remove redundant/wrong Pub/Sub region tag [(#4027)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4027) * Pub/Sub: wrap subscriber in a with block and add comments [(#4070)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4070) Use a `with` block to wrap subscriber and describe its purpose. 
Internal bug: b/157401623 * Update dependency google-cloud-pubsub to v1.6.0 [(#4039)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4039) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | minor | `==1.5.0` -> `==1.6.0` | --- ### Release Notes
googleapis/python-pubsub ### [`v1.6.0`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​160-httpswwwgithubcomgoogleapispython-pubsubcomparev150v160-2020-06-09) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v1.5.0...v1.6.0) ##### Features - Add flow control for message publishing ([#​96](https://www.github.com/googleapis/python-pubsub/issues/96)) ([06085c4](https://www.github.com/googleapis/python-pubsub/commit/06085c4083b9dccdd50383257799904510bbf3a0)) ##### Bug Fixes - Fix PubSub incompatibility with api-core 1.17.0+ ([#​103](https://www.github.com/googleapis/python-pubsub/issues/103)) ([c02060f](https://www.github.com/googleapis/python-pubsub/commit/c02060fbbe6e2ca4664bee08d2de10665d41dc0b)) ##### Documentation - Clarify that Schedulers shouldn't be used with multiple SubscriberClients ([#​100](https://togithub.com/googleapis/python-pubsub/pull/100)) ([cf9e87c](https://togithub.com/googleapis/python-pubsub/commit/cf9e87c80c0771f3fa6ef784a8d76cb760ad37ef)) - Fix update subscription/snapshot/topic samples ([#​113](https://togithub.com/googleapis/python-pubsub/pull/113)) ([e62c38b](https://togithub.com/googleapis/python-pubsub/commit/e62c38bb33de2434e32f866979de769382dea34a)) ##### Internal / Testing Changes - Re-generated service implementaton using synth: removed experimental notes from the RetryPolicy and filtering features in anticipation of GA, added DetachSubscription (experimental) ([#​114](https://togithub.com/googleapis/python-pubsub/pull/114)) ([0132a46](https://togithub.com/googleapis/python-pubsub/commit/0132a4680e0727ce45d5e27d98ffc9f3541a0962)) - Incorporate will_accept() checks into publish() ([#​108](https://togithub.com/googleapis/python-pubsub/pull/108)) ([6c7677e](https://togithub.com/googleapis/python-pubsub/commit/6c7677ecb259672bbb9b6f7646919e602c698570))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). * chore: update templates Co-authored-by: Jon Wayne Parrott Co-authored-by: DPE bot Co-authored-by: Jason Dobry Co-authored-by: Bill Prin Co-authored-by: michaelawyu Co-authored-by: noerog <32459203+noerog@users.noreply.github.com> Co-authored-by: L J Co-authored-by: Frank Natividad Co-authored-by: Alix Hamilton Co-authored-by: michaelawyu Co-authored-by: Tianzi Cai Co-authored-by: Charles Engelke Co-authored-by: Tianzi Cai Co-authored-by: Keiji Yoshida Co-authored-by: oli Co-authored-by: Gus Class Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh Co-authored-by: Prad Nelluru Co-authored-by: Prad Nelluru Co-authored-by: Renovate Bot Co-authored-by: Takashi Matsuo Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .../google-cloud-pubsub/.github/CODEOWNERS | 11 + .../samples/AUTHORING_GUIDE.md | 1 + .../samples/CONTRIBUTING.md | 1 + .../samples/snippets/README.rst | 282 +++++++ .../samples/snippets/README.rst.in | 30 + .../samples/snippets/iam.py | 231 ++++++ .../samples/snippets/iam_test.py | 118 +++ .../samples/snippets/noxfile.py | 224 +++++ .../samples/snippets/publisher.py | 334 ++++++++ .../samples/snippets/publisher_test.py | 146 ++++ .../samples/snippets/quickstart/pub.py | 86 ++ .../samples/snippets/quickstart/pub_test.py | 56 ++ 
.../samples/snippets/quickstart/sub.py | 69 ++ .../samples/snippets/quickstart/sub_test.py | 102 +++ .../samples/snippets/requirements-test.txt | 3 + .../samples/snippets/requirements.txt | 1 + .../samples/snippets/subscriber.py | 783 ++++++++++++++++++ .../samples/snippets/subscriber_test.py | 341 ++++++++ packages/google-cloud-pubsub/synth.py | 11 +- 19 files changed, 2829 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-pubsub/.github/CODEOWNERS create mode 100644 packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md create mode 100644 packages/google-cloud-pubsub/samples/CONTRIBUTING.md create mode 100644 packages/google-cloud-pubsub/samples/snippets/README.rst create mode 100644 packages/google-cloud-pubsub/samples/snippets/README.rst.in create mode 100644 packages/google-cloud-pubsub/samples/snippets/iam.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/iam_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/noxfile.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/publisher.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/publisher_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/requirements-test.txt create mode 100644 packages/google-cloud-pubsub/samples/snippets/requirements.txt create mode 100644 packages/google-cloud-pubsub/samples/snippets/subscriber.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/subscriber_test.py diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS new file mode 100644 index 
000000000000..cf01548a9f04 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +# The python-samples-owners team is the default owner for anything not +# explicitly taken by someone else. + + /samples/ @anguillanneuf @hongalex @googleapis/python-samples-owners diff --git a/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/CONTRIBUTING.md b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst new file mode 100644 index 000000000000..2676680afdef --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -0,0 +1,282 @@ + +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Pub/Sub Python Samples +=============================================================================== + +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/README.rst + + +This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. + + + + +.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + + +Setup +------------------------------------------------------------------------------- + + + +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started + + + + +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 3.6+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. 
_virtualenv: https://virtualenv.pypa.io/ + + + + + + +Samples +------------------------------------------------------------------------------- + + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + + + +Publisher ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py,pubsub/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python publisher.py + + + usage: publisher.py [-h] + project_id + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} + ... + + This application demonstrates how to perform basic operations on topics + with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + project_id Your Google Cloud project ID + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} + list Lists all Pub/Sub topics in the given project. + create Create a new Pub/Sub topic. + delete Deletes an existing Pub/Sub topic. + publish Publishes multiple messages to a Pub/Sub topic. 
+ publish-with-custom-attributes + Publishes multiple messages with custom attributes to + a Pub/Sub topic. + publish-with-error-handler + Publishes multiple messages to a Pub/Sub topic with an + error handler. + publish-with-batch-settings + Publishes multiple messages to a Pub/Sub topic with + batch settings. + publish-with-retry-settings + Publishes messages with custom retry settings. + + optional arguments: + -h, --help show this help message and exit + + + + + +Subscribers ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py,pubsub/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python subscriber.py + + + usage: subscriber.py [-h] + project_id + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + ... + + This application demonstrates how to perform basic operations on + subscriptions with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + project_id Your Google Cloud project ID + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + list-in-topic Lists all subscriptions for a given topic. 
+ list-in-project Lists all subscriptions in the current project. + create Create a new pull subscription on the given topic. + create-with-dead-letter-policy + Create a subscription with dead letter policy. + create-push Create a new push subscription on the given topic. + delete Deletes an existing Pub/Sub topic. + update-push Updates an existing Pub/Sub subscription's push + endpoint URL. Note that certain properties of a + subscription, such as its topic, are not modifiable. + update-dead-letter-policy + Update a subscription's dead letter policy. + remove-dead-letter-policy + Remove dead letter policy from a subscription. + receive Receives messages from a pull subscription. + receive-custom-attributes + Receives messages from a pull subscription. + receive-flow-control + Receives messages from a pull subscription with flow + control. + receive-synchronously + Pulling messages synchronously. + receive-synchronously-with-lease + Pulling messages synchronously with lease management + listen-for-errors Receives messages and catches errors from a pull + subscription. + receive-messages-with-delivery-attempts + + optional arguments: + -h, --help show this help message and exit + + + + + +Identity and Access Management ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py,pubsub/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python iam.py + + + usage: iam.py [-h] + project + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + ... + + This application demonstrates how to perform basic operations on IAM + policies with the Cloud Pub/Sub API. 
+ + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + project Your Google Cloud project ID + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + get-topic-policy Prints the IAM policy for the given topic. + get-subscription-policy + Prints the IAM policy for the given subscription. + set-topic-policy Sets the IAM policy for a topic. + set-subscription-policy + Sets the IAM policy for a topic. + check-topic-permissions + Checks to which permissions are available on the given + topic. + check-subscription-permissions + Checks to which permissions are available on the given + subscription. + + optional arguments: + -h, --help show this help message and exit + + + + + + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst.in b/packages/google-cloud-pubsub/samples/snippets/README.rst.in new file mode 100644 index 000000000000..ddbc647121b2 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst.in @@ -0,0 +1,30 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Pub/Sub + short_name: Cloud Pub/Sub + url: https://cloud.google.com/pubsub/docs + description: > + `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that + allows you to send and receive messages between independent applications. + +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Publisher + file: publisher.py + show_help: true +- name: Subscribers + file: subscriber.py + show_help: true +- name: Identity and Access Management + file: iam.py + show_help: true + +cloud_client_library: true + +folder: pubsub/cloud-client \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py new file mode 100644 index 000000000000..71c55d764c0c --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python + +# Copyright 2019 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""This application demonstrates how to perform basic operations on IAM +policies with the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs. +""" + +import argparse + + +def get_topic_policy(project, topic_id): + """Prints the IAM policy for the given topic.""" + # [START pubsub_get_topic_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + client = pubsub_v1.PublisherClient() + topic_path = client.topic_path(project, topic_id) + + policy = client.get_iam_policy(topic_path) + + print("Policy for topic {}:".format(topic_path)) + for binding in policy.bindings: + print("Role: {}, Members: {}".format(binding.role, binding.members)) + # [END pubsub_get_topic_policy] + + +def get_subscription_policy(project, subscription_id): + """Prints the IAM policy for the given subscription.""" + # [START pubsub_get_subscription_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path(project, subscription_id) + + policy = client.get_iam_policy(subscription_path) + + print("Policy for subscription {}:".format(subscription_path)) + for binding in policy.bindings: + print("Role: {}, Members: {}".format(binding.role, binding.members)) + + client.close() + # [END pubsub_get_subscription_policy] + + +def set_topic_policy(project, topic_id): + """Sets the IAM policy for a topic.""" + # [START pubsub_set_topic_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + client = pubsub_v1.PublisherClient() + topic_path = client.topic_path(project, topic_id) + + policy = client.get_iam_policy(topic_path) + + # Add all users as viewers. 
+ policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) + + # Add a group as a publisher. + policy.bindings.add( + role="roles/pubsub.publisher", members=["group:cloud-logs@google.com"] + ) + + # Set the policy + policy = client.set_iam_policy(topic_path, policy) + + print("IAM policy for topic {} set: {}".format(topic_id, policy)) + # [END pubsub_set_topic_policy] + + +def set_subscription_policy(project, subscription_id): + """Sets the IAM policy for a topic.""" + # [START pubsub_set_subscription_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path(project, subscription_id) + + policy = client.get_iam_policy(subscription_path) + + # Add all users as viewers. + policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) + + # Add a group as an editor. + policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) + + # Set the policy + policy = client.set_iam_policy(subscription_path, policy) + + print("IAM policy for subscription {} set: {}".format(subscription_id, policy)) + + client.close() + # [END pubsub_set_subscription_policy] + + +def check_topic_permissions(project, topic_id): + """Checks to which permissions are available on the given topic.""" + # [START pubsub_test_topic_permissions] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + client = pubsub_v1.PublisherClient() + topic_path = client.topic_path(project, topic_id) + + permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] + + allowed_permissions = client.test_iam_permissions(topic_path, permissions_to_check) + + print( + "Allowed permissions for topic {}: {}".format(topic_path, allowed_permissions) + ) + # [END pubsub_test_topic_permissions] + + +def 
check_subscription_permissions(project, subscription_id): + """Checks to which permissions are available on the given subscription.""" + # [START pubsub_test_subscription_permissions] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path(project, subscription_id) + + permissions_to_check = [ + "pubsub.subscriptions.consume", + "pubsub.subscriptions.update", + ] + + allowed_permissions = client.test_iam_permissions( + subscription_path, permissions_to_check + ) + + print( + "Allowed permissions for subscription {}: {}".format( + subscription_path, allowed_permissions + ) + ) + + client.close() + # [END pubsub_test_subscription_permissions] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project", help="Your Google Cloud project ID") + + subparsers = parser.add_subparsers(dest="command") + + get_topic_policy_parser = subparsers.add_parser( + "get-topic-policy", help=get_topic_policy.__doc__ + ) + get_topic_policy_parser.add_argument("topic_id") + + get_subscription_policy_parser = subparsers.add_parser( + "get-subscription-policy", help=get_subscription_policy.__doc__ + ) + get_subscription_policy_parser.add_argument("subscription_id") + + set_topic_policy_parser = subparsers.add_parser( + "set-topic-policy", help=set_topic_policy.__doc__ + ) + set_topic_policy_parser.add_argument("topic_id") + + set_subscription_policy_parser = subparsers.add_parser( + "set-subscription-policy", help=set_subscription_policy.__doc__ + ) + set_subscription_policy_parser.add_argument("subscription_id") + + check_topic_permissions_parser = subparsers.add_parser( + "check-topic-permissions", help=check_topic_permissions.__doc__ + ) + check_topic_permissions_parser.add_argument("topic_id") + + 
check_subscription_permissions_parser = subparsers.add_parser( + "check-subscription-permissions", help=check_subscription_permissions.__doc__, + ) + check_subscription_permissions_parser.add_argument("subscription_id") + + args = parser.parse_args() + + if args.command == "get-topic-policy": + get_topic_policy(args.project, args.topic_id) + elif args.command == "get-subscription-policy": + get_subscription_policy(args.project, args.subscription_id) + elif args.command == "set-topic-policy": + set_topic_policy(args.project, args.topic_id) + elif args.command == "set-subscription-policy": + set_subscription_policy(args.project, args.subscription_id) + elif args.command == "check-topic-permissions": + check_topic_permissions(args.project, args.topic_id) + elif args.command == "check-subscription-permissions": + check_subscription_permissions(args.project, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py new file mode 100644 index 000000000000..d196953f6207 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -0,0 +1,118 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +from google.cloud import pubsub_v1 +import pytest + +import iam + +UUID = uuid.uuid4().hex +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC = "iam-test-topic-" + UUID +SUBSCRIPTION = "iam-test-subscription-" + UUID + + +@pytest.fixture(scope="module") +def publisher_client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope="module") +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + publisher_client.delete_topic(topic_path) + except Exception: + pass + + publisher_client.create_topic(topic_path) + + yield topic_path + + publisher_client.delete_topic(topic_path) + + +@pytest.fixture(scope="module") +def subscriber_client(): + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + subscriber_client.close() + + +@pytest.fixture +def subscription(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) + + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + subscriber_client.create_subscription(subscription_path, topic=topic) + + yield subscription_path + + subscriber_client.delete_subscription(subscription_path) + + +def test_get_topic_policy(topic, capsys): + iam.get_topic_policy(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + assert topic in out + + +def test_get_subscription_policy(subscription, capsys): + iam.get_subscription_policy(PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert subscription in out + + +def test_set_topic_policy(publisher_client, topic): + iam.set_topic_policy(PROJECT, TOPIC) + + policy = publisher_client.get_iam_policy(topic) + assert "roles/pubsub.publisher" in str(policy) + assert "allUsers" in str(policy) + + +def test_set_subscription_policy(subscriber_client, subscription): + iam.set_subscription_policy(PROJECT, SUBSCRIPTION) + + policy = subscriber_client.get_iam_policy(subscription) + assert 
"roles/pubsub.viewer" in str(policy) + assert "allUsers" in str(policy) + + +def test_check_topic_permissions(topic, capsys): + iam.check_topic_permissions(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + + assert topic in out + assert "pubsub.topics.publish" in out + + +def test_check_subscription_permissions(subscription, capsys): + iam.check_subscription_permissions(PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + + assert subscription in out + assert "pubsub.subscriptions.consume" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py new file mode 100644 index 000000000000..ba55d7ce53ca --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. 
Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to tested samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". 
+ + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "." + ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py new file mode 100644 index 000000000000..477b31b9cf71 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -0,0 +1,334 @@ +#!/usr/bin/env python + +# Copyright 2016 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on topics +with the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs. +""" + +import argparse + + +def list_topics(project_id): + """Lists all Pub/Sub topics in the given project.""" + # [START pubsub_list_topics] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + + publisher = pubsub_v1.PublisherClient() + project_path = publisher.project_path(project_id) + + for topic in publisher.list_topics(project_path): + print(topic) + # [END pubsub_list_topics] + + +def create_topic(project_id, topic_id): + """Create a new Pub/Sub topic.""" + # [START pubsub_quickstart_create_topic] + # [START pubsub_create_topic] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + topic = publisher.create_topic(topic_path) + + print("Topic created: {}".format(topic)) + # [END pubsub_quickstart_create_topic] + # [END pubsub_create_topic] + + +def delete_topic(project_id, topic_id): + """Deletes an existing Pub/Sub topic.""" + # [START pubsub_delete_topic] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + 
publisher.delete_topic(topic_path) + + print("Topic deleted: {}".format(topic_path)) + # [END pubsub_delete_topic] + + +def publish_messages(project_id, topic_id): + """Publishes multiple messages to a Pub/Sub topic.""" + # [START pubsub_quickstart_publisher] + # [START pubsub_publish] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher = pubsub_v1.PublisherClient() + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(project_id, topic_id) + + for n in range(1, 10): + data = u"Message number {}".format(n) + # Data must be a bytestring + data = data.encode("utf-8") + # When you publish a message, the client returns a future. + future = publisher.publish(topic_path, data=data) + print(future.result()) + + print("Published messages.") + # [END pubsub_quickstart_publisher] + # [END pubsub_publish] + + +def publish_messages_with_custom_attributes(project_id, topic_id): + """Publishes multiple messages with custom attributes + to a Pub/Sub topic.""" + # [START pubsub_publish_custom_attributes] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + for n in range(1, 10): + data = u"Message number {}".format(n) + # Data must be a bytestring + data = data.encode("utf-8") + # Add two attributes, origin and username, to the message + future = publisher.publish( + topic_path, data, origin="python-sample", username="gcp" + ) + print(future.result()) + + print("Published messages with custom attributes.") + # [END pubsub_publish_custom_attributes] + + +def publish_messages_with_error_handler(project_id, topic_id): + # [START pubsub_publish_messages_error_handler] + """Publishes multiple messages to a Pub/Sub topic 
with an error handler.""" + import time + + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + futures = dict() + + def get_callback(f, data): + def callback(f): + try: + print(f.result()) + futures.pop(data) + except: # noqa + print("Please handle {} for {}.".format(f.exception(), data)) + + return callback + + for i in range(10): + data = str(i) + futures.update({data: None}) + # When you publish a message, the client returns a future. + future = publisher.publish( + topic_path, data=data.encode("utf-8") # data must be a bytestring. + ) + futures[data] = future + # Publish failures shall be handled in the callback function. + future.add_done_callback(get_callback(future, data)) + + # Wait for all the publish futures to resolve before exiting. + while futures: + time.sleep(5) + + print("Published message with error handler.") + # [END pubsub_publish_messages_error_handler] + + +def publish_messages_with_batch_settings(project_id, topic_id): + """Publishes multiple messages to a Pub/Sub topic with batch settings.""" + # [START pubsub_publisher_batch_settings] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + # Configure the batch to publish as soon as there is ten messages, + # one kilobyte of data, or one second has passed. + batch_settings = pubsub_v1.types.BatchSettings( + max_messages=10, # default 100 + max_bytes=1024, # default 1 MB + max_latency=1, # default 10 ms + ) + publisher = pubsub_v1.PublisherClient(batch_settings) + topic_path = publisher.topic_path(project_id, topic_id) + + # Resolve the publish future in a separate thread. 
+ def callback(future): + message_id = future.result() + print(message_id) + + for n in range(1, 10): + data = u"Message number {}".format(n) + # Data must be a bytestring + data = data.encode("utf-8") + future = publisher.publish(topic_path, data=data) + # Non-blocking. Allow the publisher client to batch multiple messages. + future.add_done_callback(callback) + + print("Published messages with batch settings.") + # [END pubsub_publisher_batch_settings] + + +def publish_messages_with_retry_settings(project_id, topic_id): + """Publishes messages with custom retry settings.""" + # [START pubsub_publisher_retry_settings] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + # Configure the retry settings. Defaults will be overwritten. + retry_settings = { + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "publish": [ + "ABORTED", + "CANCELLED", + "DEADLINE_EXCEEDED", + "INTERNAL", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE", + "UNKNOWN", + ] + }, + "retry_params": { + "messaging": { + "initial_retry_delay_millis": 100, # default: 100 + "retry_delay_multiplier": 1.3, # default: 1.3 + "max_retry_delay_millis": 60000, # default: 60000 + "initial_rpc_timeout_millis": 5000, # default: 25000 + "rpc_timeout_multiplier": 1.0, # default: 1.0 + "max_rpc_timeout_millis": 600000, # default: 30000 + "total_timeout_millis": 600000, # default: 600000 + } + }, + "methods": { + "Publish": { + "retry_codes_name": "publish", + "retry_params_name": "messaging", + } + }, + } + } + } + + publisher = pubsub_v1.PublisherClient(client_config=retry_settings) + topic_path = publisher.topic_path(project_id, topic_id) + + for n in range(1, 10): + data = u"Message number {}".format(n) + # Data must be a bytestring + data = data.encode("utf-8") + future = publisher.publish(topic_path, data=data) + print(future.result()) + + print("Published messages with retry settings.") + # [END 
pubsub_publisher_retry_settings] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project_id", help="Your Google Cloud project ID") + + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("list", help=list_topics.__doc__) + + create_parser = subparsers.add_parser("create", help=create_topic.__doc__) + create_parser.add_argument("topic_id") + + delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) + delete_parser.add_argument("topic_id") + + publish_parser = subparsers.add_parser("publish", help=publish_messages.__doc__) + publish_parser.add_argument("topic_id") + + publish_with_custom_attributes_parser = subparsers.add_parser( + "publish-with-custom-attributes", + help=publish_messages_with_custom_attributes.__doc__, + ) + publish_with_custom_attributes_parser.add_argument("topic_id") + + publish_with_error_handler_parser = subparsers.add_parser( + "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, + ) + publish_with_error_handler_parser.add_argument("topic_id") + + publish_with_batch_settings_parser = subparsers.add_parser( + "publish-with-batch-settings", + help=publish_messages_with_batch_settings.__doc__, + ) + publish_with_batch_settings_parser.add_argument("topic_id") + + publish_with_retry_settings_parser = subparsers.add_parser( + "publish-with-retry-settings", + help=publish_messages_with_retry_settings.__doc__, + ) + publish_with_retry_settings_parser.add_argument("topic_id") + + args = parser.parse_args() + + if args.command == "list": + list_topics(args.project_id) + elif args.command == "create": + create_topic(args.project_id, args.topic_id) + elif args.command == "delete": + delete_topic(args.project_id, args.topic_id) + elif args.command == "publish": + publish_messages(args.project_id, args.topic_id) + elif args.command == "publish-with-custom-attributes": 
+ publish_messages_with_custom_attributes(args.project_id, args.topic_id) + elif args.command == "publish-with-error-handler": + publish_messages_with_error_handler(args.project_id, args.topic_id) + elif args.command == "publish-with-batch-settings": + publish_messages_with_batch_settings(args.project_id, args.topic_id) + elif args.command == "publish-with-retry-settings": + publish_messages_with_retry_settings(args.project_id, args.topic_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py new file mode 100644 index 000000000000..b5c2ea1ea4b5 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -0,0 +1,146 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import time +import uuid + +import backoff +from google.cloud import pubsub_v1 +import mock +import pytest + +import publisher + +UUID = uuid.uuid4().hex +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ADMIN = "publisher-test-topic-admin-" + UUID +TOPIC_PUBLISH = "publisher-test-topic-publish-" + UUID + + +@pytest.fixture +def client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture +def topic_admin(client): + topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) + + try: + topic = client.get_topic(topic_path) + except: # noqa + topic = client.create_topic(topic_path) + + yield topic.name + # Teardown of `topic_admin` is handled in `test_delete()`. + + +@pytest.fixture +def topic_publish(client): + topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) + + try: + topic = client.get_topic(topic_path) + except: # noqa + topic = client.create_topic(topic_path) + + yield topic.name + + client.delete_topic(topic.name) + + +def _make_sleep_patch(): + real_sleep = time.sleep + + def new_sleep(period): + if period == 60: + real_sleep(5) + raise RuntimeError("sigil") + else: + real_sleep(period) + + return mock.patch("time.sleep", new=new_sleep) + + +def test_list(client, topic_admin, capsys): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + publisher.list_topics(PROJECT) + out, _ = capsys.readouterr() + assert topic_admin in out + + eventually_consistent_test() + + +def test_create(client): + topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) + try: + client.delete_topic(topic_path) + except Exception: + pass + + publisher.create_topic(PROJECT, TOPIC_ADMIN) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + assert client.get_topic(topic_path) + + eventually_consistent_test() + + +def test_delete(client, topic_admin): + publisher.delete_topic(PROJECT, TOPIC_ADMIN) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + 
def eventually_consistent_test(): + with pytest.raises(Exception): + client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) + + eventually_consistent_test() + + +def test_publish(topic_publish, capsys): + publisher.publish_messages(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published" in out + + +def test_publish_with_custom_attributes(topic_publish, capsys): + publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published" in out + + +def test_publish_with_batch_settings(topic_publish, capsys): + publisher.publish_messages_with_batch_settings(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published" in out + + +def test_publish_with_retry_settings(topic_publish, capsys): + publisher.publish_messages_with_retry_settings(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published" in out + + +def test_publish_with_error_handler(topic_publish, capsys): + publisher.publish_messages_with_error_handler(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py new file mode 100644 index 000000000000..16432c0c3627 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# [START pubsub_quickstart_pub_all] +import argparse +import time + +# [START pubsub_quickstart_pub_deps] +from google.cloud import pubsub_v1 + +# [END pubsub_quickstart_pub_deps] + + +def get_callback(api_future, data, ref): + """Wrap message data in the context of the callback function.""" + + def callback(api_future): + try: + print( + "Published message {} now has message ID {}".format( + data, api_future.result() + ) + ) + ref["num_messages"] += 1 + except Exception: + print( + "A problem occurred when publishing {}: {}\n".format( + data, api_future.exception() + ) + ) + raise + + return callback + + +def pub(project_id, topic_id): + """Publishes a message to a Pub/Sub topic.""" + # [START pubsub_quickstart_pub_client] + # Initialize a Publisher client. + client = pubsub_v1.PublisherClient() + # [END pubsub_quickstart_pub_client] + # Create a fully qualified identifier in the form of + # `projects/{project_id}/topics/{topic_id}` + topic_path = client.topic_path(project_id, topic_id) + + # Data sent to Cloud Pub/Sub must be a bytestring. + data = b"Hello, World!" + + # Keep track of the number of published messages. + ref = dict({"num_messages": 0}) + + # When you publish a message, the client returns a future. + api_future = client.publish(topic_path, data=data) + api_future.add_done_callback(get_callback(api_future, data, ref)) + + # Keep the main thread from exiting while the message future + # gets resolved in the background. 
+ while api_future.running(): + time.sleep(0.5) + print("Published {} message(s).".format(ref["num_messages"])) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project_id", help="Google Cloud project ID") + parser.add_argument("topic_id", help="Pub/Sub topic ID") + + args = parser.parse_args() + + pub(args.project_id, args.topic_id) +# [END pubsub_quickstart_pub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py new file mode 100644 index 000000000000..6f5cc06c4456 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +from google.api_core.exceptions import AlreadyExists +from google.cloud import pubsub_v1 +import pytest + +import pub # noqa + + +UUID = uuid.uuid4().hex +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC = "quickstart-pub-test-topic-" + UUID + + +@pytest.fixture(scope="module") +def publisher_client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope="module") +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + publisher_client.create_topic(topic_path) + except AlreadyExists: + pass + + yield TOPIC + + publisher_client.delete_topic(topic_path) + + +def test_pub(publisher_client, topic, capsys): + pub.pub(PROJECT, topic) + + out, _ = capsys.readouterr() + + assert "Hello, World!" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py new file mode 100644 index 000000000000..efe00891593e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# [START pubsub_quickstart_sub_all] +import argparse + +# [START pubsub_quickstart_sub_deps] +from google.cloud import pubsub_v1 + +# [END pubsub_quickstart_sub_deps] + + +def sub(project_id, subscription_id): + """Receives messages from a Pub/Sub subscription.""" + # [START pubsub_quickstart_sub_client] + # Initialize a Subscriber client + subscriber_client = pubsub_v1.SubscriberClient() + # [END pubsub_quickstart_sub_client] + # Create a fully qualified identifier in the form of + # `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber_client.subscription_path(project_id, subscription_id) + + def callback(message): + print( + "Received message {} of message ID {}\n".format(message, message.message_id) + ) + # Acknowledge the message. Unack'ed messages will be redelivered. + message.ack() + print("Acknowledged message {}\n".format(message.message_id)) + + streaming_pull_future = subscriber_client.subscribe( + subscription_path, callback=callback + ) + print("Listening for messages on {}..\n".format(subscription_path)) + + try: + # Calling result() on StreamingPullFuture keeps the main thread from + # exiting while messages get processed in the callbacks. 
+ streaming_pull_future.result() + except: # noqa + streaming_pull_future.cancel() + + subscriber_client.close() + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project_id", help="Google Cloud project ID") + parser.add_argument("subscription_id", help="Pub/Sub subscription ID") + + args = parser.parse_args() + + sub(args.project_id, args.subscription_id) +# [END pubsub_quickstart_sub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py new file mode 100644 index 000000000000..38047422a935 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import os +import uuid + +from google.api_core.exceptions import AlreadyExists +from google.cloud import pubsub_v1 +import mock +import pytest + +import sub # noqa + + +UUID = uuid.uuid4().hex +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC = "quickstart-sub-test-topic-" + UUID +SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID + +publisher_client = pubsub_v1.PublisherClient() +subscriber_client = pubsub_v1.SubscriberClient() + + +@pytest.fixture(scope="module") +def topic_path(): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + topic = publisher_client.create_topic(topic_path) + yield topic.name + except AlreadyExists: + yield topic_path + + publisher_client.delete_topic(topic_path) + + +@pytest.fixture(scope="module") +def subscription_path(topic_path): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) + + try: + subscription = subscriber_client.create_subscription( + subscription_path, topic_path + ) + yield subscription.name + except AlreadyExists: + yield subscription_path + + subscriber_client.delete_subscription(subscription_path) + subscriber_client.close() + + +def _publish_messages(topic_path): + publish_future = publisher_client.publish(topic_path, data=b"Hello World!") + publish_future.result() + + +def test_sub(monkeypatch, topic_path, subscription_path, capsys): + + real_client = pubsub_v1.SubscriberClient() + mock_client = mock.Mock(spec=pubsub_v1.SubscriberClient, wraps=real_client) + + # Attributes on mock_client_constructor uses the corresponding + # attributes on pubsub_v1.SubscriberClient. 
+ mock_client_constructor = mock.create_autospec(pubsub_v1.SubscriberClient) + mock_client_constructor.return_value = mock_client + + monkeypatch.setattr(pubsub_v1, "SubscriberClient", mock_client_constructor) + + def mock_subscribe(subscription_path, callback=None): + real_future = real_client.subscribe(subscription_path, callback=callback) + mock_future = mock.Mock(spec=real_future, wraps=real_future) + + def mock_result(): + return real_future.result(timeout=10) + + mock_future.result.side_effect = mock_result + return mock_future + + mock_client.subscribe.side_effect = mock_subscribe + + _publish_messages(topic_path) + + sub.sub(PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert "Received message" in out + assert "Acknowledged message" in out + + real_client.close() diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt new file mode 100644 index 000000000000..adf26b9f98bb --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -0,0 +1,3 @@ +backoff==1.10.0 +pytest==5.3.2 +mock==3.0.5 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt new file mode 100644 index 000000000000..9b496510abb5 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -0,0 +1 @@ +google-cloud-pubsub==1.6.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py new file mode 100644 index 000000000000..f079e7d423f8 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -0,0 +1,783 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on +subscriptions with the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs. +""" + +import argparse + + +def list_subscriptions_in_topic(project_id, topic_id): + """Lists all subscriptions for a given topic.""" + # [START pubsub_list_topic_subscriptions] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + for subscription in publisher.list_topic_subscriptions(topic_path): + print(subscription) + # [END pubsub_list_topic_subscriptions] + + +def list_subscriptions_in_project(project_id): + """Lists all subscriptions in the current project.""" + # [START pubsub_list_subscriptions] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + + subscriber = pubsub_v1.SubscriberClient() + project_path = subscriber.project_path(project_id) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. 
+ with subscriber: + for subscription in subscriber.list_subscriptions(project_path): + print(subscription.name) + # [END pubsub_list_subscriptions] + + +def create_subscription(project_id, topic_id, subscription_id): + """Create a new pull subscription on the given topic.""" + # [START pubsub_create_pull_subscription] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscription = subscriber.create_subscription(subscription_path, topic_path) + + print("Subscription created: {}".format(subscription)) + # [END pubsub_create_pull_subscription] + + +def create_subscription_with_dead_letter_topic( + project_id, topic_id, subscription_id, dead_letter_topic_id +): + """Create a subscription with dead letter policy.""" + # [START pubsub_dead_letter_create_subscription] + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + # TODO(developer) + # project_id = "your-project-id" + # endpoint = "https://my-test-project.appspot.com/push" + # TODO(developer): This is an existing topic that the subscription + # with dead letter policy is attached to. + # topic_id = "your-topic-id" + # TODO(developer): This is an existing subscription with a dead letter policy. + # subscription_id = "your-subscription-id" + # TODO(developer): This is an existing dead letter topic that the subscription + # with dead letter policy will forward dead letter messages to. 
+ # dead_letter_topic_id = "your-dead-letter-topic-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + + dead_letter_policy = DeadLetterPolicy( + dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 + ) + + with subscriber: + subscription = subscriber.create_subscription( + subscription_path, topic_path, dead_letter_policy=dead_letter_policy + ) + + print("Subscription created: {}".format(subscription.name)) + print( + "It will forward dead letter messages to: {}".format( + subscription.dead_letter_policy.dead_letter_topic + ) + ) + print( + "After {} delivery attempts.".format( + subscription.dead_letter_policy.max_delivery_attempts + ) + ) + # [END pubsub_dead_letter_create_subscription] + + +def create_push_subscription(project_id, topic_id, subscription_id, endpoint): + """Create a new push subscription on the given topic.""" + # [START pubsub_create_push_subscription] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + # endpoint = "https://my-test-project.appspot.com/push" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. 
+ with subscriber: + subscription = subscriber.create_subscription( + subscription_path, topic_path, push_config + ) + + print("Push subscription created: {}".format(subscription)) + print("Endpoint for subscription is: {}".format(endpoint)) + # [END pubsub_create_push_subscription] + + +def delete_subscription(project_id, subscription_id): + """Deletes an existing Pub/Sub topic.""" + # [START pubsub_delete_subscription] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscriber.delete_subscription(subscription_path) + + print("Subscription deleted: {}".format(subscription_path)) + # [END pubsub_delete_subscription] + + +def update_push_subscription(project_id, topic_id, subscription_id, endpoint): + """ + Updates an existing Pub/Sub subscription's push endpoint URL. + Note that certain properties of a subscription, such as + its topic, are not modifiable. + """ + # [START pubsub_update_push_configuration] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + # endpoint = "https://my-test-project.appspot.com/push" + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) + + subscription = pubsub_v1.types.Subscription( + name=subscription_path, topic=topic_id, push_config=push_config + ) + + update_mask = {"paths": {"push_config"}} + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. 
+ with subscriber: + result = subscriber.update_subscription(subscription, update_mask) + + print("Subscription updated: {}".format(subscription_path)) + print("New endpoint for subscription is: {}".format(result.push_config)) + # [END pubsub_update_push_configuration] + + +def update_subscription_with_dead_letter_policy( + project_id, topic_id, subscription_id, dead_letter_topic_id +): + """Update a subscription's dead letter policy.""" + # [START pubsub_dead_letter_update_subscription] + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import DeadLetterPolicy, FieldMask + + # TODO(developer) + # project_id = "your-project-id" + # TODO(developer): This is an existing topic that the subscription + # with dead letter policy is attached to. + # topic_id = "your-topic-id" + # TODO(developer): This is an existing subscription with a dead letter policy. + # subscription_id = "your-subscription-id" + # TODO(developer): This is an existing dead letter topic that the subscription + # with dead letter policy will forward dead letter messages to. + # dead_letter_topic_id = "your-dead-letter-topic-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + + subscription_before_update = subscriber.get_subscription(subscription_path) + print("Before the update: {}".format(subscription_before_update)) + + # Indicates which fields in the provided subscription to update. + update_mask = FieldMask(paths=["dead_letter_policy.max_delivery_attempts"]) + + # Construct a dead letter policy you expect to have after the update. + dead_letter_policy = DeadLetterPolicy( + dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=20 + ) + + # Construct the subscription with the dead letter policy you expect to have + # after the update. 
Here, values in the required fields (name, topic) help + # identify the subscription. + subscription = pubsub_v1.types.Subscription( + name=subscription_path, topic=topic_path, dead_letter_policy=dead_letter_policy, + ) + + with subscriber: + subscription_after_update = subscriber.update_subscription( + subscription, update_mask + ) + + print("After the update: {}".format(subscription_after_update)) + # [END pubsub_dead_letter_update_subscription] + return subscription_after_update + + +def remove_dead_letter_policy(project_id, topic_id, subscription_id): + """Remove dead letter policy from a subscription.""" + # [START pubsub_dead_letter_remove] + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import FieldMask + + # TODO(developer) + # project_id = "your-project-id" + # TODO(developer): This is an existing topic that the subscription + # with dead letter policy is attached to. + # topic_id = "your-topic-id" + # TODO(developer): This is an existing subscription with a dead letter policy. + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + subscription_before_update = subscriber.get_subscription(subscription_path) + print("Before removing the policy: {}".format(subscription_before_update)) + + # Indicates which fields in the provided subscription to update. + update_mask = FieldMask( + paths=[ + "dead_letter_policy.dead_letter_topic", + "dead_letter_policy.max_delivery_attempts", + ] + ) + + # Construct the subscription (without any dead letter policy) that you + # expect to have after the update. 
+ subscription = pubsub_v1.types.Subscription( + name=subscription_path, topic=topic_path + ) + + with subscriber: + subscription_after_update = subscriber.update_subscription( + subscription, update_mask + ) + + print("After removing the policy: {}".format(subscription_after_update)) + # [END pubsub_dead_letter_remove] + return subscription_after_update + + +def receive_messages(project_id, subscription_id, timeout=None): + """Receives messages from a pull subscription.""" + # [START pubsub_subscriber_async_pull] + # [START pubsub_quickstart_subscriber] + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + subscriber = pubsub_v1.SubscriberClient() + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message): + print("Received message: {}".format(message)) + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print("Listening for messages on {}..\n".format(subscription_path)) + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_subscriber_async_pull] + # [END pubsub_quickstart_subscriber] + + +def receive_messages_with_custom_attributes(project_id, subscription_id, timeout=None): + """Receives messages from a pull subscription.""" + # [START pubsub_subscriber_async_pull_custom_attributes] + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message): + print("Received message: {}".format(message.data)) + if message.attributes: + print("Attributes:") + for key in message.attributes: + value = message.attributes.get(key) + print("{}: {}".format(key, value)) + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print("Listening for messages on {}..\n".format(subscription_path)) + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_subscriber_async_pull_custom_attributes] + + +def receive_messages_with_flow_control(project_id, subscription_id, timeout=None): + """Receives messages from a pull subscription with flow control.""" + # [START pubsub_subscriber_flow_settings] + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message): + print("Received message: {}".format(message.data)) + message.ack() + + # Limit the subscriber to only have ten outstanding messages at a time. + flow_control = pubsub_v1.types.FlowControl(max_messages=10) + + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback, flow_control=flow_control + ) + print("Listening for messages on {}..\n".format(subscription_path)) + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_subscriber_flow_settings] + + +def synchronous_pull(project_id, subscription_id): + """Pulling messages synchronously.""" + # [START pubsub_subscriber_sync_pull] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + NUM_MESSAGES = 3 + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + # The subscriber pulls a specific number of messages. + response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + + ack_ids = [] + for received_message in response.received_messages: + print("Received: {}".format(received_message.message.data)) + ack_ids.append(received_message.ack_id) + + # Acknowledges the received messages so they will not be sent again. + subscriber.acknowledge(subscription_path, ack_ids) + + print( + "Received and acknowledged {} messages. Done.".format( + len(response.received_messages) + ) + ) + # [END pubsub_subscriber_sync_pull] + + +def synchronous_pull_with_lease_management(project_id, subscription_id): + """Pulling messages synchronously with lease management""" + # [START pubsub_subscriber_sync_pull_with_lease] + import logging + import multiprocessing + import random + import time + + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + NUM_MESSAGES = 2 + ACK_DEADLINE = 30 + SLEEP_TIME = 10 + + # The subscriber pulls a specific number of messages. 
+    response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES)
+
+    multiprocessing.log_to_stderr()
+    logger = multiprocessing.get_logger()
+    logger.setLevel(logging.INFO)
+
+    def worker(msg):
+        """Simulates a long-running process."""
+        RUN_TIME = random.randint(1, 60)
+        logger.info(
+            "{}: Running {} for {}s".format(
+                time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME
+            )
+        )
+        time.sleep(RUN_TIME)
+
+    # `processes` stores process as key and ack id and message as values.
+    processes = dict()
+    for message in response.received_messages:
+        process = multiprocessing.Process(target=worker, args=(message,))
+        processes[process] = (message.ack_id, message.message.data)
+        process.start()
+
+    while processes:
+        for process in list(processes):
+            ack_id, msg_data = processes[process]
+            # If the process is still running, reset the ack deadline as
+            # specified by ACK_DEADLINE once every while as specified
+            # by SLEEP_TIME.
+            if process.is_alive():
+                # `ack_deadline_seconds` must be between 10 and 600.
+                subscriber.modify_ack_deadline(
+                    subscription_path, [ack_id], ack_deadline_seconds=ACK_DEADLINE,
+                )
+                logger.info(
+                    "{}: Reset ack deadline for {} for {}s".format(
+                        time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE,
+                    )
+                )
+
+            # If the process is finished, acknowledges using `ack_id`.
+            else:
+                subscriber.acknowledge(subscription_path, [ack_id])
+                logger.info(
+                    "{}: Acknowledged {}".format(
+                        time.strftime("%X", time.gmtime()), msg_data
+                    )
+                )
+                processes.pop(process)
+
+        # If there are still processes running, sleeps the thread.
+        if processes:
+            time.sleep(SLEEP_TIME)
+
+    print(
+        "Received and acknowledged {} messages. Done.".format(
+            len(response.received_messages)
+        )
+    )
+
+    # Close the underlying gRPC channel. Alternatively, wrap subscriber in
+    # a 'with' block to automatically call close() when done.
+ subscriber.close() + # [END pubsub_subscriber_sync_pull_with_lease] + + +def listen_for_errors(project_id, subscription_id, timeout=None): + """Receives messages and catches errors from a pull subscription.""" + # [START pubsub_subscriber_error_listener] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message): + print("Received message: {}".format(message)) + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print("Listening for messages on {}..\n".format(subscription_path)) + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ try: + streaming_pull_future.result(timeout=timeout) + except Exception as e: + streaming_pull_future.cancel() + print( + "Listening for messages on {} threw an exception: {}.".format( + subscription_id, e + ) + ) + # [END pubsub_subscriber_error_listener] + + +def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout=None): + # [START pubsub_dead_letter_delivery_attempt] + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message): + print("Received message: {}".format(message)) + print("With delivery attempts: {}".format(message.delivery_attempt)) + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print("Listening for messages on {}..\n".format(subscription_path)) + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ try: + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_dead_letter_delivery_attempt] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project_id", help="Your Google Cloud project ID") + + subparsers = parser.add_subparsers(dest="command") + list_in_topic_parser = subparsers.add_parser( + "list-in-topic", help=list_subscriptions_in_topic.__doc__ + ) + list_in_topic_parser.add_argument("topic_id") + + list_in_project_parser = subparsers.add_parser( + "list-in-project", help=list_subscriptions_in_project.__doc__ + ) + + create_parser = subparsers.add_parser("create", help=create_subscription.__doc__) + create_parser.add_argument("topic_id") + create_parser.add_argument("subscription_id") + + create_with_dead_letter_policy_parser = subparsers.add_parser( + "create-with-dead-letter-policy", + help=create_subscription_with_dead_letter_topic.__doc__, + ) + create_with_dead_letter_policy_parser.add_argument("topic_id") + create_with_dead_letter_policy_parser.add_argument("subscription_id") + create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_id") + + create_push_parser = subparsers.add_parser( + "create-push", help=create_push_subscription.__doc__ + ) + create_push_parser.add_argument("topic_id") + create_push_parser.add_argument("subscription_id") + create_push_parser.add_argument("endpoint") + + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) + delete_parser.add_argument("subscription_id") + + update_push_parser = subparsers.add_parser( + "update-push", help=update_push_subscription.__doc__ + ) + update_push_parser.add_argument("topic_id") + update_push_parser.add_argument("subscription_id") + update_push_parser.add_argument("endpoint") + + update_dead_letter_policy_parser = subparsers.add_parser( + "update-dead-letter-policy", 
+ help=update_subscription_with_dead_letter_policy.__doc__, + ) + update_dead_letter_policy_parser.add_argument("topic_id") + update_dead_letter_policy_parser.add_argument("subscription_id") + update_dead_letter_policy_parser.add_argument("dead_letter_topic_id") + + remove_dead_letter_policy_parser = subparsers.add_parser( + "remove-dead-letter-policy", help=remove_dead_letter_policy.__doc__ + ) + remove_dead_letter_policy_parser.add_argument("topic_id") + remove_dead_letter_policy_parser.add_argument("subscription_id") + + receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) + receive_parser.add_argument("subscription_id") + receive_parser.add_argument("timeout", default=None, type=float, nargs="?") + + receive_with_custom_attributes_parser = subparsers.add_parser( + "receive-custom-attributes", + help=receive_messages_with_custom_attributes.__doc__, + ) + receive_with_custom_attributes_parser.add_argument("subscription_id") + receive_with_custom_attributes_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) + + receive_with_flow_control_parser = subparsers.add_parser( + "receive-flow-control", help=receive_messages_with_flow_control.__doc__ + ) + receive_with_flow_control_parser.add_argument("subscription_id") + receive_with_flow_control_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) + + synchronous_pull_parser = subparsers.add_parser( + "receive-synchronously", help=synchronous_pull.__doc__ + ) + synchronous_pull_parser.add_argument("subscription_id") + + synchronous_pull_with_lease_management_parser = subparsers.add_parser( + "receive-synchronously-with-lease", + help=synchronous_pull_with_lease_management.__doc__, + ) + synchronous_pull_with_lease_management_parser.add_argument("subscription_id") + + listen_for_errors_parser = subparsers.add_parser( + "listen-for-errors", help=listen_for_errors.__doc__ + ) + listen_for_errors_parser.add_argument("subscription_id") + listen_for_errors_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) + + receive_messages_with_delivery_attempts_parser = subparsers.add_parser( + "receive-messages-with-delivery-attempts", + help=receive_messages_with_delivery_attempts.__doc__, + ) + receive_messages_with_delivery_attempts_parser.add_argument("subscription_id") + receive_messages_with_delivery_attempts_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) + + args = parser.parse_args() + + if args.command == "list-in-topic": + list_subscriptions_in_topic(args.project_id, args.topic_id) + elif args.command == "list-in-project": + list_subscriptions_in_project(args.project_id) + elif args.command == "create": + create_subscription(args.project_id, args.topic_id, args.subscription_id) + elif args.command == "create-with-dead-letter-policy": + create_subscription_with_dead_letter_topic( + args.project_id, + args.topic_id, + args.subscription_id, + args.dead_letter_topic_id, + ) + elif args.command == "create-push": + create_push_subscription( + args.project_id, args.topic_id, args.subscription_id, args.endpoint, + ) + elif args.command == "delete": + delete_subscription(args.project_id, args.subscription_id) + elif args.command == "update-push": + update_push_subscription( + args.project_id, args.topic_id, args.subscription_id, args.endpoint, + ) + elif args.command == "update-dead-letter-policy": + update_subscription_with_dead_letter_policy( + args.project_id, + args.topic_id, + args.subscription_id, + args.dead_letter_topic_id, + ) + elif args.command == "remove-dead-letter-policy": + remove_dead_letter_policy(args.project_id, args.topic_id, args.subscription_id) + elif args.command == "receive": + receive_messages(args.project_id, args.subscription_id, args.timeout) + elif args.command == "receive-custom-attributes": + receive_messages_with_custom_attributes( + args.project_id, args.subscription_id, args.timeout + ) + elif args.command == "receive-flow-control": + receive_messages_with_flow_control( + args.project_id, args.subscription_id, args.timeout + ) + elif args.command == "receive-synchronously": + synchronous_pull(args.project_id, args.subscription_id) + elif args.command == "receive-synchronously-with-lease": + synchronous_pull_with_lease_management(args.project_id, args.subscription_id) + elif args.command == "listen-for-errors": + listen_for_errors(args.project_id, args.subscription_id, args.timeout) 
+ elif args.command == "receive-messages-with-delivery-attempts": + receive_messages_with_delivery_attempts( + args.project_id, args.subscription_id, args.timeout + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py new file mode 100644 index 000000000000..a7f7c139c258 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -0,0 +1,341 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +import backoff +from google.cloud import pubsub_v1 +import pytest + +import subscriber + +UUID = uuid.uuid4().hex +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC = "subscription-test-topic-" + UUID +DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID +SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID +SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID +SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID +SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID +ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) +NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) + + +@pytest.fixture(scope="module") +def publisher_client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope="module") +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + topic = publisher_client.get_topic(topic_path) + except: # noqa + topic = publisher_client.create_topic(topic_path) + + yield topic.name + + publisher_client.delete_topic(topic.name) + + +@pytest.fixture(scope="module") +def dead_letter_topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) + + try: + dead_letter_topic = publisher_client.get_topic(topic_path) + except: # noqa + dead_letter_topic = publisher_client.create_topic(topic_path) + + yield dead_letter_topic.name + + publisher_client.delete_topic(dead_letter_topic.name) + + +@pytest.fixture(scope="module") +def subscriber_client(): + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + subscriber_client.close() + + +@pytest.fixture(scope="module") +def subscription_admin(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) + + try: + subscription = subscriber_client.get_subscription(subscription_path) + except: # noqa + subscription = subscriber_client.create_subscription( + 
subscription_path, topic=topic + ) + + yield subscription.name + + +@pytest.fixture(scope="module") +def subscription_sync(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) + + try: + subscription = subscriber_client.get_subscription(subscription_path) + except: # noqa + subscription = subscriber_client.create_subscription( + subscription_path, topic=topic + ) + + yield subscription.name + + subscriber_client.delete_subscription(subscription.name) + + +@pytest.fixture(scope="module") +def subscription_async(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) + + try: + subscription = subscriber_client.get_subscription(subscription_path) + except: # noqa + subscription = subscriber_client.create_subscription( + subscription_path, topic=topic + ) + + yield subscription.name + + subscriber_client.delete_subscription(subscription.name) + + +@pytest.fixture(scope="module") +def subscription_dlq(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) + + try: + subscription = subscriber_client.get_subscription(subscription_path) + except: # noqa + subscription = subscriber_client.create_subscription( + subscription_path, topic=topic + ) + + yield subscription.name + + subscriber_client.delete_subscription(subscription.name) + + +def test_list_in_topic(subscription_admin, capsys): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) + out, _ = capsys.readouterr() + assert subscription_admin in out + + eventually_consistent_test() + + +def test_list_in_project(subscription_admin, capsys): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + subscriber.list_subscriptions_in_project(PROJECT) + out, _ = capsys.readouterr() + assert 
subscription_admin in out + + eventually_consistent_test() + + +def test_create(subscriber_client): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) + + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + assert subscriber_client.get_subscription(subscription_path) + + eventually_consistent_test() + + +def test_create_subscription_with_dead_letter_policy( + subscriber_client, publisher_client, topic, dead_letter_topic, capsys +): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) + dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) + + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + subscriber.create_subscription_with_dead_letter_topic( + PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + ) + + out, _ = capsys.readouterr() + assert "Subscription created: " + subscription_path in out + assert "It will forward dead letter messages to: " + dead_letter_topic_path in out + assert "After 10 delivery attempts." 
in out + + +def test_create_push(subscriber_client): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + assert subscriber_client.get_subscription(subscription_path) + + eventually_consistent_test() + + +def test_update(subscriber_client, subscription_admin, capsys): + subscriber.update_push_subscription( + PROJECT, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT + ) + + out, _ = capsys.readouterr() + assert "Subscription updated" in out + + +def test_update_dead_letter_policy( + subscriber_client, topic, subscription_dlq, dead_letter_topic, capsys +): + _ = subscriber.update_subscription_with_dead_letter_policy( + PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + ) + + out, _ = capsys.readouterr() + assert "max_delivery_attempts: 20" in out + + +def test_delete(subscriber_client, subscription_admin): + subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): + with pytest.raises(Exception): + subscriber_client.get_subscription(subscription_admin) + + eventually_consistent_test() + + +def _publish_messages(publisher_client, topic): + for n in range(5): + data = u"message {}".format(n).encode("utf-8") + publish_future = publisher_client.publish( + topic, data=data, origin="python-sample" + ) + publish_future.result() + + +def test_receive(publisher_client, topic, subscription_async, capsys): + _publish_messages(publisher_client, topic) + + subscriber.receive_messages(PROJECT, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "message" in out + + +def 
test_receive_with_custom_attributes( + publisher_client, topic, subscription_async, capsys +): + + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert "message" in out + assert "origin" in out + assert "python-sample" in out + + +def test_receive_with_flow_control(publisher_client, topic, subscription_async, capsys): + + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_flow_control(PROJECT, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "message" in out + + +def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): + _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC) + + out, _ = capsys.readouterr() + assert "Done." in out + + +def test_receive_synchronously_with_lease( + publisher_client, topic, subscription_sync, capsys +): + _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull_with_lease_management(PROJECT, SUBSCRIPTION_SYNC) + + out, _ = capsys.readouterr() + assert "Done." 
in out + + +def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): + + _publish_messages(publisher_client, topic) + + subscriber.listen_for_errors(PROJECT, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "threw an exception" in out + + +def test_receive_with_delivery_attempts( + publisher_client, topic, subscription_dlq, dead_letter_topic, capsys +): + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_delivery_attempts(PROJECT, SUBSCRIPTION_DLQ, 10) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_dlq in out + assert "Received message: " in out + assert "message 4" in out + assert "With delivery attempts: " in out + + +def test_remove_dead_letter_policy(subscriber_client, subscription_dlq): + subscription_after_update = subscriber.remove_dead_letter_policy( + PROJECT, TOPIC, SUBSCRIPTION_DLQ + ) + + assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index b44cc0acff57..0e2c96e42de2 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -18,6 +18,7 @@ import synthtool as s from synthtool import gcp +from synthtool.languages import python gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() @@ -266,8 +267,16 @@ def _merge_dict(d1, d2): # Add templated files # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( - unit_cov_level=97, cov_level=99, system_test_external_dependencies=["psutil"], + unit_cov_level=97, + cov_level=99, + system_test_external_dependencies=["psutil"], + samples=True, ) s.move(templated_files) +# ---------------------------------------------------------------------------- +# Samples templates +# 
---------------------------------------------------------------------------- +python.py_samples() + s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 4ecae66c66d546af2ca371125d39d59b1a1c6a8b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 30 Jun 2020 08:51:24 -0700 Subject: [PATCH 0471/1197] docs: added Python2 sunset notice (synth) (#140) * chore: update Py2 support msg to reflect passage of time Source-Author: Dan O'Meara Source-Date: Mon Jun 29 13:20:01 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 303271797a360f8a439203413f13a160f2f5b3b4 Source-Link: https://github.com/googleapis/synthtool/commit/303271797a360f8a439203413f13a160f2f5b3b4 --- .../google-cloud-pubsub/docs/_templates/layout.html | 4 ++-- packages/google-cloud-pubsub/synth.metadata | 13 ++++++++++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/_templates/layout.html b/packages/google-cloud-pubsub/docs/_templates/layout.html index 228529efe2d2..6316a537f72b 100644 --- a/packages/google-cloud-pubsub/docs/_templates/layout.html +++ b/packages/google-cloud-pubsub/docs/_templates/layout.html @@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index fe6ccf394dd7..87c9114a2c5a 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "71d70822c816062ef10d6d7584c4f8ed038d923f" + "sha": "e204b86e34346b6d456771ef4d4cfe5e15e53238" } }, { @@ -19,14 +19,21 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cf2eff09d0f5319a4dc5cdce2b6356d85af4a798" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "cf2eff09d0f5319a4dc5cdce2b6356d85af4a798" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } } ], From 5ba399f7f9da68a817e41b4f9fe4b5e6d413bd82 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Jul 2020 12:08:49 -0400 Subject: [PATCH 0472/1197] chore: release 1.6.1 (#125) * updated CHANGELOG.md [ci skip] * updated setup.cfg [ci skip] * updated setup.py Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 3541a3e0d071..939bfd5fc0a1 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### 
[1.6.1](https://www.github.com/googleapis/python-pubsub/compare/v1.6.0...v1.6.1) (2020-06-30) + + +### Documentation + +* added Python2 sunset notice (synth) ([#140](https://www.github.com/googleapis/python-pubsub/issues/140)) ([c8f6378](https://www.github.com/googleapis/python-pubsub/commit/c8f63788636c2e3436c8ce6a01ef3b59e3df772a)) +* explain how to nack a sync pull message ([#123](https://www.github.com/googleapis/python-pubsub/issues/123)) ([f2eec65](https://www.github.com/googleapis/python-pubsub/commit/f2eec65cec43066ba7a2d1d45efa979e6b7add4f)) + ## [1.6.0](https://www.github.com/googleapis/python-pubsub/compare/v1.5.0...v1.6.0) (2020-06-09) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 83fa16560931..528bb66a2c97 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.6.0" +version = "1.6.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From fea109f9c530bae0e70ff270eddcccb24b7f6e15 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Mon, 6 Jul 2020 09:45:28 -0700 Subject: [PATCH 0473/1197] docs: Add a link to Pub/Sub filtering language public documentation to pubsub.proto (#121) * docs: Add a link to Pub/Sub filtering language public documentation to pubsub.proto PiperOrigin-RevId: 315930680 Source-Author: Google APIs Source-Date: Thu Jun 11 10:43:34 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: a19b46e54cc5195557f70ff68f1696d1e3b4702e Source-Link: https://github.com/googleapis/googleapis/commit/a19b46e54cc5195557f70ff68f1696d1e3b4702e * fix: use protoc-docs-plugin 0.8.0 Fixes issue with missing newline before 'Attributes' in Python docstrings. 
PiperOrigin-RevId: 316182409 Source-Author: Google APIs Source-Date: Fri Jun 12 14:52:11 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 184661793fbe3b89f2b485c303e7466cef9d21a1 Source-Link: https://github.com/googleapis/googleapis/commit/184661793fbe3b89f2b485c303e7466cef9d21a1 * feature: Add flow control settings for StreamingPullRequest to pubsub.proto PiperOrigin-RevId: 317914250 Source-Author: Google APIs Source-Date: Tue Jun 23 12:05:24 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 14f0c2cc9392234707247ab2b0782c118cb179aa Source-Link: https://github.com/googleapis/googleapis/commit/14f0c2cc9392234707247ab2b0782c118cb179aa * chore: update grpc to v1.30.0 PiperOrigin-RevId: 317949519 Source-Author: Google APIs Source-Date: Tue Jun 23 15:22:22 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: 7157f9552747421572cf1ab3aec1105c05ebd4f9 Source-Link: https://github.com/googleapis/googleapis/commit/7157f9552747421572cf1ab3aec1105c05ebd4f9 * Updates to build Google Ads API build files. 
PiperOrigin-RevId: 318028816 Source-Author: Google APIs Source-Date: Wed Jun 24 02:32:38 2020 -0700 Source-Repo: googleapis/googleapis Source-Sha: b882b8e6bfcd708042ff00f7adc67ce750817dd0 Source-Link: https://github.com/googleapis/googleapis/commit/b882b8e6bfcd708042ff00f7adc67ce750817dd0 Co-authored-by: Prad Nelluru --- .../pubsub_v1/gapic/subscriber_client.py | 3 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 28 +- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 165 ++- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 975 +++++++++++++++--- packages/google-cloud-pubsub/synth.metadata | 6 +- 5 files changed, 999 insertions(+), 178 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py index 4d1d9111d2ee..1e24ba02a4f6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py @@ -346,7 +346,8 @@ def create_subscription( If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` - filter_ (str): An expression written in the Cloud Pub/Sub filter language. If + filter_ (str): An expression written in the Pub/Sub `filter + language `__. If non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index d85f2734e686..dc9151446fe5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -191,7 +191,8 @@ message PubsubMessage { bytes data = 1; // Attributes for this message. 
If this field is empty, the message must - // contain non-empty data. + // contain non-empty data. This can be used to filter messages on the + // subscription. map attributes = 2; // ID of this message, assigned by the server when the message is published. @@ -700,7 +701,8 @@ message Subscription { // value for `expiration_policy.ttl` is 1 day. ExpirationPolicy expiration_policy = 11; - // An expression written in the Cloud Pub/Sub filter language. If non-empty, + // An expression written in the Pub/Sub [filter + // language](https://cloud.google.com/pubsub/docs/filtering). If non-empty, // then only `PubsubMessage`s whose `attributes` field matches the filter are // delivered on this subscription. If empty, then no messages are filtered // out. @@ -1107,6 +1109,28 @@ message StreamingPullRequest { // transferred to the new stream. The same client_id should not be used for // different client instances. string client_id = 6; + + // Flow control settings for the maximum number of outstanding messages. When + // there are `max_outstanding_messages` or more currently sent to the + // streaming pull client that have not yet been acked or nacked, the server + // stops sending more messages. The sending of messages resumes once the + // number of outstanding messages is less than this value. If the value is + // <= 0, there is no limit to the number of outstanding messages. This + // property can only be set on the initial StreamingPullRequest. If it is set + // on a subsequent request, the stream will be aborted with status + // `INVALID_ARGUMENT`. + int64 max_outstanding_messages = 7; + + // Flow control settings for the maximum number of outstanding bytes. When + // there are `max_outstanding_bytes` or more worth of messages currently sent + // to the streaming pull client that have not yet been acked or nacked, the + // server will stop sending more messages. The sending of messages resumes + // once the number of outstanding bytes is less than this value. 
If the value + // is <= 0, there is no limit to the number of outstanding bytes. This + // property can only be set on the initial StreamingPullRequest. If it is set + // on a subsequent request, the stream will be aborted with status + // `INVALID_ARGUMENT`. + int64 max_outstanding_bytes = 8; } // Response for the `StreamingPull` method. This response is used to stream diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py index a53a7551344e..44dc068981d9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: google/cloud/pubsub_v1/proto/pubsub.proto - +"""Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -28,7 +28,7 @@ syntax="proto3", serialized_options=b"\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1", create_key=_descriptor._internal_create_key, - serialized_pb=b'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 
\x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 \x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 
\x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"]\n\x19\x44\x65tachSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x1c\n\x1a\x44\x65tachSubscriptionResponse"\xc0\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b \x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x12\x10\n\x08\x64\x65tached\x18\x0f \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 
\x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 
\x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xe8\x01\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 \x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 
\x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 
\n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xa3\x0b\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\xad\x01\n\x12\x44\x65tachSubscription\x12+.google.pubsub.v1.DetachSubscriptionRequest\x1a,.google.pubsub.v1.DetachSubscriptionResponse"<\x82\xd3\xe4\x93\x02\x36"4/v1/{subscription=projects/*/subscriptions/*}:detach\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\
xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.Modify
PushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::Cloud::PubSub::V1b\x06proto3', + 
serialized_pb=b'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 
\x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"]\n\x19\x44\x65tachSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x1c\n\x1a\x44\x65tachSubscriptionResponse"\xc0\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b 
\x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x12\x10\n\x08\x64\x65tached\x18\x0f \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xa9\x02\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 
\x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12 \n\x18max_outstanding_messages\x18\x07 \x01(\x03\x12\x1d\n\x15max_outstanding_bytes\x18\x08 \x01(\x03"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 
\x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xa3\x0b\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\xad\x01\n\x12\x44\x65tachSubscription\x12+.google.pubsub.v1.De
tachSubscriptionRequest\x1a,.google.pubsub.v1.DetachSubscriptionResponse"<\x82\xd3\xe4\x93\x02\x36"4/v1/{subscription=projects/*/subscriptions/*}:detach\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\
x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::
Cloud::PubSub::V1b\x06proto3', dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, @@ -2634,6 +2634,44 @@ file=DESCRIPTOR, create_key=_descriptor._internal_create_key, ), + _descriptor.FieldDescriptor( + name="max_outstanding_messages", + full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_messages", + index=6, + number=7, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="max_outstanding_bytes", + full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_bytes", + index=7, + number=8, + type=3, + cpp_type=2, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), ], extensions=[], nested_types=[], @@ -2644,7 +2682,7 @@ extension_ranges=[], oneofs=[], serialized_start=4728, - serialized_end=4960, + serialized_end=5025, ) @@ -2684,8 +2722,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=4962, - serialized_end=5047, + serialized_start=5027, + serialized_end=5112, ) @@ -2822,8 +2860,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5050, - serialized_end=5309, + serialized_start=5115, + serialized_end=5374, ) @@ -2882,8 +2920,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5312, - serialized_end=5440, + serialized_start=5377, + serialized_end=5505, ) @@ -3039,8 +3077,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5443, - serialized_end=5746, + serialized_start=5508, + serialized_end=5811, ) @@ -3080,8 +3118,8 @@ 
syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5748, - serialized_end=5826, + serialized_start=5813, + serialized_end=5891, ) @@ -3159,8 +3197,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5829, - serialized_end=5960, + serialized_start=5894, + serialized_end=6025, ) @@ -3219,8 +3257,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=5962, - serialized_end=6057, + serialized_start=6027, + serialized_end=6122, ) @@ -3260,8 +3298,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6059, - serialized_end=6140, + serialized_start=6124, + serialized_end=6205, ) @@ -3348,8 +3386,8 @@ fields=[], ), ], - serialized_start=6143, - serialized_end=6333, + serialized_start=6208, + serialized_end=6398, ) @@ -3369,8 +3407,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=6335, - serialized_end=6349, + serialized_start=6400, + serialized_end=6414, ) _TOPIC_LABELSENTRY.containing_type = _TOPIC @@ -3526,6 +3564,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """A policy constraining the storage of messages published to the topic. + Attributes: allowed_persistence_regions: A list of IDs of GCP regions where messages that are published @@ -3557,6 +3596,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """A topic resource. + Attributes: name: Required. The name of the topic. It must have the format @@ -3606,13 +3646,15 @@ library documentation for more information. See Quotas and limits for more information about message limits. + Attributes: data: The message data field. If this field is empty, the message must contain at least one attribute. attributes: Attributes for this message. If this field is empty, the - message must contain non-empty data. + message must contain non-empty data. This can be used to + filter messages on the subscription. 
message_id: ID of this message, assigned by the server when the message is published. Guaranteed to be unique within the topic. This @@ -3650,6 +3692,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the GetTopic method. + Attributes: topic: Required. The name of the topic to get. Format is @@ -3668,6 +3711,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the UpdateTopic method. + Attributes: topic: Required. The updated topic object. @@ -3692,6 +3736,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the Publish method. + Attributes: topic: Required. The messages in the request will be published on @@ -3712,6 +3757,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``Publish`` method. + Attributes: message_ids: The server-assigned ID of each published message, in the same @@ -3731,6 +3777,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``ListTopics`` method. + Attributes: project: Required. The name of the project in which to list topics. @@ -3756,6 +3803,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``ListTopics`` method. + Attributes: topics: The resulting topics. @@ -3777,6 +3825,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``ListTopicSubscriptions`` method. + Attributes: topic: Required. The name of the topic that subscriptions are @@ -3802,6 +3851,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``ListTopicSubscriptions`` method. + Attributes: subscriptions: The names of subscriptions attached to the topic specified in @@ -3824,6 +3874,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``ListTopicSnapshots`` method. + Attributes: topic: Required. 
The name of the topic that snapshots are attached @@ -3849,6 +3900,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``ListTopicSnapshots`` method. + Attributes: snapshots: The names of the snapshots that match the request. @@ -3870,6 +3922,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``DeleteTopic`` method. + Attributes: topic: Required. Name of the topic to delete. Format is @@ -3888,6 +3941,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the DetachSubscription method. + Attributes: subscription: Required. The subscription to detach. Format is @@ -3927,6 +3981,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """A subscription resource. + Attributes: name: Required. The name of the subscription. It must have the @@ -3998,8 +4053,9 @@ ``ttl`` of 31 days will be used. The minimum allowed value for ``expiration_policy.ttl`` is 1 day. filter: - An expression written in the Cloud Pub/Sub filter language. If - non-empty, then only ``PubsubMessage``\ s whose ``attributes`` + An expression written in the Pub/Sub `filter language + `__. If non- + empty, then only ``PubsubMessage``\ s whose ``attributes`` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out. dead_letter_policy: @@ -4046,6 +4102,7 @@ may not match the configuration. That is, delay can be more or less than configured backoff. + Attributes: minimum_backoff: The minimum delay between consecutive deliveries of a given @@ -4072,6 +4129,7 @@ fails at subscription creation/updation, the create/update subscription request will fail. + Attributes: dead_letter_topic: The name of the topic to which dead letter messages should be @@ -4109,6 +4167,7 @@ "__doc__": """A policy that specifies the conditions for resource expiration (i.e., automatic resource deletion). 
+ Attributes: ttl: Specifies the “time-to-live” duration for an associated @@ -4137,6 +4196,7 @@ "__doc__": """Contains information needed for generating an `OpenID Connect token `__. + Attributes: service_account_email: \ `Service account email @@ -4170,6 +4230,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Configuration for a push delivery endpoint. + Attributes: push_endpoint: A URL locating the endpoint to which messages should be @@ -4220,6 +4281,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """A message and its corresponding acknowledgment ID. + Attributes: ack_id: This ID can be used to acknowledge the received message. @@ -4253,6 +4315,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the GetSubscription method. + Attributes: subscription: Required. The name of the subscription to get. Format is @@ -4271,6 +4334,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the UpdateSubscription method. + Attributes: subscription: Required. The updated subscription object. @@ -4291,6 +4355,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``ListSubscriptions`` method. + Attributes: project: Required. The name of the project in which to list @@ -4316,6 +4381,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``ListSubscriptions`` method. + Attributes: subscriptions: The subscriptions that match the request. @@ -4337,6 +4403,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the DeleteSubscription method. + Attributes: subscription: Required. The subscription to delete. Format is @@ -4355,6 +4422,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ModifyPushConfig method. + Attributes: subscription: Required. The name of the subscription. 
Format is @@ -4380,6 +4448,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``Pull`` method. + Attributes: subscription: Required. The subscription from which messages should be @@ -4411,6 +4480,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``Pull`` method. + Attributes: received_messages: Received Pub/Sub messages. The list will be empty if there are @@ -4432,6 +4502,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ModifyAckDeadline method. + Attributes: subscription: Required. The name of the subscription. Format is @@ -4462,6 +4533,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the Acknowledge method. + Attributes: subscription: Required. The subscription whose message is being @@ -4488,6 +4560,7 @@ acknowledgements and ack deadline modifications from the client to the server. + Attributes: subscription: Required. The subscription for which to initialize the new @@ -4538,6 +4611,29 @@ so that state associated with the old stream can be transferred to the new stream. The same client_id should not be used for different client instances. + max_outstanding_messages: + Flow control settings for the maximum number of outstanding + messages. When there are ``max_outstanding_messages`` or more + currently sent to the streaming pull client that have not yet + been acked or nacked, the server stops sending more messages. + The sending of messages resumes once the number of outstanding + messages is less than this value. If the value is <= 0, there + is no limit to the number of outstanding messages. This + property can only be set on the initial StreamingPullRequest. + If it is set on a subsequent request, the stream will be + aborted with status ``INVALID_ARGUMENT``. + max_outstanding_bytes: + Flow control settings for the maximum number of outstanding + bytes. 
When there are ``max_outstanding_bytes`` or more worth + of messages currently sent to the streaming pull client that + have not yet been acked or nacked, the server will stop + sending more messages. The sending of messages resumes once + the number of outstanding bytes is less than this value. If + the value is <= 0, there is no limit to the number of + outstanding bytes. This property can only be set on the + initial StreamingPullRequest. If it is set on a subsequent + request, the stream will be aborted with status + ``INVALID_ARGUMENT``. """, # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) }, @@ -4553,6 +4649,7 @@ "__doc__": """Response for the ``StreamingPull`` method. This response is used to stream messages from the server to the client. + Attributes: received_messages: Received Pub/Sub messages. This will not be empty. @@ -4579,6 +4676,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``CreateSnapshot`` method. + Attributes: name: Required. User-provided name for this snapshot. If the name is @@ -4614,6 +4712,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the UpdateSnapshot method. + Attributes: snapshot: Required. The updated snapshot object. @@ -4646,6 +4745,7 @@ set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. + Attributes: name: The name of the snapshot. @@ -4682,6 +4782,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the GetSnapshot method. + Attributes: snapshot: Required. The name of the snapshot to get. Format is @@ -4700,6 +4801,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``ListSnapshots`` method. + Attributes: project: Required. The name of the project in which to list snapshots. 
@@ -4725,6 +4827,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Response for the ``ListSnapshots`` method. + Attributes: snapshots: The resulting snapshots. @@ -4746,6 +4849,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``DeleteSnapshot`` method. + Attributes: snapshot: Required. The name of the snapshot to delete. Format is @@ -4764,6 +4868,7 @@ "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", "__doc__": """Request for the ``Seek`` method. + Attributes: subscription: Required. The subscription to affect. @@ -4862,8 +4967,8 @@ index=0, serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", create_key=_descriptor._internal_create_key, - serialized_start=6352, - serialized_end=7795, + serialized_start=6417, + serialized_end=7860, methods=[ _descriptor.MethodDescriptor( name="CreateTopic", @@ -4969,8 +5074,8 @@ index=1, serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", create_key=_descriptor._internal_create_key, - serialized_start=7798, - serialized_end=10489, + serialized_start=7863, + serialized_end=10554, methods=[ _descriptor.MethodDescriptor( name="CreateSubscription", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py index 5e99bf2dedc5..ca2cf7903d86 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py @@ -1,4 +1,5 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" import grpc from google.cloud.pubsub_v1.proto import ( @@ -9,15 +10,15 @@ class PublisherStub(object): """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ + messages to a topic. + """ def __init__(self, channel): """Constructor. - Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.CreateTopic = channel.unary_unary( "/google.pubsub.v1.Publisher/CreateTopic", request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, @@ -67,84 +68,84 @@ def __init__(self, channel): class PublisherServicer(object): """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ + messages to a topic. + """ def CreateTopic(self, request, context): """Creates the given topic with the given name. See the - - resource name rules. - """ + + resource name rules. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateTopic(self, request, context): """Updates an existing topic. Note that certain properties of a - topic are not modifiable. - """ + topic are not modifiable. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Publish(self, request, context): """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. - """ + does not exist. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetTopic(self, request, context): """Gets the configuration of a topic. 
- """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListTopics(self, request, context): """Lists matching topics. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListTopicSubscriptions(self, request, context): """Lists the names of the attached subscriptions on this topic. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListTopicSnapshots(self, request, context): """Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteTopic(self, request, context): """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ + does not exist. After a topic is deleted, a new topic may be created with + the same name; this is an entirely new topic with none of the old + configuration or subscriptions. 
Existing subscriptions to this topic are + not deleted, but their `topic` field is set to `_deleted-topic_`. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DetachSubscription(self, request, context): """Detaches a subscription from this topic. All messages retained in the - subscription are dropped. Subsequent `Pull` and `StreamingPull` requests - will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - """ + subscription are dropped. Subsequent `Pull` and `StreamingPull` requests + will return FAILED_PRECONDITION. If the subscription is a push + subscription, pushes to the endpoint will stop. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -204,18 +205,268 @@ def add_PublisherServicer_to_server(servicer, server): server.add_generic_rpc_handlers((generic_handler,)) +# This class is part of an EXPERIMENTAL API. +class Publisher(object): + """The service that an application uses to manipulate topics, and to send + messages to a topic. 
+ """ + + @staticmethod + def CreateTopic( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/CreateTopic", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def UpdateTopic( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/UpdateTopic", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Publish( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/Publish", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetTopic( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): 
+ return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/GetTopic", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListTopics( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/ListTopics", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListTopicSubscriptions( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListTopicSnapshots( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def DeleteTopic( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/DeleteTopic", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def DetachSubscription( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Publisher/DetachSubscription", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + class SubscriberStub(object): """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ + consume messages from a subscription via the `Pull` method or by + establishing a bi-directional stream using the `StreamingPull` method. + """ def __init__(self, channel): """Constructor. 
- Args: - channel: A grpc.Channel. - """ + Args: + channel: A grpc.Channel. + """ self.CreateSubscription = channel.unary_unary( "/google.pubsub.v1.Subscriber/CreateSubscription", request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, @@ -300,104 +551,104 @@ def __init__(self, channel): class SubscriberServicer(object): """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ + consume messages from a subscription via the `Pull` method or by + establishing a bi-directional stream using the `StreamingPull` method. + """ def CreateSubscription(self, request, context): """Creates a subscription to a given topic. See the - - resource name rules. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. Note that - for REST API requests, you must specify a name in the request. - """ + + resource name rules. + If the subscription already exists, returns `ALREADY_EXISTS`. + If the corresponding topic doesn't exist, returns `NOT_FOUND`. + + If the name is not provided in the request, the server will assign a random + name for this subscription on the same project as the topic, conforming + to the + [resource name + format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. Note that + for REST API requests, you must specify a name in the request. 
+ """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetSubscription(self, request, context): """Gets the configuration details of a subscription. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateSubscription(self, request, context): """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - """ + subscription, such as its topic, are not modifiable. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListSubscriptions(self, request, context): """Lists matching subscriptions. - """ + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteSubscription(self, request, context): """Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - """ + are immediately dropped. Calls to `Pull` after deletion will return + `NOT_FOUND`. After a subscription is deleted, a new one may be created with + the same name, but the new one has no association with the old + subscription or its topic unless the same topic is specified. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ModifyAckDeadline(self, request, context): """Modifies the ack deadline for a specific message. 
This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ + to indicate that more time is needed to process a message by the + subscriber, or to make the message available for redelivery if the + processing was interrupted. Note that this does not modify the + subscription-level `ackDeadlineSeconds` used for subsequent messages. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Acknowledge(self, request, context): """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. + `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages + from the subscription. - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ + Acknowledging a message whose ack deadline has expired may succeed, + but such a message may be redelivered later. Acknowledging a message more + than once will not result in an error. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Pull(self, request, context): """Pulls messages from the server. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ + there are too many concurrent pull requests pending for the given + subscription. 
+ """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def StreamingPull(self, request_iterator, context): """Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `UNAVAILABLE` to - reassign server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by configuring the - underlying RPC channel. - """ + client. The client streams acknowledgements and ack deadline modifications + back to the server. The server will close the stream and return the status + on any error. The server may close the stream with status `UNAVAILABLE` to + reassign server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by configuring the + underlying RPC channel. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -405,100 +656,100 @@ def StreamingPull(self, request_iterator, context): def ModifyPushConfig(self, request, context): """Modifies the `PushConfig` for a specified subscription. - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ + This may be used to change a push subscription to a pull one (signified by + an empty `PushConfig`) or vice versa, or change the endpoint URL and other + attributes of a push subscription. 
Messages will accumulate for delivery + continuously through the call regardless of changes to the `PushConfig`. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def GetSnapshot(self, request, context): """Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - """ + Seek + operations, which allow you to manage message acknowledgments in bulk. That + is, you can set the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def ListSnapshots(self, request, context): """Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def CreateSnapshot(self, request, context): """Creates a snapshot from the requested subscription. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. -

If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. - If the backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. - See also the `Snapshot.expire_time` field. If the name is not provided in - the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. Note that for - REST API requests, you must specify a name in the request. - """ + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. +

If the snapshot already exists, returns `ALREADY_EXISTS`. + If the requested subscription doesn't exist, returns `NOT_FOUND`. + If the backlog in the subscription is too old -- and the resulting snapshot + would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. + See also the `Snapshot.expire_time` field. If the name is not provided in + the request, the server will assign a random + name for this snapshot on the same project as the subscription, conforming + to the + [resource name + format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. Note that for + REST API requests, you must specify a name in the request. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def UpdateSnapshot(self, request, context): """Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def DeleteSnapshot(self, request, context): """Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot.

+ When the snapshot is deleted, all messages retained in the snapshot + are immediately dropped. After a snapshot is deleted, a new one may be + created with the same name, but the new one has no association with the old + snapshot or its subscription, unless the same subscription is specified. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") def Seek(self, request, context): """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - """ + whichever is provided in the request. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state + captured by a snapshot. Note that both the subscription and the snapshot + must be on the same topic. + """ context.set_code(grpc.StatusCode.UNIMPLEMENTED) context.set_details("Method not implemented!") raise NotImplementedError("Method not implemented!") @@ -591,3 +842,443 @@ def add_SubscriberServicer_to_server(servicer, server): "google.pubsub.v1.Subscriber", rpc_method_handlers ) server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. +class Subscriber(object): + """The service that an application uses to manipulate subscriptions and to + consume messages from a subscription via the `Pull` method or by + establishing a bi-directional stream using the `StreamingPull` method. 
+ """ + + @staticmethod + def CreateSubscription( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/CreateSubscription", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetSubscription( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/GetSubscription", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def UpdateSubscription( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/UpdateSubscription", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListSubscriptions( + request, + target, + options=(), + channel_credentials=None, + 
call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/ListSubscriptions", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def DeleteSubscription( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/DeleteSubscription", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ModifyAckDeadline( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Acknowledge( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, 
+ "/google.pubsub.v1.Subscriber/Acknowledge", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Pull( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/Pull", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def StreamingPull( + request_iterator, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.stream_stream( + request_iterator, + target, + "/google.pubsub.v1.Subscriber/StreamingPull", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ModifyPushConfig( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, + 
google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetSnapshot( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/GetSnapshot", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ListSnapshots( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/ListSnapshots", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def CreateSnapshot( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/CreateSnapshot", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + 
metadata, + ) + + @staticmethod + def UpdateSnapshot( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/UpdateSnapshot", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def DeleteSnapshot( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def Seek( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/google.pubsub.v1.Subscriber/Seek", + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, + google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, + options, + channel_credentials, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 87c9114a2c5a..f67fbeec5314 100644 --- a/packages/google-cloud-pubsub/synth.metadata 
+++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "e204b86e34346b6d456771ef4d4cfe5e15e53238" + "sha": "c8f63788636c2e3436c8ce6a01ef3b59e3df772a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "86285bbd54fbf9708838219e3422aa47fb8fc0b0", - "internalRef": "314795690" + "sha": "b882b8e6bfcd708042ff00f7adc67ce750817dd0", + "internalRef": "318028816" } }, { From 87d1b14f5f838d08cf01f465e5a816d247af96ef Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 6 Jul 2020 19:59:11 +0200 Subject: [PATCH 0474/1197] chore(deps): update dependency google-cloud-pubsub to v1.6.1 (#144) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9b496510abb5..42ab449b1ba1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.6.0 +google-cloud-pubsub==1.6.1 From c00687a7aa1c4550042d2d787eab5965b11c9973 Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Tue, 7 Jul 2020 11:49:25 -0400 Subject: [PATCH 0475/1197] feat: Add support for server-side flow control (#143) * chore: Remove notes about ordering keys being experimental. * Revert "chore: Remove notes about ordering keys being experimental." This reverts commit 38b2a3e91dd4f3f3c6657f4660fa1df8c0239124. 
* feat: Add support for server-side flow control * Add unit test for flow control --- .../subscriber/_protocol/streaming_pull_manager.py | 2 ++ .../pubsub_v1/subscriber/test_streaming_pull_manager.py | 9 +++++++++ 2 files changed, 11 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 2c3e51fee241..4e3f2493362e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -575,6 +575,8 @@ def _get_initial_request(self, stream_ack_deadline_seconds): stream_ack_deadline_seconds=stream_ack_deadline_seconds, subscription=self._subscription, client_id=self._client_id, + max_outstanding_messages=self._flow_control.max_messages, + max_outstanding_bytes=self._flow_control.max_bytes, ) # Return the initial request. 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index dd103599157a..3f2881df6c09 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -159,6 +159,15 @@ def test_client_id(): assert client_id_1 != client_id_2 +def test_streaming_flow_control(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) + request = manager._get_initial_request(stream_ack_deadline_seconds=10) + assert request.max_outstanding_messages == 10 + assert request.max_outstanding_bytes == 1000 + + def test_ack_deadline_with_max_duration_per_lease_extension(): manager = make_manager() manager._flow_control = types.FlowControl(max_duration_per_lease_extension=5) From 15dd7e35fb144a8921c9205f8c9a9530b1c44a24 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 10 Jul 2020 17:54:57 +0200 Subject: [PATCH 0476/1197] docs: add flow control section to publish overview (#129) * docs: add flow control section to publish overview * Explain options for limit exceeded behavior * Omit a sentence that might cause confusion. 
--- .../docs/publisher/index.rst | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index 2a785359c443..cd2e5cbea777 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -128,6 +128,42 @@ You can also attach a callback to the future: future.add_done_callback(callback) +Publish Flow Control +-------------------- + +If publishing large amounts of messages or very large messages in quick +succession, some of the publish requests might time out, especially if the +bandwidth available is limited. To mitigate this the client can be +configured with custom :class:`~.pubsub_v1.types.PublishFlowControl` settings. + +You can configure the maximum desired number of messages and their maximum total +size, as well as the action that should be taken when the threshold is reached. + +.. code-block:: python + + from google.cloud import pubsub_v1 + + client = pubsub_v1.PublisherClient( + publisher_options=pubsub_v1.types.PublisherOptions( + flow_control=pubsub_v1.types.PublishFlowControl( + message_limit=500, + byte_limit=2 * 1024 * 1024, + limit_exceeded_behavior=pubsub_v1.types.LimitExceededBehavior.BLOCK, + ), + ), + ) + +The action to be taken on overflow can be one of the following: + +* :attr:`~.pubsub_v1.types.LimitExceededBehavior.IGNORE` (default): Ignore the + overflow and continue publishing the messages as normal. +* :attr:`~.pubsub_v1.types.LimitExceededBehavior.ERROR`: Raise + :exc:`~.pubsub_v1.publisher.exceptions.FlowControlLimitError` and reject the message. +* :attr:`~.pubsub_v1.types.LimitExceededBehavior.BLOCK`: Temporarily block in the + :meth:`~.pubsub_v1.publisher.client.Client.publish` method until there is + enough capacity available. 
+ + API Reference ------------- From d324cf0b0d2c813e7992073035809e1a5d24ab6e Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 10 Jul 2020 14:30:41 +0200 Subject: [PATCH 0477/1197] Revert "samples: add samples from pubsub/cloud-client (#134)" This reverts commit e204b86e34346b6d456771ef4d4cfe5e15e53238. We want to merge the samples branch *unsquashed* to preserve samples commit history. --- .../google-cloud-pubsub/.github/CODEOWNERS | 11 - .../samples/AUTHORING_GUIDE.md | 1 - .../samples/CONTRIBUTING.md | 1 - .../samples/snippets/README.rst | 282 ------- .../samples/snippets/README.rst.in | 30 - .../samples/snippets/iam.py | 231 ------ .../samples/snippets/iam_test.py | 118 --- .../samples/snippets/noxfile.py | 224 ----- .../samples/snippets/publisher.py | 334 -------- .../samples/snippets/publisher_test.py | 146 ---- .../samples/snippets/quickstart/pub.py | 86 -- .../samples/snippets/quickstart/pub_test.py | 56 -- .../samples/snippets/quickstart/sub.py | 69 -- .../samples/snippets/quickstart/sub_test.py | 102 --- .../samples/snippets/requirements-test.txt | 3 - .../samples/snippets/subscriber.py | 783 ------------------ .../samples/snippets/subscriber_test.py | 341 -------- packages/google-cloud-pubsub/synth.py | 11 +- 18 files changed, 1 insertion(+), 2828 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.github/CODEOWNERS delete mode 100644 packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md delete mode 100644 packages/google-cloud-pubsub/samples/CONTRIBUTING.md delete mode 100644 packages/google-cloud-pubsub/samples/snippets/README.rst delete mode 100644 packages/google-cloud-pubsub/samples/snippets/README.rst.in delete mode 100644 packages/google-cloud-pubsub/samples/snippets/iam.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/iam_test.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/noxfile.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/publisher.py delete mode 100644 
packages/google-cloud-pubsub/samples/snippets/publisher_test.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/requirements-test.txt delete mode 100644 packages/google-cloud-pubsub/samples/snippets/subscriber.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/subscriber_test.py diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS deleted file mode 100644 index cf01548a9f04..000000000000 --- a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ /dev/null @@ -1,11 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax - - -# The python-samples-owners team is the default owner for anything not -# explicitly taken by someone else. 
- - /samples/ @anguillanneuf @hongalex @googleapis/python-samples-owners diff --git a/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md deleted file mode 100644 index 55c97b32f4c1..000000000000 --- a/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md +++ /dev/null @@ -1 +0,0 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/CONTRIBUTING.md b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md deleted file mode 100644 index 34c882b6f1a3..000000000000 --- a/packages/google-cloud-pubsub/samples/CONTRIBUTING.md +++ /dev/null @@ -1 +0,0 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst deleted file mode 100644 index 2676680afdef..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ /dev/null @@ -1,282 +0,0 @@ - -.. This file is automatically generated. Do not edit this file directly. - -Google Cloud Pub/Sub Python Samples -=============================================================================== - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/README.rst - - -This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. - - - - -.. 
_Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs - - -Setup -------------------------------------------------------------------------------- - - - -Authentication -++++++++++++++ - -This sample requires you to have authentication setup. Refer to the -`Authentication Getting Started Guide`_ for instructions on setting up -credentials for applications. - -.. _Authentication Getting Started Guide: - https://cloud.google.com/docs/authentication/getting-started - - - - -Install Dependencies -++++++++++++++++++++ - -#. Clone python-docs-samples and change directory to the sample directory you want to use. - - .. code-block:: bash - - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git - -#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup - -#. Create a virtualenv. Samples are compatible with Python 3.6+. - - .. code-block:: bash - - $ virtualenv env - $ source env/bin/activate - -#. Install the dependencies needed to run the samples. - - .. code-block:: bash - - $ pip install -r requirements.txt - -.. _pip: https://pip.pypa.io/ -.. _virtualenv: https://virtualenv.pypa.io/ - - - - - - -Samples -------------------------------------------------------------------------------- - - -Quickstart -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst - - - - -To run this sample: - -.. 
code-block:: bash - - $ python quickstart.py - - - - -Publisher -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py,pubsub/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python publisher.py - - - usage: publisher.py [-h] - project_id - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} - ... - - This application demonstrates how to perform basic operations on topics - with the Cloud Pub/Sub API. - - For more information, see the README.md under /pubsub and the documentation - at https://cloud.google.com/pubsub/docs. - - positional arguments: - project_id Your Google Cloud project ID - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} - list Lists all Pub/Sub topics in the given project. - create Create a new Pub/Sub topic. - delete Deletes an existing Pub/Sub topic. - publish Publishes multiple messages to a Pub/Sub topic. - publish-with-custom-attributes - Publishes multiple messages with custom attributes to - a Pub/Sub topic. - publish-with-error-handler - Publishes multiple messages to a Pub/Sub topic with an - error handler. - publish-with-batch-settings - Publishes multiple messages to a Pub/Sub topic with - batch settings. - publish-with-retry-settings - Publishes messages with custom retry settings. - - optional arguments: - -h, --help show this help message and exit - - - - - -Subscribers -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py,pubsub/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python subscriber.py - - - usage: subscriber.py [-h] - project_id - {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} - ... - - This application demonstrates how to perform basic operations on - subscriptions with the Cloud Pub/Sub API. - - For more information, see the README.md under /pubsub and the documentation - at https://cloud.google.com/pubsub/docs. - - positional arguments: - project_id Your Google Cloud project ID - {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} - list-in-topic Lists all subscriptions for a given topic. - list-in-project Lists all subscriptions in the current project. - create Create a new pull subscription on the given topic. - create-with-dead-letter-policy - Create a subscription with dead letter policy. - create-push Create a new push subscription on the given topic. - delete Deletes an existing Pub/Sub topic. - update-push Updates an existing Pub/Sub subscription's push - endpoint URL. Note that certain properties of a - subscription, such as its topic, are not modifiable. - update-dead-letter-policy - Update a subscription's dead letter policy. 
- remove-dead-letter-policy - Remove dead letter policy from a subscription. - receive Receives messages from a pull subscription. - receive-custom-attributes - Receives messages from a pull subscription. - receive-flow-control - Receives messages from a pull subscription with flow - control. - receive-synchronously - Pulling messages synchronously. - receive-synchronously-with-lease - Pulling messages synchronously with lease management - listen-for-errors Receives messages and catches errors from a pull - subscription. - receive-messages-with-delivery-attempts - - optional arguments: - -h, --help show this help message and exit - - - - - -Identity and Access Management -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py,pubsub/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python iam.py - - - usage: iam.py [-h] - project - {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} - ... - - This application demonstrates how to perform basic operations on IAM - policies with the Cloud Pub/Sub API. - - For more information, see the README.md under /pubsub and the documentation - at https://cloud.google.com/pubsub/docs. - - positional arguments: - project Your Google Cloud project ID - {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} - get-topic-policy Prints the IAM policy for the given topic. - get-subscription-policy - Prints the IAM policy for the given subscription. - set-topic-policy Sets the IAM policy for a topic. - set-subscription-policy - Sets the IAM policy for a topic. 
- check-topic-permissions - Checks to which permissions are available on the given - topic. - check-subscription-permissions - Checks to which permissions are available on the given - subscription. - - optional arguments: - -h, --help show this help message and exit - - - - - - - - - -The client library -------------------------------------------------------------------------------- - -This sample uses the `Google Cloud Client Library for Python`_. -You can read the documentation for more details on API usage and use GitHub -to `browse the source`_ and `report issues`_. - -.. _Google Cloud Client Library for Python: - https://googlecloudplatform.github.io/google-cloud-python/ -.. _browse the source: - https://github.com/GoogleCloudPlatform/google-cloud-python -.. _report issues: - https://github.com/GoogleCloudPlatform/google-cloud-python/issues - - - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst.in b/packages/google-cloud-pubsub/samples/snippets/README.rst.in deleted file mode 100644 index ddbc647121b2..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst.in +++ /dev/null @@ -1,30 +0,0 @@ -# This file is used to generate README.rst - -product: - name: Google Cloud Pub/Sub - short_name: Cloud Pub/Sub - url: https://cloud.google.com/pubsub/docs - description: > - `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that - allows you to send and receive messages between independent applications. 
- -setup: -- auth -- install_deps - -samples: -- name: Quickstart - file: quickstart.py -- name: Publisher - file: publisher.py - show_help: true -- name: Subscribers - file: subscriber.py - show_help: true -- name: Identity and Access Management - file: iam.py - show_help: true - -cloud_client_library: true - -folder: pubsub/cloud-client \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py deleted file mode 100644 index 71c55d764c0c..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ /dev/null @@ -1,231 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on IAM -policies with the Cloud Pub/Sub API. - -For more information, see the README.md under /pubsub and the documentation -at https://cloud.google.com/pubsub/docs. 
-""" - -import argparse - - -def get_topic_policy(project, topic_id): - """Prints the IAM policy for the given topic.""" - # [START pubsub_get_topic_policy] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) - - policy = client.get_iam_policy(topic_path) - - print("Policy for topic {}:".format(topic_path)) - for binding in policy.bindings: - print("Role: {}, Members: {}".format(binding.role, binding.members)) - # [END pubsub_get_topic_policy] - - -def get_subscription_policy(project, subscription_id): - """Prints the IAM policy for the given subscription.""" - # [START pubsub_get_subscription_policy] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) - - policy = client.get_iam_policy(subscription_path) - - print("Policy for subscription {}:".format(subscription_path)) - for binding in policy.bindings: - print("Role: {}, Members: {}".format(binding.role, binding.members)) - - client.close() - # [END pubsub_get_subscription_policy] - - -def set_topic_policy(project, topic_id): - """Sets the IAM policy for a topic.""" - # [START pubsub_set_topic_policy] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) - - policy = client.get_iam_policy(topic_path) - - # Add all users as viewers. - policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) - - # Add a group as a publisher. 
- policy.bindings.add( - role="roles/pubsub.publisher", members=["group:cloud-logs@google.com"] - ) - - # Set the policy - policy = client.set_iam_policy(topic_path, policy) - - print("IAM policy for topic {} set: {}".format(topic_id, policy)) - # [END pubsub_set_topic_policy] - - -def set_subscription_policy(project, subscription_id): - """Sets the IAM policy for a topic.""" - # [START pubsub_set_subscription_policy] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) - - policy = client.get_iam_policy(subscription_path) - - # Add all users as viewers. - policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) - - # Add a group as an editor. - policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) - - # Set the policy - policy = client.set_iam_policy(subscription_path, policy) - - print("IAM policy for subscription {} set: {}".format(subscription_id, policy)) - - client.close() - # [END pubsub_set_subscription_policy] - - -def check_topic_permissions(project, topic_id): - """Checks to which permissions are available on the given topic.""" - # [START pubsub_test_topic_permissions] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) - - permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] - - allowed_permissions = client.test_iam_permissions(topic_path, permissions_to_check) - - print( - "Allowed permissions for topic {}: {}".format(topic_path, allowed_permissions) - ) - # [END pubsub_test_topic_permissions] - - -def check_subscription_permissions(project, subscription_id): - """Checks to which permissions are available on the given 
subscription.""" - # [START pubsub_test_subscription_permissions] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) - - permissions_to_check = [ - "pubsub.subscriptions.consume", - "pubsub.subscriptions.update", - ] - - allowed_permissions = client.test_iam_permissions( - subscription_path, permissions_to_check - ) - - print( - "Allowed permissions for subscription {}: {}".format( - subscription_path, allowed_permissions - ) - ) - - client.close() - # [END pubsub_test_subscription_permissions] - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("project", help="Your Google Cloud project ID") - - subparsers = parser.add_subparsers(dest="command") - - get_topic_policy_parser = subparsers.add_parser( - "get-topic-policy", help=get_topic_policy.__doc__ - ) - get_topic_policy_parser.add_argument("topic_id") - - get_subscription_policy_parser = subparsers.add_parser( - "get-subscription-policy", help=get_subscription_policy.__doc__ - ) - get_subscription_policy_parser.add_argument("subscription_id") - - set_topic_policy_parser = subparsers.add_parser( - "set-topic-policy", help=set_topic_policy.__doc__ - ) - set_topic_policy_parser.add_argument("topic_id") - - set_subscription_policy_parser = subparsers.add_parser( - "set-subscription-policy", help=set_subscription_policy.__doc__ - ) - set_subscription_policy_parser.add_argument("subscription_id") - - check_topic_permissions_parser = subparsers.add_parser( - "check-topic-permissions", help=check_topic_permissions.__doc__ - ) - check_topic_permissions_parser.add_argument("topic_id") - - check_subscription_permissions_parser = subparsers.add_parser( - "check-subscription-permissions", 
help=check_subscription_permissions.__doc__, - ) - check_subscription_permissions_parser.add_argument("subscription_id") - - args = parser.parse_args() - - if args.command == "get-topic-policy": - get_topic_policy(args.project, args.topic_id) - elif args.command == "get-subscription-policy": - get_subscription_policy(args.project, args.subscription_id) - elif args.command == "set-topic-policy": - set_topic_policy(args.project, args.topic_id) - elif args.command == "set-subscription-policy": - set_subscription_policy(args.project, args.subscription_id) - elif args.command == "check-topic-permissions": - check_topic_permissions(args.project, args.topic_id) - elif args.command == "check-subscription-permissions": - check_subscription_permissions(args.project, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py deleted file mode 100644 index d196953f6207..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ /dev/null @@ -1,118 +0,0 @@ -# Copyright 2016 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import uuid - -from google.cloud import pubsub_v1 -import pytest - -import iam - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "iam-test-topic-" + UUID -SUBSCRIPTION = "iam-test-subscription-" + UUID - - -@pytest.fixture(scope="module") -def publisher_client(): - yield pubsub_v1.PublisherClient() - - -@pytest.fixture(scope="module") -def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - publisher_client.delete_topic(topic_path) - except Exception: - pass - - publisher_client.create_topic(topic_path) - - yield topic_path - - publisher_client.delete_topic(topic_path) - - -@pytest.fixture(scope="module") -def subscriber_client(): - subscriber_client = pubsub_v1.SubscriberClient() - yield subscriber_client - subscriber_client.close() - - -@pytest.fixture -def subscription(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) - - try: - subscriber_client.delete_subscription(subscription_path) - except Exception: - pass - - subscriber_client.create_subscription(subscription_path, topic=topic) - - yield subscription_path - - subscriber_client.delete_subscription(subscription_path) - - -def test_get_topic_policy(topic, capsys): - iam.get_topic_policy(PROJECT, TOPIC) - - out, _ = capsys.readouterr() - assert topic in out - - -def test_get_subscription_policy(subscription, capsys): - iam.get_subscription_policy(PROJECT, SUBSCRIPTION) - - out, _ = capsys.readouterr() - assert subscription in out - - -def test_set_topic_policy(publisher_client, topic): - iam.set_topic_policy(PROJECT, TOPIC) - - policy = publisher_client.get_iam_policy(topic) - assert "roles/pubsub.publisher" in str(policy) - assert "allUsers" in str(policy) - - -def test_set_subscription_policy(subscriber_client, subscription): - iam.set_subscription_policy(PROJECT, SUBSCRIPTION) - - policy = subscriber_client.get_iam_policy(subscription) - assert 
"roles/pubsub.viewer" in str(policy) - assert "allUsers" in str(policy) - - -def test_check_topic_permissions(topic, capsys): - iam.check_topic_permissions(PROJECT, TOPIC) - - out, _ = capsys.readouterr() - - assert topic in out - assert "pubsub.topics.publish" in out - - -def test_check_subscription_permissions(subscription, capsys): - iam.check_subscription_permissions(PROJECT, SUBSCRIPTION) - - out, _ = capsys.readouterr() - - assert subscription in out - assert "pubsub.subscriptions.consume" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py deleted file mode 100644 index ba55d7ce53ca..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ /dev/null @@ -1,224 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import print_function - -import os -from pathlib import Path -import sys - -import nox - - -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING -# DO NOT EDIT THIS FILE EVER! -# WARNING - WARNING - WARNING - WARNING - WARNING -# WARNING - WARNING - WARNING - WARNING - WARNING - -# Copy `noxfile_config.py` to your directory and modify it instead. - - -# `TEST_CONFIG` dict is a configuration hook that allows users to -# modify the test configurations. The values here should be in sync -# with `noxfile_config.py`. 
Users will copy `noxfile_config.py` into -# their directory and modify it. - -TEST_CONFIG = { - # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - - # An envvar key for determining the project id to use. Change it - # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a - # build specific Cloud project. You can also use your own string - # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', - # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - - # A dictionary you want to inject into your test. Don't put any - # secrets here. These values will override predefined values. - 'envs': {}, -} - - -try: - # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') - from noxfile_config import TEST_CONFIG_OVERRIDE -except ImportError as e: - print("No user noxfile_config found: detail: {}".format(e)) - TEST_CONFIG_OVERRIDE = {} - -# Update the TEST_CONFIG with the user supplied values. -TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) - - -def get_pytest_env_vars(): - """Returns a dict for pytest invocation.""" - ret = {} - - # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] - # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] - - # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) - return ret - - -# DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] - -# Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] - -TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) - -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) -# -# Style Checks -# - - -def _determine_local_import_names(start_dir): - """Determines all import names that should be considered "local". 
- - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - -# Linting with flake8. -# -# We ignore the following rules: -# E203: whitespace before ‘:’ -# E266: too many leading ‘#’ for block comment -# E501: line too long -# I202: Additional newline in a section of imports -# -# We also need to specify the rules which are ignored by default: -# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] -FLAKE8_COMMON_ARGS = [ - "--show-source", - "--builtin=gettext", - "--max-complexity=20", - "--import-order-style=google", - "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", - "--max-line-length=88", -] - - -@nox.session -def lint(session): - session.install("flake8", "flake8-import-order") - - local_names = _determine_local_import_names(".") - args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), - "." - ] - session.run("flake8", *args) - - -# -# Sample Tests -# - - -PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] - - -def _session_tests(session, post_install=None): - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars() - ) - - -@nox.session(python=ALL_VERSIONS) -def py(session): - """Runs py.test for a sample using the specified version of Python.""" - if session.python in TESTED_VERSIONS: - _session_tests(session) - else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) - - -# -# Readmegen -# - - -def _get_repo_root(): - """ Returns the root folder of the project. """ - # Get root of this repository. Assume we don't have directories nested deeper than 10 items. - p = Path(os.getcwd()) - for i in range(10): - if p is None: - break - if Path(p / ".git").exists(): - return str(p) - p = p.parent - raise Exception("Unable to detect repository root.") - - -GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) - - -@nox.session -@nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): - """(Re-)generates the readme for a sample.""" - session.install("jinja2", "pyyaml") - dir_ = os.path.dirname(path) - - if os.path.exists(os.path.join(dir_, "requirements.txt")): - session.install("-r", os.path.join(dir_, "requirements.txt")) - - in_file = os.path.join(dir_, "README.rst.in") - session.run( - "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file - ) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py deleted file mode 100644 index 477b31b9cf71..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ /dev/null @@ -1,334 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google LLC. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on topics -with the Cloud Pub/Sub API. - -For more information, see the README.md under /pubsub and the documentation -at https://cloud.google.com/pubsub/docs. -""" - -import argparse - - -def list_topics(project_id): - """Lists all Pub/Sub topics in the given project.""" - # [START pubsub_list_topics] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - - publisher = pubsub_v1.PublisherClient() - project_path = publisher.project_path(project_id) - - for topic in publisher.list_topics(project_path): - print(topic) - # [END pubsub_list_topics] - - -def create_topic(project_id, topic_id): - """Create a new Pub/Sub topic.""" - # [START pubsub_quickstart_create_topic] - # [START pubsub_create_topic] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_id) - - topic = publisher.create_topic(topic_path) - - print("Topic created: {}".format(topic)) - # [END pubsub_quickstart_create_topic] - # [END pubsub_create_topic] - - -def delete_topic(project_id, topic_id): - """Deletes an existing Pub/Sub topic.""" - # [START pubsub_delete_topic] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_id) - - 
publisher.delete_topic(topic_path) - - print("Topic deleted: {}".format(topic_path)) - # [END pubsub_delete_topic] - - -def publish_messages(project_id, topic_id): - """Publishes multiple messages to a Pub/Sub topic.""" - # [START pubsub_quickstart_publisher] - # [START pubsub_publish] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - publisher = pubsub_v1.PublisherClient() - # The `topic_path` method creates a fully qualified identifier - # in the form `projects/{project_id}/topics/{topic_id}` - topic_path = publisher.topic_path(project_id, topic_id) - - for n in range(1, 10): - data = u"Message number {}".format(n) - # Data must be a bytestring - data = data.encode("utf-8") - # When you publish a message, the client returns a future. - future = publisher.publish(topic_path, data=data) - print(future.result()) - - print("Published messages.") - # [END pubsub_quickstart_publisher] - # [END pubsub_publish] - - -def publish_messages_with_custom_attributes(project_id, topic_id): - """Publishes multiple messages with custom attributes - to a Pub/Sub topic.""" - # [START pubsub_publish_custom_attributes] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_id) - - for n in range(1, 10): - data = u"Message number {}".format(n) - # Data must be a bytestring - data = data.encode("utf-8") - # Add two attributes, origin and username, to the message - future = publisher.publish( - topic_path, data, origin="python-sample", username="gcp" - ) - print(future.result()) - - print("Published messages with custom attributes.") - # [END pubsub_publish_custom_attributes] - - -def publish_messages_with_error_handler(project_id, topic_id): - # [START pubsub_publish_messages_error_handler] - """Publishes multiple messages to a Pub/Sub topic 
with an error handler.""" - import time - - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_id) - - futures = dict() - - def get_callback(f, data): - def callback(f): - try: - print(f.result()) - futures.pop(data) - except: # noqa - print("Please handle {} for {}.".format(f.exception(), data)) - - return callback - - for i in range(10): - data = str(i) - futures.update({data: None}) - # When you publish a message, the client returns a future. - future = publisher.publish( - topic_path, data=data.encode("utf-8") # data must be a bytestring. - ) - futures[data] = future - # Publish failures shall be handled in the callback function. - future.add_done_callback(get_callback(future, data)) - - # Wait for all the publish futures to resolve before exiting. - while futures: - time.sleep(5) - - print("Published message with error handler.") - # [END pubsub_publish_messages_error_handler] - - -def publish_messages_with_batch_settings(project_id, topic_id): - """Publishes multiple messages to a Pub/Sub topic with batch settings.""" - # [START pubsub_publisher_batch_settings] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - # Configure the batch to publish as soon as there is ten messages, - # one kilobyte of data, or one second has passed. - batch_settings = pubsub_v1.types.BatchSettings( - max_messages=10, # default 100 - max_bytes=1024, # default 1 MB - max_latency=1, # default 10 ms - ) - publisher = pubsub_v1.PublisherClient(batch_settings) - topic_path = publisher.topic_path(project_id, topic_id) - - # Resolve the publish future in a separate thread. 
- def callback(future): - message_id = future.result() - print(message_id) - - for n in range(1, 10): - data = u"Message number {}".format(n) - # Data must be a bytestring - data = data.encode("utf-8") - future = publisher.publish(topic_path, data=data) - # Non-blocking. Allow the publisher client to batch multiple messages. - future.add_done_callback(callback) - - print("Published messages with batch settings.") - # [END pubsub_publisher_batch_settings] - - -def publish_messages_with_retry_settings(project_id, topic_id): - """Publishes messages with custom retry settings.""" - # [START pubsub_publisher_retry_settings] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - # Configure the retry settings. Defaults will be overwritten. - retry_settings = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "publish": [ - "ABORTED", - "CANCELLED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - "UNKNOWN", - ] - }, - "retry_params": { - "messaging": { - "initial_retry_delay_millis": 100, # default: 100 - "retry_delay_multiplier": 1.3, # default: 1.3 - "max_retry_delay_millis": 60000, # default: 60000 - "initial_rpc_timeout_millis": 5000, # default: 25000 - "rpc_timeout_multiplier": 1.0, # default: 1.0 - "max_rpc_timeout_millis": 600000, # default: 30000 - "total_timeout_millis": 600000, # default: 600000 - } - }, - "methods": { - "Publish": { - "retry_codes_name": "publish", - "retry_params_name": "messaging", - } - }, - } - } - } - - publisher = pubsub_v1.PublisherClient(client_config=retry_settings) - topic_path = publisher.topic_path(project_id, topic_id) - - for n in range(1, 10): - data = u"Message number {}".format(n) - # Data must be a bytestring - data = data.encode("utf-8") - future = publisher.publish(topic_path, data=data) - print(future.result()) - - print("Published messages with retry settings.") - # [END 
pubsub_publisher_retry_settings] - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("project_id", help="Your Google Cloud project ID") - - subparsers = parser.add_subparsers(dest="command") - subparsers.add_parser("list", help=list_topics.__doc__) - - create_parser = subparsers.add_parser("create", help=create_topic.__doc__) - create_parser.add_argument("topic_id") - - delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) - delete_parser.add_argument("topic_id") - - publish_parser = subparsers.add_parser("publish", help=publish_messages.__doc__) - publish_parser.add_argument("topic_id") - - publish_with_custom_attributes_parser = subparsers.add_parser( - "publish-with-custom-attributes", - help=publish_messages_with_custom_attributes.__doc__, - ) - publish_with_custom_attributes_parser.add_argument("topic_id") - - publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, - ) - publish_with_error_handler_parser.add_argument("topic_id") - - publish_with_batch_settings_parser = subparsers.add_parser( - "publish-with-batch-settings", - help=publish_messages_with_batch_settings.__doc__, - ) - publish_with_batch_settings_parser.add_argument("topic_id") - - publish_with_retry_settings_parser = subparsers.add_parser( - "publish-with-retry-settings", - help=publish_messages_with_retry_settings.__doc__, - ) - publish_with_retry_settings_parser.add_argument("topic_id") - - args = parser.parse_args() - - if args.command == "list": - list_topics(args.project_id) - elif args.command == "create": - create_topic(args.project_id, args.topic_id) - elif args.command == "delete": - delete_topic(args.project_id, args.topic_id) - elif args.command == "publish": - publish_messages(args.project_id, args.topic_id) - elif args.command == "publish-with-custom-attributes": 
- publish_messages_with_custom_attributes(args.project_id, args.topic_id) - elif args.command == "publish-with-error-handler": - publish_messages_with_error_handler(args.project_id, args.topic_id) - elif args.command == "publish-with-batch-settings": - publish_messages_with_batch_settings(args.project_id, args.topic_id) - elif args.command == "publish-with-retry-settings": - publish_messages_with_retry_settings(args.project_id, args.topic_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py deleted file mode 100644 index b5c2ea1ea4b5..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ /dev/null @@ -1,146 +0,0 @@ -# Copyright 2016 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import time -import uuid - -import backoff -from google.cloud import pubsub_v1 -import mock -import pytest - -import publisher - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC_ADMIN = "publisher-test-topic-admin-" + UUID -TOPIC_PUBLISH = "publisher-test-topic-publish-" + UUID - - -@pytest.fixture -def client(): - yield pubsub_v1.PublisherClient() - - -@pytest.fixture -def topic_admin(client): - topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) - - try: - topic = client.get_topic(topic_path) - except: # noqa - topic = client.create_topic(topic_path) - - yield topic.name - # Teardown of `topic_admin` is handled in `test_delete()`. - - -@pytest.fixture -def topic_publish(client): - topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) - - try: - topic = client.get_topic(topic_path) - except: # noqa - topic = client.create_topic(topic_path) - - yield topic.name - - client.delete_topic(topic.name) - - -def _make_sleep_patch(): - real_sleep = time.sleep - - def new_sleep(period): - if period == 60: - real_sleep(5) - raise RuntimeError("sigil") - else: - real_sleep(period) - - return mock.patch("time.sleep", new=new_sleep) - - -def test_list(client, topic_admin, capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - publisher.list_topics(PROJECT) - out, _ = capsys.readouterr() - assert topic_admin in out - - eventually_consistent_test() - - -def test_create(client): - topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) - try: - client.delete_topic(topic_path) - except Exception: - pass - - publisher.create_topic(PROJECT, TOPIC_ADMIN) - - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert client.get_topic(topic_path) - - eventually_consistent_test() - - -def test_delete(client, topic_admin): - publisher.delete_topic(PROJECT, TOPIC_ADMIN) - - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - 
def eventually_consistent_test(): - with pytest.raises(Exception): - client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) - - eventually_consistent_test() - - -def test_publish(topic_publish, capsys): - publisher.publish_messages(PROJECT, TOPIC_PUBLISH) - - out, _ = capsys.readouterr() - assert "Published" in out - - -def test_publish_with_custom_attributes(topic_publish, capsys): - publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC_PUBLISH) - - out, _ = capsys.readouterr() - assert "Published" in out - - -def test_publish_with_batch_settings(topic_publish, capsys): - publisher.publish_messages_with_batch_settings(PROJECT, TOPIC_PUBLISH) - - out, _ = capsys.readouterr() - assert "Published" in out - - -def test_publish_with_retry_settings(topic_publish, capsys): - publisher.publish_messages_with_retry_settings(PROJECT, TOPIC_PUBLISH) - - out, _ = capsys.readouterr() - assert "Published" in out - - -def test_publish_with_error_handler(topic_publish, capsys): - publisher.publish_messages_with_error_handler(PROJECT, TOPIC_PUBLISH) - - out, _ = capsys.readouterr() - assert "Published" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py deleted file mode 100644 index 16432c0c3627..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# [START pubsub_quickstart_pub_all] -import argparse -import time - -# [START pubsub_quickstart_pub_deps] -from google.cloud import pubsub_v1 - -# [END pubsub_quickstart_pub_deps] - - -def get_callback(api_future, data, ref): - """Wrap message data in the context of the callback function.""" - - def callback(api_future): - try: - print( - "Published message {} now has message ID {}".format( - data, api_future.result() - ) - ) - ref["num_messages"] += 1 - except Exception: - print( - "A problem occurred when publishing {}: {}\n".format( - data, api_future.exception() - ) - ) - raise - - return callback - - -def pub(project_id, topic_id): - """Publishes a message to a Pub/Sub topic.""" - # [START pubsub_quickstart_pub_client] - # Initialize a Publisher client. - client = pubsub_v1.PublisherClient() - # [END pubsub_quickstart_pub_client] - # Create a fully qualified identifier in the form of - # `projects/{project_id}/topics/{topic_id}` - topic_path = client.topic_path(project_id, topic_id) - - # Data sent to Cloud Pub/Sub must be a bytestring. - data = b"Hello, World!" - - # Keep track of the number of published messages. - ref = dict({"num_messages": 0}) - - # When you publish a message, the client returns a future. - api_future = client.publish(topic_path, data=data) - api_future.add_done_callback(get_callback(api_future, data, ref)) - - # Keep the main thread from exiting while the message future - # gets resolved in the background. 
- while api_future.running(): - time.sleep(0.5) - print("Published {} message(s).".format(ref["num_messages"])) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("project_id", help="Google Cloud project ID") - parser.add_argument("topic_id", help="Pub/Sub topic ID") - - args = parser.parse_args() - - pub(args.project_id, args.topic_id) -# [END pubsub_quickstart_pub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py deleted file mode 100644 index 6f5cc06c4456..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import uuid - -from google.api_core.exceptions import AlreadyExists -from google.cloud import pubsub_v1 -import pytest - -import pub # noqa - - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "quickstart-pub-test-topic-" + UUID - - -@pytest.fixture(scope="module") -def publisher_client(): - yield pubsub_v1.PublisherClient() - - -@pytest.fixture(scope="module") -def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - publisher_client.create_topic(topic_path) - except AlreadyExists: - pass - - yield TOPIC - - publisher_client.delete_topic(topic_path) - - -def test_pub(publisher_client, topic, capsys): - pub.pub(PROJECT, topic) - - out, _ = capsys.readouterr() - - assert "Hello, World!" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py deleted file mode 100644 index efe00891593e..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# [START pubsub_quickstart_sub_all] -import argparse - -# [START pubsub_quickstart_sub_deps] -from google.cloud import pubsub_v1 - -# [END pubsub_quickstart_sub_deps] - - -def sub(project_id, subscription_id): - """Receives messages from a Pub/Sub subscription.""" - # [START pubsub_quickstart_sub_client] - # Initialize a Subscriber client - subscriber_client = pubsub_v1.SubscriberClient() - # [END pubsub_quickstart_sub_client] - # Create a fully qualified identifier in the form of - # `projects/{project_id}/subscriptions/{subscription_id}` - subscription_path = subscriber_client.subscription_path(project_id, subscription_id) - - def callback(message): - print( - "Received message {} of message ID {}\n".format(message, message.message_id) - ) - # Acknowledge the message. Unack'ed messages will be redelivered. - message.ack() - print("Acknowledged message {}\n".format(message.message_id)) - - streaming_pull_future = subscriber_client.subscribe( - subscription_path, callback=callback - ) - print("Listening for messages on {}..\n".format(subscription_path)) - - try: - # Calling result() on StreamingPullFuture keeps the main thread from - # exiting while messages get processed in the callbacks. 
- streaming_pull_future.result() - except: # noqa - streaming_pull_future.cancel() - - subscriber_client.close() - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("project_id", help="Google Cloud project ID") - parser.add_argument("subscription_id", help="Pub/Sub subscription ID") - - args = parser.parse_args() - - sub(args.project_id, args.subscription_id) -# [END pubsub_quickstart_sub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py deleted file mode 100644 index 38047422a935..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import os -import uuid - -from google.api_core.exceptions import AlreadyExists -from google.cloud import pubsub_v1 -import mock -import pytest - -import sub # noqa - - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "quickstart-sub-test-topic-" + UUID -SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID - -publisher_client = pubsub_v1.PublisherClient() -subscriber_client = pubsub_v1.SubscriberClient() - - -@pytest.fixture(scope="module") -def topic_path(): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - topic = publisher_client.create_topic(topic_path) - yield topic.name - except AlreadyExists: - yield topic_path - - publisher_client.delete_topic(topic_path) - - -@pytest.fixture(scope="module") -def subscription_path(topic_path): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) - - try: - subscription = subscriber_client.create_subscription( - subscription_path, topic_path - ) - yield subscription.name - except AlreadyExists: - yield subscription_path - - subscriber_client.delete_subscription(subscription_path) - subscriber_client.close() - - -def _publish_messages(topic_path): - publish_future = publisher_client.publish(topic_path, data=b"Hello World!") - publish_future.result() - - -def test_sub(monkeypatch, topic_path, subscription_path, capsys): - - real_client = pubsub_v1.SubscriberClient() - mock_client = mock.Mock(spec=pubsub_v1.SubscriberClient, wraps=real_client) - - # Attributes on mock_client_constructor uses the corresponding - # attributes on pubsub_v1.SubscriberClient. 
- mock_client_constructor = mock.create_autospec(pubsub_v1.SubscriberClient) - mock_client_constructor.return_value = mock_client - - monkeypatch.setattr(pubsub_v1, "SubscriberClient", mock_client_constructor) - - def mock_subscribe(subscription_path, callback=None): - real_future = real_client.subscribe(subscription_path, callback=callback) - mock_future = mock.Mock(spec=real_future, wraps=real_future) - - def mock_result(): - return real_future.result(timeout=10) - - mock_future.result.side_effect = mock_result - return mock_future - - mock_client.subscribe.side_effect = mock_subscribe - - _publish_messages(topic_path) - - sub.sub(PROJECT, SUBSCRIPTION) - - out, _ = capsys.readouterr() - assert "Received message" in out - assert "Acknowledged message" in out - - real_client.close() diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt deleted file mode 100644 index adf26b9f98bb..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ /dev/null @@ -1,3 +0,0 @@ -backoff==1.10.0 -pytest==5.3.2 -mock==3.0.5 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py deleted file mode 100644 index f079e7d423f8..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ /dev/null @@ -1,783 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -"""This application demonstrates how to perform basic operations on -subscriptions with the Cloud Pub/Sub API. - -For more information, see the README.md under /pubsub and the documentation -at https://cloud.google.com/pubsub/docs. -""" - -import argparse - - -def list_subscriptions_in_topic(project_id, topic_id): - """Lists all subscriptions for a given topic.""" - # [START pubsub_list_topic_subscriptions] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_id) - - for subscription in publisher.list_topic_subscriptions(topic_path): - print(subscription) - # [END pubsub_list_topic_subscriptions] - - -def list_subscriptions_in_project(project_id): - """Lists all subscriptions in the current project.""" - # [START pubsub_list_subscriptions] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - - subscriber = pubsub_v1.SubscriberClient() - project_path = subscriber.project_path(project_id) - - # Wrap the subscriber in a 'with' block to automatically call close() to - # close the underlying gRPC channel when done. 
- with subscriber: - for subscription in subscriber.list_subscriptions(project_path): - print(subscription.name) - # [END pubsub_list_subscriptions] - - -def create_subscription(project_id, topic_id, subscription_id): - """Create a new pull subscription on the given topic.""" - # [START pubsub_create_pull_subscription] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - # subscription_id = "your-subscription-id" - - subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - # Wrap the subscriber in a 'with' block to automatically call close() to - # close the underlying gRPC channel when done. - with subscriber: - subscription = subscriber.create_subscription(subscription_path, topic_path) - - print("Subscription created: {}".format(subscription)) - # [END pubsub_create_pull_subscription] - - -def create_subscription_with_dead_letter_topic( - project_id, topic_id, subscription_id, dead_letter_topic_id -): - """Create a subscription with dead letter policy.""" - # [START pubsub_dead_letter_create_subscription] - from google.cloud import pubsub_v1 - from google.cloud.pubsub_v1.types import DeadLetterPolicy - - # TODO(developer) - # project_id = "your-project-id" - # endpoint = "https://my-test-project.appspot.com/push" - # TODO(developer): This is an existing topic that the subscription - # with dead letter policy is attached to. - # topic_id = "your-topic-id" - # TODO(developer): This is an existing subscription with a dead letter policy. - # subscription_id = "your-subscription-id" - # TODO(developer): This is an existing dead letter topic that the subscription - # with dead letter policy will forward dead letter messages to. 
- # dead_letter_topic_id = "your-dead-letter-topic-id" - - subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) - subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) - - dead_letter_policy = DeadLetterPolicy( - dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 - ) - - with subscriber: - subscription = subscriber.create_subscription( - subscription_path, topic_path, dead_letter_policy=dead_letter_policy - ) - - print("Subscription created: {}".format(subscription.name)) - print( - "It will forward dead letter messages to: {}".format( - subscription.dead_letter_policy.dead_letter_topic - ) - ) - print( - "After {} delivery attempts.".format( - subscription.dead_letter_policy.max_delivery_attempts - ) - ) - # [END pubsub_dead_letter_create_subscription] - - -def create_push_subscription(project_id, topic_id, subscription_id, endpoint): - """Create a new push subscription on the given topic.""" - # [START pubsub_create_push_subscription] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - # subscription_id = "your-subscription-id" - # endpoint = "https://my-test-project.appspot.com/push" - - subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) - - # Wrap the subscriber in a 'with' block to automatically call close() to - # close the underlying gRPC channel when done. 
- with subscriber: - subscription = subscriber.create_subscription( - subscription_path, topic_path, push_config - ) - - print("Push subscription created: {}".format(subscription)) - print("Endpoint for subscription is: {}".format(endpoint)) - # [END pubsub_create_push_subscription] - - -def delete_subscription(project_id, subscription_id): - """Deletes an existing Pub/Sub topic.""" - # [START pubsub_delete_subscription] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - # Wrap the subscriber in a 'with' block to automatically call close() to - # close the underlying gRPC channel when done. - with subscriber: - subscriber.delete_subscription(subscription_path) - - print("Subscription deleted: {}".format(subscription_path)) - # [END pubsub_delete_subscription] - - -def update_push_subscription(project_id, topic_id, subscription_id, endpoint): - """ - Updates an existing Pub/Sub subscription's push endpoint URL. - Note that certain properties of a subscription, such as - its topic, are not modifiable. - """ - # [START pubsub_update_push_configuration] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # topic_id = "your-topic-id" - # subscription_id = "your-subscription-id" - # endpoint = "https://my-test-project.appspot.com/push" - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) - - subscription = pubsub_v1.types.Subscription( - name=subscription_path, topic=topic_id, push_config=push_config - ) - - update_mask = {"paths": {"push_config"}} - - # Wrap the subscriber in a 'with' block to automatically call close() to - # close the underlying gRPC channel when done. 
- with subscriber: - result = subscriber.update_subscription(subscription, update_mask) - - print("Subscription updated: {}".format(subscription_path)) - print("New endpoint for subscription is: {}".format(result.push_config)) - # [END pubsub_update_push_configuration] - - -def update_subscription_with_dead_letter_policy( - project_id, topic_id, subscription_id, dead_letter_topic_id -): - """Update a subscription's dead letter policy.""" - # [START pubsub_dead_letter_update_subscription] - from google.cloud import pubsub_v1 - from google.cloud.pubsub_v1.types import DeadLetterPolicy, FieldMask - - # TODO(developer) - # project_id = "your-project-id" - # TODO(developer): This is an existing topic that the subscription - # with dead letter policy is attached to. - # topic_id = "your-topic-id" - # TODO(developer): This is an existing subscription with a dead letter policy. - # subscription_id = "your-subscription-id" - # TODO(developer): This is an existing dead letter topic that the subscription - # with dead letter policy will forward dead letter messages to. - # dead_letter_topic_id = "your-dead-letter-topic-id" - - subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) - subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) - - subscription_before_update = subscriber.get_subscription(subscription_path) - print("Before the update: {}".format(subscription_before_update)) - - # Indicates which fields in the provided subscription to update. - update_mask = FieldMask(paths=["dead_letter_policy.max_delivery_attempts"]) - - # Construct a dead letter policy you expect to have after the update. - dead_letter_policy = DeadLetterPolicy( - dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=20 - ) - - # Construct the subscription with the dead letter policy you expect to have - # after the update. 
Here, values in the required fields (name, topic) help - # identify the subscription. - subscription = pubsub_v1.types.Subscription( - name=subscription_path, topic=topic_path, dead_letter_policy=dead_letter_policy, - ) - - with subscriber: - subscription_after_update = subscriber.update_subscription( - subscription, update_mask - ) - - print("After the update: {}".format(subscription_after_update)) - # [END pubsub_dead_letter_update_subscription] - return subscription_after_update - - -def remove_dead_letter_policy(project_id, topic_id, subscription_id): - """Remove dead letter policy from a subscription.""" - # [START pubsub_dead_letter_remove] - from google.cloud import pubsub_v1 - from google.cloud.pubsub_v1.types import FieldMask - - # TODO(developer) - # project_id = "your-project-id" - # TODO(developer): This is an existing topic that the subscription - # with dead letter policy is attached to. - # topic_id = "your-topic-id" - # TODO(developer): This is an existing subscription with a dead letter policy. - # subscription_id = "your-subscription-id" - - subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - subscription_before_update = subscriber.get_subscription(subscription_path) - print("Before removing the policy: {}".format(subscription_before_update)) - - # Indicates which fields in the provided subscription to update. - update_mask = FieldMask( - paths=[ - "dead_letter_policy.dead_letter_topic", - "dead_letter_policy.max_delivery_attempts", - ] - ) - - # Construct the subscription (without any dead letter policy) that you - # expect to have after the update. 
- subscription = pubsub_v1.types.Subscription( - name=subscription_path, topic=topic_path - ) - - with subscriber: - subscription_after_update = subscriber.update_subscription( - subscription, update_mask - ) - - print("After removing the policy: {}".format(subscription_after_update)) - # [END pubsub_dead_letter_remove] - return subscription_after_update - - -def receive_messages(project_id, subscription_id, timeout=None): - """Receives messages from a pull subscription.""" - # [START pubsub_subscriber_async_pull] - # [START pubsub_quickstart_subscriber] - from concurrent.futures import TimeoutError - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - # Number of seconds the subscriber should listen for messages - # timeout = 5.0 - - subscriber = pubsub_v1.SubscriberClient() - # The `subscription_path` method creates a fully qualified identifier - # in the form `projects/{project_id}/subscriptions/{subscription_id}` - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - def callback(message): - print("Received message: {}".format(message)) - message.ack() - - streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) - - # Wrap subscriber in a 'with' block to automatically call close() when done. - with subscriber: - try: - # When `timeout` is not set, result() will block indefinitely, - # unless an exception is encountered first. 
- streaming_pull_future.result(timeout=timeout) - except TimeoutError: - streaming_pull_future.cancel() - # [END pubsub_subscriber_async_pull] - # [END pubsub_quickstart_subscriber] - - -def receive_messages_with_custom_attributes(project_id, subscription_id, timeout=None): - """Receives messages from a pull subscription.""" - # [START pubsub_subscriber_async_pull_custom_attributes] - from concurrent.futures import TimeoutError - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - # Number of seconds the subscriber should listen for messages - # timeout = 5.0 - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - def callback(message): - print("Received message: {}".format(message.data)) - if message.attributes: - print("Attributes:") - for key in message.attributes: - value = message.attributes.get(key) - print("{}: {}".format(key, value)) - message.ack() - - streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) - - # Wrap subscriber in a 'with' block to automatically call close() when done. - with subscriber: - try: - # When `timeout` is not set, result() will block indefinitely, - # unless an exception is encountered first. 
- streaming_pull_future.result(timeout=timeout) - except TimeoutError: - streaming_pull_future.cancel() - # [END pubsub_subscriber_async_pull_custom_attributes] - - -def receive_messages_with_flow_control(project_id, subscription_id, timeout=None): - """Receives messages from a pull subscription with flow control.""" - # [START pubsub_subscriber_flow_settings] - from concurrent.futures import TimeoutError - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - # Number of seconds the subscriber should listen for messages - # timeout = 5.0 - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - def callback(message): - print("Received message: {}".format(message.data)) - message.ack() - - # Limit the subscriber to only have ten outstanding messages at a time. - flow_control = pubsub_v1.types.FlowControl(max_messages=10) - - streaming_pull_future = subscriber.subscribe( - subscription_path, callback=callback, flow_control=flow_control - ) - print("Listening for messages on {}..\n".format(subscription_path)) - - # Wrap subscriber in a 'with' block to automatically call close() when done. - with subscriber: - try: - # When `timeout` is not set, result() will block indefinitely, - # unless an exception is encountered first. 
- streaming_pull_future.result(timeout=timeout) - except TimeoutError: - streaming_pull_future.cancel() - # [END pubsub_subscriber_flow_settings] - - -def synchronous_pull(project_id, subscription_id): - """Pulling messages synchronously.""" - # [START pubsub_subscriber_sync_pull] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - NUM_MESSAGES = 3 - - # Wrap the subscriber in a 'with' block to automatically call close() to - # close the underlying gRPC channel when done. - with subscriber: - # The subscriber pulls a specific number of messages. - response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) - - ack_ids = [] - for received_message in response.received_messages: - print("Received: {}".format(received_message.message.data)) - ack_ids.append(received_message.ack_id) - - # Acknowledges the received messages so they will not be sent again. - subscriber.acknowledge(subscription_path, ack_ids) - - print( - "Received and acknowledged {} messages. Done.".format( - len(response.received_messages) - ) - ) - # [END pubsub_subscriber_sync_pull] - - -def synchronous_pull_with_lease_management(project_id, subscription_id): - """Pulling messages synchronously with lease management""" - # [START pubsub_subscriber_sync_pull_with_lease] - import logging - import multiprocessing - import random - import time - - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - NUM_MESSAGES = 2 - ACK_DEADLINE = 30 - SLEEP_TIME = 10 - - # The subscriber pulls a specific number of messages. 
- response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) - - multiprocessing.log_to_stderr() - logger = multiprocessing.get_logger() - logger.setLevel(logging.INFO) - - def worker(msg): - """Simulates a long-running process.""" - RUN_TIME = random.randint(1, 60) - logger.info( - "{}: Running {} for {}s".format( - time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME - ) - ) - time.sleep(RUN_TIME) - - # `processes` stores process as key and ack id and message as values. - processes = dict() - for message in response.received_messages: - process = multiprocessing.Process(target=worker, args=(message,)) - processes[process] = (message.ack_id, message.message.data) - process.start() - - while processes: - for process in list(processes): - ack_id, msg_data = processes[process] - # If the process is still running, reset the ack deadline as - # specified by ACK_DEADLINE once every while as specified - # by SLEEP_TIME. - if process.is_alive(): - # `ack_deadline_seconds` must be between 10 to 600. - subscriber.modify_ack_deadline( - subscription_path, [ack_id], ack_deadline_seconds=ACK_DEADLINE, - ) - logger.info( - "{}: Reset ack deadline for {} for {}s".format( - time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE, - ) - ) - - # If the processs is finished, acknowledges using `ack_id`. - else: - subscriber.acknowledge(subscription_path, [ack_id]) - logger.info( - "{}: Acknowledged {}".format( - time.strftime("%X", time.gmtime()), msg_data - ) - ) - processes.pop(process) - - # If there are still processes running, sleeps the thread. - if processes: - time.sleep(SLEEP_TIME) - - print( - "Received and acknowledged {} messages. Done.".format( - len(response.received_messages) - ) - ) - - # Close the underlying gPRC channel. Alternatively, wrap subscriber in - # a 'with' block to automatically call close() when done. 
- subscriber.close() - # [END pubsub_subscriber_sync_pull_with_lease] - - -def listen_for_errors(project_id, subscription_id, timeout=None): - """Receives messages and catches errors from a pull subscription.""" - # [START pubsub_subscriber_error_listener] - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - # Number of seconds the subscriber should listen for messages - # timeout = 5.0 - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - def callback(message): - print("Received message: {}".format(message)) - message.ack() - - streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) - - # Wrap subscriber in a 'with' block to automatically call close() when done. - with subscriber: - # When `timeout` is not set, result() will block indefinitely, - # unless an exception is encountered first. 
- try: - streaming_pull_future.result(timeout=timeout) - except Exception as e: - streaming_pull_future.cancel() - print( - "Listening for messages on {} threw an exception: {}.".format( - subscription_id, e - ) - ) - # [END pubsub_subscriber_error_listener] - - -def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout=None): - # [START pubsub_dead_letter_delivery_attempt] - from concurrent.futures import TimeoutError - from google.cloud import pubsub_v1 - - # TODO(developer) - # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_id) - - def callback(message): - print("Received message: {}".format(message)) - print("With delivery attempts: {}".format(message.delivery_attempt)) - message.ack() - - streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) - - # Wrap subscriber in a 'with' block to automatically call close() when done. - with subscriber: - # When `timeout` is not set, result() will block indefinitely, - # unless an exception is encountered first. 
- try: - streaming_pull_future.result(timeout=timeout) - except TimeoutError: - streaming_pull_future.cancel() - # [END pubsub_dead_letter_delivery_attempt] - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("project_id", help="Your Google Cloud project ID") - - subparsers = parser.add_subparsers(dest="command") - list_in_topic_parser = subparsers.add_parser( - "list-in-topic", help=list_subscriptions_in_topic.__doc__ - ) - list_in_topic_parser.add_argument("topic_id") - - list_in_project_parser = subparsers.add_parser( - "list-in-project", help=list_subscriptions_in_project.__doc__ - ) - - create_parser = subparsers.add_parser("create", help=create_subscription.__doc__) - create_parser.add_argument("topic_id") - create_parser.add_argument("subscription_id") - - create_with_dead_letter_policy_parser = subparsers.add_parser( - "create-with-dead-letter-policy", - help=create_subscription_with_dead_letter_topic.__doc__, - ) - create_with_dead_letter_policy_parser.add_argument("topic_id") - create_with_dead_letter_policy_parser.add_argument("subscription_id") - create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_id") - - create_push_parser = subparsers.add_parser( - "create-push", help=create_push_subscription.__doc__ - ) - create_push_parser.add_argument("topic_id") - create_push_parser.add_argument("subscription_id") - create_push_parser.add_argument("endpoint") - - delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) - delete_parser.add_argument("subscription_id") - - update_push_parser = subparsers.add_parser( - "update-push", help=update_push_subscription.__doc__ - ) - update_push_parser.add_argument("topic_id") - update_push_parser.add_argument("subscription_id") - update_push_parser.add_argument("endpoint") - - update_dead_letter_policy_parser = subparsers.add_parser( - "update-dead-letter-policy", 
- help=update_subscription_with_dead_letter_policy.__doc__, - ) - update_dead_letter_policy_parser.add_argument("topic_id") - update_dead_letter_policy_parser.add_argument("subscription_id") - update_dead_letter_policy_parser.add_argument("dead_letter_topic_id") - - remove_dead_letter_policy_parser = subparsers.add_parser( - "remove-dead-letter-policy", help=remove_dead_letter_policy.__doc__ - ) - remove_dead_letter_policy_parser.add_argument("topic_id") - remove_dead_letter_policy_parser.add_argument("subscription_id") - - receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) - receive_parser.add_argument("subscription_id") - receive_parser.add_argument("timeout", default=None, type=float, nargs="?") - - receive_with_custom_attributes_parser = subparsers.add_parser( - "receive-custom-attributes", - help=receive_messages_with_custom_attributes.__doc__, - ) - receive_with_custom_attributes_parser.add_argument("subscription_id") - receive_with_custom_attributes_parser.add_argument( - "timeout", default=None, type=float, nargs="?" - ) - - receive_with_flow_control_parser = subparsers.add_parser( - "receive-flow-control", help=receive_messages_with_flow_control.__doc__ - ) - receive_with_flow_control_parser.add_argument("subscription_id") - receive_with_flow_control_parser.add_argument( - "timeout", default=None, type=float, nargs="?" 
- ) - - synchronous_pull_parser = subparsers.add_parser( - "receive-synchronously", help=synchronous_pull.__doc__ - ) - synchronous_pull_parser.add_argument("subscription_id") - - synchronous_pull_with_lease_management_parser = subparsers.add_parser( - "receive-synchronously-with-lease", - help=synchronous_pull_with_lease_management.__doc__, - ) - synchronous_pull_with_lease_management_parser.add_argument("subscription_id") - - listen_for_errors_parser = subparsers.add_parser( - "listen-for-errors", help=listen_for_errors.__doc__ - ) - listen_for_errors_parser.add_argument("subscription_id") - listen_for_errors_parser.add_argument( - "timeout", default=None, type=float, nargs="?" - ) - - receive_messages_with_delivery_attempts_parser = subparsers.add_parser( - "receive-messages-with-delivery-attempts", - help=receive_messages_with_delivery_attempts.__doc__, - ) - receive_messages_with_delivery_attempts_parser.add_argument("subscription_id") - receive_messages_with_delivery_attempts_parser.add_argument( - "timeout", default=None, type=float, nargs="?" 
- ) - - args = parser.parse_args() - - if args.command == "list-in-topic": - list_subscriptions_in_topic(args.project_id, args.topic_id) - elif args.command == "list-in-project": - list_subscriptions_in_project(args.project_id) - elif args.command == "create": - create_subscription(args.project_id, args.topic_id, args.subscription_id) - elif args.command == "create-with-dead-letter-policy": - create_subscription_with_dead_letter_topic( - args.project_id, - args.topic_id, - args.subscription_id, - args.dead_letter_topic_id, - ) - elif args.command == "create-push": - create_push_subscription( - args.project_id, args.topic_id, args.subscription_id, args.endpoint, - ) - elif args.command == "delete": - delete_subscription(args.project_id, args.subscription_id) - elif args.command == "update-push": - update_push_subscription( - args.project_id, args.topic_id, args.subscription_id, args.endpoint, - ) - elif args.command == "update-dead-letter-policy": - update_subscription_with_dead_letter_policy( - args.project_id, - args.topic_id, - args.subscription_id, - args.dead_letter_topic_id, - ) - elif args.command == "remove-dead-letter-policy": - remove_dead_letter_policy(args.project_id, args.topic_id, args.subscription_id) - elif args.command == "receive": - receive_messages(args.project_id, args.subscription_id, args.timeout) - elif args.command == "receive-custom-attributes": - receive_messages_with_custom_attributes( - args.project_id, args.subscription_id, args.timeout - ) - elif args.command == "receive-flow-control": - receive_messages_with_flow_control( - args.project_id, args.subscription_id, args.timeout - ) - elif args.command == "receive-synchronously": - synchronous_pull(args.project_id, args.subscription_id) - elif args.command == "receive-synchronously-with-lease": - synchronous_pull_with_lease_management(args.project_id, args.subscription_id) - elif args.command == "listen-for-errors": - listen_for_errors(args.project_id, args.subscription_id, args.timeout) 
- elif args.command == "receive-messages-with-delivery-attempts": - receive_messages_with_delivery_attempts( - args.project_id, args.subscription_id, args.timeout - ) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py deleted file mode 100644 index a7f7c139c258..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ /dev/null @@ -1,341 +0,0 @@ -# Copyright 2016 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import uuid - -import backoff -from google.cloud import pubsub_v1 -import pytest - -import subscriber - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "subscription-test-topic-" + UUID -DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID -SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID -SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID -SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID -SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID -ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) -NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) - - -@pytest.fixture(scope="module") -def publisher_client(): - yield pubsub_v1.PublisherClient() - - -@pytest.fixture(scope="module") -def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - topic = publisher_client.get_topic(topic_path) - except: # noqa - topic = publisher_client.create_topic(topic_path) - - yield topic.name - - publisher_client.delete_topic(topic.name) - - -@pytest.fixture(scope="module") -def dead_letter_topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) - - try: - dead_letter_topic = publisher_client.get_topic(topic_path) - except: # noqa - dead_letter_topic = publisher_client.create_topic(topic_path) - - yield dead_letter_topic.name - - publisher_client.delete_topic(dead_letter_topic.name) - - -@pytest.fixture(scope="module") -def subscriber_client(): - subscriber_client = pubsub_v1.SubscriberClient() - yield subscriber_client - subscriber_client.close() - - -@pytest.fixture(scope="module") -def subscription_admin(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) - - try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - 
subscription_path, topic=topic - ) - - yield subscription.name - - -@pytest.fixture(scope="module") -def subscription_sync(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) - - try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic - ) - - yield subscription.name - - subscriber_client.delete_subscription(subscription.name) - - -@pytest.fixture(scope="module") -def subscription_async(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) - - try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic - ) - - yield subscription.name - - subscriber_client.delete_subscription(subscription.name) - - -@pytest.fixture(scope="module") -def subscription_dlq(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) - - try: - subscription = subscriber_client.get_subscription(subscription_path) - except: # noqa - subscription = subscriber_client.create_subscription( - subscription_path, topic=topic - ) - - yield subscription.name - - subscriber_client.delete_subscription(subscription.name) - - -def test_list_in_topic(subscription_admin, capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) - out, _ = capsys.readouterr() - assert subscription_admin in out - - eventually_consistent_test() - - -def test_list_in_project(subscription_admin, capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - subscriber.list_subscriptions_in_project(PROJECT) - out, _ = capsys.readouterr() - assert 
subscription_admin in out - - eventually_consistent_test() - - -def test_create(subscriber_client): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) - - try: - subscriber_client.delete_subscription(subscription_path) - except Exception: - pass - - subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN) - - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) - - eventually_consistent_test() - - -def test_create_subscription_with_dead_letter_policy( - subscriber_client, publisher_client, topic, dead_letter_topic, capsys -): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) - dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) - - try: - subscriber_client.delete_subscription(subscription_path) - except Exception: - pass - - subscriber.create_subscription_with_dead_letter_topic( - PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC - ) - - out, _ = capsys.readouterr() - assert "Subscription created: " + subscription_path in out - assert "It will forward dead letter messages to: " + dead_letter_topic_path in out - assert "After 10 delivery attempts." 
in out - - -def test_create_push(subscriber_client): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) - try: - subscriber_client.delete_subscription(subscription_path) - except Exception: - pass - - subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) - - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) - - eventually_consistent_test() - - -def test_update(subscriber_client, subscription_admin, capsys): - subscriber.update_push_subscription( - PROJECT, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT - ) - - out, _ = capsys.readouterr() - assert "Subscription updated" in out - - -def test_update_dead_letter_policy( - subscriber_client, topic, subscription_dlq, dead_letter_topic, capsys -): - _ = subscriber.update_subscription_with_dead_letter_policy( - PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC - ) - - out, _ = capsys.readouterr() - assert "max_delivery_attempts: 20" in out - - -def test_delete(subscriber_client, subscription_admin): - subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) - - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - with pytest.raises(Exception): - subscriber_client.get_subscription(subscription_admin) - - eventually_consistent_test() - - -def _publish_messages(publisher_client, topic): - for n in range(5): - data = u"message {}".format(n).encode("utf-8") - publish_future = publisher_client.publish( - topic, data=data, origin="python-sample" - ) - publish_future.result() - - -def test_receive(publisher_client, topic, subscription_async, capsys): - _publish_messages(publisher_client, topic) - - subscriber.receive_messages(PROJECT, SUBSCRIPTION_ASYNC, 5) - - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out - assert "message" in out - - -def 
test_receive_with_custom_attributes( - publisher_client, topic, subscription_async, capsys -): - - _publish_messages(publisher_client, topic) - - subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5) - - out, _ = capsys.readouterr() - assert "message" in out - assert "origin" in out - assert "python-sample" in out - - -def test_receive_with_flow_control(publisher_client, topic, subscription_async, capsys): - - _publish_messages(publisher_client, topic) - - subscriber.receive_messages_with_flow_control(PROJECT, SUBSCRIPTION_ASYNC, 5) - - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out - assert "message" in out - - -def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): - _publish_messages(publisher_client, topic) - - subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC) - - out, _ = capsys.readouterr() - assert "Done." in out - - -def test_receive_synchronously_with_lease( - publisher_client, topic, subscription_sync, capsys -): - _publish_messages(publisher_client, topic) - - subscriber.synchronous_pull_with_lease_management(PROJECT, SUBSCRIPTION_SYNC) - - out, _ = capsys.readouterr() - assert "Done." 
in out - - -def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): - - _publish_messages(publisher_client, topic) - - subscriber.listen_for_errors(PROJECT, SUBSCRIPTION_ASYNC, 5) - - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out - assert "threw an exception" in out - - -def test_receive_with_delivery_attempts( - publisher_client, topic, subscription_dlq, dead_letter_topic, capsys -): - _publish_messages(publisher_client, topic) - - subscriber.receive_messages_with_delivery_attempts(PROJECT, SUBSCRIPTION_DLQ, 10) - - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_dlq in out - assert "Received message: " in out - assert "message 4" in out - assert "With delivery attempts: " in out - - -def test_remove_dead_letter_policy(subscriber_client, subscription_dlq): - subscription_after_update = subscriber.remove_dead_letter_policy( - PROJECT, TOPIC, SUBSCRIPTION_DLQ - ) - - assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 0e2c96e42de2..b44cc0acff57 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -18,7 +18,6 @@ import synthtool as s from synthtool import gcp -from synthtool.languages import python gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() @@ -267,16 +266,8 @@ def _merge_dict(d1, d2): # Add templated files # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( - unit_cov_level=97, - cov_level=99, - system_test_external_dependencies=["psutil"], - samples=True, + unit_cov_level=97, cov_level=99, system_test_external_dependencies=["psutil"], ) s.move(templated_files) -# ---------------------------------------------------------------------------- -# Samples templates -# 
---------------------------------------------------------------------------- -python.py_samples() - s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 44320637b4edd8fd1786f1e3eb6a25c6a84c4b98 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 24 Aug 2016 14:51:23 -0700 Subject: [PATCH 0478/1197] Add pubsub publisher and subscriber samples Change-Id: I38b90c10aef72c37188c4520897302933b9d2ea7 --- .../samples/snippets/README.md | 16 +++ .../samples/snippets/publisher.py | 107 +++++++++++++++ .../samples/snippets/publisher_test.py | 67 +++++++++ .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 127 ++++++++++++++++++ .../samples/snippets/subscriber_test.py | 83 ++++++++++++ 6 files changed, 401 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-pubsub/samples/snippets/README.md create mode 100644 packages/google-cloud-pubsub/samples/snippets/publisher.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/publisher_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/subscriber.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/subscriber_test.py diff --git a/packages/google-cloud-pubsub/samples/snippets/README.md b/packages/google-cloud-pubsub/samples/snippets/README.md new file mode 100644 index 000000000000..8691c90171bc --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/README.md @@ -0,0 +1,16 @@ +# Google Cloud Pub/Sub Samples + + + + +## Prerequisites + +All samples require a [Google Cloud Project](https://console.cloud.google.com). 
+ +Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication: + + gcloud beta auth application-default login + +Run the samples: + + TODO diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py new file mode 100644 index 000000000000..61387d67ee1c --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on topics +with the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs. 
+""" + +import argparse + +from gcloud import pubsub + + +def list_topics(): + """Lists all Pub/Sub topics in the current project.""" + pubsub_client = pubsub.Client() + + topics = [] + next_page_token = None + while True: + page, next_page_token = pubsub_client.list_topics() + topics.extend(page) + if not next_page_token: + break + + for topic in topics: + print(topic.name) + + +def create_topic(topic_name): + """Create a new Pub/Sub topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + topic.create() + + print('Topic {} created.'.format(topic.name)) + + +def delete_topic(topic_name): + """Deletes an existing Pub/Sub topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + topic.delete() + + print('Topic {} deleted.'.format(topic.name)) + + +def publish_message(topic_name, data): + """Publishes a message to a Pub/Sub topic with the given data.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + # Data must be a bytestring + data = data.encode('utf-8') + + message_id = topic.publish(data) + + print('Message {} published.'.format(message_id)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + subparsers = parser.add_subparsers(dest='command') + subparsers.add_parser('list', help=list_topics.__doc__) + + create_parser = subparsers.add_parser('create', help=create_topic.__doc__) + create_parser.add_argument('topic_name') + + delete_parser = subparsers.add_parser('delete', help=delete_topic.__doc__) + delete_parser.add_argument('topic_name') + + publish_parser = subparsers.add_parser( + 'publish', help=publish_message.__doc__) + publish_parser.add_argument('topic_name') + publish_parser.add_argument('data') + + args = parser.parse_args() + + if args.command == 'list': + list_topics() + elif args.command == 'create': + create_topic(args.topic_name) + elif args.command == 
'delete': + delete_topic(args.topic_name) + elif args.command == 'publish': + publish_message(args.topic_name, args.data) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py new file mode 100644 index 000000000000..3cce3c962005 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -0,0 +1,67 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from gcloud import pubsub +from gcp.testing import eventually_consistent +import pytest + +import publisher + +TEST_TOPIC = 'publisher-test-topic' + + +@pytest.fixture +def test_topic(): + client = pubsub.Client() + topic = client.topic(TEST_TOPIC) + yield topic + if topic.exists(): + topic.delete() + + +def test_list(test_topic, capsys): + test_topic.create() + + @eventually_consistent.call + def _(): + publisher.list_topics() + out, _ = capsys.readouterr() + assert test_topic.name in out + + +def test_create(test_topic): + publisher.create_topic(test_topic.name) + + @eventually_consistent.call + def _(): + assert test_topic.exists() + + +def test_delete(test_topic): + test_topic.create() + + publisher.delete_topic(test_topic.name) + + @eventually_consistent.call + def _(): + assert not test_topic.exists() + + +def test_publish(test_topic, capsys): + test_topic.create() + + publisher.publish_message(test_topic.name, 'hello') + + out, _ = capsys.readouterr() + assert 'published' in out diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 42ab449b1ba1..2beeafe63a8a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.6.1 +gcloud==0.18.1 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py new file mode 100644 index 000000000000..2b3371cd6165 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on +subscriptions with the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs. +""" + +import argparse + +from gcloud import pubsub + + +def list_subscriptions(topic_name): + """Lists all subscriptions for a given topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + subscriptions = [] + next_page_token = None + while True: + page, next_page_token = topic.list_subscriptions() + subscriptions.extend(page) + if not next_page_token: + break + + for subscription in subscriptions: + print(subscription.name) + + +def create_subscription(topic_name, subscription_name): + """Create a new pull subscription on the given topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + subscription = topic.subscription(subscription_name) + subscription.create() + + print('Subscription {} created on topic {}.'.format( + subscription.name, topic.name)) + + +def delete_subscription(topic_name, subscription_name): + """Deletes an existing Pub/Sub topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + subscription = topic.subscription(subscription_name) + + subscription.delete() + + print('Subscription {} deleted on topic {}.'.format( + subscription.name, topic.name)) + + +def receive_message(topic_name, subscription_name): + """Receives a message from a pull subscription.""" + pubsub_client = pubsub.Client() + topic = 
pubsub_client.topic(topic_name) + subscription = topic.subscription(subscription_name) + + # Change return_immediately=False to block until messages are + # received. + results = subscription.pull(return_immediately=True) + + print('Received {} messages.'.format(len(results))) + + for ack_id, message in results: + print('* {}: {}, {}'.format( + message.message_id, message.data, message.attributes)) + + # Acknowledge received messages. If you do not acknowledge, Pub/Sub will + # redeliver the message. + if results: + subscription.acknowledge([ack_id for ack_id, message in results]) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + subparsers = parser.add_subparsers(dest='command') + list_parser = subparsers.add_parser( + 'list', help=list_subscriptions.__doc__) + list_parser.add_argument('topic_name') + + create_parser = subparsers.add_parser( + 'create', help=create_subscription.__doc__) + create_parser.add_argument('topic_name') + create_parser.add_argument('subscription_name') + + delete_parser = subparsers.add_parser( + 'delete', help=delete_subscription.__doc__) + delete_parser.add_argument('topic_name') + delete_parser.add_argument('subscription_name') + + receive_parser = subparsers.add_parser( + 'receive', help=receive_message.__doc__) + receive_parser.add_argument('topic_name') + receive_parser.add_argument('subscription_name') + + args = parser.parse_args() + + if args.command == 'list': + list_subscriptions(args.topic_name) + elif args.command == 'create': + create_subscription(args.topic_name, args.subscription_name) + elif args.command == 'delete': + delete_subscription(args.topic_name, args.subscription_name) + elif args.command == 'receive': + receive_message(args.topic_name, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py new 
file mode 100644 index 000000000000..6335aa9733cd --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -0,0 +1,83 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from gcloud import pubsub +from gcp.testing import eventually_consistent +import pytest + +import subscriber + +TEST_TOPIC = 'subscription-test-topic' +TEST_SUBSCRIPTION = 'subscription-test-subscription' + + +@pytest.fixture +def test_topic(): + client = pubsub.Client() + topic = client.topic(TEST_TOPIC) + topic.create() + yield topic + if topic.exists(): + topic.delete() + + +@pytest.fixture +def test_subscription(test_topic): + subscription = test_topic.subscription(TEST_SUBSCRIPTION) + yield subscription + if subscription.exists(): + subscription.delete() + + +def test_list(test_subscription, capsys): + test_subscription.create() + + @eventually_consistent.call + def _(): + subscriber.list_subscriptions(test_subscription.topic.name) + out, _ = capsys.readouterr() + assert test_subscription.name in out + + +def test_create(test_subscription): + subscriber.create_subscription( + test_subscription.topic.name, test_subscription.name) + + @eventually_consistent.call + def _(): + assert test_subscription.exists() + + +def test_delete(test_subscription): + test_subscription.create() + + subscriber.delete_subscription( + test_subscription.topic.name, test_subscription.name) + + @eventually_consistent.call + def _(): + 
assert not test_subscription.exists() + + +def test_receive(test_subscription, capsys): + topic = test_subscription.topic + test_subscription.create() + + topic.publish('hello'.encode('utf-8')) + + @eventually_consistent.call + def _(): + subscriber.receive_message(topic.name, test_subscription.name) + out, _ = capsys.readouterr() + assert 'hello' in out From 1a810de0ef4f18bde606b29c075fa3de6dee71a8 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 24 Aug 2016 14:56:47 -0700 Subject: [PATCH 0479/1197] Update readme Change-Id: Ie95e2e1556a8d97b5321dc86bf8de431aa36a2d5 --- packages/google-cloud-pubsub/samples/snippets/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.md b/packages/google-cloud-pubsub/samples/snippets/README.md index 8691c90171bc..00c7d2f7ac95 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.md +++ b/packages/google-cloud-pubsub/samples/snippets/README.md @@ -13,4 +13,5 @@ Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication: Run the samples: - TODO + python publisher.py -h + python subscriber.py -h From 07046051b5273c35c6b91815fce0bb625c8b801b Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 25 Aug 2016 11:26:06 -0700 Subject: [PATCH 0480/1197] Add pubsub iam samples Change-Id: I12c407d3cdf4a3f9736dfaeca6f20b31df6d310a --- .../samples/snippets/iam.py | 182 ++++++++++++++++++ .../samples/snippets/iam_test.py | 102 ++++++++++ 2 files changed, 284 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/snippets/iam.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/iam_test.py diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py new file mode 100644 index 000000000000..3828195bce00 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python + +# Copyright 2016 
Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic operations on IAM +policies with the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs. +""" + +import argparse + +from gcloud import pubsub + + +def get_topic_policy(topic_name): + """Prints the IAM policy for the given topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + policy = topic.get_iam_policy() + + print('Policy for topic {}:'.format(topic.name)) + print('Version: {}'.format(policy.version)) + print('Owners: {}'.format(policy.owners)) + print('Editors: {}'.format(policy.editors)) + print('Viewers: {}'.format(policy.viewers)) + print('Publishers: {}'.format(policy.publishers)) + print('Subscribers: {}'.format(policy.subscribers)) + + +def get_subscription_policy(topic_name, subscription_name): + """Prints the IAM policy for the given subscription.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + subscription = topic.subscription(subscription_name) + + policy = subscription.get_iam_policy() + + print('Policy for subscription {} on topic {}:'.format( + subscription.name, topic.name)) + print('Version: {}'.format(policy.version)) + print('Owners: {}'.format(policy.owners)) + print('Editors: {}'.format(policy.editors)) + print('Viewers: {}'.format(policy.viewers)) + 
print('Publishers: {}'.format(policy.publishers)) + print('Subscribers: {}'.format(policy.subscribers)) + + +def set_topic_policy(topic_name): + """Sets the IAM policy for a topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + policy = topic.get_iam_policy() + + # Add all users as viewers. + policy.viewers.add(policy.all_users()) + # Add a group as editors. + policy.editors.add(policy.group('cloud-logs@google.com')) + + # Set the policy + topic.set_iam_policy(policy) + + print('IAM policy for topic {} set.'.format(topic.name)) + + +def set_subscription_policy(topic_name, subscription_name): + """Sets the IAM policy for a topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + subscription = topic.subscription(subscription_name) + policy = subscription.get_iam_policy() + + # Add all users as viewers. + policy.viewers.add(policy.all_users()) + # Add a group as editors. + policy.editors.add(policy.group('cloud-logs@google.com')) + + # Set the policy + subscription.set_iam_policy(policy) + + print('IAM policy for subscription {} on topic {} set.'.format( + topic.name, subscription.name)) + + +def check_topic_permissions(topic_name): + """Checks to which permissions are available on the given topic.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + + permissions_to_check = [ + 'pubsub.topics.publish', + 'pubsub.topics.update' + ] + + allowed_permissions = topic.check_iam_permissions(permissions_to_check) + + print('Allowed permissions for topic {}: {}'.format( + topic.name, allowed_permissions)) + + +def check_subscription_permissions(topic_name, subscription_name): + """Checks to which permissions are available on the given subscription.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(topic_name) + subscription = topic.subscription(subscription_name) + + permissions_to_check = [ + 'pubsub.subscriptions.consume', + 'pubsub.subscriptions.update' + ] + + 
allowed_permissions = subscription.check_iam_permissions( + permissions_to_check) + + print('Allowed permissions for subscription {} on topic {}: {}'.format( + subscription.name, topic.name, allowed_permissions)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + + subparsers = parser.add_subparsers(dest='command') + + get_topic_policy_parser = subparsers.add_parser( + 'get-topic-policy', help=get_topic_policy.__doc__) + get_topic_policy_parser.add_argument('topic_name') + + get_subscription_policy_parser = subparsers.add_parser( + 'get-subscription-policy', help=get_subscription_policy.__doc__) + get_subscription_policy_parser.add_argument('topic_name') + get_subscription_policy_parser.add_argument('subscription_name') + + set_topic_policy_parser = subparsers.add_parser( + 'set-topic-policy', help=set_topic_policy.__doc__) + set_topic_policy_parser.add_argument('topic_name') + + set_subscription_policy_parser = subparsers.add_parser( + 'set-subscription-policy', help=set_subscription_policy.__doc__) + set_subscription_policy_parser.add_argument('topic_name') + set_subscription_policy_parser.add_argument('subscription_name') + + check_topic_permissions_parser = subparsers.add_parser( + 'check-topic-permissions', help=check_topic_permissions.__doc__) + check_topic_permissions_parser.add_argument('topic_name') + + check_subscription_permissions_parser = subparsers.add_parser( + 'check-subscription-permissions', + help=check_subscription_permissions.__doc__) + check_subscription_permissions_parser.add_argument('topic_name') + check_subscription_permissions_parser.add_argument('subscription_name') + + args = parser.parse_args() + + if args.command == 'get-topic-policy': + get_topic_policy(args.topic_name) + elif args.command == 'get-subscription-policy': + get_subscription_policy(args.topic_name, args.subscription_name) + elif args.command == 'set-topic-policy': + 
set_topic_policy(args.topic_name) + elif args.command == 'set-subscription-policy': + set_subscription_policy(args.topic_name, args.subscription_name) + elif args.command == 'check-topic-permissions': + check_topic_permissions(args.topic_name) + elif args.command == 'check-subscription-permissions': + check_subscription_permissions(args.topic_name, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py new file mode 100644 index 000000000000..43f53cd3d3f3 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -0,0 +1,102 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from gcloud import pubsub +from gcp.testing import eventually_consistent +import pytest + +import iam + +TEST_TOPIC = 'iam-test-topic' +TEST_SUBSCRIPTION = 'iam-test-subscription' + + +@pytest.fixture +def test_topic(): + client = pubsub.Client() + topic = client.topic(TEST_TOPIC) + topic.create() + yield topic + if topic.exists(): + topic.delete() + + +@pytest.fixture +def test_subscription(test_topic): + subscription = test_topic.subscription(TEST_SUBSCRIPTION) + yield subscription + if subscription.exists(): + subscription.delete() + + +def test_get_topic_policy(test_topic, capsys): + iam.get_topic_policy(test_topic.name) + + out, _ = capsys.readouterr() + assert test_topic.name in out + + +def test_get_subscription_policy(test_subscription, capsys): + test_subscription.create() + + iam.get_subscription_policy( + test_subscription.topic.name, + test_subscription.name) + + out, _ = capsys.readouterr() + assert test_subscription.topic.name in out + assert test_subscription.name in out + + +def test_set_topic_policy(test_topic): + iam.set_topic_policy(test_topic.name) + + policy = test_topic.get_iam_policy() + assert policy.viewers + assert policy.editors + + +def test_set_subscription_policy(test_subscription): + test_subscription.create() + + iam.set_subscription_policy( + test_subscription.topic.name, + test_subscription.name) + + policy = test_subscription.get_iam_policy() + assert policy.viewers + assert policy.editors + + +def test_check_topic_permissions(test_topic, capsys): + iam.check_topic_permissions(test_topic.name) + + out, _ = capsys.readouterr() + + assert test_topic.name in out + assert 'pubsub.topics.publish' in out + + +def test_check_subscription_permissions(test_subscription, capsys): + test_subscription.create() + + iam.check_subscription_permissions( + test_subscription.topic.name, + test_subscription.name) + + out, _ = capsys.readouterr() + + assert test_subscription.topic.name in out + assert test_subscription.name in out + assert 
'pubsub.subscriptions.consume' in out From b1ccf0df31c911367f14e149d0e0fe53869c3373 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 1 Sep 2016 11:01:09 -0700 Subject: [PATCH 0481/1197] Fix lint issue Change-Id: Ifebdab0b974cc3d3fe8900a23ca7416fed9e026a --- packages/google-cloud-pubsub/samples/snippets/iam_test.py | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 43f53cd3d3f3..0e662fcb526c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -13,7 +13,6 @@ # limitations under the License. from gcloud import pubsub -from gcp.testing import eventually_consistent import pytest import iam From 028bd5a12262b26b093c2c27e2b5a004c9c647ce Mon Sep 17 00:00:00 2001 From: DPE bot Date: Fri, 23 Sep 2016 09:48:46 -0700 Subject: [PATCH 0482/1197] Auto-update dependencies. [(#540)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/540) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 2beeafe63a8a..dfb42aaaaaf2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.18.1 +gcloud==0.18.2 From d722b9869a61e126b000e52eccc4c61db540329a Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 26 Sep 2016 11:34:45 -0700 Subject: [PATCH 0483/1197] Auto-update dependencies. 
[(#542)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/542) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index dfb42aaaaaf2..97a207d3aad0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.18.2 +gcloud==0.18.3 From badbf645cc1e35677e84801b00f651bfcf3b2c09 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 29 Sep 2016 20:51:47 -0700 Subject: [PATCH 0484/1197] Move to google-cloud [(#544)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/544) --- packages/google-cloud-pubsub/samples/snippets/iam.py | 2 +- packages/google-cloud-pubsub/samples/snippets/iam_test.py | 2 +- packages/google-cloud-pubsub/samples/snippets/publisher.py | 2 +- packages/google-cloud-pubsub/samples/snippets/publisher_test.py | 2 +- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 3828195bce00..1f97e4570e7a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -23,7 +23,7 @@ import argparse -from gcloud import pubsub +from google.cloud import pubsub def get_topic_policy(topic_name): diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 0e662fcb526c..7adf87056e6d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gcloud import pubsub +from google.cloud import pubsub import pytest import iam diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 61387d67ee1c..d527b54cb1c4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -23,7 +23,7 @@ import argparse -from gcloud import pubsub +from google.cloud import pubsub def list_topics(): diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 3cce3c962005..6c0a9be004df 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from gcloud import pubsub from gcp.testing import eventually_consistent +from google.cloud import pubsub import pytest import publisher diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 97a207d3aad0..7aa8ce1fbd11 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -gcloud==0.18.3 +google-cloud-pubsub==0.20.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 2b3371cd6165..b4fdd95738d1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -23,7 +23,7 @@ import argparse -from gcloud import pubsub +from google.cloud import pubsub def list_subscriptions(topic_name): diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 6335aa9733cd..3b51d48afb8c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from gcloud import pubsub from gcp.testing import eventually_consistent +from google.cloud import pubsub import pytest import subscriber From f4d3af7084705fe0a31bd72d607b2f48f0d09383 Mon Sep 17 00:00:00 2001 From: Jason Dobry Date: Wed, 5 Oct 2016 09:56:04 -0700 Subject: [PATCH 0485/1197] Add new "quickstart" samples [(#547)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/547) --- .../samples/snippets/quickstart.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart.py diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py new file mode 100644 index 000000000000..fdcb45003b86 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +def run_quickstart(): + # [START pubsub_quickstart] + # Imports the Google Cloud client library + from google.cloud import pubsub + + # Instantiates a client + pubsub_client = pubsub.Client() + + # The name for the new topic + topic_name = 'my-new-topic' + + # Prepares the new topic + topic = pubsub_client.topic(topic_name) + + # Creates the new topic + topic.create() + + print('Topic {} created.'.format(topic.name)) + # [END pubsub_quickstart] + + +if __name__ == '__main__': + run_quickstart() From 63cde3f7c674092d5a49a912b8a936eef77aa84c Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 12 Oct 2016 10:48:57 -0700 Subject: [PATCH 0486/1197] Quickstart tests [(#569)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/569) * Add tests for quickstarts * Update secrets --- .../samples/snippets/quickstart_test.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart_test.py diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py new file mode 100644 index 000000000000..bbb3bd75f257 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -0,0 +1,45 @@ +# Copyright 2016 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from google.cloud import pubsub +import pytest + +import quickstart + + +# Must match the dataset listed in quickstart.py (there's no easy way to +# extract this). +TOPIC_NAME = 'my-new-topic' + + +@pytest.fixture +def temporary_topic(): + """Fixture that ensures the test dataset does not exist before or + after a test.""" + pubsub_client = pubsub.Client() + topic = pubsub_client.topic(TOPIC_NAME) + + if topic.exists(): + topic.delete() + + yield + + if topic.exists(): + topic.delete() + + +def test_quickstart(capsys, temporary_topic): + quickstart.run_quickstart() + out, _ = capsys.readouterr() + assert TOPIC_NAME in out From d9c4c977adfcfb18648a60e6952d6a3b9bd24e58 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 24 Oct 2016 11:03:17 -0700 Subject: [PATCH 0487/1197] Generate readmes for most service samples [(#599)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/599) --- .../samples/snippets/README.md | 17 -- .../samples/snippets/README.rst | 203 ++++++++++++++++++ .../samples/snippets/README.rst.in | 28 +++ 3 files changed, 231 insertions(+), 17 deletions(-) delete mode 100644 packages/google-cloud-pubsub/samples/snippets/README.md create mode 100644 packages/google-cloud-pubsub/samples/snippets/README.rst create mode 100644 packages/google-cloud-pubsub/samples/snippets/README.rst.in diff --git a/packages/google-cloud-pubsub/samples/snippets/README.md b/packages/google-cloud-pubsub/samples/snippets/README.md deleted file mode 100644 index 00c7d2f7ac95..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/README.md +++ /dev/null @@ -1,17 +0,0 @@ -# Google Cloud Pub/Sub Samples - - - - -## Prerequisites - -All samples require a [Google Cloud Project](https://console.cloud.google.com). 
- -Use the [Cloud SDK](https://cloud.google.com/sdk) to provide authentication: - - gcloud beta auth application-default login - -Run the samples: - - python publisher.py -h - python subscriber.py -h diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst new file mode 100644 index 000000000000..8ea32bab0be7 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -0,0 +1,203 @@ +.. This file is automatically generated. Do not edit this file directly. + +Google Cloud Pub/Sub Python Samples +=============================================================================== + +This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. + + + + +.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + +Setup +------------------------------------------------------------------------------- + + +Authentication +++++++++++++++ + +Authentication is typically done through `Application Default Credentials`_, +which means you do not have to change the code to authenticate as long as +your environment has credentials. You have a few options for setting up +authentication: + +#. When running locally, use the `Google Cloud SDK`_ + + .. code-block:: bash + + gcloud beta auth application-default login + + +#. When running on App Engine or Compute Engine, credentials are already + set-up. However, you may need to configure your Compute Engine instance + with `additional scopes`_. + +#. You can create a `Service Account key file`_. This file can be used to + authenticate to Google Cloud Platform services from any environment. To use + the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to + the path to the key file, for example: + + .. 
code-block:: bash + + export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json + +.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow +.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using +.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount + +Install Dependencies +++++++++++++++++++++ + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ + +Samples +------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + +Publisher ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python publisher.py + + usage: publisher.py [-h] {list,create,delete,publish} ... + + This application demonstrates how to perform basic operations on topics + with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + {list,create,delete,publish} + list Lists all Pub/Sub topics in the current project. + create Create a new Pub/Sub topic. + delete Deletes an existing Pub/Sub topic. + publish Publishes a message to a Pub/Sub topic with the given + data. 
+ + optional arguments: + -h, --help show this help message and exit + + +Subscribers ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python subscriber.py + + usage: subscriber.py [-h] {list,create,delete,receive} ... + + This application demonstrates how to perform basic operations on + subscriptions with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + {list,create,delete,receive} + list Lists all subscriptions for a given topic. + create Create a new pull subscription on the given topic. + delete Deletes an existing Pub/Sub topic. + receive Receives a message from a pull subscription. + + optional arguments: + -h, --help show this help message and exit + + +Identity and Access Management ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +To run this sample: + +.. code-block:: bash + + $ python iam.py + + usage: iam.py [-h] + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + ... + + This application demonstrates how to perform basic operations on IAM + policies with the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + get-topic-policy Prints the IAM policy for the given topic. + get-subscription-policy + Prints the IAM policy for the given subscription. + set-topic-policy Sets the IAM policy for a topic. + set-subscription-policy + Sets the IAM policy for a topic. + check-topic-permissions + Checks to which permissions are available on the given + topic. 
+ check-subscription-permissions + Checks to which permissions are available on the given + subscription. + + optional arguments: + -h, --help show this help message and exit + + + + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst.in b/packages/google-cloud-pubsub/samples/snippets/README.rst.in new file mode 100644 index 000000000000..6a9fd00c722b --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst.in @@ -0,0 +1,28 @@ +# This file is used to generate README.rst + +product: + name: Google Cloud Pub/Sub + short_name: Cloud Pub/Sub + url: https://cloud.google.com/pubsub/docs + description: > + `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that + allows you to send and receive messages between independent applications. 
+ +setup: +- auth +- install_deps + +samples: +- name: Quickstart + file: quickstart.py +- name: Publisher + file: publisher.py + show_help: true +- name: Subscribers + file: subscriber.py + show_help: true +- name: Identity and Access Management + file: iam.py + show_help: true + +cloud_client_library: true From cc9d45fe8754d2586bf8cad2abb474b38fc82378 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 15 Nov 2016 14:58:27 -0800 Subject: [PATCH 0488/1197] Update samples to support latest Google Cloud Python [(#656)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/656) --- .../google-cloud-pubsub/samples/snippets/publisher.py | 10 +--------- .../samples/snippets/requirements.txt | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber.py | 10 +--------- 3 files changed, 3 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d527b54cb1c4..465047560565 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -30,15 +30,7 @@ def list_topics(): """Lists all Pub/Sub topics in the current project.""" pubsub_client = pubsub.Client() - topics = [] - next_page_token = None - while True: - page, next_page_token = pubsub_client.list_topics() - topics.extend(page) - if not next_page_token: - break - - for topic in topics: + for topic in pubsub_client.list_topics(): print(topic.name) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 7aa8ce1fbd11..f2fc2b4a2aa0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.20.0 +google-cloud-pubsub==0.21.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py 
b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index b4fdd95738d1..9a564496e424 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -31,15 +31,7 @@ def list_subscriptions(topic_name): pubsub_client = pubsub.Client() topic = pubsub_client.topic(topic_name) - subscriptions = [] - next_page_token = None - while True: - page, next_page_token = topic.list_subscriptions() - subscriptions.extend(page) - if not next_page_token: - break - - for subscription in subscriptions: + for subscription in topic.list_subscriptions(): print(subscription.name) From 6537eee5a91f78f4596de04ac5498a1ce9247960 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 13 Dec 2016 09:54:02 -0800 Subject: [PATCH 0489/1197] Auto-update dependencies. [(#715)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/715) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f2fc2b4a2aa0..b53a39956eb9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.21.0 +google-cloud-pubsub==0.22.0 From 2b2751e4303a22dac66a3357c3b91c0826cee5ff Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 16 Feb 2017 13:39:16 -0800 Subject: [PATCH 0490/1197] Fix pubusb tests Change-Id: I7dfe60b0f1240dc58a664968fd97ca5a8fa1109d --- packages/google-cloud-pubsub/samples/snippets/iam_test.py | 8 ++++++-- .../samples/snippets/subscriber_test.py | 8 ++++++-- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 7adf87056e6d..6d9f7d4d2973 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -21,12 +21,16 @@ TEST_SUBSCRIPTION = 'iam-test-subscription' -@pytest.fixture +@pytest.fixture(scope='module') def test_topic(): client = pubsub.Client() topic = client.topic(TEST_TOPIC) - topic.create() + + if not topic.exists(): + topic.create() + yield topic + if topic.exists(): topic.delete() diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 3b51d48afb8c..e1fe2d482114 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -22,12 +22,16 @@ TEST_SUBSCRIPTION = 'subscription-test-subscription' -@pytest.fixture +@pytest.fixture(scope='module') def test_topic(): client = pubsub.Client() topic = client.topic(TEST_TOPIC) - topic.create() + + if not topic.exists(): + topic.create() + yield topic + if topic.exists(): topic.delete() From e6f8918487cfea3bf218a9a0fa60946e32884ed2 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Fri, 10 Mar 2017 21:25:51 -0800 Subject: [PATCH 0491/1197] Auto-update dependencies. 
[(#825)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/825) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index b53a39956eb9..2694ebdd93ec 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.22.0 +google-cloud-pubsub==0.23.0 From 7a44ce4d37a149476552295eee6a43547cb6d1bc Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 4 Apr 2017 09:39:33 -0700 Subject: [PATCH 0492/1197] Auto-update dependencies. [(#876)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/876) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 2694ebdd93ec..787d6c448581 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.23.0 +google-cloud-pubsub==0.24.0 From e3756ca28571b5260a3b1bb06777fc32a8b77de2 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Wed, 12 Apr 2017 15:14:35 -0700 Subject: [PATCH 0493/1197] Fix reference to our testing tools --- packages/google-cloud-pubsub/samples/snippets/publisher_test.py | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 6c0a9be004df..d7ca67098057 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gcp.testing import eventually_consistent +from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub import pytest diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index e1fe2d482114..9f7f5a1bf81d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from gcp.testing import eventually_consistent +from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub import pytest From 28ef39e7ca1d850d9b8dba4f450b0102f86ef42d Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Thu, 27 Apr 2017 09:54:41 -0700 Subject: [PATCH 0494/1197] Re-generate all readmes --- packages/google-cloud-pubsub/samples/snippets/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 8ea32bab0be7..b507e8fc2b9f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -26,7 +26,7 @@ authentication: .. code-block:: bash - gcloud beta auth application-default login + gcloud auth application-default login #. When running on App Engine or Compute Engine, credentials are already From 0fcbd3ad666d6df29a8c749b6526565fa7cea58f Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 1 May 2017 10:49:29 -0700 Subject: [PATCH 0495/1197] Auto-update dependencies. 
[(#922)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/922) * Auto-update dependencies. * Fix pubsub iam samples --- .../google-cloud-pubsub/samples/snippets/iam.py | 16 ++++++++++------ .../samples/snippets/iam_test.py | 2 +- .../samples/snippets/requirements.txt | 2 +- 3 files changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 1f97e4570e7a..66fb6eb7350e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -67,9 +67,11 @@ def set_topic_policy(topic_name): policy = topic.get_iam_policy() # Add all users as viewers. - policy.viewers.add(policy.all_users()) - # Add a group as editors. - policy.editors.add(policy.group('cloud-logs@google.com')) + policy['roles/pubsub.viewer'] = [policy.all_users()] + # Add a group as publisherss. + publishers = policy.get('roles/pubsub.publisher', []) + publishers.append(policy.group('cloud-logs@google.com')) + policy['roles/pubsub.publisher'] = publishers # Set the policy topic.set_iam_policy(policy) @@ -85,9 +87,11 @@ def set_subscription_policy(topic_name, subscription_name): policy = subscription.get_iam_policy() # Add all users as viewers. - policy.viewers.add(policy.all_users()) - # Add a group as editors. - policy.editors.add(policy.group('cloud-logs@google.com')) + policy['roles/viewer'] = [policy.all_users()] + # # Add a group as editors. 
+ editors = policy.get('roles/editor', []) + editors.append(policy.group('cloud-logs@google.com')) + policy['roles/editor'] = editors # Set the policy subscription.set_iam_policy(policy) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 6d9f7d4d2973..f36dbd21402f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -67,7 +67,7 @@ def test_set_topic_policy(test_topic): policy = test_topic.get_iam_policy() assert policy.viewers - assert policy.editors + assert policy['roles/pubsub.publisher'] def test_set_subscription_policy(test_subscription): diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 787d6c448581..1412eed3dcae 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.24.0 +google-cloud-pubsub==0.25.0 From 4f05700fdf05b1140405fa358cf47629851ffa02 Mon Sep 17 00:00:00 2001 From: Bill Prin Date: Tue, 23 May 2017 17:01:25 -0700 Subject: [PATCH 0496/1197] Fix README rst links [(#962)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/962) * Fix README rst links * Update all READMEs --- packages/google-cloud-pubsub/samples/snippets/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index b507e8fc2b9f..0c18e7ff617c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -192,11 +192,11 @@ This sample uses the `Google Cloud Client Library for Python`_. 
You can read the documentation for more details on API usage and use GitHub to `browse the source`_ and `report issues`_. -.. Google Cloud Client Library for Python: +.. _Google Cloud Client Library for Python: https://googlecloudplatform.github.io/google-cloud-python/ -.. browse the source: +.. _browse the source: https://github.com/GoogleCloudPlatform/google-cloud-python -.. report issues: +.. _report issues: https://github.com/GoogleCloudPlatform/google-cloud-python/issues From bd11a323e111e78d51f6eb856aeda9d967b46996 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 27 Jun 2017 12:41:15 -0700 Subject: [PATCH 0497/1197] Auto-update dependencies. [(#1004)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1004) * Auto-update dependencies. * Fix natural language samples * Fix pubsub iam samples * Fix language samples * Fix bigquery samples --- packages/google-cloud-pubsub/samples/snippets/iam.py | 4 ++-- .../google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 66fb6eb7350e..7f35f9d9fc26 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -70,7 +70,7 @@ def set_topic_policy(topic_name): policy['roles/pubsub.viewer'] = [policy.all_users()] # Add a group as publisherss. publishers = policy.get('roles/pubsub.publisher', []) - publishers.append(policy.group('cloud-logs@google.com')) + publishers.add(policy.group('cloud-logs@google.com')) policy['roles/pubsub.publisher'] = publishers # Set the policy @@ -90,7 +90,7 @@ def set_subscription_policy(topic_name, subscription_name): policy['roles/viewer'] = [policy.all_users()] # # Add a group as editors. 
editors = policy.get('roles/editor', []) - editors.append(policy.group('cloud-logs@google.com')) + editors.add(policy.group('cloud-logs@google.com')) policy['roles/editor'] = editors # Set the policy diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 1412eed3dcae..01606e11b50b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.25.0 +google-cloud-pubsub==0.26.0 From 2dfb3d2f056322b149baa3e6b3b9f3913a284007 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 7 Aug 2017 10:04:55 -0700 Subject: [PATCH 0498/1197] Auto-update dependencies. [(#1055)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1055) * Auto-update dependencies. * Explicitly use latest bigtable client Change-Id: Id71e9e768f020730e4ca9514a0d7ebaa794e7d9e * Revert language update for now Change-Id: I8867f154e9a5aae00d0047c9caf880e5e8f50c53 * Remove pdb. 
smh Change-Id: I5ff905fadc026eebbcd45512d4e76e003e3b2b43 --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 01606e11b50b..65c3daf96ff6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.26.0 +google-cloud-pubsub==0.27.0 From 3ea5241c11e8c23fda7d03d497c94a989151d4bf Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 28 Aug 2017 09:15:55 -0700 Subject: [PATCH 0499/1197] Update pubsub samples [(#1092)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1092) --- .../samples/snippets/iam.py | 138 +++++++++--------- .../samples/snippets/iam_test.py | 110 +++++++------- .../samples/snippets/publisher.py | 118 +++++++++++---- .../samples/snippets/publisher_test.py | 76 +++++++--- .../samples/snippets/quickstart.py | 19 ++- .../samples/snippets/quickstart_test.py | 30 ++-- .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 113 ++++++++------ .../samples/snippets/subscriber_test.py | 137 ++++++++++++----- 9 files changed, 460 insertions(+), 283 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 7f35f9d9fc26..b46bc11474e2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -23,115 +23,113 @@ import argparse -from google.cloud import pubsub +from google.cloud import pubsub_v1 -def get_topic_policy(topic_name): +def get_topic_policy(project, topic_name): """Prints the IAM policy for the given topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) + client = pubsub_v1.PublisherClient() + 
topic_path = client.topic_path(project, topic_name) - policy = topic.get_iam_policy() + policy = client.get_iam_policy(topic_path) - print('Policy for topic {}:'.format(topic.name)) - print('Version: {}'.format(policy.version)) - print('Owners: {}'.format(policy.owners)) - print('Editors: {}'.format(policy.editors)) - print('Viewers: {}'.format(policy.viewers)) - print('Publishers: {}'.format(policy.publishers)) - print('Subscribers: {}'.format(policy.subscribers)) + print('Policy for topic {}:'.format(topic_path)) + for binding in policy.bindings: + print('Role: {}, Members: {}'.format(binding.role, binding.members)) -def get_subscription_policy(topic_name, subscription_name): +def get_subscription_policy(project, subscription_name): """Prints the IAM policy for the given subscription.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) - subscription = topic.subscription(subscription_name) + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path(project, subscription_name) - policy = subscription.get_iam_policy() + policy = client.get_iam_policy(subscription_path) - print('Policy for subscription {} on topic {}:'.format( - subscription.name, topic.name)) - print('Version: {}'.format(policy.version)) - print('Owners: {}'.format(policy.owners)) - print('Editors: {}'.format(policy.editors)) - print('Viewers: {}'.format(policy.viewers)) - print('Publishers: {}'.format(policy.publishers)) - print('Subscribers: {}'.format(policy.subscribers)) + print('Policy for subscription {}:'.format(subscription_path)) + for binding in policy.bindings: + print('Role: {}, Members: {}'.format(binding.role, binding.members)) -def set_topic_policy(topic_name): +def set_topic_policy(project, topic_name): """Sets the IAM policy for a topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) - policy = topic.get_iam_policy() + client = pubsub_v1.PublisherClient() + topic_path = client.topic_path(project, 
topic_name) + + policy = client.get_iam_policy(topic_path) # Add all users as viewers. - policy['roles/pubsub.viewer'] = [policy.all_users()] - # Add a group as publisherss. - publishers = policy.get('roles/pubsub.publisher', []) - publishers.add(policy.group('cloud-logs@google.com')) - policy['roles/pubsub.publisher'] = publishers + policy.bindings.add( + role='roles/pubsub.viewer', + members=['allUsers']) + + # Add a group as a publisher. + policy.bindings.add( + role='roles/pubsub.publisher', + members=['group:cloud-logs@google.com']) # Set the policy - topic.set_iam_policy(policy) + policy = client.set_iam_policy(topic_path, policy) - print('IAM policy for topic {} set.'.format(topic.name)) + print('IAM policy for topic {} set: {}'.format( + topic_name, policy)) -def set_subscription_policy(topic_name, subscription_name): +def set_subscription_policy(project, subscription_name): """Sets the IAM policy for a topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) - subscription = topic.subscription(subscription_name) - policy = subscription.get_iam_policy() + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path(project, subscription_name) + + policy = client.get_iam_policy(subscription_path) # Add all users as viewers. - policy['roles/viewer'] = [policy.all_users()] - # # Add a group as editors. - editors = policy.get('roles/editor', []) - editors.add(policy.group('cloud-logs@google.com')) - policy['roles/editor'] = editors + policy.bindings.add( + role='roles/pubsub.viewer', + members=['allUsers']) + + # Add a group as an editor. 
+ policy.bindings.add( + role='roles/editor', + members=['group:cloud-logs@google.com']) # Set the policy - subscription.set_iam_policy(policy) + policy = client.set_iam_policy(subscription_path, policy) - print('IAM policy for subscription {} on topic {} set.'.format( - topic.name, subscription.name)) + print('IAM policy for subscription {} set: {}'.format( + subscription_name, policy)) -def check_topic_permissions(topic_name): +def check_topic_permissions(project, topic_name): """Checks to which permissions are available on the given topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) + client = pubsub_v1.PublisherClient() + topic_path = client.topic_path(project, topic_name) permissions_to_check = [ 'pubsub.topics.publish', 'pubsub.topics.update' ] - allowed_permissions = topic.check_iam_permissions(permissions_to_check) + allowed_permissions = client.test_iam_permissions( + topic_path, permissions_to_check) print('Allowed permissions for topic {}: {}'.format( - topic.name, allowed_permissions)) + topic_path, allowed_permissions)) -def check_subscription_permissions(topic_name, subscription_name): +def check_subscription_permissions(project, subscription_name): """Checks to which permissions are available on the given subscription.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) - subscription = topic.subscription(subscription_name) + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path(project, subscription_name) permissions_to_check = [ 'pubsub.subscriptions.consume', 'pubsub.subscriptions.update' ] - allowed_permissions = subscription.check_iam_permissions( - permissions_to_check) + allowed_permissions = client.test_iam_permissions( + subscription_path, permissions_to_check) - print('Allowed permissions for subscription {} on topic {}: {}'.format( - subscription.name, topic.name, allowed_permissions)) + print('Allowed permissions for subscription {}: {}'.format( + 
subscription_path, allowed_permissions)) if __name__ == '__main__': @@ -139,6 +137,7 @@ def check_subscription_permissions(topic_name, subscription_name): description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) + parser.add_argument('project', help='Your Google Cloud project ID') subparsers = parser.add_subparsers(dest='command') @@ -148,7 +147,6 @@ def check_subscription_permissions(topic_name, subscription_name): get_subscription_policy_parser = subparsers.add_parser( 'get-subscription-policy', help=get_subscription_policy.__doc__) - get_subscription_policy_parser.add_argument('topic_name') get_subscription_policy_parser.add_argument('subscription_name') set_topic_policy_parser = subparsers.add_parser( @@ -157,7 +155,6 @@ def check_subscription_permissions(topic_name, subscription_name): set_subscription_policy_parser = subparsers.add_parser( 'set-subscription-policy', help=set_subscription_policy.__doc__) - set_subscription_policy_parser.add_argument('topic_name') set_subscription_policy_parser.add_argument('subscription_name') check_topic_permissions_parser = subparsers.add_parser( @@ -167,20 +164,19 @@ def check_subscription_permissions(topic_name, subscription_name): check_subscription_permissions_parser = subparsers.add_parser( 'check-subscription-permissions', help=check_subscription_permissions.__doc__) - check_subscription_permissions_parser.add_argument('topic_name') check_subscription_permissions_parser.add_argument('subscription_name') args = parser.parse_args() if args.command == 'get-topic-policy': - get_topic_policy(args.topic_name) + get_topic_policy(args.project, args.topic_name) elif args.command == 'get-subscription-policy': - get_subscription_policy(args.topic_name, args.subscription_name) + get_subscription_policy(args.project, args.subscription_name) elif args.command == 'set-topic-policy': - set_topic_policy(args.topic_name) + set_topic_policy(args.project, args.topic_name) elif args.command == 'set-subscription-policy': 
- set_subscription_policy(args.topic_name, args.subscription_name) + set_subscription_policy(args.project, args.subscription_name) elif args.command == 'check-topic-permissions': - check_topic_permissions(args.topic_name) + check_topic_permissions(args.project, args.topic_name) elif args.command == 'check-subscription-permissions': - check_subscription_permissions(args.topic_name, args.subscription_name) + check_subscription_permissions(args.project, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index f36dbd21402f..3deaec7465d6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -12,94 +12,100 @@ # See the License for the specific language governing permissions and # limitations under the License. -from google.cloud import pubsub +import os + +from google.cloud import pubsub_v1 import pytest import iam -TEST_TOPIC = 'iam-test-topic' -TEST_SUBSCRIPTION = 'iam-test-subscription' +PROJECT = os.environ['GCLOUD_PROJECT'] +TOPIC = 'iam-test-topic' +SUBSCRIPTION = 'iam-test-subscription' @pytest.fixture(scope='module') -def test_topic(): - client = pubsub.Client() - topic = client.topic(TEST_TOPIC) +def publisher_client(): + yield pubsub_v1.PublisherClient() - if not topic.exists(): - topic.create() - yield topic +@pytest.fixture(scope='module') +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) - if topic.exists(): - topic.delete() + try: + publisher_client.delete_topic(topic_path) + except: + pass + publisher_client.create_topic(topic_path) -@pytest.fixture -def test_subscription(test_topic): - subscription = test_topic.subscription(TEST_SUBSCRIPTION) - yield subscription - if subscription.exists(): - subscription.delete() + yield topic_path -def test_get_topic_policy(test_topic, capsys): - iam.get_topic_policy(test_topic.name) 
+@pytest.fixture(scope='module') +def subscriber_client(): + yield pubsub_v1.SubscriberClient() - out, _ = capsys.readouterr() - assert test_topic.name in out +@pytest.fixture +def subscription(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION) + + try: + subscriber_client.delete_subscription(subscription_path) + except: + pass -def test_get_subscription_policy(test_subscription, capsys): - test_subscription.create() + subscriber_client.create_subscription(subscription_path, topic=topic) - iam.get_subscription_policy( - test_subscription.topic.name, - test_subscription.name) + yield subscription_path + + +def test_get_topic_policy(topic, capsys): + iam.get_topic_policy(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert test_subscription.topic.name in out - assert test_subscription.name in out + assert topic in out + +def test_get_subscription_policy(subscription, capsys): + iam.get_subscription_policy(PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert subscription in out -def test_set_topic_policy(test_topic): - iam.set_topic_policy(test_topic.name) - policy = test_topic.get_iam_policy() - assert policy.viewers - assert policy['roles/pubsub.publisher'] +def test_set_topic_policy(publisher_client, topic): + iam.set_topic_policy(PROJECT, TOPIC) + policy = publisher_client.get_iam_policy(topic) + assert 'roles/pubsub.publisher' in str(policy) + assert 'allUsers' in str(policy) -def test_set_subscription_policy(test_subscription): - test_subscription.create() - iam.set_subscription_policy( - test_subscription.topic.name, - test_subscription.name) +def test_set_subscription_policy(subscriber_client, subscription): + iam.set_subscription_policy(PROJECT, SUBSCRIPTION) - policy = test_subscription.get_iam_policy() - assert policy.viewers - assert policy.editors + policy = subscriber_client.get_iam_policy(subscription) + assert 'roles/pubsub.viewer' in str(policy) + assert 'allUsers' in str(policy) 
-def test_check_topic_permissions(test_topic, capsys): - iam.check_topic_permissions(test_topic.name) +def test_check_topic_permissions(topic, capsys): + iam.check_topic_permissions(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert test_topic.name in out + assert topic in out assert 'pubsub.topics.publish' in out -def test_check_subscription_permissions(test_subscription, capsys): - test_subscription.create() - - iam.check_subscription_permissions( - test_subscription.topic.name, - test_subscription.name) +def test_check_subscription_permissions(subscription, capsys): + iam.check_subscription_permissions(PROJECT, SUBSCRIPTION) out, _ = capsys.readouterr() - assert test_subscription.topic.name in out - assert test_subscription.name in out + assert subscription in out assert 'pubsub.subscriptions.consume' in out diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 465047560565..4304ddf912b0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -23,48 +23,92 @@ import argparse -from google.cloud import pubsub +from google.cloud import pubsub_v1 -def list_topics(): - """Lists all Pub/Sub topics in the current project.""" - pubsub_client = pubsub.Client() +def list_topics(project): + """Lists all Pub/Sub topics in the given project.""" + publisher = pubsub_v1.PublisherClient() + project_path = publisher.project_path(project) - for topic in pubsub_client.list_topics(): - print(topic.name) + for topic in publisher.list_topics(project_path): + print(topic) -def create_topic(topic_name): +def create_topic(project, topic_name): """Create a new Pub/Sub topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) - topic.create() + topic = publisher.create_topic(topic_path) - 
print('Topic {} created.'.format(topic.name)) + print('Topic created: {}'.format(topic)) -def delete_topic(topic_name): +def delete_topic(project, topic_name): """Deletes an existing Pub/Sub topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) - topic.delete() + publisher.delete_topic(topic_path) - print('Topic {} deleted.'.format(topic.name)) + print('Topic deleted: {}'.format(topic_path)) -def publish_message(topic_name, data): - """Publishes a message to a Pub/Sub topic with the given data.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) +def publish_messages(project, topic_name): + """Publishes multiple messages to a Pub/Sub topic.""" + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) - # Data must be a bytestring - data = data.encode('utf-8') + for n in range(1, 10): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + publisher.publish(topic_path, data=data) - message_id = topic.publish(data) + print('Published messages.') - print('Message {} published.'.format(message_id)) + +def publish_messages_with_futures(project, topic_name): + """Publishes multiple messages to a Pub/Sub topic and prints their + message IDs.""" + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) + + # When you publish a message, the client returns a Future. This Future + # can be used to track when the message is published. 
+ futures = [] + + for n in range(1, 10): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + message_future = publisher.publish(topic_path, data=data) + futures.append(message_future) + + print('Published message IDs:') + for future in futures: + print(future.result()) + + +def publish_messages_with_batch_settings(project, topic_name): + """Publishes multiple messages to a Pub/Sub topic with batch settings.""" + # Configure the batch to publish once there is one kilobyte of data or + # 1 second has passed. + batch_settings = pubsub_v1.types.BatchSettings( + max_bytes=1024, # One kilobyte + max_latency=1, # One second + ) + publisher = pubsub_v1.PublisherClient(batch_settings) + topic_path = publisher.topic_path(project, topic_name) + + for n in range(1, 10): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + publisher.publish(topic_path, data=data) + + print('Published messages.') if __name__ == '__main__': @@ -72,6 +116,7 @@ def publish_message(topic_name, data): description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) + parser.add_argument('project', help='Your Google Cloud project ID') subparsers = parser.add_subparsers(dest='command') subparsers.add_parser('list', help=list_topics.__doc__) @@ -83,17 +128,30 @@ def publish_message(topic_name, data): delete_parser.add_argument('topic_name') publish_parser = subparsers.add_parser( - 'publish', help=publish_message.__doc__) + 'publish', help=publish_messages.__doc__) publish_parser.add_argument('topic_name') - publish_parser.add_argument('data') + + publish_with_futures_parser = subparsers.add_parser( + 'publish-with-futures', + help=publish_messages_with_futures.__doc__) + publish_with_futures_parser.add_argument('topic_name') + + publish_with_batch_settings_parser = subparsers.add_parser( + 'publish-with-batch-settings', + help=publish_messages_with_batch_settings.__doc__) + 
publish_with_batch_settings_parser.add_argument('topic_name') args = parser.parse_args() if args.command == 'list': - list_topics() + list_topics(args.project) elif args.command == 'create': - create_topic(args.topic_name) + create_topic(args.project, args.topic_name) elif args.command == 'delete': - delete_topic(args.topic_name) + delete_topic(args.project, args.topic_name) elif args.command == 'publish': - publish_message(args.topic_name, args.data) + publish_messages(args.project, args.topic_name) + elif args.command == 'publish-with-futures': + publish_messages_with_futures(args.project, args.topic_name) + elif args.command == 'publish-with-batch-settings': + publish_messages_with_batch_settings(args.project, args.topic_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index d7ca67098057..b400c9f2404b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -12,56 +12,84 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import os + from gcp_devrel.testing import eventually_consistent -from google.cloud import pubsub +from google.cloud import pubsub_v1 import pytest import publisher -TEST_TOPIC = 'publisher-test-topic' +PROJECT = os.environ['GCLOUD_PROJECT'] +TOPIC = 'publisher-test-topic' + + +@pytest.fixture +def client(): + yield pubsub_v1.PublisherClient() @pytest.fixture -def test_topic(): - client = pubsub.Client() - topic = client.topic(TEST_TOPIC) - yield topic - if topic.exists(): - topic.delete() +def topic(client): + topic_path = client.topic_path(PROJECT, TOPIC) + try: + client.delete_topic(topic_path) + except: + pass -def test_list(test_topic, capsys): - test_topic.create() + client.create_topic(topic_path) + yield topic_path + + +def test_list(client, topic, capsys): @eventually_consistent.call def _(): - publisher.list_topics() + publisher.list_topics(PROJECT) out, _ = capsys.readouterr() - assert test_topic.name in out + assert topic in out -def test_create(test_topic): - publisher.create_topic(test_topic.name) +def test_create(client): + topic_path = client.topic_path(PROJECT, TOPIC) + try: + client.delete_topic(topic_path) + except: + pass + + publisher.create_topic(PROJECT, TOPIC) @eventually_consistent.call def _(): - assert test_topic.exists() - + assert client.get_topic(topic_path) -def test_delete(test_topic): - test_topic.create() - publisher.delete_topic(test_topic.name) +def test_delete(client, topic): + publisher.delete_topic(PROJECT, TOPIC) @eventually_consistent.call def _(): - assert not test_topic.exists() + with pytest.raises(Exception): + client.get_topic(client.topic_path(PROJECT, TOPIC)) -def test_publish(test_topic, capsys): - test_topic.create() +def test_publish(topic, capsys): + publisher.publish_messages(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + assert 'Published' in out + + +def test_publish_with_batch_settings(topic, capsys): + publisher.publish_messages_with_batch_settings(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + 
assert 'Published' in out + - publisher.publish_message(test_topic.name, 'hello') +def test_publish_with_futures(topic, capsys): + publisher.publish_messages_with_futures(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'published' in out + assert 'Published' in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py index fdcb45003b86..c9823d789928 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -18,21 +18,20 @@ def run_quickstart(): # [START pubsub_quickstart] # Imports the Google Cloud client library - from google.cloud import pubsub + from google.cloud import pubsub_v1 # Instantiates a client - pubsub_client = pubsub.Client() + publisher = pubsub_v1.PublisherClient() - # The name for the new topic - topic_name = 'my-new-topic' + # The resource path for the new topic contains the project ID + # and the topic name. + topic_path = publisher.topic_path( + 'my-project', 'my-new-topic') - # Prepares the new topic - topic = pubsub_client.topic(topic_name) + # Create the topic. + topic = publisher.create_topic(topic_path) - # Creates the new topic - topic.create() - - print('Topic {} created.'.format(topic.name)) + print('Topic created: {}'.format(topic)) # [END pubsub_quickstart] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py index bbb3bd75f257..71e157d4894f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -12,34 +12,36 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from google.cloud import pubsub +import os + +from google.cloud import pubsub_v1 +import mock import pytest import quickstart - -# Must match the dataset listed in quickstart.py (there's no easy way to -# extract this). +PROJECT = os.environ['GCLOUD_PROJECT'] +# Must match the dataset listed in quickstart.py TOPIC_NAME = 'my-new-topic' +TOPIC_PATH = 'projects/{}/topics/{}'.format(PROJECT, TOPIC_NAME) @pytest.fixture def temporary_topic(): - """Fixture that ensures the test dataset does not exist before or - after a test.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(TOPIC_NAME) + """Fixture that ensures the test topic does not exist before the test.""" + publisher = pubsub_v1.PublisherClient() - if topic.exists(): - topic.delete() + try: + publisher.delete_topic(TOPIC_PATH) + except: + pass yield - if topic.exists(): - topic.delete() - -def test_quickstart(capsys, temporary_topic): +@mock.patch.object( + pubsub_v1.PublisherClient, 'topic_path', return_value=TOPIC_PATH) +def test_quickstart(unused_topic_path, temporary_topic, capsys): quickstart.run_quickstart() out, _ = capsys.readouterr() assert TOPIC_NAME in out diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 65c3daf96ff6..6b4c47db9052 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.27.0 +google-cloud-pubsub==0.28.2 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 9a564496e424..aef2ab679983 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -22,63 +22,83 @@ """ import argparse +import time -from google.cloud import pubsub +from google.cloud import pubsub_v1 -def 
list_subscriptions(topic_name): +def list_subscriptions(project, topic_name): """Lists all subscriptions for a given topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project, topic_name) - for subscription in topic.list_subscriptions(): + for subscription in subscriber.list_subscriptions(topic_path): print(subscription.name) -def create_subscription(topic_name, subscription_name): +def create_subscription(project, topic_name, subscription_name): """Create a new pull subscription on the given topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project, topic_name) + subscription_path = subscriber.subscription_path( + project, subscription_name) - subscription = topic.subscription(subscription_name) - subscription.create() + subscription = subscriber.create_subscription( + subscription_path, topic_path) - print('Subscription {} created on topic {}.'.format( - subscription.name, topic.name)) + print('Subscription created: {}'.format(subscription)) -def delete_subscription(topic_name, subscription_name): +def delete_subscription(project, subscription_name): """Deletes an existing Pub/Sub topic.""" - pubsub_client = pubsub.Client() - topic = pubsub_client.topic(topic_name) - subscription = topic.subscription(subscription_name) + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) - subscription.delete() + subscriber.delete_subscription(subscription_path) - print('Subscription {} deleted on topic {}.'.format( - subscription.name, topic.name)) + print('Subscription deleted: {}'.format(subscription_path)) -def receive_message(topic_name, subscription_name): - """Receives a message from a pull subscription.""" - pubsub_client = pubsub.Client() - topic = 
pubsub_client.topic(topic_name) - subscription = topic.subscription(subscription_name) +def receive_messages(project, subscription_name): + """Receives messages from a pull subscription.""" + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) - # Change return_immediately=False to block until messages are - # received. - results = subscription.pull(return_immediately=True) + def callback(message): + print('Received message: {}'.format(message)) + message.ack() - print('Received {} messages.'.format(len(results))) + subscriber.subscribe(subscription_path, callback=callback) - for ack_id, message in results: - print('* {}: {}, {}'.format( - message.message_id, message.data, message.attributes)) + # The subscriber is non-blocking, so we must keep the main thread from + # exiting to allow it to process messages in the background. + print('Listening for messages on {}'.format(subscription_path)) + while True: + time.sleep(60) - # Acknowledge received messages. If you do not acknowledge, Pub/Sub will - # redeliver the message. - if results: - subscription.acknowledge([ack_id for ack_id, message in results]) + +def receive_messages_with_flow_control(project, subscription_name): + """Receives messages from a pull subscription with flow control.""" + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) + + def callback(message): + print('Received message: {}'.format(message)) + message.ack() + + # Limit the subscriber to only have ten outstanding messages at a time. + flow_control = pubsub_v1.types.FlowControl(max_messages=10) + subscriber.subscribe( + subscription_path, callback=callback, flow_control=flow_control) + + # The subscriber is non-blocking, so we must keep the main thread from + # exiting to allow it to process messages in the background. 
+ print('Listening for messages on {}'.format(subscription_path)) + while True: + time.sleep(60) if __name__ == '__main__': @@ -86,6 +106,7 @@ def receive_message(topic_name, subscription_name): description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) + parser.add_argument('project', help='Your Google Cloud project ID') subparsers = parser.add_subparsers(dest='command') list_parser = subparsers.add_parser( @@ -99,21 +120,29 @@ def receive_message(topic_name, subscription_name): delete_parser = subparsers.add_parser( 'delete', help=delete_subscription.__doc__) - delete_parser.add_argument('topic_name') delete_parser.add_argument('subscription_name') receive_parser = subparsers.add_parser( - 'receive', help=receive_message.__doc__) - receive_parser.add_argument('topic_name') + 'receive', help=receive_messages.__doc__) receive_parser.add_argument('subscription_name') + receive_with_flow_control_parser = subparsers.add_parser( + 'receive-flow-control', + help=receive_messages_with_flow_control.__doc__) + receive_with_flow_control_parser.add_argument('subscription_name') + args = parser.parse_args() if args.command == 'list': - list_subscriptions(args.topic_name) + list_subscriptions(args.project, args.topic_name) elif args.command == 'create': - create_subscription(args.topic_name, args.subscription_name) + create_subscription( + args.project, args.topic_name, args.subscription_name) elif args.command == 'delete': - delete_subscription(args.topic_name, args.subscription_name) + delete_subscription( + args.project, args.subscription_name) elif args.command == 'receive': - receive_message(args.topic_name, args.subscription_name) + receive_messages(args.project, args.topic_name, args.subscription_name) + elif args.command == 'receive-flow-control': + receive_messages_with_flow_control( + args.project, args.topic_name, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py 
b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 9f7f5a1bf81d..0acadf437ed7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -12,76 +12,135 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os +import time + from gcp_devrel.testing import eventually_consistent -from google.cloud import pubsub +from google.cloud import pubsub_v1 +import mock import pytest import subscriber -TEST_TOPIC = 'subscription-test-topic' -TEST_SUBSCRIPTION = 'subscription-test-subscription' +PROJECT = os.environ['GCLOUD_PROJECT'] +TOPIC = 'subscription-test-topic' +SUBSCRIPTION = 'subscription-test-subscription' + + +@pytest.fixture(scope='module') +def publisher_client(): + yield pubsub_v1.PublisherClient() @pytest.fixture(scope='module') -def test_topic(): - client = pubsub.Client() - topic = client.topic(TEST_TOPIC) +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + publisher_client.delete_topic(topic_path) + except: + pass + + publisher_client.create_topic(topic_path) - if not topic.exists(): - topic.create() + yield topic_path - yield topic - if topic.exists(): - topic.delete() +@pytest.fixture(scope='module') +def subscriber_client(): + yield pubsub_v1.SubscriberClient() @pytest.fixture -def test_subscription(test_topic): - subscription = test_topic.subscription(TEST_SUBSCRIPTION) - yield subscription - if subscription.exists(): - subscription.delete() +def subscription(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION) + + try: + subscriber_client.delete_subscription(subscription_path) + except: + pass + + subscriber_client.create_subscription(subscription_path, topic=topic) + yield subscription_path -def test_list(test_subscription, capsys): - test_subscription.create() +def 
test_list(subscription, capsys): @eventually_consistent.call def _(): - subscriber.list_subscriptions(test_subscription.topic.name) + subscriber.list_subscriptions(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert test_subscription.name in out + assert subscription in out -def test_create(test_subscription): - subscriber.create_subscription( - test_subscription.topic.name, test_subscription.name) +def test_create(subscriber_client): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION) + try: + subscriber_client.delete_subscription(subscription_path) + except: + pass + + subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION) @eventually_consistent.call def _(): - assert test_subscription.exists() - + assert subscriber_client.get_subscription(subscription_path) -def test_delete(test_subscription): - test_subscription.create() - subscriber.delete_subscription( - test_subscription.topic.name, test_subscription.name) +def test_delete(subscriber_client, subscription): + subscriber.delete_subscription(PROJECT, SUBSCRIPTION) @eventually_consistent.call def _(): - assert not test_subscription.exists() + with pytest.raises(Exception): + subscriber_client.get_subscription(subscription) -def test_receive(test_subscription, capsys): - topic = test_subscription.topic - test_subscription.create() +def _publish_messages(publisher_client, topic): + for n in range(5): + data = u'Message {}'.format(n).encode('utf-8') + publisher_client.publish( + topic, data=data) - topic.publish('hello'.encode('utf-8')) - @eventually_consistent.call - def _(): - subscriber.receive_message(topic.name, test_subscription.name) - out, _ = capsys.readouterr() - assert 'hello' in out +def _make_sleep_patch(): + real_sleep = time.sleep + + def new_sleep(period): + if period == 60: + real_sleep(5) + raise RuntimeError('sigil') + else: + real_sleep(period) + + return mock.patch('time.sleep', new=new_sleep) + + +def test_receive(publisher_client, topic, subscription, 
capsys): + _publish_messages(publisher_client, topic) + + with _make_sleep_patch(): + with pytest.raises(RuntimeError, match='sigil'): + subscriber.receive_messages(PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert 'Listening' in out + assert subscription in out + assert 'Message 1' in out + + +def test_receive_with_flow_control( + publisher_client, topic, subscription, capsys): + _publish_messages(publisher_client, topic) + + with _make_sleep_patch(): + with pytest.raises(RuntimeError, match='sigil'): + subscriber.receive_messages_with_flow_control( + PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert 'Listening' in out + assert subscription in out + assert 'Message 1' in out From df3c35a5746a7e7c02963766d6ae8704817739cd Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 28 Aug 2017 11:23:40 -0700 Subject: [PATCH 0500/1197] Fix argpraser for pubsub subscriber Change-Id: I776863091846ee8ff8a70078c8b8d5498cf81ed6 --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index aef2ab679983..401faaab3433 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -142,7 +142,7 @@ def callback(message): delete_subscription( args.project, args.subscription_name) elif args.command == 'receive': - receive_messages(args.project, args.topic_name, args.subscription_name) + receive_messages(args.project, args.subscription_name) elif args.command == 'receive-flow-control': receive_messages_with_flow_control( - args.project, args.topic_name, args.subscription_name) + args.project, args.subscription_name) From da9dc42d38fd1de08929d7f6d447cb2863fa8f53 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 28 Aug 2017 15:28:13 -0700 Subject: [PATCH 0501/1197] Add 
comment about result blocking in pubsub samples Change-Id: I149fc1242ceb6b2cff8eae7ef18b364dd5c26566 --- packages/google-cloud-pubsub/samples/snippets/publisher.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 4304ddf912b0..cf12b2626b4f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -88,6 +88,7 @@ def publish_messages_with_futures(project, topic_name): print('Published message IDs:') for future in futures: + # result() blocks until the message is published. print(future.result()) From afa43491ffdc25bdab2671b2887db2f2adbbc925 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 12 Sep 2017 12:26:44 -0700 Subject: [PATCH 0502/1197] Auto-update dependencies. [(#1097)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1097) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 6b4c47db9052..17edefdddb4f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.28.2 +google-cloud-pubsub==0.28.3 From 4b4da3a209880d57fefac03c56ee4c580bf0af58 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Mon, 18 Sep 2017 11:04:05 -0700 Subject: [PATCH 0503/1197] Update all generated readme auth instructions [(#1121)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1121) Change-Id: I03b5eaef8b17ac3dc3c0339fd2c7447bd3e11bd2 --- .../samples/snippets/README.rst | 65 +++++++++---------- 1 file changed, 30 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst 
b/packages/google-cloud-pubsub/samples/snippets/README.rst index 0c18e7ff617c..a0d39bd61cc4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -17,34 +17,12 @@ Setup Authentication ++++++++++++++ -Authentication is typically done through `Application Default Credentials`_, -which means you do not have to change the code to authenticate as long as -your environment has credentials. You have a few options for setting up -authentication: +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. -#. When running locally, use the `Google Cloud SDK`_ - - .. code-block:: bash - - gcloud auth application-default login - - -#. When running on App Engine or Compute Engine, credentials are already - set-up. However, you may need to configure your Compute Engine instance - with `additional scopes`_. - -#. You can create a `Service Account key file`_. This file can be used to - authenticate to Google Cloud Platform services from any environment. To use - the file, set the ``GOOGLE_APPLICATION_CREDENTIALS`` environment variable to - the path to the key file, for example: - - .. code-block:: bash - - export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account.json - -.. _Application Default Credentials: https://cloud.google.com/docs/authentication#getting_credentials_for_server-centric_flow -.. _additional scopes: https://cloud.google.com/compute/docs/authentication#using -.. _Service Account key file: https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started Install Dependencies ++++++++++++++++++++ @@ -93,7 +71,10 @@ To run this sample: $ python publisher.py - usage: publisher.py [-h] {list,create,delete,publish} ... 
+ usage: publisher.py [-h] + project + {list,create,delete,publish,publish-with-futures,publish-with-batch-settings} + ... This application demonstrates how to perform basic operations on topics with the Cloud Pub/Sub API. @@ -102,12 +83,18 @@ To run this sample: at https://cloud.google.com/pubsub/docs. positional arguments: - {list,create,delete,publish} - list Lists all Pub/Sub topics in the current project. + project Your Google Cloud project ID + {list,create,delete,publish,publish-with-futures,publish-with-batch-settings} + list Lists all Pub/Sub topics in the given project. create Create a new Pub/Sub topic. delete Deletes an existing Pub/Sub topic. - publish Publishes a message to a Pub/Sub topic with the given - data. + publish Publishes multiple messages to a Pub/Sub topic. + publish-with-futures + Publishes multiple messages to a Pub/Sub topic and + prints their message IDs. + publish-with-batch-settings + Publishes multiple messages to a Pub/Sub topic with + batch settings. optional arguments: -h, --help show this help message and exit @@ -124,7 +111,9 @@ To run this sample: $ python subscriber.py - usage: subscriber.py [-h] {list,create,delete,receive} ... + usage: subscriber.py [-h] + project {list,create,delete,receive,receive-flow-control} + ... This application demonstrates how to perform basic operations on subscriptions with the Cloud Pub/Sub API. @@ -133,11 +122,15 @@ To run this sample: at https://cloud.google.com/pubsub/docs. positional arguments: - {list,create,delete,receive} + project Your Google Cloud project ID + {list,create,delete,receive,receive-flow-control} list Lists all subscriptions for a given topic. create Create a new pull subscription on the given topic. delete Deletes an existing Pub/Sub topic. - receive Receives a message from a pull subscription. + receive Receives messages from a pull subscription. + receive-flow-control + Receives messages from a pull subscription with flow + control. 
optional arguments: -h, --help show this help message and exit @@ -155,6 +148,7 @@ To run this sample: $ python iam.py usage: iam.py [-h] + project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... @@ -165,6 +159,7 @@ To run this sample: at https://cloud.google.com/pubsub/docs. positional arguments: + project Your Google Cloud project ID {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} get-topic-policy Prints the IAM policy for the given topic. get-subscription-policy From 9b7adffa87772cb4be7c2eaddad3033ae9d2f1f4 Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Thu, 12 Oct 2017 10:16:11 -0700 Subject: [PATCH 0504/1197] Added Link to Python Setup Guide [(#1158)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1158) * Update Readme.rst to add Python setup guide As requested in b/64770713. This sample is linked in documentation https://cloud.google.com/bigtable/docs/scaling, and it would make more sense to update the guide here than in the documentation. * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update README.rst * Update install_deps.tmpl.rst * Updated readmegen scripts and re-generated related README files * Fixed the lint error --- packages/google-cloud-pubsub/samples/snippets/README.rst | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index a0d39bd61cc4..df1dbf7c626f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -27,7 +27,10 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ -#. Install `pip`_ and `virtualenv`_ if you do not already have them. +#. 
Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup #. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. From ef17f59c25f75202df048ab89af925618c7a51df Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 23 Oct 2017 14:23:30 -0700 Subject: [PATCH 0505/1197] Auto-update dependencies. [(#1138)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1138) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 17edefdddb4f..8738f6887fae 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.28.3 +google-cloud-pubsub==0.28.4 From fdd287c195e42d4403271d75864d6dacd2ff6bd9 Mon Sep 17 00:00:00 2001 From: Jon Wayne Parrott Date: Tue, 24 Oct 2017 12:14:35 -0700 Subject: [PATCH 0506/1197] Fix a few more lint issues Change-Id: I0d420f3053f391fa225e4b8179e45fd1138f5c65 --- packages/google-cloud-pubsub/samples/snippets/iam_test.py | 4 ++-- .../google-cloud-pubsub/samples/snippets/publisher_test.py | 4 ++-- .../google-cloud-pubsub/samples/snippets/quickstart_test.py | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 6 +++--- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 3deaec7465d6..8a524c35a061 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -35,7 +35,7 @@ 
def topic(publisher_client): try: publisher_client.delete_topic(topic_path) - except: + except Exception: pass publisher_client.create_topic(topic_path) @@ -55,7 +55,7 @@ def subscription(subscriber_client, topic): try: subscriber_client.delete_subscription(subscription_path) - except: + except Exception: pass subscriber_client.create_subscription(subscription_path, topic=topic) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index b400c9f2404b..120148c0a8cf 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -35,7 +35,7 @@ def topic(client): try: client.delete_topic(topic_path) - except: + except Exception: pass client.create_topic(topic_path) @@ -55,7 +55,7 @@ def test_create(client): topic_path = client.topic_path(PROJECT, TOPIC) try: client.delete_topic(topic_path) - except: + except Exception: pass publisher.create_topic(PROJECT, TOPIC) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py index 71e157d4894f..520213bcf32c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -33,7 +33,7 @@ def temporary_topic(): try: publisher.delete_topic(TOPIC_PATH) - except: + except Exception: pass yield diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 0acadf437ed7..2cc955d9db13 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -38,7 +38,7 @@ def topic(publisher_client): try: publisher_client.delete_topic(topic_path) - except: + except Exception: pass publisher_client.create_topic(topic_path) @@ 
-58,7 +58,7 @@ def subscription(subscriber_client, topic): try: subscriber_client.delete_subscription(subscription_path) - except: + except Exception: pass subscriber_client.create_subscription(subscription_path, topic=topic) @@ -79,7 +79,7 @@ def test_create(subscriber_client): PROJECT, SUBSCRIPTION) try: subscriber_client.delete_subscription(subscription_path) - except: + except Exception: pass subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION) From 82bb826daa6b39e32c5fcb0df927cf5ae28741ae Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Tue, 31 Oct 2017 10:44:51 -0700 Subject: [PATCH 0507/1197] Add Snippet for Listing All Subscriptions in a Project [(#1169)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1169) --- .../samples/snippets/subscriber.py | 26 ++++++++++++++----- .../samples/snippets/subscriber_test.py | 12 +++++++-- 2 files changed, 30 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 401faaab3433..9dded25b1112 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -27,7 +27,7 @@ from google.cloud import pubsub_v1 -def list_subscriptions(project, topic_name): +def list_subscriptions_in_topic(project, topic_name): """Lists all subscriptions for a given topic.""" subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project, topic_name) @@ -36,6 +36,15 @@ def list_subscriptions(project, topic_name): print(subscription.name) +def list_subscriptions_in_project(project): + """Lists all subscriptions in the current project.""" + subscriber = pubsub_v1.SubscriberClient() + project_path = subscriber.project_path(project) + + for subscription in subscriber.list_subscriptions(project_path): + print(subscription.name) + + def create_subscription(project, topic_name, subscription_name): """Create a new pull 
subscription on the given topic.""" subscriber = pubsub_v1.SubscriberClient() @@ -109,9 +118,12 @@ def callback(message): parser.add_argument('project', help='Your Google Cloud project ID') subparsers = parser.add_subparsers(dest='command') - list_parser = subparsers.add_parser( - 'list', help=list_subscriptions.__doc__) - list_parser.add_argument('topic_name') + list_in_topic_parser = subparsers.add_parser( + 'list_in_topic', help=list_subscriptions_in_topic.__doc__) + list_in_topic_parser.add_argument('topic_name') + + list_in_project_parser = subparsers.add_parser( + 'list_in_project', help=list_subscriptions_in_project.__doc__) create_parser = subparsers.add_parser( 'create', help=create_subscription.__doc__) @@ -133,8 +145,10 @@ def callback(message): args = parser.parse_args() - if args.command == 'list': - list_subscriptions(args.project, args.topic_name) + if args.command == 'list_in_topic': + list_subscriptions_in_topic(args.project, args.topic_name) + elif args.command == 'list_in_project': + list_subscriptions_in_project(args.project) elif args.command == 'create': create_subscription( args.project, args.topic_name, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 2cc955d9db13..8b5e97ac40cc 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -66,10 +66,18 @@ def subscription(subscriber_client, topic): yield subscription_path -def test_list(subscription, capsys): +def test_list_in_topic(subscription, capsys): @eventually_consistent.call def _(): - subscriber.list_subscriptions(PROJECT, TOPIC) + subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) + out, _ = capsys.readouterr() + assert subscription in out + + +def test_list_in_project(subscription, capsys): + @eventually_consistent.call + def _(): + 
subscriber.list_subscriptions_in_project(PROJECT) out, _ = capsys.readouterr() assert subscription in out From 0ce840a45653916ab68c27fe58b775447c03948f Mon Sep 17 00:00:00 2001 From: DPE bot Date: Wed, 1 Nov 2017 12:30:10 -0700 Subject: [PATCH 0508/1197] Auto-update dependencies. [(#1186)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1186) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 8738f6887fae..fd3aa4fe8ae8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.28.4 +google-cloud-pubsub==0.29.0 From 3577d648f20b9824a888fe57373965a10404fb85 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 28 Nov 2017 09:52:33 -0800 Subject: [PATCH 0509/1197] Auto-update dependencies. [(#1234)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1234) * Auto-update dependencies. * Drop pytest-logcapture as it's no longer needed Change-Id: Ia8b9e8aaf248e9770db6bc4842a4532df8383893 --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index fd3aa4fe8ae8..815e7d0e40c0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.29.0 +google-cloud-pubsub==0.29.1 From 5bc126e3a292e3640a2e641578cb6aa4782033bf Mon Sep 17 00:00:00 2001 From: DPE bot Date: Thu, 30 Nov 2017 10:25:03 -0800 Subject: [PATCH 0510/1197] Auto-update dependencies. 
[(#1239)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1239) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 815e7d0e40c0..b6977877d7af 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.29.1 +google-cloud-pubsub==0.29.2 From d8db15599fb2b917b9feb9467eb3544a061da60b Mon Sep 17 00:00:00 2001 From: michaelawyu Date: Thu, 7 Dec 2017 10:34:29 -0800 Subject: [PATCH 0511/1197] Added "Open in Cloud Shell" buttons to README files [(#1254)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1254) --- .../samples/snippets/README.rst | 55 ++++++++++++++----- .../samples/snippets/README.rst.in | 2 + 2 files changed, 42 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index df1dbf7c626f..0ecf4b1930dd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -3,6 +3,10 @@ Google Cloud Pub/Sub Python Samples =============================================================================== +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/README.rst + + This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. 
@@ -54,6 +58,10 @@ Samples Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py;pubsub/cloud-client/README.rst + + To run this sample: @@ -66,6 +74,10 @@ To run this sample: Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py;pubsub/cloud-client/README.rst + + To run this sample: @@ -78,13 +90,13 @@ To run this sample: project {list,create,delete,publish,publish-with-futures,publish-with-batch-settings} ... - + This application demonstrates how to perform basic operations on topics with the Cloud Pub/Sub API. - + For more information, see the README.md under /pubsub and the documentation at https://cloud.google.com/pubsub/docs. - + positional arguments: project Your Google Cloud project ID {list,create,delete,publish,publish-with-futures,publish-with-batch-settings} @@ -98,14 +110,19 @@ To run this sample: publish-with-batch-settings Publishes multiple messages to a Pub/Sub topic with batch settings. - + optional arguments: -h, --help show this help message and exit + Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py;pubsub/cloud-client/README.rst + + To run this sample: @@ -115,33 +132,40 @@ To run this sample: $ python subscriber.py usage: subscriber.py [-h] - project {list,create,delete,receive,receive-flow-control} + project + {list_in_topic,list_in_project,create,delete,receive,receive-flow-control} ... - + This application demonstrates how to perform basic operations on subscriptions with the Cloud Pub/Sub API. - + For more information, see the README.md under /pubsub and the documentation at https://cloud.google.com/pubsub/docs. - + positional arguments: project Your Google Cloud project ID - {list,create,delete,receive,receive-flow-control} - list Lists all subscriptions for a given topic. + {list_in_topic,list_in_project,create,delete,receive,receive-flow-control} + list_in_topic Lists all subscriptions for a given topic. + list_in_project Lists all subscriptions in the current project. create Create a new pull subscription on the given topic. delete Deletes an existing Pub/Sub topic. receive Receives messages from a pull subscription. receive-flow-control Receives messages from a pull subscription with flow control. - + optional arguments: -h, --help show this help message and exit + Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py;pubsub/cloud-client/README.rst + + To run this sample: @@ -154,13 +178,13 @@ To run this sample: project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... - + This application demonstrates how to perform basic operations on IAM policies with the Cloud Pub/Sub API. - + For more information, see the README.md under /pubsub and the documentation at https://cloud.google.com/pubsub/docs. - + positional arguments: project Your Google Cloud project ID {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} @@ -176,13 +200,14 @@ To run this sample: check-subscription-permissions Checks to which permissions are available on the given subscription. - + optional arguments: -h, --help show this help message and exit + The client library ------------------------------------------------------------------------------- diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst.in b/packages/google-cloud-pubsub/samples/snippets/README.rst.in index 6a9fd00c722b..ddbc647121b2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst.in +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst.in @@ -26,3 +26,5 @@ samples: show_help: true cloud_client_library: true + +folder: pubsub/cloud-client \ No newline at end of file From 0929db20e900e36a9b0fbddc446ce9ddf124bb8a Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 11 Dec 2017 09:45:02 -0800 Subject: [PATCH 0512/1197] Auto-update dependencies. 
[(#1263)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1263) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index b6977877d7af..bf0da6a020a9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.29.2 +google-cloud-pubsub==0.29.3 From 2bbfaa133d37f70f0472f0ed61be7fc40c6844d1 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 12 Dec 2017 09:26:42 -0800 Subject: [PATCH 0513/1197] Auto-update dependencies. [(#1272)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1272) * Auto-update dependencies. * Update requirements.txt --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index bf0da6a020a9..23b9f7a6c40e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.29.3 +google-cloud-pubsub==0.29.4 From 9682b3c7d7f9e19fd7aa304a51eb5858a695552a Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 2 Jan 2018 14:02:47 -0800 Subject: [PATCH 0514/1197] Auto-update dependencies. [(#1282)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1282) * Auto-update dependencies. 
* Fix storage acl sample Change-Id: I413bea899fdde4c4859e4070a9da25845b81f7cf --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 23b9f7a6c40e..8c845ba26c5d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.29.4 +google-cloud-pubsub==0.30.1 From 836643a392f586cd00ae0553a3859e647addf368 Mon Sep 17 00:00:00 2001 From: noerog <32459203+noerog@users.noreply.github.com> Date: Mon, 8 Jan 2018 17:16:12 -0500 Subject: [PATCH 0515/1197] Add listen for errors sample. [(#1306)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1306) * Add listen for errors sample. * Update subscriber.py * Update subscriber.py --- .../samples/snippets/subscriber.py | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 9dded25b1112..577e77cb9e6e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -110,6 +110,30 @@ def callback(message): time.sleep(60) +def listen_for_errors(project, subscription_name): + """Receives messages and catches errors from a pull subscription.""" + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) + + def callback(message): + print('Received message: {}'.format(message)) + message.ack() + + subscription = subscriber.subscribe(subscription_path, callback=callback) + + # Blocks the thread while messages are coming in through the stream. Any + # exceptions that crop up on the thread will be set on the future. 
+ future = subscription.open(callback) + try: + future.result() + except Exception as e: + print( + 'Listening for messages on {} threw an Exception: {}.'.format( + subscription_name, e)) + raise + + if __name__ == '__main__': parser = argparse.ArgumentParser( description=__doc__, @@ -143,6 +167,10 @@ def callback(message): help=receive_messages_with_flow_control.__doc__) receive_with_flow_control_parser.add_argument('subscription_name') + listen_for_errors_parser = subparsers.add_parser( + 'listen_for_errors', help=listen_for_errors.__doc__) + listen_for_errors_parser.add_argument('subscription_name') + args = parser.parse_args() if args.command == 'list_in_topic': @@ -160,3 +188,5 @@ def callback(message): elif args.command == 'receive-flow-control': receive_messages_with_flow_control( args.project, args.subscription_name) + elif args.command == 'listen_for_errors': + listen_for_errors(args.project, args.subscription_name) From 9f6aae0e3f47ffbdd0f15ec774245206a549c08f Mon Sep 17 00:00:00 2001 From: L J Date: Wed, 17 Jan 2018 13:27:44 -0500 Subject: [PATCH 0516/1197] Fix subscription.open get called twice in the client libraries [(#1321)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1321) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 577e77cb9e6e..1fd480e59a21 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -124,9 +124,8 @@ def callback(message): # Blocks the thread while messages are coming in through the stream. Any # exceptions that crop up on the thread will be set on the future. 
- future = subscription.open(callback) try: - future.result() + subscription.future.result() except Exception as e: print( 'Listening for messages on {} threw an Exception: {}.'.format( From 92517a31963e8788f5350fa6680572d3b7cc8119 Mon Sep 17 00:00:00 2001 From: noerog <32459203+noerog@users.noreply.github.com> Date: Thu, 25 Jan 2018 13:41:26 -0500 Subject: [PATCH 0517/1197] Add tests for creating push subscription. [(#1332)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1332) This is a separate PR from actually adding the sample, which is in https://github.com/GoogleCloudPlatform/python-docs-samples/pull/1331. --- .../samples/snippets/subscriber_test.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 8b5e97ac40cc..0999e1218f9c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -25,6 +25,7 @@ PROJECT = os.environ['GCLOUD_PROJECT'] TOPIC = 'subscription-test-topic' SUBSCRIPTION = 'subscription-test-subscription' +ENDPOINT = 'https://{}.appspot.com/push'.format(PROJECT) @pytest.fixture(scope='module') @@ -97,6 +98,21 @@ def _(): assert subscriber_client.get_subscription(subscription_path) +def test_create_push(subscriber_client): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION) + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION, ENDPOINT) + + @eventually_consistent.call + def _(): + assert subscriber_client.get_subscription(subscription_path) + + def test_delete(subscriber_client, subscription): subscriber.delete_subscription(PROJECT, SUBSCRIPTION) From d0754c738e595af9311bb96d665797d032c74840 Mon Sep 17 00:00:00 2001 From: noerog 
<32459203+noerog@users.noreply.github.com> Date: Thu, 25 Jan 2018 13:41:43 -0500 Subject: [PATCH 0518/1197] Add create push subscription sample. [(#1331)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1331) --- .../samples/snippets/subscriber.py | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 1fd480e59a21..f8558e39f19c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -58,6 +58,26 @@ def create_subscription(project, topic_name, subscription_name): print('Subscription created: {}'.format(subscription)) +def create_push_subscription(project, + topic_name, + subscription_name, + endpoint): + """Create a new push subscription on the given topic.""" + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project, topic_name) + subscription_path = subscriber.subscription_path( + project, subscription_name) + + push_config = pubsub_v1.types.PushConfig( + push_endpoint=endpoint) + + subscription = subscriber.create_subscription( + subscription_path, topic_path, push_config) + + print('Push subscription created: {}'.format(subscription)) + print('Endpoint for subscription is: {}'.format(endpoint)) + + def delete_subscription(project, subscription_name): """Deletes an existing Pub/Sub topic.""" subscriber = pubsub_v1.SubscriberClient() @@ -153,6 +173,12 @@ def callback(message): create_parser.add_argument('topic_name') create_parser.add_argument('subscription_name') + create_push_parser = subparsers.add_parser( + 'create-push', help=create_push_subscription.__doc__) + create_push_parser.add_argument('topic_name') + create_push_parser.add_argument('subscription_name') + create_push_parser.add_argument('endpoint') + delete_parser = subparsers.add_parser( 'delete', 
help=delete_subscription.__doc__) delete_parser.add_argument('subscription_name') @@ -179,6 +205,12 @@ def callback(message): elif args.command == 'create': create_subscription( args.project, args.topic_name, args.subscription_name) + elif args.command == 'create-push': + create_push_subscription( + args.project, + args.topic_name, + args.subscription_name, + args.endpoint) elif args.command == 'delete': delete_subscription( args.project, args.subscription_name) From 337a38b1e56da26e8bd112ce5c9e81bbd4ff9001 Mon Sep 17 00:00:00 2001 From: noerog <32459203+noerog@users.noreply.github.com> Date: Fri, 2 Feb 2018 01:15:27 -0500 Subject: [PATCH 0519/1197] Add sample for updating a subscription. [(#1335)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1335) --- .../samples/snippets/subscriber.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index f8558e39f19c..b1a79327242f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -89,6 +89,34 @@ def delete_subscription(project, subscription_name): print('Subscription deleted: {}'.format(subscription_path)) +def update_subscription(project, subscription_name, ack_deadline_seconds): + """ + Updates an existing Pub/Sub subscription's ackDeadlineSeconds + from 10 seconds (default). Note that certain properties of a + subscription, such as its topic, are not modifiable. 
+ """ + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) + + subscription = pubsub_v1.types.Subscription( + name=subscription_path, + ack_deadline_seconds=ack_deadline_seconds) + + update_mask = { + 'paths': { + 'ack_deadline_seconds', + } + } + + subscriber.update_subscription(subscription, update_mask) + result = subscriber.get_subscription(subscription_path) + + print('Subscription updated: {}'.format(subscription_path)) + print('New ack_deadline_seconds value is: {}'.format( + result.ack_deadline_seconds)) + + def receive_messages(project, subscription_name): """Receives messages from a pull subscription.""" subscriber = pubsub_v1.SubscriberClient() @@ -183,6 +211,11 @@ def callback(message): 'delete', help=delete_subscription.__doc__) delete_parser.add_argument('subscription_name') + update_parser = subparsers.add_parser( + 'update', help=update_subscription.__doc__) + update_parser.add_argument('subscription_name') + update_parser.add_argument('ack_deadline_seconds', type=int) + receive_parser = subparsers.add_parser( 'receive', help=receive_messages.__doc__) receive_parser.add_argument('subscription_name') @@ -214,6 +247,9 @@ def callback(message): elif args.command == 'delete': delete_subscription( args.project, args.subscription_name) + elif args.command == 'update': + update_subscription( + args.project, args.subscription_name, args.ack_deadline_seconds) elif args.command == 'receive': receive_messages(args.project, args.subscription_name) elif args.command == 'receive-flow-control': From 979c9909cf9774c6d543551a4a5dfa04e36f0c89 Mon Sep 17 00:00:00 2001 From: noerog <32459203+noerog@users.noreply.github.com> Date: Mon, 5 Feb 2018 11:55:13 -0500 Subject: [PATCH 0520/1197] Change update_subscription to change endpoint URL. 
[(#1344)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1344) The documentation specifies that the update subscription commands show how to update an endpoint URL: https://cloud.google.com/pubsub/docs/admin#update_a_subscription. --- .../samples/snippets/subscriber.py | 29 ++++++++++++------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index b1a79327242f..f03039bbccff 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -62,7 +62,10 @@ def create_push_subscription(project, topic_name, subscription_name, endpoint): - """Create a new push subscription on the given topic.""" + """Create a new push subscription on the given topic. + For example, endpoint is + "https://my-test-project.appspot.com/push". + """ subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project, topic_name) subscription_path = subscriber.subscription_path( @@ -89,23 +92,27 @@ def delete_subscription(project, subscription_name): print('Subscription deleted: {}'.format(subscription_path)) -def update_subscription(project, subscription_name, ack_deadline_seconds): +def update_subscription(project, subscription_name, endpoint): """ - Updates an existing Pub/Sub subscription's ackDeadlineSeconds - from 10 seconds (default). Note that certain properties of a - subscription, such as its topic, are not modifiable. + Updates an existing Pub/Sub subscription's push endpoint URL. + Note that certain properties of a subscription, such as + its topic, are not modifiable. For example, endpoint is + "https://my-test-project.appspot.com/push". 
""" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) + push_config = pubsub_v1.types.PushConfig( + push_endpoint=endpoint) + subscription = pubsub_v1.types.Subscription( name=subscription_path, - ack_deadline_seconds=ack_deadline_seconds) + push_config=push_config) update_mask = { 'paths': { - 'ack_deadline_seconds', + 'push_config', } } @@ -113,8 +120,8 @@ def update_subscription(project, subscription_name, ack_deadline_seconds): result = subscriber.get_subscription(subscription_path) print('Subscription updated: {}'.format(subscription_path)) - print('New ack_deadline_seconds value is: {}'.format( - result.ack_deadline_seconds)) + print('New endpoint for subscription is: {}'.format( + result.push_config)) def receive_messages(project, subscription_name): @@ -214,7 +221,7 @@ def callback(message): update_parser = subparsers.add_parser( 'update', help=update_subscription.__doc__) update_parser.add_argument('subscription_name') - update_parser.add_argument('ack_deadline_seconds', type=int) + update_parser.add_argument('endpoint') receive_parser = subparsers.add_parser( 'receive', help=receive_messages.__doc__) @@ -249,7 +256,7 @@ def callback(message): args.project, args.subscription_name) elif args.command == 'update': update_subscription( - args.project, args.subscription_name, args.ack_deadline_seconds) + args.project, args.subscription_name, args.endpoint) elif args.command == 'receive': receive_messages(args.project, args.subscription_name) elif args.command == 'receive-flow-control': From 601e036aca8d4d064413e051fb74aa5d0a87dda2 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 26 Feb 2018 09:03:37 -0800 Subject: [PATCH 0521/1197] Auto-update dependencies. 
[(#1359)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1359) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 8c845ba26c5d..d300b96a86fd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.30.1 +google-cloud-pubsub==0.32.0 From 93abc73c7c64646be47a7f2a4ad8b0b5ac348d1e Mon Sep 17 00:00:00 2001 From: DPE bot Date: Thu, 8 Mar 2018 13:33:57 -0800 Subject: [PATCH 0522/1197] Auto-update dependencies. [(#1389)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1389) --- .../google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index d300b96a86fd..fdea342db45a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.32.0 +google-cloud-pubsub==0.32.1 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index f03039bbccff..827dcd313b54 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -29,11 +29,11 @@ def list_subscriptions_in_topic(project, topic_name): """Lists all subscriptions for a given topic.""" - subscriber = pubsub_v1.SubscriberClient() + subscriber = pubsub_v1.PublisherClient() topic_path = subscriber.topic_path(project, topic_name) 
- for subscription in subscriber.list_subscriptions(topic_path): - print(subscription.name) + for subscription in subscriber.list_topic_subscriptions(topic_path): + print(subscription) def list_subscriptions_in_project(project): From a22178a7efa03ec7c015fca6fa28d24ed5714450 Mon Sep 17 00:00:00 2001 From: chenyumic Date: Fri, 16 Mar 2018 16:41:13 -0700 Subject: [PATCH 0523/1197] Added sample for publishing/receiving messages with custom attributes [(#1409)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1409) --- .../samples/snippets/publisher.py | 24 ++++++++++++++ .../samples/snippets/publisher_test.py | 7 ++++ .../samples/snippets/subscriber.py | 32 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 21 ++++++++++++ 4 files changed, 84 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index cf12b2626b4f..76d0589931cb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -69,6 +69,23 @@ def publish_messages(project, topic_name): print('Published messages.') +def publish_messages_with_custom_attributes(project, topic_name): + """Publishes multiple messages with custom attributes + to a Pub/Sub topic.""" + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) + + for n in range(1, 10): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + # Add two attributes, origin and username, to the message + publisher.publish( + topic_path, data, origin='python-sample', username='gcp') + + print('Published messages with custom attributes.') + + def publish_messages_with_futures(project, topic_name): """Publishes multiple messages to a Pub/Sub topic and prints their message IDs.""" @@ -132,6 +149,11 @@ def publish_messages_with_batch_settings(project, topic_name): 'publish', 
help=publish_messages.__doc__) publish_parser.add_argument('topic_name') + publish_with_custom_attributes_parser = subparsers.add_parser( + 'publish-with-custom-attributes', + help=publish_messages_with_custom_attributes.__doc__) + publish_with_custom_attributes_parser.add_argument('topic_name') + publish_with_futures_parser = subparsers.add_parser( 'publish-with-futures', help=publish_messages_with_futures.__doc__) @@ -152,6 +174,8 @@ def publish_messages_with_batch_settings(project, topic_name): delete_topic(args.project, args.topic_name) elif args.command == 'publish': publish_messages(args.project, args.topic_name) + elif args.command == 'publish-with-custom-attributes': + publish_messages_with_custom_attributes(args.project, args.topic_name) elif args.command == 'publish-with-futures': publish_messages_with_futures(args.project, args.topic_name) elif args.command == 'publish-with-batch-settings': diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 120148c0a8cf..a008a06100ec 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -81,6 +81,13 @@ def test_publish(topic, capsys): assert 'Published' in out +def test_publish_with_custom_attributes(topic, capsys): + publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + assert 'Published' in out + + def test_publish_with_batch_settings(topic, capsys): publisher.publish_messages_with_batch_settings(PROJECT, TOPIC) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 827dcd313b54..f68c06c95f55 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -143,6 +143,30 @@ def callback(message): time.sleep(60) +def 
receive_messages_with_custom_attributes(project, subscription_name): + """Receives messages from a pull subscription.""" + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) + + def callback(message): + print('Received message: {}'.format(message.data)) + if message.attributes: + print('Attributes:') + for key in message.attributes: + value = message.attributes.get(key) + print('{}: {}'.format(key, value)) + message.ack() + + subscriber.subscribe(subscription_path, callback=callback) + + # The subscriber is non-blocking, so we must keep the main thread from + # exiting to allow it to process messages in the background. + print('Listening for messages on {}'.format(subscription_path)) + while True: + time.sleep(60) + + def receive_messages_with_flow_control(project, subscription_name): """Receives messages from a pull subscription with flow control.""" subscriber = pubsub_v1.SubscriberClient() @@ -227,6 +251,11 @@ def callback(message): 'receive', help=receive_messages.__doc__) receive_parser.add_argument('subscription_name') + receive_with_custom_attributes_parser = subparsers.add_parser( + 'receive-custom-attributes', + help=receive_messages_with_custom_attributes.__doc__) + receive_with_custom_attributes_parser.add_argument('subscription_name') + receive_with_flow_control_parser = subparsers.add_parser( 'receive-flow-control', help=receive_messages_with_flow_control.__doc__) @@ -259,6 +288,9 @@ def callback(message): args.project, args.subscription_name, args.endpoint) elif args.command == 'receive': receive_messages(args.project, args.subscription_name) + elif args.command == 'receive-custom-attributes': + receive_messages_with_custom_attributes( + args.project, args.subscription_name) elif args.command == 'receive-flow-control': receive_messages_with_flow_control( args.project, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py 
b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 0999e1218f9c..f04373ae8021 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -129,6 +129,11 @@ def _publish_messages(publisher_client, topic): topic, data=data) +def _publish_messages_with_custom_attributes(publisher_client, topic): + data = u'Test message'.encode('utf-8') + publisher_client.publish(topic, data=data, origin='python-sample') + + def _make_sleep_patch(): real_sleep = time.sleep @@ -155,6 +160,22 @@ def test_receive(publisher_client, topic, subscription, capsys): assert 'Message 1' in out +def test_receive_with_custom_attributes( + publisher_client, topic, subscription, capsys): + _publish_messages_with_custom_attributes(publisher_client, topic) + + with _make_sleep_patch(): + with pytest.raises(RuntimeError, match='sigil'): + subscriber.receive_messages_with_custom_attributes( + PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert 'Test message' in out + assert 'Attributes' in out + assert 'origin' in out + assert 'python-sample' in out + + def test_receive_with_flow_control( publisher_client, topic, subscription, capsys): _publish_messages(publisher_client, topic) From 5839dbc246ec4a302ee7fb70169b701b9ef26697 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Mon, 2 Apr 2018 02:51:10 -0700 Subject: [PATCH 0524/1197] Auto-update dependencies. 
--- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index fdea342db45a..c74eb4e3ccc1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.32.1 +google-cloud-pubsub==0.33.0 From 717ce8dcf81f06fb0c6ce6c8058883c42deb64a9 Mon Sep 17 00:00:00 2001 From: chenyumic Date: Fri, 6 Apr 2018 22:57:36 -0700 Subject: [PATCH 0525/1197] Regenerate the README files and fix the Open in Cloud Shell link for some samples [(#1441)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1441) --- .../samples/snippets/README.rst | 33 ++++++++++++++----- 1 file changed, 24 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 0ecf4b1930dd..209ab5d7f17d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -12,7 +12,7 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` -.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs +.. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs Setup ------------------------------------------------------------------------------- @@ -59,7 +59,7 @@ Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py;pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst @@ -75,7 +75,7 @@ Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py;pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py,pubsub/cloud-client/README.rst @@ -88,7 +88,7 @@ To run this sample: usage: publisher.py [-h] project - {list,create,delete,publish,publish-with-futures,publish-with-batch-settings} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-futures,publish-with-batch-settings} ... This application demonstrates how to perform basic operations on topics @@ -99,11 +99,14 @@ To run this sample: positional arguments: project Your Google Cloud project ID - {list,create,delete,publish,publish-with-futures,publish-with-batch-settings} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-futures,publish-with-batch-settings} list Lists all Pub/Sub topics in the given project. create Create a new Pub/Sub topic. delete Deletes an existing Pub/Sub topic. publish Publishes multiple messages to a Pub/Sub topic. 
+ publish-with-custom-attributes + Publishes multiple messages with custom attributes to + a Pub/Sub topic. publish-with-futures Publishes multiple messages to a Pub/Sub topic and prints their message IDs. @@ -120,7 +123,7 @@ Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py;pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py,pubsub/cloud-client/README.rst @@ -133,7 +136,7 @@ To run this sample: usage: subscriber.py [-h] project - {list_in_topic,list_in_project,create,delete,receive,receive-flow-control} + {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,listen_for_errors} ... This application demonstrates how to perform basic operations on @@ -144,15 +147,27 @@ To run this sample: positional arguments: project Your Google Cloud project ID - {list_in_topic,list_in_project,create,delete,receive,receive-flow-control} + {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,listen_for_errors} list_in_topic Lists all subscriptions for a given topic. list_in_project Lists all subscriptions in the current project. create Create a new pull subscription on the given topic. + create-push Create a new push subscription on the given topic. For + example, endpoint is "https://my-test- + project.appspot.com/push". delete Deletes an existing Pub/Sub topic. + update Updates an existing Pub/Sub subscription's push + endpoint URL. 
Note that certain properties of a + subscription, such as its topic, are not modifiable. + For example, endpoint is "https://my-test- + project.appspot.com/push". receive Receives messages from a pull subscription. + receive-custom-attributes + Receives messages from a pull subscription. receive-flow-control Receives messages from a pull subscription with flow control. + listen_for_errors Receives messages and catches errors from a pull + subscription. optional arguments: -h, --help show this help message and exit @@ -163,7 +178,7 @@ Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py;pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py,pubsub/cloud-client/README.rst From 0046d38d8f75a613b5386c76e57c13ab42a2d96e Mon Sep 17 00:00:00 2001 From: Frank Natividad Date: Thu, 26 Apr 2018 10:26:41 -0700 Subject: [PATCH 0526/1197] Update READMEs to fix numbering and add git clone [(#1464)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1464) --- .../google-cloud-pubsub/samples/snippets/README.rst | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 209ab5d7f17d..e0e265f8d427 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -31,10 +31,16 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ +#. 
Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. - .. _Python Development Environment Setup Guide: - https://cloud.google.com/python/setup + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup #. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. From b1560156f57b13af36f744446d11e7f3efeea4de Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Mon, 21 May 2018 15:05:29 -0700 Subject: [PATCH 0527/1197] PubSub: adds region tags and updates existing to standard [(#1491)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1491) --- .../samples/snippets/iam.py | 12 +++++++ .../samples/snippets/publisher.py | 14 ++++++++ .../samples/snippets/quickstart.py | 4 +-- .../samples/snippets/subscriber.py | 32 +++++++++++++++---- 4 files changed, 54 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index b46bc11474e2..bd44f1ab6e0b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -28,6 +28,7 @@ def get_topic_policy(project, topic_name): """Prints the IAM policy for the given topic.""" + # [START pubsub_get_topic_policy] client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_name) @@ -36,10 +37,12 @@ def get_topic_policy(project, topic_name): print('Policy for topic {}:'.format(topic_path)) for binding in policy.bindings: print('Role: {}, Members: {}'.format(binding.role, binding.members)) + # [END pubsub_get_topic_policy] def get_subscription_policy(project, subscription_name): 
"""Prints the IAM policy for the given subscription.""" + # [START pubsub_get_subscription_policy] client = pubsub_v1.SubscriberClient() subscription_path = client.subscription_path(project, subscription_name) @@ -48,10 +51,12 @@ def get_subscription_policy(project, subscription_name): print('Policy for subscription {}:'.format(subscription_path)) for binding in policy.bindings: print('Role: {}, Members: {}'.format(binding.role, binding.members)) + # [END pubsub_get_subscription_policy] def set_topic_policy(project, topic_name): """Sets the IAM policy for a topic.""" + # [START pubsub_set_topic_policy] client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_name) @@ -72,10 +77,12 @@ def set_topic_policy(project, topic_name): print('IAM policy for topic {} set: {}'.format( topic_name, policy)) + # [END pubsub_set_topic_policy] def set_subscription_policy(project, subscription_name): """Sets the IAM policy for a topic.""" + # [START pubsub_set_subscription_policy] client = pubsub_v1.SubscriberClient() subscription_path = client.subscription_path(project, subscription_name) @@ -96,10 +103,12 @@ def set_subscription_policy(project, subscription_name): print('IAM policy for subscription {} set: {}'.format( subscription_name, policy)) + # [END pubsub_set_subscription_policy] def check_topic_permissions(project, topic_name): """Checks to which permissions are available on the given topic.""" + # [START pubsub_test_topic_permissions] client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_name) @@ -113,10 +122,12 @@ def check_topic_permissions(project, topic_name): print('Allowed permissions for topic {}: {}'.format( topic_path, allowed_permissions)) + # [END pubsub_test_topic_permissions] def check_subscription_permissions(project, subscription_name): """Checks to which permissions are available on the given subscription.""" + # [START pubsub_test_subscription_permissions] client = pubsub_v1.SubscriberClient() 
subscription_path = client.subscription_path(project, subscription_name) @@ -130,6 +141,7 @@ def check_subscription_permissions(project, subscription_name): print('Allowed permissions for subscription {}: {}'.format( subscription_path, allowed_permissions)) + # [END pubsub_test_subscription_permissions] if __name__ == '__main__': diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 76d0589931cb..f2f5e3ac6084 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -28,35 +28,42 @@ def list_topics(project): """Lists all Pub/Sub topics in the given project.""" + # [START pubsub_list_topics] publisher = pubsub_v1.PublisherClient() project_path = publisher.project_path(project) for topic in publisher.list_topics(project_path): print(topic) + # [END pubsub_list_topics] def create_topic(project, topic_name): """Create a new Pub/Sub topic.""" + # [START pubsub_create_topic] publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) topic = publisher.create_topic(topic_path) print('Topic created: {}'.format(topic)) + # [END pubsub_create_topic] def delete_topic(project, topic_name): """Deletes an existing Pub/Sub topic.""" + # [START pubsub_delete_topic] publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) publisher.delete_topic(topic_path) print('Topic deleted: {}'.format(topic_path)) + # [END pubsub_delete_topic] def publish_messages(project, topic_name): """Publishes multiple messages to a Pub/Sub topic.""" + # [START pubsub_quickstart_publisher] publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) @@ -67,11 +74,13 @@ def publish_messages(project, topic_name): publisher.publish(topic_path, data=data) print('Published messages.') + # [END pubsub_quickstart_publisher] def 
publish_messages_with_custom_attributes(project, topic_name): """Publishes multiple messages with custom attributes to a Pub/Sub topic.""" + # [START pubsub_publish_custom_attributes] publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) @@ -84,11 +93,13 @@ def publish_messages_with_custom_attributes(project, topic_name): topic_path, data, origin='python-sample', username='gcp') print('Published messages with custom attributes.') + # [END pubsub_publish_custom_attributes] def publish_messages_with_futures(project, topic_name): """Publishes multiple messages to a Pub/Sub topic and prints their message IDs.""" + # [START pubsub_publisher_concurrency_control] publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) @@ -107,10 +118,12 @@ def publish_messages_with_futures(project, topic_name): for future in futures: # result() blocks until the message is published. print(future.result()) + # [END pubsub_publisher_concurrency_control] def publish_messages_with_batch_settings(project, topic_name): """Publishes multiple messages to a Pub/Sub topic with batch settings.""" + # [START pubsub_publisher_batch_settings] # Configure the batch to publish once there is one kilobyte of data or # 1 second has passed. 
batch_settings = pubsub_v1.types.BatchSettings( @@ -127,6 +140,7 @@ def publish_messages_with_batch_settings(project, topic_name): publisher.publish(topic_path, data=data) print('Published messages.') + # [END pubsub_publisher_batch_settings] if __name__ == '__main__': diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py index c9823d789928..1ff2efed3a32 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -16,7 +16,7 @@ def run_quickstart(): - # [START pubsub_quickstart] + # [START pubsub_quickstart_create_topic] # Imports the Google Cloud client library from google.cloud import pubsub_v1 @@ -32,7 +32,7 @@ def run_quickstart(): topic = publisher.create_topic(topic_path) print('Topic created: {}'.format(topic)) - # [END pubsub_quickstart] + # [END pubsub_quickstart_create_topic] if __name__ == '__main__': diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index f68c06c95f55..34f2301d85ba 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -29,24 +29,29 @@ def list_subscriptions_in_topic(project, topic_name): """Lists all subscriptions for a given topic.""" + # [START pubsub_list_topic_subscriptions] subscriber = pubsub_v1.PublisherClient() topic_path = subscriber.topic_path(project, topic_name) for subscription in subscriber.list_topic_subscriptions(topic_path): print(subscription) + # [END pubsub_list_topic_subscriptions] def list_subscriptions_in_project(project): """Lists all subscriptions in the current project.""" + # [START pubsub_list_subscriptions] subscriber = pubsub_v1.SubscriberClient() project_path = subscriber.project_path(project) for subscription in subscriber.list_subscriptions(project_path): 
print(subscription.name) + # [END pubsub_list_subscriptions] def create_subscription(project, topic_name, subscription_name): """Create a new pull subscription on the given topic.""" + # [START pubsub_create_pull_subscription] subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project, topic_name) subscription_path = subscriber.subscription_path( @@ -56,16 +61,16 @@ def create_subscription(project, topic_name, subscription_name): subscription_path, topic_path) print('Subscription created: {}'.format(subscription)) + # [END pubsub_create_pull_subscription] def create_push_subscription(project, topic_name, subscription_name, endpoint): - """Create a new push subscription on the given topic. - For example, endpoint is - "https://my-test-project.appspot.com/push". - """ + """Create a new push subscription on the given topic.""" + # [START pubsub_create_push_subscription] + # endpoint = "https://my-test-project.appspot.com/push" subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project, topic_name) subscription_path = subscriber.subscription_path( @@ -79,10 +84,12 @@ def create_push_subscription(project, print('Push subscription created: {}'.format(subscription)) print('Endpoint for subscription is: {}'.format(endpoint)) + # [END pubsub_create_push_subscription] def delete_subscription(project, subscription_name): """Deletes an existing Pub/Sub topic.""" + # [START pubsub_delete_subscription] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -90,15 +97,17 @@ def delete_subscription(project, subscription_name): subscriber.delete_subscription(subscription_path) print('Subscription deleted: {}'.format(subscription_path)) + # [END pubsub_delete_subscription] def update_subscription(project, subscription_name, endpoint): """ Updates an existing Pub/Sub subscription's push endpoint URL. 
Note that certain properties of a subscription, such as - its topic, are not modifiable. For example, endpoint is - "https://my-test-project.appspot.com/push". + its topic, are not modifiable. """ + # [START pubsub_update_push_configuration] + # endpoint = "https://my-test-project.appspot.com/push" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -122,10 +131,13 @@ def update_subscription(project, subscription_name, endpoint): print('Subscription updated: {}'.format(subscription_path)) print('New endpoint for subscription is: {}'.format( result.push_config)) + # [END pubsub_update_push_configuration] def receive_messages(project, subscription_name): """Receives messages from a pull subscription.""" + # [START pubsub_subscriber_async_pull] + # [START pubsub_quickstart_subscriber] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -141,10 +153,13 @@ def callback(message): print('Listening for messages on {}'.format(subscription_path)) while True: time.sleep(60) + # [END pubsub_subscriber_async_pull] + # [END pubsub_quickstart_subscriber] def receive_messages_with_custom_attributes(project, subscription_name): """Receives messages from a pull subscription.""" + # [START pubsub_subscriber_sync_pull_custom_attributes] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -165,10 +180,12 @@ def callback(message): print('Listening for messages on {}'.format(subscription_path)) while True: time.sleep(60) + # [END pubsub_subscriber_sync_pull_custom_attributes] def receive_messages_with_flow_control(project, subscription_name): """Receives messages from a pull subscription with flow control.""" + # [START pubsub_subscriber_flow_settings] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ 
-187,10 +204,12 @@ def callback(message): print('Listening for messages on {}'.format(subscription_path)) while True: time.sleep(60) + # [END pubsub_subscriber_flow_settings] def listen_for_errors(project, subscription_name): """Receives messages and catches errors from a pull subscription.""" + # [START pubsub_subscriber_error_listener] subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -210,6 +229,7 @@ def callback(message): 'Listening for messages on {} threw an Exception: {}.'.format( subscription_name, e)) raise + # [END pubsub_subscriber_error_listener] if __name__ == '__main__': From b0935c769eb03282df23af22e1432fdf121cfa91 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Tue, 22 May 2018 09:14:19 -0700 Subject: [PATCH 0528/1197] Pubsub: Add missing region tag [(#1498)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1498) --- packages/google-cloud-pubsub/samples/snippets/publisher.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index f2f5e3ac6084..f77f6ba787e2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -64,6 +64,7 @@ def delete_topic(project, topic_name): def publish_messages(project, topic_name): """Publishes multiple messages to a Pub/Sub topic.""" # [START pubsub_quickstart_publisher] + # [START pubsub_publish] publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) @@ -75,6 +76,7 @@ def publish_messages(project, topic_name): print('Published messages.') # [END pubsub_quickstart_publisher] + # [END pubsub_publish] def publish_messages_with_custom_attributes(project, topic_name): From ae2ecc7c63b08ba1c7123cdd1d480d61ce0bf567 Mon Sep 17 00:00:00 2001 From: chenyumic Date: Thu, 21 Jun 2018 14:20:44 -0700 
Subject: [PATCH 0529/1197] Add the Pub/Sub handle_publisher_error sample [(#1440)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1440) * Add the Pub/Sub handle_publisher_error sample * Update requirements.txt * Update publisher.py * Update publisher.py * Added region tag --- .../samples/snippets/publisher.py | 33 +++++++++++++++++++ .../samples/snippets/requirements.txt | 3 +- 2 files changed, 35 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index f77f6ba787e2..c24dddca9972 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -22,6 +22,7 @@ """ import argparse +import concurrent.futures from google.cloud import pubsub_v1 @@ -123,6 +124,38 @@ def publish_messages_with_futures(project, topic_name): # [END pubsub_publisher_concurrency_control] +def publish_messages_with_error_handler(project, topic_name): + """Publishes multiple messages to a Pub/Sub topic with an error handler.""" + # [START pubsub_publish_messages_error_handler] + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) + + # When you publish a message, the client returns a Future. This Future + # can be used to track if an error has occurred. + futures = [] + + def callback(f): + exc = f.exception() + if exc: + print('Publishing message on {} threw an Exception {}.'.format( + topic_name, exc)) + + for n in range(1, 10): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + message_future = publisher.publish(topic_path, data=data) + message_future.add_done_callback(callback) + futures.append(message_future) + + # We must keep the main thread from exiting to allow it to process + # messages in the background. 
+ concurrent.futures.wait(futures) + + print('Published messages.') + # [END pubsub_publish_messages_error_handler] + + def publish_messages_with_batch_settings(project, topic_name): """Publishes multiple messages to a Pub/Sub topic with batch settings.""" # [START pubsub_publisher_batch_settings] diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index c74eb4e3ccc1..81f06995b8a2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1,2 @@ -google-cloud-pubsub==0.33.0 +google-cloud-pubsub==0.32.1 +futures==3.1.1; python_version < '3' From a7ab542f09b10a381677d8184037b9759e244946 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 19 Jul 2018 13:31:04 -0700 Subject: [PATCH 0530/1197] Modified publisher with error handling [(#1568)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1568) --- .../samples/snippets/publisher.py | 31 +++++++++++-------- .../samples/snippets/publisher_test.py | 7 +++++ .../samples/snippets/requirements.txt | 3 +- 3 files changed, 26 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index c24dddca9972..96caba9f4735 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -22,7 +22,7 @@ """ import argparse -import concurrent.futures +import time from google.cloud import pubsub_v1 @@ -130,29 +130,27 @@ def publish_messages_with_error_handler(project, topic_name): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project, topic_name) - # When you publish a message, the client returns a Future. This Future - # can be used to track if an error has occurred. 
- futures = [] - - def callback(f): - exc = f.exception() - if exc: + def callback(message_future): + if message_future.exception(): print('Publishing message on {} threw an Exception {}.'.format( - topic_name, exc)) + topic_name, message_future.exception())) + else: + print(message_future.result()) for n in range(1, 10): data = u'Message number {}'.format(n) # Data must be a bytestring data = data.encode('utf-8') + # When you publish a message, the client returns a Future. message_future = publisher.publish(topic_path, data=data) message_future.add_done_callback(callback) - futures.append(message_future) + + print('Published message IDs:') # We must keep the main thread from exiting to allow it to process # messages in the background. - concurrent.futures.wait(futures) - - print('Published messages.') + while True: + time.sleep(60) # [END pubsub_publish_messages_error_handler] @@ -208,6 +206,11 @@ def publish_messages_with_batch_settings(project, topic_name): help=publish_messages_with_futures.__doc__) publish_with_futures_parser.add_argument('topic_name') + publish_with_error_handler_parser = subparsers.add_parser( + 'publish-with-error-handler', + help=publish_messages_with_error_handler.__doc__) + publish_with_error_handler_parser.add_argument('topic_name') + publish_with_batch_settings_parser = subparsers.add_parser( 'publish-with-batch-settings', help=publish_messages_with_batch_settings.__doc__) @@ -227,5 +230,7 @@ def publish_messages_with_batch_settings(project, topic_name): publish_messages_with_custom_attributes(args.project, args.topic_name) elif args.command == 'publish-with-futures': publish_messages_with_futures(args.project, args.topic_name) + elif args.command == 'publish-with-error-handler': + publish_messages_with_error_handler(args.project, args.topic_name) elif args.command == 'publish-with-batch-settings': publish_messages_with_batch_settings(args.project, args.topic_name) diff --git 
a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index a008a06100ec..2eda09c6c185 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -95,6 +95,13 @@ def test_publish_with_batch_settings(topic, capsys): assert 'Published' in out +def test_publish_with_error_handler(topic, capsys): + publisher.publish_messages_with_error_handler(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + assert 'Published' in out + + def test_publish_with_futures(topic, capsys): publisher.publish_messages_with_futures(PROJECT, TOPIC) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 81f06995b8a2..c74eb4e3ccc1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1 @@ -google-cloud-pubsub==0.32.1 -futures==3.1.1; python_version < '3' +google-cloud-pubsub==0.33.0 From 8692bdd53a9d3d8f77f21dd0953428ad126bb2f9 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 10 Aug 2018 16:01:02 -0700 Subject: [PATCH 0531/1197] Updated google-cloud-pubsub to version 0.35 [(#1624)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1624) * Updated library version * Rewrote test for publish with error handler * Custom _publish function in test prints no 'Attributes' --- .../samples/snippets/publisher_test.py | 21 ++++++++++++++++++- .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 2 +- .../samples/snippets/subscriber_test.py | 1 - 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 2eda09c6c185..cdb4d0e0e766 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -13,9 +13,11 @@ # limitations under the License. import os +import time from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub_v1 +import mock import pytest import publisher @@ -43,6 +45,19 @@ def topic(client): yield topic_path +def _make_sleep_patch(): + real_sleep = time.sleep + + def new_sleep(period): + if period == 60: + real_sleep(5) + raise RuntimeError('sigil') + else: + real_sleep(period) + + return mock.patch('time.sleep', new=new_sleep) + + def test_list(client, topic, capsys): @eventually_consistent.call def _(): @@ -96,7 +111,11 @@ def test_publish_with_batch_settings(topic, capsys): def test_publish_with_error_handler(topic, capsys): - publisher.publish_messages_with_error_handler(PROJECT, TOPIC) + + with _make_sleep_patch(): + with pytest.raises(RuntimeError, match='sigil'): + publisher.publish_messages_with_error_handler( + PROJECT, TOPIC) out, _ = capsys.readouterr() assert 'Published' in out diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index c74eb4e3ccc1..23ed91dd2cc9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.33.0 +google-cloud-pubsub==0.35.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 34f2301d85ba..46bb5118843b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -191,7 +191,7 @@ def receive_messages_with_flow_control(project, subscription_name): project, subscription_name) def callback(message): - print('Received message: {}'.format(message)) + print('Received 
message: {}'.format(message.data)) message.ack() # Limit the subscriber to only have ten outstanding messages at a time. diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index f04373ae8021..adbc44e84258 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -171,7 +171,6 @@ def test_receive_with_custom_attributes( out, _ = capsys.readouterr() assert 'Test message' in out - assert 'Attributes' in out assert 'origin' in out assert 'python-sample' in out From e1b3dc2be012b38e2b17a569cd01d777759893d4 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 15 Aug 2018 15:59:01 -0700 Subject: [PATCH 0532/1197] Added timeout in error handling [(#1636)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1636) --- packages/google-cloud-pubsub/samples/snippets/publisher.py | 3 ++- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 96caba9f4735..a577abc63721 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -131,7 +131,8 @@ def publish_messages_with_error_handler(project, topic_name): topic_path = publisher.topic_path(project, topic_name) def callback(message_future): - if message_future.exception(): + # When timeout is unspecified, the exception method waits indefinitely. 
+ if message_future.exception(timeout=30): print('Publishing message on {} threw an Exception {}.'.format( topic_name, message_future.exception())) else: diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 46bb5118843b..51fa96b86759 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -223,12 +223,12 @@ def callback(message): # Blocks the thread while messages are coming in through the stream. Any # exceptions that crop up on the thread will be set on the future. try: - subscription.future.result() + # When timeout is unspecified, the result method waits indefinitely. + subscription.future.result(timeout=30) except Exception as e: print( 'Listening for messages on {} threw an Exception: {}.'.format( subscription_name, e)) - raise # [END pubsub_subscriber_error_listener] From a64198939606fe181099f939f3c9c31f74a42bf4 Mon Sep 17 00:00:00 2001 From: DPE bot Date: Tue, 28 Aug 2018 11:17:45 -0700 Subject: [PATCH 0533/1197] Auto-update dependencies. [(#1658)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1658) * Auto-update dependencies. * Rollback appengine/standard/bigquery/. * Rollback appengine/standard/iap/. * Rollback bigtable/metricscaler. * Rolledback appengine/flexible/datastore. * Rollback dataproc/ * Rollback jobs/api_client * Rollback vision/cloud-client. * Rollback functions/ocr/app. * Rollback iot/api-client/end_to_end_example. * Rollback storage/cloud-client. * Rollback kms/api-client. * Rollback dlp/ * Rollback bigquery/cloud-client. * Rollback iot/api-client/manager. * Rollback appengine/flexible/cloudsql_postgresql. 
--- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 23ed91dd2cc9..936a9f0ed2f2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.35.0 +google-cloud-pubsub==0.37.2 From 119dfd95cffb14599f8c4300e90fadfec25ec5b5 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 30 Aug 2018 14:02:11 -0700 Subject: [PATCH 0534/1197] Added sample for Pub/Sub synchronous pull subscriber [(#1673)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1673) * Added sample for synchronous pull --- .../samples/snippets/subscriber.py | 45 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 15 +++++++ 2 files changed, 60 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 51fa96b86759..83e5700dd950 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -90,6 +90,8 @@ def create_push_subscription(project, def delete_subscription(project, subscription_name): """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -138,6 +140,8 @@ def receive_messages(project, subscription_name): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] # [START pubsub_quickstart_subscriber] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" 
subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -160,6 +164,8 @@ def callback(message): def receive_messages_with_custom_attributes(project, subscription_name): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_sync_pull_custom_attributes] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -186,6 +192,8 @@ def callback(message): def receive_messages_with_flow_control(project, subscription_name): """Receives messages from a pull subscription with flow control.""" # [START pubsub_subscriber_flow_settings] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -207,9 +215,38 @@ def callback(message): # [END pubsub_subscriber_flow_settings] +def receive_messages_synchronously(project, subscription_name): + """Pulling messages synchronously.""" + # [START pubsub_subscriber_sync_pull] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) + + # Builds a pull request with a specific number of messages to return. + # `return_immediately` is set to False so that the system waits (for a + # bounded amount of time) until at lease one message is available. 
+ response = subscriber.pull( + subscription_path, + max_messages=3, + return_immediately=False) + + ack_ids = [] + for received_message in response.received_messages: + print("Received: {}".format(received_message.message.data)) + ack_ids.append(received_message.ack_id) + + # Acknowledges the received messages so they will not be sent again. + subscriber.acknowledge(subscription_path, ack_ids) + # [END pubsub_subscriber_sync_pull] + + def listen_for_errors(project, subscription_name): """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( project, subscription_name) @@ -281,6 +318,11 @@ def callback(message): help=receive_messages_with_flow_control.__doc__) receive_with_flow_control_parser.add_argument('subscription_name') + receive_messages_synchronously_parser = subparsers.add_parser( + 'receive-synchronously', + help=receive_messages_synchronously.__doc__) + receive_messages_synchronously_parser.add_argument('subscription_name') + listen_for_errors_parser = subparsers.add_parser( 'listen_for_errors', help=listen_for_errors.__doc__) listen_for_errors_parser.add_argument('subscription_name') @@ -314,5 +356,8 @@ def callback(message): elif args.command == 'receive-flow-control': receive_messages_with_flow_control( args.project, args.subscription_name) + elif args.command == 'receive-synchronously': + receive_messages_synchronously( + args.project, args.subscription_name) elif args.command == 'listen_for_errors': listen_for_errors(args.project, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index adbc44e84258..728f971f2e71 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py 
+++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -188,3 +188,18 @@ def test_receive_with_flow_control( assert 'Listening' in out assert subscription in out assert 'Message 1' in out + + +def test_receive_synchronously( + publisher_client, topic, subscription, capsys): + _publish_messages(publisher_client, topic) + + with _make_sleep_patch(): + with pytest.raises(RuntimeError, match='sigil'): + subscriber.receive_messages_with_flow_control( + PROJECT, SUBSCRIPTION) + + out, _ = capsys.readouterr() + assert 'Message 1' in out + assert 'Message 2' in out + assert 'Message 3' in out From ccfe1ae261d456b636e1fa9c2761bfc5402488ca Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 31 Aug 2018 14:49:04 -0700 Subject: [PATCH 0535/1197] Updated variable name [(#1680)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1680) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 83e5700dd950..38f1e527f542 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -30,10 +30,10 @@ def list_subscriptions_in_topic(project, topic_name): """Lists all subscriptions for a given topic.""" # [START pubsub_list_topic_subscriptions] - subscriber = pubsub_v1.PublisherClient() - topic_path = subscriber.topic_path(project, topic_name) + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project, topic_name) - for subscription in subscriber.list_topic_subscriptions(topic_path): + for subscription in publisher.list_topic_subscriptions(topic_path): print(subscription) # [END pubsub_list_topic_subscriptions] From 65e1acdce98752d90fa855cdbd786e2fe76356d9 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 6 Sep 2018 17:28:27 -0700 Subject: 
[PATCH 0536/1197] Fixed return object from `subscriber.subscribe()` [(#1685)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1685) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 38f1e527f542..e31560722077 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -255,13 +255,13 @@ def callback(message): print('Received message: {}'.format(message)) message.ack() - subscription = subscriber.subscribe(subscription_path, callback=callback) + future = subscriber.subscribe(subscription_path, callback=callback) # Blocks the thread while messages are coming in through the stream. Any # exceptions that crop up on the thread will be set on the future. try: # When timeout is unspecified, the result method waits indefinitely. 
- subscription.future.result(timeout=30) + future.result(timeout=30) except Exception as e: print( 'Listening for messages on {} threw an Exception: {}.'.format( From 83ac424afe4d352d29f305f8fe5446d972459e7c Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 14 Sep 2018 16:11:33 -0700 Subject: [PATCH 0537/1197] Pub/Sub: synchronous pull with lease management [(#1701)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1701) * Synchronous pull with lease management * Updated library version --- .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 95 ++++++++++++++++--- .../samples/snippets/subscriber_test.py | 68 ++++++++++--- 3 files changed, 137 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 936a9f0ed2f2..81a62427cc0b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.37.2 +google-cloud-pubsub==0.38.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index e31560722077..3915a82ef9c8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -23,6 +23,9 @@ import argparse import time +import logging +import random +import multiprocessing from google.cloud import pubsub_v1 @@ -215,7 +218,7 @@ def callback(message): # [END pubsub_subscriber_flow_settings] -def receive_messages_synchronously(project, subscription_name): +def synchronous_pull(project, subscription_name): """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] # project = "Your Google Cloud Project ID" @@ -224,13 +227,10 @@ def receive_messages_synchronously(project, subscription_name): subscription_path = 
subscriber.subscription_path( project, subscription_name) - # Builds a pull request with a specific number of messages to return. - # `return_immediately` is set to False so that the system waits (for a - # bounded amount of time) until at lease one message is available. - response = subscriber.pull( - subscription_path, - max_messages=3, - return_immediately=False) + NUM_MESSAGES=3 + + # The subscriber pulls a specific number of messages. + response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) ack_ids = [] for received_message in response.received_messages: @@ -239,9 +239,72 @@ def receive_messages_synchronously(project, subscription_name): # Acknowledges the received messages so they will not be sent again. subscriber.acknowledge(subscription_path, ack_ids) + + print("Received and acknowledged {} messages. Done.".format(NUM_MESSAGES)) # [END pubsub_subscriber_sync_pull] +def synchronous_pull_with_lease_management(project, subscription_name): + """Pulling messages synchronously with lease management""" + # [START pubsub_subscriber_sync_pull_with_lease] + # project = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path( + project, subscription_name) + + NUM_MESSAGES=2 + ACK_DEADLINE=30 + SLEEP_TIME=10 + + # The subscriber pulls a specific number of messages. + response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + + multiprocessing.log_to_stderr() + logger = multiprocessing.get_logger() + logger.setLevel(logging.INFO) + + def worker(msg): + """Simulates a long-running process.""" + RUN_TIME = random.randint(1,60) + logger.info('{}: Running {} for {}s'.format( + time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME)) + time.sleep(RUN_TIME) + + # `processes` stores process as key and ack id and message as values. 
+    processes = dict()
+    for message in response.received_messages:
+        process = multiprocessing.Process(target=worker, args=(message,))
+        processes[process] = (message.ack_id, message.message.data)
+        process.start()
+
+    while processes:
+        for process, (ack_id, msg_data) in processes.items():
+            # If the process is still running, reset the ack deadline as
+            # specified by ACK_DEADLINE once every while as specified
+            # by SLEEP_TIME.
+            if process.is_alive():
+                # `ack_deadline_seconds` must be between 10 and 600.
+                subscriber.modify_ack_deadline(subscription_path,
+                    [ack_id], ack_deadline_seconds=ACK_DEADLINE)
+                logger.info('{}: Reset ack deadline for {} for {}s'.format(
+                    time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE))
+
+            # If the process is finished, acknowledges using `ack_id`.
+            else:
+                subscriber.acknowledge(subscription_path, [ack_id])
+                logger.info("{}: Acknowledged {}".format(
+                    time.strftime("%X", time.gmtime()), msg_data))
+                processes.pop(process)
+
+        # If there are still processes running, sleeps the thread.
+        if processes:
+            time.sleep(SLEEP_TIME)
+
+    print("Received and acknowledged {} messages. 
Done.".format(NUM_MESSAGES)) + # [END pubsub_subscriber_sync_pull_with_lease] + + def listen_for_errors(project, subscription_name): """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] @@ -318,10 +381,15 @@ def callback(message): help=receive_messages_with_flow_control.__doc__) receive_with_flow_control_parser.add_argument('subscription_name') - receive_messages_synchronously_parser = subparsers.add_parser( + synchronous_pull_parser = subparsers.add_parser( 'receive-synchronously', - help=receive_messages_synchronously.__doc__) - receive_messages_synchronously_parser.add_argument('subscription_name') + help=synchronous_pull.__doc__) + synchronous_pull_parser.add_argument('subscription_name') + + synchronous_pull_with_lease_management_parser = subparsers.add_parser( + 'receive-synchronously-with-lease', + help=synchronous_pull_with_lease_management.__doc__) + synchronous_pull_with_lease_management_parser.add_argument('subscription_name') listen_for_errors_parser = subparsers.add_parser( 'listen_for_errors', help=listen_for_errors.__doc__) @@ -357,7 +425,10 @@ def callback(message): receive_messages_with_flow_control( args.project, args.subscription_name) elif args.command == 'receive-synchronously': - receive_messages_synchronously( + synchronous_pull( + args.project, args.subscription_name) + elif args.command == 'receive-synchronously-with-lease': + synchronous_pull_with_lease_management( args.project, args.subscription_name) elif args.command == 'listen_for_errors': listen_for_errors(args.project, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 728f971f2e71..9f554398ef45 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -25,6 +25,8 @@ PROJECT = os.environ['GCLOUD_PROJECT'] TOPIC 
= 'subscription-test-topic' SUBSCRIPTION = 'subscription-test-subscription' +SUBSCRIPTION_SYNC1 = 'subscription-test-subscription-sync1' +SUBSCRIPTION_SYNC2 = 'subscription-test-subscription-sync2' ENDPOINT = 'https://{}.appspot.com/push'.format(PROJECT) @@ -67,6 +69,36 @@ def subscription(subscriber_client, topic): yield subscription_path +@pytest.fixture +def subscription_sync1(subscriber_client, topic): + subscription_sync_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION_SYNC1) + + try: + subscriber_client.delete_subscription(subscription_sync_path) + except Exception: + pass + + subscriber_client.create_subscription(subscription_sync_path, topic=topic) + + yield subscription_sync_path + + +@pytest.fixture +def subscription_sync2(subscriber_client, topic): + subscription_sync_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION_SYNC2) + + try: + subscriber_client.delete_subscription(subscription_sync_path) + except Exception: + pass + + subscriber_client.create_subscription(subscription_sync_path, topic=topic) + + yield subscription_sync_path + + def test_list_in_topic(subscription, capsys): @eventually_consistent.call def _(): @@ -160,6 +192,27 @@ def test_receive(publisher_client, topic, subscription, capsys): assert 'Message 1' in out +def test_receive_synchronously( + publisher_client, topic, subscription_sync1, capsys): + _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC1) + + out, _ = capsys.readouterr() + assert 'Done.' in out + + +def test_receive_synchronously_with_lease( + publisher_client, topic, subscription_sync2, capsys): + _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull_with_lease_management( + PROJECT, SUBSCRIPTION_SYNC2) + + out, _ = capsys.readouterr() + assert 'Done.' 
in out + + def test_receive_with_custom_attributes( publisher_client, topic, subscription, capsys): _publish_messages_with_custom_attributes(publisher_client, topic) @@ -188,18 +241,3 @@ def test_receive_with_flow_control( assert 'Listening' in out assert subscription in out assert 'Message 1' in out - - -def test_receive_synchronously( - publisher_client, topic, subscription, capsys): - _publish_messages(publisher_client, topic) - - with _make_sleep_patch(): - with pytest.raises(RuntimeError, match='sigil'): - subscriber.receive_messages_with_flow_control( - PROJECT, SUBSCRIPTION) - - out, _ = capsys.readouterr() - assert 'Message 1' in out - assert 'Message 2' in out - assert 'Message 3' in out From d56a44d9a01da698584d4aa8a16db0a6a1d3f17b Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 12 Oct 2018 10:39:44 -0700 Subject: [PATCH 0538/1197] Pub/Sub: moved import statements inside region tags [(#1753)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1753) * Moved import stataments inside region tags * Explained topic and subscription path methods --- .../samples/snippets/publisher.py | 105 ++++++--- .../samples/snippets/subscriber.py | 206 +++++++++++------- 2 files changed, 207 insertions(+), 104 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index a577abc63721..b7e574e04978 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -22,27 +22,33 @@ """ import argparse -import time -from google.cloud import pubsub_v1 - -def list_topics(project): +def list_topics(project_id): """Lists all Pub/Sub topics in the given project.""" # [START pubsub_list_topics] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + publisher = pubsub_v1.PublisherClient() - project_path = publisher.project_path(project) + project_path = 
publisher.project_path(project_id) for topic in publisher.list_topics(project_path): print(topic) # [END pubsub_list_topics] -def create_topic(project, topic_name): +def create_topic(project_id, topic_name): """Create a new Pub/Sub topic.""" # [START pubsub_create_topic] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) topic = publisher.create_topic(topic_path) @@ -50,11 +56,16 @@ def create_topic(project, topic_name): # [END pubsub_create_topic] -def delete_topic(project, topic_name): +def delete_topic(project_id, topic_name): """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_topic] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) publisher.delete_topic(topic_path) @@ -62,30 +73,44 @@ def delete_topic(project, topic_name): # [END pubsub_delete_topic] -def publish_messages(project, topic_name): +def publish_messages(project_id, topic_name): """Publishes multiple messages to a Pub/Sub topic.""" # [START pubsub_quickstart_publisher] # [START pubsub_publish] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_name}` + topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): data = u'Message number {}'.format(n) # Data must be a bytestring 
data = data.encode('utf-8') - publisher.publish(topic_path, data=data) + # When you publish a message, the client returns a future. + future = publisher.publish(topic_path, data=data) + print('Published {} of message ID {}.'.format(data, future.result())) print('Published messages.') # [END pubsub_quickstart_publisher] # [END pubsub_publish] -def publish_messages_with_custom_attributes(project, topic_name): +def publish_messages_with_custom_attributes(project_id, topic_name): """Publishes multiple messages with custom attributes to a Pub/Sub topic.""" # [START pubsub_publish_custom_attributes] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): data = u'Message number {}'.format(n) @@ -99,12 +124,17 @@ def publish_messages_with_custom_attributes(project, topic_name): # [END pubsub_publish_custom_attributes] -def publish_messages_with_futures(project, topic_name): +def publish_messages_with_futures(project_id, topic_name): """Publishes multiple messages to a Pub/Sub topic and prints their message IDs.""" # [START pubsub_publisher_concurrency_control] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) # When you publish a message, the client returns a Future. This Future # can be used to track when the message is published. 
@@ -124,11 +154,18 @@ def publish_messages_with_futures(project, topic_name): # [END pubsub_publisher_concurrency_control] -def publish_messages_with_error_handler(project, topic_name): +def publish_messages_with_error_handler(project_id, topic_name): """Publishes multiple messages to a Pub/Sub topic with an error handler.""" # [START pubsub_publish_messages_error_handler] + import time + + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) def callback(message_future): # When timeout is unspecified, the exception method waits indefinitely. @@ -155,17 +192,22 @@ def callback(message_future): # [END pubsub_publish_messages_error_handler] -def publish_messages_with_batch_settings(project, topic_name): +def publish_messages_with_batch_settings(project_id, topic_name): """Publishes multiple messages to a Pub/Sub topic with batch settings.""" # [START pubsub_publisher_batch_settings] - # Configure the batch to publish once there is one kilobyte of data or - # 1 second has passed. + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + + # Configure the batch to publish as soon as there is one kilobyte + # of data or one second has passed. 
batch_settings = pubsub_v1.types.BatchSettings( max_bytes=1024, # One kilobyte max_latency=1, # One second ) publisher = pubsub_v1.PublisherClient(batch_settings) - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): data = u'Message number {}'.format(n) @@ -182,7 +224,7 @@ def publish_messages_with_batch_settings(project, topic_name): description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - parser.add_argument('project', help='Your Google Cloud project ID') + parser.add_argument('project_id', help='Your Google Cloud project ID') subparsers = parser.add_subparsers(dest='command') subparsers.add_parser('list', help=list_topics.__doc__) @@ -220,18 +262,19 @@ def publish_messages_with_batch_settings(project, topic_name): args = parser.parse_args() if args.command == 'list': - list_topics(args.project) + list_topics(args.project_id) elif args.command == 'create': - create_topic(args.project, args.topic_name) + create_topic(args.project_id, args.topic_name) elif args.command == 'delete': - delete_topic(args.project, args.topic_name) + delete_topic(args.project_id, args.topic_name) elif args.command == 'publish': - publish_messages(args.project, args.topic_name) + publish_messages(args.project_id, args.topic_name) elif args.command == 'publish-with-custom-attributes': - publish_messages_with_custom_attributes(args.project, args.topic_name) + publish_messages_with_custom_attributes( + args.project_id, args.topic_name) elif args.command == 'publish-with-futures': - publish_messages_with_futures(args.project, args.topic_name) + publish_messages_with_futures(args.project_id, args.topic_name) elif args.command == 'publish-with-error-handler': - publish_messages_with_error_handler(args.project, args.topic_name) + publish_messages_with_error_handler(args.project_id, args.topic_name) elif args.command == 'publish-with-batch-settings': - 
publish_messages_with_batch_settings(args.project, args.topic_name) + publish_messages_with_batch_settings(args.project_id, args.topic_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 3915a82ef9c8..5802218b4998 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -22,43 +22,52 @@ """ import argparse -import time -import logging -import random -import multiprocessing -from google.cloud import pubsub_v1 - -def list_subscriptions_in_topic(project, topic_name): +def list_subscriptions_in_topic(project_id, topic_name): """Lists all subscriptions for a given topic.""" # [START pubsub_list_topic_subscriptions] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project, topic_name) + topic_path = publisher.topic_path(project_id, topic_name) for subscription in publisher.list_topic_subscriptions(topic_path): print(subscription) # [END pubsub_list_topic_subscriptions] -def list_subscriptions_in_project(project): +def list_subscriptions_in_project(project_id): """Lists all subscriptions in the current project.""" # [START pubsub_list_subscriptions] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + subscriber = pubsub_v1.SubscriberClient() - project_path = subscriber.project_path(project) + project_path = subscriber.project_path(project_id) for subscription in subscriber.list_subscriptions(project_path): print(subscription.name) # [END pubsub_list_subscriptions] -def create_subscription(project, topic_name, subscription_name): +def create_subscription(project_id, topic_name, subscription_name): """Create a new pull subscription on the given topic.""" # [START 
pubsub_create_pull_subscription] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project, topic_name) + topic_path = subscriber.topic_path(project_id, topic_name) subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) subscription = subscriber.create_subscription( subscription_path, topic_path) @@ -67,17 +76,23 @@ def create_subscription(project, topic_name, subscription_name): # [END pubsub_create_pull_subscription] -def create_push_subscription(project, +def create_push_subscription(project_id, topic_name, subscription_name, endpoint): """Create a new push subscription on the given topic.""" # [START pubsub_create_push_subscription] - # endpoint = "https://my-test-project.appspot.com/push" + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO endpoint = "https://my-test-project.appspot.com/push" + subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project, topic_name) + topic_path = subscriber.topic_path(project_id, topic_name) subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) push_config = pubsub_v1.types.PushConfig( push_endpoint=endpoint) @@ -90,14 +105,17 @@ def create_push_subscription(project, # [END pubsub_create_push_subscription] -def delete_subscription(project, subscription_name): +def delete_subscription(project_id, subscription_name): """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + from 
google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) subscriber.delete_subscription(subscription_path) @@ -105,17 +123,23 @@ def delete_subscription(project, subscription_name): # [END pubsub_delete_subscription] -def update_subscription(project, subscription_name, endpoint): +def update_subscription(project_id, subscription_name, endpoint): """ Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a subscription, such as its topic, are not modifiable. """ # [START pubsub_update_push_configuration] - # endpoint = "https://my-test-project.appspot.com/push" + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO endpoint = "https://my-test-project.appspot.com/push" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) push_config = pubsub_v1.types.PushConfig( push_endpoint=endpoint) @@ -139,15 +163,22 @@ def update_subscription(project, subscription_name, endpoint): # [END pubsub_update_push_configuration] -def receive_messages(project, subscription_name): +def receive_messages(project_id, subscription_name): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] # [START pubsub_quickstart_subscriber] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + import time + + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = 
pubsub_v1.SubscriberClient() + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_name}` subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) def callback(message): print('Received message: {}'.format(message)) @@ -155,8 +186,8 @@ def callback(message): subscriber.subscribe(subscription_path, callback=callback) - # The subscriber is non-blocking, so we must keep the main thread from - # exiting to allow it to process messages in the background. + # The subscriber is non-blocking. We must keep the main thread from + # exiting to allow it to process messages asynchronously in the background. print('Listening for messages on {}'.format(subscription_path)) while True: time.sleep(60) @@ -164,14 +195,19 @@ def callback(message): # [END pubsub_quickstart_subscriber] -def receive_messages_with_custom_attributes(project, subscription_name): +def receive_messages_with_custom_attributes(project_id, subscription_name): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_sync_pull_custom_attributes] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + import time + + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) def callback(message): print('Received message: {}'.format(message.data)) @@ -192,14 +228,19 @@ def callback(message): # [END pubsub_subscriber_sync_pull_custom_attributes] -def receive_messages_with_flow_control(project, subscription_name): +def receive_messages_with_flow_control(project_id, subscription_name): """Receives messages from a pull subscription with flow control.""" # [START 
pubsub_subscriber_flow_settings] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + import time + + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) def callback(message): print('Received message: {}'.format(message.data)) @@ -218,16 +259,19 @@ def callback(message): # [END pubsub_subscriber_flow_settings] -def synchronous_pull(project, subscription_name): +def synchronous_pull(project_id, subscription_name): """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) - NUM_MESSAGES=3 + NUM_MESSAGES = 3 # The subscriber pulls a specific number of messages. 
response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) @@ -244,18 +288,26 @@ def synchronous_pull(project, subscription_name): # [END pubsub_subscriber_sync_pull] -def synchronous_pull_with_lease_management(project, subscription_name): +def synchronous_pull_with_lease_management(project_id, subscription_name): """Pulling messages synchronously with lease management""" # [START pubsub_subscriber_sync_pull_with_lease] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + import logging + import multiprocessing + import random + import time + + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pub/Sub subscription name" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) - NUM_MESSAGES=2 - ACK_DEADLINE=30 - SLEEP_TIME=10 + NUM_MESSAGES = 2 + ACK_DEADLINE = 30 + SLEEP_TIME = 10 # The subscriber pulls a specific number of messages. response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) @@ -266,7 +318,7 @@ def synchronous_pull_with_lease_management(project, subscription_name): def worker(msg): """Simulates a long-running process.""" - RUN_TIME = random.randint(1,60) + RUN_TIME = random.randint(1, 60) logger.info('{}: Running {} for {}s'.format( time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME)) time.sleep(RUN_TIME) @@ -279,16 +331,20 @@ def worker(msg): process.start() while processes: - for process, (ack_id, msg_data) in processes.items(): + for process in list(processes): + ack_id, msg_data = processes[process] # If the process is still running, reset the ack deadline as # specified by ACK_DEADLINE once every while as specified # by SLEEP_TIME. if process.is_alive(): # `ack_deadline_seconds` must be between 10 to 600. 
- subscriber.modify_ack_deadline(subscription_path, - [ack_id], ack_deadline_seconds=ACK_DEADLINE) + subscriber.modify_ack_deadline( + subscription_path, + [ack_id], + ack_deadline_seconds=ACK_DEADLINE) logger.info('{}: Reset ack deadline for {} for {}s'.format( - time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE)) + time.strftime("%X", time.gmtime()), + msg_data, ACK_DEADLINE)) # If the processs is finished, acknowledges using `ack_id`. else: @@ -305,14 +361,17 @@ def worker(msg): # [END pubsub_subscriber_sync_pull_with_lease] -def listen_for_errors(project, subscription_name): +def listen_for_errors(project_id, subscription_name): """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] - # project = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + from google.cloud import pubsub_v1 + + # TODO project = "Your Google Cloud Project ID" + # TODO subscription_name = "Your Pubsub subscription name" + subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project, subscription_name) + project_id, subscription_name) def callback(message): print('Received message: {}'.format(message)) @@ -337,7 +396,7 @@ def callback(message): description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter ) - parser.add_argument('project', help='Your Google Cloud project ID') + parser.add_argument('project_id', help='Your Google Cloud project ID') subparsers = parser.add_subparsers(dest='command') list_in_topic_parser = subparsers.add_parser( @@ -389,7 +448,8 @@ def callback(message): synchronous_pull_with_lease_management_parser = subparsers.add_parser( 'receive-synchronously-with-lease', help=synchronous_pull_with_lease_management.__doc__) - synchronous_pull_with_lease_management_parser.add_argument('subscription_name') + synchronous_pull_with_lease_management_parser.add_argument( + 'subscription_name') listen_for_errors_parser = 
subparsers.add_parser( 'listen_for_errors', help=listen_for_errors.__doc__) @@ -398,37 +458,37 @@ def callback(message): args = parser.parse_args() if args.command == 'list_in_topic': - list_subscriptions_in_topic(args.project, args.topic_name) + list_subscriptions_in_topic(args.project_id, args.topic_name) elif args.command == 'list_in_project': - list_subscriptions_in_project(args.project) + list_subscriptions_in_project(args.project_id) elif args.command == 'create': create_subscription( - args.project, args.topic_name, args.subscription_name) + args.project_id, args.topic_name, args.subscription_name) elif args.command == 'create-push': create_push_subscription( - args.project, + args.project_id, args.topic_name, args.subscription_name, args.endpoint) elif args.command == 'delete': delete_subscription( - args.project, args.subscription_name) + args.project_id, args.subscription_name) elif args.command == 'update': update_subscription( - args.project, args.subscription_name, args.endpoint) + args.project_id, args.subscription_name, args.endpoint) elif args.command == 'receive': - receive_messages(args.project, args.subscription_name) + receive_messages(args.project_id, args.subscription_name) elif args.command == 'receive-custom-attributes': receive_messages_with_custom_attributes( - args.project, args.subscription_name) + args.project_id, args.subscription_name) elif args.command == 'receive-flow-control': receive_messages_with_flow_control( - args.project, args.subscription_name) + args.project_id, args.subscription_name) elif args.command == 'receive-synchronously': synchronous_pull( - args.project, args.subscription_name) + args.project_id, args.subscription_name) elif args.command == 'receive-synchronously-with-lease': synchronous_pull_with_lease_management( - args.project, args.subscription_name) + args.project_id, args.subscription_name) elif args.command == 'listen_for_errors': - listen_for_errors(args.project, args.subscription_name) + 
listen_for_errors(args.project_id, args.subscription_name) From a6eb564e0a04aefce1f445a2ab097045c9466a81 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 29 Oct 2018 17:24:09 -0700 Subject: [PATCH 0539/1197] Pub/Sub end-to-end sample [(#1800)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1800) * Created new end-to-end sample, moved old sample * Add space around operator --- .../samples/snippets/publisher.py | 2 + .../samples/snippets/quickstart.py | 91 ++++++++++++++++--- .../samples/snippets/quickstart_test.py | 58 ++++++++---- 3 files changed, 123 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index b7e574e04978..fcb0d9b0f2e3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -41,6 +41,7 @@ def list_topics(project_id): def create_topic(project_id, topic_name): """Create a new Pub/Sub topic.""" + # [START pubsub_quickstart_create_topic] # [START pubsub_create_topic] from google.cloud import pubsub_v1 @@ -53,6 +54,7 @@ def create_topic(project_id, topic_name): topic = publisher.create_topic(topic_path) print('Topic created: {}'.format(topic)) + # [END pubsub_quickstart_create_topic] # [END pubsub_create_topic] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py index 1ff2efed3a32..f48d085e06b5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -15,25 +15,94 @@ # limitations under the License. 
-def run_quickstart(): - # [START pubsub_quickstart_create_topic] - # Imports the Google Cloud client library +import argparse + + +def end_to_end(project_id, topic_name, subscription_name, num_messages): + # [START pubsub_end_to_end] + import time + from google.cloud import pubsub_v1 - # Instantiates a client + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + # TODO num_messages = number of messages to test end-to-end + + # Instantiates a publisher and subscriber client publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_name}` + topic_path = subscriber.topic_path(project_id, topic_name) - # The resource path for the new topic contains the project ID - # and the topic name. - topic_path = publisher.topic_path( - 'my-project', 'my-new-topic') + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_name}` + subscription_path = subscriber.subscription_path( + project_id, subscription_name) # Create the topic. topic = publisher.create_topic(topic_path) + print('\nTopic created: {}'.format(topic.name)) + + # Create a subscription. + subscription = subscriber.create_subscription( + subscription_path, topic_path) + print('\nSubscription created: {}\n'.format(subscription.name)) + + publish_begin = time.time() + + # Publish messages. + for n in range(num_messages): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + # When you publish a message, the client returns a future. 
+ future = publisher.publish(topic_path, data=data) + print('Published {} of message ID {}.'.format(data, future.result())) + + publish_time = time.time() - publish_begin - print('Topic created: {}'.format(topic)) - # [END pubsub_quickstart_create_topic] + messages = set() + + def callback(message): + print('Received message: {}'.format(message)) + # Unacknowledged messages will be sent again. + message.ack() + messages.add(message) + + subscribe_begin = time.time() + + # Receive messages. The subscriber is nonblocking. + subscriber.subscribe(subscription_path, callback=callback) + + print('\nListening for messages on {}...\n'.format(subscription_path)) + + while True: + if len(messages) == num_messages: + subscribe_time = time.time() - subscribe_begin + print("\nReceived all messages.") + print("Publish time lapsed: {:.2f}s.".format(publish_time)) + print("Subscribe time lapsed: {:.2f}s.".format(subscribe_time)) + break + else: + # Sleeps the thread at 50Hz to save on resources. + time.sleep(1. / 50) + # [END pubsub_end_to_end] if __name__ == '__main__': - run_quickstart() + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument('project_id', help='Your Google Cloud project ID') + parser.add_argument('topic_name', help='Your topic name') + parser.add_argument('subscription_name', help='Your subscription name') + parser.add_argument('num_msgs', type=int, help='Number of test messages') + + args = parser.parse_args() + + end_to_end(args.project_id, args.topic_name, args.subscription_name, + args.num_msgs) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py index 520213bcf32c..ee6f7d4b21a2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -1,4 +1,6 @@ -# Copyright 2016 Google Inc. 
All Rights Reserved. +#!/usr/bin/env python + +# Copyright 2018 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,33 +17,55 @@ import os from google.cloud import pubsub_v1 -import mock import pytest - import quickstart PROJECT = os.environ['GCLOUD_PROJECT'] -# Must match the dataset listed in quickstart.py -TOPIC_NAME = 'my-new-topic' -TOPIC_PATH = 'projects/{}/topics/{}'.format(PROJECT, TOPIC_NAME) +TOPIC = 'end-to-end-test-topic' +SUBSCRIPTION = 'end-to-end-test-topic-sub' +N = 10 + + +@pytest.fixture(scope='module') +def publisher_client(): + yield pubsub_v1.PublisherClient() -@pytest.fixture -def temporary_topic(): - """Fixture that ensures the test topic does not exist before the test.""" - publisher = pubsub_v1.PublisherClient() +@pytest.fixture(scope='module') +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher.delete_topic(TOPIC_PATH) + publisher_client.delete_topic(topic_path) except Exception: pass - yield + yield TOPIC -@mock.patch.object( - pubsub_v1.PublisherClient, 'topic_path', return_value=TOPIC_PATH) -def test_quickstart(unused_topic_path, temporary_topic, capsys): - quickstart.run_quickstart() +@pytest.fixture(scope='module') +def subscriber_client(): + yield pubsub_v1.SubscriberClient() + + +@pytest.fixture(scope='module') +def subscription(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION) + + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + yield SUBSCRIPTION + + +def test_end_to_end(topic, subscription, capsys): + + quickstart.end_to_end(PROJECT, topic, subscription, N) out, _ = capsys.readouterr() - assert TOPIC_NAME in out + + assert "Received all messages" in out + assert "Publish time lapsed" in out + assert "Subscribe time lapsed" in out From 
1961360b7719d8f46b92513b32d9edf8484fbc1a Mon Sep 17 00:00:00 2001 From: noerog <32459203+noerog@users.noreply.github.com> Date: Thu, 6 Dec 2018 20:58:32 -0500 Subject: [PATCH 0540/1197] Add test for updating a subscription. [(#1336)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1336) Tests for https://github.com/GoogleCloudPlatform/python-docs-samples/pull/1335. Using ack_deadline_seconds as the example. --- .../samples/snippets/subscriber_test.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 9f554398ef45..3f5de61de88f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -154,6 +154,16 @@ def _(): subscriber_client.get_subscription(subscription) +def test_update(subscriber_client, subscription, capsys): + ACK_DEADLINE_SECONDS = 100 + + subscriber.update_subscription(PROJECT, SUBSCRIPTION, ACK_DEADLINE_SECONDS) + + out, _ = capsys.readouterr() + assert subscription in out + assert '100' in out + + def _publish_messages(publisher_client, topic): for n in range(5): data = u'Message {}'.format(n).encode('utf-8') From 69db8d4399f3c16a75d65dfb078fe9a5aada686d Mon Sep 17 00:00:00 2001 From: Charles Engelke Date: Wed, 12 Dec 2018 19:44:07 -0800 Subject: [PATCH 0541/1197] Fix update test to use new endpoint [(#1925)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1925) * Fix update test to use new endpoint * Handle subscription already exists Previous deletions don't always succeed * Use a new endpoint for update --- .../samples/snippets/subscriber_test.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 
3f5de61de88f..df5b1092badb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -17,6 +17,7 @@ from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub_v1 +import google.api_core.exceptions import mock import pytest @@ -28,6 +29,7 @@ SUBSCRIPTION_SYNC1 = 'subscription-test-subscription-sync1' SUBSCRIPTION_SYNC2 = 'subscription-test-subscription-sync2' ENDPOINT = 'https://{}.appspot.com/push'.format(PROJECT) +NEW_ENDPOINT = 'https://{}.appspot.com/push2'.format(PROJECT) @pytest.fixture(scope='module') @@ -64,7 +66,10 @@ def subscription(subscriber_client, topic): except Exception: pass - subscriber_client.create_subscription(subscription_path, topic=topic) + try: + subscriber_client.create_subscription(subscription_path, topic=topic) + except google.api_core.exceptions.AlreadyExists: + pass yield subscription_path @@ -155,13 +160,10 @@ def _(): def test_update(subscriber_client, subscription, capsys): - ACK_DEADLINE_SECONDS = 100 - - subscriber.update_subscription(PROJECT, SUBSCRIPTION, ACK_DEADLINE_SECONDS) + subscriber.update_subscription(PROJECT, SUBSCRIPTION, NEW_ENDPOINT) out, _ = capsys.readouterr() - assert subscription in out - assert '100' in out + assert 'Subscription updated' in out def _publish_messages(publisher_client, topic): From 02bd7daac6ce6fbd4a530814ef5b5dc31b6739f9 Mon Sep 17 00:00:00 2001 From: DPEBot Date: Wed, 6 Feb 2019 12:06:35 -0800 Subject: [PATCH 0542/1197] Auto-update dependencies. [(#1980)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/1980) * Auto-update dependencies. 
* Update requirements.txt * Update requirements.txt --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 81a62427cc0b..d8470ecf937d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.38.0 +google-cloud-pubsub==0.39.1 From 0e9969394548b9fe3d96f18ed8f30e8b1e469506 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 12 Feb 2019 13:04:59 -0800 Subject: [PATCH 0543/1197] Cloud Pub/Sub Quickstart V2 [(#2004)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2004) * Quickstart V2 * Adopts Kir's suggestions * Adopted Tim's suggestions * proper resource deletion during teardown --- .../samples/snippets/quickstart/pub.py | 73 +++++++++++ .../samples/snippets/quickstart/pub_test.py | 61 ++++++++++ .../samples/snippets/quickstart/sub.py | 64 ++++++++++ .../samples/snippets/quickstart/sub_test.py | 113 ++++++++++++++++++ 4 files changed, 311 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py new file mode 100644 index 000000000000..9617b34ea846 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# [START pubsub_quickstart_pub_all] +import argparse +import time +# [START pubsub_quickstart_pub_deps] +from google.cloud import pubsub_v1 +# [END pubsub_quickstart_pub_deps] + + +def get_callback(api_future, data): + """Wrap message data in the context of the callback function.""" + + def callback(api_future): + try: + print("Published message {} now has message ID {}".format( + data, api_future.result())) + except Exception: + print("A problem occurred when publishing {}: {}\n".format( + data, api_future.exception())) + raise + return callback + + +def pub(project_id, topic_name): + """Publishes a message to a Pub/Sub topic.""" + # [START pubsub_quickstart_pub_client] + # Initialize a Publisher client + client = pubsub_v1.PublisherClient() + # [END pubsub_quickstart_pub_client] + # Create a fully qualified identifier in the form of + # `projects/{project_id}/topics/{topic_name}` + topic_path = client.topic_path(project_id, topic_name) + + # Data sent to Cloud Pub/Sub must be a bytestring + data = b"Hello, World!" + + # When you publish a message, the client returns a future. + api_future = client.publish(topic_path, data=data) + api_future.add_done_callback(get_callback(api_future, data)) + + # Keep the main thread from exiting until background message + # is processed. 
+ while api_future.running(): + time.sleep(0.1) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument('project_id', help='Google Cloud project ID') + parser.add_argument('topic_name', help='Pub/Sub topic name') + + args = parser.parse_args() + + pub(args.project_id, args.topic_name) +# [END pubsub_quickstart_pub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py new file mode 100644 index 000000000000..09443364a3f6 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import pytest + +from google.api_core.exceptions import AlreadyExists +from google.cloud import pubsub_v1 + +import pub + +PROJECT = os.environ['GCLOUD_PROJECT'] +TOPIC = 'quickstart-pub-test-topic' + + +@pytest.fixture(scope='module') +def publisher_client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope='module') +def topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + publisher_client.create_topic(topic_path) + except AlreadyExists: + pass + + yield TOPIC + + +@pytest.fixture +def to_delete(publisher_client): + doomed = [] + yield doomed + for item in doomed: + publisher_client.delete_topic(item) + + +def test_pub(publisher_client, topic, to_delete, capsys): + pub.pub(PROJECT, topic) + + to_delete.append('projects/{}/topics/{}'.format(PROJECT, TOPIC)) + + out, _ = capsys.readouterr() + + assert "Published message b'Hello, World!'" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py new file mode 100644 index 000000000000..520803d70a5b --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# [START pubsub_quickstart_sub_all] +import argparse +import time +# [START pubsub_quickstart_sub_deps] +from google.cloud import pubsub_v1 +# [END pubsub_quickstart_sub_deps] + + +def sub(project_id, subscription_name): + """Receives messages from a Pub/Sub subscription.""" + # [START pubsub_quickstart_sub_client] + # Initialize a Subscriber client + client = pubsub_v1.SubscriberClient() + # [END pubsub_quickstart_sub_client] + # Create a fully qualified identifier in the form of + # `projects/{project_id}/subscriptions/{subscription_name}` + subscription_path = client.subscription_path( + project_id, subscription_name) + + def callback(message): + print('Received message {} of message ID {}'.format( + message, message.message_id)) + # Acknowledge the message. Unack'ed messages will be redelivered. + message.ack() + print('Acknowledged message of message ID {}\n'.format( + message.message_id)) + + client.subscribe(subscription_path, callback=callback) + print('Listening for messages on {}..\n'.format(subscription_path)) + + # Keep the main thread from exiting so the subscriber can + # process messages in the background. 
+ while True: + time.sleep(60) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter + ) + parser.add_argument('project_id', help='Google Cloud project ID') + parser.add_argument('subscription_name', help='Pub/Sub subscription name') + + args = parser.parse_args() + + sub(args.project_id, args.subscription_name) +# [END pubsub_quickstart_sub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py new file mode 100644 index 000000000000..9c70384ed693 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import mock +import os +import pytest +import time + +from google.api_core.exceptions import AlreadyExists +from google.cloud import pubsub_v1 + +import sub + + +PROJECT = os.environ['GCLOUD_PROJECT'] +TOPIC = 'quickstart-sub-test-topic' +SUBSCRIPTION = 'quickstart-sub-test-topic-sub' + + +@pytest.fixture(scope='module') +def publisher_client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope='module') +def topic_path(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, TOPIC) + + try: + publisher_client.create_topic(topic_path) + except AlreadyExists: + pass + + yield topic_path + + +@pytest.fixture(scope='module') +def subscriber_client(): + yield pubsub_v1.SubscriberClient() + + +@pytest.fixture(scope='module') +def subscription(subscriber_client, topic_path): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION) + + try: + subscriber_client.create_subscription(subscription_path, topic_path) + except AlreadyExists: + pass + + yield SUBSCRIPTION + + +@pytest.fixture +def to_delete(publisher_client, subscriber_client): + doomed = [] + yield doomed + for client, item in doomed: + if 'topics' in item: + publisher_client.delete_topic(item) + if 'subscriptions' in item: + subscriber_client.delete_subscription(item) + + +def _make_sleep_patch(): + real_sleep = time.sleep + + def new_sleep(period): + if period == 60: + real_sleep(10) + raise RuntimeError('sigil') + else: + real_sleep(period) + + return mock.patch('time.sleep', new=new_sleep) + + +def test_sub(publisher_client, + topic_path, + subscriber_client, + subscription, + to_delete, + capsys): + + publisher_client.publish(topic_path, data=b'Hello, World!') + + to_delete.append((publisher_client, topic_path)) + + with _make_sleep_patch(): + with pytest.raises(RuntimeError, match='sigil'): + sub.sub(PROJECT, subscription) + + to_delete.append((subscriber_client, + 'projects/{}/subscriptions/{}'.format(PROJECT, + SUBSCRIPTION))) + + out, _ = 
capsys.readouterr() + assert "Received message" in out + assert "Acknowledged message" in out From 39e018b23bef682b9559c7b45f41c457b430132b Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 14 Jun 2019 14:54:22 -0700 Subject: [PATCH 0544/1197] Pub/Sub: publish with error-handling comments [(#2222)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2222) --- .../samples/snippets/publisher.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index fcb0d9b0f2e3..d7e51c2d87dc 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -157,8 +157,8 @@ def publish_messages_with_futures(project_id, topic_name): def publish_messages_with_error_handler(project_id, topic_name): - """Publishes multiple messages to a Pub/Sub topic with an error handler.""" # [START pubsub_publish_messages_error_handler] + """Publishes multiple messages to a Pub/Sub topic with an error handler.""" import time from google.cloud import pubsub_v1 @@ -170,10 +170,8 @@ def publish_messages_with_error_handler(project_id, topic_name): topic_path = publisher.topic_path(project_id, topic_name) def callback(message_future): - # When timeout is unspecified, the exception method waits indefinitely. 
- if message_future.exception(timeout=30): - print('Publishing message on {} threw an Exception {}.'.format( - topic_name, message_future.exception())) + if message_future.exception(): + print('{} needs handling.'.format(message_future.exception())) else: print(message_future.result()) @@ -183,12 +181,14 @@ def callback(message_future): data = data.encode('utf-8') # When you publish a message, the client returns a Future. message_future = publisher.publish(topic_path, data=data) + # If you wish to handle publish failures, do it in the callback. + # Otherwise, it's okay to call `message_future.result()` directly. message_future.add_done_callback(callback) print('Published message IDs:') - # We must keep the main thread from exiting to allow it to process - # messages in the background. + # We keep the main thread from exiting so message futures can be + # resolved in the background. while True: time.sleep(60) # [END pubsub_publish_messages_error_handler] From 3aa7a2a1063c46c0cf19d042d7c11ffe0462cec9 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 20 Jun 2019 16:57:11 -0700 Subject: [PATCH 0545/1197] Resolve all futures [(#2231)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2231) --- .../samples/snippets/publisher.py | 72 ++++++++++--------- .../samples/snippets/publisher_test.py | 6 +- .../samples/snippets/subscriber_test.py | 6 +- 3 files changed, 42 insertions(+), 42 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d7e51c2d87dc..51edd7bd8644 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -95,7 +95,7 @@ def publish_messages(project_id, topic_name): data = data.encode('utf-8') # When you publish a message, the client returns a future. 
future = publisher.publish(topic_path, data=data) - print('Published {} of message ID {}.'.format(data, future.result())) + print(future.result()) print('Published messages.') # [END pubsub_quickstart_publisher] @@ -119,8 +119,9 @@ def publish_messages_with_custom_attributes(project_id, topic_name): # Data must be a bytestring data = data.encode('utf-8') # Add two attributes, origin and username, to the message - publisher.publish( + future = publisher.publish( topic_path, data, origin='python-sample', username='gcp') + print(future.result()) print('Published messages with custom attributes.') # [END pubsub_publish_custom_attributes] @@ -138,21 +139,15 @@ def publish_messages_with_futures(project_id, topic_name): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) - # When you publish a message, the client returns a Future. This Future - # can be used to track when the message is published. - futures = [] - for n in range(1, 10): data = u'Message number {}'.format(n) # Data must be a bytestring data = data.encode('utf-8') - message_future = publisher.publish(topic_path, data=data) - futures.append(message_future) - - print('Published message IDs:') - for future in futures: - # result() blocks until the message is published. + # When you publish a message, the client returns a future. 
+ future = publisher.publish(topic_path, data=data) print(future.result()) + + print("Published messages with futures.") # [END pubsub_publisher_concurrency_control] @@ -169,28 +164,34 @@ def publish_messages_with_error_handler(project_id, topic_name): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) - def callback(message_future): - if message_future.exception(): - print('{} needs handling.'.format(message_future.exception())) - else: - print(message_future.result()) + futures = dict() - for n in range(1, 10): - data = u'Message number {}'.format(n) - # Data must be a bytestring - data = data.encode('utf-8') - # When you publish a message, the client returns a Future. - message_future = publisher.publish(topic_path, data=data) - # If you wish to handle publish failures, do it in the callback. - # Otherwise, it's okay to call `message_future.result()` directly. - message_future.add_done_callback(callback) - - print('Published message IDs:') - - # We keep the main thread from exiting so message futures can be - # resolved in the background. - while True: - time.sleep(60) + def get_callback(f, data): + def callback(f): + try: + print(f.result()) + futures.pop(data) + except: # noqa + print("Please handle {} for {}.".format(f.exception(), data)) + return callback + + for i in range(10): + data = str(i) + futures.update({data: None}) + # When you publish a message, the client returns a future. + future = publisher.publish( + topic_path, + data=data.encode("utf-8"), # data must be a bytestring. + ) + futures[data] = future + # Publish failures shall be handled in the callback function. + future.add_done_callback(get_callback(future, data)) + + # Wait for all the publish futures to resolve before exiting. 
+ while futures: + time.sleep(5) + + print("Published message with error handler.") # [END pubsub_publish_messages_error_handler] @@ -215,9 +216,10 @@ def publish_messages_with_batch_settings(project_id, topic_name): data = u'Message number {}'.format(n) # Data must be a bytestring data = data.encode('utf-8') - publisher.publish(topic_path, data=data) + future = publisher.publish(topic_path, data=data) + print(future.result()) - print('Published messages.') + print('Published messages with batch settings.') # [END pubsub_publisher_batch_settings] diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index cdb4d0e0e766..c2908d746a26 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -111,11 +111,7 @@ def test_publish_with_batch_settings(topic, capsys): def test_publish_with_error_handler(topic, capsys): - - with _make_sleep_patch(): - with pytest.raises(RuntimeError, match='sigil'): - publisher.publish_messages_with_error_handler( - PROJECT, TOPIC) + publisher.publish_messages_with_error_handler(PROJECT, TOPIC) out, _ = capsys.readouterr() assert 'Published' in out diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index df5b1092badb..f91007a6dc17 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -169,13 +169,15 @@ def test_update(subscriber_client, subscription, capsys): def _publish_messages(publisher_client, topic): for n in range(5): data = u'Message {}'.format(n).encode('utf-8') - publisher_client.publish( + future = publisher_client.publish( topic, data=data) + future.result() def _publish_messages_with_custom_attributes(publisher_client, topic): data = u'Test 
message'.encode('utf-8') - publisher_client.publish(topic, data=data, origin='python-sample') + future = publisher_client.publish(topic, data=data, origin='python-sample') + future.result() def _make_sleep_patch(): From 231d04ed55c957f7e479afe6bc81c0e551dc13cc Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 22 Jul 2019 10:23:03 -0700 Subject: [PATCH 0546/1197] Pub/Sub: add publish retry sample [(#2273)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2273) * Publish retry sample * double to single quotes * double to single quotes * license year --- .../samples/snippets/iam.py | 2 +- .../samples/snippets/iam_test.py | 2 +- .../samples/snippets/publisher.py | 112 ++++++++++++++---- .../samples/snippets/publisher_test.py | 9 +- .../samples/snippets/quickstart.py | 2 +- .../samples/snippets/quickstart_test.py | 2 +- .../samples/snippets/subscriber.py | 2 +- .../samples/snippets/subscriber_test.py | 2 +- 8 files changed, 106 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index bd44f1ab6e0b..f9865ed3934e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 8a524c35a061..cfae98ffd00b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 51edd7bd8644..2a32dc1c7f00 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -120,7 +120,8 @@ def publish_messages_with_custom_attributes(project_id, topic_name): data = data.encode('utf-8') # Add two attributes, origin and username, to the message future = publisher.publish( - topic_path, data, origin='python-sample', username='gcp') + topic_path, data, origin='python-sample', username='gcp' + ) print(future.result()) print('Published messages with custom attributes.') @@ -147,7 +148,7 @@ def publish_messages_with_futures(project_id, topic_name): future = publisher.publish(topic_path, data=data) print(future.result()) - print("Published messages with futures.") + print('Published messages with futures.') # [END pubsub_publisher_concurrency_control] @@ -171,8 +172,9 @@ def callback(f): try: print(f.result()) futures.pop(data) - except: # noqa - print("Please handle {} for {}.".format(f.exception(), data)) + except: # noqa + print('Please handle {} for {}.'.format(f.exception(), data)) + return callback for i in range(10): @@ -180,8 +182,7 @@ def callback(f): futures.update({data: None}) # When you publish a message, the client returns a future. future = publisher.publish( - topic_path, - data=data.encode("utf-8"), # data must be a bytestring. + topic_path, data=data.encode('utf-8') # data must be a bytestring. ) futures[data] = future # Publish failures shall be handled in the callback function. 
@@ -191,7 +192,7 @@ def callback(f): while futures: time.sleep(5) - print("Published message with error handler.") + print('Published message with error handler.') # [END pubsub_publish_messages_error_handler] @@ -207,7 +208,7 @@ def publish_messages_with_batch_settings(project_id, topic_name): # of data or one second has passed. batch_settings = pubsub_v1.types.BatchSettings( max_bytes=1024, # One kilobyte - max_latency=1, # One second + max_latency=1, # One second ) publisher = pubsub_v1.PublisherClient(batch_settings) topic_path = publisher.topic_path(project_id, topic_name) @@ -223,7 +224,65 @@ def publish_messages_with_batch_settings(project_id, topic_name): # [END pubsub_publisher_batch_settings] -if __name__ == '__main__': +def publish_messages_with_retry_settings(project_id, topic_name): + """Publishes messages with custom retry settings.""" + # [START pubsub_publisher_retry_settings] + from google.cloud import pubsub_v1 + + # TODO project_id = "Your Google Cloud Project ID" + # TODO topic_name = "Your Pub/Sub topic name" + + # Configure the retry settings. Defaults will be overwritten. 
+ retry_settings = { + 'interfaces': { + 'google.pubsub.v1.Publisher': { + 'retry_codes': { + 'publish': [ + 'ABORTED', + 'CANCELLED', + 'DEADLINE_EXCEEDED', + 'INTERNAL', + 'RESOURCE_EXHAUSTED', + 'UNAVAILABLE', + 'UNKNOWN', + ] + }, + 'retry_params': { + 'messaging': { + 'initial_retry_delay_millis': 150, # default: 100 + 'retry_delay_multiplier': 1.5, # default: 1.3 + 'max_retry_delay_millis': 65000, # default: 60000 + 'initial_rpc_timeout_millis': 25000, # default: 25000 + 'rpc_timeout_multiplier': 1.0, # default: 1.0 + 'max_rpc_timeout_millis': 35000, # default: 30000 + 'total_timeout_millis': 650000, # default: 600000 + } + }, + 'methods': { + 'Publish': { + 'retry_codes_name': 'publish', + 'retry_params_name': 'messaging', + } + }, + } + } + } + + publisher = pubsub_v1.PublisherClient(client_config=retry_settings) + topic_path = publisher.topic_path(project_id, topic_name) + + for n in range(1, 10): + data = u'Message number {}'.format(n) + # Data must be a bytestring + data = data.encode('utf-8') + future = publisher.publish(topic_path, data=data) + print(future.result()) + + print('Published messages with retry settings.') + # [END pubsub_publisher_retry_settings] + + +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter @@ -233,36 +292,47 @@ def publish_messages_with_batch_settings(project_id, topic_name): subparsers = parser.add_subparsers(dest='command') subparsers.add_parser('list', help=list_topics.__doc__) - create_parser = subparsers.add_parser('create', help=create_topic.__doc__) + create_parser = subparsers.add_parser('create', + help=create_topic.__doc__) create_parser.add_argument('topic_name') - delete_parser = subparsers.add_parser('delete', help=delete_topic.__doc__) + delete_parser = subparsers.add_parser('delete', + help=delete_topic.__doc__) delete_parser.add_argument('topic_name') - publish_parser = subparsers.add_parser( - 'publish', 
help=publish_messages.__doc__) + publish_parser = subparsers.add_parser('publish', + help=publish_messages.__doc__) publish_parser.add_argument('topic_name') publish_with_custom_attributes_parser = subparsers.add_parser( 'publish-with-custom-attributes', - help=publish_messages_with_custom_attributes.__doc__) + help=publish_messages_with_custom_attributes.__doc__, + ) publish_with_custom_attributes_parser.add_argument('topic_name') publish_with_futures_parser = subparsers.add_parser( - 'publish-with-futures', - help=publish_messages_with_futures.__doc__) + 'publish-with-futures', help=publish_messages_with_futures.__doc__ + ) publish_with_futures_parser.add_argument('topic_name') publish_with_error_handler_parser = subparsers.add_parser( 'publish-with-error-handler', - help=publish_messages_with_error_handler.__doc__) + help=publish_messages_with_error_handler.__doc__ + ) publish_with_error_handler_parser.add_argument('topic_name') publish_with_batch_settings_parser = subparsers.add_parser( 'publish-with-batch-settings', - help=publish_messages_with_batch_settings.__doc__) + help=publish_messages_with_batch_settings.__doc__ + ) publish_with_batch_settings_parser.add_argument('topic_name') + publish_with_retry_settings_parser = subparsers.add_parser( + 'publish-with-retry-settings', + help=publish_messages_with_retry_settings.__doc__ + ) + publish_with_retry_settings_parser.add_argument('topic_name') + args = parser.parse_args() if args.command == 'list': @@ -274,11 +344,13 @@ def publish_messages_with_batch_settings(project_id, topic_name): elif args.command == 'publish': publish_messages(args.project_id, args.topic_name) elif args.command == 'publish-with-custom-attributes': - publish_messages_with_custom_attributes( - args.project_id, args.topic_name) + publish_messages_with_custom_attributes(args.project_id, + args.topic_name) elif args.command == 'publish-with-futures': publish_messages_with_futures(args.project_id, args.topic_name) elif args.command == 
'publish-with-error-handler': publish_messages_with_error_handler(args.project_id, args.topic_name) elif args.command == 'publish-with-batch-settings': publish_messages_with_batch_settings(args.project_id, args.topic_name) + elif args.command == 'publish-with-retry-settings': + publish_messages_with_retry_settings(args.project_id, args.topic_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index c2908d746a26..b364553c2d41 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -110,6 +110,13 @@ def test_publish_with_batch_settings(topic, capsys): assert 'Published' in out +def test_publish_with_retry_settings(topic, capsys): + publisher.publish_messages_with_retry_settings(PROJECT, TOPIC) + + out, _ = capsys.readouterr() + assert 'Published' in out + + def test_publish_with_error_handler(topic, capsys): publisher.publish_messages_with_error_handler(PROJECT, TOPIC) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py index f48d085e06b5..10ff76f9b632 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py index ee6f7d4b21a2..3fce09dc8f5b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2018 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 5802218b4998..92d7791352ef 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index f91007a6dc17..2dcfb33e2311 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -1,4 +1,4 @@ -# Copyright 2016 Google Inc. All Rights Reserved. +# Copyright 2019 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 74a42653c31cb74a2e4aac8dacacdda438631d69 Mon Sep 17 00:00:00 2001 From: Keiji Yoshida Date: Wed, 31 Jul 2019 05:52:46 +0900 Subject: [PATCH 0547/1197] Fix a TODO comment on pubsub/cloud-client/subscriber.py [(#2302)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2302) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 92d7791352ef..64e93951505b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -366,7 +366,7 @@ def listen_for_errors(project_id, subscription_name): # [START pubsub_subscriber_error_listener] from google.cloud import pubsub_v1 - # TODO project = "Your Google Cloud Project ID" + # TODO project_id = "Your Google Cloud Project ID" # TODO subscription_name = "Your Pubsub subscription name" subscriber = pubsub_v1.SubscriberClient() From c0663fd4f4278078afff0335fef98e9227d055b5 Mon Sep 17 00:00:00 2001 From: oli Date: Tue, 13 Aug 2019 06:02:18 +1000 Subject: [PATCH 0548/1197] Print actual number of messages pulled [(#2078)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2078) * Print actual number of messages pulled --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 64e93951505b..181e8235b219 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -284,7 +284,8 @@ def synchronous_pull(project_id, subscription_name): # Acknowledges the received messages so they will not be sent again. 
subscriber.acknowledge(subscription_path, ack_ids) - print("Received and acknowledged {} messages. Done.".format(NUM_MESSAGES)) + print('Received and acknowledged {} messages. Done.'.format( + len(response.received_messages))) # [END pubsub_subscriber_sync_pull] @@ -357,7 +358,8 @@ def worker(msg): if processes: time.sleep(SLEEP_TIME) - print("Received and acknowledged {} messages. Done.".format(NUM_MESSAGES)) + print('Received and acknowledged {} messages. Done.'.format( + len(response.received_messages))) # [END pubsub_subscriber_sync_pull_with_lease] From e81b9ee48549b483d0fd023f4348c271e2f2fd32 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 13 Aug 2019 10:07:22 -0700 Subject: [PATCH 0549/1197] Pub/Sub: fix subscriber async region tag mistake [(#2334)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2334) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 181e8235b219..dbaa396cddd5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -198,6 +198,7 @@ def callback(message): def receive_messages_with_custom_attributes(project_id, subscription_name): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_sync_pull_custom_attributes] + # [START pubsub_subscriber_async_pull_custom_attributes] import time from google.cloud import pubsub_v1 @@ -225,6 +226,7 @@ def callback(message): print('Listening for messages on {}'.format(subscription_path)) while True: time.sleep(60) + # [END pubsub_subscriber_async_pull_custom_attributes] # [END pubsub_subscriber_sync_pull_custom_attributes] From 1374c8bc2a06415a09121a3b433ab94bbc3c4dad Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 12 Sep 2019 14:01:54 -0700 Subject: [PATCH 0550/1197] Pub/Sub: update 
retry settings in sample [(#2395)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2395) --- .../samples/snippets/publisher.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 2a32dc1c7f00..76554d0258fe 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -249,13 +249,13 @@ def publish_messages_with_retry_settings(project_id, topic_name): }, 'retry_params': { 'messaging': { - 'initial_retry_delay_millis': 150, # default: 100 - 'retry_delay_multiplier': 1.5, # default: 1.3 - 'max_retry_delay_millis': 65000, # default: 60000 - 'initial_rpc_timeout_millis': 25000, # default: 25000 + 'initial_retry_delay_millis': 100, # default: 100 + 'retry_delay_multiplier': 1.3, # default: 1.3 + 'max_retry_delay_millis': 60000, # default: 60000 + 'initial_rpc_timeout_millis': 5000, # default: 25000 'rpc_timeout_multiplier': 1.0, # default: 1.0 - 'max_rpc_timeout_millis': 35000, # default: 30000 - 'total_timeout_millis': 650000, # default: 600000 + 'max_rpc_timeout_millis': 600000, # default: 30000 + 'total_timeout_millis': 600000, # default: 600000 } }, 'methods': { From da166c27d7f57c8649c136b0455aebf4ef9343f2 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 17 Sep 2019 10:48:30 -0700 Subject: [PATCH 0551/1197] Pub/Sub: improve pub.py [(#2403)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2403) * print number of messages published * two nit's --- .../samples/snippets/quickstart/pub.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index 9617b34ea846..e340eb4f36ec 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -22,13 +22,13 @@ # [END pubsub_quickstart_pub_deps] -def get_callback(api_future, data): +def get_callback(api_future, data, ref): """Wrap message data in the context of the callback function.""" - def callback(api_future): try: print("Published message {} now has message ID {}".format( data, api_future.result())) + ref["num_messages"] += 1 except Exception: print("A problem occurred when publishing {}: {}\n".format( data, api_future.exception())) @@ -39,24 +39,28 @@ def callback(api_future): def pub(project_id, topic_name): """Publishes a message to a Pub/Sub topic.""" # [START pubsub_quickstart_pub_client] - # Initialize a Publisher client + # Initialize a Publisher client. client = pubsub_v1.PublisherClient() # [END pubsub_quickstart_pub_client] # Create a fully qualified identifier in the form of # `projects/{project_id}/topics/{topic_name}` topic_path = client.topic_path(project_id, topic_name) - # Data sent to Cloud Pub/Sub must be a bytestring + # Data sent to Cloud Pub/Sub must be a bytestring. data = b"Hello, World!" + # Keep track of the number of published messages. + ref = dict({"num_messages": 0}) + # When you publish a message, the client returns a future. api_future = client.publish(topic_path, data=data) - api_future.add_done_callback(get_callback(api_future, data)) + api_future.add_done_callback(get_callback(api_future, data, ref)) - # Keep the main thread from exiting until background message - # is processed. + # Keep the main thread from exiting while the message future + # gets resolved in the background. 
while api_future.running(): - time.sleep(0.1) + time.sleep(0.5) + print("Published {} message(s).".format(ref["num_messages"])) if __name__ == '__main__': From 6f6b6628db80d3f82edb5fba6218068bf093be68 Mon Sep 17 00:00:00 2001 From: Gus Class Date: Mon, 7 Oct 2019 15:45:22 -0700 Subject: [PATCH 0552/1197] Adds updates for samples profiler ... vision [(#2439)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2439) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index d8470ecf937d..a97fc0997e2c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==0.39.1 +google-cloud-pubsub==1.0.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 2dcfb33e2311..1a5de9930fa5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -16,6 +16,7 @@ import time from gcp_devrel.testing import eventually_consistent +from gcp_devrel.testing.flaky import flaky from google.cloud import pubsub_v1 import google.api_core.exceptions import mock @@ -193,6 +194,7 @@ def new_sleep(period): return mock.patch('time.sleep', new=new_sleep) +@flaky def test_receive(publisher_client, topic, subscription, capsys): _publish_messages(publisher_client, topic) From e7df792f939ae4073f1d29455aafea85232133a1 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 21 Oct 2019 13:56:39 -0700 Subject: [PATCH 0553/1197] Pub/Sub: update how subscriber client listens to StreamingPullFuture 
[(#2475)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2475) * update sub.py & requirements.txt * fix flaky subscriber test with separate subscriptions --- .../samples/snippets/iam_test.py | 2 +- .../samples/snippets/publisher.py | 2 +- .../samples/snippets/publisher_test.py | 12 +- .../samples/snippets/quickstart.py | 2 +- .../samples/snippets/quickstart/sub.py | 19 +- .../samples/snippets/quickstart/sub_test.py | 91 ++++----- .../samples/snippets/quickstart_test.py | 2 +- .../samples/snippets/subscriber.py | 2 +- .../samples/snippets/subscriber_test.py | 180 +++++++++--------- 9 files changed, 148 insertions(+), 164 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index cfae98ffd00b..8a524c35a061 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 76554d0258fe..490c903b2c1b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2019 Google LLC. All Rights Reserved. +# Copyright 2016 Google LLC. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index b364553c2d41..5e550abd641d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -36,13 +36,11 @@ def topic(client): topic_path = client.topic_path(PROJECT, TOPIC) try: - client.delete_topic(topic_path) - except Exception: - pass - - client.create_topic(topic_path) + response = client.get_topic(topic_path) + except: # noqa + response = client.create_topic(topic_path) - yield topic_path + yield response.name def _make_sleep_patch(): diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py index 10ff76f9b632..f48d085e06b5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 520803d70a5b..e39f14105b1a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -16,7 +16,6 @@ # [START pubsub_quickstart_sub_all] import argparse -import time # [START pubsub_quickstart_sub_deps] from google.cloud import pubsub_v1 # [END pubsub_quickstart_sub_deps] @@ -34,20 +33,22 @@ def sub(project_id, subscription_name): project_id, subscription_name) def callback(message): - print('Received message {} of message ID {}'.format( + print('Received message {} of message ID {}\n'.format( message, message.message_id)) # Acknowledge the message. Unack'ed messages will be redelivered. message.ack() - print('Acknowledged message of message ID {}\n'.format( - message.message_id)) + print('Acknowledged message {}\n'.format(message.message_id)) - client.subscribe(subscription_path, callback=callback) + streaming_pull_future = client.subscribe( + subscription_path, callback=callback) print('Listening for messages on {}..\n'.format(subscription_path)) - # Keep the main thread from exiting so the subscriber can - # process messages in the background. - while True: - time.sleep(60) + # Calling result() on StreamingPullFuture keeps the main thread from + # exiting while messages get processed in the callbacks. 
+ try: + streaming_pull_future.result() + except: # noqa + streaming_pull_future.cancel() if __name__ == '__main__': diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 9c70384ed693..476139a02642 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -14,10 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock import os import pytest -import time from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 @@ -29,84 +27,79 @@ TOPIC = 'quickstart-sub-test-topic' SUBSCRIPTION = 'quickstart-sub-test-topic-sub' - -@pytest.fixture(scope='module') -def publisher_client(): - yield pubsub_v1.PublisherClient() +publisher_client = pubsub_v1.PublisherClient() +subscriber_client = pubsub_v1.SubscriberClient() @pytest.fixture(scope='module') -def topic_path(publisher_client): +def topic_path(): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher_client.create_topic(topic_path) + topic = publisher_client.create_topic(topic_path) + return topic.name except AlreadyExists: - pass - - yield topic_path - - -@pytest.fixture(scope='module') -def subscriber_client(): - yield pubsub_v1.SubscriberClient() + return topic_path @pytest.fixture(scope='module') -def subscription(subscriber_client, topic_path): +def subscription_path(topic_path): subscription_path = subscriber_client.subscription_path( PROJECT, SUBSCRIPTION) try: - subscriber_client.create_subscription(subscription_path, topic_path) + subscription = subscriber_client.create_subscription( + subscription_path, topic_path) + return subscription.name except AlreadyExists: - pass - - yield SUBSCRIPTION + return subscription_path -@pytest.fixture -def to_delete(publisher_client, subscriber_client): - doomed 
= [] - yield doomed - for client, item in doomed: +def _to_delete(resource_paths): + for item in resource_paths: if 'topics' in item: publisher_client.delete_topic(item) if 'subscriptions' in item: subscriber_client.delete_subscription(item) -def _make_sleep_patch(): - real_sleep = time.sleep +def _publish_messages(topic_path): + publish_future = publisher_client.publish(topic_path, data=b'Hello World!') + publish_future.result() + - def new_sleep(period): - if period == 60: - real_sleep(10) - raise RuntimeError('sigil') - else: - real_sleep(period) +def _sub_timeout(project_id, subscription_name): + # This is an exactly copy of `sub.py` except + # StreamingPullFuture.result() will time out after 10s. + client = pubsub_v1.SubscriberClient() + subscription_path = client.subscription_path( + project_id, subscription_name) - return mock.patch('time.sleep', new=new_sleep) + def callback(message): + print('Received message {} of message ID {}\n'.format( + message, message.message_id)) + message.ack() + print('Acknowledged message {}\n'.format(message.message_id)) + streaming_pull_future = client.subscribe( + subscription_path, callback=callback) + print('Listening for messages on {}..\n'.format(subscription_path)) + + try: + streaming_pull_future.result(timeout=10) + except: # noqa + streaming_pull_future.cancel() -def test_sub(publisher_client, - topic_path, - subscriber_client, - subscription, - to_delete, - capsys): - publisher_client.publish(topic_path, data=b'Hello, World!') +def test_sub(monkeypatch, topic_path, subscription_path, capsys): + monkeypatch.setattr(sub, 'sub', _sub_timeout) - to_delete.append((publisher_client, topic_path)) + _publish_messages(topic_path) - with _make_sleep_patch(): - with pytest.raises(RuntimeError, match='sigil'): - sub.sub(PROJECT, subscription) + sub.sub(PROJECT, SUBSCRIPTION) - to_delete.append((subscriber_client, - 'projects/{}/subscriptions/{}'.format(PROJECT, - SUBSCRIPTION))) + # Clean up resources. 
+ _to_delete([topic_path, subscription_path]) out, _ = capsys.readouterr() assert "Received message" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py index 3fce09dc8f5b..d318b260c63c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index dbaa396cddd5..3bbad0ead1b0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 1a5de9930fa5..4c5fd61223db 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google Inc. All Rights Reserved. +# Copyright 2016 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -16,9 +16,7 @@ import time from gcp_devrel.testing import eventually_consistent -from gcp_devrel.testing.flaky import flaky from google.cloud import pubsub_v1 -import google.api_core.exceptions import mock import pytest @@ -26,9 +24,9 @@ PROJECT = os.environ['GCLOUD_PROJECT'] TOPIC = 'subscription-test-topic' -SUBSCRIPTION = 'subscription-test-subscription' -SUBSCRIPTION_SYNC1 = 'subscription-test-subscription-sync1' -SUBSCRIPTION_SYNC2 = 'subscription-test-subscription-sync2' +SUBSCRIPTION_ONE = 'subscription-test-subscription-one' +SUBSCRIPTION_TWO = 'subscription-test-subscription-two' +SUBSCRIPTION_THREE = 'subscription-test-subscription-three' ENDPOINT = 'https://{}.appspot.com/push'.format(PROJECT) NEW_ENDPOINT = 'https://{}.appspot.com/push2'.format(PROJECT) @@ -43,13 +41,11 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher_client.delete_topic(topic_path) - except Exception: - pass - - publisher_client.create_topic(topic_path) + response = publisher_client.get_topic(topic_path) + except: # noqa + response = publisher_client.create_topic(topic_path) - yield topic_path + yield response.name @pytest.fixture(scope='module') @@ -57,79 +53,74 @@ def subscriber_client(): yield pubsub_v1.SubscriberClient() -@pytest.fixture -def subscription(subscriber_client, topic): +@pytest.fixture(scope='module') +def subscription_one(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION_ONE) try: - subscriber_client.delete_subscription(subscription_path) - except Exception: - pass + response = subscriber_client.get_subscription(subscription_path) + except: # noqa + response = subscriber_client.create_subscription( + subscription_path, topic=topic) - try: - subscriber_client.create_subscription(subscription_path, topic=topic) - except google.api_core.exceptions.AlreadyExists: - pass + yield response.name - yield subscription_path - 
-@pytest.fixture -def subscription_sync1(subscriber_client, topic): - subscription_sync_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_SYNC1) +@pytest.fixture(scope='module') +def subscription_two(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION_TWO) try: - subscriber_client.delete_subscription(subscription_sync_path) - except Exception: - pass - - subscriber_client.create_subscription(subscription_sync_path, topic=topic) + response = subscriber_client.get_subscription(subscription_path) + except: # noqa + response = subscriber_client.create_subscription( + subscription_path, topic=topic) - yield subscription_sync_path + yield response.name -@pytest.fixture -def subscription_sync2(subscriber_client, topic): - subscription_sync_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_SYNC2) +@pytest.fixture(scope='module') +def subscription_three(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path( + PROJECT, SUBSCRIPTION_THREE) try: - subscriber_client.delete_subscription(subscription_sync_path) - except Exception: - pass - - subscriber_client.create_subscription(subscription_sync_path, topic=topic) + response = subscriber_client.get_subscription(subscription_path) + except: # noqa + response = subscriber_client.create_subscription( + subscription_path, topic=topic) - yield subscription_sync_path + yield response.name -def test_list_in_topic(subscription, capsys): +def test_list_in_topic(subscription_one, capsys): @eventually_consistent.call def _(): subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert subscription in out + assert subscription_one in out -def test_list_in_project(subscription, capsys): +def test_list_in_project(subscription_one, capsys): @eventually_consistent.call def _(): subscriber.list_subscriptions_in_project(PROJECT) out, _ = capsys.readouterr() - assert subscription in out + 
assert subscription_one in out def test_create(subscriber_client): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION_ONE) + try: subscriber_client.delete_subscription(subscription_path) except Exception: pass - subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION) + subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ONE) @eventually_consistent.call def _(): @@ -138,40 +129,40 @@ def _(): def test_create_push(subscriber_client): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION_ONE) try: subscriber_client.delete_subscription(subscription_path) except Exception: pass - subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION, ENDPOINT) + subscriber.create_push_subscription( + PROJECT, TOPIC, SUBSCRIPTION_ONE, ENDPOINT) @eventually_consistent.call def _(): assert subscriber_client.get_subscription(subscription_path) -def test_delete(subscriber_client, subscription): - subscriber.delete_subscription(PROJECT, SUBSCRIPTION) +def test_update(subscriber_client, subscription_one, capsys): + subscriber.update_subscription(PROJECT, SUBSCRIPTION_ONE, NEW_ENDPOINT) - @eventually_consistent.call - def _(): - with pytest.raises(Exception): - subscriber_client.get_subscription(subscription) + out, _ = capsys.readouterr() + assert 'Subscription updated' in out -def test_update(subscriber_client, subscription, capsys): - subscriber.update_subscription(PROJECT, SUBSCRIPTION, NEW_ENDPOINT) +def test_delete(subscriber_client, subscription_one): + subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ONE) - out, _ = capsys.readouterr() - assert 'Subscription updated' in out + @eventually_consistent.call + def _(): + with pytest.raises(Exception): + subscriber_client.get_subscription(subscription_one) def _publish_messages(publisher_client, topic): for n in range(5): data = u'Message {}'.format(n).encode('utf-8') - future = publisher_client.publish( - 
topic, data=data) + future = publisher_client.publish(topic, data=data) future.result() @@ -194,49 +185,28 @@ def new_sleep(period): return mock.patch('time.sleep', new=new_sleep) -@flaky -def test_receive(publisher_client, topic, subscription, capsys): +def test_receive(publisher_client, topic, subscription_two, capsys): _publish_messages(publisher_client, topic) with _make_sleep_patch(): with pytest.raises(RuntimeError, match='sigil'): - subscriber.receive_messages(PROJECT, SUBSCRIPTION) + subscriber.receive_messages(PROJECT, SUBSCRIPTION_TWO) out, _ = capsys.readouterr() assert 'Listening' in out - assert subscription in out + assert subscription_two in out assert 'Message 1' in out -def test_receive_synchronously( - publisher_client, topic, subscription_sync1, capsys): - _publish_messages(publisher_client, topic) - - subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC1) - - out, _ = capsys.readouterr() - assert 'Done.' in out - - -def test_receive_synchronously_with_lease( - publisher_client, topic, subscription_sync2, capsys): - _publish_messages(publisher_client, topic) - - subscriber.synchronous_pull_with_lease_management( - PROJECT, SUBSCRIPTION_SYNC2) - - out, _ = capsys.readouterr() - assert 'Done.' 
in out - - def test_receive_with_custom_attributes( - publisher_client, topic, subscription, capsys): + publisher_client, topic, subscription_two, capsys): + _publish_messages_with_custom_attributes(publisher_client, topic) with _make_sleep_patch(): with pytest.raises(RuntimeError, match='sigil'): subscriber.receive_messages_with_custom_attributes( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION_TWO) out, _ = capsys.readouterr() assert 'Test message' in out @@ -245,15 +215,37 @@ def test_receive_with_custom_attributes( def test_receive_with_flow_control( - publisher_client, topic, subscription, capsys): + publisher_client, topic, subscription_two, capsys): + _publish_messages(publisher_client, topic) with _make_sleep_patch(): with pytest.raises(RuntimeError, match='sigil'): subscriber.receive_messages_with_flow_control( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION_TWO) out, _ = capsys.readouterr() assert 'Listening' in out - assert subscription in out + assert subscription_two in out assert 'Message 1' in out + + +def test_receive_synchronously( + publisher_client, topic, subscription_three, capsys): + _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_THREE) + + out, _ = capsys.readouterr() + assert 'Done.' in out + + +def test_receive_synchronously_with_lease( + publisher_client, topic, subscription_three, capsys): + _publish_messages(publisher_client, topic) + + subscriber.synchronous_pull_with_lease_management( + PROJECT, SUBSCRIPTION_THREE) + + out, _ = capsys.readouterr() + assert 'Done.' 
in out From 2cdac26f17394fadfe77f44a58be7336e180ddc3 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 10 Dec 2019 16:31:20 -0800 Subject: [PATCH 0554/1197] Pub/Sub: update how to test with mock [(#2555)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2555) * Update test with mock * Clean up resources after tests * Use unique resource names avoid test failures * Delete subscriptions in cleanup phase * Ensure unique topic name * Update assert to remove bytestring notation * Rewrite PubSubToGCS test using dataflow testing module --- .../samples/snippets/iam.py | 115 ++++---- .../samples/snippets/iam_test.py | 33 ++- .../samples/snippets/publisher.py | 167 ++++++----- .../samples/snippets/publisher_test.py | 31 +- .../samples/snippets/quickstart.py | 39 +-- .../samples/snippets/quickstart/pub.py | 26 +- .../samples/snippets/quickstart/pub_test.py | 24 +- .../samples/snippets/quickstart/sub.py | 27 +- .../samples/snippets/quickstart/sub_test.py | 82 +++--- .../samples/snippets/quickstart_test.py | 23 +- .../samples/snippets/subscriber.py | 269 ++++++++++-------- .../samples/snippets/subscriber_test.py | 131 ++++++--- 12 files changed, 543 insertions(+), 424 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index f9865ed3934e..f014ce749022 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -34,9 +34,9 @@ def get_topic_policy(project, topic_name): policy = client.get_iam_policy(topic_path) - print('Policy for topic {}:'.format(topic_path)) + print("Policy for topic {}:".format(topic_path)) for binding in policy.bindings: - print('Role: {}, Members: {}'.format(binding.role, binding.members)) + print("Role: {}, Members: {}".format(binding.role, binding.members)) # [END pubsub_get_topic_policy] @@ -48,9 +48,9 @@ def get_subscription_policy(project, subscription_name): policy = 
client.get_iam_policy(subscription_path) - print('Policy for subscription {}:'.format(subscription_path)) + print("Policy for subscription {}:".format(subscription_path)) for binding in policy.bindings: - print('Role: {}, Members: {}'.format(binding.role, binding.members)) + print("Role: {}, Members: {}".format(binding.role, binding.members)) # [END pubsub_get_subscription_policy] @@ -63,20 +63,17 @@ def set_topic_policy(project, topic_name): policy = client.get_iam_policy(topic_path) # Add all users as viewers. - policy.bindings.add( - role='roles/pubsub.viewer', - members=['allUsers']) + policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) # Add a group as a publisher. policy.bindings.add( - role='roles/pubsub.publisher', - members=['group:cloud-logs@google.com']) + role="roles/pubsub.publisher", members=["group:cloud-logs@google.com"] + ) # Set the policy policy = client.set_iam_policy(topic_path, policy) - print('IAM policy for topic {} set: {}'.format( - topic_name, policy)) + print("IAM policy for topic {} set: {}".format(topic_name, policy)) # [END pubsub_set_topic_policy] @@ -89,20 +86,21 @@ def set_subscription_policy(project, subscription_name): policy = client.get_iam_policy(subscription_path) # Add all users as viewers. - policy.bindings.add( - role='roles/pubsub.viewer', - members=['allUsers']) + policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) # Add a group as an editor. 
policy.bindings.add( - role='roles/editor', - members=['group:cloud-logs@google.com']) + role="roles/editor", members=["group:cloud-logs@google.com"] + ) # Set the policy policy = client.set_iam_policy(subscription_path, policy) - print('IAM policy for subscription {} set: {}'.format( - subscription_name, policy)) + print( + "IAM policy for subscription {} set: {}".format( + subscription_name, policy + ) + ) # [END pubsub_set_subscription_policy] @@ -112,16 +110,17 @@ def check_topic_permissions(project, topic_name): client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_name) - permissions_to_check = [ - 'pubsub.topics.publish', - 'pubsub.topics.update' - ] + permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] allowed_permissions = client.test_iam_permissions( - topic_path, permissions_to_check) + topic_path, permissions_to_check + ) - print('Allowed permissions for topic {}: {}'.format( - topic_path, allowed_permissions)) + print( + "Allowed permissions for topic {}: {}".format( + topic_path, allowed_permissions + ) + ) # [END pubsub_test_topic_permissions] @@ -132,63 +131,73 @@ def check_subscription_permissions(project, subscription_name): subscription_path = client.subscription_path(project, subscription_name) permissions_to_check = [ - 'pubsub.subscriptions.consume', - 'pubsub.subscriptions.update' + "pubsub.subscriptions.consume", + "pubsub.subscriptions.update", ] allowed_permissions = client.test_iam_permissions( - subscription_path, permissions_to_check) + subscription_path, permissions_to_check + ) - print('Allowed permissions for subscription {}: {}'.format( - subscription_path, allowed_permissions)) + print( + "Allowed permissions for subscription {}: {}".format( + subscription_path, allowed_permissions + ) + ) # [END pubsub_test_subscription_permissions] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - 
formatter_class=argparse.RawDescriptionHelpFormatter + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument('project', help='Your Google Cloud project ID') + parser.add_argument("project", help="Your Google Cloud project ID") - subparsers = parser.add_subparsers(dest='command') + subparsers = parser.add_subparsers(dest="command") get_topic_policy_parser = subparsers.add_parser( - 'get-topic-policy', help=get_topic_policy.__doc__) - get_topic_policy_parser.add_argument('topic_name') + "get-topic-policy", help=get_topic_policy.__doc__ + ) + get_topic_policy_parser.add_argument("topic_name") get_subscription_policy_parser = subparsers.add_parser( - 'get-subscription-policy', help=get_subscription_policy.__doc__) - get_subscription_policy_parser.add_argument('subscription_name') + "get-subscription-policy", help=get_subscription_policy.__doc__ + ) + get_subscription_policy_parser.add_argument("subscription_name") set_topic_policy_parser = subparsers.add_parser( - 'set-topic-policy', help=set_topic_policy.__doc__) - set_topic_policy_parser.add_argument('topic_name') + "set-topic-policy", help=set_topic_policy.__doc__ + ) + set_topic_policy_parser.add_argument("topic_name") set_subscription_policy_parser = subparsers.add_parser( - 'set-subscription-policy', help=set_subscription_policy.__doc__) - set_subscription_policy_parser.add_argument('subscription_name') + "set-subscription-policy", help=set_subscription_policy.__doc__ + ) + set_subscription_policy_parser.add_argument("subscription_name") check_topic_permissions_parser = subparsers.add_parser( - 'check-topic-permissions', help=check_topic_permissions.__doc__) - check_topic_permissions_parser.add_argument('topic_name') + "check-topic-permissions", help=check_topic_permissions.__doc__ + ) + check_topic_permissions_parser.add_argument("topic_name") check_subscription_permissions_parser = subparsers.add_parser( - 'check-subscription-permissions', - help=check_subscription_permissions.__doc__) - 
check_subscription_permissions_parser.add_argument('subscription_name') + "check-subscription-permissions", + help=check_subscription_permissions.__doc__, + ) + check_subscription_permissions_parser.add_argument("subscription_name") args = parser.parse_args() - if args.command == 'get-topic-policy': + if args.command == "get-topic-policy": get_topic_policy(args.project, args.topic_name) - elif args.command == 'get-subscription-policy': + elif args.command == "get-subscription-policy": get_subscription_policy(args.project, args.subscription_name) - elif args.command == 'set-topic-policy': + elif args.command == "set-topic-policy": set_topic_policy(args.project, args.topic_name) - elif args.command == 'set-subscription-policy': + elif args.command == "set-subscription-policy": set_subscription_policy(args.project, args.subscription_name) - elif args.command == 'check-topic-permissions': + elif args.command == "check-topic-permissions": check_topic_permissions(args.project, args.topic_name) - elif args.command == 'check-subscription-permissions': + elif args.command == "check-subscription-permissions": check_subscription_permissions(args.project, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 8a524c35a061..2b019f9ea16f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -13,23 +13,25 @@ # limitations under the License. 
import os +import uuid from google.cloud import pubsub_v1 import pytest import iam -PROJECT = os.environ['GCLOUD_PROJECT'] -TOPIC = 'iam-test-topic' -SUBSCRIPTION = 'iam-test-subscription' +UUID = uuid.uuid4().hex +PROJECT = os.environ["GCLOUD_PROJECT"] +TOPIC = "iam-test-topic-" + UUID +SUBSCRIPTION = "iam-test-subscription-" + UUID -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) @@ -42,8 +44,10 @@ def topic(publisher_client): yield topic_path + publisher_client.delete_topic(topic_path) -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def subscriber_client(): yield pubsub_v1.SubscriberClient() @@ -51,7 +55,8 @@ def subscriber_client(): @pytest.fixture def subscription(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION + ) try: subscriber_client.delete_subscription(subscription_path) @@ -62,6 +67,8 @@ def subscription(subscriber_client, topic): yield subscription_path + subscriber_client.delete_subscription(subscription_path) + def test_get_topic_policy(topic, capsys): iam.get_topic_policy(PROJECT, TOPIC) @@ -81,16 +88,16 @@ def test_set_topic_policy(publisher_client, topic): iam.set_topic_policy(PROJECT, TOPIC) policy = publisher_client.get_iam_policy(topic) - assert 'roles/pubsub.publisher' in str(policy) - assert 'allUsers' in str(policy) + assert "roles/pubsub.publisher" in str(policy) + assert "allUsers" in str(policy) def test_set_subscription_policy(subscriber_client, subscription): iam.set_subscription_policy(PROJECT, SUBSCRIPTION) policy = subscriber_client.get_iam_policy(subscription) - assert 'roles/pubsub.viewer' in str(policy) - assert 'allUsers' in str(policy) + assert "roles/pubsub.viewer" in str(policy) + assert "allUsers" in 
str(policy) def test_check_topic_permissions(topic, capsys): @@ -99,7 +106,7 @@ def test_check_topic_permissions(topic, capsys): out, _ = capsys.readouterr() assert topic in out - assert 'pubsub.topics.publish' in out + assert "pubsub.topics.publish" in out def test_check_subscription_permissions(subscription, capsys): @@ -108,4 +115,4 @@ def test_check_subscription_permissions(subscription, capsys): out, _ = capsys.readouterr() assert subscription in out - assert 'pubsub.subscriptions.consume' in out + assert "pubsub.subscriptions.consume" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 490c903b2c1b..d227baab9584 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -53,7 +53,7 @@ def create_topic(project_id, topic_name): topic = publisher.create_topic(topic_path) - print('Topic created: {}'.format(topic)) + print("Topic created: {}".format(topic)) # [END pubsub_quickstart_create_topic] # [END pubsub_create_topic] @@ -71,7 +71,7 @@ def delete_topic(project_id, topic_name): publisher.delete_topic(topic_path) - print('Topic deleted: {}'.format(topic_path)) + print("Topic deleted: {}".format(topic_path)) # [END pubsub_delete_topic] @@ -90,14 +90,14 @@ def publish_messages(project_id, topic_name): topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): - data = u'Message number {}'.format(n) + data = u"Message number {}".format(n) # Data must be a bytestring - data = data.encode('utf-8') + data = data.encode("utf-8") # When you publish a message, the client returns a future. 
future = publisher.publish(topic_path, data=data) print(future.result()) - print('Published messages.') + print("Published messages.") # [END pubsub_quickstart_publisher] # [END pubsub_publish] @@ -115,16 +115,16 @@ def publish_messages_with_custom_attributes(project_id, topic_name): topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): - data = u'Message number {}'.format(n) + data = u"Message number {}".format(n) # Data must be a bytestring - data = data.encode('utf-8') + data = data.encode("utf-8") # Add two attributes, origin and username, to the message future = publisher.publish( - topic_path, data, origin='python-sample', username='gcp' + topic_path, data, origin="python-sample", username="gcp" ) print(future.result()) - print('Published messages with custom attributes.') + print("Published messages with custom attributes.") # [END pubsub_publish_custom_attributes] @@ -141,14 +141,14 @@ def publish_messages_with_futures(project_id, topic_name): topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): - data = u'Message number {}'.format(n) + data = u"Message number {}".format(n) # Data must be a bytestring - data = data.encode('utf-8') + data = data.encode("utf-8") # When you publish a message, the client returns a future. future = publisher.publish(topic_path, data=data) print(future.result()) - print('Published messages with futures.') + print("Published messages with futures.") # [END pubsub_publisher_concurrency_control] @@ -173,7 +173,7 @@ def callback(f): print(f.result()) futures.pop(data) except: # noqa - print('Please handle {} for {}.'.format(f.exception(), data)) + print("Please handle {} for {}.".format(f.exception(), data)) return callback @@ -182,7 +182,7 @@ def callback(f): futures.update({data: None}) # When you publish a message, the client returns a future. future = publisher.publish( - topic_path, data=data.encode('utf-8') # data must be a bytestring. 
+ topic_path, data=data.encode("utf-8") # data must be a bytestring. ) futures[data] = future # Publish failures shall be handled in the callback function. @@ -192,7 +192,7 @@ def callback(f): while futures: time.sleep(5) - print('Published message with error handler.') + print("Published message with error handler.") # [END pubsub_publish_messages_error_handler] @@ -207,20 +207,19 @@ def publish_messages_with_batch_settings(project_id, topic_name): # Configure the batch to publish as soon as there is one kilobyte # of data or one second has passed. batch_settings = pubsub_v1.types.BatchSettings( - max_bytes=1024, # One kilobyte - max_latency=1, # One second + max_bytes=1024, max_latency=1 # One kilobyte # One second ) publisher = pubsub_v1.PublisherClient(batch_settings) topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): - data = u'Message number {}'.format(n) + data = u"Message number {}".format(n) # Data must be a bytestring - data = data.encode('utf-8') + data = data.encode("utf-8") future = publisher.publish(topic_path, data=data) print(future.result()) - print('Published messages with batch settings.') + print("Published messages with batch settings.") # [END pubsub_publisher_batch_settings] @@ -234,34 +233,34 @@ def publish_messages_with_retry_settings(project_id, topic_name): # Configure the retry settings. Defaults will be overwritten. 
retry_settings = { - 'interfaces': { - 'google.pubsub.v1.Publisher': { - 'retry_codes': { - 'publish': [ - 'ABORTED', - 'CANCELLED', - 'DEADLINE_EXCEEDED', - 'INTERNAL', - 'RESOURCE_EXHAUSTED', - 'UNAVAILABLE', - 'UNKNOWN', + "interfaces": { + "google.pubsub.v1.Publisher": { + "retry_codes": { + "publish": [ + "ABORTED", + "CANCELLED", + "DEADLINE_EXCEEDED", + "INTERNAL", + "RESOURCE_EXHAUSTED", + "UNAVAILABLE", + "UNKNOWN", ] }, - 'retry_params': { - 'messaging': { - 'initial_retry_delay_millis': 100, # default: 100 - 'retry_delay_multiplier': 1.3, # default: 1.3 - 'max_retry_delay_millis': 60000, # default: 60000 - 'initial_rpc_timeout_millis': 5000, # default: 25000 - 'rpc_timeout_multiplier': 1.0, # default: 1.0 - 'max_rpc_timeout_millis': 600000, # default: 30000 - 'total_timeout_millis': 600000, # default: 600000 + "retry_params": { + "messaging": { + "initial_retry_delay_millis": 100, # default: 100 + "retry_delay_multiplier": 1.3, # default: 1.3 + "max_retry_delay_millis": 60000, # default: 60000 + "initial_rpc_timeout_millis": 5000, # default: 25000 + "rpc_timeout_multiplier": 1.0, # default: 1.0 + "max_rpc_timeout_millis": 600000, # default: 30000 + "total_timeout_millis": 600000, # default: 600000 } }, - 'methods': { - 'Publish': { - 'retry_codes_name': 'publish', - 'retry_params_name': 'messaging', + "methods": { + "Publish": { + "retry_codes_name": "publish", + "retry_params_name": "messaging", } }, } @@ -272,85 +271,85 @@ def publish_messages_with_retry_settings(project_id, topic_name): topic_path = publisher.topic_path(project_id, topic_name) for n in range(1, 10): - data = u'Message number {}'.format(n) + data = u"Message number {}".format(n) # Data must be a bytestring - data = data.encode('utf-8') + data = data.encode("utf-8") future = publisher.publish(topic_path, data=data) print(future.result()) - print('Published messages with retry settings.') + print("Published messages with retry settings.") # [END pubsub_publisher_retry_settings] if 
__name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument('project_id', help='Your Google Cloud project ID') + parser.add_argument("project_id", help="Your Google Cloud project ID") - subparsers = parser.add_subparsers(dest='command') - subparsers.add_parser('list', help=list_topics.__doc__) + subparsers = parser.add_subparsers(dest="command") + subparsers.add_parser("list", help=list_topics.__doc__) - create_parser = subparsers.add_parser('create', - help=create_topic.__doc__) - create_parser.add_argument('topic_name') + create_parser = subparsers.add_parser("create", help=create_topic.__doc__) + create_parser.add_argument("topic_name") - delete_parser = subparsers.add_parser('delete', - help=delete_topic.__doc__) - delete_parser.add_argument('topic_name') + delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) + delete_parser.add_argument("topic_name") - publish_parser = subparsers.add_parser('publish', - help=publish_messages.__doc__) - publish_parser.add_argument('topic_name') + publish_parser = subparsers.add_parser( + "publish", help=publish_messages.__doc__ + ) + publish_parser.add_argument("topic_name") publish_with_custom_attributes_parser = subparsers.add_parser( - 'publish-with-custom-attributes', + "publish-with-custom-attributes", help=publish_messages_with_custom_attributes.__doc__, ) - publish_with_custom_attributes_parser.add_argument('topic_name') + publish_with_custom_attributes_parser.add_argument("topic_name") publish_with_futures_parser = subparsers.add_parser( - 'publish-with-futures', help=publish_messages_with_futures.__doc__ + "publish-with-futures", help=publish_messages_with_futures.__doc__ ) - publish_with_futures_parser.add_argument('topic_name') + publish_with_futures_parser.add_argument("topic_name") publish_with_error_handler_parser = subparsers.add_parser( - 
'publish-with-error-handler', - help=publish_messages_with_error_handler.__doc__ + "publish-with-error-handler", + help=publish_messages_with_error_handler.__doc__, ) - publish_with_error_handler_parser.add_argument('topic_name') + publish_with_error_handler_parser.add_argument("topic_name") publish_with_batch_settings_parser = subparsers.add_parser( - 'publish-with-batch-settings', - help=publish_messages_with_batch_settings.__doc__ + "publish-with-batch-settings", + help=publish_messages_with_batch_settings.__doc__, ) - publish_with_batch_settings_parser.add_argument('topic_name') + publish_with_batch_settings_parser.add_argument("topic_name") publish_with_retry_settings_parser = subparsers.add_parser( - 'publish-with-retry-settings', - help=publish_messages_with_retry_settings.__doc__ + "publish-with-retry-settings", + help=publish_messages_with_retry_settings.__doc__, ) - publish_with_retry_settings_parser.add_argument('topic_name') + publish_with_retry_settings_parser.add_argument("topic_name") args = parser.parse_args() - if args.command == 'list': + if args.command == "list": list_topics(args.project_id) - elif args.command == 'create': + elif args.command == "create": create_topic(args.project_id, args.topic_name) - elif args.command == 'delete': + elif args.command == "delete": delete_topic(args.project_id, args.topic_name) - elif args.command == 'publish': + elif args.command == "publish": publish_messages(args.project_id, args.topic_name) - elif args.command == 'publish-with-custom-attributes': - publish_messages_with_custom_attributes(args.project_id, - args.topic_name) - elif args.command == 'publish-with-futures': + elif args.command == "publish-with-custom-attributes": + publish_messages_with_custom_attributes( + args.project_id, args.topic_name + ) + elif args.command == "publish-with-futures": publish_messages_with_futures(args.project_id, args.topic_name) - elif args.command == 'publish-with-error-handler': + elif args.command == 
"publish-with-error-handler": publish_messages_with_error_handler(args.project_id, args.topic_name) - elif args.command == 'publish-with-batch-settings': + elif args.command == "publish-with-batch-settings": publish_messages_with_batch_settings(args.project_id, args.topic_name) - elif args.command == 'publish-with-retry-settings': + elif args.command == "publish-with-retry-settings": publish_messages_with_retry_settings(args.project_id, args.topic_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 5e550abd641d..125fae3c06b9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -14,6 +14,7 @@ import os import time +import uuid from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub_v1 @@ -22,8 +23,9 @@ import publisher -PROJECT = os.environ['GCLOUD_PROJECT'] -TOPIC = 'publisher-test-topic' +UUID = uuid.uuid4().hex +PROJECT = os.environ["GCLOUD_PROJECT"] +TOPIC = "publisher-test-topic-" + UUID @pytest.fixture @@ -49,11 +51,18 @@ def _make_sleep_patch(): def new_sleep(period): if period == 60: real_sleep(5) - raise RuntimeError('sigil') + raise RuntimeError("sigil") else: real_sleep(period) - return mock.patch('time.sleep', new=new_sleep) + return mock.patch("time.sleep", new=new_sleep) + + +def _to_delete(): + publisher_client = pubsub_v1.PublisherClient() + publisher_client.delete_topic( + "projects/{}/topics/{}".format(PROJECT, TOPIC) + ) def test_list(client, topic, capsys): @@ -91,39 +100,41 @@ def test_publish(topic, capsys): publisher.publish_messages(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'Published' in out + assert "Published" in out def test_publish_with_custom_attributes(topic, capsys): publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'Published' in out + assert 
"Published" in out def test_publish_with_batch_settings(topic, capsys): publisher.publish_messages_with_batch_settings(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'Published' in out + assert "Published" in out def test_publish_with_retry_settings(topic, capsys): publisher.publish_messages_with_retry_settings(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'Published' in out + assert "Published" in out def test_publish_with_error_handler(topic, capsys): publisher.publish_messages_with_error_handler(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'Published' in out + assert "Published" in out def test_publish_with_futures(topic, capsys): publisher.publish_messages_with_futures(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert 'Published' in out + assert "Published" in out + + _to_delete() diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py index f48d085e06b5..d01105885cb8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart.py @@ -39,34 +39,36 @@ def end_to_end(project_id, topic_name, subscription_name, num_messages): # The `subscription_path` method creates a fully qualified identifier # in the form `projects/{project_id}/subscriptions/{subscription_name}` subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) # Create the topic. topic = publisher.create_topic(topic_path) - print('\nTopic created: {}'.format(topic.name)) + print("\nTopic created: {}".format(topic.name)) # Create a subscription. subscription = subscriber.create_subscription( - subscription_path, topic_path) - print('\nSubscription created: {}\n'.format(subscription.name)) + subscription_path, topic_path + ) + print("\nSubscription created: {}\n".format(subscription.name)) publish_begin = time.time() # Publish messages. 
for n in range(num_messages): - data = u'Message number {}'.format(n) + data = u"Message number {}".format(n) # Data must be a bytestring - data = data.encode('utf-8') + data = data.encode("utf-8") # When you publish a message, the client returns a future. future = publisher.publish(topic_path, data=data) - print('Published {} of message ID {}.'.format(data, future.result())) + print("Published {} of message ID {}.".format(data, future.result())) publish_time = time.time() - publish_begin messages = set() def callback(message): - print('Received message: {}'.format(message)) + print("Received message: {}".format(message)) # Unacknowledged messages will be sent again. message.ack() messages.add(message) @@ -76,7 +78,7 @@ def callback(message): # Receive messages. The subscriber is nonblocking. subscriber.subscribe(subscription_path, callback=callback) - print('\nListening for messages on {}...\n'.format(subscription_path)) + print("\nListening for messages on {}...\n".format(subscription_path)) while True: if len(messages) == num_messages: @@ -87,22 +89,23 @@ def callback(message): break else: # Sleeps the thread at 50Hz to save on resources. - time.sleep(1. 
/ 50) + time.sleep(1.0 / 50) # [END pubsub_end_to_end] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument('project_id', help='Your Google Cloud project ID') - parser.add_argument('topic_name', help='Your topic name') - parser.add_argument('subscription_name', help='Your subscription name') - parser.add_argument('num_msgs', type=int, help='Number of test messages') + parser.add_argument("project_id", help="Your Google Cloud project ID") + parser.add_argument("topic_name", help="Your topic name") + parser.add_argument("subscription_name", help="Your subscription name") + parser.add_argument("num_msgs", type=int, help="Number of test messages") args = parser.parse_args() - end_to_end(args.project_id, args.topic_name, args.subscription_name, - args.num_msgs) + end_to_end( + args.project_id, args.topic_name, args.subscription_name, args.num_msgs + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index e340eb4f36ec..a3f8087ecd15 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -17,22 +17,32 @@ # [START pubsub_quickstart_pub_all] import argparse import time + # [START pubsub_quickstart_pub_deps] from google.cloud import pubsub_v1 + # [END pubsub_quickstart_pub_deps] def get_callback(api_future, data, ref): """Wrap message data in the context of the callback function.""" + def callback(api_future): try: - print("Published message {} now has message ID {}".format( - data, api_future.result())) + print( + "Published message {} now has message ID {}".format( + data, api_future.result() + ) + ) ref["num_messages"] += 1 except Exception: - print("A problem occurred when publishing {}: 
{}\n".format( - data, api_future.exception())) + print( + "A problem occurred when publishing {}: {}\n".format( + data, api_future.exception() + ) + ) raise + return callback @@ -63,13 +73,13 @@ def pub(project_id, topic_name): print("Published {} message(s).".format(ref["num_messages"])) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument('project_id', help='Google Cloud project ID') - parser.add_argument('topic_name', help='Pub/Sub topic name') + parser.add_argument("project_id", help="Google Cloud project ID") + parser.add_argument("topic_name", help="Pub/Sub topic name") args = parser.parse_args() diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py index 09443364a3f6..b9a6f807f37d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py @@ -16,22 +16,24 @@ import os import pytest +import uuid from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 import pub -PROJECT = os.environ['GCLOUD_PROJECT'] -TOPIC = 'quickstart-pub-test-topic' +UUID = uuid.uuid4().hex +PROJECT = os.environ["GCLOUD_PROJECT"] +TOPIC = "quickstart-pub-test-topic-" + UUID -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) @@ -42,20 +44,12 @@ def topic(publisher_client): yield TOPIC + publisher_client.delete_topic(topic_path) -@pytest.fixture -def to_delete(publisher_client): - doomed = [] - yield doomed - for item in doomed: - 
publisher_client.delete_topic(item) - -def test_pub(publisher_client, topic, to_delete, capsys): +def test_pub(publisher_client, topic, capsys): pub.pub(PROJECT, topic) - to_delete.append('projects/{}/topics/{}'.format(PROJECT, TOPIC)) - out, _ = capsys.readouterr() - assert "Published message b'Hello, World!'" in out + assert "Hello, World!" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index e39f14105b1a..5791af14d799 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -16,8 +16,10 @@ # [START pubsub_quickstart_sub_all] import argparse + # [START pubsub_quickstart_sub_deps] from google.cloud import pubsub_v1 + # [END pubsub_quickstart_sub_deps] @@ -29,19 +31,22 @@ def sub(project_id, subscription_name): # [END pubsub_quickstart_sub_client] # Create a fully qualified identifier in the form of # `projects/{project_id}/subscriptions/{subscription_name}` - subscription_path = client.subscription_path( - project_id, subscription_name) + subscription_path = client.subscription_path(project_id, subscription_name) def callback(message): - print('Received message {} of message ID {}\n'.format( - message, message.message_id)) + print( + "Received message {} of message ID {}\n".format( + message, message.message_id + ) + ) # Acknowledge the message. Unack'ed messages will be redelivered. 
message.ack() - print('Acknowledged message {}\n'.format(message.message_id)) + print("Acknowledged message {}\n".format(message.message_id)) streaming_pull_future = client.subscribe( - subscription_path, callback=callback) - print('Listening for messages on {}..\n'.format(subscription_path)) + subscription_path, callback=callback + ) + print("Listening for messages on {}..\n".format(subscription_path)) # Calling result() on StreamingPullFuture keeps the main thread from # exiting while messages get processed in the callbacks. @@ -51,13 +56,13 @@ def callback(message): streaming_pull_future.cancel() -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument('project_id', help='Google Cloud project ID') - parser.add_argument('subscription_name', help='Pub/Sub subscription name') + parser.add_argument("project_id", help="Google Cloud project ID") + parser.add_argument("subscription_name", help="Pub/Sub subscription name") args = parser.parse_args() diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 476139a02642..07edfad7c4d2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -14,8 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import mock import os import pytest +import uuid from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 @@ -23,84 +25,80 @@ import sub -PROJECT = os.environ['GCLOUD_PROJECT'] -TOPIC = 'quickstart-sub-test-topic' -SUBSCRIPTION = 'quickstart-sub-test-topic-sub' +UUID = uuid.uuid4().hex +PROJECT = os.environ["GCLOUD_PROJECT"] +TOPIC = "quickstart-sub-test-topic-" + UUID +SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID publisher_client = pubsub_v1.PublisherClient() subscriber_client = pubsub_v1.SubscriberClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def topic_path(): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: topic = publisher_client.create_topic(topic_path) - return topic.name + yield topic.name except AlreadyExists: - return topic_path + yield topic_path + publisher_client.delete_topic(topic_path) -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def subscription_path(topic_path): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION + ) try: subscription = subscriber_client.create_subscription( - subscription_path, topic_path) - return subscription.name + subscription_path, topic_path + ) + yield subscription.name except AlreadyExists: - return subscription_path - + yield subscription_path -def _to_delete(resource_paths): - for item in resource_paths: - if 'topics' in item: - publisher_client.delete_topic(item) - if 'subscriptions' in item: - subscriber_client.delete_subscription(item) + subscriber_client.delete_subscription(subscription_path) def _publish_messages(topic_path): - publish_future = publisher_client.publish(topic_path, data=b'Hello World!') + publish_future = publisher_client.publish(topic_path, data=b"Hello World!") publish_future.result() -def _sub_timeout(project_id, subscription_name): - # This is an exactly copy of `sub.py` except - # StreamingPullFuture.result() will time out after 10s. 
- client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path( - project_id, subscription_name) +def test_sub(monkeypatch, topic_path, subscription_path, capsys): - def callback(message): - print('Received message {} of message ID {}\n'.format( - message, message.message_id)) - message.ack() - print('Acknowledged message {}\n'.format(message.message_id)) + real_client = pubsub_v1.SubscriberClient() + mock_client = mock.Mock(spec=pubsub_v1.SubscriberClient, wraps=real_client) - streaming_pull_future = client.subscribe( - subscription_path, callback=callback) - print('Listening for messages on {}..\n'.format(subscription_path)) + # Attributes on mock_client_constructor uses the corresponding + # attributes on pubsub_v1.SubscriberClient. + mock_client_constructor = mock.create_autospec(pubsub_v1.SubscriberClient) + mock_client_constructor.return_value = mock_client - try: - streaming_pull_future.result(timeout=10) - except: # noqa - streaming_pull_future.cancel() + monkeypatch.setattr(pubsub_v1, "SubscriberClient", mock_client_constructor) + def mock_subscribe(subscription_path, callback=None): + real_future = real_client.subscribe( + subscription_path, callback=callback + ) + mock_future = mock.Mock(spec=real_future, wraps=real_future) -def test_sub(monkeypatch, topic_path, subscription_path, capsys): - monkeypatch.setattr(sub, 'sub', _sub_timeout) + def mock_result(): + return real_future.result(timeout=10) + + mock_future.result.side_effect = mock_result + return mock_future + + mock_client.subscribe.side_effect = mock_subscribe _publish_messages(topic_path) sub.sub(PROJECT, SUBSCRIPTION) - # Clean up resources. 
- _to_delete([topic_path, subscription_path]) - out, _ = capsys.readouterr() assert "Received message" in out assert "Acknowledged message" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py index d318b260c63c..6a1d4aae1b5f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py @@ -15,23 +15,25 @@ # limitations under the License. import os +import uuid from google.cloud import pubsub_v1 import pytest import quickstart -PROJECT = os.environ['GCLOUD_PROJECT'] -TOPIC = 'end-to-end-test-topic' -SUBSCRIPTION = 'end-to-end-test-topic-sub' +UUID = uuid.uuid4().hex +PROJECT = os.environ["GCLOUD_PROJECT"] +TOPIC = "end-to-end-test-topic-" + UUID +SUBSCRIPTION = "end-to-end-test-topic-sub-" + UUID N = 10 -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) @@ -42,16 +44,19 @@ def topic(publisher_client): yield TOPIC + publisher_client.delete_topic(topic_path) -@pytest.fixture(scope='module') + +@pytest.fixture(scope="module") def subscriber_client(): yield pubsub_v1.SubscriberClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def subscription(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION) + PROJECT, SUBSCRIPTION + ) try: subscriber_client.delete_subscription(subscription_path) @@ -60,6 +65,8 @@ def subscription(subscriber_client, topic): yield SUBSCRIPTION + subscriber_client.delete_subscription(subscription_path) + def test_end_to_end(topic, subscription, capsys): diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py 
b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 3bbad0ead1b0..ea1cc9ff9e72 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -67,19 +67,20 @@ def create_subscription(project_id, topic_name, subscription_name): subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project_id, topic_name) subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) subscription = subscriber.create_subscription( - subscription_path, topic_path) + subscription_path, topic_path + ) - print('Subscription created: {}'.format(subscription)) + print("Subscription created: {}".format(subscription)) # [END pubsub_create_pull_subscription] -def create_push_subscription(project_id, - topic_name, - subscription_name, - endpoint): +def create_push_subscription( + project_id, topic_name, subscription_name, endpoint +): """Create a new push subscription on the given topic.""" # [START pubsub_create_push_subscription] from google.cloud import pubsub_v1 @@ -92,16 +93,17 @@ def create_push_subscription(project_id, subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project_id, topic_name) subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) - push_config = pubsub_v1.types.PushConfig( - push_endpoint=endpoint) + push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) subscription = subscriber.create_subscription( - subscription_path, topic_path, push_config) + subscription_path, topic_path, push_config + ) - print('Push subscription created: {}'.format(subscription)) - print('Endpoint for subscription is: {}'.format(endpoint)) + print("Push subscription created: {}".format(subscription)) + print("Endpoint for subscription is: {}".format(endpoint)) # [END pubsub_create_push_subscription] @@ -115,11 +117,12 @@ def 
delete_subscription(project_id, subscription_name): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) subscriber.delete_subscription(subscription_path) - print('Subscription deleted: {}'.format(subscription_path)) + print("Subscription deleted: {}".format(subscription_path)) # [END pubsub_delete_subscription] @@ -139,27 +142,22 @@ def update_subscription(project_id, subscription_name, endpoint): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) - push_config = pubsub_v1.types.PushConfig( - push_endpoint=endpoint) + push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) subscription = pubsub_v1.types.Subscription( - name=subscription_path, - push_config=push_config) + name=subscription_path, push_config=push_config + ) - update_mask = { - 'paths': { - 'push_config', - } - } + update_mask = {"paths": {"push_config"}} subscriber.update_subscription(subscription, update_mask) result = subscriber.get_subscription(subscription_path) - print('Subscription updated: {}'.format(subscription_path)) - print('New endpoint for subscription is: {}'.format( - result.push_config)) + print("Subscription updated: {}".format(subscription_path)) + print("New endpoint for subscription is: {}".format(result.push_config)) # [END pubsub_update_push_configuration] @@ -178,17 +176,18 @@ def receive_messages(project_id, subscription_name): # The `subscription_path` method creates a fully qualified identifier # in the form `projects/{project_id}/subscriptions/{subscription_name}` subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) def callback(message): - print('Received message: {}'.format(message)) + print("Received message: {}".format(message)) message.ack() subscriber.subscribe(subscription_path, 
callback=callback) # The subscriber is non-blocking. We must keep the main thread from # exiting to allow it to process messages asynchronously in the background. - print('Listening for messages on {}'.format(subscription_path)) + print("Listening for messages on {}".format(subscription_path)) while True: time.sleep(60) # [END pubsub_subscriber_async_pull] @@ -208,22 +207,23 @@ def receive_messages_with_custom_attributes(project_id, subscription_name): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) def callback(message): - print('Received message: {}'.format(message.data)) + print("Received message: {}".format(message.data)) if message.attributes: - print('Attributes:') + print("Attributes:") for key in message.attributes: value = message.attributes.get(key) - print('{}: {}'.format(key, value)) + print("{}: {}".format(key, value)) message.ack() subscriber.subscribe(subscription_path, callback=callback) # The subscriber is non-blocking, so we must keep the main thread from # exiting to allow it to process messages in the background. - print('Listening for messages on {}'.format(subscription_path)) + print("Listening for messages on {}".format(subscription_path)) while True: time.sleep(60) # [END pubsub_subscriber_async_pull_custom_attributes] @@ -242,20 +242,22 @@ def receive_messages_with_flow_control(project_id, subscription_name): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) def callback(message): - print('Received message: {}'.format(message.data)) + print("Received message: {}".format(message.data)) message.ack() # Limit the subscriber to only have ten outstanding messages at a time. 
flow_control = pubsub_v1.types.FlowControl(max_messages=10) subscriber.subscribe( - subscription_path, callback=callback, flow_control=flow_control) + subscription_path, callback=callback, flow_control=flow_control + ) # The subscriber is non-blocking, so we must keep the main thread from # exiting to allow it to process messages in the background. - print('Listening for messages on {}'.format(subscription_path)) + print("Listening for messages on {}".format(subscription_path)) while True: time.sleep(60) # [END pubsub_subscriber_flow_settings] @@ -271,7 +273,8 @@ def synchronous_pull(project_id, subscription_name): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) NUM_MESSAGES = 3 @@ -286,8 +289,11 @@ def synchronous_pull(project_id, subscription_name): # Acknowledges the received messages so they will not be sent again. subscriber.acknowledge(subscription_path, ack_ids) - print('Received and acknowledged {} messages. Done.'.format( - len(response.received_messages))) + print( + "Received and acknowledged {} messages. 
Done.".format( + len(response.received_messages) + ) + ) # [END pubsub_subscriber_sync_pull] @@ -306,7 +312,8 @@ def synchronous_pull_with_lease_management(project_id, subscription_name): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) NUM_MESSAGES = 2 ACK_DEADLINE = 30 @@ -322,8 +329,11 @@ def synchronous_pull_with_lease_management(project_id, subscription_name): def worker(msg): """Simulates a long-running process.""" RUN_TIME = random.randint(1, 60) - logger.info('{}: Running {} for {}s'.format( - time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME)) + logger.info( + "{}: Running {} for {}s".format( + time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME + ) + ) time.sleep(RUN_TIME) # `processes` stores process as key and ack id and message as values. @@ -344,24 +354,35 @@ def worker(msg): subscriber.modify_ack_deadline( subscription_path, [ack_id], - ack_deadline_seconds=ACK_DEADLINE) - logger.info('{}: Reset ack deadline for {} for {}s'.format( - time.strftime("%X", time.gmtime()), - msg_data, ACK_DEADLINE)) + ack_deadline_seconds=ACK_DEADLINE, + ) + logger.info( + "{}: Reset ack deadline for {} for {}s".format( + time.strftime("%X", time.gmtime()), + msg_data, + ACK_DEADLINE, + ) + ) # If the processs is finished, acknowledges using `ack_id`. else: subscriber.acknowledge(subscription_path, [ack_id]) - logger.info("{}: Acknowledged {}".format( - time.strftime("%X", time.gmtime()), msg_data)) + logger.info( + "{}: Acknowledged {}".format( + time.strftime("%X", time.gmtime()), msg_data + ) + ) processes.pop(process) # If there are still processes running, sleeps the thread. if processes: time.sleep(SLEEP_TIME) - print('Received and acknowledged {} messages. Done.'.format( - len(response.received_messages))) + print( + "Received and acknowledged {} messages. 
Done.".format( + len(response.received_messages) + ) + ) # [END pubsub_subscriber_sync_pull_with_lease] @@ -375,10 +396,11 @@ def listen_for_errors(project_id, subscription_name): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( - project_id, subscription_name) + project_id, subscription_name + ) def callback(message): - print('Received message: {}'.format(message)) + print("Received message: {}".format(message)) message.ack() future = subscriber.subscribe(subscription_path, callback=callback) @@ -390,109 +412,126 @@ def callback(message): future.result(timeout=30) except Exception as e: print( - 'Listening for messages on {} threw an Exception: {}.'.format( - subscription_name, e)) + "Listening for messages on {} threw an Exception: {}.".format( + subscription_name, e + ) + ) # [END pubsub_subscriber_error_listener] -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter + formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument('project_id', help='Your Google Cloud project ID') + parser.add_argument("project_id", help="Your Google Cloud project ID") - subparsers = parser.add_subparsers(dest='command') + subparsers = parser.add_subparsers(dest="command") list_in_topic_parser = subparsers.add_parser( - 'list_in_topic', help=list_subscriptions_in_topic.__doc__) - list_in_topic_parser.add_argument('topic_name') + "list_in_topic", help=list_subscriptions_in_topic.__doc__ + ) + list_in_topic_parser.add_argument("topic_name") list_in_project_parser = subparsers.add_parser( - 'list_in_project', help=list_subscriptions_in_project.__doc__) + "list_in_project", help=list_subscriptions_in_project.__doc__ + ) create_parser = subparsers.add_parser( - 'create', help=create_subscription.__doc__) - create_parser.add_argument('topic_name') - create_parser.add_argument('subscription_name') + "create", 
help=create_subscription.__doc__ + ) + create_parser.add_argument("topic_name") + create_parser.add_argument("subscription_name") create_push_parser = subparsers.add_parser( - 'create-push', help=create_push_subscription.__doc__) - create_push_parser.add_argument('topic_name') - create_push_parser.add_argument('subscription_name') - create_push_parser.add_argument('endpoint') + "create-push", help=create_push_subscription.__doc__ + ) + create_push_parser.add_argument("topic_name") + create_push_parser.add_argument("subscription_name") + create_push_parser.add_argument("endpoint") delete_parser = subparsers.add_parser( - 'delete', help=delete_subscription.__doc__) - delete_parser.add_argument('subscription_name') + "delete", help=delete_subscription.__doc__ + ) + delete_parser.add_argument("subscription_name") update_parser = subparsers.add_parser( - 'update', help=update_subscription.__doc__) - update_parser.add_argument('subscription_name') - update_parser.add_argument('endpoint') + "update", help=update_subscription.__doc__ + ) + update_parser.add_argument("subscription_name") + update_parser.add_argument("endpoint") receive_parser = subparsers.add_parser( - 'receive', help=receive_messages.__doc__) - receive_parser.add_argument('subscription_name') + "receive", help=receive_messages.__doc__ + ) + receive_parser.add_argument("subscription_name") receive_with_custom_attributes_parser = subparsers.add_parser( - 'receive-custom-attributes', - help=receive_messages_with_custom_attributes.__doc__) - receive_with_custom_attributes_parser.add_argument('subscription_name') + "receive-custom-attributes", + help=receive_messages_with_custom_attributes.__doc__, + ) + receive_with_custom_attributes_parser.add_argument("subscription_name") receive_with_flow_control_parser = subparsers.add_parser( - 'receive-flow-control', - help=receive_messages_with_flow_control.__doc__) - receive_with_flow_control_parser.add_argument('subscription_name') + "receive-flow-control", 
help=receive_messages_with_flow_control.__doc__ + ) + receive_with_flow_control_parser.add_argument("subscription_name") synchronous_pull_parser = subparsers.add_parser( - 'receive-synchronously', - help=synchronous_pull.__doc__) - synchronous_pull_parser.add_argument('subscription_name') + "receive-synchronously", help=synchronous_pull.__doc__ + ) + synchronous_pull_parser.add_argument("subscription_name") synchronous_pull_with_lease_management_parser = subparsers.add_parser( - 'receive-synchronously-with-lease', - help=synchronous_pull_with_lease_management.__doc__) + "receive-synchronously-with-lease", + help=synchronous_pull_with_lease_management.__doc__, + ) synchronous_pull_with_lease_management_parser.add_argument( - 'subscription_name') + "subscription_name" + ) listen_for_errors_parser = subparsers.add_parser( - 'listen_for_errors', help=listen_for_errors.__doc__) - listen_for_errors_parser.add_argument('subscription_name') + "listen_for_errors", help=listen_for_errors.__doc__ + ) + listen_for_errors_parser.add_argument("subscription_name") args = parser.parse_args() - if args.command == 'list_in_topic': + if args.command == "list_in_topic": list_subscriptions_in_topic(args.project_id, args.topic_name) - elif args.command == 'list_in_project': + elif args.command == "list_in_project": list_subscriptions_in_project(args.project_id) - elif args.command == 'create': + elif args.command == "create": create_subscription( - args.project_id, args.topic_name, args.subscription_name) - elif args.command == 'create-push': + args.project_id, args.topic_name, args.subscription_name + ) + elif args.command == "create-push": create_push_subscription( args.project_id, args.topic_name, args.subscription_name, - args.endpoint) - elif args.command == 'delete': - delete_subscription( - args.project_id, args.subscription_name) - elif args.command == 'update': + args.endpoint, + ) + elif args.command == "delete": + delete_subscription(args.project_id, args.subscription_name) + 
elif args.command == "update": update_subscription( - args.project_id, args.subscription_name, args.endpoint) - elif args.command == 'receive': + args.project_id, args.subscription_name, args.endpoint + ) + elif args.command == "receive": receive_messages(args.project_id, args.subscription_name) - elif args.command == 'receive-custom-attributes': + elif args.command == "receive-custom-attributes": receive_messages_with_custom_attributes( - args.project_id, args.subscription_name) - elif args.command == 'receive-flow-control': + args.project_id, args.subscription_name + ) + elif args.command == "receive-flow-control": receive_messages_with_flow_control( - args.project_id, args.subscription_name) - elif args.command == 'receive-synchronously': - synchronous_pull( - args.project_id, args.subscription_name) - elif args.command == 'receive-synchronously-with-lease': + args.project_id, args.subscription_name + ) + elif args.command == "receive-synchronously": + synchronous_pull(args.project_id, args.subscription_name) + elif args.command == "receive-synchronously-with-lease": synchronous_pull_with_lease_management( - args.project_id, args.subscription_name) - elif args.command == 'listen_for_errors': + args.project_id, args.subscription_name + ) + elif args.command == "listen_for_errors": listen_for_errors(args.project_id, args.subscription_name) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 4c5fd61223db..0645c0738e1c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -14,6 +14,7 @@ import os import time +import uuid from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub_v1 @@ -22,21 +23,22 @@ import subscriber -PROJECT = os.environ['GCLOUD_PROJECT'] -TOPIC = 'subscription-test-topic' -SUBSCRIPTION_ONE = 
'subscription-test-subscription-one' -SUBSCRIPTION_TWO = 'subscription-test-subscription-two' -SUBSCRIPTION_THREE = 'subscription-test-subscription-three' -ENDPOINT = 'https://{}.appspot.com/push'.format(PROJECT) -NEW_ENDPOINT = 'https://{}.appspot.com/push2'.format(PROJECT) +UUID = uuid.uuid4().hex +PROJECT = os.environ["GCLOUD_PROJECT"] +TOPIC = "subscription-test-topic-" + UUID +SUBSCRIPTION_ONE = "subscription-test-subscription-one-" + UUID +SUBSCRIPTION_TWO = "subscription-test-subscription-two-" + UUID +SUBSCRIPTION_THREE = "subscription-test-subscription-three-" + UUID +ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) +NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) @@ -48,49 +50,55 @@ def topic(publisher_client): yield response.name -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def subscriber_client(): yield pubsub_v1.SubscriberClient() -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def subscription_one(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ONE) + PROJECT, SUBSCRIPTION_ONE + ) try: response = subscriber_client.get_subscription(subscription_path) except: # noqa response = subscriber_client.create_subscription( - subscription_path, topic=topic) + subscription_path, topic=topic + ) yield response.name -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def subscription_two(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_TWO) + PROJECT, SUBSCRIPTION_TWO + ) try: response = subscriber_client.get_subscription(subscription_path) except: # noqa response = subscriber_client.create_subscription( 
- subscription_path, topic=topic) + subscription_path, topic=topic + ) yield response.name -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def subscription_three(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_THREE) + PROJECT, SUBSCRIPTION_THREE + ) try: response = subscriber_client.get_subscription(subscription_path) except: # noqa response = subscriber_client.create_subscription( - subscription_path, topic=topic) + subscription_path, topic=topic + ) yield response.name @@ -113,7 +121,8 @@ def _(): def test_create(subscriber_client): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ONE) + PROJECT, SUBSCRIPTION_ONE + ) try: subscriber_client.delete_subscription(subscription_path) @@ -129,14 +138,16 @@ def _(): def test_create_push(subscriber_client): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ONE) + PROJECT, SUBSCRIPTION_ONE + ) try: subscriber_client.delete_subscription(subscription_path) except Exception: pass subscriber.create_push_subscription( - PROJECT, TOPIC, SUBSCRIPTION_ONE, ENDPOINT) + PROJECT, TOPIC, SUBSCRIPTION_ONE, ENDPOINT + ) @eventually_consistent.call def _(): @@ -147,7 +158,7 @@ def test_update(subscriber_client, subscription_one, capsys): subscriber.update_subscription(PROJECT, SUBSCRIPTION_ONE, NEW_ENDPOINT) out, _ = capsys.readouterr() - assert 'Subscription updated' in out + assert "Subscription updated" in out def test_delete(subscriber_client, subscription_one): @@ -161,14 +172,14 @@ def _(): def _publish_messages(publisher_client, topic): for n in range(5): - data = u'Message {}'.format(n).encode('utf-8') + data = u"Message {}".format(n).encode("utf-8") future = publisher_client.publish(topic, data=data) future.result() def _publish_messages_with_custom_attributes(publisher_client, topic): - data = u'Test message'.encode('utf-8') - future = publisher_client.publish(topic, data=data, 
origin='python-sample') + data = u"Test message".encode("utf-8") + future = publisher_client.publish(topic, data=data, origin="python-sample") future.result() @@ -178,74 +189,100 @@ def _make_sleep_patch(): def new_sleep(period): if period == 60: real_sleep(5) - raise RuntimeError('sigil') + raise RuntimeError("sigil") else: real_sleep(period) - return mock.patch('time.sleep', new=new_sleep) + return mock.patch("time.sleep", new=new_sleep) + + +def _to_delete(): + publisher_client = pubsub_v1.PublisherClient() + subscriber_client = pubsub_v1.SubscriberClient() + resources = [TOPIC, SUBSCRIPTION_TWO, SUBSCRIPTION_THREE] + + for item in resources: + if "subscription-test-topic" in item: + publisher_client.delete_topic( + "projects/{}/topics/{}".format(PROJECT, item) + ) + if "subscription-test-subscription" in item: + subscriber_client.delete_subscription( + "projects/{}/subscriptions/{}".format(PROJECT, item) + ) def test_receive(publisher_client, topic, subscription_two, capsys): _publish_messages(publisher_client, topic) with _make_sleep_patch(): - with pytest.raises(RuntimeError, match='sigil'): + with pytest.raises(RuntimeError, match="sigil"): subscriber.receive_messages(PROJECT, SUBSCRIPTION_TWO) out, _ = capsys.readouterr() - assert 'Listening' in out + assert "Listening" in out assert subscription_two in out - assert 'Message 1' in out + assert "Message" in out def test_receive_with_custom_attributes( - publisher_client, topic, subscription_two, capsys): + publisher_client, topic, subscription_two, capsys +): _publish_messages_with_custom_attributes(publisher_client, topic) with _make_sleep_patch(): - with pytest.raises(RuntimeError, match='sigil'): + with pytest.raises(RuntimeError, match="sigil"): subscriber.receive_messages_with_custom_attributes( - PROJECT, SUBSCRIPTION_TWO) + PROJECT, SUBSCRIPTION_TWO + ) out, _ = capsys.readouterr() - assert 'Test message' in out - assert 'origin' in out - assert 'python-sample' in out + assert "Test message" in out + 
assert "origin" in out + assert "python-sample" in out def test_receive_with_flow_control( - publisher_client, topic, subscription_two, capsys): + publisher_client, topic, subscription_two, capsys +): _publish_messages(publisher_client, topic) with _make_sleep_patch(): - with pytest.raises(RuntimeError, match='sigil'): + with pytest.raises(RuntimeError, match="sigil"): subscriber.receive_messages_with_flow_control( - PROJECT, SUBSCRIPTION_TWO) + PROJECT, SUBSCRIPTION_TWO + ) out, _ = capsys.readouterr() - assert 'Listening' in out + assert "Listening" in out assert subscription_two in out - assert 'Message 1' in out + assert "Message" in out def test_receive_synchronously( - publisher_client, topic, subscription_three, capsys): + publisher_client, topic, subscription_three, capsys +): _publish_messages(publisher_client, topic) subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_THREE) out, _ = capsys.readouterr() - assert 'Done.' in out + assert "Done." in out def test_receive_synchronously_with_lease( - publisher_client, topic, subscription_three, capsys): + publisher_client, topic, subscription_three, capsys +): _publish_messages(publisher_client, topic) subscriber.synchronous_pull_with_lease_management( - PROJECT, SUBSCRIPTION_THREE) + PROJECT, SUBSCRIPTION_THREE + ) out, _ = capsys.readouterr() - assert 'Done.' in out + assert "Done." in out + + # Clean up resources after all the tests. 
+ _to_delete() From 3accb6ce64b3fe36c7bcfb9713ef1d6be6fbc1ff Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 12 Dec 2019 12:22:16 -0800 Subject: [PATCH 0555/1197] Pub/Sub: remove infinite while loops in subscriber examples [(#2604)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2604) * use result() on streaming pull futures instead of infinite while * remove unused imports --- .../samples/snippets/publisher_test.py | 75 ++++---- .../samples/snippets/subscriber.py | 98 ++++++---- .../samples/snippets/subscriber_test.py | 180 ++++++++---------- 3 files changed, 179 insertions(+), 174 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 125fae3c06b9..fbe30694ae45 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -25,7 +25,8 @@ UUID = uuid.uuid4().hex PROJECT = os.environ["GCLOUD_PROJECT"] -TOPIC = "publisher-test-topic-" + UUID +TOPIC_ADMIN = "publisher-test-topic-admin-" + UUID +TOPIC_PUBLISH = "publisher-test-topic-publish-" + UUID @pytest.fixture @@ -34,15 +35,30 @@ def client(): @pytest.fixture -def topic(client): - topic_path = client.topic_path(PROJECT, TOPIC) +def topic_admin(client): + topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - response = client.get_topic(topic_path) + topic = client.get_topic(topic_path) except: # noqa - response = client.create_topic(topic_path) + topic = client.create_topic(topic_path) - yield response.name + yield topic.name + # Teardown of `topic_admin` is handled in `test_delete()`. 
+ + +@pytest.fixture +def topic_publish(client): + topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) + + try: + topic = client.get_topic(topic_path) + except: # noqa + topic = client.create_topic(topic_path) + + yield topic.name + + client.delete_topic(topic.name) def _make_sleep_patch(): @@ -58,83 +74,74 @@ def new_sleep(period): return mock.patch("time.sleep", new=new_sleep) -def _to_delete(): - publisher_client = pubsub_v1.PublisherClient() - publisher_client.delete_topic( - "projects/{}/topics/{}".format(PROJECT, TOPIC) - ) - - -def test_list(client, topic, capsys): +def test_list(client, topic_admin, capsys): @eventually_consistent.call def _(): publisher.list_topics(PROJECT) out, _ = capsys.readouterr() - assert topic in out + assert topic_admin in out def test_create(client): - topic_path = client.topic_path(PROJECT, TOPIC) + topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: client.delete_topic(topic_path) except Exception: pass - publisher.create_topic(PROJECT, TOPIC) + publisher.create_topic(PROJECT, TOPIC_ADMIN) @eventually_consistent.call def _(): assert client.get_topic(topic_path) -def test_delete(client, topic): - publisher.delete_topic(PROJECT, TOPIC) +def test_delete(client, topic_admin): + publisher.delete_topic(PROJECT, TOPIC_ADMIN) @eventually_consistent.call def _(): with pytest.raises(Exception): - client.get_topic(client.topic_path(PROJECT, TOPIC)) + client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) -def test_publish(topic, capsys): - publisher.publish_messages(PROJECT, TOPIC) +def test_publish(topic_publish, capsys): + publisher.publish_messages(PROJECT, TOPIC_PUBLISH) out, _ = capsys.readouterr() assert "Published" in out -def test_publish_with_custom_attributes(topic, capsys): - publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC) +def test_publish_with_custom_attributes(topic_publish, capsys): + publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC_PUBLISH) out, _ = capsys.readouterr() assert 
"Published" in out -def test_publish_with_batch_settings(topic, capsys): - publisher.publish_messages_with_batch_settings(PROJECT, TOPIC) +def test_publish_with_batch_settings(topic_publish, capsys): + publisher.publish_messages_with_batch_settings(PROJECT, TOPIC_PUBLISH) out, _ = capsys.readouterr() assert "Published" in out -def test_publish_with_retry_settings(topic, capsys): - publisher.publish_messages_with_retry_settings(PROJECT, TOPIC) +def test_publish_with_retry_settings(topic_publish, capsys): + publisher.publish_messages_with_retry_settings(PROJECT, TOPIC_PUBLISH) out, _ = capsys.readouterr() assert "Published" in out -def test_publish_with_error_handler(topic, capsys): - publisher.publish_messages_with_error_handler(PROJECT, TOPIC) +def test_publish_with_error_handler(topic_publish, capsys): + publisher.publish_messages_with_error_handler(PROJECT, TOPIC_PUBLISH) out, _ = capsys.readouterr() assert "Published" in out -def test_publish_with_futures(topic, capsys): - publisher.publish_messages_with_futures(PROJECT, TOPIC) +def test_publish_with_futures(topic_publish, capsys): + publisher.publish_messages_with_futures(PROJECT, TOPIC_PUBLISH) out, _ = capsys.readouterr() assert "Published" in out - - _to_delete() diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index ea1cc9ff9e72..0d328d232d05 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -161,16 +161,16 @@ def update_subscription(project_id, subscription_name, endpoint): # [END pubsub_update_push_configuration] -def receive_messages(project_id, subscription_name): +def receive_messages(project_id, subscription_name, timeout=None): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] # [START pubsub_quickstart_subscriber] - import time - from google.cloud import pubsub_v1 # TODO project_id = 
"Your Google Cloud Project ID" # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO timeout = 5.0 # "How long the subscriber should listen for + # messages in seconds" subscriber = pubsub_v1.SubscriberClient() # The `subscription_path` method creates a fully qualified identifier @@ -183,27 +183,33 @@ def callback(message): print("Received message: {}".format(message)) message.ack() - subscriber.subscribe(subscription_path, callback=callback) + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback + ) + print("Listening for messages on {}..\n".format(subscription_path)) - # The subscriber is non-blocking. We must keep the main thread from - # exiting to allow it to process messages asynchronously in the background. - print("Listening for messages on {}".format(subscription_path)) - while True: - time.sleep(60) + # result() in a future will block indefinitely if `timeout` is not set, + # unless an exception is encountered first. + try: + streaming_pull_future.result(timeout=timeout) + except: # noqa + streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull] # [END pubsub_quickstart_subscriber] -def receive_messages_with_custom_attributes(project_id, subscription_name): +def receive_messages_with_custom_attributes( + project_id, subscription_name, timeout=None +): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_sync_pull_custom_attributes] # [START pubsub_subscriber_async_pull_custom_attributes] - import time - from google.cloud import pubsub_v1 # TODO project_id = "Your Google Cloud Project ID" # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO timeout = 5.0 # "How long the subscriber should listen for + # messages in seconds" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( @@ -219,26 +225,32 @@ def callback(message): print("{}: {}".format(key, value)) message.ack() - subscriber.subscribe(subscription_path, 
callback=callback) + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback + ) + print("Listening for messages on {}..\n".format(subscription_path)) - # The subscriber is non-blocking, so we must keep the main thread from - # exiting to allow it to process messages in the background. - print("Listening for messages on {}".format(subscription_path)) - while True: - time.sleep(60) + # result() in a future will block indefinitely if `timeout` is not set, + # unless an exception is encountered first. + try: + streaming_pull_future.result(timeout=timeout) + except: # noqa + streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull_custom_attributes] # [END pubsub_subscriber_sync_pull_custom_attributes] -def receive_messages_with_flow_control(project_id, subscription_name): +def receive_messages_with_flow_control( + project_id, subscription_name, timeout=None +): """Receives messages from a pull subscription with flow control.""" # [START pubsub_subscriber_flow_settings] - import time - from google.cloud import pubsub_v1 # TODO project_id = "Your Google Cloud Project ID" # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO timeout = 5.0 # "How long the subscriber should listen for + # messages in seconds" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( @@ -251,15 +263,18 @@ def callback(message): # Limit the subscriber to only have ten outstanding messages at a time. flow_control = pubsub_v1.types.FlowControl(max_messages=10) - subscriber.subscribe( + + streaming_pull_future = subscriber.subscribe( subscription_path, callback=callback, flow_control=flow_control ) + print("Listening for messages on {}..\n".format(subscription_path)) - # The subscriber is non-blocking, so we must keep the main thread from - # exiting to allow it to process messages in the background. 
- print("Listening for messages on {}".format(subscription_path)) - while True: - time.sleep(60) + # result() in a future will block indefinitely if `timeout` is not set, + # unless an exception is encountered first. + try: + streaming_pull_future.result(timeout=timeout) + except: # noqa + streaming_pull_future.cancel() # [END pubsub_subscriber_flow_settings] @@ -386,13 +401,15 @@ def worker(msg): # [END pubsub_subscriber_sync_pull_with_lease] -def listen_for_errors(project_id, subscription_name): +def listen_for_errors(project_id, subscription_name, timeout=None): """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] from google.cloud import pubsub_v1 # TODO project_id = "Your Google Cloud Project ID" # TODO subscription_name = "Your Pubsub subscription name" + # TODO timeout = 5.0 # "How long the subscriber should listen for + # messages in seconds" subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path( @@ -403,16 +420,19 @@ def callback(message): print("Received message: {}".format(message)) message.ack() - future = subscriber.subscribe(subscription_path, callback=callback) + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback + ) + print("Listening for messages on {}..\n".format(subscription_path)) - # Blocks the thread while messages are coming in through the stream. Any - # exceptions that crop up on the thread will be set on the future. + # result() in a future will block indefinitely if `timeout` is not set, + # unless an exception is encountered first. try: - # When timeout is unspecified, the result method waits indefinitely. 
- future.result(timeout=30) + streaming_pull_future.result(timeout=timeout) except Exception as e: + streaming_pull_future.cancel() print( - "Listening for messages on {} threw an Exception: {}.".format( + "Listening for messages on {} threw an exception: {}.".format( subscription_name, e ) ) @@ -518,14 +538,14 @@ def callback(message): args.project_id, args.subscription_name, args.endpoint ) elif args.command == "receive": - receive_messages(args.project_id, args.subscription_name) + receive_messages(args.project_id, args.subscription_name, args.timeout) elif args.command == "receive-custom-attributes": receive_messages_with_custom_attributes( - args.project_id, args.subscription_name + args.project_id, args.subscription_name, args.timeout ) elif args.command == "receive-flow-control": receive_messages_with_flow_control( - args.project_id, args.subscription_name + args.project_id, args.subscription_name, args.timeout ) elif args.command == "receive-synchronously": synchronous_pull(args.project_id, args.subscription_name) @@ -534,4 +554,6 @@ def callback(message): args.project_id, args.subscription_name ) elif args.command == "listen_for_errors": - listen_for_errors(args.project_id, args.subscription_name) + listen_for_errors( + args.project_id, args.subscription_name, args.timeout + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 0645c0738e1c..50353c1c6e42 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -13,12 +13,10 @@ # limitations under the License. 
import os -import time import uuid from gcp_devrel.testing import eventually_consistent from google.cloud import pubsub_v1 -import mock import pytest import subscriber @@ -26,9 +24,9 @@ UUID = uuid.uuid4().hex PROJECT = os.environ["GCLOUD_PROJECT"] TOPIC = "subscription-test-topic-" + UUID -SUBSCRIPTION_ONE = "subscription-test-subscription-one-" + UUID -SUBSCRIPTION_TWO = "subscription-test-subscription-two-" + UUID -SUBSCRIPTION_THREE = "subscription-test-subscription-three-" + UUID +SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID +SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID +SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) @@ -43,11 +41,13 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - response = publisher_client.get_topic(topic_path) + subscription = publisher_client.get_topic(topic_path) except: # noqa - response = publisher_client.create_topic(topic_path) + subscription = publisher_client.create_topic(topic_path) - yield response.name + yield subscription.name + + publisher_client.delete_topic(subscription.name) @pytest.fixture(scope="module") @@ -56,72 +56,76 @@ def subscriber_client(): @pytest.fixture(scope="module") -def subscription_one(subscriber_client, topic): +def subscription_admin(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ONE + PROJECT, SUBSCRIPTION_ADMIN ) try: - response = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription(subscription_path) except: # noqa - response = subscriber_client.create_subscription( + subscription = subscriber_client.create_subscription( subscription_path, topic=topic ) - yield response.name + yield subscription.name @pytest.fixture(scope="module") -def 
subscription_two(subscriber_client, topic): +def subscription_sync(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_TWO + PROJECT, SUBSCRIPTION_SYNC ) try: - response = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription(subscription_path) except: # noqa - response = subscriber_client.create_subscription( + subscription = subscriber_client.create_subscription( subscription_path, topic=topic ) - yield response.name + yield subscription.name + + subscriber_client.delete_subscription(subscription.name) @pytest.fixture(scope="module") -def subscription_three(subscriber_client, topic): +def subscription_async(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_THREE + PROJECT, SUBSCRIPTION_ASYNC ) try: - response = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription(subscription_path) except: # noqa - response = subscriber_client.create_subscription( + subscription = subscriber_client.create_subscription( subscription_path, topic=topic ) - yield response.name + yield subscription.name + + subscriber_client.delete_subscription(subscription.name) -def test_list_in_topic(subscription_one, capsys): +def test_list_in_topic(subscription_admin, capsys): @eventually_consistent.call def _(): subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) out, _ = capsys.readouterr() - assert subscription_one in out + assert subscription_admin in out -def test_list_in_project(subscription_one, capsys): +def test_list_in_project(subscription_admin, capsys): @eventually_consistent.call def _(): subscriber.list_subscriptions_in_project(PROJECT) out, _ = capsys.readouterr() - assert subscription_one in out + assert subscription_admin in out def test_create(subscriber_client): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ONE + PROJECT, 
SUBSCRIPTION_ADMIN ) try: @@ -129,7 +133,7 @@ def test_create(subscriber_client): except Exception: pass - subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ONE) + subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN) @eventually_consistent.call def _(): @@ -138,7 +142,7 @@ def _(): def test_create_push(subscriber_client): subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ONE + PROJECT, SUBSCRIPTION_ADMIN ) try: subscriber_client.delete_subscription(subscription_path) @@ -146,7 +150,7 @@ def test_create_push(subscriber_client): pass subscriber.create_push_subscription( - PROJECT, TOPIC, SUBSCRIPTION_ONE, ENDPOINT + PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT ) @eventually_consistent.call @@ -154,135 +158,107 @@ def _(): assert subscriber_client.get_subscription(subscription_path) -def test_update(subscriber_client, subscription_one, capsys): - subscriber.update_subscription(PROJECT, SUBSCRIPTION_ONE, NEW_ENDPOINT) +def test_update(subscriber_client, subscription_admin, capsys): + subscriber.update_subscription(PROJECT, SUBSCRIPTION_ADMIN, NEW_ENDPOINT) out, _ = capsys.readouterr() assert "Subscription updated" in out -def test_delete(subscriber_client, subscription_one): - subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ONE) +def test_delete(subscriber_client, subscription_admin): + subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) @eventually_consistent.call def _(): with pytest.raises(Exception): - subscriber_client.get_subscription(subscription_one) + subscriber_client.get_subscription(subscription_admin) def _publish_messages(publisher_client, topic): for n in range(5): - data = u"Message {}".format(n).encode("utf-8") - future = publisher_client.publish(topic, data=data) - future.result() - - -def _publish_messages_with_custom_attributes(publisher_client, topic): - data = u"Test message".encode("utf-8") - future = publisher_client.publish(topic, data=data, origin="python-sample") - 
future.result() - - -def _make_sleep_patch(): - real_sleep = time.sleep - - def new_sleep(period): - if period == 60: - real_sleep(5) - raise RuntimeError("sigil") - else: - real_sleep(period) - - return mock.patch("time.sleep", new=new_sleep) - - -def _to_delete(): - publisher_client = pubsub_v1.PublisherClient() - subscriber_client = pubsub_v1.SubscriberClient() - resources = [TOPIC, SUBSCRIPTION_TWO, SUBSCRIPTION_THREE] - - for item in resources: - if "subscription-test-topic" in item: - publisher_client.delete_topic( - "projects/{}/topics/{}".format(PROJECT, item) - ) - if "subscription-test-subscription" in item: - subscriber_client.delete_subscription( - "projects/{}/subscriptions/{}".format(PROJECT, item) - ) + data = u"message {}".format(n).encode("utf-8") + publish_future = publisher_client.publish( + topic, data=data, origin="python-sample" + ) + publish_future.result() -def test_receive(publisher_client, topic, subscription_two, capsys): +def test_receive(publisher_client, topic, subscription_async, capsys): _publish_messages(publisher_client, topic) - with _make_sleep_patch(): - with pytest.raises(RuntimeError, match="sigil"): - subscriber.receive_messages(PROJECT, SUBSCRIPTION_TWO) + subscriber.receive_messages(PROJECT, SUBSCRIPTION_ASYNC, 5) out, _ = capsys.readouterr() assert "Listening" in out - assert subscription_two in out - assert "Message" in out + assert subscription_async in out + assert "message" in out def test_receive_with_custom_attributes( - publisher_client, topic, subscription_two, capsys + publisher_client, topic, subscription_async, capsys ): - _publish_messages_with_custom_attributes(publisher_client, topic) + _publish_messages(publisher_client, topic) - with _make_sleep_patch(): - with pytest.raises(RuntimeError, match="sigil"): - subscriber.receive_messages_with_custom_attributes( - PROJECT, SUBSCRIPTION_TWO - ) + subscriber.receive_messages_with_custom_attributes( + PROJECT, SUBSCRIPTION_ASYNC, 5 + ) out, _ = capsys.readouterr() 
- assert "Test message" in out + assert "message" in out assert "origin" in out assert "python-sample" in out def test_receive_with_flow_control( - publisher_client, topic, subscription_two, capsys + publisher_client, topic, subscription_async, capsys ): _publish_messages(publisher_client, topic) - with _make_sleep_patch(): - with pytest.raises(RuntimeError, match="sigil"): - subscriber.receive_messages_with_flow_control( - PROJECT, SUBSCRIPTION_TWO - ) + subscriber.receive_messages_with_flow_control( + PROJECT, SUBSCRIPTION_ASYNC, 5 + ) out, _ = capsys.readouterr() assert "Listening" in out - assert subscription_two in out - assert "Message" in out + assert subscription_async in out + assert "message" in out def test_receive_synchronously( - publisher_client, topic, subscription_three, capsys + publisher_client, topic, subscription_sync, capsys ): _publish_messages(publisher_client, topic) - subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_THREE) + subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC) out, _ = capsys.readouterr() assert "Done." in out def test_receive_synchronously_with_lease( - publisher_client, topic, subscription_three, capsys + publisher_client, topic, subscription_sync, capsys ): _publish_messages(publisher_client, topic) subscriber.synchronous_pull_with_lease_management( - PROJECT, SUBSCRIPTION_THREE + PROJECT, SUBSCRIPTION_SYNC ) out, _ = capsys.readouterr() assert "Done." in out - # Clean up resources after all the tests. 
- _to_delete() + +def test_listen_for_errors( + publisher_client, topic, subscription_async, capsys +): + + _publish_messages(publisher_client, topic) + + subscriber.listen_for_errors(PROJECT, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "threw an exception" in out From dc395e55438d38f9f06f3adc8e820807bf4444c1 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 18 Dec 2019 14:31:23 -0800 Subject: [PATCH 0556/1197] Pub/Sub: add timeout in argparse [(#2637)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2637) --- .../google-cloud-pubsub/samples/snippets/subscriber.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 0d328d232d05..79c9bc4a0aaa 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -484,17 +484,24 @@ def callback(message): "receive", help=receive_messages.__doc__ ) receive_parser.add_argument("subscription_name") + receive_parser.add_argument("--timeout", default=None, type=float) receive_with_custom_attributes_parser = subparsers.add_parser( "receive-custom-attributes", help=receive_messages_with_custom_attributes.__doc__, ) receive_with_custom_attributes_parser.add_argument("subscription_name") + receive_with_custom_attributes_parser.add_argument( + "--timeout", default=None, type=float + ) receive_with_flow_control_parser = subparsers.add_parser( "receive-flow-control", help=receive_messages_with_flow_control.__doc__ ) receive_with_flow_control_parser.add_argument("subscription_name") + receive_with_flow_control_parser.add_argument( + "--timeout", default=None, type=float + ) synchronous_pull_parser = subparsers.add_parser( "receive-synchronously", help=synchronous_pull.__doc__ @@ -513,6 +520,9 @@ def callback(message): 
"listen_for_errors", help=listen_for_errors.__doc__ ) listen_for_errors_parser.add_argument("subscription_name") + listen_for_errors_parser.add_argument( + "--timeout", default=None, type=float + ) args = parser.parse_args() From b92dad00a0a8b90be9ff93c264b159eb5894249c Mon Sep 17 00:00:00 2001 From: DPEBot Date: Fri, 20 Dec 2019 17:41:38 -0800 Subject: [PATCH 0557/1197] Auto-update dependencies. [(#2005)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2005) * Auto-update dependencies. * Revert update of appengine/flexible/datastore. * revert update of appengine/flexible/scipy * revert update of bigquery/bqml * revert update of bigquery/cloud-client * revert update of bigquery/datalab-migration * revert update of bigtable/quickstart * revert update of compute/api * revert update of container_registry/container_analysis * revert update of dataflow/run_template * revert update of datastore/cloud-ndb * revert update of dialogflow/cloud-client * revert update of dlp * revert update of functions/imagemagick * revert update of functions/ocr/app * revert update of healthcare/api-client/fhir * revert update of iam/api-client * revert update of iot/api-client/gcs_file_to_device * revert update of iot/api-client/mqtt_example * revert update of language/automl * revert update of run/image-processing * revert update of vision/automl * revert update testing/requirements.txt * revert update of vision/cloud-client/detect * revert update of vision/cloud-client/product_search * revert update of jobs/v2/api_client * revert update of jobs/v3/api_client * revert update of opencensus * revert update of translate/cloud-client * revert update to speech/cloud-client Co-authored-by: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Co-authored-by: Doug Mahugh --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt 
b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index a97fc0997e2c..a5a8b2bb921a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.0.0 +google-cloud-pubsub==1.1.0 From 0f22e7f9364a807f4254ff280ceaa076e782b569 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 24 Feb 2020 10:03:17 -0800 Subject: [PATCH 0558/1197] remove publish concurrency control sample [(#2960)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2960) --- .../samples/snippets/README.rst | 36 ++++++++++--------- .../samples/snippets/publisher.py | 31 ---------------- .../samples/snippets/publisher_test.py | 7 ---- 3 files changed, 19 insertions(+), 55 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index e0e265f8d427..21a4b231f1a0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -93,8 +93,8 @@ To run this sample: $ python publisher.py usage: publisher.py [-h] - project - {list,create,delete,publish,publish-with-custom-attributes,publish-with-futures,publish-with-batch-settings} + project_id + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} ... This application demonstrates how to perform basic operations on topics @@ -104,8 +104,8 @@ To run this sample: at https://cloud.google.com/pubsub/docs. 
positional arguments: - project Your Google Cloud project ID - {list,create,delete,publish,publish-with-custom-attributes,publish-with-futures,publish-with-batch-settings} + project_id Your Google Cloud project ID + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} list Lists all Pub/Sub topics in the given project. create Create a new Pub/Sub topic. delete Deletes an existing Pub/Sub topic. @@ -113,12 +113,14 @@ To run this sample: publish-with-custom-attributes Publishes multiple messages with custom attributes to a Pub/Sub topic. - publish-with-futures - Publishes multiple messages to a Pub/Sub topic and - prints their message IDs. + publish-with-error-handler + Publishes multiple messages to a Pub/Sub topic with an + error handler. publish-with-batch-settings Publishes multiple messages to a Pub/Sub topic with batch settings. + publish-with-retry-settings + Publishes messages with custom retry settings. optional arguments: -h, --help show this help message and exit @@ -141,8 +143,8 @@ To run this sample: $ python subscriber.py usage: subscriber.py [-h] - project - {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,listen_for_errors} + project_id + {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen_for_errors} ... This application demonstrates how to perform basic operations on @@ -152,26 +154,26 @@ To run this sample: at https://cloud.google.com/pubsub/docs. 
positional arguments: - project Your Google Cloud project ID - {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,listen_for_errors} + project_id Your Google Cloud project ID + {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen_for_errors} list_in_topic Lists all subscriptions for a given topic. list_in_project Lists all subscriptions in the current project. create Create a new pull subscription on the given topic. - create-push Create a new push subscription on the given topic. For - example, endpoint is "https://my-test- - project.appspot.com/push". + create-push Create a new push subscription on the given topic. delete Deletes an existing Pub/Sub topic. update Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a subscription, such as its topic, are not modifiable. - For example, endpoint is "https://my-test- - project.appspot.com/push". receive Receives messages from a pull subscription. receive-custom-attributes Receives messages from a pull subscription. receive-flow-control Receives messages from a pull subscription with flow control. + receive-synchronously + Pulling messages synchronously. + receive-synchronously-with-lease + Pulling messages synchronously with lease management listen_for_errors Receives messages and catches errors from a pull subscription. @@ -244,4 +246,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d227baab9584..df7a9f23fd95 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -128,30 +128,6 @@ def publish_messages_with_custom_attributes(project_id, topic_name): # [END pubsub_publish_custom_attributes] -def publish_messages_with_futures(project_id, topic_name): - """Publishes multiple messages to a Pub/Sub topic and prints their - message IDs.""" - # [START pubsub_publisher_concurrency_control] - from google.cloud import pubsub_v1 - - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" - - publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) - - for n in range(1, 10): - data = u"Message number {}".format(n) - # Data must be a bytestring - data = data.encode("utf-8") - # When you publish a message, the client returns a future. 
- future = publisher.publish(topic_path, data=data) - print(future.result()) - - print("Published messages with futures.") - # [END pubsub_publisher_concurrency_control] - - def publish_messages_with_error_handler(project_id, topic_name): # [START pubsub_publish_messages_error_handler] """Publishes multiple messages to a Pub/Sub topic with an error handler.""" @@ -308,11 +284,6 @@ def publish_messages_with_retry_settings(project_id, topic_name): ) publish_with_custom_attributes_parser.add_argument("topic_name") - publish_with_futures_parser = subparsers.add_parser( - "publish-with-futures", help=publish_messages_with_futures.__doc__ - ) - publish_with_futures_parser.add_argument("topic_name") - publish_with_error_handler_parser = subparsers.add_parser( "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, @@ -345,8 +316,6 @@ def publish_messages_with_retry_settings(project_id, topic_name): publish_messages_with_custom_attributes( args.project_id, args.topic_name ) - elif args.command == "publish-with-futures": - publish_messages_with_futures(args.project_id, args.topic_name) elif args.command == "publish-with-error-handler": publish_messages_with_error_handler(args.project_id, args.topic_name) elif args.command == "publish-with-batch-settings": diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index fbe30694ae45..aa55011c190d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -138,10 +138,3 @@ def test_publish_with_error_handler(topic_publish, capsys): out, _ = capsys.readouterr() assert "Published" in out - - -def test_publish_with_futures(topic_publish, capsys): - publisher.publish_messages_with_futures(PROJECT, TOPIC_PUBLISH) - - out, _ = capsys.readouterr() - assert "Published" in out From 6402e258c8877a99e6f912e9300d108d962cb2a9 Mon Sep 17 
00:00:00 2001 From: Tianzi Cai Date: Wed, 26 Feb 2020 10:13:03 -0800 Subject: [PATCH 0559/1197] Pub/Sub: remove unreferenced samples [(#2986)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2986) * remove qs samples * update README --- .../samples/snippets/README.rst | 16 --- .../samples/snippets/quickstart.py | 111 ------------------ .../samples/snippets/quickstart_test.py | 78 ------------ 3 files changed, 205 deletions(-) delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart_test.py diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 21a4b231f1a0..c30fd190a233 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -61,22 +61,6 @@ Install Dependencies Samples ------------------------------------------------------------------------------- -Quickstart -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -.. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst - - - - -To run this sample: - -.. code-block:: bash - - $ python quickstart.py - - Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart.py b/packages/google-cloud-pubsub/samples/snippets/quickstart.py deleted file mode 100644 index d01105885cb8..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google Inc. All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import argparse - - -def end_to_end(project_id, topic_name, subscription_name, num_messages): - # [START pubsub_end_to_end] - import time - - from google.cloud import pubsub_v1 - - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" - # TODO num_messages = number of messages to test end-to-end - - # Instantiates a publisher and subscriber client - publisher = pubsub_v1.PublisherClient() - subscriber = pubsub_v1.SubscriberClient() - - # The `topic_path` method creates a fully qualified identifier - # in the form `projects/{project_id}/topics/{topic_name}` - topic_path = subscriber.topic_path(project_id, topic_name) - - # The `subscription_path` method creates a fully qualified identifier - # in the form `projects/{project_id}/subscriptions/{subscription_name}` - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) - - # Create the topic. - topic = publisher.create_topic(topic_path) - print("\nTopic created: {}".format(topic.name)) - - # Create a subscription. - subscription = subscriber.create_subscription( - subscription_path, topic_path - ) - print("\nSubscription created: {}\n".format(subscription.name)) - - publish_begin = time.time() - - # Publish messages. 
- for n in range(num_messages): - data = u"Message number {}".format(n) - # Data must be a bytestring - data = data.encode("utf-8") - # When you publish a message, the client returns a future. - future = publisher.publish(topic_path, data=data) - print("Published {} of message ID {}.".format(data, future.result())) - - publish_time = time.time() - publish_begin - - messages = set() - - def callback(message): - print("Received message: {}".format(message)) - # Unacknowledged messages will be sent again. - message.ack() - messages.add(message) - - subscribe_begin = time.time() - - # Receive messages. The subscriber is nonblocking. - subscriber.subscribe(subscription_path, callback=callback) - - print("\nListening for messages on {}...\n".format(subscription_path)) - - while True: - if len(messages) == num_messages: - subscribe_time = time.time() - subscribe_begin - print("\nReceived all messages.") - print("Publish time lapsed: {:.2f}s.".format(publish_time)) - print("Subscribe time lapsed: {:.2f}s.".format(subscribe_time)) - break - else: - # Sleeps the thread at 50Hz to save on resources. 
- time.sleep(1.0 / 50) - # [END pubsub_end_to_end] - - -if __name__ == "__main__": - - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, - ) - parser.add_argument("project_id", help="Your Google Cloud project ID") - parser.add_argument("topic_name", help="Your topic name") - parser.add_argument("subscription_name", help="Your subscription name") - parser.add_argument("num_msgs", type=int, help="Number of test messages") - - args = parser.parse_args() - - end_to_end( - args.project_id, args.topic_name, args.subscription_name, args.num_msgs - ) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py deleted file mode 100644 index 6a1d4aae1b5f..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart_test.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2016 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import os -import uuid - -from google.cloud import pubsub_v1 -import pytest -import quickstart - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GCLOUD_PROJECT"] -TOPIC = "end-to-end-test-topic-" + UUID -SUBSCRIPTION = "end-to-end-test-topic-sub-" + UUID -N = 10 - - -@pytest.fixture(scope="module") -def publisher_client(): - yield pubsub_v1.PublisherClient() - - -@pytest.fixture(scope="module") -def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - publisher_client.delete_topic(topic_path) - except Exception: - pass - - yield TOPIC - - publisher_client.delete_topic(topic_path) - - -@pytest.fixture(scope="module") -def subscriber_client(): - yield pubsub_v1.SubscriberClient() - - -@pytest.fixture(scope="module") -def subscription(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION - ) - - try: - subscriber_client.delete_subscription(subscription_path) - except Exception: - pass - - yield SUBSCRIPTION - - subscriber_client.delete_subscription(subscription_path) - - -def test_end_to_end(topic, subscription, capsys): - - quickstart.end_to_end(PROJECT, topic, subscription, N) - out, _ = capsys.readouterr() - - assert "Received all messages" in out - assert "Publish time lapsed" in out - assert "Subscribe time lapsed" in out From 9159b3b37083611b6178fbfa9d6adeecc643188d Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 23 Mar 2020 15:17:49 -0700 Subject: [PATCH 0560/1197] Pub/Sub: add SubscriberClient.close() to examples [(#3118)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3118) * Add SubscriberClient.close() to examples. 
Co-authored-by: Prad Nelluru Co-authored-by: Prad Nelluru --- .../samples/snippets/iam.py | 6 ++ .../samples/snippets/iam_test.py | 4 +- .../samples/snippets/quickstart/sub.py | 14 ++-- .../samples/snippets/quickstart/sub_test.py | 3 + .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 78 ++++++++++++------- .../samples/snippets/subscriber_test.py | 4 +- 7 files changed, 75 insertions(+), 36 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index f014ce749022..eb0c8246307b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -51,6 +51,8 @@ def get_subscription_policy(project, subscription_name): print("Policy for subscription {}:".format(subscription_path)) for binding in policy.bindings: print("Role: {}, Members: {}".format(binding.role, binding.members)) + + client.close() # [END pubsub_get_subscription_policy] @@ -101,6 +103,8 @@ def set_subscription_policy(project, subscription_name): subscription_name, policy ) ) + + client.close() # [END pubsub_set_subscription_policy] @@ -144,6 +148,8 @@ def check_subscription_permissions(project, subscription_name): subscription_path, allowed_permissions ) ) + + client.close() # [END pubsub_test_subscription_permissions] diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 2b019f9ea16f..f88cde851e7d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -49,7 +49,9 @@ def topic(publisher_client): @pytest.fixture(scope="module") def subscriber_client(): - yield pubsub_v1.SubscriberClient() + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + subscriber_client.close() @pytest.fixture diff --git 
a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 5791af14d799..1d90726f5b04 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -27,11 +27,13 @@ def sub(project_id, subscription_name): """Receives messages from a Pub/Sub subscription.""" # [START pubsub_quickstart_sub_client] # Initialize a Subscriber client - client = pubsub_v1.SubscriberClient() + subscriber_client = pubsub_v1.SubscriberClient() # [END pubsub_quickstart_sub_client] # Create a fully qualified identifier in the form of # `projects/{project_id}/subscriptions/{subscription_name}` - subscription_path = client.subscription_path(project_id, subscription_name) + subscription_path = subscriber_client.subscription_path( + project_id, subscription_name + ) def callback(message): print( @@ -43,18 +45,20 @@ def callback(message): message.ack() print("Acknowledged message {}\n".format(message.message_id)) - streaming_pull_future = client.subscribe( + streaming_pull_future = subscriber_client.subscribe( subscription_path, callback=callback ) print("Listening for messages on {}..\n".format(subscription_path)) - # Calling result() on StreamingPullFuture keeps the main thread from - # exiting while messages get processed in the callbacks. try: + # Calling result() on StreamingPullFuture keeps the main thread from + # exiting while messages get processed in the callbacks. 
streaming_pull_future.result() except: # noqa streaming_pull_future.cancel() + subscriber_client.close() + if __name__ == "__main__": parser = argparse.ArgumentParser( diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 07edfad7c4d2..1b59a3d043ac 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -62,6 +62,7 @@ def subscription_path(topic_path): yield subscription_path subscriber_client.delete_subscription(subscription_path) + subscriber_client.close() def _publish_messages(topic_path): @@ -102,3 +103,5 @@ def mock_result(): out, _ = capsys.readouterr() assert "Received message" in out assert "Acknowledged message" in out + + real_client.close() diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index a5a8b2bb921a..cc192f6f7c58 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.1.0 +google-cloud-pubsub==1.3.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 79c9bc4a0aaa..e22efc7b16f3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -52,6 +52,8 @@ def list_subscriptions_in_project(project_id): for subscription in subscriber.list_subscriptions(project_path): print(subscription.name) + + subscriber.close() # [END pubsub_list_subscriptions] @@ -75,6 +77,8 @@ def create_subscription(project_id, topic_name, subscription_name): ) print("Subscription created: {}".format(subscription)) + + subscriber.close() # [END pubsub_create_pull_subscription] @@ -104,6 +108,8 
@@ def create_push_subscription( print("Push subscription created: {}".format(subscription)) print("Endpoint for subscription is: {}".format(endpoint)) + + subscriber.close() # [END pubsub_create_push_subscription] @@ -123,6 +129,8 @@ def delete_subscription(project_id, subscription_name): subscriber.delete_subscription(subscription_path) print("Subscription deleted: {}".format(subscription_path)) + + subscriber.close() # [END pubsub_delete_subscription] @@ -158,6 +166,8 @@ def update_subscription(project_id, subscription_name, endpoint): print("Subscription updated: {}".format(subscription_path)) print("New endpoint for subscription is: {}".format(result.push_config)) + + subscriber.close() # [END pubsub_update_push_configuration] @@ -188,12 +198,14 @@ def callback(message): ) print("Listening for messages on {}..\n".format(subscription_path)) - # result() in a future will block indefinitely if `timeout` is not set, - # unless an exception is encountered first. - try: - streaming_pull_future.result(timeout=timeout) - except: # noqa - streaming_pull_future.cancel() + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + streaming_pull_future.result(timeout=timeout) + except: # noqa + streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull] # [END pubsub_quickstart_subscriber] @@ -230,12 +242,14 @@ def callback(message): ) print("Listening for messages on {}..\n".format(subscription_path)) - # result() in a future will block indefinitely if `timeout` is not set, - # unless an exception is encountered first. - try: - streaming_pull_future.result(timeout=timeout) - except: # noqa - streaming_pull_future.cancel() + # Wrap subscriber in a 'with' block to automatically call close() when done. 
+ with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + streaming_pull_future.result(timeout=timeout) + except: # noqa + streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull_custom_attributes] # [END pubsub_subscriber_sync_pull_custom_attributes] @@ -269,12 +283,14 @@ def callback(message): ) print("Listening for messages on {}..\n".format(subscription_path)) - # result() in a future will block indefinitely if `timeout` is not set, - # unless an exception is encountered first. - try: - streaming_pull_future.result(timeout=timeout) - except: # noqa - streaming_pull_future.cancel() + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + streaming_pull_future.result(timeout=timeout) + except: # noqa + streaming_pull_future.cancel() # [END pubsub_subscriber_flow_settings] @@ -309,6 +325,8 @@ def synchronous_pull(project_id, subscription_name): len(response.received_messages) ) ) + + subscriber.close() # [END pubsub_subscriber_sync_pull] @@ -398,6 +416,8 @@ def worker(msg): len(response.received_messages) ) ) + + subscriber.close() # [END pubsub_subscriber_sync_pull_with_lease] @@ -425,17 +445,19 @@ def callback(message): ) print("Listening for messages on {}..\n".format(subscription_path)) - # result() in a future will block indefinitely if `timeout` is not set, - # unless an exception is encountered first. - try: - streaming_pull_future.result(timeout=timeout) - except Exception as e: - streaming_pull_future.cancel() - print( - "Listening for messages on {} threw an exception: {}.".format( - subscription_name, e + # Wrap subscriber in a 'with' block to automatically call close() when done. 
+ with subscriber: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + try: + streaming_pull_future.result(timeout=timeout) + except Exception as e: + streaming_pull_future.cancel() + print( + "Listening for messages on {} threw an exception: {}.".format( + subscription_name, e + ) ) - ) # [END pubsub_subscriber_error_listener] diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 50353c1c6e42..94905d63525d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -52,7 +52,9 @@ def topic(publisher_client): @pytest.fixture(scope="module") def subscriber_client(): - yield pubsub_v1.SubscriberClient() + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + subscriber_client.close() @pytest.fixture(scope="module") From 2d7416317abf8803d55e712b0b92400fdd8e5210 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 25 Mar 2020 12:47:33 -0700 Subject: [PATCH 0561/1197] Pub/Sub: update publish with batch settings sample [(#3137)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3137) * non-blocking publish * remove unused lib * lint * add defaults --- .../samples/snippets/publisher.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index df7a9f23fd95..6802ec85fc13 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -180,20 +180,28 @@ def publish_messages_with_batch_settings(project_id, topic_name): # TODO project_id = "Your Google Cloud Project ID" # TODO topic_name = "Your Pub/Sub topic name" - # Configure the batch to publish as soon as there is 
one kilobyte - # of data or one second has passed. + # Configure the batch to publish as soon as there is ten messages, + # one kilobyte of data, or one second has passed. batch_settings = pubsub_v1.types.BatchSettings( - max_bytes=1024, max_latency=1 # One kilobyte # One second + max_messages=10, # default 100 + max_bytes=1024, # default 1 MB + max_latency=1, # default 10 ms ) publisher = pubsub_v1.PublisherClient(batch_settings) topic_path = publisher.topic_path(project_id, topic_name) + # Resolve the publish future in a separate thread. + def callback(future): + message_id = future.result() + print(message_id) + for n in range(1, 10): data = u"Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") future = publisher.publish(topic_path, data=data) - print(future.result()) + # Non-blocking. Allow the publisher client to batch multiple messages. + future.add_done_callback(callback) print("Published messages with batch settings.") # [END pubsub_publisher_batch_settings] From 429187106e11eaf04a254f2af924f5925721c415 Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Wed, 1 Apr 2020 19:11:50 -0700 Subject: [PATCH 0562/1197] Simplify noxfile setup. [(#2806)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/2806) * chore(deps): update dependency requests to v2.23.0 * Simplify noxfile and add version control. * Configure appengine/standard to only test Python 2.7. * Update Kokokro configs to match noxfile. * Add requirements-test to each folder. * Remove Py2 versions from everything execept appengine/standard. * Remove conftest.py. * Remove appengine/standard/conftest.py * Remove 'no-sucess-flaky-report' from pytest.ini. * Add GAE SDK back to appengine/standard tests. * Fix typo. * Roll pytest to python 2 version. * Add a bunch of testing requirements. * Remove typo. * Add appengine lib directory back in. * Add some additional requirements. * Fix issue with flake8 args. 
* Even more requirements. * Readd appengine conftest.py. * Add a few more requirements. * Even more Appengine requirements. * Add webtest for appengine/standard/mailgun. * Add some additional requirements. * Add workaround for issue with mailjet-rest. * Add responses for appengine/standard/mailjet. Co-authored-by: Renovate Bot --- .../samples/snippets/requirements-test.txt | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/snippets/requirements-test.txt diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt new file mode 100644 index 000000000000..c445bcb1aecf --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -0,0 +1,4 @@ +pytest==5.3.2 +gcp-devrel-py-tools==0.0.15 +mock==3.0.5 +google-cloud-core==1.3.0 From d79a7b73b313d63ff9f383179ff6abe9ebddaa45 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Wed, 22 Apr 2020 18:08:44 -0700 Subject: [PATCH 0563/1197] chore: remove gcp-devrel-py-tools from iot and pubsub [(#3470)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3470) * [iot] chore: remove unused dependency * [pubsub] chore: remove gcp-devrel-py-tools --- .../samples/snippets/publisher_test.py | 20 ++++++++---- .../samples/snippets/requirements-test.txt | 3 +- .../samples/snippets/subscriber_test.py | 32 ++++++++++++------- 3 files changed, 35 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index aa55011c190d..dc6095508112 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -16,7 +16,7 @@ import time import uuid -from gcp_devrel.testing import eventually_consistent +import backoff from google.cloud import pubsub_v1 import mock import pytest @@ 
-75,12 +75,14 @@ def new_sleep(period): def test_list(client, topic_admin, capsys): - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): publisher.list_topics(PROJECT) out, _ = capsys.readouterr() assert topic_admin in out + eventually_consistent_test() + def test_create(client): topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) @@ -91,19 +93,23 @@ def test_create(client): publisher.create_topic(PROJECT, TOPIC_ADMIN) - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): assert client.get_topic(topic_path) + eventually_consistent_test() + def test_delete(client, topic_admin): publisher.delete_topic(PROJECT, TOPIC_ADMIN) - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): with pytest.raises(Exception): client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) + eventually_consistent_test() + def test_publish(topic_publish, capsys): publisher.publish_messages(PROJECT, TOPIC_PUBLISH) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index c445bcb1aecf..adf26b9f98bb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,3 @@ +backoff==1.10.0 pytest==5.3.2 -gcp-devrel-py-tools==0.0.15 mock==3.0.5 -google-cloud-core==1.3.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 94905d63525d..1c9520866f17 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -15,7 +15,7 @@ import os import uuid -from 
gcp_devrel.testing import eventually_consistent +import backoff from google.cloud import pubsub_v1 import pytest @@ -110,20 +110,24 @@ def subscription_async(subscriber_client, topic): def test_list_in_topic(subscription_admin, capsys): - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) out, _ = capsys.readouterr() assert subscription_admin in out + eventually_consistent_test() + def test_list_in_project(subscription_admin, capsys): - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): subscriber.list_subscriptions_in_project(PROJECT) out, _ = capsys.readouterr() assert subscription_admin in out + eventually_consistent_test() + def test_create(subscriber_client): subscription_path = subscriber_client.subscription_path( @@ -137,10 +141,12 @@ def test_create(subscriber_client): subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN) - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): assert subscriber_client.get_subscription(subscription_path) + eventually_consistent_test() + def test_create_push(subscriber_client): subscription_path = subscriber_client.subscription_path( @@ -155,10 +161,12 @@ def test_create_push(subscriber_client): PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT ) - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): assert subscriber_client.get_subscription(subscription_path) + eventually_consistent_test() + def test_update(subscriber_client, subscription_admin, capsys): subscriber.update_subscription(PROJECT, SUBSCRIPTION_ADMIN, NEW_ENDPOINT) @@ -170,11 +178,13 @@ def test_update(subscriber_client, subscription_admin, capsys): 
def test_delete(subscriber_client, subscription_admin): subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) - @eventually_consistent.call - def _(): + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test(): with pytest.raises(Exception): subscriber_client.get_subscription(subscription_admin) + eventually_consistent_test() + def _publish_messages(publisher_client, topic): for n in range(5): From 45df1be51dc57fd16027fbb57b9ed9db84cf683d Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 24 Apr 2020 12:28:38 -0700 Subject: [PATCH 0564/1197] Update dependency google-cloud-pubsub to v1.4.2 in Storage and Pub/Sub [(#3343)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3343) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index cc192f6f7c58..fbfc0f4d395e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.3.0 +google-cloud-pubsub==1.4.2 From 07e51e798b0b3321b40aeb9aa026c39d7e06823d Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Tue, 12 May 2020 17:30:32 -0700 Subject: [PATCH 0565/1197] chore: some lint fixes [(#3748)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3748) --- .../samples/snippets/quickstart/pub_test.py | 5 +++-- .../samples/snippets/quickstart/sub_test.py | 7 +++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py index b9a6f807f37d..24010c76e830 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py 
+++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py @@ -15,13 +15,14 @@ # limitations under the License. import os -import pytest import uuid from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 +import pytest + +import pub # noqa -import pub UUID = uuid.uuid4().hex PROJECT = os.environ["GCLOUD_PROJECT"] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 1b59a3d043ac..2754dc56b5e5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -13,16 +13,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -import mock import os -import pytest import uuid from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 +import mock +import pytest -import sub +import sub # noqa UUID = uuid.uuid4().hex From 8f12e81575b0e573246a85889ffccdd4740b8848 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 13 May 2020 07:31:59 +0200 Subject: [PATCH 0566/1197] chore(deps): update dependency google-cloud-pubsub to v1.4.3 [(#3725)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3725) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Takashi Matsuo --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index fbfc0f4d395e..110c3b79a339 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ 
-google-cloud-pubsub==1.4.2 +google-cloud-pubsub==1.4.3 From a29d1569f729399f761b16fded7121000f9a7356 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 21 May 2020 04:50:04 +0200 Subject: [PATCH 0567/1197] chore(deps): update dependency google-cloud-pubsub to v1.5.0 [(#3781)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3781) Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 110c3b79a339..9cc17af84c35 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.4.3 +google-cloud-pubsub==1.5.0 From 45bcdda1f1eff9ab67b4be89cd88d0a2b59781b4 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 29 May 2020 17:12:50 -0700 Subject: [PATCH 0568/1197] samples: add Pub/Sub dead letter queue samples [(#3904)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3904) --- .../samples/snippets/README.rst | 23 +- .../samples/snippets/publisher.py | 52 +- .../samples/snippets/subscriber.py | 458 ++++++++++++------ .../samples/snippets/subscriber_test.py | 147 ++++-- 4 files changed, 471 insertions(+), 209 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index c30fd190a233..f27f9438ea96 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -74,7 +74,7 @@ To run this sample: .. code-block:: bash - $ python publisher.py + $ python publisher.py --help usage: publisher.py [-h] project_id @@ -124,11 +124,11 @@ To run this sample: .. 
code-block:: bash - $ python subscriber.py + $ python subscriber.py --help usage: subscriber.py [-h] project_id - {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen_for_errors} + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} ... This application demonstrates how to perform basic operations on @@ -139,15 +139,21 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {list_in_topic,list_in_project,create,create-push,delete,update,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen_for_errors} - list_in_topic Lists all subscriptions for a given topic. - list_in_project Lists all subscriptions in the current project. + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + list-in-topic Lists all subscriptions for a given topic. + list-in-project Lists all subscriptions in the current project. create Create a new pull subscription on the given topic. + create-with-dead-letter-policy + Create a subscription with dead letter policy. create-push Create a new push subscription on the given topic. delete Deletes an existing Pub/Sub topic. - update Updates an existing Pub/Sub subscription's push + update-push Updates an existing Pub/Sub subscription's push endpoint URL. 
Note that certain properties of a subscription, such as its topic, are not modifiable. + update-dead-letter-policy + Update a subscription's dead letter policy. + remove-dead-letter-policy + Remove dead letter policy from a subscription. receive Receives messages from a pull subscription. receive-custom-attributes Receives messages from a pull subscription. @@ -158,8 +164,9 @@ To run this sample: Pulling messages synchronously. receive-synchronously-with-lease Pulling messages synchronously with lease management - listen_for_errors Receives messages and catches errors from a pull + listen-for-errors Receives messages and catches errors from a pull subscription. + receive-messages-with-delivery-attempts optional arguments: -h, --help show this help message and exit diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 6802ec85fc13..9e7820fbf305 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -29,7 +29,8 @@ def list_topics(project_id): # [START pubsub_list_topics] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" + # TODO(developer) + # project_id = "your-project-id" publisher = pubsub_v1.PublisherClient() project_path = publisher.project_path(project_id) @@ -45,8 +46,9 @@ def create_topic(project_id, topic_name): # [START pubsub_create_topic] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) @@ -63,8 +65,9 @@ def delete_topic(project_id, topic_name): # [START pubsub_delete_topic] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = 
"Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) @@ -81,8 +84,9 @@ def publish_messages(project_id, topic_name): # [START pubsub_publish] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" publisher = pubsub_v1.PublisherClient() # The `topic_path` method creates a fully qualified identifier @@ -108,8 +112,9 @@ def publish_messages_with_custom_attributes(project_id, topic_name): # [START pubsub_publish_custom_attributes] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) @@ -135,8 +140,9 @@ def publish_messages_with_error_handler(project_id, topic_name): from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) @@ -177,8 +183,9 @@ def publish_messages_with_batch_settings(project_id, topic_name): # [START pubsub_publisher_batch_settings] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" # Configure the batch to publish as soon as there is ten messages, # one kilobyte of data, or one second has passed. 
@@ -212,8 +219,9 @@ def publish_messages_with_retry_settings(project_id, topic_name): # [START pubsub_publisher_retry_settings] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" # Configure the retry settings. Defaults will be overwritten. retry_settings = { @@ -267,8 +275,7 @@ def publish_messages_with_retry_settings(project_id, topic_name): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") @@ -281,9 +288,7 @@ def publish_messages_with_retry_settings(project_id, topic_name): delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_name") - publish_parser = subparsers.add_parser( - "publish", help=publish_messages.__doc__ - ) + publish_parser = subparsers.add_parser("publish", help=publish_messages.__doc__) publish_parser.add_argument("topic_name") publish_with_custom_attributes_parser = subparsers.add_parser( @@ -293,8 +298,7 @@ def publish_messages_with_retry_settings(project_id, topic_name): publish_with_custom_attributes_parser.add_argument("topic_name") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", - help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_name") @@ -321,9 +325,7 @@ def publish_messages_with_retry_settings(project_id, topic_name): elif args.command == "publish": publish_messages(args.project_id, args.topic_name) elif args.command == "publish-with-custom-attributes": - publish_messages_with_custom_attributes( - 
args.project_id, args.topic_name - ) + publish_messages_with_custom_attributes(args.project_id, args.topic_name) elif args.command == "publish-with-error-handler": publish_messages_with_error_handler(args.project_id, args.topic_name) elif args.command == "publish-with-batch-settings": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index e22efc7b16f3..b5af760aed49 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -29,8 +29,9 @@ def list_subscriptions_in_topic(project_id, topic_name): # [START pubsub_list_topic_subscriptions] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_name) @@ -45,7 +46,8 @@ def list_subscriptions_in_project(project_id): # [START pubsub_list_subscriptions] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" + # TODO(developer) + # project_id = "your-project-id" subscriber = pubsub_v1.SubscriberClient() project_path = subscriber.project_path(project_id) @@ -62,19 +64,16 @@ def create_subscription(project_id, topic_name, subscription_name): # [START pubsub_create_pull_subscription] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" - # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" + # subscription_name = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path( - project_id, 
subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) - subscription = subscriber.create_subscription( - subscription_path, topic_path - ) + subscription = subscriber.create_subscription(subscription_path, topic_path) print("Subscription created: {}".format(subscription)) @@ -82,23 +81,68 @@ def create_subscription(project_id, topic_name, subscription_name): # [END pubsub_create_pull_subscription] -def create_push_subscription( - project_id, topic_name, subscription_name, endpoint +def create_subscription_with_dead_letter_topic( + project_id, topic_name, subscription_name, dead_letter_topic_name ): + """Create a subscription with dead letter policy.""" + # [START pubsub_dead_letter_create_subscription] + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + # TODO(developer) + # project_id = "your-project-id" + # endpoint = "https://my-test-project.appspot.com/push" + # TODO(developer): This is an existing topic that the subscription + # with dead letter policy is attached to. + # topic_name = "your-topic-id" + # TODO(developer): This is an existing subscription with a dead letter policy. + # subscription_name = "your-subscription-id" + # TODO(developer): This is an existing dead letter topic that the subscription + # with dead letter policy will forward dead letter messages to. 
+ # dead_letter_topic_name = "your-dead-letter-topic-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_name) + subscription_path = subscriber.subscription_path(project_id, subscription_name) + dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_name) + + dead_letter_policy = DeadLetterPolicy( + dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 + ) + + with subscriber: + subscription = subscriber.create_subscription( + subscription_path, topic_path, dead_letter_policy=dead_letter_policy + ) + + print("Subscription created: {}".format(subscription.name)) + print( + "It will forward dead letter messages to: {}".format( + subscription.dead_letter_policy.dead_letter_topic + ) + ) + print( + "After {} delivery attempts.".format( + subscription.dead_letter_policy.max_delivery_attempts + ) + ) + # [END pubsub_dead_letter_create_subscription] + + +def create_push_subscription(project_id, topic_name, subscription_name, endpoint): """Create a new push subscription on the given topic.""" # [START pubsub_create_push_subscription] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" - # TODO subscription_name = "Your Pub/Sub subscription name" - # TODO endpoint = "https://my-test-project.appspot.com/push" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" + # subscription_name = "your-subscription-id" + # endpoint = "https://my-test-project.appspot.com/push" subscriber = pubsub_v1.SubscriberClient() topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) @@ -118,13 +162,12 @@ def delete_subscription(project_id, subscription_name): # 
[START pubsub_delete_subscription] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) subscriber.delete_subscription(subscription_path) @@ -134,7 +177,7 @@ def delete_subscription(project_id, subscription_name): # [END pubsub_delete_subscription] -def update_subscription(project_id, subscription_name, endpoint): +def update_push_subscription(project_id, topic_name, subscription_name, endpoint): """ Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a subscription, such as @@ -143,26 +186,24 @@ def update_subscription(project_id, subscription_name, endpoint): # [START pubsub_update_push_configuration] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO topic_name = "Your Pub/Sub topic name" - # TODO subscription_name = "Your Pub/Sub subscription name" - # TODO endpoint = "https://my-test-project.appspot.com/push" + # TODO(developer) + # project_id = "your-project-id" + # topic_name = "your-topic-id" + # subscription_name = "your-subscription-id" + # endpoint = "https://my-test-project.appspot.com/push" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) subscription = pubsub_v1.types.Subscription( - name=subscription_path, push_config=push_config + name=subscription_path, topic=topic_name, push_config=push_config ) update_mask = {"paths": 
{"push_config"}} - subscriber.update_subscription(subscription, update_mask) - result = subscriber.get_subscription(subscription_path) + result = subscriber.update_subscription(subscription, update_mask) print("Subscription updated: {}".format(subscription_path)) print("New endpoint for subscription is: {}".format(result.push_config)) @@ -171,31 +212,126 @@ def update_subscription(project_id, subscription_name, endpoint): # [END pubsub_update_push_configuration] +def update_subscription_with_dead_letter_policy( + project_id, topic_name, subscription_name, dead_letter_topic_name +): + """Update a subscription's dead letter policy.""" + # [START pubsub_dead_letter_update_subscription] + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import DeadLetterPolicy, FieldMask + + # TODO(developer) + # project_id = "your-project-id" + # TODO(developer): This is an existing topic that the subscription + # with dead letter policy is attached to. + # topic_name = "your-topic-name" + # TODO(developer): This is an existing subscription with a dead letter policy. + # subscription_name = "your-subscription-id" + # TODO(developer): This is an existing dead letter topic that the subscription + # with dead letter policy will forward dead letter messages to. + # dead_letter_topic_name = "your-dead-letter-topic-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_name) + subscription_path = subscriber.subscription_path(project_id, subscription_name) + dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_name) + + subscription_before_update = subscriber.get_subscription(subscription_path) + print("Before the update: {}".format(subscription_before_update)) + + # Indicates which fields in the provided subscription to update. + update_mask = FieldMask(paths=["dead_letter_policy.max_delivery_attempts"]) + + # Construct a dead letter policy you expect to have after the update. 
+ dead_letter_policy = DeadLetterPolicy( + dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=20 + ) + + # Construct the subscription with the dead letter policy you expect to have + # after the update. Here, values in the required fields (name, topic) help + # identify the subscription. + subscription = pubsub_v1.types.Subscription( + name=subscription_path, topic=topic_path, dead_letter_policy=dead_letter_policy, + ) + + with subscriber: + subscription_after_update = subscriber.update_subscription( + subscription, update_mask + ) + + print("After the update: {}".format(subscription_after_update)) + # [END pubsub_dead_letter_update_subscription] + return subscription_after_update + + +def remove_dead_letter_policy(project_id, topic_name, subscription_name): + """Remove dead letter policy from a subscription.""" + # [START pubsub_dead_letter_remove] + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import FieldMask + + # TODO(developer) + # project_id = "your-project-id" + # TODO(developer): This is an existing topic that the subscription + # with dead letter policy is attached to. + # topic_name = "your-topic-name" + # TODO(developer): This is an existing subscription with a dead letter policy. + # subscription_name = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_name) + subscription_path = subscriber.subscription_path(project_id, subscription_name) + + subscription_before_update = subscriber.get_subscription(subscription_path) + print("Before removing the policy: {}".format(subscription_before_update)) + + # Indicates which fields in the provided subscription to update. + update_mask = FieldMask( + paths=[ + "dead_letter_policy.dead_letter_topic", + "dead_letter_policy.max_delivery_attempts", + ] + ) + + # Construct the subscription (without any dead letter policy) that you + # expect to have after the update. 
+ subscription = pubsub_v1.types.Subscription( + name=subscription_path, topic=topic_path + ) + + with subscriber: + subscription_after_update = subscriber.update_subscription( + subscription, update_mask + ) + + print("After removing the policy: {}".format(subscription_after_update)) + # [END pubsub_dead_letter_remove] + return subscription_after_update + + def receive_messages(project_id, subscription_name, timeout=None): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] # [START pubsub_quickstart_subscriber] + from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pub/Sub subscription name" - # TODO timeout = 5.0 # "How long the subscriber should listen for - # messages in seconds" + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() # The `subscription_path` method creates a fully qualified identifier # in the form `projects/{project_id}/subscriptions/{subscription_name}` - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) def callback(message): print("Received message: {}".format(message)) message.ack() - streaming_pull_future = subscriber.subscribe( - subscription_path, callback=callback - ) + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) print("Listening for messages on {}..\n".format(subscription_path)) # Wrap subscriber in a 'with' block to automatically call close() when done. @@ -204,7 +340,7 @@ def callback(message): # When `timeout` is not set, result() will block indefinitely, # unless an exception is encountered first. 
streaming_pull_future.result(timeout=timeout) - except: # noqa + except TimeoutError: streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull] # [END pubsub_quickstart_subscriber] @@ -216,17 +352,17 @@ def receive_messages_with_custom_attributes( """Receives messages from a pull subscription.""" # [START pubsub_subscriber_sync_pull_custom_attributes] # [START pubsub_subscriber_async_pull_custom_attributes] + from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pub/Sub subscription name" - # TODO timeout = 5.0 # "How long the subscriber should listen for - # messages in seconds" + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) def callback(message): print("Received message: {}".format(message.data)) @@ -237,9 +373,7 @@ def callback(message): print("{}: {}".format(key, value)) message.ack() - streaming_pull_future = subscriber.subscribe( - subscription_path, callback=callback - ) + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) print("Listening for messages on {}..\n".format(subscription_path)) # Wrap subscriber in a 'with' block to automatically call close() when done. @@ -248,28 +382,26 @@ def callback(message): # When `timeout` is not set, result() will block indefinitely, # unless an exception is encountered first. 
streaming_pull_future.result(timeout=timeout) - except: # noqa + except TimeoutError: streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull_custom_attributes] # [END pubsub_subscriber_sync_pull_custom_attributes] -def receive_messages_with_flow_control( - project_id, subscription_name, timeout=None -): +def receive_messages_with_flow_control(project_id, subscription_name, timeout=None): """Receives messages from a pull subscription with flow control.""" # [START pubsub_subscriber_flow_settings] + from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pub/Sub subscription name" - # TODO timeout = 5.0 # "How long the subscriber should listen for - # messages in seconds" + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) def callback(message): print("Received message: {}".format(message.data)) @@ -289,7 +421,7 @@ def callback(message): # When `timeout` is not set, result() will block indefinitely, # unless an exception is encountered first. 
streaming_pull_future.result(timeout=timeout) - except: # noqa + except TimeoutError: streaming_pull_future.cancel() # [END pubsub_subscriber_flow_settings] @@ -299,13 +431,12 @@ def synchronous_pull(project_id, subscription_name): # [START pubsub_subscriber_sync_pull] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) NUM_MESSAGES = 3 @@ -340,13 +471,12 @@ def synchronous_pull_with_lease_management(project_id, subscription_name): from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pub/Sub subscription name" + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) NUM_MESSAGES = 2 ACK_DEADLINE = 30 @@ -385,15 +515,11 @@ def worker(msg): if process.is_alive(): # `ack_deadline_seconds` must be between 10 to 600. 
subscriber.modify_ack_deadline( - subscription_path, - [ack_id], - ack_deadline_seconds=ACK_DEADLINE, + subscription_path, [ack_id], ack_deadline_seconds=ACK_DEADLINE, ) logger.info( "{}: Reset ack deadline for {} for {}s".format( - time.strftime("%X", time.gmtime()), - msg_data, - ACK_DEADLINE, + time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE, ) ) @@ -426,23 +552,20 @@ def listen_for_errors(project_id, subscription_name, timeout=None): # [START pubsub_subscriber_error_listener] from google.cloud import pubsub_v1 - # TODO project_id = "Your Google Cloud Project ID" - # TODO subscription_name = "Your Pubsub subscription name" - # TODO timeout = 5.0 # "How long the subscriber should listen for - # messages in seconds" + # TODO(developer) + # project_id = "Your Google Cloud Project ID" + # subscription_name = "Your Pubsub subscription name" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path( - project_id, subscription_name - ) + subscription_path = subscriber.subscription_path(project_id, subscription_name) def callback(message): print("Received message: {}".format(message)) message.ack() - streaming_pull_future = subscriber.subscribe( - subscription_path, callback=callback - ) + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) print("Listening for messages on {}..\n".format(subscription_path)) # Wrap subscriber in a 'with' block to automatically call close() when done. 
@@ -461,29 +584,67 @@ def callback(message): # [END pubsub_subscriber_error_listener] +def receive_messages_with_delivery_attempts( + project_id, subscription_name, timeout=None +): + # [START pubsub_dead_letter_delivery_attempt] + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_name = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_name) + + def callback(message): + print("Received message: {}".format(message)) + print("With delivery attempts: {}".format(message.delivery_attempt)) + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print("Listening for messages on {}..\n".format(subscription_path)) + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ try: + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_dead_letter_delivery_attempt] + + if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") subparsers = parser.add_subparsers(dest="command") list_in_topic_parser = subparsers.add_parser( - "list_in_topic", help=list_subscriptions_in_topic.__doc__ + "list-in-topic", help=list_subscriptions_in_topic.__doc__ ) list_in_topic_parser.add_argument("topic_name") list_in_project_parser = subparsers.add_parser( - "list_in_project", help=list_subscriptions_in_project.__doc__ + "list-in-project", help=list_subscriptions_in_project.__doc__ ) - create_parser = subparsers.add_parser( - "create", help=create_subscription.__doc__ - ) + create_parser = subparsers.add_parser("create", help=create_subscription.__doc__) create_parser.add_argument("topic_name") create_parser.add_argument("subscription_name") + create_with_dead_letter_policy_parser = subparsers.add_parser( + "create-with-dead-letter-policy", + help=create_subscription_with_dead_letter_topic.__doc__, + ) + create_with_dead_letter_policy_parser.add_argument("topic_name") + create_with_dead_letter_policy_parser.add_argument("subscription_name") + create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_name") + create_push_parser = subparsers.add_parser( "create-push", help=create_push_subscription.__doc__ ) @@ -491,20 +652,31 @@ def callback(message): create_push_parser.add_argument("subscription_name") create_push_parser.add_argument("endpoint") - delete_parser = subparsers.add_parser( - "delete", help=delete_subscription.__doc__ - ) + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) 
delete_parser.add_argument("subscription_name") - update_parser = subparsers.add_parser( - "update", help=update_subscription.__doc__ + update_push_parser = subparsers.add_parser( + "update-push", help=update_push_subscription.__doc__ + ) + update_push_parser.add_argument("topic_name") + update_push_parser.add_argument("subscription_name") + update_push_parser.add_argument("endpoint") + + update_dead_letter_policy_parser = subparsers.add_parser( + "update-dead-letter-policy", + help=update_subscription_with_dead_letter_policy.__doc__, ) - update_parser.add_argument("subscription_name") - update_parser.add_argument("endpoint") + update_dead_letter_policy_parser.add_argument("topic_name") + update_dead_letter_policy_parser.add_argument("subscription_name") + update_dead_letter_policy_parser.add_argument("dead_letter_topic_name") - receive_parser = subparsers.add_parser( - "receive", help=receive_messages.__doc__ + remove_dead_letter_policy_parser = subparsers.add_parser( + "remove-dead-letter-policy", help=remove_dead_letter_policy.__doc__ ) + remove_dead_letter_policy_parser.add_argument("topic_name") + remove_dead_letter_policy_parser.add_argument("subscription_name") + + receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) receive_parser.add_argument("subscription_name") receive_parser.add_argument("--timeout", default=None, type=float) @@ -521,9 +693,7 @@ def callback(message): "receive-flow-control", help=receive_messages_with_flow_control.__doc__ ) receive_with_flow_control_parser.add_argument("subscription_name") - receive_with_flow_control_parser.add_argument( - "--timeout", default=None, type=float - ) + receive_with_flow_control_parser.add_argument("--timeout", default=None, type=float) synchronous_pull_parser = subparsers.add_parser( "receive-synchronously", help=synchronous_pull.__doc__ @@ -534,40 +704,58 @@ def callback(message): "receive-synchronously-with-lease", help=synchronous_pull_with_lease_management.__doc__, ) - 
synchronous_pull_with_lease_management_parser.add_argument( - "subscription_name" - ) + synchronous_pull_with_lease_management_parser.add_argument("subscription_name") listen_for_errors_parser = subparsers.add_parser( - "listen_for_errors", help=listen_for_errors.__doc__ + "listen-for-errors", help=listen_for_errors.__doc__ ) listen_for_errors_parser.add_argument("subscription_name") - listen_for_errors_parser.add_argument( + listen_for_errors_parser.add_argument("--timeout", default=None, type=float) + + receive_messages_with_delivery_attempts_parser = subparsers.add_parser( + "receive-messages-with-delivery-attempts", + help=receive_messages_with_delivery_attempts.__doc__, + ) + receive_messages_with_delivery_attempts_parser.add_argument("subscription_name") + receive_messages_with_delivery_attempts_parser.add_argument( "--timeout", default=None, type=float ) args = parser.parse_args() - if args.command == "list_in_topic": + if args.command == "list-in-topic": list_subscriptions_in_topic(args.project_id, args.topic_name) - elif args.command == "list_in_project": + elif args.command == "list-in-project": list_subscriptions_in_project(args.project_id) elif args.command == "create": - create_subscription( - args.project_id, args.topic_name, args.subscription_name + create_subscription(args.project_id, args.topic_name, args.subscription_name) + elif args.command == "create-with-dead-letter-policy": + create_subscription_with_dead_letter_topic( + args.project_id, + args.topic_name, + args.subscription_name, + args.dead_letter_topic_name, ) elif args.command == "create-push": create_push_subscription( + args.project_id, args.topic_name, args.subscription_name, args.endpoint, + ) + elif args.command == "delete": + delete_subscription(args.project_id, args.subscription_name) + elif args.command == "update-push": + update_push_subscription( + args.project_id, args.topic_name, args.subscription_name, args.endpoint, + ) + elif args.command == "update-dead-letter-policy": + 
update_subscription_with_dead_letter_policy( args.project_id, args.topic_name, args.subscription_name, - args.endpoint, + args.dead_letter_topic_name, ) - elif args.command == "delete": - delete_subscription(args.project_id, args.subscription_name) - elif args.command == "update": - update_subscription( - args.project_id, args.subscription_name, args.endpoint + elif args.command == "remove-dead-letter-policy": + remove_dead_letter_policy( + args.project_id, args.topic_name, args.subscription_name ) elif args.command == "receive": receive_messages(args.project_id, args.subscription_name, args.timeout) @@ -582,10 +770,10 @@ def callback(message): elif args.command == "receive-synchronously": synchronous_pull(args.project_id, args.subscription_name) elif args.command == "receive-synchronously-with-lease": - synchronous_pull_with_lease_management( - args.project_id, args.subscription_name - ) - elif args.command == "listen_for_errors": - listen_for_errors( + synchronous_pull_with_lease_management(args.project_id, args.subscription_name) + elif args.command == "listen-for-errors": + listen_for_errors(args.project_id, args.subscription_name, args.timeout) + elif args.command == "receive-messages-with-delivery-attempts": + receive_messages_with_delivery_attempts( args.project_id, args.subscription_name, args.timeout ) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 1c9520866f17..6b90396f942e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -24,9 +24,11 @@ UUID = uuid.uuid4().hex PROJECT = os.environ["GCLOUD_PROJECT"] TOPIC = "subscription-test-topic-" + UUID +DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID 
SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID +SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) @@ -41,13 +43,27 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - subscription = publisher_client.get_topic(topic_path) + topic = publisher_client.get_topic(topic_path) except: # noqa - subscription = publisher_client.create_topic(topic_path) + topic = publisher_client.create_topic(topic_path) - yield subscription.name + yield topic.name + + publisher_client.delete_topic(topic.name) + + +@pytest.fixture(scope="module") +def dead_letter_topic(publisher_client): + topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) + + try: + dead_letter_topic = publisher_client.get_topic(topic_path) + except: # noqa + dead_letter_topic = publisher_client.create_topic(topic_path) + + yield dead_letter_topic.name - publisher_client.delete_topic(subscription.name) + publisher_client.delete_topic(dead_letter_topic.name) @pytest.fixture(scope="module") @@ -59,9 +75,7 @@ def subscriber_client(): @pytest.fixture(scope="module") def subscription_admin(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ADMIN - ) + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: subscription = subscriber_client.get_subscription(subscription_path) @@ -75,9 +89,7 @@ def subscription_admin(subscriber_client, topic): @pytest.fixture(scope="module") def subscription_sync(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_SYNC - ) + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) try: subscription = subscriber_client.get_subscription(subscription_path) @@ -93,9 +105,23 @@ def subscription_sync(subscriber_client, 
topic): @pytest.fixture(scope="module") def subscription_async(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ASYNC - ) + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) + + try: + subscription = subscriber_client.get_subscription(subscription_path) + except: # noqa + subscription = subscriber_client.create_subscription( + subscription_path, topic=topic + ) + + yield subscription.name + + subscriber_client.delete_subscription(subscription.name) + + +@pytest.fixture(scope="module") +def subscription_dlq(subscriber_client, topic): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) try: subscription = subscriber_client.get_subscription(subscription_path) @@ -130,9 +156,7 @@ def eventually_consistent_test(): def test_create(subscriber_client): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ADMIN - ) + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: subscriber_client.delete_subscription(subscription_path) @@ -148,19 +172,36 @@ def eventually_consistent_test(): eventually_consistent_test() -def test_create_push(subscriber_client): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION_ADMIN - ) +def test_create_subscription_with_dead_letter_policy( + subscriber_client, publisher_client, topic, dead_letter_topic, capsys +): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) + dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) + try: subscriber_client.delete_subscription(subscription_path) except Exception: pass - subscriber.create_push_subscription( - PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT + subscriber.create_subscription_with_dead_letter_topic( + PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC ) + out, _ = capsys.readouterr() + assert "Subscription created: " + 
subscription_path in out + assert "It will forward dead letter messages to: " + dead_letter_topic_path in out + assert "After 10 delivery attempts." in out + + +def test_create_push(subscriber_client): + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) + try: + subscriber_client.delete_subscription(subscription_path) + except Exception: + pass + + subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): assert subscriber_client.get_subscription(subscription_path) @@ -169,12 +210,25 @@ def eventually_consistent_test(): def test_update(subscriber_client, subscription_admin, capsys): - subscriber.update_subscription(PROJECT, SUBSCRIPTION_ADMIN, NEW_ENDPOINT) + subscriber.update_push_subscription( + PROJECT, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT + ) out, _ = capsys.readouterr() assert "Subscription updated" in out +def test_update_dead_letter_policy( + subscriber_client, topic, subscription_dlq, dead_letter_topic, capsys +): + _ = subscriber.update_subscription_with_dead_letter_policy( + PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + ) + + out, _ = capsys.readouterr() + assert "max_delivery_attempts: 20" in out + + def test_delete(subscriber_client, subscription_admin): subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) @@ -212,9 +266,7 @@ def test_receive_with_custom_attributes( _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_custom_attributes( - PROJECT, SUBSCRIPTION_ASYNC, 5 - ) + subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5) out, _ = capsys.readouterr() assert "message" in out @@ -222,15 +274,11 @@ def test_receive_with_custom_attributes( assert "python-sample" in out -def test_receive_with_flow_control( - publisher_client, topic, subscription_async, capsys -): +def test_receive_with_flow_control(publisher_client, topic, 
subscription_async, capsys): _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_flow_control( - PROJECT, SUBSCRIPTION_ASYNC, 5 - ) + subscriber.receive_messages_with_flow_control(PROJECT, SUBSCRIPTION_ASYNC, 5) out, _ = capsys.readouterr() assert "Listening" in out @@ -238,9 +286,7 @@ def test_receive_with_flow_control( assert "message" in out -def test_receive_synchronously( - publisher_client, topic, subscription_sync, capsys -): +def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): _publish_messages(publisher_client, topic) subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC) @@ -254,17 +300,13 @@ def test_receive_synchronously_with_lease( ): _publish_messages(publisher_client, topic) - subscriber.synchronous_pull_with_lease_management( - PROJECT, SUBSCRIPTION_SYNC - ) + subscriber.synchronous_pull_with_lease_management(PROJECT, SUBSCRIPTION_SYNC) out, _ = capsys.readouterr() assert "Done." in out -def test_listen_for_errors( - publisher_client, topic, subscription_async, capsys -): +def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): _publish_messages(publisher_client, topic) @@ -274,3 +316,26 @@ def test_listen_for_errors( assert "Listening" in out assert subscription_async in out assert "threw an exception" in out + + +def test_receive_with_delivery_attempts( + publisher_client, topic, subscription_dlq, dead_letter_topic, capsys +): + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_delivery_attempts(PROJECT, SUBSCRIPTION_DLQ, 10) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_dlq in out + assert "Received message: " in out + assert "message 4" in out + assert "With delivery attempts: " in out + + +def test_remove_dead_letter_policy(subscriber_client, subscription_dlq): + subscription_after_update = subscriber.remove_dead_letter_policy( + PROJECT, TOPIC, SUBSCRIPTION_DLQ + ) + + assert 
subscription_after_update.dead_letter_policy.dead_letter_topic == "" From ca15d0aa6400c8f7eb82a0c430a73b85426d587b Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 2 Jun 2020 17:04:56 -0700 Subject: [PATCH 0569/1197] fix: make timeout an optional positional arg [(#3938)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3938) * fix: make timeout an optional positional arg * place `none` back in function signature Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- .../samples/snippets/subscriber.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index b5af760aed49..5dc468e23a80 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -678,7 +678,7 @@ def callback(message): receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) receive_parser.add_argument("subscription_name") - receive_parser.add_argument("--timeout", default=None, type=float) + receive_parser.add_argument("timeout", default=None, type=float, nargs="?") receive_with_custom_attributes_parser = subparsers.add_parser( "receive-custom-attributes", @@ -686,14 +686,16 @@ def callback(message): ) receive_with_custom_attributes_parser.add_argument("subscription_name") receive_with_custom_attributes_parser.add_argument( - "--timeout", default=None, type=float + "timeout", default=None, type=float, nargs="?" ) receive_with_flow_control_parser = subparsers.add_parser( "receive-flow-control", help=receive_messages_with_flow_control.__doc__ ) receive_with_flow_control_parser.add_argument("subscription_name") - receive_with_flow_control_parser.add_argument("--timeout", default=None, type=float) + receive_with_flow_control_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) synchronous_pull_parser = subparsers.add_parser( "receive-synchronously", help=synchronous_pull.__doc__ @@ -710,7 +712,9 @@ def callback(message): "listen-for-errors", help=listen_for_errors.__doc__ ) listen_for_errors_parser.add_argument("subscription_name") - listen_for_errors_parser.add_argument("--timeout", default=None, type=float) + listen_for_errors_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) receive_messages_with_delivery_attempts_parser = subparsers.add_parser( "receive-messages-with-delivery-attempts", @@ -718,7 +722,7 @@ def callback(message): ) receive_messages_with_delivery_attempts_parser.add_argument("subscription_name") receive_messages_with_delivery_attempts_parser.add_argument( - "--timeout", default=None, type=float + "timeout", default=None, type=float, nargs="?" ) args = parser.parse_args() From 2645c3f127291020e9d9fd7d2303a9da0a046e83 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 4 Jun 2020 13:46:07 -0700 Subject: [PATCH 0570/1197] fix: replace name with id in samples [(#3953)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/3953) --- .../samples/snippets/iam.py | 112 +++++---- .../samples/snippets/iam_test.py | 4 +- .../samples/snippets/publisher.py | 72 +++--- .../samples/snippets/quickstart/pub.py | 13 +- .../samples/snippets/quickstart/sub.py | 19 +- .../samples/snippets/quickstart/sub_test.py | 8 +- .../samples/snippets/subscriber.py | 218 +++++++++--------- 7 files changed, 225 insertions(+), 221 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index eb0c8246307b..71c55d764c0c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -23,14 +23,18 @@ import argparse -from google.cloud import pubsub_v1 - -def get_topic_policy(project, topic_name): +def get_topic_policy(project, topic_id): """Prints the IAM policy for the given 
topic.""" # [START pubsub_get_topic_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_name) + topic_path = client.topic_path(project, topic_id) policy = client.get_iam_policy(topic_path) @@ -40,11 +44,17 @@ def get_topic_policy(project, topic_name): # [END pubsub_get_topic_policy] -def get_subscription_policy(project, subscription_name): +def get_subscription_policy(project, subscription_id): """Prints the IAM policy for the given subscription.""" # [START pubsub_get_subscription_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_name) + subscription_path = client.subscription_path(project, subscription_id) policy = client.get_iam_policy(subscription_path) @@ -56,11 +66,17 @@ def get_subscription_policy(project, subscription_name): # [END pubsub_get_subscription_policy] -def set_topic_policy(project, topic_name): +def set_topic_policy(project, topic_id): """Sets the IAM policy for a topic.""" # [START pubsub_set_topic_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_name) + topic_path = client.topic_path(project, topic_id) policy = client.get_iam_policy(topic_path) @@ -75,15 +91,21 @@ def set_topic_policy(project, topic_name): # Set the policy policy = client.set_iam_policy(topic_path, policy) - print("IAM policy for topic {} set: {}".format(topic_name, policy)) + print("IAM policy for topic {} set: {}".format(topic_id, policy)) # [END pubsub_set_topic_policy] -def set_subscription_policy(project, subscription_name): +def 
set_subscription_policy(project, subscription_id): """Sets the IAM policy for a topic.""" # [START pubsub_set_subscription_policy] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_name) + subscription_path = client.subscription_path(project, subscription_id) policy = client.get_iam_policy(subscription_path) @@ -91,48 +113,50 @@ def set_subscription_policy(project, subscription_name): policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) # Add a group as an editor. - policy.bindings.add( - role="roles/editor", members=["group:cloud-logs@google.com"] - ) + policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) # Set the policy policy = client.set_iam_policy(subscription_path, policy) - print( - "IAM policy for subscription {} set: {}".format( - subscription_name, policy - ) - ) + print("IAM policy for subscription {} set: {}".format(subscription_id, policy)) client.close() # [END pubsub_set_subscription_policy] -def check_topic_permissions(project, topic_name): +def check_topic_permissions(project, topic_id): """Checks to which permissions are available on the given topic.""" # [START pubsub_test_topic_permissions] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_name) + topic_path = client.topic_path(project, topic_id) permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] - allowed_permissions = client.test_iam_permissions( - topic_path, permissions_to_check - ) + allowed_permissions = client.test_iam_permissions(topic_path, permissions_to_check) print( - "Allowed permissions for topic {}: {}".format( - topic_path, allowed_permissions - ) + "Allowed 
permissions for topic {}: {}".format(topic_path, allowed_permissions) ) # [END pubsub_test_topic_permissions] -def check_subscription_permissions(project, subscription_name): +def check_subscription_permissions(project, subscription_id): """Checks to which permissions are available on the given subscription.""" # [START pubsub_test_subscription_permissions] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_name) + subscription_path = client.subscription_path(project, subscription_id) permissions_to_check = [ "pubsub.subscriptions.consume", @@ -155,8 +179,7 @@ def check_subscription_permissions(project, subscription_name): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project", help="Your Google Cloud project ID") @@ -165,45 +188,44 @@ def check_subscription_permissions(project, subscription_name): get_topic_policy_parser = subparsers.add_parser( "get-topic-policy", help=get_topic_policy.__doc__ ) - get_topic_policy_parser.add_argument("topic_name") + get_topic_policy_parser.add_argument("topic_id") get_subscription_policy_parser = subparsers.add_parser( "get-subscription-policy", help=get_subscription_policy.__doc__ ) - get_subscription_policy_parser.add_argument("subscription_name") + get_subscription_policy_parser.add_argument("subscription_id") set_topic_policy_parser = subparsers.add_parser( "set-topic-policy", help=set_topic_policy.__doc__ ) - set_topic_policy_parser.add_argument("topic_name") + set_topic_policy_parser.add_argument("topic_id") set_subscription_policy_parser = subparsers.add_parser( "set-subscription-policy", help=set_subscription_policy.__doc__ ) - 
set_subscription_policy_parser.add_argument("subscription_name") + set_subscription_policy_parser.add_argument("subscription_id") check_topic_permissions_parser = subparsers.add_parser( "check-topic-permissions", help=check_topic_permissions.__doc__ ) - check_topic_permissions_parser.add_argument("topic_name") + check_topic_permissions_parser.add_argument("topic_id") check_subscription_permissions_parser = subparsers.add_parser( - "check-subscription-permissions", - help=check_subscription_permissions.__doc__, + "check-subscription-permissions", help=check_subscription_permissions.__doc__, ) - check_subscription_permissions_parser.add_argument("subscription_name") + check_subscription_permissions_parser.add_argument("subscription_id") args = parser.parse_args() if args.command == "get-topic-policy": - get_topic_policy(args.project, args.topic_name) + get_topic_policy(args.project, args.topic_id) elif args.command == "get-subscription-policy": - get_subscription_policy(args.project, args.subscription_name) + get_subscription_policy(args.project, args.subscription_id) elif args.command == "set-topic-policy": - set_topic_policy(args.project, args.topic_name) + set_topic_policy(args.project, args.topic_id) elif args.command == "set-subscription-policy": - set_subscription_policy(args.project, args.subscription_name) + set_subscription_policy(args.project, args.subscription_id) elif args.command == "check-topic-permissions": - check_topic_permissions(args.project, args.topic_name) + check_topic_permissions(args.project, args.topic_id) elif args.command == "check-subscription-permissions": - check_subscription_permissions(args.project, args.subscription_name) + check_subscription_permissions(args.project, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index f88cde851e7d..31764a056e8d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -56,9 +56,7 @@ def subscriber_client(): @pytest.fixture def subscription(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION - ) + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) try: subscriber_client.delete_subscription(subscription_path) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 9e7820fbf305..477b31b9cf71 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -40,7 +40,7 @@ def list_topics(project_id): # [END pubsub_list_topics] -def create_topic(project_id, topic_name): +def create_topic(project_id, topic_id): """Create a new Pub/Sub topic.""" # [START pubsub_quickstart_create_topic] # [START pubsub_create_topic] @@ -48,10 +48,10 @@ def create_topic(project_id, topic_name): # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) topic = publisher.create_topic(topic_path) @@ -60,17 +60,17 @@ def create_topic(project_id, topic_name): # [END pubsub_create_topic] -def delete_topic(project_id, topic_name): +def delete_topic(project_id, topic_id): """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_topic] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) publisher.delete_topic(topic_path) @@ -78,7 +78,7 @@ def delete_topic(project_id, topic_name): # [END 
pubsub_delete_topic] -def publish_messages(project_id, topic_name): +def publish_messages(project_id, topic_id): """Publishes multiple messages to a Pub/Sub topic.""" # [START pubsub_quickstart_publisher] # [START pubsub_publish] @@ -86,12 +86,12 @@ def publish_messages(project_id, topic_name): # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" publisher = pubsub_v1.PublisherClient() # The `topic_path` method creates a fully qualified identifier - # in the form `projects/{project_id}/topics/{topic_name}` - topic_path = publisher.topic_path(project_id, topic_name) + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): data = u"Message number {}".format(n) @@ -106,7 +106,7 @@ def publish_messages(project_id, topic_name): # [END pubsub_publish] -def publish_messages_with_custom_attributes(project_id, topic_name): +def publish_messages_with_custom_attributes(project_id, topic_id): """Publishes multiple messages with custom attributes to a Pub/Sub topic.""" # [START pubsub_publish_custom_attributes] @@ -114,10 +114,10 @@ def publish_messages_with_custom_attributes(project_id, topic_name): # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): data = u"Message number {}".format(n) @@ -133,7 +133,7 @@ def publish_messages_with_custom_attributes(project_id, topic_name): # [END pubsub_publish_custom_attributes] -def publish_messages_with_error_handler(project_id, topic_name): +def publish_messages_with_error_handler(project_id, topic_id): # [START pubsub_publish_messages_error_handler] """Publishes multiple messages to a Pub/Sub topic with an error handler.""" import time @@ -142,10 +142,10 
@@ def publish_messages_with_error_handler(project_id, topic_name): # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) futures = dict() @@ -178,14 +178,14 @@ def callback(f): # [END pubsub_publish_messages_error_handler] -def publish_messages_with_batch_settings(project_id, topic_name): +def publish_messages_with_batch_settings(project_id, topic_id): """Publishes multiple messages to a Pub/Sub topic with batch settings.""" # [START pubsub_publisher_batch_settings] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" # Configure the batch to publish as soon as there is ten messages, # one kilobyte of data, or one second has passed. @@ -195,7 +195,7 @@ def publish_messages_with_batch_settings(project_id, topic_name): max_latency=1, # default 10 ms ) publisher = pubsub_v1.PublisherClient(batch_settings) - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) # Resolve the publish future in a separate thread. def callback(future): @@ -214,14 +214,14 @@ def callback(future): # [END pubsub_publisher_batch_settings] -def publish_messages_with_retry_settings(project_id, topic_name): +def publish_messages_with_retry_settings(project_id, topic_id): """Publishes messages with custom retry settings.""" # [START pubsub_publisher_retry_settings] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" # Configure the retry settings. Defaults will be overwritten. 
retry_settings = { @@ -260,7 +260,7 @@ def publish_messages_with_retry_settings(project_id, topic_name): } publisher = pubsub_v1.PublisherClient(client_config=retry_settings) - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): data = u"Message number {}".format(n) @@ -283,52 +283,52 @@ def publish_messages_with_retry_settings(project_id, topic_name): subparsers.add_parser("list", help=list_topics.__doc__) create_parser = subparsers.add_parser("create", help=create_topic.__doc__) - create_parser.add_argument("topic_name") + create_parser.add_argument("topic_id") delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) - delete_parser.add_argument("topic_name") + delete_parser.add_argument("topic_id") publish_parser = subparsers.add_parser("publish", help=publish_messages.__doc__) - publish_parser.add_argument("topic_name") + publish_parser.add_argument("topic_id") publish_with_custom_attributes_parser = subparsers.add_parser( "publish-with-custom-attributes", help=publish_messages_with_custom_attributes.__doc__, ) - publish_with_custom_attributes_parser.add_argument("topic_name") + publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, ) - publish_with_error_handler_parser.add_argument("topic_name") + publish_with_error_handler_parser.add_argument("topic_id") publish_with_batch_settings_parser = subparsers.add_parser( "publish-with-batch-settings", help=publish_messages_with_batch_settings.__doc__, ) - publish_with_batch_settings_parser.add_argument("topic_name") + publish_with_batch_settings_parser.add_argument("topic_id") publish_with_retry_settings_parser = subparsers.add_parser( "publish-with-retry-settings", help=publish_messages_with_retry_settings.__doc__, ) - 
publish_with_retry_settings_parser.add_argument("topic_name") + publish_with_retry_settings_parser.add_argument("topic_id") args = parser.parse_args() if args.command == "list": list_topics(args.project_id) elif args.command == "create": - create_topic(args.project_id, args.topic_name) + create_topic(args.project_id, args.topic_id) elif args.command == "delete": - delete_topic(args.project_id, args.topic_name) + delete_topic(args.project_id, args.topic_id) elif args.command == "publish": - publish_messages(args.project_id, args.topic_name) + publish_messages(args.project_id, args.topic_id) elif args.command == "publish-with-custom-attributes": - publish_messages_with_custom_attributes(args.project_id, args.topic_name) + publish_messages_with_custom_attributes(args.project_id, args.topic_id) elif args.command == "publish-with-error-handler": - publish_messages_with_error_handler(args.project_id, args.topic_name) + publish_messages_with_error_handler(args.project_id, args.topic_id) elif args.command == "publish-with-batch-settings": - publish_messages_with_batch_settings(args.project_id, args.topic_name) + publish_messages_with_batch_settings(args.project_id, args.topic_id) elif args.command == "publish-with-retry-settings": - publish_messages_with_retry_settings(args.project_id, args.topic_name) + publish_messages_with_retry_settings(args.project_id, args.topic_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index a3f8087ecd15..16432c0c3627 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -46,15 +46,15 @@ def callback(api_future): return callback -def pub(project_id, topic_name): +def pub(project_id, topic_id): """Publishes a message to a Pub/Sub topic.""" # [START pubsub_quickstart_pub_client] # Initialize a Publisher client. 
client = pubsub_v1.PublisherClient() # [END pubsub_quickstart_pub_client] # Create a fully qualified identifier in the form of - # `projects/{project_id}/topics/{topic_name}` - topic_path = client.topic_path(project_id, topic_name) + # `projects/{project_id}/topics/{topic_id}` + topic_path = client.topic_path(project_id, topic_id) # Data sent to Cloud Pub/Sub must be a bytestring. data = b"Hello, World!" @@ -75,13 +75,12 @@ def pub(project_id, topic_name): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Google Cloud project ID") - parser.add_argument("topic_name", help="Pub/Sub topic name") + parser.add_argument("topic_id", help="Pub/Sub topic ID") args = parser.parse_args() - pub(args.project_id, args.topic_name) + pub(args.project_id, args.topic_id) # [END pubsub_quickstart_pub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 1d90726f5b04..efe00891593e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -23,23 +23,19 @@ # [END pubsub_quickstart_sub_deps] -def sub(project_id, subscription_name): +def sub(project_id, subscription_id): """Receives messages from a Pub/Sub subscription.""" # [START pubsub_quickstart_sub_client] # Initialize a Subscriber client subscriber_client = pubsub_v1.SubscriberClient() # [END pubsub_quickstart_sub_client] # Create a fully qualified identifier in the form of - # `projects/{project_id}/subscriptions/{subscription_name}` - subscription_path = subscriber_client.subscription_path( - project_id, subscription_name - ) + # `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = 
subscriber_client.subscription_path(project_id, subscription_id) def callback(message): print( - "Received message {} of message ID {}\n".format( - message, message.message_id - ) + "Received message {} of message ID {}\n".format(message, message.message_id) ) # Acknowledge the message. Unack'ed messages will be redelivered. message.ack() @@ -62,13 +58,12 @@ def callback(message): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Google Cloud project ID") - parser.add_argument("subscription_name", help="Pub/Sub subscription name") + parser.add_argument("subscription_id", help="Pub/Sub subscription ID") args = parser.parse_args() - sub(args.project_id, args.subscription_name) + sub(args.project_id, args.subscription_id) # [END pubsub_quickstart_sub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 2754dc56b5e5..65d5fa111a05 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -48,9 +48,7 @@ def topic_path(): @pytest.fixture(scope="module") def subscription_path(topic_path): - subscription_path = subscriber_client.subscription_path( - PROJECT, SUBSCRIPTION - ) + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) try: subscription = subscriber_client.create_subscription( @@ -82,9 +80,7 @@ def test_sub(monkeypatch, topic_path, subscription_path, capsys): monkeypatch.setattr(pubsub_v1, "SubscriberClient", mock_client_constructor) def mock_subscribe(subscription_path, callback=None): - real_future = real_client.subscribe( - subscription_path, callback=callback - ) + real_future = real_client.subscribe(subscription_path, 
callback=callback) mock_future = mock.Mock(spec=real_future, wraps=real_future) def mock_result(): diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 5dc468e23a80..677bfe359fd1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -24,17 +24,17 @@ import argparse -def list_subscriptions_in_topic(project_id, topic_name): +def list_subscriptions_in_topic(project_id, topic_id): """Lists all subscriptions for a given topic.""" # [START pubsub_list_topic_subscriptions] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" publisher = pubsub_v1.PublisherClient() - topic_path = publisher.topic_path(project_id, topic_name) + topic_path = publisher.topic_path(project_id, topic_id) for subscription in publisher.list_topic_subscriptions(topic_path): print(subscription) @@ -59,19 +59,19 @@ def list_subscriptions_in_project(project_id): # [END pubsub_list_subscriptions] -def create_subscription(project_id, topic_name, subscription_name): +def create_subscription(project_id, topic_id, subscription_id): """Create a new pull subscription on the given topic.""" # [START pubsub_create_pull_subscription] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" - # subscription_name = "your-subscription-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path(project_id, subscription_name) + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) subscription = 
subscriber.create_subscription(subscription_path, topic_path) @@ -82,7 +82,7 @@ def create_subscription(project_id, topic_name, subscription_name): def create_subscription_with_dead_letter_topic( - project_id, topic_name, subscription_name, dead_letter_topic_name + project_id, topic_id, subscription_id, dead_letter_topic_id ): """Create a subscription with dead letter policy.""" # [START pubsub_dead_letter_create_subscription] @@ -94,17 +94,17 @@ def create_subscription_with_dead_letter_topic( # endpoint = "https://my-test-project.appspot.com/push" # TODO(developer): This is an existing topic that the subscription # with dead letter policy is attached to. - # topic_name = "your-topic-id" + # topic_id = "your-topic-id" # TODO(developer): This is an existing subscription with a dead letter policy. - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" # TODO(developer): This is an existing dead letter topic that the subscription # with dead letter policy will forward dead letter messages to. 
- # dead_letter_topic_name = "your-dead-letter-topic-id" + # dead_letter_topic_id = "your-dead-letter-topic-id" subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path(project_id, subscription_name) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_name) + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) dead_letter_policy = DeadLetterPolicy( dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 @@ -129,20 +129,20 @@ def create_subscription_with_dead_letter_topic( # [END pubsub_dead_letter_create_subscription] -def create_push_subscription(project_id, topic_name, subscription_name, endpoint): +def create_push_subscription(project_id, topic_id, subscription_id, endpoint): """Create a new push subscription on the given topic.""" # [START pubsub_create_push_subscription] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" - # subscription_name = "your-subscription-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" # endpoint = "https://my-test-project.appspot.com/push" subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path(project_id, subscription_name) + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) @@ -157,17 +157,17 @@ def create_push_subscription(project_id, topic_name, subscription_name, endpoint # [END pubsub_create_push_subscription] -def delete_subscription(project_id, subscription_name): +def 
delete_subscription(project_id, subscription_id): """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) subscriber.delete_subscription(subscription_path) @@ -177,7 +177,7 @@ def delete_subscription(project_id, subscription_name): # [END pubsub_delete_subscription] -def update_push_subscription(project_id, topic_name, subscription_name, endpoint): +def update_push_subscription(project_id, topic_id, subscription_id, endpoint): """ Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a subscription, such as @@ -188,17 +188,17 @@ def update_push_subscription(project_id, topic_name, subscription_name, endpoint # TODO(developer) # project_id = "your-project-id" - # topic_name = "your-topic-id" - # subscription_name = "your-subscription-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" # endpoint = "https://my-test-project.appspot.com/push" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) subscription = pubsub_v1.types.Subscription( - name=subscription_path, topic=topic_name, push_config=push_config + name=subscription_path, topic=topic_id, push_config=push_config ) update_mask = {"paths": {"push_config"}} @@ -213,7 +213,7 @@ def update_push_subscription(project_id, topic_name, subscription_name, endpoint def update_subscription_with_dead_letter_policy( - project_id, topic_name, 
subscription_name, dead_letter_topic_name + project_id, topic_id, subscription_id, dead_letter_topic_id ): """Update a subscription's dead letter policy.""" # [START pubsub_dead_letter_update_subscription] @@ -224,17 +224,17 @@ def update_subscription_with_dead_letter_policy( # project_id = "your-project-id" # TODO(developer): This is an existing topic that the subscription # with dead letter policy is attached to. - # topic_name = "your-topic-name" + # topic_id = "your-topic-id" # TODO(developer): This is an existing subscription with a dead letter policy. - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" # TODO(developer): This is an existing dead letter topic that the subscription # with dead letter policy will forward dead letter messages to. - # dead_letter_topic_name = "your-dead-letter-topic-id" + # dead_letter_topic_id = "your-dead-letter-topic-id" subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path(project_id, subscription_name) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_name) + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) subscription_before_update = subscriber.get_subscription(subscription_path) print("Before the update: {}".format(subscription_before_update)) @@ -264,7 +264,7 @@ def update_subscription_with_dead_letter_policy( return subscription_after_update -def remove_dead_letter_policy(project_id, topic_name, subscription_name): +def remove_dead_letter_policy(project_id, topic_id, subscription_id): """Remove dead letter policy from a subscription.""" # [START pubsub_dead_letter_remove] from google.cloud import pubsub_v1 @@ -274,13 +274,13 @@ def remove_dead_letter_policy(project_id, topic_name, 
subscription_name): # project_id = "your-project-id" # TODO(developer): This is an existing topic that the subscription # with dead letter policy is attached to. - # topic_name = "your-topic-name" + # topic_id = "your-topic-id" # TODO(developer): This is an existing subscription with a dead letter policy. - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_name) - subscription_path = subscriber.subscription_path(project_id, subscription_name) + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) subscription_before_update = subscriber.get_subscription(subscription_path) print("Before removing the policy: {}".format(subscription_before_update)) @@ -309,7 +309,7 @@ def remove_dead_letter_policy(project_id, topic_name, subscription_name): return subscription_after_update -def receive_messages(project_id, subscription_name, timeout=None): +def receive_messages(project_id, subscription_id, timeout=None): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] # [START pubsub_quickstart_subscriber] @@ -318,14 +318,14 @@ def receive_messages(project_id, subscription_name, timeout=None): # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" # Number of seconds the subscriber should listen for messages # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() # The `subscription_path` method creates a fully qualified identifier - # in the form `projects/{project_id}/subscriptions/{subscription_name}` - subscription_path = subscriber.subscription_path(project_id, subscription_name) + # in the form `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber.subscription_path(project_id, subscription_id) def 
callback(message): print("Received message: {}".format(message)) @@ -346,9 +346,7 @@ def callback(message): # [END pubsub_quickstart_subscriber] -def receive_messages_with_custom_attributes( - project_id, subscription_name, timeout=None -): +def receive_messages_with_custom_attributes(project_id, subscription_id, timeout=None): """Receives messages from a pull subscription.""" # [START pubsub_subscriber_sync_pull_custom_attributes] # [START pubsub_subscriber_async_pull_custom_attributes] @@ -357,12 +355,12 @@ def receive_messages_with_custom_attributes( # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" # Number of seconds the subscriber should listen for messages # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): print("Received message: {}".format(message.data)) @@ -388,7 +386,7 @@ def callback(message): # [END pubsub_subscriber_sync_pull_custom_attributes] -def receive_messages_with_flow_control(project_id, subscription_name, timeout=None): +def receive_messages_with_flow_control(project_id, subscription_id, timeout=None): """Receives messages from a pull subscription with flow control.""" # [START pubsub_subscriber_flow_settings] from concurrent.futures import TimeoutError @@ -396,12 +394,12 @@ def receive_messages_with_flow_control(project_id, subscription_name, timeout=No # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" # Number of seconds the subscriber should listen for messages # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, 
subscription_id) def callback(message): print("Received message: {}".format(message.data)) @@ -426,17 +424,17 @@ def callback(message): # [END pubsub_subscriber_flow_settings] -def synchronous_pull(project_id, subscription_name): +def synchronous_pull(project_id, subscription_id): """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) NUM_MESSAGES = 3 @@ -461,7 +459,7 @@ def synchronous_pull(project_id, subscription_name): # [END pubsub_subscriber_sync_pull] -def synchronous_pull_with_lease_management(project_id, subscription_name): +def synchronous_pull_with_lease_management(project_id, subscription_id): """Pulling messages synchronously with lease management""" # [START pubsub_subscriber_sync_pull_with_lease] import logging @@ -473,10 +471,10 @@ def synchronous_pull_with_lease_management(project_id, subscription_name): # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) NUM_MESSAGES = 2 ACK_DEADLINE = 30 @@ -547,19 +545,19 @@ def worker(msg): # [END pubsub_subscriber_sync_pull_with_lease] -def listen_for_errors(project_id, subscription_name, timeout=None): +def listen_for_errors(project_id, subscription_id, timeout=None): """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] from google.cloud import pubsub_v1 # 
TODO(developer) - # project_id = "Your Google Cloud Project ID" - # subscription_name = "Your Pubsub subscription name" + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" # Number of seconds the subscriber should listen for messages # timeout = 5.0 subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): print("Received message: {}".format(message)) @@ -578,25 +576,23 @@ def callback(message): streaming_pull_future.cancel() print( "Listening for messages on {} threw an exception: {}.".format( - subscription_name, e + subscription_id, e ) ) # [END pubsub_subscriber_error_listener] -def receive_messages_with_delivery_attempts( - project_id, subscription_name, timeout=None -): +def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout=None): # [START pubsub_dead_letter_delivery_attempt] from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" - # subscription_name = "your-subscription-id" + # subscription_id = "your-subscription-id" subscriber = pubsub_v1.SubscriberClient() - subscription_path = subscriber.subscription_path(project_id, subscription_name) + subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): print("Received message: {}".format(message)) @@ -627,64 +623,64 @@ def callback(message): list_in_topic_parser = subparsers.add_parser( "list-in-topic", help=list_subscriptions_in_topic.__doc__ ) - list_in_topic_parser.add_argument("topic_name") + list_in_topic_parser.add_argument("topic_id") list_in_project_parser = subparsers.add_parser( "list-in-project", help=list_subscriptions_in_project.__doc__ ) create_parser = subparsers.add_parser("create", help=create_subscription.__doc__) - create_parser.add_argument("topic_name") - 
create_parser.add_argument("subscription_name") + create_parser.add_argument("topic_id") + create_parser.add_argument("subscription_id") create_with_dead_letter_policy_parser = subparsers.add_parser( "create-with-dead-letter-policy", help=create_subscription_with_dead_letter_topic.__doc__, ) - create_with_dead_letter_policy_parser.add_argument("topic_name") - create_with_dead_letter_policy_parser.add_argument("subscription_name") - create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_name") + create_with_dead_letter_policy_parser.add_argument("topic_id") + create_with_dead_letter_policy_parser.add_argument("subscription_id") + create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_id") create_push_parser = subparsers.add_parser( "create-push", help=create_push_subscription.__doc__ ) - create_push_parser.add_argument("topic_name") - create_push_parser.add_argument("subscription_name") + create_push_parser.add_argument("topic_id") + create_push_parser.add_argument("subscription_id") create_push_parser.add_argument("endpoint") delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) - delete_parser.add_argument("subscription_name") + delete_parser.add_argument("subscription_id") update_push_parser = subparsers.add_parser( "update-push", help=update_push_subscription.__doc__ ) - update_push_parser.add_argument("topic_name") - update_push_parser.add_argument("subscription_name") + update_push_parser.add_argument("topic_id") + update_push_parser.add_argument("subscription_id") update_push_parser.add_argument("endpoint") update_dead_letter_policy_parser = subparsers.add_parser( "update-dead-letter-policy", help=update_subscription_with_dead_letter_policy.__doc__, ) - update_dead_letter_policy_parser.add_argument("topic_name") - update_dead_letter_policy_parser.add_argument("subscription_name") - update_dead_letter_policy_parser.add_argument("dead_letter_topic_name") + 
update_dead_letter_policy_parser.add_argument("topic_id") + update_dead_letter_policy_parser.add_argument("subscription_id") + update_dead_letter_policy_parser.add_argument("dead_letter_topic_id") remove_dead_letter_policy_parser = subparsers.add_parser( "remove-dead-letter-policy", help=remove_dead_letter_policy.__doc__ ) - remove_dead_letter_policy_parser.add_argument("topic_name") - remove_dead_letter_policy_parser.add_argument("subscription_name") + remove_dead_letter_policy_parser.add_argument("topic_id") + remove_dead_letter_policy_parser.add_argument("subscription_id") receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) - receive_parser.add_argument("subscription_name") + receive_parser.add_argument("subscription_id") receive_parser.add_argument("timeout", default=None, type=float, nargs="?") receive_with_custom_attributes_parser = subparsers.add_parser( "receive-custom-attributes", help=receive_messages_with_custom_attributes.__doc__, ) - receive_with_custom_attributes_parser.add_argument("subscription_name") + receive_with_custom_attributes_parser.add_argument("subscription_id") receive_with_custom_attributes_parser.add_argument( "timeout", default=None, type=float, nargs="?" ) @@ -692,7 +688,7 @@ def callback(message): receive_with_flow_control_parser = subparsers.add_parser( "receive-flow-control", help=receive_messages_with_flow_control.__doc__ ) - receive_with_flow_control_parser.add_argument("subscription_name") + receive_with_flow_control_parser.add_argument("subscription_id") receive_with_flow_control_parser.add_argument( "timeout", default=None, type=float, nargs="?" 
) @@ -700,18 +696,18 @@ def callback(message): synchronous_pull_parser = subparsers.add_parser( "receive-synchronously", help=synchronous_pull.__doc__ ) - synchronous_pull_parser.add_argument("subscription_name") + synchronous_pull_parser.add_argument("subscription_id") synchronous_pull_with_lease_management_parser = subparsers.add_parser( "receive-synchronously-with-lease", help=synchronous_pull_with_lease_management.__doc__, ) - synchronous_pull_with_lease_management_parser.add_argument("subscription_name") + synchronous_pull_with_lease_management_parser.add_argument("subscription_id") listen_for_errors_parser = subparsers.add_parser( "listen-for-errors", help=listen_for_errors.__doc__ ) - listen_for_errors_parser.add_argument("subscription_name") + listen_for_errors_parser.add_argument("subscription_id") listen_for_errors_parser.add_argument( "timeout", default=None, type=float, nargs="?" ) @@ -720,7 +716,7 @@ def callback(message): "receive-messages-with-delivery-attempts", help=receive_messages_with_delivery_attempts.__doc__, ) - receive_messages_with_delivery_attempts_parser.add_argument("subscription_name") + receive_messages_with_delivery_attempts_parser.add_argument("subscription_id") receive_messages_with_delivery_attempts_parser.add_argument( "timeout", default=None, type=float, nargs="?" 
) @@ -728,56 +724,54 @@ def callback(message): args = parser.parse_args() if args.command == "list-in-topic": - list_subscriptions_in_topic(args.project_id, args.topic_name) + list_subscriptions_in_topic(args.project_id, args.topic_id) elif args.command == "list-in-project": list_subscriptions_in_project(args.project_id) elif args.command == "create": - create_subscription(args.project_id, args.topic_name, args.subscription_name) + create_subscription(args.project_id, args.topic_id, args.subscription_id) elif args.command == "create-with-dead-letter-policy": create_subscription_with_dead_letter_topic( args.project_id, - args.topic_name, - args.subscription_name, - args.dead_letter_topic_name, + args.topic_id, + args.subscription_id, + args.dead_letter_topic_id, ) elif args.command == "create-push": create_push_subscription( - args.project_id, args.topic_name, args.subscription_name, args.endpoint, + args.project_id, args.topic_id, args.subscription_id, args.endpoint, ) elif args.command == "delete": - delete_subscription(args.project_id, args.subscription_name) + delete_subscription(args.project_id, args.subscription_id) elif args.command == "update-push": update_push_subscription( - args.project_id, args.topic_name, args.subscription_name, args.endpoint, + args.project_id, args.topic_id, args.subscription_id, args.endpoint, ) elif args.command == "update-dead-letter-policy": update_subscription_with_dead_letter_policy( args.project_id, - args.topic_name, - args.subscription_name, - args.dead_letter_topic_name, + args.topic_id, + args.subscription_id, + args.dead_letter_topic_id, ) elif args.command == "remove-dead-letter-policy": - remove_dead_letter_policy( - args.project_id, args.topic_name, args.subscription_name - ) + remove_dead_letter_policy(args.project_id, args.topic_id, args.subscription_id) elif args.command == "receive": - receive_messages(args.project_id, args.subscription_name, args.timeout) + receive_messages(args.project_id, args.subscription_id, 
args.timeout) elif args.command == "receive-custom-attributes": receive_messages_with_custom_attributes( - args.project_id, args.subscription_name, args.timeout + args.project_id, args.subscription_id, args.timeout ) elif args.command == "receive-flow-control": receive_messages_with_flow_control( - args.project_id, args.subscription_name, args.timeout + args.project_id, args.subscription_id, args.timeout ) elif args.command == "receive-synchronously": - synchronous_pull(args.project_id, args.subscription_name) + synchronous_pull(args.project_id, args.subscription_id) elif args.command == "receive-synchronously-with-lease": - synchronous_pull_with_lease_management(args.project_id, args.subscription_name) + synchronous_pull_with_lease_management(args.project_id, args.subscription_id) elif args.command == "listen-for-errors": - listen_for_errors(args.project_id, args.subscription_name, args.timeout) + listen_for_errors(args.project_id, args.subscription_id, args.timeout) elif args.command == "receive-messages-with-delivery-attempts": receive_messages_with_delivery_attempts( - args.project_id, args.subscription_name, args.timeout + args.project_id, args.subscription_id, args.timeout ) From 7b6f325fbf3c027f77a99265fc5c8988f65a6bce Mon Sep 17 00:00:00 2001 From: Kurtis Van Gent <31518063+kurtisvg@users.noreply.github.com> Date: Tue, 9 Jun 2020 14:34:27 -0700 Subject: [PATCH 0571/1197] Replace GCLOUD_PROJECT with GOOGLE_CLOUD_PROJECT. 
[(#4022)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4022) --- packages/google-cloud-pubsub/samples/snippets/iam_test.py | 2 +- packages/google-cloud-pubsub/samples/snippets/publisher_test.py | 2 +- .../google-cloud-pubsub/samples/snippets/quickstart/pub_test.py | 2 +- .../google-cloud-pubsub/samples/snippets/quickstart/sub_test.py | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 31764a056e8d..d196953f6207 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -21,7 +21,7 @@ import iam UUID = uuid.uuid4().hex -PROJECT = os.environ["GCLOUD_PROJECT"] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = "iam-test-topic-" + UUID SUBSCRIPTION = "iam-test-subscription-" + UUID diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index dc6095508112..b5c2ea1ea4b5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -24,7 +24,7 @@ import publisher UUID = uuid.uuid4().hex -PROJECT = os.environ["GCLOUD_PROJECT"] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC_ADMIN = "publisher-test-topic-admin-" + UUID TOPIC_PUBLISH = "publisher-test-topic-publish-" + UUID diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py index 24010c76e830..6f5cc06c4456 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py @@ -25,7 +25,7 @@ UUID = uuid.uuid4().hex -PROJECT = os.environ["GCLOUD_PROJECT"] 
+PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = "quickstart-pub-test-topic-" + UUID diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 65d5fa111a05..38047422a935 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -25,7 +25,7 @@ UUID = uuid.uuid4().hex -PROJECT = os.environ["GCLOUD_PROJECT"] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = "quickstart-sub-test-topic-" + UUID SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 6b90396f942e..a7f7c139c258 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -22,7 +22,7 @@ import subscriber UUID = uuid.uuid4().hex -PROJECT = os.environ["GCLOUD_PROJECT"] +PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = "subscription-test-topic-" + UUID DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID From a199e3750461fba641ad03f9bfeb5c73173e9fed Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 11 Jun 2020 12:31:06 -0700 Subject: [PATCH 0572/1197] nit: remove redundant/wrong Pub/Sub region tag [(#4027)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4027) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 677bfe359fd1..a367f181c04b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py 
@@ -348,7 +348,6 @@ def callback(message): def receive_messages_with_custom_attributes(project_id, subscription_id, timeout=None): """Receives messages from a pull subscription.""" - # [START pubsub_subscriber_sync_pull_custom_attributes] # [START pubsub_subscriber_async_pull_custom_attributes] from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 @@ -383,7 +382,6 @@ def callback(message): except TimeoutError: streaming_pull_future.cancel() # [END pubsub_subscriber_async_pull_custom_attributes] - # [END pubsub_subscriber_sync_pull_custom_attributes] def receive_messages_with_flow_control(project_id, subscription_id, timeout=None): From 1affdc14e7a991e884db7efa839097f6a3dc6e87 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 12 Jun 2020 16:00:13 -0700 Subject: [PATCH 0573/1197] Pub/Sub: wrap subscriber in a with block and add comments [(#4070)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4070) Use a `with` block to wrap subscriber and describe its purpose. Internal bug: b/157401623 --- .../samples/snippets/subscriber.py | 72 ++++++++++--------- 1 file changed, 40 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index a367f181c04b..f079e7d423f8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -52,10 +52,11 @@ def list_subscriptions_in_project(project_id): subscriber = pubsub_v1.SubscriberClient() project_path = subscriber.project_path(project_id) - for subscription in subscriber.list_subscriptions(project_path): - print(subscription.name) - - subscriber.close() + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. 
+ with subscriber: + for subscription in subscriber.list_subscriptions(project_path): + print(subscription.name) # [END pubsub_list_subscriptions] @@ -73,11 +74,12 @@ def create_subscription(project_id, topic_id, subscription_id): topic_path = subscriber.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - subscription = subscriber.create_subscription(subscription_path, topic_path) + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscription = subscriber.create_subscription(subscription_path, topic_path) print("Subscription created: {}".format(subscription)) - - subscriber.close() # [END pubsub_create_pull_subscription] @@ -146,14 +148,15 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) - subscription = subscriber.create_subscription( - subscription_path, topic_path, push_config - ) + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscription = subscriber.create_subscription( + subscription_path, topic_path, push_config + ) print("Push subscription created: {}".format(subscription)) print("Endpoint for subscription is: {}".format(endpoint)) - - subscriber.close() # [END pubsub_create_push_subscription] @@ -169,11 +172,12 @@ def delete_subscription(project_id, subscription_id): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - subscriber.delete_subscription(subscription_path) + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. 
+ with subscriber: + subscriber.delete_subscription(subscription_path) print("Subscription deleted: {}".format(subscription_path)) - - subscriber.close() # [END pubsub_delete_subscription] @@ -203,12 +207,13 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): update_mask = {"paths": {"push_config"}} - result = subscriber.update_subscription(subscription, update_mask) + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + result = subscriber.update_subscription(subscription, update_mask) print("Subscription updated: {}".format(subscription_path)) print("New endpoint for subscription is: {}".format(result.push_config)) - - subscriber.close() # [END pubsub_update_push_configuration] @@ -436,24 +441,25 @@ def synchronous_pull(project_id, subscription_id): NUM_MESSAGES = 3 - # The subscriber pulls a specific number of messages. - response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + # The subscriber pulls a specific number of messages. + response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) - ack_ids = [] - for received_message in response.received_messages: - print("Received: {}".format(received_message.message.data)) - ack_ids.append(received_message.ack_id) + ack_ids = [] + for received_message in response.received_messages: + print("Received: {}".format(received_message.message.data)) + ack_ids.append(received_message.ack_id) - # Acknowledges the received messages so they will not be sent again. - subscriber.acknowledge(subscription_path, ack_ids) + # Acknowledges the received messages so they will not be sent again. + subscriber.acknowledge(subscription_path, ack_ids) - print( - "Received and acknowledged {} messages. 
Done.".format( - len(response.received_messages) + print( + "Received and acknowledged {} messages. Done.".format( + len(response.received_messages) + ) ) - ) - - subscriber.close() # [END pubsub_subscriber_sync_pull] @@ -539,6 +545,8 @@ def worker(msg): ) ) + # Close the underlying gPRC channel. Alternatively, wrap subscriber in + # a 'with' block to automatically call close() when done. subscriber.close() # [END pubsub_subscriber_sync_pull_with_lease] From 1bc877874270412a92c5a130eb7a0d25252eda90 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 20 Jun 2020 06:08:08 +0200 Subject: [PATCH 0574/1197] Update dependency google-cloud-pubsub to v1.6.0 [(#4039)](https://github.com/GoogleCloudPlatform/python-docs-samples/issues/4039) This PR contains the following updates: | Package | Update | Change | |---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | minor | `==1.5.0` -> `==1.6.0` | --- ### Release Notes
googleapis/python-pubsub ### [`v1.6.0`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​160-httpswwwgithubcomgoogleapispython-pubsubcomparev150v160-2020-06-09) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v1.5.0...v1.6.0) ##### Features - Add flow control for message publishing ([#​96](https://www.github.com/googleapis/python-pubsub/issues/96)) ([06085c4](https://www.github.com/googleapis/python-pubsub/commit/06085c4083b9dccdd50383257799904510bbf3a0)) ##### Bug Fixes - Fix PubSub incompatibility with api-core 1.17.0+ ([#​103](https://www.github.com/googleapis/python-pubsub/issues/103)) ([c02060f](https://www.github.com/googleapis/python-pubsub/commit/c02060fbbe6e2ca4664bee08d2de10665d41dc0b)) ##### Documentation - Clarify that Schedulers shouldn't be used with multiple SubscriberClients ([#​100](https://togithub.com/googleapis/python-pubsub/pull/100)) ([cf9e87c](https://togithub.com/googleapis/python-pubsub/commit/cf9e87c80c0771f3fa6ef784a8d76cb760ad37ef)) - Fix update subscription/snapshot/topic samples ([#​113](https://togithub.com/googleapis/python-pubsub/pull/113)) ([e62c38b](https://togithub.com/googleapis/python-pubsub/commit/e62c38bb33de2434e32f866979de769382dea34a)) ##### Internal / Testing Changes - Re-generated service implementaton using synth: removed experimental notes from the RetryPolicy and filtering features in anticipation of GA, added DetachSubscription (experimental) ([#​114](https://togithub.com/googleapis/python-pubsub/pull/114)) ([0132a46](https://togithub.com/googleapis/python-pubsub/commit/0132a4680e0727ce45d5e27d98ffc9f3541a0962)) - Incorporate will_accept() checks into publish() ([#​108](https://togithub.com/googleapis/python-pubsub/pull/108)) ([6c7677e](https://togithub.com/googleapis/python-pubsub/commit/6c7677ecb259672bbb9b6f7646919e602c698570))
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Never, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#GoogleCloudPlatform/python-docs-samples). --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9cc17af84c35..9b496510abb5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.5.0 +google-cloud-pubsub==1.6.0 From b0adb802f1422d48c8258684ef83957d4999ce53 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 26 Jun 2020 09:20:30 +0200 Subject: [PATCH 0575/1197] chore: update templates --- .../google-cloud-pubsub/.github/CODEOWNERS | 11 + .../samples/AUTHORING_GUIDE.md | 1 + .../samples/CONTRIBUTING.md | 1 + .../samples/snippets/README.rst | 48 +++- .../samples/snippets/noxfile.py | 224 ++++++++++++++++++ packages/google-cloud-pubsub/synth.py | 11 +- 6 files changed, 292 insertions(+), 4 deletions(-) create mode 100644 packages/google-cloud-pubsub/.github/CODEOWNERS create mode 100644 packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md create mode 100644 packages/google-cloud-pubsub/samples/CONTRIBUTING.md create mode 100644 packages/google-cloud-pubsub/samples/snippets/noxfile.py diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS 
b/packages/google-cloud-pubsub/.github/CODEOWNERS new file mode 100644 index 000000000000..cf01548a9f04 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -0,0 +1,11 @@ +# Code owners file. +# This file controls who is tagged for review for any given pull request. +# +# For syntax help see: +# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax + + +# The python-samples-owners team is the default owner for anything not +# explicitly taken by someone else. + + /samples/ @anguillanneuf @hongalex @googleapis/python-samples-owners diff --git a/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md new file mode 100644 index 000000000000..55c97b32f4c1 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/CONTRIBUTING.md b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md new file mode 100644 index 000000000000..34c882b6f1a3 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index f27f9438ea96..2676680afdef 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Google Cloud Pub/Sub Python Samples @@ -14,10 +15,12 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` .. 
_Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -28,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -42,7 +48,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash @@ -58,9 +64,33 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + +Quickstart ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python quickstart.py + + + + Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -74,7 +104,8 @@ To run this sample: .. code-block:: bash - $ python publisher.py --help + $ python publisher.py + usage: publisher.py [-h] project_id @@ -111,6 +142,8 @@ To run this sample: + + Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -124,7 +157,8 @@ To run this sample: .. 
code-block:: bash - $ python subscriber.py --help + $ python subscriber.py + usage: subscriber.py [-h] project_id @@ -173,6 +207,8 @@ To run this sample: + + Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -188,6 +224,7 @@ To run this sample: $ python iam.py + usage: iam.py [-h] project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} @@ -222,6 +259,10 @@ To run this sample: + + + + The client library ------------------------------------------------------------------------------- @@ -237,4 +278,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues + .. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py new file mode 100644 index 000000000000..ba55d7ce53ca --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -0,0 +1,224 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import os +from pathlib import Path +import sys + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! 
+# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +# Copy `noxfile_config.py` to your directory and modify it instead. + + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + 'ignored_versions': ["2.7"], + + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + 'envs': {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append('.') + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars(): + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG['gcloud_project_env'] + # This should error out if not set. + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG['envs']) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] + +# Any default versions that should be ignored.
+IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +# +# Style Checks +# + + +def _determine_local_import_names(start_dir): + """Determines all import names that should be considered "local". + + This is used when running the linter to ensure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session): + session.install("flake8", "flake8-import-order") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + "."
+ ] + session.run("flake8", *args) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests(session, post_install=None): + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars() + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session): + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) + + +# +# Readmegen +# + + +def _get_repo_root(): + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session, path): + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index b44cc0acff57..0e2c96e42de2 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -18,6 +18,7 @@ import synthtool as s from synthtool import gcp +from synthtool.languages import python gapic = gcp.GAPICBazel() common = gcp.CommonTemplates() @@ -266,8 +267,16 @@ def _merge_dict(d1, d2): # Add templated files # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( - unit_cov_level=97, cov_level=99, system_test_external_dependencies=["psutil"], + unit_cov_level=97, + cov_level=99, + system_test_external_dependencies=["psutil"], + samples=True, ) s.move(templated_files) +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- +python.py_samples() + s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 4fcb62e96bbb3a1970cba80b1c05d0ba6e99d6e4 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 10 Jul 2020 14:52:53 +0200 Subject: [PATCH 0576/1197] chore(deps): update google-cloud-pubsub to v1.6.1 
This restores the change from 1cb6746b00 that got lost during the branch restoration. --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9b496510abb5..42ab449b1ba1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.6.0 +google-cloud-pubsub==1.6.1 From dfab2271815d3c6b7ba3309825d7a0b5a7f4b567 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 10 Jul 2020 15:58:22 +0200 Subject: [PATCH 0577/1197] Fix URLs, commands, etc. in samples README --- .../samples/snippets/README.rst | 54 +++++++---------- .../samples/snippets/README.rst.in | 8 +-- .../readme-gen/templates/README.tmpl.rst | 4 +- .../templates/install_deps.tmpl.rst | 4 +- 4 files changed, 33 insertions(+), 37 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 2676680afdef..40b2e21fc4b5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -1,11 +1,10 @@ - .. This file is automatically generated. Do not edit this file directly. Google Cloud Pub/Sub Python Samples =============================================================================== ..
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/README.rst This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. @@ -16,11 +15,13 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + + + Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -31,9 +32,6 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started - - - Install Dependencies ++++++++++++++++++++ @@ -41,7 +39,7 @@ Install Dependencies .. code-block:: bash - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + $ git clone https://github.com/googleapis/python-pubsub.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. @@ -64,20 +62,30 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ +Samples +------------------------------------------------------------------------------- +Quickstart (Publisher) ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. 
image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/quickstart/pub.py,samples/snippets/README.rst -Samples -------------------------------------------------------------------------------- +To run this sample: + +.. code-block:: bash -Quickstart + $ python quickstart/pub.py + + +Quickstart (Subscriber) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/quickstart.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/quickstart/sub.py,samples/snippets/README.rst @@ -86,16 +94,14 @@ To run this sample: .. code-block:: bash - $ python quickstart.py - - + $ python quickstart/sub.py Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/publisher.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/publisher.py,samples/snippets/README.rst @@ -106,7 +112,6 @@ To run this sample: $ python publisher.py - usage: publisher.py [-h] project_id {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} @@ -142,13 +147,11 @@ To run this sample: - - Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/subscriber.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/subscriber.py,samples/snippets/README.rst @@ -159,7 +162,6 @@ To run this sample: $ python subscriber.py - usage: subscriber.py [-h] project_id {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} @@ -207,13 +209,11 @@ To run this sample: - - Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=pubsub/cloud-client/iam.py,pubsub/cloud-client/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/iam.py,samples/snippets/README.rst @@ -224,7 +224,6 @@ To run this sample: $ python iam.py - usage: iam.py [-h] project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} @@ -259,10 +258,6 @@ To run this sample: - - - - The client library ------------------------------------------------------------------------------- @@ -278,5 +273,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst.in b/packages/google-cloud-pubsub/samples/snippets/README.rst.in index ddbc647121b2..b0e98cbebab1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst.in +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst.in @@ -13,8 +13,10 @@ setup: - install_deps samples: -- name: Quickstart - file: quickstart.py +- name: Quickstart (Publisher) + file: quickstart/pub.py +- name: Quickstart (Subscriber) + file: quickstart/sub.py - name: Publisher file: publisher.py show_help: true @@ -27,4 +29,4 @@ samples: cloud_client_library: true -folder: pubsub/cloud-client \ No newline at end of file +folder: samples/snippets \ No newline at end of file diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst index 4fd239765b0a..df252dd6ebe9 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst @@ -6,7 +6,7 @@ =============================================================================== .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-pubsub&page=editor&open_in_editor={{folder}}/README.rst This directory contains samples for {{product.name}}. {{product.description}} @@ -46,7 +46,7 @@ Samples {% if not sample.hide_cloudshell_button %} .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst {% endif %} diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba8c84..7a47efe21cff 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -5,14 +5,14 @@ Install Dependencies .. code-block:: bash - $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + $ git clone https://github.com/googleapis/python-pubsub.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. 
code-block:: bash From ba827bc80fe2700a7889455c1f7052dca26f54ab Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 13 Jul 2020 18:38:55 +0200 Subject: [PATCH 0578/1197] chore: release v1.7.0 (#154) --- packages/google-cloud-pubsub/CHANGELOG.md | 20 +++++++++++++++++++- packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 939bfd5fc0a1..e66a2e481618 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,7 +4,25 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history -### [1.6.1](https://www.github.com/googleapis/python-pubsub/compare/v1.6.0...v1.6.1) (2020-06-30) + +## [1.7.0](https://www.github.com/googleapis/python-pubsub/compare/v1.6.1...v1.7.0) (2020-07-13) + +### New Features + +- Add support for server-side flow control. ([#143](https://github.com/googleapis/python-pubsub/pull/143)) ([04e261c](https://www.github.com/googleapis/python-pubsub/commit/04e261c602a2919cc75b3efa3dab099fb2cf704c)) + +### Dependencies + +- Update samples dependency `google-cloud-pubsub` to `v1.6.1`. ([#144](https://github.com/googleapis/python-pubsub/pull/144)) ([1cb6746](https://github.com/googleapis/python-pubsub/commit/1cb6746b00ebb23dbf1663bae301b32c3fc65a88)) + +### Documentation + +- Add pubsub/cloud-client samples from the common samples repo (with commit history). ([#151](https://github.com/googleapis/python-pubsub/pull/151)) +- Add flow control section to publish overview. ([#129](https://github.com/googleapis/python-pubsub/pull/129)) ([acc19eb](https://www.github.com/googleapis/python-pubsub/commit/acc19eb048eef067d9818ef3e310b165d9c6307e)) +- Add a link to Pub/Sub filtering language public documentation to `pubsub.proto`. 
([#121](https://github.com/googleapis/python-pubsub/pull/121)) ([8802d81](https://www.github.com/googleapis/python-pubsub/commit/8802d8126247f22e26057e68a42f5b5a82dcbf0d)) + + +## [1.6.1](https://www.github.com/googleapis/python-pubsub/compare/v1.6.0...v1.6.1) (2020-06-30) ### Documentation diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 528bb66a2c97..3da2e269a738 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.6.1" +version = "1.7.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 76f2f2d31986099a475ef81dd89cc0ddcbc74b98 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 15 Jul 2020 00:43:09 +0200 Subject: [PATCH 0579/1197] chore(deps): update dependency google-cloud-pubsub to v1.7.0 (#155) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 42ab449b1ba1..7d01417f9bd5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.6.1 +google-cloud-pubsub==1.7.0 From 596af4edfa3806f66aa0d6e262d2aeedaedd3c62 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 14 Jul 2020 15:43:22 -0700 Subject: [PATCH 0580/1197] Revert "chore(deps): update dependency google-cloud-pubsub to v1.7.0 (#155)" This reverts commit 475debae9d821c9cb829817d4aca90cce1dd9275. 
--- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 7d01417f9bd5..42ab449b1ba1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.7.0 +google-cloud-pubsub==1.6.1 From 0f87a8af23888acaea1aecf3229e1af51013e917 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 15 Jul 2020 12:46:00 -0700 Subject: [PATCH 0581/1197] samples: nit remove extra white space (#159) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index f079e7d423f8..aeffb80d7bd1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -589,7 +589,7 @@ def callback(message): def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout=None): - # [START pubsub_dead_letter_delivery_attempt] + # [START pubsub_dead_letter_delivery_attempt] from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 @@ -616,7 +616,7 @@ def callback(message): streaming_pull_future.result(timeout=timeout) except TimeoutError: streaming_pull_future.cancel() - # [END pubsub_dead_letter_delivery_attempt] + # [END pubsub_dead_letter_delivery_attempt] if __name__ == "__main__": From 95594d7d41b00d29fbf7e9c59fae0c175f37e4a4 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 7 Aug 2020 10:44:30 -0700 Subject: [PATCH 0582/1197] samples: publish with ordering keys (#156) * samples: add samples for publish with ordering keys * update readme * add regional endpoint * remove 
extra white space * fix function signature in test --- .../samples/snippets/README.rst | 31 +++-- .../samples/snippets/publisher.py | 110 +++++++++++++++++- .../samples/snippets/publisher_test.py | 14 +++ .../samples/snippets/subscriber.py | 32 +++++ .../samples/snippets/subscriber_test.py | 11 ++ 5 files changed, 179 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 40b2e21fc4b5..2c67c2c11302 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -110,11 +110,11 @@ To run this sample: .. code-block:: bash - $ python publisher.py + $ python publisher.py --help usage: publisher.py [-h] project_id - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys} ... This application demonstrates how to perform basic operations on topics @@ -125,7 +125,7 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys} list Lists all Pub/Sub topics in the given project. create Create a new Pub/Sub topic. delete Deletes an existing Pub/Sub topic. @@ -141,6 +141,11 @@ To run this sample: batch settings. publish-with-retry-settings Publishes messages with custom retry settings. 
+ publish-with-ordering-keys + Publishes messages with ordering keys. + resume-publish-with-ordering-keys + Resume publishing messages with ordering keys when + unrecoverable errors occur. optional arguments: -h, --help show this help message and exit @@ -160,11 +165,11 @@ To run this sample: .. code-block:: bash - $ python subscriber.py + $ python subscriber.py --help usage: subscriber.py [-h] project_id - {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,create-with-ordering,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} ... This application demonstrates how to perform basic operations on @@ -175,13 +180,15 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} + {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,create-with-ordering,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} list-in-topic Lists all subscriptions for a given topic. 
list-in-project Lists all subscriptions in the current project. create Create a new pull subscription on the given topic. create-with-dead-letter-policy Create a subscription with dead letter policy. create-push Create a new push subscription on the given topic. + create-with-ordering + Create a subscription with dead letter policy. delete Deletes an existing Pub/Sub topic. update-push Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a @@ -208,7 +215,6 @@ To run this sample: -h, --help show this help message and exit - Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -221,20 +227,15 @@ Identity and Access Management To run this sample: .. code-block:: bash - $ python iam.py - usage: iam.py [-h] project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... - This application demonstrates how to perform basic operations on IAM policies with the Cloud Pub/Sub API. - For more information, see the README.md under /pubsub and the documentation at https://cloud.google.com/pubsub/docs. - positional arguments: project Your Google Cloud project ID {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} @@ -250,14 +251,10 @@ To run this sample: check-subscription-permissions Checks to which permissions are available on the given subscription. - optional arguments: -h, --help show this help message and exit - - - The client library ------------------------------------------------------------------------------- @@ -273,4 +270,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues -.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 477b31b9cf71..399d37679196 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -273,6 +273,94 @@ def publish_messages_with_retry_settings(project_id, topic_id): # [END pubsub_publisher_retry_settings] +def publish_with_ordering_keys(project_id, topic_id): + """Publishes messages with ordering keys.""" + # [START pubsub_publish_with_ordering_keys] + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing topic. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher_options = pubsub_v1.types.PublisherOptions( + enable_message_ordering=True + ) + # Sending messages to the same region ensures they are received in order + # even when multiple publishers are used. + client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} + publisher = pubsub_v1.PublisherClient( + publisher_options=publisher_options, + client_options=client_options + ) + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(project_id, topic_id) + + for message in [ + ("message1", "key1"), + ("message2", "key2"), + ("message3", "key1"), + ("message4", "key2"), + ]: + # Data must be a bytestring + data = message[0].encode("utf-8") + ordering_key = message[1] + # When you publish a message, the client returns a future. 
+ future = publisher.publish( + topic_path, data=data, ordering_key=ordering_key + ) + print(future.result()) + + print("Published messages with ordering keys.") + # [END pubsub_publish_with_ordering_keys] + + +def resume_publish_with_ordering_keys(project_id, topic_id): + """Resume publishing messages with ordering keys when unrecoverable errors occur.""" + # [START pubsub_resume_publish_with_ordering_keys] + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing topic. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher_options = pubsub_v1.types.PublisherOptions( + enable_message_ordering=True + ) + # Sending messages to the same region ensures they are received in order + # even when multiple publishers are used. + client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} + publisher = pubsub_v1.PublisherClient( + publisher_options=publisher_options, + client_options=client_options + ) + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(project_id, topic_id) + + for message in [ + ("message1", "key1"), + ("message2", "key2"), + ("message3", "key1"), + ("message4", "key2"), + ]: + # Data must be a bytestring + data = message[0].encode("utf-8") + ordering_key = message[1] + # When you publish a message, the client returns a future. + future = publisher.publish( + topic_path, data=data, ordering_key=ordering_key + ) + try: + print(future.result()) + except RuntimeError: + # Resume publish on an ordering key that has had unrecoverable errors. 
+ publisher.resume_publish(topic_path, ordering_key) + + print("Published messages with ordering keys.") + # [END pubsub_resume_publish_with_ordering_keys] + + if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, @@ -288,7 +376,9 @@ def publish_messages_with_retry_settings(project_id, topic_id): delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_id") - publish_parser = subparsers.add_parser("publish", help=publish_messages.__doc__) + publish_parser = subparsers.add_parser( + "publish", help=publish_messages.__doc__ + ) publish_parser.add_argument("topic_id") publish_with_custom_attributes_parser = subparsers.add_parser( @@ -298,7 +388,8 @@ def publish_messages_with_retry_settings(project_id, topic_id): publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", + help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_id") @@ -314,6 +405,17 @@ def publish_messages_with_retry_settings(project_id, topic_id): ) publish_with_retry_settings_parser.add_argument("topic_id") + publish_with_ordering_keys_parser = subparsers.add_parser( + "publish-with-ordering-keys", help=publish_with_ordering_keys.__doc__, + ) + publish_with_ordering_keys_parser.add_argument("topic_id") + + resume_publish_with_ordering_keys_parser = subparsers.add_parser( + "resume-publish-with-ordering-keys", + help=resume_publish_with_ordering_keys.__doc__, + ) + resume_publish_with_ordering_keys_parser.add_argument("topic_id") + args = parser.parse_args() if args.command == "list": @@ -332,3 +434,7 @@ def publish_messages_with_retry_settings(project_id, topic_id): publish_messages_with_batch_settings(args.project_id, args.topic_id) elif 
args.command == "publish-with-retry-settings": publish_messages_with_retry_settings(args.project_id, args.topic_id) + elif args.command == "publish-with-ordering-keys": + publish_with_ordering_keys(args.project_id, args.topic_id) + elif args.command == "resume-publish-with-ordering-keys": + resume_publish_with_ordering_keys(args.project_id, args.topic_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index b5c2ea1ea4b5..95fda846a95c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -144,3 +144,17 @@ def test_publish_with_error_handler(topic_publish, capsys): out, _ = capsys.readouterr() assert "Published" in out + + +def test_publish_with_ordering_keys(topic_publish, capsys): + publisher.publish_with_ordering_keys(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published messages with ordering keys." in out + + +def test_resume_publish_with_error_handler(topic_publish, capsys): + publisher.resume_publish_with_ordering_keys(PROJECT, TOPIC_PUBLISH) + + out, _ = capsys.readouterr() + assert "Published messages with ordering keys." in out diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index aeffb80d7bd1..94e1c5cd48b2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -160,6 +160,28 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): # [END pubsub_create_push_subscription] +def create_subscription_with_ordering(project_id, topic_id, subscription_id): + """Create a subscription with dead letter policy.""" + # [START pubsub_enable_subscription_ordering] + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing topic. 
+ # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + + subscriber = pubsub_v1.SubscriberClient() + topic_path = subscriber.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + with subscriber: + subscription = subscriber.create_subscription( + subscription_path, topic_path, enable_message_ordering=True + ) + print("Created subscription with ordering: {}".format(subscription)) + # [END pubsub_enable_subscription_ordering] + + def delete_subscription(project_id, subscription_id): """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] @@ -654,6 +676,12 @@ def callback(message): create_push_parser.add_argument("subscription_id") create_push_parser.add_argument("endpoint") + create_subscription_with_ordering_parser = subparsers.add_parser( + "create-with-ordering", help=create_subscription_with_ordering.__doc__ + ) + create_subscription_with_ordering_parser.add_argument("topic_id") + create_subscription_with_ordering_parser.add_argument("subscription_id") + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -746,6 +774,10 @@ def callback(message): create_push_subscription( args.project_id, args.topic_id, args.subscription_id, args.endpoint, ) + elif args.command == "create-with-ordering": + create_subscription_with_ordering( + args.project_id, args.topic_id, args.subscription_id + ) elif args.command == "delete": delete_subscription(args.project_id, args.subscription_id) elif args.command == "update-push": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index a7f7c139c258..62018e9a937f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -29,6 +29,7 @@ 
SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID +SUBSCRIPTION_ORDERING = "subscription-test-subscription-ordering-" + UUID ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) @@ -209,6 +210,16 @@ def eventually_consistent_test(): eventually_consistent_test() +def test_create_subscription_with_ordering(subscriber_client, capsys): + subscriber.create_subscription_with_ordering(PROJECT, TOPIC, SUBSCRIPTION_ORDERING) + out, _ = capsys.readouterr() + assert "Created subscription with ordering" in out + assert "enable_message_ordering: true" in out + + subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ORDERING) + subscriber_client.delete_subscription(subscription_path) + + def test_update(subscriber_client, subscription_admin, capsys): subscriber.update_push_subscription( PROJECT, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT From 5911ca1b638862054f2bbd34ec786eba8a5e0502 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 7 Sep 2020 15:43:51 +0200 Subject: [PATCH 0583/1197] chore: transition the library to the new microgenerator (#158) * chore: regenerate the library with microgenerator * Update setup.py for Python 3.6+ * Fix GAPIC types imports * Inject SERVICE_ADDRESS and _DEFAULT_SCOPES to generated classes * Cleanup types.py * Adjust determining message size to new protobufs * Adjust Message.publish_time property to new protobuf * Remove client_config argument to publisher client The argument is not supported anymore in the new generated client. * Remove subscriber client config This configuration is not anymore supported by the generated subscriber client. 
* Fix method patch in gapic instance method test * Fix transport-related test failures * Adjust calls to changed client.publish() signature * Remove obsolete replacement rules from synth.py * Fix streaming pull monkeypatch (no prefetch) * Update supported Python versions in README * Adjust system tests to new method signatures * Adjust method calls in samples to new client * Fix tests creating subscription with custom settings * Regenerate library with latest synthtool * Adjust IAM tests to new IAM method signatures * Fix docstring format in generated *_iam_policy() methods * Iterate directly over topic subscriptions response A recent update in the code generator made list topic subscriptions responses iterable again. * Copy default publish Retry to a class variable * Adjust retry deadline if publish with ordering key * Add suport for custom publish retry settings * Add UPGRADING guide to docs * Mention default retry settings in sample comments * Remove synth URL changes from templates * Adjust the ordering key sample to new code * Adjust client to changes in generated code * Regenerate the code to pick the latest changes * Bump dependencies to latest versions * Optimize creating and accesing pubsub messages Profiling shows that the speed of creating a new pubsub message and the speed of accessing the message's attributes significantly affects the throughput of publisher and subscriber. This commit makes everything faster by circumventing the wrapper class around the raw protobuf pubsub messages where possible. 
* Regenerate the code with the latest changes --- .../__init__.py => .github/snippet-bot.yml} | 0 packages/google-cloud-pubsub/.gitignore | 3 +- packages/google-cloud-pubsub/.kokoro/build.sh | 8 +- .../.kokoro/docker/docs/Dockerfile | 98 + .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 + .../.kokoro/docs/common.cfg | 21 +- .../.kokoro/docs/docs-presubmit.cfg | 17 + .../.kokoro/publish-docs.sh | 39 +- .../.kokoro/trampoline_v2.sh | 487 ++ packages/google-cloud-pubsub/.trampolinerc | 51 + packages/google-cloud-pubsub/README.rst | 6 +- packages/google-cloud-pubsub/UPGRADING.md | 160 + .../google-cloud-pubsub/docs/UPGRADING.md | 1 + packages/google-cloud-pubsub/docs/conf.py | 11 +- packages/google-cloud-pubsub/docs/index.rst | 27 +- .../docs/publisher/api/client.rst | 4 +- .../docs/subscriber/api/client.rst | 4 +- .../cloud/pubsub_v1/gapic/publisher_client.py | 1292 ---- .../gapic/publisher_client_config.py | 111 - .../pubsub_v1/gapic/subscriber_client.py | 2006 ------- .../gapic/subscriber_client_config.py | 144 - .../pubsub_v1/gapic/transports/__init__.py | 0 .../transports/publisher_grpc_transport.py | 296 - .../transports/subscriber_grpc_transport.py | 454 -- .../google/cloud/pubsub_v1/proto/__init__.py | 0 .../google/cloud/pubsub_v1/proto/pubsub.proto | 118 +- .../cloud/pubsub_v1/proto/pubsub_pb2.py | 5246 ----------------- .../cloud/pubsub_v1/proto/pubsub_pb2_grpc.py | 1284 ---- .../pubsub_v1/publisher/_batch/thread.py | 39 +- .../pubsub_v1/publisher/_sequencer/base.py | 7 +- .../publisher/_sequencer/ordered_sequencer.py | 19 +- .../_sequencer/unordered_sequencer.py | 20 +- .../cloud/pubsub_v1/publisher/client.py | 73 +- .../pubsub_v1/publisher/flow_controller.py | 16 +- .../subscriber/_protocol/dispatcher.py | 6 +- .../_protocol/streaming_pull_manager.py | 19 +- .../cloud/pubsub_v1/subscriber/client.py | 8 +- .../cloud/pubsub_v1/subscriber/message.py | 34 +- .../google/cloud/pubsub_v1/types.py | 166 +- .../google/pubsub/__init__.py | 114 + 
.../google/pubsub/py.typed | 2 + .../google/pubsub_v1/__init__.py | 111 + .../google/pubsub_v1/py.typed | 2 + .../google/pubsub_v1/services/__init__.py | 16 + .../pubsub_v1/services/publisher/__init__.py} | 24 +- .../services/publisher/async_client.py | 1099 ++++ .../pubsub_v1/services/publisher/client.py | 1205 ++++ .../pubsub_v1/services/publisher/pagers.py | 404 ++ .../services/publisher/transports/__init__.py | 36 + .../services/publisher/transports/base.py | 360 ++ .../services/publisher/transports/grpc.py | 547 ++ .../publisher/transports/grpc_asyncio.py | 548 ++ .../pubsub_v1/services/subscriber/__init__.py | 24 + .../services/subscriber/async_client.py | 1826 ++++++ .../pubsub_v1/services/subscriber/client.py | 1910 ++++++ .../pubsub_v1/services/subscriber/pagers.py | 276 + .../subscriber/transports/__init__.py | 36 + .../services/subscriber/transports/base.py | 507 ++ .../services/subscriber/transports/grpc.py | 803 +++ .../subscriber/transports/grpc_asyncio.py | 809 +++ .../google/pubsub_v1/types/__init__.py | 109 + .../google/pubsub_v1/types/pubsub.py | 1299 ++++ packages/google-cloud-pubsub/mypy.ini | 3 + packages/google-cloud-pubsub/noxfile.py | 43 +- .../samples/snippets/README.rst | 44 +- .../samples/snippets/iam.py | 20 +- .../samples/snippets/iam_test.py | 20 +- .../samples/snippets/publisher.py | 82 +- .../samples/snippets/publisher_test.py | 16 +- .../samples/snippets/quickstart/pub.py | 2 +- .../samples/snippets/quickstart/pub_test.py | 4 +- .../samples/snippets/quickstart/sub_test.py | 10 +- .../samples/snippets/subscriber.py | 98 +- .../samples/snippets/subscriber_test.py | 76 +- .../scripts/fixup_pubsub_v1_keywords.py | 206 + packages/google-cloud-pubsub/setup.py | 21 +- packages/google-cloud-pubsub/synth.metadata | 18 +- packages/google-cloud-pubsub/synth.py | 235 +- packages/google-cloud-pubsub/tests/system.py | 284 +- .../tests/unit/gapic/pubsub_v1/__init__.py | 1 + .../unit/gapic/pubsub_v1/test_publisher.py | 3106 ++++++++++ 
.../unit/gapic/pubsub_v1/test_subscriber.py | 4394 ++++++++++++++ .../unit/gapic/v1/test_publisher_client_v1.py | 560 -- .../gapic/v1/test_subscriber_client_v1.py | 892 --- .../pubsub_v1/publisher/batch/test_base.py | 3 +- .../pubsub_v1/publisher/batch/test_thread.py | 110 +- .../sequencer/test_ordered_sequencer.py | 16 +- .../sequencer/test_unordered_sequencer.py | 15 +- .../publisher/test_flow_controller.py | 59 +- .../publisher/test_publisher_client.py | 122 +- .../pubsub_v1/subscriber/test_dispatcher.py | 14 +- .../unit/pubsub_v1/subscriber/test_message.py | 23 +- .../subscriber/test_messages_on_hold.py | 6 +- .../subscriber/test_streaming_pull_manager.py | 84 +- .../subscriber/test_subscriber_client.py | 44 +- 95 files changed, 21791 insertions(+), 13247 deletions(-) rename packages/google-cloud-pubsub/{google/cloud/pubsub_v1/gapic/__init__.py => .github/snippet-bot.yml} (100%) create mode 100644 packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile create mode 100755 packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg create mode 100755 packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh create mode 100644 packages/google-cloud-pubsub/.trampolinerc create mode 100644 packages/google-cloud-pubsub/UPGRADING.md create mode 120000 packages/google-cloud-pubsub/docs/UPGRADING.md delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/__init__.py delete mode 100644 
packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py create mode 100644 packages/google-cloud-pubsub/google/pubsub/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub/py.typed create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/py.typed create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py rename packages/google-cloud-pubsub/{tests/system/gapic/v1/test_system_publisher_v1.py => google/pubsub_v1/services/publisher/__init__.py} (57%) create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py create mode 100644 
packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py create mode 100644 packages/google-cloud-pubsub/mypy.ini create mode 100644 packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/__init__.py b/packages/google-cloud-pubsub/.github/snippet-bot.yml similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/__init__.py rename to packages/google-cloud-pubsub/.github/snippet-bot.yml diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore index b87e1ed580d9..b9daa52f118d 100644 --- a/packages/google-cloud-pubsub/.gitignore +++ b/packages/google-cloud-pubsub/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated 
+docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index 6a68ebd105f1..95bc0a438942 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile new file mode 100644 index 000000000000..412b0b56a921 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. 
+RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. +RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. 
+ --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 000000000000..d653dd868e4b --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? 
-ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Łukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg index b602fa54258d..7815c2d6abb2 100644 --- a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2-staging" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. 
+env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 000000000000..1118107829b7 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index f462c727b504..8acb14e802b0 100755 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 -cd github/python-pubsub - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. 
+python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh new file mode 100755 index 000000000000..719bcd5ba84d --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh @@ -0,0 +1,487 @@ +#!/usr/bin/env bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# trampoline_v2.sh +# +# This script does 3 things. +# +# 1. Prepare the Docker image for the test +# 2. Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. 
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. 
+function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! 
-f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + 
"CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." +cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." 
+for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. 
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + log_yellow "Dumping the build logs:" + cat "${tmpdir}/docker_build.log" + exit 1 + fi + else + if docker build "${docker_build_flags[@]}" "${context_dir}"; then + log_green "Finished building the docker image." + update_cache="true" + else + log_red "Failed to build the Docker image, aborting." + exit 1 + fi + fi +else + if [[ "${has_image}" != "true" ]]; then + log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." + exit 1 + fi +fi + +# We use an array for the flags so they are easier to document. +docker_flags=( + # Remove the container after it exists. + "--rm" + + # Use the host network. + "--network=host" + + # Run in priviledged mode. We are not using docker for sandboxing or + # isolation, just for packaging our dev tools. + "--privileged" + + # Run the docker script with the user id. Because the docker image gets to + # write in ${PWD} you typically want this to be your user id. + # To allow docker in docker, we need to use docker gid on the host. 
+ "--user" "${user_uid}:${docker_gid}" + + # Pass down the USER. + "--env" "USER=${user_name}" + + # Mount the project directory inside the Docker container. + "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" + "--workdir" "${TRAMPOLINE_WORKSPACE}" + "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" + + # Mount the temporary home directory. + "--volume" "${tmphome}:/h" + "--env" "HOME=/h" + + # Allow docker in docker. + "--volume" "/var/run/docker.sock:/var/run/docker.sock" + + # Mount the /tmp so that docker in docker can mount the files + # there correctly. + "--volume" "/tmp:/tmp" + # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR + # TODO(tmatsuo): This part is not portable. + "--env" "TRAMPOLINE_SECRET_DIR=/secrets" + "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" + "--env" "KOKORO_GFILE_DIR=/secrets/gfile" + "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" + "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" +) + +# Add an option for nicer output if the build gets a tty. +if [[ -t 0 ]]; then + docker_flags+=("-it") +fi + +# Passing down env vars +for e in "${pass_down_envvars[@]}" +do + if [[ -n "${!e:-}" ]]; then + docker_flags+=("--env" "${e}=${!e}") + fi +done + +# If arguments are given, all arguments will become the commands run +# in the container, otherwise run TRAMPOLINE_BUILD_FILE. +if [[ $# -ge 1 ]]; then + log_yellow "Running the given commands '" "${@:1}" "' in the container." + readonly commands=("${@:1}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" +else + log_yellow "Running the tests in a Docker container." 
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" + fi + docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" +fi + + +test_retval=$? + +if [[ ${test_retval} -eq 0 ]]; then + log_green "Build finished with ${test_retval}" +else + log_red "Build finished with ${test_retval}" +fi + +# Only upload it when the test passes. +if [[ "${update_cache}" == "true" ]] && \ + [[ $test_retval == 0 ]] && \ + [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then + log_yellow "Uploading the Docker image." + if docker push "${TRAMPOLINE_IMAGE}"; then + log_green "Finished uploading the Docker image." + else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/packages/google-cloud-pubsub/.trampolinerc b/packages/google-cloud-pubsub/.trampolinerc new file mode 100644 index 000000000000..995ee29111e1 --- /dev/null +++ b/packages/google-cloud-pubsub/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. 
+pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index a92a43087052..926e51f1e2bd 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -60,11 +60,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.6 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python == 2.7. + +The last version of this library compatible with Python 2.7 is google-cloud-pubsub==1.7.0. Mac/Linux diff --git a/packages/google-cloud-pubsub/UPGRADING.md b/packages/google-cloud-pubsub/UPGRADING.md new file mode 100644 index 000000000000..9ab5d073a836 --- /dev/null +++ b/packages/google-cloud-pubsub/UPGRADING.md @@ -0,0 +1,160 @@ +# 2.0.0 Migration Guide + +The 2.0 release of the `google-cloud-pubsub` client is a significant upgrade based +on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), +and includes substantial interface changes. Existing code written for earlier versions +of this library will likely require updates to use this version. This document +describes the changes that have been made, and what you need to do to update your usage. 
+ +If you experience issues or have questions, please file an +[issue](https://github.com/googleapis/python-pubsub/issues). + + +## Supported Python Versions + +> **WARNING**: Breaking change + +The 2.0.0 release requires Python 3.6+. + + +## Method Calls + +> **WARNING**: Breaking change + +Almost all methods that send requests to the backend expect request objects. We +provide a script that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install google-cloud-pubsub +``` + +* The script `fixup_pubsub_v1_keywords.py` is shipped with the library. It expects +an input directory (with the code to convert) and an empty destination directory. + +```sh +$ scripts/fixup_pubsub_v1_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from google.cloud import pubsub + +publisher = pubsub.PublisherClient() + +project_path = "projects/{}".format(PROJECT_ID) +topics = publisher.list_topics(project_path) +``` + + +**After:** +```py +from google.cloud import pubsub + +publisher = pubsub.PublisherClient() + +project_path = f"projects/{PROJECT_ID}" +topics = publisher.list_topics(request={"project": project_path}) +``` + +### More Details + +In `google-cloud-pubsub<2.0.0`, parameters required by the API were positional +parameters and optional parameters were keyword parameters. + +**Before:** +```py + def list_topics( + self, + project, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): +``` + +In the 2.0.0 release, almost all methods that interact with the backend have a single +positional parameter `request`. Method docstrings indicate whether a parameter is +required or optional. + +> **NOTE:** The exception are hand written methods such as `publisher.publish()` and +> `subscriber.subscribe()` that implement additional logic (e.g. 
request batching) and +> sit on top of the API methods from the generated parts of the library. The signatures +> of these methods have in large part been preserved. + +Some methods have additional keyword only parameters. The available parameters depend +on the [`google.api.method_signature` annotation](https://github.com/googleapis/python-pubsub/blob/master/google/cloud/pubsub_v1/proto/pubsub.proto#L88) +specified by the API producer. + + +**After:** +```py + def list_topics( + self, + request: pubsub.ListTopicsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsPager: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are +> mutually exclusive. Passing both will result in an error. + + +Both of these calls are valid: + +```py +response = client.list_topics( + request={ + "project": project_path, + "metadata": [("foo", "bar"), ("baz", "quux")], + } +) +``` + +```py +response = client.list_topics( + project=project_path, + metadata=[("foo", "bar"), ("baz", "quux")], +) +``` + +This call is invalid because it mixes `request` with a keyword argument `metadata`. +Executing this code will result in an error: + +```py +response = client.list_topics( + request={"project": project_path}, + metadata=[("foo", "bar"), ("baz", "quux")], +) +``` + +> **NOTE:** The `request` parameter of some methods can also contain a richer set of +> options that are otherwise not available as explicit keyword only parameters, thus +> these _must_ be passed through `request`. + + +## Removed Utility Methods + +> **WARNING**: Breaking change + +Some utility methods such as publisher client's `subscription_path()` have been removed +and now only exist in the relevant client, e.g. `subscriber.subscription_path()`.
+ +The `project_path()` method has been removed from both the publisher and subscriber +client; this path must now be constructed manually: +```py +project_path = f"projects/{PROJECT_ID}" +``` + +## Removed `client_config` Parameter + +The publisher and subscriber clients cannot be constructed with `client_config` +argument anymore. If you want to customize retry and timeout settings for a particular +method, you need to do it upon method invocation by passing the custom `timeout` and +`retry` arguments, respectively. diff --git a/packages/google-cloud-pubsub/docs/UPGRADING.md b/packages/google-cloud-pubsub/docs/UPGRADING.md new file mode 120000 index 000000000000..01097c8c0fb8 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index fb9f1ca32a08..7bd17033d3d5 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -20,6 +20,10 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ @@ -90,7 +94,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] + # The reST default role (used for this markup: `text`) to use for all # documents.
diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst index ae3053625489..06b09605f157 100644 --- a/packages/google-cloud-pubsub/docs/index.rst +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -2,15 +2,34 @@ .. include:: multiprocessing.rst + API Documentation ----------------- +.. note:: + + The client library version (currently ``2.x``) should not be confused with the + backend API version (currently ``v1``), hence some references to ``v1`` can be found + across the documentation. + +.. toctree:: + :maxdepth: 3 + + Publisher Client + Subscriber Client + Types + + +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + .. toctree:: - :maxdepth: 3 + :maxdepth: 2 + + UPGRADING - publisher/index - subscriber/index - types Changelog --------- diff --git a/packages/google-cloud-pubsub/docs/publisher/api/client.rst b/packages/google-cloud-pubsub/docs/publisher/api/client.rst index 47a3aa3d5d7a..d1a54ff5e380 100644 --- a/packages/google-cloud-pubsub/docs/publisher/api/client.rst +++ b/packages/google-cloud-pubsub/docs/publisher/api/client.rst @@ -1,5 +1,5 @@ -Publisher Client API -==================== +Publisher Client API (v1) +========================= .. automodule:: google.cloud.pubsub_v1.publisher.client :members: diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/client.rst b/packages/google-cloud-pubsub/docs/subscriber/api/client.rst index 965880c5a640..d26243eba820 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/api/client.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/api/client.rst @@ -1,5 +1,5 @@ -Subscriber Client API -===================== +Subscriber Client API (v1) +========================== .. 
automodule:: google.cloud.pubsub_v1.subscriber.client :members: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py deleted file mode 100644 index e8853d841841..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py +++ /dev/null @@ -1,1292 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.pubsub.v1 Publisher API.""" - -import collections -from copy import deepcopy -import functools -import pkg_resources -import six -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.path_template -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.pubsub_v1.gapic import publisher_client_config -from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import iam_policy_pb2_grpc -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version - - -# TODO: remove conditional import after Python 2 support is dropped -if six.PY2: - from collections import Mapping -else: - from collections.abc import Mapping - - -def _merge_dict(d1, d2): - # Modifies d1 in-place to take values from d2 - # if the nested keys from d2 are present in d1. - # https://stackoverflow.com/a/10704003/4488789 - for k, v2 in d2.items(): - v1 = d1.get(k) # returns None if v1 has no such key - if v1 is None: - raise Exception("{} is not recognized by client_config".format(k)) - if isinstance(v1, Mapping) and isinstance(v2, Mapping): - _merge_dict(v1, v2) - else: - d1[k] = v2 - return d1 - - -class PublisherClient(object): - """ - The service that an application uses to manipulate topics, and to send - messages to a topic. 
- """ - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - """The default address of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.pubsub.v1.Publisher" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. 
- - Args: - transport (Union[~.PublisherGrpcTransport, - Callable[[~.Credentials, type], ~.PublisherGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. 
- """ - default_client_config = deepcopy(publisher_client_config.config) - - if client_config is None: - client_config = default_client_config - else: - client_config = _merge_dict(default_client_config, client_config) - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=publisher_grpc_transport.PublisherGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = publisher_grpc_transport.PublisherGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. 
- # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_topic( - self, - name, - labels=None, - message_storage_policy=None, - kms_key_name=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates the given topic with the given name. See the resource name - rules. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> name = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.create_topic(name) - - Args: - name (str): Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must start with a - letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), - dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be between 3 and 255 - characters in length, and it must not start with ``"goog"``. - labels (dict[str -> str]): See Creating and - managing labels. - message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining the set of Google Cloud Platform regions where messages - published to the topic may be stored. If not present, then no constraints - are in effect. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy` - kms_key_name (str): The resource name of the Cloud KMS CryptoKey to be used to protect - access to messages published on this topic. - - The expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "create_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_topic, - default_retry=self._method_configs["CreateTopic"].retry, - default_timeout=self._method_configs["CreateTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.Topic( - name=name, - labels=labels, - message_storage_policy=message_storage_policy, - kms_key_name=kms_key_name, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_topic( - self, - topic, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing topic. Note that certain properties of a - topic are not modifiable. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic_name = 'projects/my-project/topics/my-topic' - >>> topic_labels = {'source': 'external'} - >>> topic = {'name': topic_name, 'labels': topic_labels} - >>> - >>> paths_element = 'labels' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_topic(topic, update_mask) - - Args: - topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): Required. The updated topic object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Topic` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided topic to update. - Must be specified and non-empty. Note that if ``update_mask`` contains - "message_storage_policy" but the ``message_storage_policy`` is not set - in the ``topic`` provided above, then the updated value is determined by - the policy configured at the project or organization level. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. 
- ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "update_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_topic, - default_retry=self._method_configs["UpdateTopic"].retry, - default_timeout=self._method_configs["UpdateTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateTopicRequest(topic=topic, update_mask=update_mask,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic.name", topic.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def publish( - self, - topic, - messages, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the - topic does not exist. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> data = b'' - >>> messages_element = {'data': data} - >>> messages = [messages_element] - >>> - >>> response = client.publish(topic, messages) - - Args: - topic (str): Required. The messages in the request will be published on this - topic. Format is ``projects/{project}/topics/{topic}``. - messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): Required. The messages to publish. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PubsubMessage` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.PublishResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "publish" not in self._inner_api_calls: - self._inner_api_calls[ - "publish" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.publish, - default_retry=self._method_configs["Publish"].retry, - default_timeout=self._method_configs["Publish"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.PublishRequest(topic=topic, messages=messages,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["publish"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_topic( - self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration of a topic. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.get_topic(topic) - - Args: - topic (str): Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Topic` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "get_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_topic, - default_retry=self._method_configs["GetTopic"].retry, - default_timeout=self._method_configs["GetTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetTopicRequest(topic=topic,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_topics( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists matching topics. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topics(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topics(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list topics. Format is - ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_topics" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topics" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topics, - default_retry=self._method_configs["ListTopics"].retry, - default_timeout=self._method_configs["ListTopics"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicsRequest(project=project, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topics"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="topics", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def 
list_topic_subscriptions( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the names of the attached subscriptions on this topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_subscriptions(topic): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topic_subscriptions(topic).pages: - ... for element in page: - ... # process element - ... pass - - Args: - topic (str): Required. The name of the topic that subscriptions are attached to. - Format is ``projects/{project}/topics/{topic}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_topic_subscriptions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topic_subscriptions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_subscriptions, - default_retry=self._method_configs["ListTopicSubscriptions"].retry, - default_timeout=self._method_configs["ListTopicSubscriptions"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicSubscriptionsRequest( - topic=topic, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topic_subscriptions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="subscriptions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def list_topic_snapshots( - self, - topic, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> # Iterate over all results - >>> for element in client.list_topic_snapshots(topic): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_topic_snapshots(topic).pages: - ... for element in page: - ... # process element - ... pass - - Args: - topic (str): Required. The name of the topic that snapshots are attached to. - Format is ``projects/{project}/topics/{topic}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`str` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_topic_snapshots" not in self._inner_api_calls: - self._inner_api_calls[ - "list_topic_snapshots" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_topic_snapshots, - default_retry=self._method_configs["ListTopicSnapshots"].retry, - default_timeout=self._method_configs["ListTopicSnapshots"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListTopicSnapshotsRequest( - topic=topic, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_topic_snapshots"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="snapshots", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_topic( - self, - topic, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the - topic does not exist. After a topic is deleted, a new topic may be - created with the same name; this is an entirely new topic with none of - the old configuration or subscriptions. Existing subscriptions to this - topic are not deleted, but their ``topic`` field is set to - ``_deleted-topic_``. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> client.delete_topic(topic) - - Args: - topic (str): Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_topic" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_topic" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_topic, - default_retry=self._method_configs["DeleteTopic"].retry, - default_timeout=self._method_configs["DeleteTopic"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteTopicRequest(topic=topic,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("topic", topic)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_topic"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. 
- - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def detach_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Detaches a subscription from this topic. All messages retained in - the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.PublisherClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.detach_subscription(subscription) - - Args: - subscription (str): Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.DetachSubscriptionResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "detach_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "detach_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.detach_subscription, - default_retry=self._method_configs["DetachSubscription"].retry, - default_timeout=self._method_configs["DetachSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DetachSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["detach_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py deleted file mode 100644 index 8c96fd10b066..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/publisher_client_config.py +++ /dev/null @@ -1,111 +0,0 @@ -config = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], - 
"non_idempotent2": [], - "non_idempotent": ["UNAVAILABLE"], - "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "none": [], - "publish": [ - "ABORTED", - "CANCELLED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - "UNKNOWN", - ], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 5000, - "rpc_timeout_multiplier": 1.3, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 60000, - }, - }, - "methods": { - "CreateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Publish": { - "timeout_millis": 20000, - "retry_codes_name": "publish", - "retry_params_name": "messaging", - "bundling": { - "element_count_threshold": 100, - "element_count_limit": 1000, - "request_byte_threshold": 1048576, - "request_byte_limit": 10485760, - "delay_threshold_millis": 10, - }, - }, - "GetTopic": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopics": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopicSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListTopicSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "DeleteTopic": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - 
"retry_params_name": "default", - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DetachSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent2", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py deleted file mode 100644 index 1e24ba02a4f6..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client.py +++ /dev/null @@ -1,2006 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.pubsub.v1 Subscriber API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.pubsub_v1.gapic import subscriber_client_config -from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import iam_policy_pb2_grpc -from google.iam.v1 import options_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-pubsub",).version - - -class SubscriberClient(object): - """ - The service that an application uses to manipulate subscriptions and - to consume messages from a subscription via the ``Pull`` method or by - establishing a bi-directional stream using the ``StreamingPull`` method. - """ - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - """The default address of the service.""" - - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. 
- _INTERFACE_NAME = "google.pubsub.v1.Subscriber" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SubscriberClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - @classmethod - def snapshot_path(cls, project, snapshot): - """Return a fully-qualified snapshot string.""" - return google.api_core.path_template.expand( - "projects/{project}/snapshots/{snapshot}", - project=project, - snapshot=snapshot, - ) - - @classmethod - def subscription_path(cls, project, subscription): - """Return a fully-qualified subscription string.""" - return google.api_core.path_template.expand( - "projects/{project}/subscriptions/{subscription}", - project=project, - subscription=subscription, - ) - - @classmethod - def topic_path(cls, project, topic): - """Return a fully-qualified topic string.""" - return google.api_core.path_template.expand( - "projects/{project}/topics/{topic}", project=project, topic=topic, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.SubscriberGrpcTransport, - Callable[[~.Credentials, type], ~.SubscriberGrpcTransport]): A transport - instance, responsible for actually making the API calls. 
- The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. 
- if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = subscriber_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=subscriber_grpc_transport.SubscriberGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = subscriber_grpc_transport.SubscriberGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. 
- # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_subscription( - self, - name, - topic, - push_config=None, - ack_deadline_seconds=None, - retain_acked_messages=None, - message_retention_duration=None, - labels=None, - enable_message_ordering=None, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, - detached=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a subscription to a given topic. See the resource name - rules. If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a - random name for this subscription on the same project as the topic, - conforming to the `resource name - format `__. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> name = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> topic = client.topic_path('[PROJECT]', '[TOPIC]') - >>> - >>> response = client.create_subscription(name, topic) - - Args: - name (str): Required. The name of the subscription. It must have the format - ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores - (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs - (``%``). It must be between 3 and 255 characters in length, and it must - not start with ``"goog"``. - topic (str): Required. 
The name of the topic from which this subscription is - receiving messages. Format is ``projects/{project}/topics/{topic}``. The - value of this field will be ``_deleted-topic_`` if the topic has been - deleted. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): If push delivery is used with this subscription, this field is used - to configure it. An empty ``pushConfig`` signifies that the subscriber - will pull and ack messages using API methods. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PushConfig` - ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub - waits for the subscriber to acknowledge receipt before resending the - message. In the interval after the message is delivered and before it is - acknowledged, it is considered to be outstanding. During that time - period, the message will not be redelivered (on a best-effort basis). - - For pull subscriptions, this value is used as the initial value for the - ack deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if using - non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. The - minimum custom deadline you can specify is 10 seconds. The maximum - custom deadline you can specify is 600 seconds (10 minutes). If this - parameter is 0, a default value of 10 seconds is used. - - For push delivery, this value is also used to set the request timeout - for the call to the push endpoint. - - If the subscriber never acknowledges the message, the Pub/Sub system - will eventually redeliver the message. - retain_acked_messages (bool): Indicates whether to retain acknowledged messages. If true, then - messages are not expunged from the subscription's backlog, even if they - are acknowledged, until they fall out of the - ``message_retention_duration`` window. 
This must be true if you would - like to Seek to a timestamp. - message_retention_duration (Union[dict, ~google.cloud.pubsub_v1.types.Duration]): How long to retain unacknowledged messages in the subscription's - backlog, from the moment a message is published. If - ``retain_acked_messages`` is true, then this also configures the - retention of acknowledged messages, and thus configures how far back in - time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 - days or less than 10 minutes. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Duration` - labels (dict[str -> str]): See Creating and - managing labels. - enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in - ``PubsubMessage`` will be delivered to the subscribers in the order in - which they are received by the Pub/Sub system. Otherwise, they may be - delivered in any order. EXPERIMENTAL: This feature is part of a closed - alpha release. This API might be changed in backward-incompatible ways - and is not recommended for production use. It is not subject to any SLA - or deprecation policy. - expiration_policy (Union[dict, ~google.cloud.pubsub_v1.types.ExpirationPolicy]): A policy that specifies the conditions for this subscription's - expiration. A subscription is considered active as long as any connected - subscriber is successfully consuming messages from the subscription or - is issuing operations on the subscription. If ``expiration_policy`` is - not set, a *default policy* with ``ttl`` of 31 days will be used. The - minimum allowed value for ``expiration_policy.ttl`` is 1 day. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.ExpirationPolicy` - filter_ (str): An expression written in the Pub/Sub `filter - language `__. 
If - non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field - matches the filter are delivered on this subscription. If empty, then no - messages are filtered out. - dead_letter_policy (Union[dict, ~google.cloud.pubsub_v1.types.DeadLetterPolicy]): A policy that specifies the conditions for dead lettering messages - in this subscription. If dead_letter_policy is not set, dead lettering - is disabled. - - The Cloud Pub/Sub service account associated with this subscriptions's - parent project (i.e., - service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must - have permission to Acknowledge() messages on this subscription. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.DeadLetterPolicy` - retry_policy (Union[dict, ~google.cloud.pubsub_v1.types.RetryPolicy]): A policy that specifies how Pub/Sub retries message delivery for this - subscription. - - If not set, the default retry policy is applied. This generally implies - that messages will be retried as soon as possible for healthy subscribers. - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.RetryPolicy` - detached (bool): Indicates whether the subscription is detached from its topic. - Detached subscriptions don't receive messages from their topic and don't - retain any backlog. ``Pull`` and ``StreamingPull`` requests will return - FAILED_PRECONDITION. If the subscription is a push subscription, pushes - to the endpoint will not be made. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "create_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_subscription, - default_retry=self._method_configs["CreateSubscription"].retry, - default_timeout=self._method_configs["CreateSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.Subscription( - name=name, - topic=topic, - push_config=push_config, - ack_deadline_seconds=ack_deadline_seconds, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - labels=labels, - enable_message_ordering=enable_message_ordering, - expiration_policy=expiration_policy, - filter=filter_, - dead_letter_policy=dead_letter_policy, - retry_policy=retry_policy, - detached=detached, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - 
metadata=None, - ): - """ - Gets the configuration details of a subscription. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.get_subscription(subscription) - - Args: - subscription (str): Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "get_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_subscription, - default_retry=self._method_configs["GetSubscription"].retry, - default_timeout=self._method_configs["GetSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_subscription( - self, - subscription, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> ack_deadline_seconds = 42 - >>> subscription_name = 'projects/my-project/subscriptions/my-subscription' - >>> subscription = { - ... 'name': subscription_name, - ... 'ack_deadline_seconds': ack_deadline_seconds, - ... } - >>> paths_element = 'ack_deadline_seconds' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_subscription(subscription, update_mask) - - Args: - subscription (Union[dict, ~google.cloud.pubsub_v1.types.Subscription]): Required. The updated subscription object. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Subscription` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided subscription to update. - Must be specified and non-empty. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Subscription` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "update_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_subscription, - default_retry=self._method_configs["UpdateSubscription"].retry, - default_timeout=self._method_configs["UpdateSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription.name", subscription.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_subscriptions( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists matching subscriptions. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_subscriptions(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_subscriptions(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list subscriptions. - Format is ``projects/{project-id}``. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. 
If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Subscription` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_subscriptions" not in self._inner_api_calls: - self._inner_api_calls[ - "list_subscriptions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_subscriptions, - default_retry=self._method_configs["ListSubscriptions"].retry, - default_timeout=self._method_configs["ListSubscriptions"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListSubscriptionsRequest( - project=project, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_subscriptions"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="subscriptions", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_subscription( - self, - subscription, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after deletion - will return ``NOT_FOUND``. After a subscription is deleted, a new one - may be created with the same name, but the new one has no association - with the old subscription or its topic unless the same topic is - specified. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> client.delete_subscription(subscription) - - Args: - subscription (str): Required. The subscription to delete. 
Format is - ``projects/{project}/subscriptions/{sub}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "delete_subscription" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_subscription" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_subscription, - default_retry=self._method_configs["DeleteSubscription"].retry, - default_timeout=self._method_configs["DeleteSubscription"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteSubscriptionRequest(subscription=subscription,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_subscription"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_snapshot( - self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the configuration details of a snapshot. 
Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> - >>> response = client.get_snapshot(snapshot) - - Args: - snapshot (str): Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "get_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "get_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_snapshot, - default_retry=self._method_configs["GetSnapshot"].retry, - default_timeout=self._method_configs["GetSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot", snapshot)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def modify_ack_deadline( - self, - subscription, - ack_ids, - ack_deadline_seconds, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `ack_ids`: - >>> ack_ids = [] - >>> - >>> # TODO: Initialize `ack_deadline_seconds`: - >>> ack_deadline_seconds = 0 - >>> - >>> client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - Args: - subscription (str): Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. List of acknowledgment IDs. 
- ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request - was sent to the Pub/Sub system. For example, if the value is 10, the new - ack deadline will expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero might immediately make the message available - for delivery to another subscriber client. This typically results in an - increase in the rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The maximum deadline you - can specify is 600 seconds (10 minutes). - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "modify_ack_deadline" not in self._inner_api_calls: - self._inner_api_calls[ - "modify_ack_deadline" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_ack_deadline, - default_retry=self._method_configs["ModifyAckDeadline"].retry, - default_timeout=self._method_configs["ModifyAckDeadline"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["modify_ack_deadline"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def acknowledge( - self, - subscription, - ack_ids, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant - messages from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, but - such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `ack_ids`: - >>> ack_ids = [] - >>> - >>> client.acknowledge(subscription, ack_ids) - - Args: - subscription (str): Required. The subscription whose message is being acknowledged. - Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (list[str]): Required. 
The acknowledgment ID for the messages being acknowledged - that was returned by the Pub/Sub system in the ``Pull`` response. Must - not be empty. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "acknowledge" not in self._inner_api_calls: - self._inner_api_calls[ - "acknowledge" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.acknowledge, - default_retry=self._method_configs["Acknowledge"].retry, - default_timeout=self._method_configs["Acknowledge"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["acknowledge"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def pull( - self, - subscription, - max_messages, - return_immediately=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Pulls 
messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests pending - for the given subscription. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `max_messages`: - >>> max_messages = 0 - >>> - >>> response = client.pull(subscription, max_messages) - - Args: - subscription (str): Required. The subscription from which messages should be pulled. - Format is ``projects/{project}/subscriptions/{sub}``. - max_messages (int): Required. The maximum number of messages to return for this request. Must - be a positive integer. The Pub/Sub system may return fewer than the number - specified. - return_immediately (bool): Optional. If this field set to true, the system will respond - immediately even if it there are no messages available to return in the - ``Pull`` response. Otherwise, the system may wait (for a bounded amount - of time) until at least one message is available, rather than returning - no messages. Warning: setting this field to ``true`` is discouraged - because it adversely impacts the performance of ``Pull`` operations. We - recommend that users do not set this field. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.PullResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "pull" not in self._inner_api_calls: - self._inner_api_calls["pull"] = google.api_core.gapic_v1.method.wrap_method( - self.transport.pull, - default_retry=self._method_configs["Pull"].retry, - default_timeout=self._method_configs["Pull"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.PullRequest( - subscription=subscription, - max_messages=max_messages, - return_immediately=return_immediately, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["pull"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def streaming_pull( - self, - requests, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Establishes a stream with the server, which sends messages down to - the client. The client streams acknowledgements and ack deadline - modifications back to the server. The server will close the stream and - return the status on any error. The server may close the stream with - status ``UNAVAILABLE`` to reassign server-side resources, in which case, - the client should re-establish the stream. Flow control can be achieved - by configuring the underlying RPC channel. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `stream_ack_deadline_seconds`: - >>> stream_ack_deadline_seconds = 0 - >>> request = {'subscription': subscription, 'stream_ack_deadline_seconds': stream_ack_deadline_seconds} - >>> - >>> requests = [request] - >>> for element in client.streaming_pull(requests): - ... # process element - ... pass - - Args: - requests (iterator[dict|google.cloud.pubsub_v1.proto.pubsub_pb2.StreamingPullRequest]): The input objects. If a dict is provided, it must be of the - same form as the protobuf message :class:`~google.cloud.pubsub_v1.types.StreamingPullRequest` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - Iterable[~google.cloud.pubsub_v1.types.StreamingPullResponse]. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "streaming_pull" not in self._inner_api_calls: - self._inner_api_calls[ - "streaming_pull" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.streaming_pull, - default_retry=self._method_configs["StreamingPull"].retry, - default_timeout=self._method_configs["StreamingPull"].timeout, - client_info=self._client_info, - ) - - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self.transport.streaming_pull._prefetch_first_result_ = False - - return self._inner_api_calls["streaming_pull"]( - requests, retry=retry, timeout=timeout, metadata=metadata - ) - - def modify_push_config( - self, - subscription, - push_config, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified - by an empty ``PushConfig``) or vice versa, or change the endpoint URL - and other attributes of a push subscription. Messages will accumulate - for delivery continuously through the call regardless of changes to the - ``PushConfig``. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> # TODO: Initialize `push_config`: - >>> push_config = {} - >>> - >>> client.modify_push_config(subscription, push_config) - - Args: - subscription (str): Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config (Union[dict, ~google.cloud.pubsub_v1.types.PushConfig]): Required. The push configuration for future deliveries. 
- - An empty ``pushConfig`` indicates that the Pub/Sub system should stop - pushing messages from the given subscription and allow messages to be - pulled and acknowledged - effectively pausing the subscription if - ``Pull`` or ``StreamingPull`` is not called. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.PushConfig` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "modify_push_config" not in self._inner_api_calls: - self._inner_api_calls[ - "modify_push_config" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.modify_push_config, - default_retry=self._method_configs["ModifyPushConfig"].retry, - default_timeout=self._method_configs["ModifyPushConfig"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["modify_push_config"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_snapshots( - self, - project, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> project = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_snapshots(project): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_snapshots(project).pages: - ... for element in page: - ... # process element - ... pass - - Args: - project (str): Required. The name of the project in which to list snapshots. Format - is ``projects/{project-id}``. 
- page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.GRPCIterator` instance. - An iterable of :class:`~google.cloud.pubsub_v1.types.Snapshot` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_snapshots" not in self._inner_api_calls: - self._inner_api_calls[ - "list_snapshots" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_snapshots, - default_retry=self._method_configs["ListSnapshots"].retry, - default_timeout=self._method_configs["ListSnapshots"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.ListSnapshotsRequest(project=project, page_size=page_size,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("project", project)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_snapshots"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="snapshots", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def create_snapshot( - self, - name, - subscription, - labels=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a snapshot from the requested subscription. Snapshots are - used in Seek operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the acknowledgment state - of messages in an existing subscription to the state captured by a - snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. If the - backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is - returned. See also the ``Snapshot.expire_time`` field. 
If the name is - not provided in the request, the server will assign a random name for - this snapshot on the same project as the subscription, conforming to the - `resource name - format `__. - The generated name is populated in the returned Snapshot object. Note - that for REST API requests, you must specify a name in the request. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> name = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.create_snapshot(name, subscription) - - Args: - name (str): Required. User-provided name for this snapshot. If the name is not - provided in the request, the server will assign a random name for this - snapshot on the same project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name rules. Format - is ``projects/{project}/snapshots/{snap}``. - subscription (str): Required. The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: (a) The - existing backlog on the subscription. More precisely, this is defined as - the messages in the subscription's backlog that are unacknowledged upon - the successful completion of the ``CreateSnapshot`` request; as well as: - (b) Any messages published to the subscription's topic following the - successful completion of the CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels (dict[str -> str]): See Creating and - managing labels. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "create_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_snapshot, - default_retry=self._method_configs["CreateSnapshot"].retry, - default_timeout=self._method_configs["CreateSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription, labels=labels, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_snapshot( - self, - snapshot, - update_mask, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. 
- - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> seconds = 123456 - >>> expire_time = {'seconds': seconds} - >>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' - >>> snapshot = { - ... 'name': snapshot_name, - ... 'expire_time': expire_time, - ... } - >>> paths_element = 'expire_time' - >>> paths = [paths_element] - >>> update_mask = {'paths': paths} - >>> - >>> response = client.update_snapshot(snapshot, update_mask) - - Args: - snapshot (Union[dict, ~google.cloud.pubsub_v1.types.Snapshot]): Required. The updated snapshot object. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Snapshot` - update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Required. Indicates which fields in the provided snapshot to update. - Must be specified and non-empty. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Snapshot` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "update_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_snapshot, - default_retry=self._method_configs["UpdateSnapshot"].retry, - default_timeout=self._method_configs["UpdateSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot.name", snapshot.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_snapshot( - self, - snapshot, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> snapshot = client.snapshot_path('[PROJECT]', '[SNAPSHOT]') - >>> - >>> client.delete_snapshot(snapshot) - - Args: - snapshot (str): Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_snapshot" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_snapshot" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_snapshot, - default_retry=self._method_configs["DeleteSnapshot"].retry, - default_timeout=self._method_configs["DeleteSnapshot"].timeout, - client_info=self._client_info, - ) - - request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("snapshot", snapshot)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_snapshot"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def seek( - self, - subscription, - time=None, - snapshot=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> subscription = client.subscription_path('[PROJECT]', '[SUBSCRIPTION]') - >>> - >>> response = client.seek(subscription) - - Args: - subscription (str): Required. The subscription to affect. - time (Union[dict, ~google.cloud.pubsub_v1.types.Timestamp]): The time to seek to. 
Messages retained in the subscription that were - published before this time are marked as acknowledged, and messages - retained in the subscription that were published after this time are - marked as unacknowledged. Note that this operation affects only those - messages retained in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). For - example, if ``time`` corresponds to a point before the message retention - window (or to a point before the system's notion of the subscription - creation time), only retained messages will be marked as unacknowledged, - and already-expunged messages will not be restored. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Timestamp` - snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as - that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.SeekResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "seek" not in self._inner_api_calls: - self._inner_api_calls["seek"] = google.api_core.gapic_v1.method.wrap_method( - self.transport.seek, - default_retry=self._method_configs["Seek"].retry, - default_timeout=self._method_configs["Seek"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - time=time, snapshot=snapshot, - ) - - request = pubsub_pb2.SeekRequest( - subscription=subscription, time=time, snapshot=snapshot, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("subscription", subscription)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["seek"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def set_iam_policy( - self, - resource, - policy, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `policy`: - >>> policy = {} - >>> - >>> response = client.set_iam_policy(resource, policy) - - Args: - resource (str): REQUIRED: The resource for which the policy is being specified. - See the operation documentation for the appropriate value for this field. - policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The - size of the policy is limited to a few 10s of KB. 
An empty policy is a - valid policy but certain Cloud Platform services (such as Projects) - might reject them. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.Policy` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "set_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "set_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.set_iam_policy, - default_retry=self._method_configs["SetIamPolicy"].retry, - default_timeout=self._method_configs["SetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["set_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_iam_policy( - self, - resource, - options_=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> response = client.get_iam_policy(resource) - - Args: - resource (str): REQUIRED: The resource for which the policy is being requested. - See the operation documentation for the appropriate value for this field. - options_ (Union[dict, ~google.cloud.pubsub_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to - ``GetIamPolicy``. This field is only used by Cloud IAM. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.pubsub_v1.types.GetPolicyOptions` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.Policy` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_iam_policy" not in self._inner_api_calls: - self._inner_api_calls[ - "get_iam_policy" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_iam_policy, - default_retry=self._method_configs["GetIamPolicy"].retry, - default_timeout=self._method_configs["GetIamPolicy"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.GetIamPolicyRequest( - resource=resource, options=options_, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_iam_policy"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def test_iam_permissions( - self, - resource, - permissions, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. 
- - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Example: - >>> from google.cloud import pubsub_v1 - >>> - >>> client = pubsub_v1.SubscriberClient() - >>> - >>> # TODO: Initialize `resource`: - >>> resource = '' - >>> - >>> # TODO: Initialize `permissions`: - >>> permissions = [] - >>> - >>> response = client.test_iam_permissions(resource, permissions) - - Args: - resource (str): REQUIRED: The resource for which the policy detail is being requested. - See the operation documentation for the appropriate value for this field. - permissions (list[str]): The set of permissions to check for the ``resource``. Permissions - with wildcards (such as '*' or 'storage.*') are not allowed. For more - information see `IAM - Overview `__. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "test_iam_permissions" not in self._inner_api_calls: - self._inner_api_calls[ - "test_iam_permissions" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.test_iam_permissions, - default_retry=self._method_configs["TestIamPermissions"].retry, - default_timeout=self._method_configs["TestIamPermissions"].timeout, - client_info=self._client_info, - ) - - request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("resource", resource)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["test_iam_permissions"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py deleted file mode 100644 index fc3254975dae..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/subscriber_client_config.py +++ /dev/null @@ -1,144 +0,0 @@ -config = { - "interfaces": { - "google.pubsub.v1.Subscriber": { - "retry_codes": { - "idempotent": ["ABORTED", "UNAVAILABLE", "UNKNOWN"], - "non_idempotent": ["UNAVAILABLE"], - "idempotent2": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "streaming_pull": [ - "ABORTED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - ], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000, - }, - "messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - 
"max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 25000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 25000, - "total_timeout_millis": 600000, - }, - "streaming_messaging": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 600000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 600000, - "total_timeout_millis": 600000, - }, - }, - "methods": { - "CreateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "UpdateSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListSubscriptions": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteSubscription": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent2", - "retry_params_name": "default", - }, - "ModifyAckDeadline": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Acknowledge": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "messaging", - }, - "Pull": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "messaging", - }, - "StreamingPull": { - "timeout_millis": 900000, - "retry_codes_name": "streaming_pull", - "retry_params_name": "streaming_messaging", - }, - "ModifyPushConfig": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListSnapshots": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": 
"default", - }, - "CreateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "DeleteSnapshot": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "Seek": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "SetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetIamPolicy": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "TestIamPermissions": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py deleted file mode 100644 index bdba635553f5..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/publisher_grpc_transport.py +++ /dev/null @@ -1,296 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc - - -class PublisherGrpcTransport(object): - """gRPC transport class providing stubs for - google.pubsub.v1 Publisher API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - def __init__( - self, channel=None, credentials=None, address="pubsub.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). 
- if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), - "publisher_stub": pubsub_pb2_grpc.PublisherStub(channel), - } - - @classmethod - def create_channel( - cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.create_topic`. - - Creates the given topic with the given name. See the resource name - rules. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["publisher_stub"].CreateTopic - - @property - def update_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.update_topic`. - - Updates an existing topic. Note that certain properties of a - topic are not modifiable. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].UpdateTopic - - @property - def publish(self): - """Return the gRPC stub for :meth:`PublisherClient.publish`. - - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the - topic does not exist. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].Publish - - @property - def get_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.get_topic`. - - Gets the configuration of a topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].GetTopic - - @property - def list_topics(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topics`. - - Lists matching topics. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopics - - @property - def list_topic_subscriptions(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topic_subscriptions`. - - Lists the names of the attached subscriptions on this topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["publisher_stub"].ListTopicSubscriptions - - @property - def list_topic_snapshots(self): - """Return the gRPC stub for :meth:`PublisherClient.list_topic_snapshots`. - - Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].ListTopicSnapshots - - @property - def delete_topic(self): - """Return the gRPC stub for :meth:`PublisherClient.delete_topic`. - - Deletes the topic with the given name. Returns ``NOT_FOUND`` if the - topic does not exist. After a topic is deleted, a new topic may be - created with the same name; this is an entirely new topic with none of - the old configuration or subscriptions. Existing subscriptions to this - topic are not deleted, but their ``topic`` field is set to - ``_deleted-topic_``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["publisher_stub"].DeleteTopic - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`PublisherClient.set_iam_policy`. - - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].SetIamPolicy - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`PublisherClient.get_iam_policy`. 
- - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].GetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`PublisherClient.test_iam_permissions`. - - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].TestIamPermissions - - @property - def detach_subscription(self): - """Return the gRPC stub for :meth:`PublisherClient.detach_subscription`. - - Detaches a subscription from this topic. All messages retained in - the subscription are dropped. Subsequent ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["publisher_stub"].DetachSubscription diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py deleted file mode 100644 index cd7a19bbe55f..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/gapic/transports/subscriber_grpc_transport.py +++ /dev/null @@ -1,454 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc -from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2_grpc - - -class SubscriberGrpcTransport(object): - """gRPC transport class providing stubs for - google.pubsub.v1 Subscriber API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ) - - def __init__( - self, channel=None, credentials=None, address="pubsub.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. 
This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = { - "iam_policy_stub": iam_policy_pb2_grpc.IAMPolicyStub(channel), - "subscriber_stub": pubsub_pb2_grpc.SubscriberStub(channel), - } - - @classmethod - def create_channel( - cls, address="pubsub.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. 
- """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.create_subscription`. - - Creates a subscription to a given topic. See the resource name - rules. If the subscription already exists, returns ``ALREADY_EXISTS``. - If the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will assign a - random name for this subscription on the same project as the topic, - conforming to the `resource name - format `__. - The generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].CreateSubscription - - @property - def get_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_subscription`. - - Gets the configuration details of a subscription. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].GetSubscription - - @property - def update_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.update_subscription`. - - Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["subscriber_stub"].UpdateSubscription - - @property - def list_subscriptions(self): - """Return the gRPC stub for :meth:`SubscriberClient.list_subscriptions`. - - Lists matching subscriptions. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ListSubscriptions - - @property - def delete_subscription(self): - """Return the gRPC stub for :meth:`SubscriberClient.delete_subscription`. - - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after deletion - will return ``NOT_FOUND``. After a subscription is deleted, a new one - may be created with the same name, but the new one has no association - with the old subscription or its topic unless the same topic is - specified. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].DeleteSubscription - - @property - def get_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_snapshot`. - - Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].GetSnapshot - - @property - def modify_ack_deadline(self): - """Return the gRPC stub for :meth:`SubscriberClient.modify_ack_deadline`. - - Modifies the ack deadline for a specific message. 
This method is - useful to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ModifyAckDeadline - - @property - def acknowledge(self): - """Return the gRPC stub for :meth:`SubscriberClient.acknowledge`. - - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the relevant - messages from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, but - such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Acknowledge - - @property - def pull(self): - """Return the gRPC stub for :meth:`SubscriberClient.pull`. - - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests pending - for the given subscription. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Pull - - @property - def streaming_pull(self): - """Return the gRPC stub for :meth:`SubscriberClient.streaming_pull`. - - Establishes a stream with the server, which sends messages down to - the client. The client streams acknowledgements and ack deadline - modifications back to the server. The server will close the stream and - return the status on any error. 
The server may close the stream with - status ``UNAVAILABLE`` to reassign server-side resources, in which case, - the client should re-establish the stream. Flow control can be achieved - by configuring the underlying RPC channel. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].StreamingPull - - @property - def modify_push_config(self): - """Return the gRPC stub for :meth:`SubscriberClient.modify_push_config`. - - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified - by an empty ``PushConfig``) or vice versa, or change the endpoint URL - and other attributes of a push subscription. Messages will accumulate - for delivery continuously through the call regardless of changes to the - ``PushConfig``. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ModifyPushConfig - - @property - def list_snapshots(self): - """Return the gRPC stub for :meth:`SubscriberClient.list_snapshots`. - - Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].ListSnapshots - - @property - def create_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.create_snapshot`. - - Creates a snapshot from the requested subscription. Snapshots are - used in Seek operations, which allow you to manage message - acknowledgments in bulk. 
That is, you can set the acknowledgment state - of messages in an existing subscription to the state captured by a - snapshot. If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. If the - backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then ``FAILED_PRECONDITION`` is - returned. See also the ``Snapshot.expire_time`` field. If the name is - not provided in the request, the server will assign a random name for - this snapshot on the same project as the subscription, conforming to the - `resource name - format `__. - The generated name is populated in the returned Snapshot object. Note - that for REST API requests, you must specify a name in the request. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].CreateSnapshot - - @property - def update_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.update_snapshot`. - - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].UpdateSnapshot - - @property - def delete_snapshot(self): - """Return the gRPC stub for :meth:`SubscriberClient.delete_snapshot`. - - Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].DeleteSnapshot - - @property - def seek(self): - """Return the gRPC stub for :meth:`SubscriberClient.seek`. - - Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["subscriber_stub"].Seek - - @property - def set_iam_policy(self): - """Return the gRPC stub for :meth:`SubscriberClient.set_iam_policy`. - - Sets the access control policy on the specified resource. Replaces - any existing policy. - - Can return `NOT_FOUND`, `INVALID_ARGUMENT`, and `PERMISSION_DENIED` - errors. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].SetIamPolicy - - @property - def get_iam_policy(self): - """Return the gRPC stub for :meth:`SubscriberClient.get_iam_policy`. - - Gets the access control policy for a resource. Returns an empty policy - if the resource exists and does not have a policy set. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].GetIamPolicy - - @property - def test_iam_permissions(self): - """Return the gRPC stub for :meth:`SubscriberClient.test_iam_permissions`. - - Returns permissions that a caller has on the specified resource. If the - resource does not exist, this will return an empty set of - permissions, not a `NOT_FOUND` error. - - Note: This operation is designed to be used for building - permission-aware UIs and command-line tools, not for authorization - checking. This operation may "fail open" without warning. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["iam_policy_stub"].TestIamPermissions diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index dc9151446fe5..909863eb98b4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -42,9 +42,8 @@ service Publisher { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates the given topic with the given name. See the - // - // resource name rules. + // Creates the given topic with the given name. See the [resource name rules]( + // https://cloud.google.com/pubsub/docs/admin#resource_names). 
rpc CreateTopic(Topic) returns (Topic) { option (google.api.http) = { put: "/v1/{name=projects/*/topics/*}" @@ -98,11 +97,10 @@ service Publisher { } // Lists the names of the snapshots on this topic. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. + // [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, + // which allow you to manage message acknowledgments in bulk. That is, you can + // set the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. rpc ListTopicSnapshots(ListTopicSnapshotsRequest) returns (ListTopicSnapshotsResponse) { option (google.api.http) = { @@ -161,8 +159,8 @@ message Topic { // must not start with `"goog"`. string name = 1 [(google.api.field_behavior) = REQUIRED]; - // See Creating and - // managing labels. + // See [Creating and managing labels] + // (https://cloud.google.com/pubsub/docs/labels). map labels = 2; // Policy constraining the set of Google Cloud Platform regions where messages @@ -180,11 +178,11 @@ message Topic { // A message that is published by publishers and consumed by subscribers. The // message must contain either a non-empty data field or at least one attribute. // Note that client libraries represent this object differently -// depending on the language. See the corresponding -// client -// library documentation for more information. See -// Quotas and limits -// for more information about message limits. +// depending on the language. See the corresponding [client library +// documentation](https://cloud.google.com/pubsub/docs/reference/libraries) for +// more information. See [quotas and limits] +// (https://cloud.google.com/pubsub/quotas) for more information about message +// limits. message PubsubMessage { // The message data field. 
If this field is empty, the message must contain // at least one attribute. @@ -212,9 +210,6 @@ message PubsubMessage { // delivered to subscribers in the order in which they are received by the // Pub/Sub system. All `PubsubMessage`s published in a given `PublishRequest` // must specify the same `ordering_key` value. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. string ordering_key = 5; } @@ -388,19 +383,17 @@ service Subscriber { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates a subscription to a given topic. See the - // - // resource name rules. + // Creates a subscription to a given topic. See the [resource name rules] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). // If the subscription already exists, returns `ALREADY_EXISTS`. // If the corresponding topic doesn't exist, returns `NOT_FOUND`. // // If the name is not provided in the request, the server will assign a random // name for this subscription on the same project as the topic, conforming - // to the - // [resource name - // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - // generated name is populated in the returned Subscription object. Note that - // for REST API requests, you must specify a name in the request. + // to the [resource name format] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). The generated + // name is populated in the returned Subscription object. Note that for REST + // API requests, you must specify a name in the request. 
rpc CreateSubscription(Subscription) returns (Subscription) { option (google.api.http) = { put: "/v1/{name=projects/*/subscriptions/*}" @@ -528,12 +521,11 @@ service Subscriber { option (google.api.method_signature) = "snapshot"; } - // Lists the existing snapshots. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. + // Lists the existing snapshots. Snapshots are used in [Seek]( + // https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. rpc ListSnapshots(ListSnapshotsRequest) returns (ListSnapshotsResponse) { option (google.api.http) = { get: "/v1/{project=projects/*}/snapshots" @@ -542,21 +534,19 @@ service Subscriber { } // Creates a snapshot from the requested subscription. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. - //

If the snapshot already exists, returns `ALREADY_EXISTS`. + // [Seek](https://cloud.google.com/pubsub/docs/replay-overview) operations, + // which allow you to manage message acknowledgments in bulk. That is, you can + // set the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. + // If the snapshot already exists, returns `ALREADY_EXISTS`. // If the requested subscription doesn't exist, returns `NOT_FOUND`. // If the backlog in the subscription is too old -- and the resulting snapshot // would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. // See also the `Snapshot.expire_time` field. If the name is not provided in // the request, the server will assign a random // name for this snapshot on the same project as the subscription, conforming - // to the - // [resource name - // format](https://cloud.google.com/pubsub/docs/admin#resource_names). The + // to the [resource name format] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). The // generated name is populated in the returned Snapshot object. Note that for // REST API requests, you must specify a name in the request. rpc CreateSnapshot(CreateSnapshotRequest) returns (Snapshot) { @@ -580,12 +570,11 @@ service Subscriber { }; } - // Removes an existing snapshot. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot.

+ // Removes an existing snapshot. Snapshots are used in [Seek] + // (https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. // When the snapshot is deleted, all messages retained in the snapshot // are immediately dropped. After a snapshot is deleted, a new one may be // created with the same name, but the new one has no association with the old @@ -598,13 +587,12 @@ service Subscriber { } // Seeks an existing subscription to a point in time or to a given snapshot, - // whichever is provided in the request. Snapshots are used in - // Seek - // operations, which allow - // you to manage message acknowledgments in bulk. That is, you can set the - // acknowledgment state of messages in an existing subscription to the state - // captured by a snapshot. Note that both the subscription and the snapshot - // must be on the same topic. + // whichever is provided in the request. Snapshots are used in [Seek]( + // https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // allow you to manage message acknowledgments in bulk. That is, you can set + // the acknowledgment state of messages in an existing subscription to the + // state captured by a snapshot. Note that both the subscription and the + // snapshot must be on the same topic. rpc Seek(SeekRequest) returns (SeekResponse) { option (google.api.http) = { post: "/v1/{subscription=projects/*/subscriptions/*}:seek" @@ -666,10 +654,8 @@ message Subscription { // Indicates whether to retain acknowledged messages. If true, then // messages are not expunged from the subscription's backlog, even if they are // acknowledged, until they fall out of the `message_retention_duration` - // window. This must be true if you would like to - // - // Seek to a timestamp. + // window. 
This must be true if you would like to [Seek to a timestamp] + // (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). bool retain_acked_messages = 7; // How long to retain unacknowledged messages in the subscription's backlog, @@ -688,9 +674,6 @@ message Subscription { // will be delivered to the subscribers in the order in which they // are received by the Pub/Sub system. Otherwise, they may be delivered in // any order. - // EXPERIMENTAL: This feature is part of a closed alpha release. This - // API might be changed in backward-incompatible ways and is not recommended - // for production use. It is not subject to any SLA or deprecation policy. bool enable_message_ordering = 10; // A policy that specifies the conditions for this subscription's expiration. @@ -1186,11 +1169,10 @@ message UpdateSnapshotRequest { } // A snapshot resource. Snapshots are used in -// Seek -// operations, which allow -// you to manage message acknowledgments in bulk. That is, you can set the -// acknowledgment state of messages in an existing subscription to the state -// captured by a snapshot. +// [Seek](https://cloud.google.com/pubsub/docs/replay-overview) +// operations, which allow you to manage message acknowledgments in bulk. That +// is, you can set the acknowledgment state of messages in an existing +// subscription to the state captured by a snapshot. message Snapshot { option (google.api.resource) = { type: "pubsub.googleapis.com/Snapshot" @@ -1217,8 +1199,8 @@ message Snapshot { // snapshot that would expire in less than 1 hour after creation. google.protobuf.Timestamp expire_time = 3; - // See Creating and - // managing labels. + // See [Creating and managing labels] + // (https://cloud.google.com/pubsub/docs/labels). 
map labels = 4; } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py deleted file mode 100644 index 44dc068981d9..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2.py +++ /dev/null @@ -1,5246 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/pubsub_v1/proto/pubsub.proto -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/pubsub_v1/proto/pubsub.proto", - package="google.pubsub.v1", - syntax="proto3", - serialized_options=b"\n\024com.google.pubsub.v1B\013PubsubProtoP\001Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\370\001\001\252\002\026Google.Cloud.PubSub.V1\312\002\026Google\\Cloud\\PubSub\\V1\352\002\031Google::Cloud::PubSub::V1", - create_key=_descriptor._internal_create_key, - 
serialized_pb=b'\n)google/cloud/pubsub_v1/proto/pubsub.proto\x12\x10google.pubsub.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto";\n\x14MessageStoragePolicy\x12#\n\x1b\x61llowed_persistence_regions\x18\x01 \x03(\t"\xb2\x02\n\x05Topic\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x33\n\x06labels\x18\x02 \x03(\x0b\x32#.google.pubsub.v1.Topic.LabelsEntry\x12\x46\n\x16message_storage_policy\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.MessageStoragePolicy\x12\x14\n\x0ckms_key_name\x18\x05 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:T\xea\x41Q\n\x1bpubsub.googleapis.com/Topic\x12!projects/{project}/topics/{topic}\x12\x0f_deleted-topic_"\xf1\x01\n\rPubsubMessage\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x43\n\nattributes\x18\x02 \x03(\x0b\x32/.google.pubsub.v1.PubsubMessage.AttributesEntry\x12\x12\n\nmessage_id\x18\x03 \x01(\t\x12\x30\n\x0cpublish_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x14\n\x0cordering_key\x18\x05 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"E\n\x0fGetTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"w\n\x12UpdateTopicRequest\x12+\n\x05topic\x18\x01 \x01(\x0b\x32\x17.google.pubsub.v1.TopicB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"|\n\x0ePublishRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x36\n\x08messages\x18\x02 \x03(\x0b\x32\x1f.google.pubsub.v1.PubsubMessageB\x03\xe0\x41\x02"&\n\x0fPublishResponse\x12\x13\n\x0bmessage_ids\x18\x01 
\x03(\t"\x80\x01\n\x11ListTopicsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"V\n\x12ListTopicsResponse\x12\'\n\x06topics\x18\x01 \x03(\x0b\x32\x17.google.pubsub.v1.Topic\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"z\n\x1dListTopicSubscriptionsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"y\n\x1eListTopicSubscriptionsResponse\x12>\n\rsubscriptions\x18\x01 \x03(\tB\'\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"v\n\x19ListTopicSnapshotsRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"H\n\x1aListTopicSnapshotsResponse\x12\x11\n\tsnapshots\x18\x01 \x03(\t\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"H\n\x12\x44\x65leteTopicRequest\x12\x32\n\x05topic\x18\x01 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic"]\n\x19\x44\x65tachSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x1c\n\x1a\x44\x65tachSubscriptionResponse"\xc0\x05\n\x0cSubscription\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x32\n\x05topic\x18\x02 \x01(\tB#\xe0\x41\x02\xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12\x31\n\x0bpush_config\x18\x04 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfig\x12\x1c\n\x14\x61\x63k_deadline_seconds\x18\x05 \x01(\x05\x12\x1d\n\x15retain_acked_messages\x18\x07 \x01(\x08\x12=\n\x1amessage_retention_duration\x18\x08 \x01(\x0b\x32\x19.google.protobuf.Duration\x12:\n\x06labels\x18\t \x03(\x0b\x32*.google.pubsub.v1.Subscription.LabelsEntry\x12\x1f\n\x17\x65nable_message_ordering\x18\n \x01(\x08\x12=\n\x11\x65xpiration_policy\x18\x0b 
\x01(\x0b\x32".google.pubsub.v1.ExpirationPolicy\x12\x0e\n\x06\x66ilter\x18\x0c \x01(\t\x12>\n\x12\x64\x65\x61\x64_letter_policy\x18\r \x01(\x0b\x32".google.pubsub.v1.DeadLetterPolicy\x12\x33\n\x0cretry_policy\x18\x0e \x01(\x0b\x32\x1d.google.pubsub.v1.RetryPolicy\x12\x10\n\x08\x64\x65tached\x18\x0f \x01(\x08\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:X\xea\x41U\n"pubsub.googleapis.com/Subscription\x12/projects/{project}/subscriptions/{subscription}"u\n\x0bRetryPolicy\x12\x32\n\x0fminimum_backoff\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x32\n\x0fmaximum_backoff\x18\x02 \x01(\x0b\x32\x19.google.protobuf.Duration"L\n\x10\x44\x65\x61\x64LetterPolicy\x12\x19\n\x11\x64\x65\x61\x64_letter_topic\x18\x01 \x01(\t\x12\x1d\n\x15max_delivery_attempts\x18\x02 \x01(\x05":\n\x10\x45xpirationPolicy\x12&\n\x03ttl\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration"\xad\x02\n\nPushConfig\x12\x15\n\rpush_endpoint\x18\x01 \x01(\t\x12@\n\nattributes\x18\x02 \x03(\x0b\x32,.google.pubsub.v1.PushConfig.AttributesEntry\x12<\n\noidc_token\x18\x03 \x01(\x0b\x32&.google.pubsub.v1.PushConfig.OidcTokenH\x00\x1a<\n\tOidcToken\x12\x1d\n\x15service_account_email\x18\x01 \x01(\t\x12\x10\n\x08\x61udience\x18\x02 \x01(\t\x1a\x31\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42\x17\n\x15\x61uthentication_method"m\n\x0fReceivedMessage\x12\x0e\n\x06\x61\x63k_id\x18\x01 \x01(\t\x12\x30\n\x07message\x18\x02 \x01(\x0b\x32\x1f.google.pubsub.v1.PubsubMessage\x12\x18\n\x10\x64\x65livery_attempt\x18\x03 \x01(\x05"Z\n\x16GetSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x8c\x01\n\x19UpdateSubscriptionRequest\x12\x39\n\x0csubscription\x18\x01 \x01(\x0b\x32\x1e.google.pubsub.v1.SubscriptionB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\x87\x01\n\x18ListSubscriptionsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"k\n\x19ListSubscriptionsResponse\x12\x35\n\rsubscriptions\x18\x01 \x03(\x0b\x32\x1e.google.pubsub.v1.Subscription\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"]\n\x19\x44\x65leteSubscriptionRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription"\x93\x01\n\x17ModifyPushConfigRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x36\n\x0bpush_config\x18\x02 \x01(\x0b\x32\x1c.google.pubsub.v1.PushConfigB\x03\xe0\x41\x02"\x8d\x01\n\x0bPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12!\n\x12return_immediately\x18\x02 \x01(\x08\x42\x05\x18\x01\xe0\x41\x01\x12\x19\n\x0cmax_messages\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"L\n\x0cPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x95\x01\n\x18ModifyAckDeadlineRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x04 \x03(\tB\x03\xe0\x41\x02\x12!\n\x14\x61\x63k_deadline_seconds\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02"l\n\x12\x41\x63knowledgeRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x14\n\x07\x61\x63k_ids\x18\x02 \x03(\tB\x03\xe0\x41\x02"\xa9\x02\n\x14StreamingPullRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x0f\n\x07\x61\x63k_ids\x18\x02 \x03(\t\x12\x1f\n\x17modify_deadline_seconds\x18\x03 \x03(\x05\x12\x1f\n\x17modify_deadline_ack_ids\x18\x04 \x03(\t\x12(\n\x1bstream_ack_deadline_seconds\x18\x05 
\x01(\x05\x42\x03\xe0\x41\x02\x12\x11\n\tclient_id\x18\x06 \x01(\t\x12 \n\x18max_outstanding_messages\x18\x07 \x01(\x03\x12\x1d\n\x15max_outstanding_bytes\x18\x08 \x01(\x03"U\n\x15StreamingPullResponse\x12<\n\x11received_messages\x18\x01 \x03(\x0b\x32!.google.pubsub.v1.ReceivedMessage"\x83\x02\n\x15\x43reateSnapshotRequest\x12\x34\n\x04name\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot\x12@\n\x0csubscription\x18\x02 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12\x43\n\x06labels\x18\x03 \x03(\x0b\x32\x33.google.pubsub.v1.CreateSnapshotRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"\x80\x01\n\x15UpdateSnapshotRequest\x12\x31\n\x08snapshot\x18\x01 \x01(\x0b\x32\x1a.google.pubsub.v1.SnapshotB\x03\xe0\x41\x02\x12\x34\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMaskB\x03\xe0\x41\x02"\xaf\x02\n\x08Snapshot\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x05topic\x18\x02 \x01(\tB \xfa\x41\x1d\n\x1bpubsub.googleapis.com/Topic\x12/\n\x0b\x65xpire_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06labels\x18\x04 \x03(\x0b\x32&.google.pubsub.v1.Snapshot.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:L\xea\x41I\n\x1epubsub.googleapis.com/Snapshot\x12\'projects/{project}/snapshots/{snapshot}"N\n\x12GetSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 \x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\x83\x01\n\x14ListSnapshotsRequest\x12\x44\n\x07project\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"_\n\x15ListSnapshotsResponse\x12-\n\tsnapshots\x18\x01 \x03(\x0b\x32\x1a.google.pubsub.v1.Snapshot\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"Q\n\x15\x44\x65leteSnapshotRequest\x12\x38\n\x08snapshot\x18\x01 
\x01(\tB&\xe0\x41\x02\xfa\x41 \n\x1epubsub.googleapis.com/Snapshot"\xbe\x01\n\x0bSeekRequest\x12@\n\x0csubscription\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"pubsub.googleapis.com/Subscription\x12*\n\x04time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12\x37\n\x08snapshot\x18\x03 \x01(\tB#\xfa\x41 \n\x1epubsub.googleapis.com/SnapshotH\x00\x42\x08\n\x06target"\x0e\n\x0cSeekResponse2\xa3\x0b\n\tPublisher\x12q\n\x0b\x43reateTopic\x12\x17.google.pubsub.v1.Topic\x1a\x17.google.pubsub.v1.Topic"0\x82\xd3\xe4\x93\x02#\x1a\x1e/v1/{name=projects/*/topics/*}:\x01*\xda\x41\x04name\x12}\n\x0bUpdateTopic\x12$.google.pubsub.v1.UpdateTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02)2$/v1/{topic.name=projects/*/topics/*}:\x01*\x12\x93\x01\n\x07Publish\x12 .google.pubsub.v1.PublishRequest\x1a!.google.pubsub.v1.PublishResponse"C\x82\xd3\xe4\x93\x02,"\'/v1/{topic=projects/*/topics/*}:publish:\x01*\xda\x41\x0etopic,messages\x12w\n\x08GetTopic\x12!.google.pubsub.v1.GetTopicRequest\x1a\x17.google.pubsub.v1.Topic"/\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\x8a\x01\n\nListTopics\x12#.google.pubsub.v1.ListTopicsRequest\x1a$.google.pubsub.v1.ListTopicsResponse"1\x82\xd3\xe4\x93\x02!\x12\x1f/v1/{project=projects/*}/topics\xda\x41\x07project\x12\xba\x01\n\x16ListTopicSubscriptions\x12/.google.pubsub.v1.ListTopicSubscriptionsRequest\x1a\x30.google.pubsub.v1.ListTopicSubscriptionsResponse"=\x82\xd3\xe4\x93\x02/\x12-/v1/{topic=projects/*/topics/*}/subscriptions\xda\x41\x05topic\x12\xaa\x01\n\x12ListTopicSnapshots\x12+.google.pubsub.v1.ListTopicSnapshotsRequest\x1a,.google.pubsub.v1.ListTopicSnapshotsResponse"9\x82\xd3\xe4\x93\x02+\x12)/v1/{topic=projects/*/topics/*}/snapshots\xda\x41\x05topic\x12|\n\x0b\x44\x65leteTopic\x12$.google.pubsub.v1.DeleteTopicRequest\x1a\x16.google.protobuf.Empty"/\x82\xd3\xe4\x93\x02!*\x1f/v1/{topic=projects/*/topics/*}\xda\x41\x05topic\x12\xad\x01\n\x12\x44\x65tachSubscription\x12+.google.pubsub.v1.De
tachSubscriptionRequest\x1a,.google.pubsub.v1.DetachSubscriptionResponse"<\x82\xd3\xe4\x93\x02\x36"4/v1/{subscription=projects/*/subscriptions/*}:detach\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub2\x83\x15\n\nSubscriber\x12\xb4\x01\n\x12\x43reateSubscription\x12\x1e.google.pubsub.v1.Subscription\x1a\x1e.google.pubsub.v1.Subscription"^\x82\xd3\xe4\x93\x02*\x1a%/v1/{name=projects/*/subscriptions/*}:\x01*\xda\x41+name,topic,push_config,ack_deadline_seconds\x12\xa1\x01\n\x0fGetSubscription\x12(.google.pubsub.v1.GetSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"D\x82\xd3\xe4\x93\x02/\x12-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xa0\x01\n\x12UpdateSubscription\x12+.google.pubsub.v1.UpdateSubscriptionRequest\x1a\x1e.google.pubsub.v1.Subscription"=\x82\xd3\xe4\x93\x02\x37\x32\x32/v1/{subscription.name=projects/*/subscriptions/*}:\x01*\x12\xa6\x01\n\x11ListSubscriptions\x12*.google.pubsub.v1.ListSubscriptionsRequest\x1a+.google.pubsub.v1.ListSubscriptionsResponse"8\x82\xd3\xe4\x93\x02(\x12&/v1/{project=projects/*}/subscriptions\xda\x41\x07project\x12\x9f\x01\n\x12\x44\x65leteSubscription\x12+.google.pubsub.v1.DeleteSubscriptionRequest\x1a\x16.google.protobuf.Empty"D\x82\xd3\xe4\x93\x02/*-/v1/{subscription=projects/*/subscriptions/*}\xda\x41\x0csubscription\x12\xcf\x01\n\x11ModifyAckDeadline\x12*.google.pubsub.v1.ModifyAckDeadlineRequest\x1a\x16.google.protobuf.Empty"v\x82\xd3\xe4\x93\x02\x44"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\x01*\xda\x41)subscription,ack_ids,ack_deadline_seconds\x12\xa8\x01\n\x0b\x41\x63knowledge\x12$.google.pubsub.v1.AcknowledgeRequest\x1a\x16.google.protobuf.Empty"[\x82\xd3\xe4\x93\x02>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\x01*\xda\x41\x14subscription,ack_ids\x12\xb3\x01\n\x04Pull\x12\x1d.google.pubsub.v1.PullRequest\x1a\x1e.google.pubsub.v1.PullResponse"l\x82\xd3\xe4\x93\
x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:pull:\x01*\xda\x41,subscription,return_immediately,max_messages\x12\x66\n\rStreamingPull\x12&.google.pubsub.v1.StreamingPullRequest\x1a\'.google.pubsub.v1.StreamingPullResponse"\x00(\x01\x30\x01\x12\xbb\x01\n\x10ModifyPushConfig\x12).google.pubsub.v1.ModifyPushConfigRequest\x1a\x16.google.protobuf.Empty"d\x82\xd3\xe4\x93\x02\x43">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\x01*\xda\x41\x18subscription,push_config\x12\x89\x01\n\x0bGetSnapshot\x12$.google.pubsub.v1.GetSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"8\x82\xd3\xe4\x93\x02\'\x12%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x96\x01\n\rListSnapshots\x12&.google.pubsub.v1.ListSnapshotsRequest\x1a\'.google.pubsub.v1.ListSnapshotsResponse"4\x82\xd3\xe4\x93\x02$\x12"/v1/{project=projects/*}/snapshots\xda\x41\x07project\x12\x97\x01\n\x0e\x43reateSnapshot\x12\'.google.pubsub.v1.CreateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"@\x82\xd3\xe4\x93\x02&\x1a!/v1/{name=projects/*/snapshots/*}:\x01*\xda\x41\x11name,subscription\x12\x8c\x01\n\x0eUpdateSnapshot\x12\'.google.pubsub.v1.UpdateSnapshotRequest\x1a\x1a.google.pubsub.v1.Snapshot"5\x82\xd3\xe4\x93\x02/2*/v1/{snapshot.name=projects/*/snapshots/*}:\x01*\x12\x8b\x01\n\x0e\x44\x65leteSnapshot\x12\'.google.pubsub.v1.DeleteSnapshotRequest\x1a\x16.google.protobuf.Empty"8\x82\xd3\xe4\x93\x02\'*%/v1/{snapshot=projects/*/snapshots/*}\xda\x41\x08snapshot\x12\x84\x01\n\x04Seek\x12\x1d.google.pubsub.v1.SeekRequest\x1a\x1e.google.pubsub.v1.SeekResponse"=\x82\xd3\xe4\x93\x02\x37"2/v1/{subscription=projects/*/subscriptions/*}:seek:\x01*\x1ap\xca\x41\x15pubsub.googleapis.com\xd2\x41Uhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsubB\xae\x01\n\x14\x63om.google.pubsub.v1B\x0bPubsubProtoP\x01Z6google.golang.org/genproto/googleapis/pubsub/v1;pubsub\xf8\x01\x01\xaa\x02\x16Google.Cloud.PubSub.V1\xca\x02\x16Google\\Cloud\\PubSub\\V1\xea\x02\x19Google::
Cloud::PubSub::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - - -_MESSAGESTORAGEPOLICY = _descriptor.Descriptor( - name="MessageStoragePolicy", - full_name="google.pubsub.v1.MessageStoragePolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="allowed_persistence_regions", - full_name="google.pubsub.v1.MessageStoragePolicy.allowed_persistence_regions", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=306, - serialized_end=365, -) - - -_TOPIC_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Topic.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Topic.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Topic.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_TOPIC = _descriptor.Descriptor( - name="Topic", - full_name="google.pubsub.v1.Topic", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Topic.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Topic.labels", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_storage_policy", - full_name="google.pubsub.v1.Topic.message_storage_policy", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="kms_key_name", - full_name="google.pubsub.v1.Topic.kms_key_name", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_TOPIC_LABELSENTRY,], - enum_types=[], - serialized_options=b"\352AQ\n\033pubsub.googleapis.com/Topic\022!projects/{project}/topics/{topic}\022\017_deleted-topic_", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=368, - serialized_end=674, -) - - -_PUBSUBMESSAGE_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PubsubMessage.AttributesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, 
- extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=918, -) - -_PUBSUBMESSAGE = _descriptor.Descriptor( - name="PubsubMessage", - full_name="google.pubsub.v1.PubsubMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="data", - full_name="google.pubsub.v1.PubsubMessage.data", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attributes", - full_name="google.pubsub.v1.PubsubMessage.attributes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_id", - full_name="google.pubsub.v1.PubsubMessage.message_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="publish_time", - full_name="google.pubsub.v1.PubsubMessage.publish_time", - index=3, - number=4, - type=11, - 
cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ordering_key", - full_name="google.pubsub.v1.PubsubMessage.ordering_key", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PUBSUBMESSAGE_ATTRIBUTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=677, - serialized_end=918, -) - - -_GETTOPICREQUEST = _descriptor.Descriptor( - name="GetTopicRequest", - full_name="google.pubsub.v1.GetTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.GetTopicRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=920, - serialized_end=989, -) - - -_UPDATETOPICREQUEST = _descriptor.Descriptor( - name="UpdateTopicRequest", - 
full_name="google.pubsub.v1.UpdateTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.UpdateTopicRequest.topic", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateTopicRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=991, - serialized_end=1110, -) - - -_PUBLISHREQUEST = _descriptor.Descriptor( - name="PublishRequest", - full_name="google.pubsub.v1.PublishRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.PublishRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - 
name="messages", - full_name="google.pubsub.v1.PublishRequest.messages", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1112, - serialized_end=1236, -) - - -_PUBLISHRESPONSE = _descriptor.Descriptor( - name="PublishResponse", - full_name="google.pubsub.v1.PublishResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="message_ids", - full_name="google.pubsub.v1.PublishResponse.message_ids", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1238, - serialized_end=1276, -) - - -_LISTTOPICSREQUEST = _descriptor.Descriptor( - name="ListTopicsRequest", - full_name="google.pubsub.v1.ListTopicsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListTopicsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1279, - serialized_end=1407, -) - - -_LISTTOPICSRESPONSE = _descriptor.Descriptor( - name="ListTopicsResponse", - full_name="google.pubsub.v1.ListTopicsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topics", - full_name="google.pubsub.v1.ListTopicsResponse.topics", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1409, - serialized_end=1495, -) - - -_LISTTOPICSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name="ListTopicSubscriptionsRequest", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicSubscriptionsRequest.page_token", 
- index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1497, - serialized_end=1619, -) - - -_LISTTOPICSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name="ListTopicSubscriptionsResponse", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscriptions", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.subscriptions", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicSubscriptionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1621, - serialized_end=1742, -) - - 
-_LISTTOPICSNAPSHOTSREQUEST = _descriptor.Descriptor( - name="ListTopicSnapshotsRequest", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListTopicSnapshotsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1744, - serialized_end=1862, -) - - -_LISTTOPICSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name="ListTopicSnapshotsResponse", - 
full_name="google.pubsub.v1.ListTopicSnapshotsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshots", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse.snapshots", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListTopicSnapshotsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1864, - serialized_end=1936, -) - - -_DELETETOPICREQUEST = _descriptor.Descriptor( - name="DeleteTopicRequest", - full_name="google.pubsub.v1.DeleteTopicRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.DeleteTopicRequest.topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1938, - serialized_end=2010, -) - - -_DETACHSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="DetachSubscriptionRequest", - full_name="google.pubsub.v1.DetachSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.DetachSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2012, - serialized_end=2105, -) - - -_DETACHSUBSCRIPTIONRESPONSE = _descriptor.Descriptor( - name="DetachSubscriptionResponse", - full_name="google.pubsub.v1.DetachSubscriptionResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2107, - serialized_end=2135, -) - - -_SUBSCRIPTION_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Subscription.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Subscription.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Subscription.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_SUBSCRIPTION = _descriptor.Descriptor( - name="Subscription", - full_name="google.pubsub.v1.Subscription", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Subscription.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.Subscription.topic", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="push_config", - full_name="google.pubsub.v1.Subscription.push_config", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_deadline_seconds", - full_name="google.pubsub.v1.Subscription.ack_deadline_seconds", - index=3, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retain_acked_messages", - full_name="google.pubsub.v1.Subscription.retain_acked_messages", - index=4, - number=7, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message_retention_duration", - full_name="google.pubsub.v1.Subscription.message_retention_duration", - index=5, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), 
- _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Subscription.labels", - index=6, - number=9, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="enable_message_ordering", - full_name="google.pubsub.v1.Subscription.enable_message_ordering", - index=7, - number=10, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="expiration_policy", - full_name="google.pubsub.v1.Subscription.expiration_policy", - index=8, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.pubsub.v1.Subscription.filter", - index=9, - number=12, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dead_letter_policy", - full_name="google.pubsub.v1.Subscription.dead_letter_policy", - index=10, - number=13, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="retry_policy", - full_name="google.pubsub.v1.Subscription.retry_policy", - index=11, - number=14, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="detached", - full_name="google.pubsub.v1.Subscription.detached", - index=12, - number=15, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SUBSCRIPTION_LABELSENTRY,], - enum_types=[], - serialized_options=b'\352AU\n"pubsub.googleapis.com/Subscription\022/projects/{project}/subscriptions/{subscription}', - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2138, - serialized_end=2842, -) - - -_RETRYPOLICY = _descriptor.Descriptor( - name="RetryPolicy", - full_name="google.pubsub.v1.RetryPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="minimum_backoff", - full_name="google.pubsub.v1.RetryPolicy.minimum_backoff", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="maximum_backoff", - full_name="google.pubsub.v1.RetryPolicy.maximum_backoff", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2844, - serialized_end=2961, -) - - -_DEADLETTERPOLICY = _descriptor.Descriptor( - name="DeadLetterPolicy", - full_name="google.pubsub.v1.DeadLetterPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dead_letter_topic", - full_name="google.pubsub.v1.DeadLetterPolicy.dead_letter_topic", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_delivery_attempts", - full_name="google.pubsub.v1.DeadLetterPolicy.max_delivery_attempts", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - 
oneofs=[], - serialized_start=2963, - serialized_end=3039, -) - - -_EXPIRATIONPOLICY = _descriptor.Descriptor( - name="ExpirationPolicy", - full_name="google.pubsub.v1.ExpirationPolicy", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ttl", - full_name="google.pubsub.v1.ExpirationPolicy.ttl", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3041, - serialized_end=3099, -) - - -_PUSHCONFIG_OIDCTOKEN = _descriptor.Descriptor( - name="OidcToken", - full_name="google.pubsub.v1.PushConfig.OidcToken", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="service_account_email", - full_name="google.pubsub.v1.PushConfig.OidcToken.service_account_email", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="audience", - full_name="google.pubsub.v1.PushConfig.OidcToken.audience", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3267, - serialized_end=3327, -) - -_PUSHCONFIG_ATTRIBUTESENTRY = _descriptor.Descriptor( - name="AttributesEntry", - full_name="google.pubsub.v1.PushConfig.AttributesEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.PushConfig.AttributesEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=869, - serialized_end=918, -) - -_PUSHCONFIG = _descriptor.Descriptor( - name="PushConfig", - full_name="google.pubsub.v1.PushConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="push_endpoint", - full_name="google.pubsub.v1.PushConfig.push_endpoint", 
- index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="attributes", - full_name="google.pubsub.v1.PushConfig.attributes", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="oidc_token", - full_name="google.pubsub.v1.PushConfig.oidc_token", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_PUSHCONFIG_OIDCTOKEN, _PUSHCONFIG_ATTRIBUTESENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="authentication_method", - full_name="google.pubsub.v1.PushConfig.authentication_method", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=3102, - serialized_end=3403, -) - - -_RECEIVEDMESSAGE = _descriptor.Descriptor( - name="ReceivedMessage", - full_name="google.pubsub.v1.ReceivedMessage", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ack_id", - 
full_name="google.pubsub.v1.ReceivedMessage.ack_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="message", - full_name="google.pubsub.v1.ReceivedMessage.message", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="delivery_attempt", - full_name="google.pubsub.v1.ReceivedMessage.delivery_attempt", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3405, - serialized_end=3514, -) - - -_GETSUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="GetSubscriptionRequest", - full_name="google.pubsub.v1.GetSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.GetSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3516, - serialized_end=3606, -) - - -_UPDATESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="UpdateSubscriptionRequest", - full_name="google.pubsub.v1.UpdateSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.UpdateSubscriptionRequest.subscription", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateSubscriptionRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3609, - serialized_end=3749, -) - - -_LISTSUBSCRIPTIONSREQUEST = _descriptor.Descriptor( - name="ListSubscriptionsRequest", - full_name="google.pubsub.v1.ListSubscriptionsRequest", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListSubscriptionsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListSubscriptionsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListSubscriptionsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3752, - serialized_end=3887, -) - - -_LISTSUBSCRIPTIONSRESPONSE = _descriptor.Descriptor( - name="ListSubscriptionsResponse", - full_name="google.pubsub.v1.ListSubscriptionsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscriptions", 
- full_name="google.pubsub.v1.ListSubscriptionsResponse.subscriptions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListSubscriptionsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3889, - serialized_end=3996, -) - - -_DELETESUBSCRIPTIONREQUEST = _descriptor.Descriptor( - name="DeleteSubscriptionRequest", - full_name="google.pubsub.v1.DeleteSubscriptionRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.DeleteSubscriptionRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=3998, - serialized_end=4091, -) - - -_MODIFYPUSHCONFIGREQUEST = _descriptor.Descriptor( - name="ModifyPushConfigRequest", - full_name="google.pubsub.v1.ModifyPushConfigRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.ModifyPushConfigRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="push_config", - full_name="google.pubsub.v1.ModifyPushConfigRequest.push_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4094, - serialized_end=4241, -) - - -_PULLREQUEST = _descriptor.Descriptor( - name="PullRequest", - full_name="google.pubsub.v1.PullRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.PullRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, 
- is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="return_immediately", - full_name="google.pubsub.v1.PullRequest.return_immediately", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_messages", - full_name="google.pubsub.v1.PullRequest.max_messages", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4244, - serialized_end=4385, -) - - -_PULLRESPONSE = _descriptor.Descriptor( - name="PullResponse", - full_name="google.pubsub.v1.PullResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="received_messages", - full_name="google.pubsub.v1.PullResponse.received_messages", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4387, - serialized_end=4463, -) - - -_MODIFYACKDEADLINEREQUEST = _descriptor.Descriptor( - name="ModifyAckDeadlineRequest", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_ids", - index=1, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_deadline_seconds", - full_name="google.pubsub.v1.ModifyAckDeadlineRequest.ack_deadline_seconds", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=4466, - serialized_end=4615, -) - - -_ACKNOWLEDGEREQUEST = _descriptor.Descriptor( - name="AcknowledgeRequest", - full_name="google.pubsub.v1.AcknowledgeRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.AcknowledgeRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.AcknowledgeRequest.ack_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4617, - serialized_end=4725, -) - - -_STREAMINGPULLREQUEST = _descriptor.Descriptor( - name="StreamingPullRequest", - full_name="google.pubsub.v1.StreamingPullRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.StreamingPullRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="ack_ids", - full_name="google.pubsub.v1.StreamingPullRequest.ack_ids", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="modify_deadline_seconds", - full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_seconds", - index=2, - number=3, - type=5, - cpp_type=1, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="modify_deadline_ack_ids", - full_name="google.pubsub.v1.StreamingPullRequest.modify_deadline_ack_ids", - index=3, - number=4, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="stream_ack_deadline_seconds", - full_name="google.pubsub.v1.StreamingPullRequest.stream_ack_deadline_seconds", - index=4, - number=5, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="client_id", - full_name="google.pubsub.v1.StreamingPullRequest.client_id", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_outstanding_messages", - full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_messages", - index=6, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="max_outstanding_bytes", - full_name="google.pubsub.v1.StreamingPullRequest.max_outstanding_bytes", - index=7, - number=8, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4728, - serialized_end=5025, -) - - -_STREAMINGPULLRESPONSE = _descriptor.Descriptor( - name="StreamingPullResponse", - full_name="google.pubsub.v1.StreamingPullResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="received_messages", - full_name="google.pubsub.v1.StreamingPullResponse.received_messages", 
- index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5027, - serialized_end=5112, -) - - -_CREATESNAPSHOTREQUEST_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.CreateSnapshotRequest.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_CREATESNAPSHOTREQUEST = _descriptor.Descriptor( - name="CreateSnapshotRequest", - 
full_name="google.pubsub.v1.CreateSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.CreateSnapshotRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.CreateSnapshotRequest.subscription", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.CreateSnapshotRequest.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_CREATESNAPSHOTREQUEST_LABELSENTRY,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5115, - serialized_end=5374, -) - - -_UPDATESNAPSHOTREQUEST = _descriptor.Descriptor( - name="UpdateSnapshotRequest", - full_name="google.pubsub.v1.UpdateSnapshotRequest", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.UpdateSnapshotRequest.snapshot", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="update_mask", - full_name="google.pubsub.v1.UpdateSnapshotRequest.update_mask", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5377, - serialized_end=5505, -) - - -_SNAPSHOT_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.pubsub.v1.Snapshot.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.pubsub.v1.Snapshot.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.pubsub.v1.Snapshot.LabelsEntry.value", - index=1, - number=2, 
- type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=543, - serialized_end=588, -) - -_SNAPSHOT = _descriptor.Descriptor( - name="Snapshot", - full_name="google.pubsub.v1.Snapshot", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.pubsub.v1.Snapshot.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="topic", - full_name="google.pubsub.v1.Snapshot.topic", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A\035\n\033pubsub.googleapis.com/Topic", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="expire_time", - full_name="google.pubsub.v1.Snapshot.expire_time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - 
create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.pubsub.v1.Snapshot.labels", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SNAPSHOT_LABELSENTRY,], - enum_types=[], - serialized_options=b"\352AI\n\036pubsub.googleapis.com/Snapshot\022'projects/{project}/snapshots/{snapshot}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5508, - serialized_end=5811, -) - - -_GETSNAPSHOTREQUEST = _descriptor.Descriptor( - name="GetSnapshotRequest", - full_name="google.pubsub.v1.GetSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.GetSnapshotRequest.snapshot", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5813, - serialized_end=5891, -) - - -_LISTSNAPSHOTSREQUEST = _descriptor.Descriptor( - name="ListSnapshotsRequest", - full_name="google.pubsub.v1.ListSnapshotsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - 
_descriptor.FieldDescriptor( - name="project", - full_name="google.pubsub.v1.ListSnapshotsRequest.project", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.pubsub.v1.ListSnapshotsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.pubsub.v1.ListSnapshotsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5894, - serialized_end=6025, -) - - -_LISTSNAPSHOTSRESPONSE = _descriptor.Descriptor( - name="ListSnapshotsResponse", - full_name="google.pubsub.v1.ListSnapshotsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshots", - full_name="google.pubsub.v1.ListSnapshotsResponse.snapshots", - index=0, - number=1, - type=11, - 
cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.pubsub.v1.ListSnapshotsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6027, - serialized_end=6122, -) - - -_DELETESNAPSHOTREQUEST = _descriptor.Descriptor( - name="DeleteSnapshotRequest", - full_name="google.pubsub.v1.DeleteSnapshotRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.DeleteSnapshotRequest.snapshot", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6124, - serialized_end=6205, -) - - -_SEEKREQUEST = _descriptor.Descriptor( - name="SeekRequest", - 
full_name="google.pubsub.v1.SeekRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="subscription", - full_name="google.pubsub.v1.SeekRequest.subscription", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b'\340A\002\372A$\n"pubsub.googleapis.com/Subscription', - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time", - full_name="google.pubsub.v1.SeekRequest.time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="snapshot", - full_name="google.pubsub.v1.SeekRequest.snapshot", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A \n\036pubsub.googleapis.com/Snapshot", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="target", - full_name="google.pubsub.v1.SeekRequest.target", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=6208, - serialized_end=6398, -) - - -_SEEKRESPONSE = _descriptor.Descriptor( - 
name="SeekResponse", - full_name="google.pubsub.v1.SeekResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6400, - serialized_end=6414, -) - -_TOPIC_LABELSENTRY.containing_type = _TOPIC -_TOPIC.fields_by_name["labels"].message_type = _TOPIC_LABELSENTRY -_TOPIC.fields_by_name["message_storage_policy"].message_type = _MESSAGESTORAGEPOLICY -_PUBSUBMESSAGE_ATTRIBUTESENTRY.containing_type = _PUBSUBMESSAGE -_PUBSUBMESSAGE.fields_by_name[ - "attributes" -].message_type = _PUBSUBMESSAGE_ATTRIBUTESENTRY -_PUBSUBMESSAGE.fields_by_name[ - "publish_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_UPDATETOPICREQUEST.fields_by_name["topic"].message_type = _TOPIC -_UPDATETOPICREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_PUBLISHREQUEST.fields_by_name["messages"].message_type = _PUBSUBMESSAGE -_LISTTOPICSRESPONSE.fields_by_name["topics"].message_type = _TOPIC -_SUBSCRIPTION_LABELSENTRY.containing_type = _SUBSCRIPTION -_SUBSCRIPTION.fields_by_name["push_config"].message_type = _PUSHCONFIG -_SUBSCRIPTION.fields_by_name[ - "message_retention_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_SUBSCRIPTION.fields_by_name["labels"].message_type = _SUBSCRIPTION_LABELSENTRY -_SUBSCRIPTION.fields_by_name["expiration_policy"].message_type = _EXPIRATIONPOLICY -_SUBSCRIPTION.fields_by_name["dead_letter_policy"].message_type = _DEADLETTERPOLICY -_SUBSCRIPTION.fields_by_name["retry_policy"].message_type = _RETRYPOLICY -_RETRYPOLICY.fields_by_name[ - "minimum_backoff" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_RETRYPOLICY.fields_by_name[ - "maximum_backoff" -].message_type = 
google_dot_protobuf_dot_duration__pb2._DURATION -_EXPIRATIONPOLICY.fields_by_name[ - "ttl" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_PUSHCONFIG_OIDCTOKEN.containing_type = _PUSHCONFIG -_PUSHCONFIG_ATTRIBUTESENTRY.containing_type = _PUSHCONFIG -_PUSHCONFIG.fields_by_name["attributes"].message_type = _PUSHCONFIG_ATTRIBUTESENTRY -_PUSHCONFIG.fields_by_name["oidc_token"].message_type = _PUSHCONFIG_OIDCTOKEN -_PUSHCONFIG.oneofs_by_name["authentication_method"].fields.append( - _PUSHCONFIG.fields_by_name["oidc_token"] -) -_PUSHCONFIG.fields_by_name["oidc_token"].containing_oneof = _PUSHCONFIG.oneofs_by_name[ - "authentication_method" -] -_RECEIVEDMESSAGE.fields_by_name["message"].message_type = _PUBSUBMESSAGE -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"].message_type = _SUBSCRIPTION -_UPDATESUBSCRIPTIONREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_LISTSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"].message_type = _SUBSCRIPTION -_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"].message_type = _PUSHCONFIG -_PULLRESPONSE.fields_by_name["received_messages"].message_type = _RECEIVEDMESSAGE -_STREAMINGPULLRESPONSE.fields_by_name[ - "received_messages" -].message_type = _RECEIVEDMESSAGE -_CREATESNAPSHOTREQUEST_LABELSENTRY.containing_type = _CREATESNAPSHOTREQUEST -_CREATESNAPSHOTREQUEST.fields_by_name[ - "labels" -].message_type = _CREATESNAPSHOTREQUEST_LABELSENTRY -_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"].message_type = _SNAPSHOT -_UPDATESNAPSHOTREQUEST.fields_by_name[ - "update_mask" -].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK -_SNAPSHOT_LABELSENTRY.containing_type = _SNAPSHOT -_SNAPSHOT.fields_by_name[ - "expire_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SNAPSHOT.fields_by_name["labels"].message_type = _SNAPSHOT_LABELSENTRY -_LISTSNAPSHOTSRESPONSE.fields_by_name["snapshots"].message_type = 
_SNAPSHOT -_SEEKREQUEST.fields_by_name[ - "time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_SEEKREQUEST.oneofs_by_name["target"].fields.append(_SEEKREQUEST.fields_by_name["time"]) -_SEEKREQUEST.fields_by_name["time"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ - "target" -] -_SEEKREQUEST.oneofs_by_name["target"].fields.append( - _SEEKREQUEST.fields_by_name["snapshot"] -) -_SEEKREQUEST.fields_by_name["snapshot"].containing_oneof = _SEEKREQUEST.oneofs_by_name[ - "target" -] -DESCRIPTOR.message_types_by_name["MessageStoragePolicy"] = _MESSAGESTORAGEPOLICY -DESCRIPTOR.message_types_by_name["Topic"] = _TOPIC -DESCRIPTOR.message_types_by_name["PubsubMessage"] = _PUBSUBMESSAGE -DESCRIPTOR.message_types_by_name["GetTopicRequest"] = _GETTOPICREQUEST -DESCRIPTOR.message_types_by_name["UpdateTopicRequest"] = _UPDATETOPICREQUEST -DESCRIPTOR.message_types_by_name["PublishRequest"] = _PUBLISHREQUEST -DESCRIPTOR.message_types_by_name["PublishResponse"] = _PUBLISHRESPONSE -DESCRIPTOR.message_types_by_name["ListTopicsRequest"] = _LISTTOPICSREQUEST -DESCRIPTOR.message_types_by_name["ListTopicsResponse"] = _LISTTOPICSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListTopicSubscriptionsRequest" -] = _LISTTOPICSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTopicSubscriptionsResponse" -] = _LISTTOPICSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "ListTopicSnapshotsRequest" -] = _LISTTOPICSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListTopicSnapshotsResponse" -] = _LISTTOPICSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteTopicRequest"] = _DELETETOPICREQUEST -DESCRIPTOR.message_types_by_name[ - "DetachSubscriptionRequest" -] = _DETACHSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name[ - "DetachSubscriptionResponse" -] = _DETACHSUBSCRIPTIONRESPONSE -DESCRIPTOR.message_types_by_name["Subscription"] = _SUBSCRIPTION -DESCRIPTOR.message_types_by_name["RetryPolicy"] = _RETRYPOLICY 
-DESCRIPTOR.message_types_by_name["DeadLetterPolicy"] = _DEADLETTERPOLICY -DESCRIPTOR.message_types_by_name["ExpirationPolicy"] = _EXPIRATIONPOLICY -DESCRIPTOR.message_types_by_name["PushConfig"] = _PUSHCONFIG -DESCRIPTOR.message_types_by_name["ReceivedMessage"] = _RECEIVEDMESSAGE -DESCRIPTOR.message_types_by_name["GetSubscriptionRequest"] = _GETSUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name[ - "UpdateSubscriptionRequest" -] = _UPDATESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name["ListSubscriptionsRequest"] = _LISTSUBSCRIPTIONSREQUEST -DESCRIPTOR.message_types_by_name[ - "ListSubscriptionsResponse" -] = _LISTSUBSCRIPTIONSRESPONSE -DESCRIPTOR.message_types_by_name[ - "DeleteSubscriptionRequest" -] = _DELETESUBSCRIPTIONREQUEST -DESCRIPTOR.message_types_by_name["ModifyPushConfigRequest"] = _MODIFYPUSHCONFIGREQUEST -DESCRIPTOR.message_types_by_name["PullRequest"] = _PULLREQUEST -DESCRIPTOR.message_types_by_name["PullResponse"] = _PULLRESPONSE -DESCRIPTOR.message_types_by_name["ModifyAckDeadlineRequest"] = _MODIFYACKDEADLINEREQUEST -DESCRIPTOR.message_types_by_name["AcknowledgeRequest"] = _ACKNOWLEDGEREQUEST -DESCRIPTOR.message_types_by_name["StreamingPullRequest"] = _STREAMINGPULLREQUEST -DESCRIPTOR.message_types_by_name["StreamingPullResponse"] = _STREAMINGPULLRESPONSE -DESCRIPTOR.message_types_by_name["CreateSnapshotRequest"] = _CREATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["UpdateSnapshotRequest"] = _UPDATESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["Snapshot"] = _SNAPSHOT -DESCRIPTOR.message_types_by_name["GetSnapshotRequest"] = _GETSNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["ListSnapshotsRequest"] = _LISTSNAPSHOTSREQUEST -DESCRIPTOR.message_types_by_name["ListSnapshotsResponse"] = _LISTSNAPSHOTSRESPONSE -DESCRIPTOR.message_types_by_name["DeleteSnapshotRequest"] = _DELETESNAPSHOTREQUEST -DESCRIPTOR.message_types_by_name["SeekRequest"] = _SEEKREQUEST -DESCRIPTOR.message_types_by_name["SeekResponse"] = _SEEKRESPONSE 
-_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -MessageStoragePolicy = _reflection.GeneratedProtocolMessageType( - "MessageStoragePolicy", - (_message.Message,), - { - "DESCRIPTOR": _MESSAGESTORAGEPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy constraining the storage of messages published to the topic. - - - Attributes: - allowed_persistence_regions: - A list of IDs of GCP regions where messages that are published - to the topic may be persisted in storage. Messages published - by publishers running in non-allowed GCP regions (or running - outside of GCP altogether) will be routed for storage in one - of the allowed regions. An empty list means that no regions - are allowed, and is not a valid configuration. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.MessageStoragePolicy) - }, -) -_sym_db.RegisterMessage(MessageStoragePolicy) - -Topic = _reflection.GeneratedProtocolMessageType( - "Topic", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _TOPIC_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic.LabelsEntry) - }, - ), - "DESCRIPTOR": _TOPIC, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A topic resource. - - - Attributes: - name: - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must - start with a letter, and contain only letters (``[A-Za-z]``), - numbers (``[0-9]``), dashes (``-``), underscores (``_``), - periods (``.``), tildes (``~``), plus (``+``) or percent signs - (``%``). It must be between 3 and 255 characters in length, - and it must not start with ``"goog"``. - labels: - See Creating and managing labels. 
- message_storage_policy: - Policy constraining the set of Google Cloud Platform regions - where messages published to the topic may be stored. If not - present, then no constraints are in effect. - kms_key_name: - The resource name of the Cloud KMS CryptoKey to be used to - protect access to messages published on this topic. The - expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Topic) - }, -) -_sym_db.RegisterMessage(Topic) -_sym_db.RegisterMessage(Topic.LabelsEntry) - -PubsubMessage = _reflection.GeneratedProtocolMessageType( - "PubsubMessage", - (_message.Message,), - { - "AttributesEntry": _reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - { - "DESCRIPTOR": _PUBSUBMESSAGE_ATTRIBUTESENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage.AttributesEntry) - }, - ), - "DESCRIPTOR": _PUBSUBMESSAGE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A message that is published by publishers and consumed by subscribers. - The message must contain either a non-empty data field or at least one - attribute. Note that client libraries represent this object - differently depending on the language. See the corresponding client - library documentation for more information. See Quotas and limits for - more information about message limits. - - - Attributes: - data: - The message data field. If this field is empty, the message - must contain at least one attribute. - attributes: - Attributes for this message. If this field is empty, the - message must contain non-empty data. This can be used to - filter messages on the subscription. - message_id: - ID of this message, assigned by the server when the message is - published. Guaranteed to be unique within the topic. 
This - value may be read by a subscriber that receives a - ``PubsubMessage`` via a ``Pull`` call or a push delivery. It - must not be populated by the publisher in a ``Publish`` call. - publish_time: - The time at which the message was published, populated by the - server when it receives the ``Publish`` call. It must not be - populated by the publisher in a ``Publish`` call. - ordering_key: - If non-empty, identifies related messages for which publish - order should be respected. If a ``Subscription`` has - ``enable_message_ordering`` set to ``true``, messages - published with the same non-empty ``ordering_key`` value will - be delivered to subscribers in the order in which they are - received by the Pub/Sub system. All ``PubsubMessage``\ s - published in a given ``PublishRequest`` must specify the same - ``ordering_key`` value. EXPERIMENTAL: This feature is part of - a closed alpha release. This API might be changed in backward- - incompatible ways and is not recommended for production use. - It is not subject to any SLA or deprecation policy. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PubsubMessage) - }, -) -_sym_db.RegisterMessage(PubsubMessage) -_sym_db.RegisterMessage(PubsubMessage.AttributesEntry) - -GetTopicRequest = _reflection.GeneratedProtocolMessageType( - "GetTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETTOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetTopic method. - - - Attributes: - topic: - Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetTopicRequest) - }, -) -_sym_db.RegisterMessage(GetTopicRequest) - -UpdateTopicRequest = _reflection.GeneratedProtocolMessageType( - "UpdateTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATETOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateTopic method. - - - Attributes: - topic: - Required. The updated topic object. - update_mask: - Required. Indicates which fields in the provided topic to - update. Must be specified and non-empty. Note that if - ``update_mask`` contains “message_storage_policy” but the - ``message_storage_policy`` is not set in the ``topic`` - provided above, then the updated value is determined by the - policy configured at the project or organization level. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateTopicRequest) - }, -) -_sym_db.RegisterMessage(UpdateTopicRequest) - -PublishRequest = _reflection.GeneratedProtocolMessageType( - "PublishRequest", - (_message.Message,), - { - "DESCRIPTOR": _PUBLISHREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the Publish method. - - - Attributes: - topic: - Required. The messages in the request will be published on - this topic. Format is ``projects/{project}/topics/{topic}``. - messages: - Required. The messages to publish. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishRequest) - }, -) -_sym_db.RegisterMessage(PublishRequest) - -PublishResponse = _reflection.GeneratedProtocolMessageType( - "PublishResponse", - (_message.Message,), - { - "DESCRIPTOR": _PUBLISHRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Publish`` method. - - - Attributes: - message_ids: - The server-assigned ID of each published message, in the same - order as the messages in the request. IDs are guaranteed to be - unique within the topic. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PublishResponse) - }, -) -_sym_db.RegisterMessage(PublishResponse) - -ListTopicsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopics`` method. - - - Attributes: - project: - Required. The name of the project in which to list topics. - Format is ``projects/{project-id}``. - page_size: - Maximum number of topics to return. - page_token: - The value returned by the last ``ListTopicsResponse``; - indicates that this is a continuation of a prior - ``ListTopics`` call, and that the system should return the - next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicsRequest) - -ListTopicsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopics`` method. - - - Attributes: - topics: - The resulting topics. - next_page_token: - If not empty, indicates that there may be more topics that - match the request; this value should be passed in a new - ``ListTopicsRequest``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicsResponse) - -ListTopicSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicSubscriptionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopicSubscriptions`` method. - - - Attributes: - topic: - Required. The name of the topic that subscriptions are - attached to. Format is ``projects/{project}/topics/{topic}``. 
- page_size: - Maximum number of subscription names to return. - page_token: - The value returned by the last - ``ListTopicSubscriptionsResponse``; indicates that this is a - continuation of a prior ``ListTopicSubscriptions`` call, and - that the system should return the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicSubscriptionsRequest) - -ListTopicSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicSubscriptionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSUBSCRIPTIONSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopicSubscriptions`` method. - - - Attributes: - subscriptions: - The names of subscriptions attached to the topic specified in - the request. - next_page_token: - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListTopicSubscriptionsRequest`` to get more subscriptions. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSubscriptionsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicSubscriptionsResponse) - -ListTopicSnapshotsRequest = _reflection.GeneratedProtocolMessageType( - "ListTopicSnapshotsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSNAPSHOTSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListTopicSnapshots`` method. - - - Attributes: - topic: - Required. The name of the topic that snapshots are attached - to. Format is ``projects/{project}/topics/{topic}``. - page_size: - Maximum number of snapshot names to return. - page_token: - The value returned by the last ``ListTopicSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListTopicSnapshots`` call, and that the system should return - the next page of data. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsRequest) - }, -) -_sym_db.RegisterMessage(ListTopicSnapshotsRequest) - -ListTopicSnapshotsResponse = _reflection.GeneratedProtocolMessageType( - "ListTopicSnapshotsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTTOPICSNAPSHOTSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListTopicSnapshots`` method. - - - Attributes: - snapshots: - The names of the snapshots that match the request. - next_page_token: - If not empty, indicates that there may be more snapshots that - match the request; this value should be passed in a new - ``ListTopicSnapshotsRequest`` to get more snapshots. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListTopicSnapshotsResponse) - }, -) -_sym_db.RegisterMessage(ListTopicSnapshotsResponse) - -DeleteTopicRequest = _reflection.GeneratedProtocolMessageType( - "DeleteTopicRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETETOPICREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``DeleteTopic`` method. - - - Attributes: - topic: - Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteTopicRequest) - }, -) -_sym_db.RegisterMessage(DeleteTopicRequest) - -DetachSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "DetachSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DETACHSUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the DetachSubscription method. - - - Attributes: - subscription: - Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(DetachSubscriptionRequest) - -DetachSubscriptionResponse = _reflection.GeneratedProtocolMessageType( - "DetachSubscriptionResponse", - (_message.Message,), - { - "DESCRIPTOR": _DETACHSUBSCRIPTIONRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the DetachSubscription method. Reserved for future use.""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DetachSubscriptionResponse) - }, -) -_sym_db.RegisterMessage(DetachSubscriptionResponse) - -Subscription = _reflection.GeneratedProtocolMessageType( - "Subscription", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SUBSCRIPTION_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription.LabelsEntry) - }, - ), - "DESCRIPTOR": _SUBSCRIPTION, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A subscription resource. - - - Attributes: - name: - Required. The name of the subscription. It must have the - format ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain only - letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), plus - (``+``) or percent signs (``%``). It must be between 3 and 255 - characters in length, and it must not start with ``"goog"``. - topic: - Required. The name of the topic from which this subscription - is receiving messages. Format is - ``projects/{project}/topics/{topic}``. The value of this field - will be ``_deleted-topic_`` if the topic has been deleted. - push_config: - If push delivery is used with this subscription, this field is - used to configure it. 
An empty ``pushConfig`` signifies that - the subscriber will pull and ack messages using API methods. - ack_deadline_seconds: - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt before - resending the message. In the interval after the message is - delivered and before it is acknowledged, it is considered to - be outstanding. During that time period, the message will not - be redelivered (on a best-effort basis). For pull - subscriptions, this value is used as the initial value for the - ack deadline. To override this value for a given message, call - ``ModifyAckDeadline`` with the corresponding ``ack_id`` if - using non-streaming pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming pull. - The minimum custom deadline you can specify is 10 seconds. The - maximum custom deadline you can specify is 600 seconds (10 - minutes). If this parameter is 0, a default value of 10 - seconds is used. For push delivery, this value is also used - to set the request timeout for the call to the push endpoint. - If the subscriber never acknowledges the message, the Pub/Sub - system will eventually redeliver the message. - retain_acked_messages: - Indicates whether to retain acknowledged messages. If true, - then messages are not expunged from the subscription’s - backlog, even if they are acknowledged, until they fall out of - the ``message_retention_duration`` window. This must be true - if you would like to Seek to a timestamp. - message_retention_duration: - How long to retain unacknowledged messages in the - subscription’s backlog, from the moment a message is - published. If ``retain_acked_messages`` is true, then this - also configures the retention of acknowledged messages, and - thus configures how far back in time a ``Seek`` can be done. - Defaults to 7 days. Cannot be more than 7 days or less than 10 - minutes. - labels: - See Creating and managing labels. 
- enable_message_ordering: - If true, messages published with the same ``ordering_key`` in - ``PubsubMessage`` will be delivered to the subscribers in the - order in which they are received by the Pub/Sub system. - Otherwise, they may be delivered in any order. EXPERIMENTAL: - This feature is part of a closed alpha release. This API might - be changed in backward-incompatible ways and is not - recommended for production use. It is not subject to any SLA - or deprecation policy. - expiration_policy: - A policy that specifies the conditions for this subscription’s - expiration. A subscription is considered active as long as any - connected subscriber is successfully consuming messages from - the subscription or is issuing operations on the subscription. - If ``expiration_policy`` is not set, a *default policy* with - ``ttl`` of 31 days will be used. The minimum allowed value for - ``expiration_policy.ttl`` is 1 day. - filter: - An expression written in the Pub/Sub `filter language - `__. If non- - empty, then only ``PubsubMessage``\ s whose ``attributes`` - field matches the filter are delivered on this subscription. - If empty, then no messages are filtered out. - dead_letter_policy: - A policy that specifies the conditions for dead lettering - messages in this subscription. If dead_letter_policy is not - set, dead lettering is disabled. The Cloud Pub/Sub service - account associated with this subscriptions’s parent project - (i.e., service-{project_number}@gcp-sa- - pubsub.iam.gserviceaccount.com) must have permission to - Acknowledge() messages on this subscription. - retry_policy: - A policy that specifies how Pub/Sub retries message delivery - for this subscription. If not set, the default retry policy - is applied. This generally implies that messages will be - retried as soon as possible for healthy subscribers. - RetryPolicy will be triggered on NACKs or acknowledgement - deadline exceeded events for a given message. 
- detached: - Indicates whether the subscription is detached from its topic. - Detached subscriptions don’t receive messages from their topic - and don’t retain any backlog. ``Pull`` and ``StreamingPull`` - requests will return FAILED_PRECONDITION. If the subscription - is a push subscription, pushes to the endpoint will not be - made. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Subscription) - }, -) -_sym_db.RegisterMessage(Subscription) -_sym_db.RegisterMessage(Subscription.LabelsEntry) - -RetryPolicy = _reflection.GeneratedProtocolMessageType( - "RetryPolicy", - (_message.Message,), - { - "DESCRIPTOR": _RETRYPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy that specifies how Cloud Pub/Sub retries message delivery. - Retry delay will be exponential based on provided minimum and maximum - backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. Retry Policy is implemented on a - best effort basis. At times, the delay between consecutive deliveries - may not match the configuration. That is, delay can be more or less - than configured backoff. - - - Attributes: - minimum_backoff: - The minimum delay between consecutive deliveries of a given - message. Value should be between 0 and 600 seconds. Defaults - to 10 seconds. - maximum_backoff: - The maximum delay between consecutive deliveries of a given - message. Value should be between 0 and 600 seconds. Defaults - to 600 seconds. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.RetryPolicy) - }, -) -_sym_db.RegisterMessage(RetryPolicy) - -DeadLetterPolicy = _reflection.GeneratedProtocolMessageType( - "DeadLetterPolicy", - (_message.Message,), - { - "DESCRIPTOR": _DEADLETTERPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Dead lettering is done on a best effort basis. 
The same message might - be dead lettered multiple times. If validation on any of the fields - fails at subscription creation/updation, the create/update - subscription request will fail. - - - Attributes: - dead_letter_topic: - The name of the topic to which dead letter messages should be - published. Format is ``projects/{project}/topics/{topic}``.The - Cloud Pub/Sub service account associated with the enclosing - subscription’s parent project (i.e., - service-{project_number}@gcp-sa- - pubsub.iam.gserviceaccount.com) must have permission to - Publish() to this topic. The operation will fail if the topic - does not exist. Users should ensure that there is a - subscription attached to this topic since messages published - to a topic with no subscriptions are lost. - max_delivery_attempts: - The maximum number of delivery attempts for any message. The - value must be between 5 and 100. The number of delivery - attempts is defined as 1 + (the sum of number of NACKs and - number of times the acknowledgement deadline has been exceeded - for the message). A NACK is any call to ModifyAckDeadline - with a 0 deadline. Note that client libraries may - automatically extend ack_deadlines. This field will be - honored on a best effort basis. If this parameter is 0, a - default value of 5 is used. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeadLetterPolicy) - }, -) -_sym_db.RegisterMessage(DeadLetterPolicy) - -ExpirationPolicy = _reflection.GeneratedProtocolMessageType( - "ExpirationPolicy", - (_message.Message,), - { - "DESCRIPTOR": _EXPIRATIONPOLICY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A policy that specifies the conditions for resource expiration (i.e., - automatic resource deletion). - - - Attributes: - ttl: - Specifies the “time-to-live” duration for an associated - resource. The resource expires if it is not active for a - period of ``ttl``. 
The definition of “activity” depends on the - type of the associated resource. The minimum and maximum - allowed values for ``ttl`` depend on the type of the - associated resource, as well. If ``ttl`` is not set, the - associated resource never expires. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ExpirationPolicy) - }, -) -_sym_db.RegisterMessage(ExpirationPolicy) - -PushConfig = _reflection.GeneratedProtocolMessageType( - "PushConfig", - (_message.Message,), - { - "OidcToken": _reflection.GeneratedProtocolMessageType( - "OidcToken", - (_message.Message,), - { - "DESCRIPTOR": _PUSHCONFIG_OIDCTOKEN, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Contains information needed for generating an `OpenID Connect token - `__. - - - Attributes: - service_account_email: - \ `Service account email - `__ to be - used for generating the OIDC token. The caller (for - CreateSubscription, UpdateSubscription, and ModifyPushConfig - RPCs) must have the iam.serviceAccounts.actAs permission for - the service account. - audience: - Audience to be used when generating OIDC token. The audience - claim identifies the recipients that the JWT is intended for. - The audience value is a single case-sensitive string. Having - multiple values (array) for the audience field is not - supported. More info about the OIDC JWT token audience here: - https://tools.ietf.org/html/rfc7519#section-4.1.3 Note: if not - specified, the Push endpoint URL will be used. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.OidcToken) - }, - ), - "AttributesEntry": _reflection.GeneratedProtocolMessageType( - "AttributesEntry", - (_message.Message,), - { - "DESCRIPTOR": _PUSHCONFIG_ATTRIBUTESENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig.AttributesEntry) - }, - ), - "DESCRIPTOR": _PUSHCONFIG, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Configuration for a push delivery endpoint. - - - Attributes: - push_endpoint: - A URL locating the endpoint to which messages should be - pushed. For example, a Webhook endpoint might use - ``https://example.com/push``. - attributes: - Endpoint configuration attributes that can be used to control - different aspects of the message delivery. The only currently - supported attribute is ``x-goog-version``, which you can use - to change the format of the pushed message. This attribute - indicates the version of the data expected by the endpoint. - This controls the shape of the pushed message (i.e., its - fields and metadata). If not present during the - ``CreateSubscription`` call, it will default to the version of - the Pub/Sub API used to make such call. If not present in a - ``ModifyPushConfig`` call, its value will not be changed. - ``GetSubscription`` calls will always return a valid version, - even if the subscription was created without this attribute. - The only supported values for the ``x-goog-version`` attribute - are: - ``v1beta1``: uses the push format defined in the - v1beta1 Pub/Sub API. - ``v1`` or ``v1beta2``: uses the push - format defined in the v1 Pub/Sub API. For example: .. - raw:: html
attributes { "x-goog-version": "v1"
-          } 
- authentication_method: - An authentication method used by push endpoints to verify the - source of push requests. This can be used with push endpoints - that are private by default to allow requests only from the - Cloud Pub/Sub system, for example. This field is optional and - should be set only by users interested in authenticated push. - oidc_token: - If specified, Pub/Sub will generate and attach an OIDC JWT - token as an ``Authorization`` header in the HTTP request for - every pushed message. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PushConfig) - }, -) -_sym_db.RegisterMessage(PushConfig) -_sym_db.RegisterMessage(PushConfig.OidcToken) -_sym_db.RegisterMessage(PushConfig.AttributesEntry) - -ReceivedMessage = _reflection.GeneratedProtocolMessageType( - "ReceivedMessage", - (_message.Message,), - { - "DESCRIPTOR": _RECEIVEDMESSAGE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A message and its corresponding acknowledgment ID. - - - Attributes: - ack_id: - This ID can be used to acknowledge the received message. - message: - The message. - delivery_attempt: - The approximate number of times that Cloud Pub/Sub has - attempted to deliver the associated message to a subscriber. - More precisely, this is 1 + (number of NACKs) + (number of - ack_deadline exceeds) for this message. A NACK is any call to - ModifyAckDeadline with a 0 deadline. An ack_deadline exceeds - event is whenever a message is not acknowledged within - ack_deadline. Note that ack_deadline is initially - Subscription.ackDeadlineSeconds, but may get extended - automatically by the client library. Upon the first delivery - of a given message, ``delivery_attempt`` will have a value of - 1. The value is calculated at best effort and is approximate. - If a DeadLetterPolicy is not set on the subscription, this - will be 0. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ReceivedMessage) - }, -) -_sym_db.RegisterMessage(ReceivedMessage) - -GetSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "GetSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetSubscription method. - - - Attributes: - subscription: - Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(GetSubscriptionRequest) - -UpdateSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateSubscription method. - - - Attributes: - subscription: - Required. The updated subscription object. - update_mask: - Required. Indicates which fields in the provided subscription - to update. Must be specified and non-empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(UpdateSubscriptionRequest) - -ListSubscriptionsRequest = _reflection.GeneratedProtocolMessageType( - "ListSubscriptionsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSUBSCRIPTIONSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListSubscriptions`` method. - - - Attributes: - project: - Required. The name of the project in which to list - subscriptions. Format is ``projects/{project-id}``. - page_size: - Maximum number of subscriptions to return. 
- page_token: - The value returned by the last ``ListSubscriptionsResponse``; - indicates that this is a continuation of a prior - ``ListSubscriptions`` call, and that the system should return - the next page of data. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsRequest) - }, -) -_sym_db.RegisterMessage(ListSubscriptionsRequest) - -ListSubscriptionsResponse = _reflection.GeneratedProtocolMessageType( - "ListSubscriptionsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSUBSCRIPTIONSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListSubscriptions`` method. - - - Attributes: - subscriptions: - The subscriptions that match the request. - next_page_token: - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListSubscriptionsRequest`` to get more subscriptions. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSubscriptionsResponse) - }, -) -_sym_db.RegisterMessage(ListSubscriptionsResponse) - -DeleteSubscriptionRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSubscriptionRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESUBSCRIPTIONREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the DeleteSubscription method. - - - Attributes: - subscription: - Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSubscriptionRequest) - }, -) -_sym_db.RegisterMessage(DeleteSubscriptionRequest) - -ModifyPushConfigRequest = _reflection.GeneratedProtocolMessageType( - "ModifyPushConfigRequest", - (_message.Message,), - { - "DESCRIPTOR": _MODIFYPUSHCONFIGREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ModifyPushConfig method. 
- - - Attributes: - subscription: - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config: - Required. The push configuration for future deliveries. An - empty ``pushConfig`` indicates that the Pub/Sub system should - stop pushing messages from the given subscription and allow - messages to be pulled and acknowledged - effectively pausing - the subscription if ``Pull`` or ``StreamingPull`` is not - called. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyPushConfigRequest) - }, -) -_sym_db.RegisterMessage(ModifyPushConfigRequest) - -PullRequest = _reflection.GeneratedProtocolMessageType( - "PullRequest", - (_message.Message,), - { - "DESCRIPTOR": _PULLREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``Pull`` method. - - - Attributes: - subscription: - Required. The subscription from which messages should be - pulled. Format is ``projects/{project}/subscriptions/{sub}``. - return_immediately: - Optional. If this field set to true, the system will respond - immediately even if it there are no messages available to - return in the ``Pull`` response. Otherwise, the system may - wait (for a bounded amount of time) until at least one message - is available, rather than returning no messages. Warning: - setting this field to ``true`` is discouraged because it - adversely impacts the performance of ``Pull`` operations. We - recommend that users do not set this field. - max_messages: - Required. The maximum number of messages to return for this - request. Must be a positive integer. The Pub/Sub system may - return fewer than the number specified. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullRequest) - }, -) -_sym_db.RegisterMessage(PullRequest) - -PullResponse = _reflection.GeneratedProtocolMessageType( - "PullResponse", - (_message.Message,), - { - "DESCRIPTOR": _PULLRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Pull`` method. - - - Attributes: - received_messages: - Received Pub/Sub messages. The list will be empty if there are - no more messages available in the backlog. For JSON, the - response can be entirely empty. The Pub/Sub system may return - fewer than the ``maxMessages`` requested even if there are - more messages available in the backlog. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.PullResponse) - }, -) -_sym_db.RegisterMessage(PullResponse) - -ModifyAckDeadlineRequest = _reflection.GeneratedProtocolMessageType( - "ModifyAckDeadlineRequest", - (_message.Message,), - { - "DESCRIPTOR": _MODIFYACKDEADLINEREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ModifyAckDeadline method. - - - Attributes: - subscription: - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - Required. List of acknowledgment IDs. - ack_deadline_seconds: - Required. The new ack deadline with respect to the time this - request was sent to the Pub/Sub system. For example, if the - value is 10, the new ack deadline will expire 10 seconds after - the ``ModifyAckDeadline`` call was made. Specifying zero might - immediately make the message available for delivery to another - subscriber client. This typically results in an increase in - the rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The maximum - deadline you can specify is 600 seconds (10 minutes). 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ModifyAckDeadlineRequest) - }, -) -_sym_db.RegisterMessage(ModifyAckDeadlineRequest) - -AcknowledgeRequest = _reflection.GeneratedProtocolMessageType( - "AcknowledgeRequest", - (_message.Message,), - { - "DESCRIPTOR": _ACKNOWLEDGEREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the Acknowledge method. - - - Attributes: - subscription: - Required. The subscription whose message is being - acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - Required. The acknowledgment ID for the messages being - acknowledged that was returned by the Pub/Sub system in the - ``Pull`` response. Must not be empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.AcknowledgeRequest) - }, -) -_sym_db.RegisterMessage(AcknowledgeRequest) - -StreamingPullRequest = _reflection.GeneratedProtocolMessageType( - "StreamingPullRequest", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGPULLREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``StreamingPull`` streaming RPC method. This request - is used to establish the initial stream as well as to stream - acknowledgements and ack deadline modifications from the client to the - server. - - - Attributes: - subscription: - Required. The subscription for which to initialize the new - stream. This must be provided in the first request on the - stream, and must not be set in subsequent requests from client - to server. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids: - List of acknowledgement IDs for acknowledging previously - received messages (received on this stream or a different - stream). If an ack ID has expired, the corresponding message - may be redelivered later. Acknowledging a message more than - once will not result in an error. 
If the acknowledgement ID is - malformed, the stream will be aborted with status - ``INVALID_ARGUMENT``. - modify_deadline_seconds: - The list of new ack deadlines for the IDs listed in - ``modify_deadline_ack_ids``. The size of this list must be the - same as the size of ``modify_deadline_ack_ids``. If it differs - the stream will be aborted with ``INVALID_ARGUMENT``. Each - element in this list is applied to the element in the same - position in ``modify_deadline_ack_ids``. The new ack deadline - is with respect to the time this request was sent to the - Pub/Sub system. Must be >= 0. For example, if the value is 10, - the new ack deadline will expire 10 seconds after this request - is received. If the value is 0, the message is immediately - made available for another streaming or non-streaming pull - request. If the value is < 0 (an error), the stream will be - aborted with status ``INVALID_ARGUMENT``. - modify_deadline_ack_ids: - List of acknowledgement IDs whose deadline will be modified - based on the corresponding element in - ``modify_deadline_seconds``. This field can be used to - indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if - the processing was interrupted. - stream_ack_deadline_seconds: - Required. The ack deadline to use for the stream. This must be - provided in the first request on the stream, but it can also - be updated on subsequent requests from client to server. The - minimum deadline you can specify is 10 seconds. The maximum - deadline you can specify is 600 seconds (10 minutes). - client_id: - A unique identifier that is used to distinguish client - instances from each other. Only needs to be provided on the - initial request. When a stream disconnects and reconnects for - the same stream, the client_id should be set to the same value - so that state associated with the old stream can be - transferred to the new stream. 
The same client_id should not - be used for different client instances. - max_outstanding_messages: - Flow control settings for the maximum number of outstanding - messages. When there are ``max_outstanding_messages`` or more - currently sent to the streaming pull client that have not yet - been acked or nacked, the server stops sending more messages. - The sending of messages resumes once the number of outstanding - messages is less than this value. If the value is <= 0, there - is no limit to the number of outstanding messages. This - property can only be set on the initial StreamingPullRequest. - If it is set on a subsequent request, the stream will be - aborted with status ``INVALID_ARGUMENT``. - max_outstanding_bytes: - Flow control settings for the maximum number of outstanding - bytes. When there are ``max_outstanding_bytes`` or more worth - of messages currently sent to the streaming pull client that - have not yet been acked or nacked, the server will stop - sending more messages. The sending of messages resumes once - the number of outstanding bytes is less than this value. If - the value is <= 0, there is no limit to the number of - outstanding bytes. This property can only be set on the - initial StreamingPullRequest. If it is set on a subsequent - request, the stream will be aborted with status - ``INVALID_ARGUMENT``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullRequest) - }, -) -_sym_db.RegisterMessage(StreamingPullRequest) - -StreamingPullResponse = _reflection.GeneratedProtocolMessageType( - "StreamingPullResponse", - (_message.Message,), - { - "DESCRIPTOR": _STREAMINGPULLRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``StreamingPull`` method. This response is used to - stream messages from the server to the client. - - - Attributes: - received_messages: - Received Pub/Sub messages. This will not be empty. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.StreamingPullResponse) - }, -) -_sym_db.RegisterMessage(StreamingPullResponse) - -CreateSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "CreateSnapshotRequest", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _CREATESNAPSHOTREQUEST_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest.LabelsEntry) - }, - ), - "DESCRIPTOR": _CREATESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``CreateSnapshot`` method. - - - Attributes: - name: - Required. User-provided name for this snapshot. If the name is - not provided in the request, the server will assign a random - name for this snapshot on the same project as the - subscription. Note that for REST API requests, you must - specify a name. See the resource name rules. Format is - ``projects/{project}/snapshots/{snap}``. - subscription: - Required. The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: - (a) The existing backlog on the subscription. More precisely, - this is defined as the messages in the subscription’s backlog - that are unacknowledged upon the successful completion of the - ``CreateSnapshot`` request; as well as: (b) Any messages - published to the subscription’s topic following the successful - completion of the CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels: - See Creating and managing labels. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.CreateSnapshotRequest) - }, -) -_sym_db.RegisterMessage(CreateSnapshotRequest) -_sym_db.RegisterMessage(CreateSnapshotRequest.LabelsEntry) - -UpdateSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "UpdateSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the UpdateSnapshot method. - - - Attributes: - snapshot: - Required. The updated snapshot object. - update_mask: - Required. Indicates which fields in the provided snapshot to - update. Must be specified and non-empty. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.UpdateSnapshotRequest) - }, -) -_sym_db.RegisterMessage(UpdateSnapshotRequest) - -Snapshot = _reflection.GeneratedProtocolMessageType( - "Snapshot", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _SNAPSHOT_LABELSENTRY, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2" - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot.LabelsEntry) - }, - ), - "DESCRIPTOR": _SNAPSHOT, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """A snapshot resource. Snapshots are used in Seek operations, which - allow you to manage message acknowledgments in bulk. That is, you can - set the acknowledgment state of messages in an existing subscription - to the state captured by a snapshot. - - - Attributes: - name: - The name of the snapshot. - topic: - The name of the topic from which this snapshot is retaining - messages. - expire_time: - The snapshot is guaranteed to exist up until this time. A - newly-created snapshot expires no later than 7 days from the - time of its creation. Its exact lifetime is determined at - creation by the existing backlog in the source subscription. 
- Specifically, the lifetime of the snapshot is ``7 days - (age - of oldest unacked message in the subscription)``. For example, - consider a subscription whose oldest unacked message is 3 days - old. If a snapshot is created from this subscription, the - snapshot – which will always capture this 3-day-old backlog as - long as the snapshot exists – will expire in 4 days. The - service will refuse to create a snapshot that would expire in - less than 1 hour after creation. - labels: - See Creating and managing labels. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.Snapshot) - }, -) -_sym_db.RegisterMessage(Snapshot) -_sym_db.RegisterMessage(Snapshot.LabelsEntry) - -GetSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "GetSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the GetSnapshot method. - - - Attributes: - snapshot: - Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.GetSnapshotRequest) - }, -) -_sym_db.RegisterMessage(GetSnapshotRequest) - -ListSnapshotsRequest = _reflection.GeneratedProtocolMessageType( - "ListSnapshotsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSNAPSHOTSREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``ListSnapshots`` method. - - - Attributes: - project: - Required. The name of the project in which to list snapshots. - Format is ``projects/{project-id}``. - page_size: - Maximum number of snapshots to return. - page_token: - The value returned by the last ``ListSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListSnapshots`` call, and that the system should return the - next page of data. 
- """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsRequest) - }, -) -_sym_db.RegisterMessage(ListSnapshotsRequest) - -ListSnapshotsResponse = _reflection.GeneratedProtocolMessageType( - "ListSnapshotsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSNAPSHOTSRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``ListSnapshots`` method. - - - Attributes: - snapshots: - The resulting snapshots. - next_page_token: - If not empty, indicates that there may be more snapshot that - match the request; this value should be passed in a new - ``ListSnapshotsRequest``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.ListSnapshotsResponse) - }, -) -_sym_db.RegisterMessage(ListSnapshotsResponse) - -DeleteSnapshotRequest = _reflection.GeneratedProtocolMessageType( - "DeleteSnapshotRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESNAPSHOTREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``DeleteSnapshot`` method. - - - Attributes: - snapshot: - Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.DeleteSnapshotRequest) - }, -) -_sym_db.RegisterMessage(DeleteSnapshotRequest) - -SeekRequest = _reflection.GeneratedProtocolMessageType( - "SeekRequest", - (_message.Message,), - { - "DESCRIPTOR": _SEEKREQUEST, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Request for the ``Seek`` method. - - - Attributes: - subscription: - Required. The subscription to affect. - time: - The time to seek to. Messages retained in the subscription - that were published before this time are marked as - acknowledged, and messages retained in the subscription that - were published after this time are marked as unacknowledged. 
- Note that this operation affects only those messages retained - in the subscription (configured by the combination of - ``message_retention_duration`` and ``retain_acked_messages``). - For example, if ``time`` corresponds to a point before the - message retention window (or to a point before the system’s - notion of the subscription creation time), only retained - messages will be marked as unacknowledged, and already- - expunged messages will not be restored. - snapshot: - The snapshot to seek to. The snapshot’s topic must be the same - as that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. - """, - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekRequest) - }, -) -_sym_db.RegisterMessage(SeekRequest) - -SeekResponse = _reflection.GeneratedProtocolMessageType( - "SeekResponse", - (_message.Message,), - { - "DESCRIPTOR": _SEEKRESPONSE, - "__module__": "google.cloud.pubsub_v1.proto.pubsub_pb2", - "__doc__": """Response for the ``Seek`` method (this response is empty).""", - # @@protoc_insertion_point(class_scope:google.pubsub.v1.SeekResponse) - }, -) -_sym_db.RegisterMessage(SeekResponse) - - -DESCRIPTOR._options = None -_TOPIC_LABELSENTRY._options = None -_TOPIC.fields_by_name["name"]._options = None -_TOPIC._options = None -_PUBSUBMESSAGE_ATTRIBUTESENTRY._options = None -_GETTOPICREQUEST.fields_by_name["topic"]._options = None -_UPDATETOPICREQUEST.fields_by_name["topic"]._options = None -_UPDATETOPICREQUEST.fields_by_name["update_mask"]._options = None -_PUBLISHREQUEST.fields_by_name["topic"]._options = None -_PUBLISHREQUEST.fields_by_name["messages"]._options = None -_LISTTOPICSREQUEST.fields_by_name["project"]._options = None -_LISTTOPICSUBSCRIPTIONSREQUEST.fields_by_name["topic"]._options = None -_LISTTOPICSUBSCRIPTIONSRESPONSE.fields_by_name["subscriptions"]._options = None -_LISTTOPICSNAPSHOTSREQUEST.fields_by_name["topic"]._options = None -_DELETETOPICREQUEST.fields_by_name["topic"]._options = None 
-_DETACHSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_SUBSCRIPTION_LABELSENTRY._options = None -_SUBSCRIPTION.fields_by_name["name"]._options = None -_SUBSCRIPTION.fields_by_name["topic"]._options = None -_SUBSCRIPTION._options = None -_PUSHCONFIG_ATTRIBUTESENTRY._options = None -_GETSUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_UPDATESUBSCRIPTIONREQUEST.fields_by_name["update_mask"]._options = None -_LISTSUBSCRIPTIONSREQUEST.fields_by_name["project"]._options = None -_DELETESUBSCRIPTIONREQUEST.fields_by_name["subscription"]._options = None -_MODIFYPUSHCONFIGREQUEST.fields_by_name["subscription"]._options = None -_MODIFYPUSHCONFIGREQUEST.fields_by_name["push_config"]._options = None -_PULLREQUEST.fields_by_name["subscription"]._options = None -_PULLREQUEST.fields_by_name["return_immediately"]._options = None -_PULLREQUEST.fields_by_name["max_messages"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["subscription"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_ids"]._options = None -_MODIFYACKDEADLINEREQUEST.fields_by_name["ack_deadline_seconds"]._options = None -_ACKNOWLEDGEREQUEST.fields_by_name["subscription"]._options = None -_ACKNOWLEDGEREQUEST.fields_by_name["ack_ids"]._options = None -_STREAMINGPULLREQUEST.fields_by_name["subscription"]._options = None -_STREAMINGPULLREQUEST.fields_by_name["stream_ack_deadline_seconds"]._options = None -_CREATESNAPSHOTREQUEST_LABELSENTRY._options = None -_CREATESNAPSHOTREQUEST.fields_by_name["name"]._options = None -_CREATESNAPSHOTREQUEST.fields_by_name["subscription"]._options = None -_UPDATESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_UPDATESNAPSHOTREQUEST.fields_by_name["update_mask"]._options = None -_SNAPSHOT_LABELSENTRY._options = None -_SNAPSHOT.fields_by_name["topic"]._options = None -_SNAPSHOT._options = None 
-_GETSNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_LISTSNAPSHOTSREQUEST.fields_by_name["project"]._options = None -_DELETESNAPSHOTREQUEST.fields_by_name["snapshot"]._options = None -_SEEKREQUEST.fields_by_name["subscription"]._options = None -_SEEKREQUEST.fields_by_name["snapshot"]._options = None - -_PUBLISHER = _descriptor.ServiceDescriptor( - name="Publisher", - full_name="google.pubsub.v1.Publisher", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", - create_key=_descriptor._internal_create_key, - serialized_start=6417, - serialized_end=7860, - methods=[ - _descriptor.MethodDescriptor( - name="CreateTopic", - full_name="google.pubsub.v1.Publisher.CreateTopic", - index=0, - containing_service=None, - input_type=_TOPIC, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002#\032\036/v1/{name=projects/*/topics/*}:\001*\332A\004name", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateTopic", - full_name="google.pubsub.v1.Publisher.UpdateTopic", - index=1, - containing_service=None, - input_type=_UPDATETOPICREQUEST, - output_type=_TOPIC, - serialized_options=b"\202\323\344\223\002)2$/v1/{topic.name=projects/*/topics/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Publish", - full_name="google.pubsub.v1.Publisher.Publish", - index=2, - containing_service=None, - input_type=_PUBLISHREQUEST, - output_type=_PUBLISHRESPONSE, - serialized_options=b"\202\323\344\223\002,\"'/v1/{topic=projects/*/topics/*}:publish:\001*\332A\016topic,messages", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetTopic", - full_name="google.pubsub.v1.Publisher.GetTopic", - index=3, - containing_service=None, - input_type=_GETTOPICREQUEST, - output_type=_TOPIC, - 
serialized_options=b"\202\323\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopics", - full_name="google.pubsub.v1.Publisher.ListTopics", - index=4, - containing_service=None, - input_type=_LISTTOPICSREQUEST, - output_type=_LISTTOPICSRESPONSE, - serialized_options=b"\202\323\344\223\002!\022\037/v1/{project=projects/*}/topics\332A\007project", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopicSubscriptions", - full_name="google.pubsub.v1.Publisher.ListTopicSubscriptions", - index=5, - containing_service=None, - input_type=_LISTTOPICSUBSCRIPTIONSREQUEST, - output_type=_LISTTOPICSUBSCRIPTIONSRESPONSE, - serialized_options=b"\202\323\344\223\002/\022-/v1/{topic=projects/*/topics/*}/subscriptions\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListTopicSnapshots", - full_name="google.pubsub.v1.Publisher.ListTopicSnapshots", - index=6, - containing_service=None, - input_type=_LISTTOPICSNAPSHOTSREQUEST, - output_type=_LISTTOPICSNAPSHOTSRESPONSE, - serialized_options=b"\202\323\344\223\002+\022)/v1/{topic=projects/*/topics/*}/snapshots\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteTopic", - full_name="google.pubsub.v1.Publisher.DeleteTopic", - index=7, - containing_service=None, - input_type=_DELETETOPICREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002!*\037/v1/{topic=projects/*/topics/*}\332A\005topic", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DetachSubscription", - full_name="google.pubsub.v1.Publisher.DetachSubscription", - index=8, - containing_service=None, - input_type=_DETACHSUBSCRIPTIONREQUEST, - output_type=_DETACHSUBSCRIPTIONRESPONSE, - 
serialized_options=b'\202\323\344\223\0026"4/v1/{subscription=projects/*/subscriptions/*}:detach', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_PUBLISHER) - -DESCRIPTOR.services_by_name["Publisher"] = _PUBLISHER - - -_SUBSCRIBER = _descriptor.ServiceDescriptor( - name="Subscriber", - full_name="google.pubsub.v1.Subscriber", - file=DESCRIPTOR, - index=1, - serialized_options=b"\312A\025pubsub.googleapis.com\322AUhttps://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/pubsub", - create_key=_descriptor._internal_create_key, - serialized_start=7863, - serialized_end=10554, - methods=[ - _descriptor.MethodDescriptor( - name="CreateSubscription", - full_name="google.pubsub.v1.Subscriber.CreateSubscription", - index=0, - containing_service=None, - input_type=_SUBSCRIPTION, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002*\032%/v1/{name=projects/*/subscriptions/*}:\001*\332A+name,topic,push_config,ack_deadline_seconds", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetSubscription", - full_name="google.pubsub.v1.Subscriber.GetSubscription", - index=1, - containing_service=None, - input_type=_GETSUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002/\022-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateSubscription", - full_name="google.pubsub.v1.Subscriber.UpdateSubscription", - index=2, - containing_service=None, - input_type=_UPDATESUBSCRIPTIONREQUEST, - output_type=_SUBSCRIPTION, - serialized_options=b"\202\323\344\223\002722/v1/{subscription.name=projects/*/subscriptions/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListSubscriptions", - full_name="google.pubsub.v1.Subscriber.ListSubscriptions", - index=3, - 
containing_service=None, - input_type=_LISTSUBSCRIPTIONSREQUEST, - output_type=_LISTSUBSCRIPTIONSRESPONSE, - serialized_options=b"\202\323\344\223\002(\022&/v1/{project=projects/*}/subscriptions\332A\007project", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteSubscription", - full_name="google.pubsub.v1.Subscriber.DeleteSubscription", - index=4, - containing_service=None, - input_type=_DELETESUBSCRIPTIONREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002/*-/v1/{subscription=projects/*/subscriptions/*}\332A\014subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ModifyAckDeadline", - full_name="google.pubsub.v1.Subscriber.ModifyAckDeadline", - index=5, - containing_service=None, - input_type=_MODIFYACKDEADLINEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002D"?/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline:\001*\332A)subscription,ack_ids,ack_deadline_seconds', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Acknowledge", - full_name="google.pubsub.v1.Subscriber.Acknowledge", - index=6, - containing_service=None, - input_type=_ACKNOWLEDGEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002>"9/v1/{subscription=projects/*/subscriptions/*}:acknowledge:\001*\332A\024subscription,ack_ids', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Pull", - full_name="google.pubsub.v1.Subscriber.Pull", - index=7, - containing_service=None, - input_type=_PULLREQUEST, - output_type=_PULLRESPONSE, - serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:pull:\001*\332A,subscription,return_immediately,max_messages', - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="StreamingPull", - full_name="google.pubsub.v1.Subscriber.StreamingPull", - index=8, - containing_service=None, - input_type=_STREAMINGPULLREQUEST, - output_type=_STREAMINGPULLRESPONSE, - serialized_options=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ModifyPushConfig", - full_name="google.pubsub.v1.Subscriber.ModifyPushConfig", - index=9, - containing_service=None, - input_type=_MODIFYPUSHCONFIGREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002C">/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig:\001*\332A\030subscription,push_config', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetSnapshot", - full_name="google.pubsub.v1.Subscriber.GetSnapshot", - index=10, - containing_service=None, - input_type=_GETSNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002'\022%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListSnapshots", - full_name="google.pubsub.v1.Subscriber.ListSnapshots", - index=11, - containing_service=None, - input_type=_LISTSNAPSHOTSREQUEST, - output_type=_LISTSNAPSHOTSRESPONSE, - serialized_options=b'\202\323\344\223\002$\022"/v1/{project=projects/*}/snapshots\332A\007project', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="CreateSnapshot", - full_name="google.pubsub.v1.Subscriber.CreateSnapshot", - index=12, - containing_service=None, - input_type=_CREATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}:\001*\332A\021name,subscription", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="UpdateSnapshot", - 
full_name="google.pubsub.v1.Subscriber.UpdateSnapshot", - index=13, - containing_service=None, - input_type=_UPDATESNAPSHOTREQUEST, - output_type=_SNAPSHOT, - serialized_options=b"\202\323\344\223\002/2*/v1/{snapshot.name=projects/*/snapshots/*}:\001*", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteSnapshot", - full_name="google.pubsub.v1.Subscriber.DeleteSnapshot", - index=14, - containing_service=None, - input_type=_DELETESNAPSHOTREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002'*%/v1/{snapshot=projects/*/snapshots/*}\332A\010snapshot", - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="Seek", - full_name="google.pubsub.v1.Subscriber.Seek", - index=15, - containing_service=None, - input_type=_SEEKREQUEST, - output_type=_SEEKRESPONSE, - serialized_options=b'\202\323\344\223\0027"2/v1/{subscription=projects/*/subscriptions/*}:seek:\001*', - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_SUBSCRIBER) - -DESCRIPTOR.services_by_name["Subscriber"] = _SUBSCRIBER - -# @@protoc_insertion_point(module_scope) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py deleted file mode 100644 index ca2cf7903d86..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub_pb2_grpc.py +++ /dev/null @@ -1,1284 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
-"""Client and server classes corresponding to protobuf-defined services.""" -import grpc - -from google.cloud.pubsub_v1.proto import ( - pubsub_pb2 as google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class PublisherStub(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/CreateTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.UpdateTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/UpdateTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.Publish = channel.unary_unary( - "/google.pubsub.v1.Publisher/Publish", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - ) - self.GetTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/GetTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - ) - self.ListTopics = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopics", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - ) - self.ListTopicSubscriptions = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopicSubscriptions", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - ) - self.ListTopicSnapshots = channel.unary_unary( - "/google.pubsub.v1.Publisher/ListTopicSnapshots", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, - ) - self.DeleteTopic = channel.unary_unary( - "/google.pubsub.v1.Publisher/DeleteTopic", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.DetachSubscription = channel.unary_unary( - "/google.pubsub.v1.Publisher/DetachSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, - ) - - -class PublisherServicer(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - def CreateTopic(self, request, context): - """Creates the given topic with the given name. See the - - resource name rules. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateTopic(self, request, context): - """Updates an existing topic. 
Note that certain properties of a - topic are not modifiable. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Publish(self, request, context): - """Adds one or more messages to the topic. Returns `NOT_FOUND` if the topic - does not exist. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetTopic(self, request, context): - """Gets the configuration of a topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopics(self, request, context): - """Lists matching topics. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopicSubscriptions(self, request, context): - """Lists the names of the attached subscriptions on this topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListTopicSnapshots(self, request, context): - """Lists the names of the snapshots on this topic. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteTopic(self, request, context): - """Deletes the topic with the given name. Returns `NOT_FOUND` if the topic - does not exist. 
After a topic is deleted, a new topic may be created with - the same name; this is an entirely new topic with none of the old - configuration or subscriptions. Existing subscriptions to this topic are - not deleted, but their `topic` field is set to `_deleted-topic_`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DetachSubscription(self, request, context): - """Detaches a subscription from this topic. All messages retained in the - subscription are dropped. Subsequent `Pull` and `StreamingPull` requests - will return FAILED_PRECONDITION. If the subscription is a push - subscription, pushes to the endpoint will stop. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_PublisherServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateTopic": grpc.unary_unary_rpc_method_handler( - servicer.CreateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "UpdateTopic": grpc.unary_unary_rpc_method_handler( - servicer.UpdateTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "Publish": grpc.unary_unary_rpc_method_handler( - servicer.Publish, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.SerializeToString, - ), - "GetTopic": grpc.unary_unary_rpc_method_handler( - servicer.GetTopic, - 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - ), - "ListTopics": grpc.unary_unary_rpc_method_handler( - servicer.ListTopics, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.SerializeToString, - ), - "ListTopicSubscriptions": grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.SerializeToString, - ), - "ListTopicSnapshots": grpc.unary_unary_rpc_method_handler( - servicer.ListTopicSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.SerializeToString, - ), - "DeleteTopic": grpc.unary_unary_rpc_method_handler( - servicer.DeleteTopic, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "DetachSubscription": grpc.unary_unary_rpc_method_handler( - servicer.DetachSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.pubsub.v1.Publisher", rpc_method_handlers - ) - 
server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class Publisher(object): - """The service that an application uses to manipulate topics, and to send - messages to a topic. - """ - - @staticmethod - def CreateTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/CreateTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/UpdateTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateTopicRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Publish( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/Publish", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PublishResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - 
timeout, - metadata, - ) - - @staticmethod - def GetTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/GetTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetTopicRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Topic.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopics( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopics", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopicSubscriptions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopicSubscriptions", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSubscriptionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListTopicSnapshots( - request, - target, - options=(), - channel_credentials=None, - 
call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/ListTopicSnapshots", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListTopicSnapshotsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteTopic( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/DeleteTopic", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteTopicRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DetachSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Publisher/DetachSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DetachSubscriptionResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - -class SubscriberStub(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the 
`StreamingPull` method. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.CreateSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/CreateSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.GetSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/GetSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.UpdateSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/UpdateSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - ) - self.ListSubscriptions = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ListSubscriptions", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, - ) - self.DeleteSubscription = channel.unary_unary( - "/google.pubsub.v1.Subscriber/DeleteSubscription", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ModifyAckDeadline = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ModifyAckDeadline", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - 
response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Acknowledge = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Acknowledge", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Pull = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Pull", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, - ) - self.StreamingPull = channel.stream_stream( - "/google.pubsub.v1.Subscriber/StreamingPull", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, - ) - self.ModifyPushConfig = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ModifyPushConfig", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/GetSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.ListSnapshots = channel.unary_unary( - "/google.pubsub.v1.Subscriber/ListSnapshots", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, - ) - self.CreateSnapshot = channel.unary_unary( - 
"/google.pubsub.v1.Subscriber/CreateSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.UpdateSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/UpdateSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - ) - self.DeleteSnapshot = channel.unary_unary( - "/google.pubsub.v1.Subscriber/DeleteSnapshot", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.Seek = channel.unary_unary( - "/google.pubsub.v1.Subscriber/Seek", - request_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, - ) - - -class SubscriberServicer(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - def CreateSubscription(self, request, context): - """Creates a subscription to a given topic. See the - - resource name rules. - If the subscription already exists, returns `ALREADY_EXISTS`. - If the corresponding topic doesn't exist, returns `NOT_FOUND`. - - If the name is not provided in the request, the server will assign a random - name for this subscription on the same project as the topic, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). 
The - generated name is populated in the returned Subscription object. Note that - for REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSubscription(self, request, context): - """Gets the configuration details of a subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSubscription(self, request, context): - """Updates an existing subscription. Note that certain properties of a - subscription, such as its topic, are not modifiable. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListSubscriptions(self, request, context): - """Lists matching subscriptions. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSubscription(self, request, context): - """Deletes an existing subscription. All messages retained in the subscription - are immediately dropped. Calls to `Pull` after deletion will return - `NOT_FOUND`. After a subscription is deleted, a new one may be created with - the same name, but the new one has no association with the old - subscription or its topic unless the same topic is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ModifyAckDeadline(self, request, context): - """Modifies the ack deadline for a specific message. 
This method is useful - to indicate that more time is needed to process a message by the - subscriber, or to make the message available for redelivery if the - processing was interrupted. Note that this does not modify the - subscription-level `ackDeadlineSeconds` used for subsequent messages. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Acknowledge(self, request, context): - """Acknowledges the messages associated with the `ack_ids` in the - `AcknowledgeRequest`. The Pub/Sub system can remove the relevant messages - from the subscription. - - Acknowledging a message whose ack deadline has expired may succeed, - but such a message may be redelivered later. Acknowledging a message more - than once will not result in an error. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Pull(self, request, context): - """Pulls messages from the server. The server may return `UNAVAILABLE` if - there are too many concurrent pull requests pending for the given - subscription. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def StreamingPull(self, request_iterator, context): - """Establishes a stream with the server, which sends messages down to the - client. The client streams acknowledgements and ack deadline modifications - back to the server. The server will close the stream and return the status - on any error. The server may close the stream with status `UNAVAILABLE` to - reassign server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by configuring the - underlying RPC channel. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ModifyPushConfig(self, request, context): - """Modifies the `PushConfig` for a specified subscription. - - This may be used to change a push subscription to a pull one (signified by - an empty `PushConfig`) or vice versa, or change the endpoint URL and other - attributes of a push subscription. Messages will accumulate for delivery - continuously through the call regardless of changes to the `PushConfig`. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetSnapshot(self, request, context): - """Gets the configuration details of a snapshot. Snapshots are used in - Seek - operations, which allow you to manage message acknowledgments in bulk. That - is, you can set the acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListSnapshots(self, request, context): - """Lists the existing snapshots. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateSnapshot(self, request, context): - """Creates a snapshot from the requested subscription. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. 
That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. -

If the snapshot already exists, returns `ALREADY_EXISTS`. - If the requested subscription doesn't exist, returns `NOT_FOUND`. - If the backlog in the subscription is too old -- and the resulting snapshot - would expire in less than 1 hour -- then `FAILED_PRECONDITION` is returned. - See also the `Snapshot.expire_time` field. If the name is not provided in - the request, the server will assign a random - name for this snapshot on the same project as the subscription, conforming - to the - [resource name - format](https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. Note that for - REST API requests, you must specify a name in the request. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateSnapshot(self, request, context): - """Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteSnapshot(self, request, context): - """Removes an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot.

- When the snapshot is deleted, all messages retained in the snapshot - are immediately dropped. After a snapshot is deleted, a new one may be - created with the same name, but the new one has no association with the old - snapshot or its subscription, unless the same subscription is specified. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def Seek(self, request, context): - """Seeks an existing subscription to a point in time or to a given snapshot, - whichever is provided in the request. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing subscription to the state - captured by a snapshot. Note that both the subscription and the snapshot - must be on the same topic. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_SubscriberServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateSubscription": grpc.unary_unary_rpc_method_handler( - servicer.CreateSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "GetSubscription": grpc.unary_unary_rpc_method_handler( - servicer.GetSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "UpdateSubscription": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSubscription, - 
request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - ), - "ListSubscriptions": grpc.unary_unary_rpc_method_handler( - servicer.ListSubscriptions, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.SerializeToString, - ), - "DeleteSubscription": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSubscription, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ModifyAckDeadline": grpc.unary_unary_rpc_method_handler( - servicer.ModifyAckDeadline, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Acknowledge": grpc.unary_unary_rpc_method_handler( - servicer.Acknowledge, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Pull": grpc.unary_unary_rpc_method_handler( - servicer.Pull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.SerializeToString, - ), - "StreamingPull": grpc.stream_stream_rpc_method_handler( - servicer.StreamingPull, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.FromString, - 
response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.SerializeToString, - ), - "ModifyPushConfig": grpc.unary_unary_rpc_method_handler( - servicer.ModifyPushConfig, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "GetSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.GetSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "ListSnapshots": grpc.unary_unary_rpc_method_handler( - servicer.ListSnapshots, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.SerializeToString, - ), - "CreateSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.CreateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "UpdateSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.UpdateSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.SerializeToString, - ), - "DeleteSnapshot": grpc.unary_unary_rpc_method_handler( - servicer.DeleteSnapshot, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "Seek": grpc.unary_unary_rpc_method_handler( - 
servicer.Seek, - request_deserializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.FromString, - response_serializer=google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.pubsub.v1.Subscriber", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) - - -# This class is part of an EXPERIMENTAL API. -class Subscriber(object): - """The service that an application uses to manipulate subscriptions and to - consume messages from a subscription via the `Pull` method or by - establishing a bi-directional stream using the `StreamingPull` method. - """ - - @staticmethod - def CreateSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/CreateSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/GetSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateSubscription( - 
request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/UpdateSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSubscriptionRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Subscription.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListSubscriptions( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ListSubscriptions", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSubscriptionsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteSubscription( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/DeleteSubscription", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSubscriptionRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ModifyAckDeadline( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - 
wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ModifyAckDeadline", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyAckDeadlineRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Acknowledge( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Acknowledge", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.AcknowledgeRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def Pull( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Pull", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.PullResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def StreamingPull( - request_iterator, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.stream_stream( - request_iterator, - target, - "/google.pubsub.v1.Subscriber/StreamingPull", - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.StreamingPullResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ModifyPushConfig( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ModifyPushConfig", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ModifyPushConfigRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def GetSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/GetSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.GetSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def ListSnapshots( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/ListSnapshots", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsRequest.SerializeToString, - 
google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.ListSnapshotsResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def CreateSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/CreateSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.CreateSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def UpdateSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/UpdateSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.UpdateSnapshotRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.Snapshot.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) - - @staticmethod - def DeleteSnapshot( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/DeleteSnapshot", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.DeleteSnapshotRequest.SerializeToString, - google_dot_protobuf_dot_empty__pb2.Empty.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - 
timeout, - metadata, - ) - - @staticmethod - def Seek( - request, - target, - options=(), - channel_credentials=None, - call_credentials=None, - compression=None, - wait_for_ready=None, - timeout=None, - metadata=None, - ): - return grpc.experimental.unary_unary( - request, - target, - "/google.pubsub.v1.Subscriber/Seek", - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekRequest.SerializeToString, - google_dot_cloud_dot_pubsub__v1_dot_proto_dot_pubsub__pb2.SeekResponse.FromString, - options, - channel_credentials, - call_credentials, - compression, - wait_for_ready, - timeout, - metadata, - ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 67c9f2de3a3c..fc4e6ba6d4f7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -21,16 +21,19 @@ import six import google.api_core.exceptions -from google.cloud.pubsub_v1 import types +from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import base +from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) _CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) _SERVER_PUBLISH_MAX_BYTES = 10 * 1000 * 1000 # max accepted size of PublishRequest +_raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() + class Batch(base.Batch): """A batch of messages. @@ -69,10 +72,19 @@ class Batch(base.Batch): at a lower level. commit_when_full (bool): Whether to commit the batch when the batch is full. + commit_retry (Optional[google.api_core.retry.Retry]): Designation of what + errors, if any, should be retried when commiting the batch. If not + provided, a default retry is used. 
""" def __init__( - self, client, topic, settings, batch_done_callback=None, commit_when_full=True + self, + client, + topic, + settings, + batch_done_callback=None, + commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, ): self._client = client self._topic = topic @@ -92,9 +104,11 @@ def __init__( # The initial size is not zero, we need to account for the size overhead # of the PublishRequest message itself. - self._base_request_size = types.PublishRequest(topic=topic).ByteSize() + self._base_request_size = gapic_types.PublishRequest(topic=topic)._pb.ByteSize() self._size = self._base_request_size + self._commit_retry = commit_retry + @staticmethod def make_lock(): """Return a threading lock. @@ -245,9 +259,10 @@ def _commit(self): batch_transport_succeeded = True try: - # Performs retries for errors defined in retry_codes.publish in the - # publisher_client_config.py file. - response = self._client.api.publish(self._topic, self._messages) + # Performs retries for errors defined by the retry configuration. + response = self._client.api.publish( + topic=self._topic, messages=self._messages, retry=self._commit_retry + ) except google.api_core.exceptions.GoogleAPIError as exc: # We failed to publish, even after retries, so set the exception on # all futures and exit. @@ -323,8 +338,12 @@ def publish(self, message): """ # Coerce the type, just in case. - if not isinstance(message, types.PubsubMessage): - message = types.PubsubMessage(**message) + if not isinstance(message, gapic_types.PubsubMessage): + # For performance reasons, the message should be constructed by directly + # using the raw protobuf class, and only then wrapping it into the + # higher-level PubsubMessage class. 
+ vanilla_pb = _raw_proto_pubbsub_message(**message) + message = gapic_types.PubsubMessage.wrap(vanilla_pb) future = None @@ -336,7 +355,9 @@ def publish(self, message): if self.status != base.BatchStatus.ACCEPTING_MESSAGES: return - size_increase = types.PublishRequest(messages=[message]).ByteSize() + size_increase = gapic_types.PublishRequest( + messages=[message] + )._pb.ByteSize() if (self._base_request_size + size_increase) > _SERVER_PUBLISH_MAX_BYTES: err_msg = ( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index fda5c1ee96cd..3cfa809f7ba8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -48,11 +48,14 @@ def unpause(self, message): @staticmethod @abc.abstractmethod - def publish(self, message): + def publish(self, message, retry=None): """ Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message (~.pubsub_v1.types.PubsubMessage): + The Pub/Sub message. + retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the message. 
Returns: A class instance that conforms to Python Standard library's diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index d8ddb3f8f6eb..f7c0be08447b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -17,6 +17,7 @@ import concurrent.futures as futures import threading +from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base from google.cloud.pubsub_v1.publisher._batch import base as batch_base @@ -225,9 +226,13 @@ def unpause(self): raise RuntimeError("Ordering key is not paused.") self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES - def _create_batch(self): + def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): """ Create a new batch using the client's batch class and other stored settings. + + Args: + commit_retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the batch. """ return self._client._batch_class( client=self._client, @@ -235,13 +240,17 @@ def _create_batch(self): settings=self._client.batch_settings, batch_done_callback=self._batch_done_callback, commit_when_full=False, + commit_retry=commit_retry, ) - def publish(self, message): + def publish(self, message, retry=gapic_v1.method.DEFAULT): """ Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message (~.pubsub_v1.types.PubsubMessage): + The Pub/Sub message. + retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the message. 
Returns: A class instance that conforms to Python Standard library's @@ -278,13 +287,13 @@ def publish(self, message): ), "Publish is only allowed in accepting-messages state." if not self._ordered_batches: - new_batch = self._create_batch() + new_batch = self._create_batch(commit_retry=retry) self._ordered_batches.append(new_batch) batch = self._ordered_batches[-1] future = batch.publish(message) while future is None: - batch = self._create_batch() + batch = self._create_batch(commit_retry=retry) self._ordered_batches.append(batch) future = batch.publish(message) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index dff114630b48..d343ed945b1b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from google.api_core import gapic_v1 + from google.cloud.pubsub_v1.publisher._sequencer import base @@ -75,9 +77,13 @@ def unpause(self): """ Not relevant for this class. """ raise NotImplementedError - def _create_batch(self): + def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): """ Create a new batch using the client's batch class and other stored settings. + + Args: + commit_retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the batch. """ return self._client._batch_class( client=self._client, @@ -85,13 +91,17 @@ def _create_batch(self): settings=self._client.batch_settings, batch_done_callback=None, commit_when_full=True, + commit_retry=commit_retry, ) - def publish(self, message): + def publish(self, message, retry=gapic_v1.method.DEFAULT): """ Batch message into existing or new batch. 
Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message (~.pubsub_v1.types.PubsubMessage): + The Pub/Sub message. + retry (Optional[google.api_core.retry.Retry]): + The retry settings to apply when publishing the message. Returns: ~google.api_core.future.Future: An object conforming to @@ -109,7 +119,7 @@ def publish(self, message): raise RuntimeError("Unordered sequencer already stopped.") if not self._current_batch: - newbatch = self._create_batch() + newbatch = self._create_batch(commit_retry=retry) self._current_batch = newbatch batch = self._current_batch @@ -119,7 +129,7 @@ def publish(self, message): future = batch.publish(message) # batch is full, triggering commit_when_full if future is None: - batch = self._create_batch() + batch = self._create_batch(commit_retry=retry) # At this point, we lose track of the old batch, but we don't # care since it's already committed (because it was full.) self._current_batch = batch diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 8dbbea6348a8..ea371190cc31 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -24,19 +24,23 @@ import grpc import six +from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.oauth2 import service_account from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer from google.cloud.pubsub_v1.publisher._sequencer 
import unordered_sequencer from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import types as gapic_types +from google.pubsub_v1.services.publisher import client as publisher_client +from google.pubsub_v1.services.publisher.transports import ( + grpc as publisher_grpc_transport, +) __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ -48,6 +52,8 @@ "from_service_account_json", ) +_raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() + def _set_nested_value(container, value, keys): current = container @@ -75,11 +81,8 @@ class Client(object): arguments to the underlying :class:`~google.cloud.pubsub_v1.gapic.publisher_client.PublisherClient`. Generally you should not need to set additional keyword - arguments. Optionally, publish retry settings can be set via - ``client_config`` where user-provided retry configurations are - applied to default retry settings. And regional endpoints can be - set via ``client_options`` that takes a single key-value pair that - defines the endpoint. + arguments. Regional endpoints can be set via ``client_options`` that + takes a single key-value pair that defines the endpoint. Example: @@ -103,19 +106,6 @@ class Client(object): ), ), - # Optional - client_config = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_params": { - "messaging": { - 'total_timeout_millis': 650000, # default: 600000 - } - } - } - } - }, - # Optional client_options = { "api_endpoint": REGIONAL_ENDPOINT @@ -173,22 +163,6 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): # For a transient failure, retry publishing the message infinitely. self.publisher_options = types.PublisherOptions(*publisher_options) self._enable_message_ordering = self.publisher_options[0] - if self._enable_message_ordering: - # Set retry timeout to "infinite" when message ordering is enabled. - # Note that this then also impacts messages added with an empty ordering - # key. 
- client_config = _set_nested_value( - kwargs.pop("client_config", {}), - 2 ** 32, - [ - "interfaces", - "google.pubsub.v1.Publisher", - "retry_params", - "messaging", - "total_timeout_millis", - ], - ) - kwargs["client_config"] = client_config # Add the metrics headers, and instantiate the underlying GAPIC # client. @@ -292,7 +266,9 @@ def resume_publish(self, topic, ordering_key): else: sequencer.unpause() - def publish(self, topic, data, ordering_key="", **attrs): + def publish( + self, topic, data, ordering_key="", retry=gapic_v1.method.DEFAULT, **attrs + ): """Publish a single message. .. note:: @@ -327,6 +303,9 @@ def publish(self, topic, data, ordering_key="", **attrs): enabled for this client to use this feature. EXPERIMENTAL: This feature is currently available in a closed alpha. Please contact the Cloud Pub/Sub team to use it. + retry (Optional[google.api_core.retry.Retry]): Designation of what + errors, if any, should be retried. If `ordering_key` is specified, + the total retry deadline will be changed to "infinity". attrs (Mapping[str, str]): A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) @@ -369,10 +348,13 @@ def publish(self, topic, data, ordering_key="", **attrs): "be sent as text strings." ) - # Create the Pub/Sub message object. - message = types.PubsubMessage( + # Create the Pub/Sub message object. For performance reasons, the message + # should be constructed by directly using the raw protobuf class, and only + # then wrapping it into the higher-level PubsubMessage class. + vanilla_pb = _raw_proto_pubbsub_message( data=data, ordering_key=ordering_key, attributes=attrs ) + message = gapic_types.PubsubMessage.wrap(vanilla_pb) # Messages should go through flow control to prevent excessive # queuing on the client side (depending on the settings). 
@@ -390,10 +372,19 @@ def on_publish_done(future): if self._is_stopped: raise RuntimeError("Cannot publish on a stopped publisher.") - sequencer = self._get_or_create_sequencer(topic, ordering_key) + # Set retry timeout to "infinite" when message ordering is enabled. + # Note that this then also impacts messages added with an empty + # ordering key. + if self._enable_message_ordering: + if retry is gapic_v1.method.DEFAULT: + # use the default retry for the publish GRPC method as a base + transport = self.api._transport + retry = transport._wrapped_methods[transport.publish]._retry + retry = retry.with_deadline(2.0 ** 32) # Delegate the publishing to the sequencer. - future = sequencer.publish(message) + sequencer = self._get_or_create_sequencer(topic, ordering_key) + future = sequencer.publish(message, retry=retry) future.add_done_callback(on_publish_done) # Create a timer thread if necessary to enforce the batching diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py index c10fadcef9f0..300e273aabbc 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -88,7 +88,7 @@ def add(self, message): with self._operational_lock: if not self._would_overflow(message): self._message_count += 1 - self._total_bytes += message.ByteSize() + self._total_bytes += message._pb.ByteSize() return # Adding a message would overflow, react. @@ -101,7 +101,7 @@ def add(self, message): # load if we accepted the message. 
load_info = self._load_info( message_count=self._message_count + 1, - total_bytes=self._total_bytes + message.ByteSize(), + total_bytes=self._total_bytes + message._pb.ByteSize(), ) error_msg = "Flow control limits would be exceeded - {}.".format( load_info @@ -116,11 +116,11 @@ def add(self, message): # Sanity check - if a message exceeds total flow control limits all # by itself, it would block forever, thus raise error. if ( - message.ByteSize() > self._settings.byte_limit + message._pb.ByteSize() > self._settings.byte_limit or self._settings.message_limit < 1 ): load_info = self._load_info( - message_count=1, total_bytes=message.ByteSize() + message_count=1, total_bytes=message._pb.ByteSize() ) error_msg = ( "Total flow control limits too low for the message, " @@ -134,7 +134,7 @@ def add(self, message): if current_thread not in self._byte_reservations: self._waiting.append(current_thread) self._byte_reservations[current_thread] = _QuantityReservation( - reserved=0, needed=message.ByteSize() + reserved=0, needed=message._pb.ByteSize() ) _LOGGER.debug( @@ -151,7 +151,7 @@ def add(self, message): # Message accepted, increase the load and remove thread stats. self._message_count += 1 - self._total_bytes += message.ByteSize() + self._total_bytes += message._pb.ByteSize() self._reserved_bytes -= self._byte_reservations[current_thread].reserved del self._byte_reservations[current_thread] self._waiting.remove(current_thread) @@ -169,7 +169,7 @@ def release(self, message): with self._operational_lock: # Releasing a message decreases the load. 
self._message_count -= 1 - self._total_bytes -= message.ByteSize() + self._total_bytes -= message._pb.ByteSize() if self._message_count < 0 or self._total_bytes < 0: warnings.warn( @@ -252,7 +252,7 @@ def _would_overflow(self, message): else: enough_reserved = False - bytes_taken = self._total_bytes + self._reserved_bytes + message.ByteSize() + bytes_taken = self._total_bytes + self._reserved_bytes + message._pb.ByteSize() size_overflow = bytes_taken > self._settings.byte_limit and not enough_reserved msg_count_overflow = self._message_count + 1 > self._settings.message_limit diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index dd324fe21aa4..7a89508446f8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -21,9 +21,9 @@ import math import threading -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) @@ -140,7 +140,7 @@ def ack(self, items): total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) for _ in range(total_chunks): - request = types.StreamingPullRequest( + request = gapic_types.StreamingPullRequest( ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE) ) self._manager.send(request) @@ -181,7 +181,7 @@ def modify_ack_deadline(self, items): total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) for _ in range(total_chunks): - request = types.StreamingPullRequest( + request = gapic_types.StreamingPullRequest( modify_deadline_ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE), modify_deadline_seconds=itertools.islice(seconds, 
_ACK_IDS_BATCH_SIZE), ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 4e3f2493362e..7476e887bcac 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -34,6 +34,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import requests import google.cloud.pubsub_v1.subscriber.message import google.cloud.pubsub_v1.subscriber.scheduler +from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" @@ -369,7 +370,7 @@ def _send_unary_request(self, request): stream. Args: - request (types.StreamingPullRequest): The stream request to be + request (gapic_types.StreamingPullRequest): The stream request to be mapped into unary requests. """ if request.ack_ids: @@ -430,7 +431,7 @@ def heartbeat(self): ``self._UNARY_REQUESTS`` is set or not. """ if self._rpc is not None and self._rpc.is_active: - self._rpc.send(types.StreamingPullRequest()) + self._rpc.send(gapic_types.StreamingPullRequest()) def open(self, callback, on_callback_error): """Begin consuming messages. @@ -555,7 +556,7 @@ def _get_initial_request(self, stream_ack_deadline_seconds): The default message acknowledge deadline for the stream. Returns: - google.cloud.pubsub_v1.types.StreamingPullRequest: A request + google.pubsub_v1.types.StreamingPullRequest: A request suitable for being the first request on the stream (and not suitable for any other purpose). """ @@ -569,7 +570,7 @@ def _get_initial_request(self, stream_ack_deadline_seconds): lease_ids = [] # Put the request together. 
- request = types.StreamingPullRequest( + request = gapic_types.StreamingPullRequest( modify_deadline_ack_ids=list(lease_ids), modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), stream_ack_deadline_seconds=stream_ack_deadline_seconds, @@ -601,9 +602,13 @@ def _on_response(self, response): ) return + # IMPORTANT: Circumvent the wrapper class and operate on the raw underlying + # protobuf message to significantly gain on attribute access performance. + received_messages = response._pb.received_messages + _LOGGER.debug( "Processing %s received message(s), currently on hold %s (bytes %s).", - len(response.received_messages), + len(received_messages), self._messages_on_hold.size, self._on_hold_bytes, ) @@ -613,12 +618,12 @@ def _on_response(self, response): # received them. items = [ requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) - for message in response.received_messages + for message in received_messages ] self._dispatcher.modify_ack_deadline(items) with self._pause_resume_lock: - for received_message in response.received_messages: + for received_message in received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( received_message.message, received_message.ack_id, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 00c8f2498893..98d6d75c7baf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -25,10 +25,12 @@ from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client -from google.cloud.pubsub_v1.gapic.transports import subscriber_grpc_transport from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from 
google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1.services.subscriber.transports import ( + grpc as subscriber_grpc_transport, +) __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ -250,7 +252,7 @@ def close(self): This method is idempotent. """ - self.api.transport.channel.close() + self.api._transport.grpc_channel.close() def __enter__(self): return self diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 864d697e0375..c08e0a60563e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -14,12 +14,12 @@ from __future__ import absolute_import -import datetime +import datetime as dt import json import math +import pytz import time -from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -81,7 +81,9 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): Args: message (~.pubsub_v1.types.PubsubMessage): The message received - from Pub/Sub. + from Pub/Sub. For performance reasons it should be the the raw + protobuf message wrapped by the ``PubsubMessage`` class obtained + through the message's ``.pb()`` method. ack_id (str): The ack_id received from Pub/Sub. delivery_attempt (int): The delivery attempt counter received from Pub/Sub if a DeadLetterPolicy is set on the subscription, @@ -101,6 +103,18 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): # the default lease deadline. self._received_timestamp = time.time() + # Store the message attributes directly to speed up attribute access, i.e. + # to avoid two lookups if self._message. pattern was used in + # properties. 
+ self._attributes = message.attributes + self._data = message.data + self._publish_time = dt.datetime.fromtimestamp( + message.publish_time.seconds + message.publish_time.nanos / 1e9, + tz=pytz.UTC, + ) + self._ordering_key = message.ordering_key + self._size = message.ByteSize() + def __repr__(self): # Get an abbreviated version of the data. abbv_data = self._message.data @@ -132,7 +146,7 @@ def attributes(self): .ScalarMapContainer: The message's attributes. This is a ``dict``-like object provided by ``google.protobuf``. """ - return self._message.attributes + return self._attributes @property def data(self): @@ -142,7 +156,7 @@ def data(self): bytes: The message data. This is always a bytestring; if you want a text string, call :meth:`bytes.decode`. """ - return self._message.data + return self._data @property def publish_time(self): @@ -151,21 +165,17 @@ def publish_time(self): Returns: datetime: The date and time that the message was published. """ - timestamp = self._message.publish_time - delta = datetime.timedelta( - seconds=timestamp.seconds, microseconds=timestamp.nanos // 1000 - ) - return datetime_helpers._UTC_EPOCH + delta + return self._publish_time @property def ordering_key(self): """str: the ordering key used to publish the message.""" - return self._message.ordering_key + return self._ordering_key @property def size(self): """Return the size of the underlying message, in bytes.""" - return self._message.ByteSize() + return self._size @property def ack_id(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index b52b3ea60049..b875f3cd25ab 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -16,8 +16,11 @@ import collections import enum +import inspect import sys +import proto + from google.api import http_pb2 from google.iam.v1 import iam_policy_pb2 from 
google.iam.v1 import policy_pb2 @@ -29,7 +32,8 @@ from google.protobuf import timestamp_pb2 from google.api_core.protobuf_helpers import get_messages -from google.cloud.pubsub_v1.proto import pubsub_pb2 + +from google.pubsub_v1.types import pubsub as pubsub_gapic_types # Define the default values for batching. @@ -45,23 +49,21 @@ 0.01, # max_latency: 10 ms 100, # max_messages: 100 ) - -if sys.version_info >= (3, 5): - BatchSettings.__doc__ = "The settings for batch publishing the messages." - BatchSettings.max_bytes.__doc__ = ( - "The maximum total size of the messages to collect before automatically " - "publishing the batch, including any byte size overhead of the publish " - "request itself. The maximum value is bound by the server-side limit of " - "10_000_000 bytes." - ) - BatchSettings.max_latency.__doc__ = ( - "The maximum number of seconds to wait for additional messages before " - "automatically publishing the batch." - ) - BatchSettings.max_messages.__doc__ = ( - "The maximum number of messages to collect before automatically " - "publishing the batch." - ) +BatchSettings.__doc__ = "The settings for batch publishing the messages." +BatchSettings.max_bytes.__doc__ = ( + "The maximum total size of the messages to collect before automatically " + "publishing the batch, including any byte size overhead of the publish " + "request itself. The maximum value is bound by the server-side limit of " + "10_000_000 bytes." +) +BatchSettings.max_latency.__doc__ = ( + "The maximum number of seconds to wait for additional messages before " + "automatically publishing the batch." +) +BatchSettings.max_messages.__doc__ = ( + "The maximum number of messages to collect before automatically " + "publishing the batch." 
+) class LimitExceededBehavior(str, enum.Enum): @@ -80,20 +82,16 @@ class LimitExceededBehavior(str, enum.Enum): 10 * BatchSettings.__new__.__defaults__[0], # byte limit LimitExceededBehavior.IGNORE, # desired behavior ) - -if sys.version_info >= (3, 5): - PublishFlowControl.__doc__ = ( - "The client flow control settings for message publishing." - ) - PublishFlowControl.message_limit.__doc__ = ( - "The maximum number of messages awaiting to be published." - ) - PublishFlowControl.byte_limit.__doc__ = ( - "The maximum total size of messages awaiting to be published." - ) - PublishFlowControl.limit_exceeded_behavior.__doc__ = ( - "The action to take when publish flow control limits are exceeded." - ) +PublishFlowControl.__doc__ = "The client flow control settings for message publishing." +PublishFlowControl.message_limit.__doc__ = ( + "The maximum number of messages awaiting to be published." +) +PublishFlowControl.byte_limit.__doc__ = ( + "The maximum total size of messages awaiting to be published." +) +PublishFlowControl.limit_exceeded_behavior.__doc__ = ( + "The action to take when publish flow control limits are exceeded." +) # Define the default publisher options. # @@ -106,20 +104,17 @@ class LimitExceededBehavior(str, enum.Enum): False, # enable_message_ordering: False PublishFlowControl(), # default flow control settings ) - -if sys.version_info >= (3, 5): - PublisherOptions.__doc__ = "The options for the publisher client." - PublisherOptions.enable_message_ordering.__doc__ = ( - "Whether to order messages in a batch by a supplied ordering key." - "EXPERIMENTAL: Message ordering is an alpha feature that requires " - "special permissions to use. Please contact the Cloud Pub/Sub team for " - "more information." - ) - PublisherOptions.flow_control.__doc__ = ( - "Flow control settings for message publishing by the client. By default " - "the publisher client does not do any throttling." - ) - +PublisherOptions.__doc__ = "The options for the publisher client." 
+PublisherOptions.enable_message_ordering.__doc__ = ( + "Whether to order messages in a batch by a supplied ordering key." + "EXPERIMENTAL: Message ordering is an alpha feature that requires " + "special permissions to use. Please contact the Cloud Pub/Sub team for " + "more information." +) +PublisherOptions.flow_control.__doc__ = ( + "Flow control settings for message publishing by the client. By default " + "the publisher client does not do any throttling." +) # Define the type class and default values for flow control settings. # @@ -141,29 +136,50 @@ class LimitExceededBehavior(str, enum.Enum): 1 * 60 * 60, # max_lease_duration: 1 hour. 0, # max_duration_per_lease_extension: disabled ) +FlowControl.__doc__ = ( + "The settings for controlling the rate at which messages are pulled " + "with an asynchronous subscription." +) +FlowControl.max_bytes.__doc__ = ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." +) +FlowControl.max_messages.__doc__ = ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." +) +FlowControl.max_lease_duration.__doc__ = ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." +) +FlowControl.max_duration_per_lease_extension.__doc__ = ( + "The max amount of time in seconds for a single lease extension attempt. " + "Bounds the delay before a message redelivery if the subscriber " + "fails to extend the deadline." +) + + +# The current api core helper does not find new proto messages of type proto.Message, +# thus we need our own helper. Adjusted from +# https://github.com/googleapis/python-api-core/blob/8595f620e7d8295b6a379d6fd7979af3bef717e2/google/api_core/protobuf_helpers.py#L101-L118 +def _get_protobuf_messages(module): + """Discover all protobuf Message classes in a given import module. 
-if sys.version_info >= (3, 5): - FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." - ) - FlowControl.max_bytes.__doc__ = ( - "The maximum total size of received - but not yet processed - messages " - "before pausing the message stream." - ) - FlowControl.max_messages.__doc__ = ( - "The maximum number of received - but not yet processed - messages before " - "pausing the message stream." - ) - FlowControl.max_lease_duration.__doc__ = ( - "The maximum amount of time in seconds to hold a lease on a message " - "before dropping it from the lease management." - ) - FlowControl.max_duration_per_lease_extension.__doc__ = ( - "The max amount of time in seconds for a single lease extension attempt. " - "Bounds the delay before a message redelivery if the subscriber " - "fails to extend the deadline." - ) + Args: + module (module): A Python module; :func:`dir` will be run against this + module to find Message subclasses. + + Returns: + dict[str, proto.Message]: A dictionary with the + Message class names as keys, and the Message subclasses themselves + as values. 
+ """ + answer = collections.OrderedDict() + for name in dir(module): + candidate = getattr(module, name) + if inspect.isclass(candidate) and issubclass(candidate, proto.Message): + answer[name] = candidate + return answer _shared_modules = [ @@ -178,11 +194,15 @@ class LimitExceededBehavior(str, enum.Enum): timestamp_pb2, ] -_local_modules = [pubsub_pb2] - - -names = ["BatchSettings", "FlowControl"] +_local_modules = [pubsub_gapic_types] +names = [ + "BatchSettings", + "LimitExceededBehavior", + "PublishFlowControl", + "PublisherOptions", + "FlowControl", +] for module in _shared_modules: for name, message in get_messages(module).items(): @@ -190,7 +210,7 @@ class LimitExceededBehavior(str, enum.Enum): names.append(name) for module in _local_modules: - for name, message in get_messages(module).items(): + for name, message in _get_protobuf_messages(module).items(): message.__module__ = "google.cloud.pubsub_v1.types" setattr(sys.modules[__name__], name, message) names.append(name) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py new file mode 100644 index 000000000000..f441543ccd16 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient +from google.pubsub_v1.services.publisher.client import PublisherClient +from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient +from google.pubsub_v1.services.subscriber.client import SubscriberClient +from google.pubsub_v1.types.pubsub import AcknowledgeRequest +from google.pubsub_v1.types.pubsub import CreateSnapshotRequest +from google.pubsub_v1.types.pubsub import DeadLetterPolicy +from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest +from google.pubsub_v1.types.pubsub import DeleteSubscriptionRequest +from google.pubsub_v1.types.pubsub import DeleteTopicRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionResponse +from google.pubsub_v1.types.pubsub import ExpirationPolicy +from google.pubsub_v1.types.pubsub import GetSnapshotRequest +from google.pubsub_v1.types.pubsub import GetSubscriptionRequest +from google.pubsub_v1.types.pubsub import GetTopicRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListSubscriptionsResponse +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse +from google.pubsub_v1.types.pubsub import ListTopicsRequest +from google.pubsub_v1.types.pubsub import ListTopicsResponse +from google.pubsub_v1.types.pubsub import MessageStoragePolicy +from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest +from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest +from google.pubsub_v1.types.pubsub 
import PublishRequest +from google.pubsub_v1.types.pubsub import PublishResponse +from google.pubsub_v1.types.pubsub import PubsubMessage +from google.pubsub_v1.types.pubsub import PullRequest +from google.pubsub_v1.types.pubsub import PullResponse +from google.pubsub_v1.types.pubsub import PushConfig +from google.pubsub_v1.types.pubsub import ReceivedMessage +from google.pubsub_v1.types.pubsub import RetryPolicy +from google.pubsub_v1.types.pubsub import SeekRequest +from google.pubsub_v1.types.pubsub import SeekResponse +from google.pubsub_v1.types.pubsub import Snapshot +from google.pubsub_v1.types.pubsub import StreamingPullRequest +from google.pubsub_v1.types.pubsub import StreamingPullResponse +from google.pubsub_v1.types.pubsub import Subscription +from google.pubsub_v1.types.pubsub import Topic +from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest +from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest +from google.pubsub_v1.types.pubsub import UpdateTopicRequest + +__all__ = ( + "AcknowledgeRequest", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "ExpirationPolicy", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "MessageStoragePolicy", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PublishRequest", + "PublishResponse", + "PublisherAsyncClient", + "PublisherClient", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + 
"StreamingPullResponse", + "SubscriberAsyncClient", + "SubscriberClient", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", +) diff --git a/packages/google-cloud-pubsub/google/pubsub/py.typed b/packages/google-cloud-pubsub/google/pubsub/py.typed new file mode 100644 index 000000000000..9b0e3743353f --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-pubsub package uses inline types. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py new file mode 100644 index 000000000000..5e7a6cc72449 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -0,0 +1,111 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.publisher import PublisherClient +from .services.subscriber import SubscriberClient +from .types.pubsub import AcknowledgeRequest +from .types.pubsub import CreateSnapshotRequest +from .types.pubsub import DeadLetterPolicy +from .types.pubsub import DeleteSnapshotRequest +from .types.pubsub import DeleteSubscriptionRequest +from .types.pubsub import DeleteTopicRequest +from .types.pubsub import DetachSubscriptionRequest +from .types.pubsub import DetachSubscriptionResponse +from .types.pubsub import ExpirationPolicy +from .types.pubsub import GetSnapshotRequest +from .types.pubsub import GetSubscriptionRequest +from .types.pubsub import GetTopicRequest +from .types.pubsub import ListSnapshotsRequest +from .types.pubsub import ListSnapshotsResponse +from .types.pubsub import ListSubscriptionsRequest +from .types.pubsub import ListSubscriptionsResponse +from .types.pubsub import ListTopicSnapshotsRequest +from .types.pubsub import ListTopicSnapshotsResponse +from .types.pubsub import ListTopicSubscriptionsRequest +from .types.pubsub import ListTopicSubscriptionsResponse +from .types.pubsub import ListTopicsRequest +from .types.pubsub import ListTopicsResponse +from .types.pubsub import MessageStoragePolicy +from .types.pubsub import ModifyAckDeadlineRequest +from .types.pubsub import ModifyPushConfigRequest +from .types.pubsub import PublishRequest +from .types.pubsub import PublishResponse +from .types.pubsub import PubsubMessage +from .types.pubsub import PullRequest +from .types.pubsub import PullResponse +from .types.pubsub import PushConfig +from .types.pubsub import ReceivedMessage +from .types.pubsub import RetryPolicy +from .types.pubsub import SeekRequest +from .types.pubsub import SeekResponse +from .types.pubsub import Snapshot +from .types.pubsub import StreamingPullRequest +from .types.pubsub import StreamingPullResponse +from .types.pubsub import Subscription +from .types.pubsub import Topic +from .types.pubsub import 
UpdateSnapshotRequest +from .types.pubsub import UpdateSubscriptionRequest +from .types.pubsub import UpdateTopicRequest + + +__all__ = ( + "AcknowledgeRequest", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "ExpirationPolicy", + "GetSnapshotRequest", + "GetSubscriptionRequest", + "GetTopicRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "MessageStoragePolicy", + "ModifyAckDeadlineRequest", + "ModifyPushConfigRequest", + "PublishRequest", + "PublishResponse", + "PubsubMessage", + "PullRequest", + "PullResponse", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SeekRequest", + "SeekResponse", + "Snapshot", + "StreamingPullRequest", + "StreamingPullResponse", + "SubscriberClient", + "Subscription", + "Topic", + "UpdateSnapshotRequest", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", + "PublisherClient", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/py.typed b/packages/google-cloud-pubsub/google/pubsub_v1/py.typed new file mode 100644 index 000000000000..9b0e3743353f --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-pubsub package uses inline types. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py new file mode 100644 index 000000000000..42ffdf2bc43d --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py similarity index 57% rename from packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py rename to packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py index 2ccebf07f0de..970a1a3b408b 100644 --- a/packages/google-cloud-pubsub/tests/system/gapic/v1/test_system_publisher_v1.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py @@ -1,30 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -import os -import time - -from google.cloud import pubsub_v1 -from google.cloud.pubsub_v1.proto import pubsub_pb2 - - -class TestSystemPublisher(object): - def test_list_topics(self): - project_id = os.environ["PROJECT_ID"] +from .client import PublisherClient +from .async_client import PublisherAsyncClient - client = pubsub_v1.PublisherClient() - project = client.project_path(project_id) - response = client.list_topics(project) +__all__ = ( + "PublisherClient", + "PublisherAsyncClient", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py new file mode 100644 index 000000000000..6338887a7be0 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -0,0 +1,1099 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport +from .client import PublisherClient + + +class PublisherAsyncClient: + """The service that an application uses to manipulate topics, + and to send messages to a topic. + """ + + _client: PublisherClient + + DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + + topic_path = staticmethod(PublisherClient.topic_path) + + from_service_account_file = PublisherClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(PublisherClient).get_transport_class, type(PublisherClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, PublisherTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the publisher client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PublisherTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = PublisherClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_topic( + self, + request: pubsub.Topic = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. See the `resource + name + rules `__. + + Args: + request (:class:`~.pubsub.Topic`): + The request object. A topic resource. + name (:class:`str`): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. 
``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.Topic(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_topic( + self, + request: pubsub.UpdateTopicRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Args: + request (:class:`~.pubsub.UpdateTopicRequest`): + The request object. Request for the UpdateTopic method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + + request = pubsub.UpdateTopicRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def publish( + self, + request: pubsub.PublishRequest = None, + *, + topic: str = None, + messages: Sequence[pubsub.PubsubMessage] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Args: + request (:class:`~.pubsub.PublishRequest`): + The request object. Request for the Publish method. + topic (:class:`str`): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (:class:`Sequence[~.pubsub.PubsubMessage]`): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PublishResponse: + Response for the ``Publish`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic, messages]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.PublishRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if topic is not None: + request.topic = topic + if messages is not None: + request.messages = messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.publish, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.Cancelled, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + exceptions.InternalServerError, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_topic( + self, + request: pubsub.GetTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + Args: + request (:class:`~.pubsub.GetTopicRequest`): + The request object. Request for the GetTopic method. + topic (:class:`str`): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. 
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_topics( + self, + request: pubsub.ListTopicsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsAsyncPager: + r"""Lists matching topics. + + Args: + request (:class:`~.pubsub.ListTopicsRequest`): + The request object. Request for the `ListTopics` method. + project (:class:`str`): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicsAsyncPager: + Response for the ``ListTopics`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListTopicsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_subscriptions( + self, + request: pubsub.ListTopicSubscriptionsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSubscriptionsAsyncPager: + r"""Lists the names of the attached subscriptions on this + topic. + + Args: + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The request object. Request for the + `ListTopicSubscriptions` method. + topic (:class:`str`): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSubscriptionsAsyncPager: + Response for the ``ListTopicSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListTopicSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topic_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSubscriptionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_snapshots( + self, + request: pubsub.ListTopicSnapshotsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSnapshotsAsyncPager: + r"""Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Args: + request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The request object. Request for the `ListTopicSnapshots` + method. + topic (:class:`str`): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. 
+ This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSnapshotsAsyncPager: + Response for the ``ListTopicSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListTopicSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topic_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSnapshotsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_topic( + self, + request: pubsub.DeleteTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Args: + request (:class:`~.pubsub.DeleteTopicRequest`): + The request object. Request for the `DeleteTopic` + method. + topic (:class:`str`): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([topic]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = pubsub.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def detach_subscription( + self, + request: pubsub.DetachSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Args: + request (:class:`~.pubsub.DetachSubscriptionRequest`): + The request object. Request for the DetachSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. 
+ + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.detach_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). 
+ A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PublisherAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py new file mode 100644 index 000000000000..22225b83fbe2 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -0,0 +1,1205 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PublisherGrpcTransport +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport + + +class PublisherClientMeta(type): + """Metaclass for the Publisher client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] + _transport_registry["grpc"] = PublisherGrpcTransport + _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[PublisherTransport]: + """Return an appropriate transport class. + + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class PublisherClient(metaclass=PublisherClientMeta):
+    """The service that an application uses to manipulate topics,
+    and to send messages to a topic.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    # The scopes needed to make gRPC calls to all of the methods defined in
+    # this service
+    _DEFAULT_SCOPES = (
+        "https://www.googleapis.com/auth/cloud-platform",
+        "https://www.googleapis.com/auth/pubsub",
+    )
+
+    SERVICE_ADDRESS = "pubsub.googleapis.com:443"
+    """The default address of the service."""
+
+    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @staticmethod
+    def topic_path(project: str, topic: str,) -> str:
+        """Return a fully-qualified topic string."""
+        return "projects/{project}/topics/{topic}".format(project=project, topic=topic,)
+
+    @staticmethod
+    def parse_topic_path(path: str) -> Dict[str, str]:
+        """Parse a topic path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/topics/(?P<topic>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: credentials.Credentials = None,
+        transport: Union[str, PublisherTransport] = None,
+        client_options: ClientOptions = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the publisher client.
+
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.PublisherTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (ClientOptions): Custom options for the client. It
+                won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client.
GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PublisherTransport): + # transport is a PublisherTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_topic( + self, + request: pubsub.Topic = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. See the `resource + name + rules `__. + + + Args: + request (:class:`~.pubsub.Topic`): + The request object. A topic resource. + name (:class:`str`): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.Topic. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.Topic): + request = pubsub.Topic(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_topic( + self, + request: pubsub.UpdateTopicRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic. Note that certain + properties of a topic are not modifiable. 
+ + + Args: + request (:class:`~.pubsub.UpdateTopicRequest`): + The request object. Request for the UpdateTopic method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateTopicRequest): + request = pubsub.UpdateTopicRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("topic.name", request.topic.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def publish( + self, + request: pubsub.PublishRequest = None, + *, + topic: str = None, + messages: Sequence[pubsub.PubsubMessage] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + + Args: + request (:class:`~.pubsub.PublishRequest`): + The request object. Request for the Publish method. + topic (:class:`str`): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. 
+ This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (:class:`Sequence[~.pubsub.PubsubMessage]`): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PublishResponse: + Response for the ``Publish`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, messages]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.PublishRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.PublishRequest): + request = pubsub.PublishRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + if messages is not None: + request.messages = messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.publish] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_topic( + self, + request: pubsub.GetTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + + Args: + request (:class:`~.pubsub.GetTopicRequest`): + The request object. Request for the GetTopic method. + topic (:class:`str`): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetTopicRequest): + request = pubsub.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_topics( + self, + request: pubsub.ListTopicsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsPager: + r"""Lists matching topics. + + + Args: + request (:class:`~.pubsub.ListTopicsRequest`): + The request object. Request for the `ListTopics` method. + project (:class:`str`): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicsPager: + Response for the ``ListTopics`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicsRequest): + request = pubsub.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topic_subscriptions( + self, + request: pubsub.ListTopicSubscriptionsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSubscriptionsPager: + r"""Lists the names of the attached subscriptions on this + topic. + + + Args: + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The request object. Request for the + `ListTopicSubscriptions` method. + topic (:class:`str`): + Required. 
The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSubscriptionsPager: + Response for the ``ListTopicSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): + request = pubsub.ListTopicSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSubscriptionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topic_snapshots( + self, + request: pubsub.ListTopicSnapshotsRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSnapshotsPager: + r"""Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + + Args: + request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The request object. Request for the `ListTopicSnapshots` + method. + topic (:class:`str`): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListTopicSnapshotsPager: + Response for the ``ListTopicSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicSnapshotsRequest): + request = pubsub.ListTopicSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSnapshotsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_topic( + self, + request: pubsub.DeleteTopicRequest = None, + *, + topic: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. 
Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + + Args: + request (:class:`~.pubsub.DeleteTopicRequest`): + The request object. Request for the `DeleteTopic` + method. + topic (:class:`str`): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteTopicRequest): + request = pubsub.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def detach_subscription( + self, + request: pubsub.DetachSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + + Args: + request (:class:`~.pubsub.DetachSubscriptionRequest`): + The request object. Request for the DetachSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DetachSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DetachSubscriptionRequest): + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.detach_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("PublisherClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py new file mode 100644 index 000000000000..52242ff17796 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -0,0 +1,404 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.pubsub_v1.types import pubsub + + +class ListTopicsPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicsResponse], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[pubsub.Topic]: + for page in self.pages: + yield from page.topics + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicsAsyncPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicsResponse]], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[pubsub.Topic]: + async def async_generator(): + async for page in self.pages: + for response in page.topics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicSubscriptionsResponse], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsAsyncPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. 
+ + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicSubscriptionsResponse]], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListTopicSnapshotsResponse], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[str]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsAsyncPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListTopicSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListTopicSnapshotsResponse]], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListTopicSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[str]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py new file mode 100644 index 000000000000..bf7dc8f2b26f --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import PublisherTransport +from .grpc import PublisherGrpcTransport +from .grpc_asyncio import PublisherGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] +_transport_registry["grpc"] = PublisherGrpcTransport +_transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport + + +__all__ = ( + "PublisherTransport", + "PublisherGrpcTransport", + "PublisherGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py new file mode 100644 index 000000000000..9a99a899e43f --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -0,0 +1,360 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class PublisherTransport(abc.ABC): + """Abstract transport class for Publisher.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
+ scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages(client_info) + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_topic: gapic_v1.method.wrap_method( + self.create_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: gapic_v1.method.wrap_method( + self.update_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.publish: gapic_v1.method.wrap_method( + self.publish, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.Cancelled, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + exceptions.InternalServerError, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: gapic_v1.method.wrap_method( + self.get_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: gapic_v1.method.wrap_method( + self.list_topics, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_subscriptions: gapic_v1.method.wrap_method( + self.list_topic_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + 
default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_snapshots: gapic_v1.method.wrap_method( + self.list_topic_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: gapic_v1.method.wrap_method( + self.delete_topic, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.detach_subscription: gapic_v1.method.wrap_method( + self.detach_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def create_topic( + self, + ) -> typing.Callable[ + [pubsub.Topic], typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]] + ]: + raise NotImplementedError() + + @property + def update_topic( + self, + ) -> typing.Callable[ + [pubsub.UpdateTopicRequest], + typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]], + ]: + raise NotImplementedError() + + @property + def publish( + self, + ) -> typing.Callable[ + [pubsub.PublishRequest], + typing.Union[pubsub.PublishResponse, typing.Awaitable[pubsub.PublishResponse]], + ]: + raise NotImplementedError() + + @property + def get_topic( + self, + ) -> typing.Callable[ + [pubsub.GetTopicRequest], + typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]], + ]: + raise NotImplementedError() + + @property + def list_topics( + self, + ) -> typing.Callable[ + [pubsub.ListTopicsRequest], + typing.Union[ + pubsub.ListTopicsResponse, typing.Awaitable[pubsub.ListTopicsResponse] + ], + ]: + raise NotImplementedError() + + @property + def 
list_topic_subscriptions( + self, + ) -> typing.Callable[ + [pubsub.ListTopicSubscriptionsRequest], + typing.Union[ + pubsub.ListTopicSubscriptionsResponse, + typing.Awaitable[pubsub.ListTopicSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_topic_snapshots( + self, + ) -> typing.Callable[ + [pubsub.ListTopicSnapshotsRequest], + typing.Union[ + pubsub.ListTopicSnapshotsResponse, + typing.Awaitable[pubsub.ListTopicSnapshotsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_topic( + self, + ) -> typing.Callable[ + [pubsub.DeleteTopicRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def detach_subscription( + self, + ) -> typing.Callable[ + [pubsub.DetachSubscriptionRequest], + typing.Union[ + pubsub.DetachSubscriptionResponse, + typing.Awaitable[pubsub.DetachSubscriptionResponse], + ], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("PublisherTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py new file mode 100644 index 000000000000..ad5f95684e37 --- /dev/null +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import PublisherTransport, DEFAULT_CLIENT_INFO + + +class PublisherGrpcTransport(PublisherTransport): + """gRPC backend transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. 
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+        )
+
+    @classmethod
+    def create_channel(
+        cls,
+        host: str = "pubsub.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: str = None,
+        scopes: Optional[Sequence[str]] = None,
+        quota_project_id: Optional[str] = None,
+        **kwargs,
+    ) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        scopes = scopes or cls.AUTH_SCOPES
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            **kwargs,
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Create the channel designed to connect to this service.
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: + r"""Return a callable for the create topic method over gRPC. + + Creates the given topic with the given name. See the `resource + name + rules `__. + + Returns: + Callable[[~.Topic], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/CreateTopic", + request_serializer=pubsub.Topic.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: + r"""Return a callable for the update topic method over gRPC. + + Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Returns: + Callable[[~.UpdateTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def publish(self) -> Callable[[pubsub.PublishRequest], pubsub.PublishResponse]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + ~.PublishResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "publish" not in self._stubs: + self._stubs["publish"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs["publish"] + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], pubsub.Topic]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + ~.Topic]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/GetTopic", + request_serializer=pubsub.GetTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], pubsub.ListTopicsResponse]: + r"""Return a callable for the list topics method over gRPC. + + Lists matching topics. + + Returns: + Callable[[~.ListTopicsRequest], + ~.ListTopicsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopics", + request_serializer=pubsub.ListTopicsRequest.serialize, + response_deserializer=pubsub.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], pubsub.ListTopicSubscriptionsResponse + ]: + r"""Return a callable for the list topic subscriptions method over gRPC. + + Lists the names of the attached subscriptions on this + topic. + + Returns: + Callable[[~.ListTopicSubscriptionsRequest], + ~.ListTopicSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_topic_subscriptions" not in self._stubs: + self._stubs["list_topic_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, + ) + return self._stubs["list_topic_subscriptions"] + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], pubsub.ListTopicSnapshotsResponse + ]: + r"""Return a callable for the list topic snapshots method over gRPC. + + Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListTopicSnapshotsRequest], + ~.ListTopicSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topic_snapshots" not in self._stubs: + self._stubs["list_topic_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, + response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, + ) + return self._stubs["list_topic_snapshots"] + + @property + def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty.Empty]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. 
After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Returns: + Callable[[~.DeleteTopicRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DeleteTopic", + request_serializer=pubsub.DeleteTopicRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], pubsub.DetachSubscriptionResponse + ]: + r"""Return a callable for the detach subscription method over gRPC. + + Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Returns: + Callable[[~.DetachSubscriptionRequest], + ~.DetachSubscriptionResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "detach_subscription" not in self._stubs: + self._stubs["detach_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DetachSubscription", + request_serializer=pubsub.DetachSubscriptionRequest.serialize, + response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, + ) + return self._stubs["detach_subscription"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PublisherGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py new file mode 100644 index 000000000000..5d70e3d626d7 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -0,0 +1,548 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import PublisherTransport, DEFAULT_CLIENT_INFO +from .grpc import PublisherGrpcTransport + + +class PublisherGrpcAsyncIOTransport(PublisherTransport): + """gRPC AsyncIO backend transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service.
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_topic(self) -> Callable[[pubsub.Topic], Awaitable[pubsub.Topic]]: + r"""Return a callable for the create topic method over gRPC. + + Creates the given topic with the given name. See the `resource + name + rules `__. + + Returns: + Callable[[~.Topic], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_topic" not in self._stubs: + self._stubs["create_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/CreateTopic", + request_serializer=pubsub.Topic.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["create_topic"] + + @property + def update_topic( + self, + ) -> Callable[[pubsub.UpdateTopicRequest], Awaitable[pubsub.Topic]]: + r"""Return a callable for the update topic method over gRPC. + + Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Returns: + Callable[[~.UpdateTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_topic" not in self._stubs: + self._stubs["update_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/UpdateTopic", + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["update_topic"] + + @property + def publish( + self, + ) -> Callable[[pubsub.PublishRequest], Awaitable[pubsub.PublishResponse]]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + Awaitable[~.PublishResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "publish" not in self._stubs: + self._stubs["publish"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/Publish", + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs["publish"] + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], Awaitable[pubsub.Topic]]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_topic" not in self._stubs: + self._stubs["get_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/GetTopic", + request_serializer=pubsub.GetTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs["get_topic"] + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], Awaitable[pubsub.ListTopicsResponse]]: + r"""Return a callable for the list topics method over gRPC. + + Lists matching topics. + + Returns: + Callable[[~.ListTopicsRequest], + Awaitable[~.ListTopicsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_topics" not in self._stubs: + self._stubs["list_topics"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopics", + request_serializer=pubsub.ListTopicsRequest.serialize, + response_deserializer=pubsub.ListTopicsResponse.deserialize, + ) + return self._stubs["list_topics"] + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], + Awaitable[pubsub.ListTopicSubscriptionsResponse], + ]: + r"""Return a callable for the list topic subscriptions method over gRPC. + + Lists the names of the attached subscriptions on this + topic. + + Returns: + Callable[[~.ListTopicSubscriptionsRequest], + Awaitable[~.ListTopicSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topic_subscriptions" not in self._stubs: + self._stubs["list_topic_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSubscriptions", + request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, + ) + return self._stubs["list_topic_subscriptions"] + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], Awaitable[pubsub.ListTopicSnapshotsResponse] + ]: + r"""Return a callable for the list topic snapshots method over gRPC. + + Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ + Returns: + Callable[[~.ListTopicSnapshotsRequest], + Awaitable[~.ListTopicSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_topic_snapshots" not in self._stubs: + self._stubs["list_topic_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/ListTopicSnapshots", + request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, + response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, + ) + return self._stubs["list_topic_snapshots"] + + @property + def delete_topic( + self, + ) -> Callable[[pubsub.DeleteTopicRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Returns: + Callable[[~.DeleteTopicRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_topic" not in self._stubs: + self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DeleteTopic", + request_serializer=pubsub.DeleteTopicRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_topic"] + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], Awaitable[pubsub.DetachSubscriptionResponse] + ]: + r"""Return a callable for the detach subscription method over gRPC. + + Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Returns: + Callable[[~.DetachSubscriptionRequest], + Awaitable[~.DetachSubscriptionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "detach_subscription" not in self._stubs: + self._stubs["detach_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Publisher/DetachSubscription", + request_serializer=pubsub.DetachSubscriptionRequest.serialize, + response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, + ) + return self._stubs["detach_subscription"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. 
+ Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("PublisherGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py new file mode 100644 index 000000000000..361085a5e1ac --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .client import SubscriberClient +from .async_client import SubscriberAsyncClient + +__all__ = ( + "SubscriberClient", + "SubscriberAsyncClient", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py new file mode 100644 index 000000000000..a169e37808bd --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -0,0 +1,1826 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, AsyncIterable, AsyncIterator, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport +from .client import SubscriberClient + + +class SubscriberAsyncClient: + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. 
+ """ + + _client: SubscriberClient + + DEFAULT_ENDPOINT = SubscriberClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT + + snapshot_path = staticmethod(SubscriberClient.snapshot_path) + + subscription_path = staticmethod(SubscriberClient.subscription_path) + + from_service_account_file = SubscriberClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(SubscriberClient).get_transport_class, type(SubscriberClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, SubscriberTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the subscriber client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SubscriberTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. 
+ (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = SubscriberClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_subscription( + self, + request: pubsub.Subscription = None, + *, + name: str = None, + topic: str = None, + push_config: pubsub.PushConfig = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Args: + request (:class:`~.pubsub.Subscription`): + The request object. A subscription resource. + name (:class:`str`): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). 
It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`str`): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + If push delivery is used with this subscription, this + field is used to configure it. An empty ``pushConfig`` + signifies that the subscriber will pull and ack messages + using API methods. + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it + is considered to be outstanding. During that time + period, the message will not be redelivered (on a + best-effort basis). + + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. 
+ + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any( + [name, topic, push_config, ack_deadline_seconds] + ): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.Subscription(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_subscription( + self, + request: pubsub.GetSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + Args: + request (:class:`~.pubsub.GetSubscriptionRequest`): + The request object. Request for the GetSubscription + method. + subscription (:class:`str`): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([subscription]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_subscription( + self, + request: pubsub.UpdateSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Args: + request (:class:`~.pubsub.UpdateSubscriptionRequest`): + The request object. Request for the UpdateSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + + request = pubsub.UpdateSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription.name", request.subscription.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_subscriptions( + self, + request: pubsub.ListSubscriptionsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsAsyncPager: + r"""Lists matching subscriptions. + + Args: + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The request object. Request for the `ListSubscriptions` + method. + project (:class:`str`): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pagers.ListSubscriptionsAsyncPager: + Response for the ``ListSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubscriptionsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_subscription( + self, + request: pubsub.DeleteSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Args: + request (:class:`~.pubsub.DeleteSubscriptionRequest`): + The request object. Request for the DeleteSubscription + method. + subscription (:class:`str`): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def modify_ack_deadline( + self, + request: pubsub.ModifyAckDeadlineRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Args: + request (:class:`~.pubsub.ModifyAckDeadlineRequest`): + The request object. Request for the ModifyAckDeadline + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Required. 
The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify is 600 seconds (10 + minutes). + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription, ack_ids, ack_deadline_seconds]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ModifyAckDeadlineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_ack_deadline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def acknowledge( + self, + request: pubsub.AcknowledgeRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Args: + request (:class:`~.pubsub.AcknowledgeRequest`): + The request object. Request for the Acknowledge method. + subscription (:class:`str`): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. 
+ This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription, ack_ids]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.AcknowledgeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def pull( + self, + request: pubsub.PullRequest = None, + *, + subscription: str = None, + return_immediately: bool = None, + max_messages: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Args: + request (:class:`~.pubsub.PullRequest`): + The request object. Request for the `Pull` method. + subscription (:class:`str`): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (:class:`bool`): + Optional. If this field set to true, the system will + respond immediately even if it there are no messages + available to return in the ``Pull`` response. Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (:class:`int`): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. + This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PullResponse: + Response for the ``Pull`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any( + [subscription, return_immediately, max_messages] + ): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.PullRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def streaming_pull( + self, + requests: AsyncIterator[pubsub.StreamingPullRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> AsyncIterable[pubsub.StreamingPullResponse]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Args: + requests (AsyncIterator[`~.pubsub.StreamingPullRequest`]): + The request object AsyncIterator. Request for the `StreamingPull` + streaming RPC method. This request is used to establish + the initial stream as well as to stream acknowledgements + and ack deadline modifications from the client to the + server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[~.pubsub.StreamingPullResponse]: + Response for the ``StreamingPull`` method. This response + is used to stream messages from the server to the + client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.streaming_pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.InternalServerError, + ), + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def modify_push_config( + self, + request: pubsub.ModifyPushConfigRequest = None, + *, + subscription: str = None, + push_config: pubsub.PushConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Args: + request (:class:`~.pubsub.ModifyPushConfigRequest`): + The request object. Request for the ModifyPushConfig + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + Required. The push configuration for future deliveries. 
+ + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([subscription, push_config]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ModifyPushConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_push_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def get_snapshot( + self, + request: pubsub.GetSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Args: + request (:class:`~.pubsub.GetSnapshotRequest`): + The request object. Request for the GetSnapshot method. + snapshot (:class:`str`): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([snapshot]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = pubsub.GetSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_snapshots( + self, + request: pubsub.ListSnapshotsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSnapshotsAsyncPager: + r"""Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Args: + request (:class:`~.pubsub.ListSnapshotsRequest`): + The request object. Request for the `ListSnapshots` + method. + project (:class:`str`): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSnapshotsAsyncPager: + Response for the ``ListSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([project]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.ListSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListSnapshotsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_snapshot( + self, + request: pubsub.CreateSnapshotRequest = None, + *, + name: str = None, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + Args: + request (:class:`~.pubsub.CreateSnapshotRequest`): + The request object. Request for the `CreateSnapshot` + method. + name (:class:`str`): + Required. User-provided name for this snapshot. If the + name is not provided in the request, the server will + assign a random name for this snapshot on the same + project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name + rules. Format is + ``projects/{project}/snapshots/{snap}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subscription (:class:`str`): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is + guaranteed to retain: (a) The existing backlog on the + subscription. More precisely, this is defined as the + messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, subscription]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.CreateSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_snapshot( + self, + request: pubsub.UpdateSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Args: + request (:class:`~.pubsub.UpdateSnapshotRequest`): + The request object. Request for the UpdateSnapshot + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + + request = pubsub.UpdateSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("snapshot.name", request.snapshot.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_snapshot( + self, + request: pubsub.DeleteSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Args: + request (:class:`~.pubsub.DeleteSnapshotRequest`): + The request object. 
Request for the `DeleteSnapshot` + method. + snapshot (:class:`str`): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([snapshot]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = pubsub.DeleteSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def seek( + self, + request: pubsub.SeekRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Args: + request (:class:`~.pubsub.SeekRequest`): + The request object. Request for the `Seek` method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.SeekResponse: + Response for the ``Seek`` method (this response is + empty). + + """ + # Create or coerce a protobuf request object. + + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.seek, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SubscriberAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py new file mode 100644 index 000000000000..58a7cd1f9290 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -0,0 +1,1910 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Iterable, Iterator, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub + +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SubscriberGrpcTransport +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport + + +class SubscriberClientMeta(type): + """Metaclass for the Subscriber client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] + _transport_registry["grpc"] = SubscriberGrpcTransport + _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[SubscriberTransport]: + """Return an appropriate transport class. + + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. 
+ """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SubscriberClient(metaclass=SubscriberClientMeta): + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + SERVICE_ADDRESS = "pubsub.googleapis.com:443" + """The default address of the service.""" + + DEFAULT_ENDPOINT = "pubsub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def snapshot_path(project: str, snapshot: str,) -> str: + """Return a fully-qualified snapshot string.""" + return "projects/{project}/snapshots/{snapshot}".format( + project=project, snapshot=snapshot, + ) + + @staticmethod + def parse_snapshot_path(path: str) -> Dict[str, str]: + """Parse a snapshot path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path(project: str, subscription: str,) -> str: + """Return a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format( + project=project, subscription=subscription, + ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str, str]: + """Parse a subscription path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, SubscriberTransport] = None, + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the subscriber client. + + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SubscriberTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
 It
                won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint, this is the default value for
                the environment variable) and "auto" (auto switch to the default
                mTLS endpoint if client SSL credentials is present). However,
                the ``api_endpoint`` property takes precedence if provided.
                (2) The ``client_cert_source`` property is used to provide client
                SSL credentials for mutual TLS transport. If not provided, the
                default SSL credentials will be used if present.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # ``ClientOptions`` here is presumably the google.api_core.client_options
        # module (it is used both for ``from_dict`` and as a namespace for the
        # ``ClientOptions`` class below) — confirm against the imports.
        if isinstance(client_options, dict):
            client_options = ClientOptions.from_dict(client_options)
        if client_options is None:
            client_options = ClientOptions.ClientOptions()

        # Resolve the endpoint from the GOOGLE_API_USE_MTLS env var only when
        # the caller did not pin ``api_endpoint`` explicitly.
        if client_options.api_endpoint is None:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
            if use_mtls_env == "never":
                client_options.api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                # "auto" switches to mTLS when a client cert is available,
                # either passed in or discoverable as a default.
                has_client_cert_source = (
                    client_options.client_cert_source is not None
                    or mtls.has_default_client_cert_source()
                )
                client_options.api_endpoint = (
                    self.DEFAULT_MTLS_ENDPOINT
                    if has_client_cert_source
                    else self.DEFAULT_ENDPOINT
                )
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS value.
Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SubscriberTransport): + # transport is a SubscriberTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_subscription( + self, + request: pubsub.Subscription = None, + *, + name: str = None, + topic: str = None, + push_config: pubsub.PushConfig = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + + Args: + request (:class:`~.pubsub.Subscription`): + The request object. A subscription resource. + name (:class:`str`): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`str`): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + If push delivery is used with this subscription, this + field is used to configure it. An empty ``pushConfig`` + signifies that the subscriber will pull and ack messages + using API methods. + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it + is considered to be outstanding. During that time + period, the message will not be redelivered (on a + best-effort basis). 
+ + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.Subscription. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, pubsub.Subscription): + request = pubsub.Subscription(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_subscription( + self, + request: pubsub.GetSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + + Args: + request (:class:`~.pubsub.GetSubscriptionRequest`): + The request object. Request for the GetSubscription + method. + subscription (:class:`str`): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetSubscriptionRequest): + request = pubsub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_subscription( + self, + request: pubsub.UpdateSubscriptionRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + + Args: + request (:class:`~.pubsub.UpdateSubscriptionRequest`): + The request object. 
Request for the UpdateSubscription + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateSubscriptionRequest): + request = pubsub.UpdateSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription.name", request.subscription.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_subscriptions( + self, + request: pubsub.ListSubscriptionsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsPager: + r"""Lists matching subscriptions. + + + Args: + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The request object. Request for the `ListSubscriptions` + method. + project (:class:`str`): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSubscriptionsPager: + Response for the ``ListSubscriptions`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListSubscriptionsRequest): + request = pubsub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubscriptionsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_subscription( + self, + request: pubsub.DeleteSubscriptionRequest = None, + *, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + + Args: + request (:class:`~.pubsub.DeleteSubscriptionRequest`): + The request object. Request for the DeleteSubscription + method. + subscription (:class:`str`): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteSubscriptionRequest): + request = pubsub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def modify_ack_deadline( + self, + request: pubsub.ModifyAckDeadlineRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + ack_deadline_seconds: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + + Args: + request (:class:`~.pubsub.ModifyAckDeadlineRequest`): + The request object. 
Request for the ModifyAckDeadline + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Required. The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify is 600 seconds (10 + minutes). + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ModifyAckDeadlineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ModifyAckDeadlineRequest): + request = pubsub.ModifyAckDeadlineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def acknowledge( + self, + request: pubsub.AcknowledgeRequest = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + + Args: + request (:class:`~.pubsub.AcknowledgeRequest`): + The request object. Request for the Acknowledge method. + subscription (:class:`str`): + Required. The subscription whose message is being + acknowledged. 
Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.AcknowledgeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.AcknowledgeRequest): + request = pubsub.AcknowledgeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def pull( + self, + request: pubsub.PullRequest = None, + *, + subscription: str = None, + return_immediately: bool = None, + max_messages: int = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + + Args: + request (:class:`~.pubsub.PullRequest`): + The request object. Request for the `Pull` method. + subscription (:class:`str`): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (:class:`bool`): + Optional. If this field set to true, the system will + respond immediately even if it there are no messages + available to return in the ``Pull`` response. Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (:class:`int`): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. 
+ This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PullResponse: + Response for the ``Pull`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, return_immediately, max_messages]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.PullRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.PullRequest): + request = pubsub.PullRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pull] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def streaming_pull( + self, + requests: Iterator[pubsub.StreamingPullRequest] = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[pubsub.StreamingPullResponse]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + + Args: + requests (Iterator[`~.pubsub.StreamingPullRequest`]): + The request object iterator. Request for the `StreamingPull` + streaming RPC method. This request is used to establish + the initial stream as well as to stream acknowledgements + and ack deadline modifications from the client to the + server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[~.pubsub.StreamingPullResponse]: + Response for the ``StreamingPull`` method. This response + is used to stream messages from the server to the + client. + + """ + + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. 
+ # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_pull] + + # Send the request. + response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def modify_push_config( + self, + request: pubsub.ModifyPushConfigRequest = None, + *, + subscription: str = None, + push_config: pubsub.PushConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + + Args: + request (:class:`~.pubsub.ModifyPushConfigRequest`): + The request object. Request for the ModifyPushConfig + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`~.pubsub.PushConfig`): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. 
+ This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, push_config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ModifyPushConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ModifyPushConfigRequest): + request = pubsub.ModifyPushConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_push_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def get_snapshot( + self, + request: pubsub.GetSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + + Args: + request (:class:`~.pubsub.GetSnapshotRequest`): + The request object. Request for the GetSnapshot method. + snapshot (:class:`str`): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetSnapshotRequest): + request = pubsub.GetSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_snapshots( + self, + request: pubsub.ListSnapshotsRequest = None, + *, + project: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSnapshotsPager: + r"""Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + + Args: + request (:class:`~.pubsub.ListSnapshotsRequest`): + The request object. Request for the `ListSnapshots` + method. + project (:class:`str`): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSnapshotsPager: + Response for the ``ListSnapshots`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListSnapshotsRequest): + request = pubsub.ListSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. 
+ response = pagers.ListSnapshotsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def create_snapshot( + self, + request: pubsub.CreateSnapshotRequest = None, + *, + name: str = None, + subscription: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + + Args: + request (:class:`~.pubsub.CreateSnapshotRequest`): + The request object. Request for the `CreateSnapshot` + method. + name (:class:`str`): + Required. User-provided name for this snapshot. If the + name is not provided in the request, the server will + assign a random name for this snapshot on the same + project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name + rules. Format is + ``projects/{project}/snapshots/{snap}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subscription (:class:`str`): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is + guaranteed to retain: (a) The existing backlog on the + subscription. More precisely, this is defined as the + messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, subscription]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.CreateSnapshotRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.CreateSnapshotRequest): + request = pubsub.CreateSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_snapshot( + self, + request: pubsub.UpdateSnapshotRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + + Args: + request (:class:`~.pubsub.UpdateSnapshotRequest`): + The request object. Request for the UpdateSnapshot + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateSnapshotRequest): + request = pubsub.UpdateSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("snapshot.name", request.snapshot.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_snapshot( + self, + request: pubsub.DeleteSnapshotRequest = None, + *, + snapshot: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + + Args: + request (:class:`~.pubsub.DeleteSnapshotRequest`): + The request object. 
Request for the `DeleteSnapshot` + method. + snapshot (:class:`str`): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteSnapshotRequest): + request = pubsub.DeleteSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), + ) + + # Send the request. 
+ rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def seek( + self, + request: pubsub.SeekRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + + Args: + request (:class:`~.pubsub.SeekRequest`): + The request object. Request for the `Seek` method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.SeekResponse: + Response for the ``Seek`` method (this response is + empty). + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.SeekRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.SeekRequest): + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.seek] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("subscription", request.subscription),) + ), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ + **JSON Example**:: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + + **YAML Example**:: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SubscriberClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py new file mode 100644 index 000000000000..713184d790ee --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -0,0 +1,276 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.pubsub_v1.types import pubsub + + +class ListSubscriptionsPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSubscriptionsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListSubscriptionsResponse], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[pubsub.Subscription]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSubscriptionsAsyncPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. 
+ + All the usual :class:`~.pubsub.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListSubscriptionsResponse]], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSubscriptionsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSubscriptionsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[pubsub.Subscription]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSnapshotsPager: + """A pager for iterating through ``list_snapshots`` requests. + + This class thinly wraps an initial + :class:`~.pubsub.ListSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., pubsub.ListSnapshotsResponse], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[pubsub.Snapshot]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSnapshotsAsyncPager: + """A pager for iterating through ``list_snapshots`` requests. 
+ + This class thinly wraps an initial + :class:`~.pubsub.ListSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`~.pubsub.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[pubsub.ListSnapshotsResponse]], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.pubsub.ListSnapshotsRequest`): + The initial request object. + response (:class:`~.pubsub.ListSnapshotsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[pubsub.Snapshot]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py new file mode 100644 index 000000000000..08282e11d4a3 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import SubscriberTransport +from .grpc import SubscriberGrpcTransport +from .grpc_asyncio import SubscriberGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] +_transport_registry["grpc"] = SubscriberGrpcTransport +_transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport + + +__all__ = ( + "SubscriberTransport", + "SubscriberGrpcTransport", + "SubscriberGrpcAsyncIOTransport", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py new file mode 100644 index 000000000000..7d7dfc6223f2 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -0,0 +1,507 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +class SubscriberTransport(abc.ABC): + """Abstract transport class for Subscriber.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. 
scopes (Optional[Sequence[str]]): A list of scopes.
+ self._wrapped_methods = { + self.create_subscription: gapic_v1.method.wrap_method( + self.create_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_subscription: gapic_v1.method.wrap_method( + self.get_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_subscription: gapic_v1.method.wrap_method( + self.update_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_subscriptions: gapic_v1.method.wrap_method( + self.list_subscriptions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_subscription: gapic_v1.method.wrap_method( + self.delete_subscription, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.modify_ack_deadline: gapic_v1.method.wrap_method( + self.modify_ack_deadline, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.acknowledge: 
gapic_v1.method.wrap_method( + self.acknowledge, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.pull: gapic_v1.method.wrap_method( + self.pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.streaming_pull: gapic_v1.method.wrap_method( + self.streaming_pull, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.DeadlineExceeded, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, + exceptions.InternalServerError, + ), + ), + default_timeout=900.0, + client_info=client_info, + ), + self.modify_push_config: gapic_v1.method.wrap_method( + self.modify_push_config, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_snapshot: gapic_v1.method.wrap_method( + self.get_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_snapshots: gapic_v1.method.wrap_method( + self.list_snapshots, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_snapshot: 
gapic_v1.method.wrap_method( + self.create_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_snapshot: gapic_v1.method.wrap_method( + self.update_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method.wrap_method( + self.delete_snapshot, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + ), + default_timeout=60.0, + client_info=client_info, + ), + self.seek: gapic_v1.method.wrap_method( + self.seek, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.Aborted, + exceptions.ServiceUnavailable, + exceptions.Unknown, + ), + ), + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def create_subscription( + self, + ) -> typing.Callable[ + [pubsub.Subscription], + typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def get_subscription( + self, + ) -> typing.Callable[ + [pubsub.GetSubscriptionRequest], + typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def update_subscription( + self, + ) -> typing.Callable[ + [pubsub.UpdateSubscriptionRequest], + typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + ]: + raise NotImplementedError() + + @property + def list_subscriptions( + self, + ) -> typing.Callable[ + [pubsub.ListSubscriptionsRequest], + typing.Union[ + pubsub.ListSubscriptionsResponse, + 
typing.Awaitable[pubsub.ListSubscriptionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_subscription( + self, + ) -> typing.Callable[ + [pubsub.DeleteSubscriptionRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def modify_ack_deadline( + self, + ) -> typing.Callable[ + [pubsub.ModifyAckDeadlineRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def acknowledge( + self, + ) -> typing.Callable[ + [pubsub.AcknowledgeRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def pull( + self, + ) -> typing.Callable[ + [pubsub.PullRequest], + typing.Union[pubsub.PullResponse, typing.Awaitable[pubsub.PullResponse]], + ]: + raise NotImplementedError() + + @property + def streaming_pull( + self, + ) -> typing.Callable[ + [pubsub.StreamingPullRequest], + typing.Union[ + pubsub.StreamingPullResponse, typing.Awaitable[pubsub.StreamingPullResponse] + ], + ]: + raise NotImplementedError() + + @property + def modify_push_config( + self, + ) -> typing.Callable[ + [pubsub.ModifyPushConfigRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def get_snapshot( + self, + ) -> typing.Callable[ + [pubsub.GetSnapshotRequest], + typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def list_snapshots( + self, + ) -> typing.Callable[ + [pubsub.ListSnapshotsRequest], + typing.Union[ + pubsub.ListSnapshotsResponse, typing.Awaitable[pubsub.ListSnapshotsResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_snapshot( + self, + ) -> typing.Callable[ + [pubsub.CreateSnapshotRequest], + typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def 
update_snapshot( + self, + ) -> typing.Callable[ + [pubsub.UpdateSnapshotRequest], + typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ]: + raise NotImplementedError() + + @property + def delete_snapshot( + self, + ) -> typing.Callable[ + [pubsub.DeleteSnapshotRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def seek( + self, + ) -> typing.Callable[ + [pubsub.SeekRequest], + typing.Union[pubsub.SeekResponse, typing.Awaitable[pubsub.SeekResponse]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.SetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> typing.Callable[ + [iam_policy.GetIamPolicyRequest], + typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> typing.Callable[ + [iam_policy.TestIamPermissionsRequest], + typing.Union[ + iam_policy.TestIamPermissionsResponse, + typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ], + ]: + raise NotImplementedError() + + +__all__ = ("SubscriberTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py new file mode 100644 index 000000000000..721d31d36032 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -0,0 +1,803 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO + + +class SubscriberGrpcTransport(SubscriberTransport): + """gRPC backend transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. 
+ super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. 
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], pubsub.Subscription]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.Subscription], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_subscription" not in self._stubs: + self._stubs["create_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["create_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], pubsub.Subscription]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], pubsub.Subscription]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_subscription" not in self._stubs: + self._stubs["update_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["update_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[[pubsub.ListSubscriptionsRequest], pubsub.ListSubscriptionsResponse]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + ~.ListSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty.Empty]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_subscription"] + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty.Empty]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_ack_deadline" not in self._stubs: + self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_ack_deadline"] + + @property + def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty.Empty]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. 
The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge" not in self._stubs: + self._stubs["acknowledge"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["acknowledge"] + + @property + def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Returns: + Callable[[~.PullRequest], + ~.PullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pull" not in self._stubs: + self._stubs["pull"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs["pull"] + + @property + def streaming_pull( + self, + ) -> Callable[[pubsub.StreamingPullRequest], pubsub.StreamingPullResponse]: + r"""Return a callable for the streaming pull method over gRPC. 
+ + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + ~.StreamingPullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "streaming_pull" not in self._stubs: + self._stubs["streaming_pull"] = self.grpc_channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs["streaming_pull"] + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], empty.Empty]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_push_config" not in self._stubs: + self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_push_config"] + + @property + def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[[pubsub.ListSnapshotsRequest], pubsub.ListSnapshotsResponse]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ + Returns: + Callable[[~.ListSnapshotsRequest], + ~.ListSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. 
+ + Returns: + Callable[[~.CreateSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_snapshot" not in self._stubs: + self._stubs["create_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSnapshot", + request_serializer=pubsub.CreateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["create_snapshot"] + + @property + def update_snapshot( + self, + ) -> Callable[[pubsub.UpdateSnapshotRequest], pubsub.Snapshot]: + r"""Return a callable for the update snapshot method over gRPC. + + Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Returns: + Callable[[~.UpdateSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_snapshot" not in self._stubs: + self._stubs["update_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSnapshot", + request_serializer=pubsub.UpdateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["update_snapshot"] + + @property + def delete_snapshot(self) -> Callable[[pubsub.DeleteSnapshotRequest], empty.Empty]: + r"""Return a callable for the delete snapshot method over gRPC. + + Removes an existing snapshot. 
Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Returns: + Callable[[~.DeleteSnapshotRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_snapshot"] + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + ~.SeekResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "seek" not in self._stubs: + self._stubs["seek"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs["seek"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SubscriberGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py new file mode 100644 index 000000000000..0e844728f445 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -0,0 +1,809 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .grpc import SubscriberGrpcTransport + + +class SubscriberGrpcAsyncIOTransport(SubscriberTransport): + """gRPC AsyncIO backend transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.Subscription], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_subscription" not in self._stubs: + self._stubs["create_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSubscription", + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["create_subscription"] + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_subscription" not in self._stubs: + self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSubscription", + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["get_subscription"] + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], Awaitable[pubsub.Subscription]]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription. 
Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_subscription" not in self._stubs: + self._stubs["update_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSubscription", + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs["update_subscription"] + + @property + def list_subscriptions( + self, + ) -> Callable[ + [pubsub.ListSubscriptionsRequest], Awaitable[pubsub.ListSubscriptionsResponse] + ]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + Awaitable[~.ListSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_subscriptions" not in self._stubs: + self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSubscriptions", + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs["list_subscriptions"] + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. 
All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_subscription" not in self._stubs: + self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSubscription", + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_subscription"] + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "modify_ack_deadline" not in self._stubs: + self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyAckDeadline", + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_ack_deadline"] + + @property + def acknowledge( + self, + ) -> Callable[[pubsub.AcknowledgeRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "acknowledge" not in self._stubs: + self._stubs["acknowledge"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Acknowledge", + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["acknowledge"] + + @property + def pull(self) -> Callable[[pubsub.PullRequest], Awaitable[pubsub.PullResponse]]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Returns: + Callable[[~.PullRequest], + Awaitable[~.PullResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "pull" not in self._stubs: + self._stubs["pull"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Pull", + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs["pull"] + + @property + def streaming_pull( + self, + ) -> Callable[ + [pubsub.StreamingPullRequest], Awaitable[pubsub.StreamingPullResponse] + ]: + r"""Return a callable for the streaming pull method over gRPC. + + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + Awaitable[~.StreamingPullResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "streaming_pull" not in self._stubs: + self._stubs["streaming_pull"] = self.grpc_channel.stream_stream( + "/google.pubsub.v1.Subscriber/StreamingPull", + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs["streaming_pull"] + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "modify_push_config" not in self._stubs: + self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ModifyPushConfig", + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["modify_push_config"] + + @property + def get_snapshot( + self, + ) -> Callable[[pubsub.GetSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. + Snapshots are used in Seek operations, which allow you to manage + message acknowledgments in bulk. 
That is, you can set + the acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_snapshot" not in self._stubs: + self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/GetSnapshot", + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["get_snapshot"] + + @property + def list_snapshots( + self, + ) -> Callable[ + [pubsub.ListSnapshotsRequest], Awaitable[pubsub.ListSnapshotsResponse] + ]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListSnapshotsRequest], + Awaitable[~.ListSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_snapshots" not in self._stubs: + self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/ListSnapshots", + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs["list_snapshots"] + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.CreateSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_snapshot" not in self._stubs: + self._stubs["create_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/CreateSnapshot", + request_serializer=pubsub.CreateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["create_snapshot"] + + @property + def update_snapshot( + self, + ) -> Callable[[pubsub.UpdateSnapshotRequest], Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the update snapshot method over gRPC. + + Updates an existing snapshot. Snapshots are used in + Seek operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Returns: + Callable[[~.UpdateSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_snapshot" not in self._stubs: + self._stubs["update_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/UpdateSnapshot", + request_serializer=pubsub.UpdateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs["update_snapshot"] + + @property + def delete_snapshot( + self, + ) -> Callable[[pubsub.DeleteSnapshotRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete snapshot method over gRPC. + + Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Returns: + Callable[[~.DeleteSnapshotRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_snapshot" not in self._stubs: + self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/DeleteSnapshot", + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_snapshot"] + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], Awaitable[pubsub.SeekResponse]]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + Awaitable[~.SeekResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "seek" not in self._stubs: + self._stubs["seek"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.Subscriber/Seek", + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs["seek"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SubscriberGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py new file mode 100644 index 000000000000..915c3f2e22fb --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .pubsub import ( + MessageStoragePolicy, + Topic, + PubsubMessage, + GetTopicRequest, + UpdateTopicRequest, + PublishRequest, + PublishResponse, + ListTopicsRequest, + ListTopicsResponse, + ListTopicSubscriptionsRequest, + ListTopicSubscriptionsResponse, + ListTopicSnapshotsRequest, + ListTopicSnapshotsResponse, + DeleteTopicRequest, + DetachSubscriptionRequest, + DetachSubscriptionResponse, + Subscription, + RetryPolicy, + DeadLetterPolicy, + ExpirationPolicy, + PushConfig, + ReceivedMessage, + GetSubscriptionRequest, + UpdateSubscriptionRequest, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + DeleteSubscriptionRequest, + ModifyPushConfigRequest, + PullRequest, + PullResponse, + ModifyAckDeadlineRequest, + AcknowledgeRequest, + StreamingPullRequest, + StreamingPullResponse, + CreateSnapshotRequest, + UpdateSnapshotRequest, + Snapshot, + GetSnapshotRequest, + ListSnapshotsRequest, + ListSnapshotsResponse, + DeleteSnapshotRequest, + SeekRequest, + SeekResponse, +) + + +__all__ = ( + "MessageStoragePolicy", + "Topic", + "PubsubMessage", + "GetTopicRequest", + "UpdateTopicRequest", + "PublishRequest", + "PublishResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "Subscription", + "RetryPolicy", + "DeadLetterPolicy", + "ExpirationPolicy", + "PushConfig", + "ReceivedMessage", + "GetSubscriptionRequest", + "UpdateSubscriptionRequest", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "DeleteSubscriptionRequest", + "ModifyPushConfigRequest", + "PullRequest", + "PullResponse", + "ModifyAckDeadlineRequest", + "AcknowledgeRequest", + "StreamingPullRequest", + "StreamingPullResponse", + "CreateSnapshotRequest", + "UpdateSnapshotRequest", + "Snapshot", + "GetSnapshotRequest", + "ListSnapshotsRequest", + 
"ListSnapshotsResponse", + "DeleteSnapshotRequest", + "SeekRequest", + "SeekResponse", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py new file mode 100644 index 000000000000..61bb089f5d79 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -0,0 +1,1299 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.pubsub.v1", + manifest={ + "MessageStoragePolicy", + "Topic", + "PubsubMessage", + "GetTopicRequest", + "UpdateTopicRequest", + "PublishRequest", + "PublishResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "DeleteTopicRequest", + "DetachSubscriptionRequest", + "DetachSubscriptionResponse", + "Subscription", + "RetryPolicy", + "DeadLetterPolicy", + "ExpirationPolicy", + "PushConfig", + "ReceivedMessage", + "GetSubscriptionRequest", + "UpdateSubscriptionRequest", + "ListSubscriptionsRequest", + "ListSubscriptionsResponse", + "DeleteSubscriptionRequest", 
+ "ModifyPushConfigRequest", + "PullRequest", + "PullResponse", + "ModifyAckDeadlineRequest", + "AcknowledgeRequest", + "StreamingPullRequest", + "StreamingPullResponse", + "CreateSnapshotRequest", + "UpdateSnapshotRequest", + "Snapshot", + "GetSnapshotRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", + "DeleteSnapshotRequest", + "SeekRequest", + "SeekResponse", + }, +) + + +class MessageStoragePolicy(proto.Message): + r"""A policy constraining the storage of messages published to + the topic. + + Attributes: + allowed_persistence_regions (Sequence[str]): + A list of IDs of GCP regions where messages + that are published to the topic may be persisted + in storage. Messages published by publishers + running in non-allowed GCP regions (or running + outside of GCP altogether) will be routed for + storage in one of the allowed regions. An empty + list means that no regions are allowed, and is + not a valid configuration. + """ + + allowed_persistence_regions = proto.RepeatedField(proto.STRING, number=1) + + +class Topic(proto.Message): + r"""A topic resource. + + Attributes: + name (str): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and + 255 characters in length, and it must not start with + ``"goog"``. + labels (Sequence[~.pubsub.Topic.LabelsEntry]): + See [Creating and managing labels] + (https://cloud.google.com/pubsub/docs/labels). + message_storage_policy (~.pubsub.MessageStoragePolicy): + Policy constraining the set of Google Cloud + Platform regions where messages published to the + topic may be stored. If not present, then no + constraints are in effect. 
+ kms_key_name (str): + The resource name of the Cloud KMS CryptoKey to be used to + protect access to messages published on this topic. + + The expected format is + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + """ + + name = proto.Field(proto.STRING, number=1) + + labels = proto.MapField(proto.STRING, proto.STRING, number=2) + + message_storage_policy = proto.Field( + proto.MESSAGE, number=3, message=MessageStoragePolicy, + ) + + kms_key_name = proto.Field(proto.STRING, number=5) + + +class PubsubMessage(proto.Message): + r"""A message that is published by publishers and consumed by + subscribers. The message must contain either a non-empty data field + or at least one attribute. Note that client libraries represent this + object differently depending on the language. See the corresponding + `client library + documentation `__ + for more information. See [quotas and limits] + (https://cloud.google.com/pubsub/quotas) for more information about + message limits. + + Attributes: + data (bytes): + The message data field. If this field is + empty, the message must contain at least one + attribute. + attributes (Sequence[~.pubsub.PubsubMessage.AttributesEntry]): + Attributes for this message. If this field is + empty, the message must contain non-empty data. + This can be used to filter messages on the + subscription. + message_id (str): + ID of this message, assigned by the server when the message + is published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` + call. + publish_time (~.timestamp.Timestamp): + The time at which the message was published, populated by + the server when it receives the ``Publish`` call. It must + not be populated by the publisher in a ``Publish`` call. 
+ ordering_key (str): + If non-empty, identifies related messages for which publish + order should be respected. If a ``Subscription`` has + ``enable_message_ordering`` set to ``true``, messages + published with the same non-empty ``ordering_key`` value + will be delivered to subscribers in the order in which they + are received by the Pub/Sub system. All ``PubsubMessage``\ s + published in a given ``PublishRequest`` must specify the + same ``ordering_key`` value. + """ + + data = proto.Field(proto.BYTES, number=1) + + attributes = proto.MapField(proto.STRING, proto.STRING, number=2) + + message_id = proto.Field(proto.STRING, number=3) + + publish_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + ordering_key = proto.Field(proto.STRING, number=5) + + +class GetTopicRequest(proto.Message): + r"""Request for the GetTopic method. + + Attributes: + topic (str): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + """ + + topic = proto.Field(proto.STRING, number=1) + + +class UpdateTopicRequest(proto.Message): + r"""Request for the UpdateTopic method. + + Attributes: + topic (~.pubsub.Topic): + Required. The updated topic object. + update_mask (~.field_mask.FieldMask): + Required. Indicates which fields in the provided topic to + update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but the + ``message_storage_policy`` is not set in the ``topic`` + provided above, then the updated value is determined by the + policy configured at the project or organization level. + """ + + topic = proto.Field(proto.MESSAGE, number=1, message=Topic,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class PublishRequest(proto.Message): + r"""Request for the Publish method. + + Attributes: + topic (str): + Required. The messages in the request will be published on + this topic. Format is ``projects/{project}/topics/{topic}``. 
+ messages (Sequence[~.pubsub.PubsubMessage]): + Required. The messages to publish. + """ + + topic = proto.Field(proto.STRING, number=1) + + messages = proto.RepeatedField(proto.MESSAGE, number=2, message=PubsubMessage,) + + +class PublishResponse(proto.Message): + r"""Response for the ``Publish`` method. + + Attributes: + message_ids (Sequence[str]): + The server-assigned ID of each published + message, in the same order as the messages in + the request. IDs are guaranteed to be unique + within the topic. + """ + + message_ids = proto.RepeatedField(proto.STRING, number=1) + + +class ListTopicsRequest(proto.Message): + r"""Request for the ``ListTopics`` method. + + Attributes: + project (str): + Required. The name of the project in which to list topics. + Format is ``projects/{project-id}``. + page_size (int): + Maximum number of topics to return. + page_token (str): + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. + """ + + project = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTopicsResponse(proto.Message): + r"""Response for the ``ListTopics`` method. + + Attributes: + topics (Sequence[~.pubsub.Topic]): + The resulting topics. + next_page_token (str): + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. + """ + + @property + def raw_page(self): + return self + + topics = proto.RepeatedField(proto.MESSAGE, number=1, message=Topic,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListTopicSubscriptionsRequest(proto.Message): + r"""Request for the ``ListTopicSubscriptions`` method. + + Attributes: + topic (str): + Required. The name of the topic that subscriptions are + attached to. 
Format is + ``projects/{project}/topics/{topic}``. + page_size (int): + Maximum number of subscription names to + return. + page_token (str): + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """ + + topic = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTopicSubscriptionsResponse(proto.Message): + r"""Response for the ``ListTopicSubscriptions`` method. + + Attributes: + subscriptions (Sequence[str]): + The names of subscriptions attached to the + topic specified in the request. + next_page_token (str): + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions = proto.RepeatedField(proto.STRING, number=1) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class ListTopicSnapshotsRequest(proto.Message): + r"""Request for the ``ListTopicSnapshots`` method. + + Attributes: + topic (str): + Required. The name of the topic that snapshots are attached + to. Format is ``projects/{project}/topics/{topic}``. + page_size (int): + Maximum number of snapshot names to return. + page_token (str): + The value returned by the last + ``ListTopicSnapshotsResponse``; indicates that this is a + continuation of a prior ``ListTopicSnapshots`` call, and + that the system should return the next page of data. + """ + + topic = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListTopicSnapshotsResponse(proto.Message): + r"""Response for the ``ListTopicSnapshots`` method. 
+ + Attributes: + snapshots (Sequence[str]): + The names of the snapshots that match the + request. + next_page_token (str): + If not empty, indicates that there may be more snapshots + that match the request; this value should be passed in a new + ``ListTopicSnapshotsRequest`` to get more snapshots. + """ + + @property + def raw_page(self): + return self + + snapshots = proto.RepeatedField(proto.STRING, number=1) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteTopicRequest(proto.Message): + r"""Request for the ``DeleteTopic`` method. + + Attributes: + topic (str): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + """ + + topic = proto.Field(proto.STRING, number=1) + + +class DetachSubscriptionRequest(proto.Message): + r"""Request for the DetachSubscription method. + + Attributes: + subscription (str): + Required. The subscription to detach. Format is + ``projects/{project}/subscriptions/{subscription}``. + """ + + subscription = proto.Field(proto.STRING, number=1) + + +class DetachSubscriptionResponse(proto.Message): + r"""Response for the DetachSubscription method. + Reserved for future use. + """ + + +class Subscription(proto.Message): + r"""A subscription resource. + + Attributes: + name (str): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + topic (str): + Required. The name of the topic from which this subscription + is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. 
+ push_config (~.pubsub.PushConfig): + If push delivery is used with this subscription, this field + is used to configure it. An empty ``pushConfig`` signifies + that the subscriber will pull and ack messages using API + methods. + ack_deadline_seconds (int): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it is + considered to be outstanding. During that time period, the + message will not be redelivered (on a best-effort basis). + + For pull subscriptions, this value is used as the initial + value for the ack deadline. To override this value for a + given message, call ``ModifyAckDeadline`` with the + corresponding ``ack_id`` if using non-streaming pull or send + the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if + using streaming pull. The minimum custom deadline you can + specify is 10 seconds. The maximum custom deadline you can + specify is 600 seconds (10 minutes). If this parameter is 0, + a default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + retain_acked_messages (bool): + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out + of the ``message_retention_duration`` window. This must be + true if you would like to [Seek to a timestamp] + (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). + message_retention_duration (~.duration.Duration): + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. 
If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. Cannot be more than 7 days or less than + 10 minutes. + labels (Sequence[~.pubsub.Subscription.LabelsEntry]): + See + Creating and managing labels. + enable_message_ordering (bool): + If true, messages published with the same ``ordering_key`` + in ``PubsubMessage`` will be delivered to the subscribers in + the order in which they are received by the Pub/Sub system. + Otherwise, they may be delivered in any order. + expiration_policy (~.pubsub.ExpirationPolicy): + A policy that specifies the conditions for this + subscription's expiration. A subscription is considered + active as long as any connected subscriber is successfully + consuming messages from the subscription or is issuing + operations on the subscription. If ``expiration_policy`` is + not set, a *default policy* with ``ttl`` of 31 days will be + used. The minimum allowed value for + ``expiration_policy.ttl`` is 1 day. + filter (str): + An expression written in the Pub/Sub `filter + language `__. + If non-empty, then only ``PubsubMessage``\ s whose + ``attributes`` field matches the filter are delivered on + this subscription. If empty, then no messages are filtered + out. + dead_letter_policy (~.pubsub.DeadLetterPolicy): + A policy that specifies the conditions for dead lettering + messages in this subscription. If dead_letter_policy is not + set, dead lettering is disabled. + + The Cloud Pub/Sub service account associated with this + subscriptions's parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) + must have permission to Acknowledge() messages on this + subscription. + retry_policy (~.pubsub.RetryPolicy): + A policy that specifies how Pub/Sub retries + message delivery for this subscription. + + If not set, the default retry policy is applied. 
+ This generally implies that messages will be + retried as soon as possible for healthy + subscribers. RetryPolicy will be triggered on + NACKs or acknowledgement deadline exceeded + events for a given message. + detached (bool): + Indicates whether the subscription is detached from its + topic. Detached subscriptions don't receive messages from + their topic and don't retain any backlog. ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. + If the subscription is a push subscription, pushes to the + endpoint will not be made. + """ + + name = proto.Field(proto.STRING, number=1) + + topic = proto.Field(proto.STRING, number=2) + + push_config = proto.Field(proto.MESSAGE, number=4, message="PushConfig",) + + ack_deadline_seconds = proto.Field(proto.INT32, number=5) + + retain_acked_messages = proto.Field(proto.BOOL, number=7) + + message_retention_duration = proto.Field( + proto.MESSAGE, number=8, message=duration.Duration, + ) + + labels = proto.MapField(proto.STRING, proto.STRING, number=9) + + enable_message_ordering = proto.Field(proto.BOOL, number=10) + + expiration_policy = proto.Field( + proto.MESSAGE, number=11, message="ExpirationPolicy", + ) + + filter = proto.Field(proto.STRING, number=12) + + dead_letter_policy = proto.Field( + proto.MESSAGE, number=13, message="DeadLetterPolicy", + ) + + retry_policy = proto.Field(proto.MESSAGE, number=14, message="RetryPolicy",) + + detached = proto.Field(proto.BOOL, number=15) + + +class RetryPolicy(proto.Message): + r"""A policy that specifies how Cloud Pub/Sub retries message delivery. + + Retry delay will be exponential based on provided minimum and + maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. + + RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. + + Retry Policy is implemented on a best effort basis. At times, the + delay between consecutive deliveries may not match the + configuration. 
That is, delay can be more or less than configured + backoff. + + Attributes: + minimum_backoff (~.duration.Duration): + The minimum delay between consecutive + deliveries of a given message. Value should be + between 0 and 600 seconds. Defaults to 10 + seconds. + maximum_backoff (~.duration.Duration): + The maximum delay between consecutive + deliveries of a given message. Value should be + between 0 and 600 seconds. Defaults to 600 + seconds. + """ + + minimum_backoff = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + maximum_backoff = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + + +class DeadLetterPolicy(proto.Message): + r"""Dead lettering is done on a best effort basis. The same + message might be dead lettered multiple times. + + If validation on any of the fields fails at subscription + creation/updation, the create/update subscription request will + fail. + + Attributes: + dead_letter_topic (str): + The name of the topic to which dead letter messages should + be published. Format is + ``projects/{project}/topics/{topic}``.The Cloud Pub/Sub + service account associated with the enclosing subscription's + parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) + must have permission to Publish() to this topic. + + The operation will fail if the topic does not exist. Users + should ensure that there is a subscription attached to this + topic since messages published to a topic with no + subscriptions are lost. + max_delivery_attempts (int): + The maximum number of delivery attempts for any message. The + value must be between 5 and 100. + + The number of delivery attempts is defined as 1 + (the sum + of number of NACKs and number of times the acknowledgement + deadline has been exceeded for the message). + + A NACK is any call to ModifyAckDeadline with a 0 deadline. + Note that client libraries may automatically extend + ack_deadlines. 
+ + This field will be honored on a best effort basis. + + If this parameter is 0, a default value of 5 is used. + """ + + dead_letter_topic = proto.Field(proto.STRING, number=1) + + max_delivery_attempts = proto.Field(proto.INT32, number=2) + + +class ExpirationPolicy(proto.Message): + r"""A policy that specifies the conditions for resource + expiration (i.e., automatic resource deletion). + + Attributes: + ttl (~.duration.Duration): + Specifies the "time-to-live" duration for an associated + resource. The resource expires if it is not active for a + period of ``ttl``. The definition of "activity" depends on + the type of the associated resource. The minimum and maximum + allowed values for ``ttl`` depend on the type of the + associated resource, as well. If ``ttl`` is not set, the + associated resource never expires. + """ + + ttl = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + +class PushConfig(proto.Message): + r"""Configuration for a push delivery endpoint. + + Attributes: + push_endpoint (str): + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + ``https://example.com/push``. + attributes (Sequence[~.pubsub.PushConfig.AttributesEntry]): + Endpoint configuration attributes that can be used to + control different aspects of the message delivery. + + The only currently supported attribute is + ``x-goog-version``, which you can use to change the format + of the pushed message. This attribute indicates the version + of the data expected by the endpoint. This controls the + shape of the pushed message (i.e., its fields and metadata). + + If not present during the ``CreateSubscription`` call, it + will default to the version of the Pub/Sub API used to make + such call. If not present in a ``ModifyPushConfig`` call, + its value will not be changed. ``GetSubscription`` calls + will always return a valid version, even if the subscription + was created without this attribute. 
+ + The only supported values for the ``x-goog-version`` + attribute are: + + - ``v1beta1``: uses the push format defined in the v1beta1 + Pub/Sub API. + - ``v1`` or ``v1beta2``: uses the push format defined in + the v1 Pub/Sub API. + + For example: + + .. raw:: html + +
attributes { "x-goog-version": "v1" } 
+ oidc_token (~.pubsub.PushConfig.OidcToken): + If specified, Pub/Sub will generate and attach an OIDC JWT + token as an ``Authorization`` header in the HTTP request for + every pushed message. + """ + + class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating the OIDC token. The caller (for + CreateSubscription, UpdateSubscription, and ModifyPushConfig + RPCs) must have the iam.serviceAccounts.actAs permission for + the service account. + audience (str): + Audience to be used when generating OIDC + token. The audience claim identifies the + recipients that the JWT is intended for. The + audience value is a single case-sensitive + string. Having multiple values (array) for the + audience field is not supported. More info about + the OIDC JWT token audience here: + https://tools.ietf.org/html/rfc7519#section-4.1.3 + Note: if not specified, the Push endpoint URL + will be used. + """ + + service_account_email = proto.Field(proto.STRING, number=1) + + audience = proto.Field(proto.STRING, number=2) + + push_endpoint = proto.Field(proto.STRING, number=1) + + attributes = proto.MapField(proto.STRING, proto.STRING, number=2) + + oidc_token = proto.Field( + proto.MESSAGE, number=3, oneof="authentication_method", message=OidcToken, + ) + + +class ReceivedMessage(proto.Message): + r"""A message and its corresponding acknowledgment ID. + + Attributes: + ack_id (str): + This ID can be used to acknowledge the + received message. + message (~.pubsub.PubsubMessage): + The message. + delivery_attempt (int): + The approximate number of times that Cloud Pub/Sub has + attempted to deliver the associated message to a subscriber. + + More precisely, this is 1 + (number of NACKs) + (number of + ack_deadline exceeds) for this message. + + A NACK is any call to ModifyAckDeadline with a 0 deadline. 
+ An ack_deadline exceeds event is whenever a message is not + acknowledged within ack_deadline. Note that ack_deadline is + initially Subscription.ackDeadlineSeconds, but may get + extended automatically by the client library. + + Upon the first delivery of a given message, + ``delivery_attempt`` will have a value of 1. The value is + calculated at best effort and is approximate. + + If a DeadLetterPolicy is not set on the subscription, this + will be 0. + """ + + ack_id = proto.Field(proto.STRING, number=1) + + message = proto.Field(proto.MESSAGE, number=2, message=PubsubMessage,) + + delivery_attempt = proto.Field(proto.INT32, number=3) + + +class GetSubscriptionRequest(proto.Message): + r"""Request for the GetSubscription method. + + Attributes: + subscription (str): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription = proto.Field(proto.STRING, number=1) + + +class UpdateSubscriptionRequest(proto.Message): + r"""Request for the UpdateSubscription method. + + Attributes: + subscription (~.pubsub.Subscription): + Required. The updated subscription object. + update_mask (~.field_mask.FieldMask): + Required. Indicates which fields in the + provided subscription to update. Must be + specified and non-empty. + """ + + subscription = proto.Field(proto.MESSAGE, number=1, message=Subscription,) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class ListSubscriptionsRequest(proto.Message): + r"""Request for the ``ListSubscriptions`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + page_size (int): + Maximum number of subscriptions to return. 
+ page_token (str): + The value returned by the last + ``ListSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListSubscriptions`` call, and that + the system should return the next page of data. + """ + + project = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListSubscriptionsResponse(proto.Message): + r"""Response for the ``ListSubscriptions`` method. + + Attributes: + subscriptions (Sequence[~.pubsub.Subscription]): + The subscriptions that match the request. + next_page_token (str): + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListSubscriptionsRequest`` to get more subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions = proto.RepeatedField(proto.MESSAGE, number=1, message=Subscription,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteSubscriptionRequest(proto.Message): + r"""Request for the DeleteSubscription method. + + Attributes: + subscription (str): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription = proto.Field(proto.STRING, number=1) + + +class ModifyPushConfigRequest(proto.Message): + r"""Request for the ModifyPushConfig method. + + Attributes: + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + push_config (~.pubsub.PushConfig): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system + should stop pushing messages from the given subscription and + allow messages to be pulled and acknowledged - effectively + pausing the subscription if ``Pull`` or ``StreamingPull`` is + not called. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + push_config = proto.Field(proto.MESSAGE, number=2, message=PushConfig,) + + +class PullRequest(proto.Message): + r"""Request for the ``Pull`` method. + + Attributes: + subscription (str): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + return_immediately (bool): + Optional. If this field set to true, the system will respond + immediately even if it there are no messages available to + return in the ``Pull`` response. Otherwise, the system may + wait (for a bounded amount of time) until at least one + message is available, rather than returning no messages. + Warning: setting this field to ``true`` is discouraged + because it adversely impacts the performance of ``Pull`` + operations. We recommend that users do not set this field. + max_messages (int): + Required. The maximum number of messages to + return for this request. Must be a positive + integer. The Pub/Sub system may return fewer + than the number specified. + """ + + subscription = proto.Field(proto.STRING, number=1) + + return_immediately = proto.Field(proto.BOOL, number=2) + + max_messages = proto.Field(proto.INT32, number=3) + + +class PullResponse(proto.Message): + r"""Response for the ``Pull`` method. + + Attributes: + received_messages (Sequence[~.pubsub.ReceivedMessage]): + Received Pub/Sub messages. The list will be empty if there + are no more messages available in the backlog. For JSON, the + response can be entirely empty. The Pub/Sub system may + return fewer than the ``maxMessages`` requested even if + there are more messages available in the backlog. + """ + + received_messages = proto.RepeatedField( + proto.MESSAGE, number=1, message=ReceivedMessage, + ) + + +class ModifyAckDeadlineRequest(proto.Message): + r"""Request for the ModifyAckDeadline method. + + Attributes: + subscription (str): + Required. The name of the subscription. 
Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + Required. List of acknowledgment IDs. + ack_deadline_seconds (int): + Required. The new ack deadline with respect to the time this + request was sent to the Pub/Sub system. For example, if the + value is 10, the new ack deadline will expire 10 seconds + after the ``ModifyAckDeadline`` call was made. Specifying + zero might immediately make the message available for + delivery to another subscriber client. This typically + results in an increase in the rate of message redeliveries + (that is, duplicates). The minimum deadline you can specify + is 0 seconds. The maximum deadline you can specify is 600 + seconds (10 minutes). + """ + + subscription = proto.Field(proto.STRING, number=1) + + ack_ids = proto.RepeatedField(proto.STRING, number=4) + + ack_deadline_seconds = proto.Field(proto.INT32, number=3) + + +class AcknowledgeRequest(proto.Message): + r"""Request for the Acknowledge method. + + Attributes: + subscription (str): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in the + ``Pull`` response. Must not be empty. + """ + + subscription = proto.Field(proto.STRING, number=1) + + ack_ids = proto.RepeatedField(proto.STRING, number=2) + + +class StreamingPullRequest(proto.Message): + r"""Request for the ``StreamingPull`` streaming RPC method. This request + is used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to + the server. + + Attributes: + subscription (str): + Required. The subscription for which to initialize the new + stream. This must be provided in the first request on the + stream, and must not be set in subsequent requests from + client to server. 
Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID + is malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds (Sequence[int]): + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be + the same as the size of ``modify_deadline_ack_ids``. If it + differs the stream will be aborted with + ``INVALID_ARGUMENT``. Each element in this list is applied + to the element in the same position in + ``modify_deadline_ack_ids``. The new ack deadline is with + respect to the time this request was sent to the Pub/Sub + system. Must be >= 0. For example, if the value is 10, the + new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids (Sequence[str]): + List of acknowledgement IDs whose deadline will be modified + based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by + the subscriber, or to make the message available for + redelivery if the processing was interrupted. + stream_ack_deadline_seconds (int): + Required. The ack deadline to use for the + stream. This must be provided in the first + request on the stream, but it can also be + updated on subsequent requests from client to + server. The minimum deadline you can specify is + 10 seconds. 
The maximum deadline you can specify + is 600 seconds (10 minutes). + client_id (str): + A unique identifier that is used to distinguish client + instances from each other. Only needs to be provided on the + initial request. When a stream disconnects and reconnects + for the same stream, the client_id should be set to the same + value so that state associated with the old stream can be + transferred to the new stream. The same client_id should not + be used for different client instances. + max_outstanding_messages (int): + Flow control settings for the maximum number of outstanding + messages. When there are ``max_outstanding_messages`` or + more currently sent to the streaming pull client that have + not yet been acked or nacked, the server stops sending more + messages. The sending of messages resumes once the number of + outstanding messages is less than this value. If the value + is <= 0, there is no limit to the number of outstanding + messages. This property can only be set on the initial + StreamingPullRequest. If it is set on a subsequent request, + the stream will be aborted with status ``INVALID_ARGUMENT``. + max_outstanding_bytes (int): + Flow control settings for the maximum number of outstanding + bytes. When there are ``max_outstanding_bytes`` or more + worth of messages currently sent to the streaming pull + client that have not yet been acked or nacked, the server + will stop sending more messages. The sending of messages + resumes once the number of outstanding bytes is less than + this value. If the value is <= 0, there is no limit to the + number of outstanding bytes. This property can only be set + on the initial StreamingPullRequest. If it is set on a + subsequent request, the stream will be aborted with status + ``INVALID_ARGUMENT``. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + ack_ids = proto.RepeatedField(proto.STRING, number=2) + + modify_deadline_seconds = proto.RepeatedField(proto.INT32, number=3) + + modify_deadline_ack_ids = proto.RepeatedField(proto.STRING, number=4) + + stream_ack_deadline_seconds = proto.Field(proto.INT32, number=5) + + client_id = proto.Field(proto.STRING, number=6) + + max_outstanding_messages = proto.Field(proto.INT64, number=7) + + max_outstanding_bytes = proto.Field(proto.INT64, number=8) + + +class StreamingPullResponse(proto.Message): + r"""Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + Attributes: + received_messages (Sequence[~.pubsub.ReceivedMessage]): + Received Pub/Sub messages. This will not be + empty. + """ + + received_messages = proto.RepeatedField( + proto.MESSAGE, number=1, message=ReceivedMessage, + ) + + +class CreateSnapshotRequest(proto.Message): + r"""Request for the ``CreateSnapshot`` method. + + Attributes: + name (str): + Required. User-provided name for this snapshot. If the name + is not provided in the request, the server will assign a + random name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. See the resource name rules. Format is + ``projects/{project}/snapshots/{snap}``. + subscription (str): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is guaranteed to + retain: (a) The existing backlog on the subscription. More + precisely, this is defined as the messages in the + subscription's backlog that are unacknowledged upon the + successful completion of the ``CreateSnapshot`` request; as + well as: (b) Any messages published to the subscription's + topic following the successful completion of the + CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ labels (Sequence[~.pubsub.CreateSnapshotRequest.LabelsEntry]): + See + Creating and managing labels. + """ + + name = proto.Field(proto.STRING, number=1) + + subscription = proto.Field(proto.STRING, number=2) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + +class UpdateSnapshotRequest(proto.Message): + r"""Request for the UpdateSnapshot method. + + Attributes: + snapshot (~.pubsub.Snapshot): + Required. The updated snapshot object. + update_mask (~.field_mask.FieldMask): + Required. Indicates which fields in the + provided snapshot to update. Must be specified + and non-empty. + """ + + snapshot = proto.Field(proto.MESSAGE, number=1, message="Snapshot",) + + update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + + +class Snapshot(proto.Message): + r"""A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages in + an existing subscription to the state captured by a snapshot. + + Attributes: + name (str): + The name of the snapshot. + topic (str): + The name of the topic from which this + snapshot is retaining messages. + expire_time (~.timestamp.Timestamp): + The snapshot is guaranteed to exist up until this time. A + newly-created snapshot expires no later than 7 days from the + time of its creation. Its exact lifetime is determined at + creation by the existing backlog in the source subscription. + Specifically, the lifetime of the snapshot is + ``7 days - (age of oldest unacked message in the subscription)``. + For example, consider a subscription whose oldest unacked + message is 3 days old. If a snapshot is created from this + subscription, the snapshot -- which will always capture this + 3-day-old backlog as long as the snapshot exists -- will + expire in 4 days. The service will refuse to create a + snapshot that would expire in less than 1 hour after + creation. 
+ labels (Sequence[~.pubsub.Snapshot.LabelsEntry]): + See [Creating and managing labels] + (https://cloud.google.com/pubsub/docs/labels). + """ + + name = proto.Field(proto.STRING, number=1) + + topic = proto.Field(proto.STRING, number=2) + + expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) + + labels = proto.MapField(proto.STRING, proto.STRING, number=4) + + +class GetSnapshotRequest(proto.Message): + r"""Request for the GetSnapshot method. + + Attributes: + snapshot (str): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + """ + + snapshot = proto.Field(proto.STRING, number=1) + + +class ListSnapshotsRequest(proto.Message): + r"""Request for the ``ListSnapshots`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + page_size (int): + Maximum number of snapshots to return. + page_token (str): + The value returned by the last ``ListSnapshotsResponse``; + indicates that this is a continuation of a prior + ``ListSnapshots`` call, and that the system should return + the next page of data. + """ + + project = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListSnapshotsResponse(proto.Message): + r"""Response for the ``ListSnapshots`` method. + + Attributes: + snapshots (Sequence[~.pubsub.Snapshot]): + The resulting snapshots. + next_page_token (str): + If not empty, indicates that there may be more snapshot that + match the request; this value should be passed in a new + ``ListSnapshotsRequest``. + """ + + @property + def raw_page(self): + return self + + snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message=Snapshot,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteSnapshotRequest(proto.Message): + r"""Request for the ``DeleteSnapshot`` method. 
+ + Attributes: + snapshot (str): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + """ + + snapshot = proto.Field(proto.STRING, number=1) + + +class SeekRequest(proto.Message): + r"""Request for the ``Seek`` method. + + Attributes: + subscription (str): + Required. The subscription to affect. + time (~.timestamp.Timestamp): + The time to seek to. Messages retained in the subscription + that were published before this time are marked as + acknowledged, and messages retained in the subscription that + were published after this time are marked as unacknowledged. + Note that this operation affects only those messages + retained in the subscription (configured by the combination + of ``message_retention_duration`` and + ``retain_acked_messages``). For example, if ``time`` + corresponds to a point before the message retention window + (or to a point before the system's notion of the + subscription creation time), only retained messages will be + marked as unacknowledged, and already-expunged messages will + not be restored. + snapshot (str): + The snapshot to seek to. The snapshot's topic must be the + same as that of the provided subscription. Format is + ``projects/{project}/snapshots/{snap}``. 
+ """ + + subscription = proto.Field(proto.STRING, number=1) + + time = proto.Field( + proto.MESSAGE, number=2, oneof="target", message=timestamp.Timestamp, + ) + + snapshot = proto.Field(proto.STRING, number=3, oneof="target") + + +class SeekResponse(proto.Message): + r"""Response for the ``Seek`` method (this response is empty).""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/mypy.ini b/packages/google-cloud-pubsub/mypy.ini new file mode 100644 index 000000000000..4505b485436b --- /dev/null +++ b/packages/google-cloud-pubsub/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 615358c2e4b0..09e7acbda316 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -100,6 +102,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. 
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -158,3 +164,36 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 2c67c2c11302..8c2c31a8e795 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -1,3 +1,4 @@ + .. This file is automatically generated. Do not edit this file directly. Google Cloud Pub/Sub Python Samples @@ -15,13 +16,11 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs - - - Setup ------------------------------------------------------------------------------- + Authentication ++++++++++++++ @@ -32,6 +31,9 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started + + + Install Dependencies ++++++++++++++++++++ @@ -62,9 +64,15 @@ Install Dependencies .. 
_pip: https://pip.pypa.io/ .. _virtualenv: https://virtualenv.pypa.io/ + + + + + Samples ------------------------------------------------------------------------------- + Quickstart (Publisher) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -81,6 +89,8 @@ To run this sample: $ python quickstart/pub.py + + Quickstart (Subscriber) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -97,6 +107,8 @@ To run this sample: $ python quickstart/sub.py + + Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -110,7 +122,8 @@ To run this sample: .. code-block:: bash - $ python publisher.py --help + $ python publisher.py + usage: publisher.py [-h] project_id @@ -152,6 +165,8 @@ To run this sample: + + Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -165,7 +180,8 @@ To run this sample: .. code-block:: bash - $ python subscriber.py --help + $ python subscriber.py + usage: subscriber.py [-h] project_id @@ -215,6 +231,9 @@ To run this sample: -h, --help show this help message and exit + + + Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -227,15 +246,21 @@ Identity and Access Management To run this sample: .. code-block:: bash + $ python iam.py + + usage: iam.py [-h] project {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... + This application demonstrates how to perform basic operations on IAM policies with the Cloud Pub/Sub API. + For more information, see the README.md under /pubsub and the documentation at https://cloud.google.com/pubsub/docs. 
+ positional arguments: project Your Google Cloud project ID {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} @@ -251,10 +276,18 @@ To run this sample: check-subscription-permissions Checks to which permissions are available on the given subscription. + optional arguments: -h, --help show this help message and exit + + + + + + + The client library ------------------------------------------------------------------------------- @@ -270,4 +303,5 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues + .. _Google Cloud SDK: https://cloud.google.com/sdk/ diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 71c55d764c0c..71ee5da1b7c8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -36,7 +36,7 @@ def get_topic_policy(project, topic_id): client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_id) - policy = client.get_iam_policy(topic_path) + policy = client.get_iam_policy(request={"resource": topic_path}) print("Policy for topic {}:".format(topic_path)) for binding in policy.bindings: @@ -56,7 +56,7 @@ def get_subscription_policy(project, subscription_id): client = pubsub_v1.SubscriberClient() subscription_path = client.subscription_path(project, subscription_id) - policy = client.get_iam_policy(subscription_path) + policy = client.get_iam_policy(request={"resource": subscription_path}) print("Policy for subscription {}:".format(subscription_path)) for binding in policy.bindings: @@ -78,7 +78,7 @@ def set_topic_policy(project, topic_id): client = pubsub_v1.PublisherClient() topic_path = client.topic_path(project, topic_id) - policy = client.get_iam_policy(topic_path) + policy = client.get_iam_policy(request={"resource": topic_path}) # Add all 
users as viewers. policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) @@ -89,7 +89,7 @@ def set_topic_policy(project, topic_id): ) # Set the policy - policy = client.set_iam_policy(topic_path, policy) + policy = client.set_iam_policy(request={"resource": topic_path, "policy": policy}) print("IAM policy for topic {} set: {}".format(topic_id, policy)) # [END pubsub_set_topic_policy] @@ -107,7 +107,7 @@ def set_subscription_policy(project, subscription_id): client = pubsub_v1.SubscriberClient() subscription_path = client.subscription_path(project, subscription_id) - policy = client.get_iam_policy(subscription_path) + policy = client.get_iam_policy(request={"resource": subscription_path}) # Add all users as viewers. policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) @@ -116,7 +116,9 @@ def set_subscription_policy(project, subscription_id): policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) # Set the policy - policy = client.set_iam_policy(subscription_path, policy) + policy = client.set_iam_policy( + request={"resource": subscription_path, "policy": policy} + ) print("IAM policy for subscription {} set: {}".format(subscription_id, policy)) @@ -138,7 +140,9 @@ def check_topic_permissions(project, topic_id): permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] - allowed_permissions = client.test_iam_permissions(topic_path, permissions_to_check) + allowed_permissions = client.test_iam_permissions( + request={"resource": topic_path, "permissions": permissions_to_check} + ) print( "Allowed permissions for topic {}: {}".format(topic_path, allowed_permissions) @@ -164,7 +168,7 @@ def check_subscription_permissions(project, subscription_id): ] allowed_permissions = client.test_iam_permissions( - subscription_path, permissions_to_check + request={"resource": subscription_path, "permissions": permissions_to_check} ) print( diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py 
b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index d196953f6207..17bfdb256a9f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -36,15 +36,15 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) except Exception: pass - publisher_client.create_topic(topic_path) + publisher_client.create_topic(request={"name": topic_path}) yield topic_path - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) @pytest.fixture(scope="module") @@ -59,15 +59,19 @@ def subscription(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass - subscriber_client.create_subscription(subscription_path, topic=topic) + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) yield subscription_path - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_get_topic_policy(topic, capsys): @@ -87,7 +91,7 @@ def test_get_subscription_policy(subscription, capsys): def test_set_topic_policy(publisher_client, topic): iam.set_topic_policy(PROJECT, TOPIC) - policy = publisher_client.get_iam_policy(topic) + policy = publisher_client.get_iam_policy(request={"resource": topic}) assert "roles/pubsub.publisher" in str(policy) assert "allUsers" in str(policy) @@ -95,7 +99,7 @@ def test_set_topic_policy(publisher_client, topic): def test_set_subscription_policy(subscriber_client, subscription): iam.set_subscription_policy(PROJECT, SUBSCRIPTION) - policy = 
subscriber_client.get_iam_policy(subscription) + policy = subscriber_client.get_iam_policy(request={"resource": subscription}) assert "roles/pubsub.viewer" in str(policy) assert "allUsers" in str(policy) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 399d37679196..d1b7602803a8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -33,9 +33,9 @@ def list_topics(project_id): # project_id = "your-project-id" publisher = pubsub_v1.PublisherClient() - project_path = publisher.project_path(project_id) + project_path = f"projects/{project_id}" - for topic in publisher.list_topics(project_path): + for topic in publisher.list_topics(request={"project": project_path}): print(topic) # [END pubsub_list_topics] @@ -53,7 +53,7 @@ def create_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - topic = publisher.create_topic(topic_path) + topic = publisher.create_topic(request={"name": topic_path}) print("Topic created: {}".format(topic)) # [END pubsub_quickstart_create_topic] @@ -72,7 +72,7 @@ def delete_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - publisher.delete_topic(topic_path) + publisher.delete_topic(request={"topic": topic_path}) print("Topic deleted: {}".format(topic_path)) # [END pubsub_delete_topic] @@ -94,11 +94,11 @@ def publish_messages(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") # When you publish a message, the client returns a future. 
- future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic_path, data) print(future.result()) print("Published messages.") @@ -120,7 +120,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") # Add two attributes, origin and username, to the message @@ -163,9 +163,7 @@ def callback(f): data = str(i) futures.update({data: None}) # When you publish a message, the client returns a future. - future = publisher.publish( - topic_path, data=data.encode("utf-8") # data must be a bytestring. - ) + future = publisher.publish(topic_path, data.encode("utf-8")) futures[data] = future # Publish failures shall be handled in the callback function. future.add_done_callback(get_callback(future, data)) @@ -203,10 +201,10 @@ def callback(future): print(message_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") - future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic_path, data) # Non-blocking. Allow the publisher client to batch multiple messages. future.add_done_callback(callback) @@ -217,56 +215,38 @@ def callback(future): def publish_messages_with_retry_settings(project_id, topic_id): """Publishes messages with custom retry settings.""" # [START pubsub_publisher_retry_settings] + from google import api_core from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" # topic_id = "your-topic-id" - # Configure the retry settings. Defaults will be overwritten. 
- retry_settings = { - "interfaces": { - "google.pubsub.v1.Publisher": { - "retry_codes": { - "publish": [ - "ABORTED", - "CANCELLED", - "DEADLINE_EXCEEDED", - "INTERNAL", - "RESOURCE_EXHAUSTED", - "UNAVAILABLE", - "UNKNOWN", - ] - }, - "retry_params": { - "messaging": { - "initial_retry_delay_millis": 100, # default: 100 - "retry_delay_multiplier": 1.3, # default: 1.3 - "max_retry_delay_millis": 60000, # default: 60000 - "initial_rpc_timeout_millis": 5000, # default: 25000 - "rpc_timeout_multiplier": 1.0, # default: 1.0 - "max_rpc_timeout_millis": 600000, # default: 30000 - "total_timeout_millis": 600000, # default: 600000 - } - }, - "methods": { - "Publish": { - "retry_codes_name": "publish", - "retry_params_name": "messaging", - } - }, - } - } - } - - publisher = pubsub_v1.PublisherClient(client_config=retry_settings) + # Configure the retry settings. + custom_retry = api_core.retry.Retry( + initial=0.250, # seconds (default: 0.1) + maximum=90.0, # seconds (default: 60.0) + multiplier=1.45, # default: 1.3 + deadline=300.0, # seconds (default: 600.0) + predicate=api_core.retry.if_exception_type( + api_core.exceptions.Aborted, + api_core.exceptions.DeadlineExceeded, + api_core.exceptions.InternalServerError, + api_core.exceptions.ResourceExhausted, + api_core.exceptions.ServiceUnavailable, + api_core.exceptions.Unknown, + api_core.exceptions.Cancelled, + ), + ) + + publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = u"Message number {}".format(n) + data = "Message number {}".format(n) # Data must be a bytestring data = data.encode("utf-8") - future = publisher.publish(topic_path, data=data) + future = publisher.publish(topic=topic_path, data=data, retry=custom_retry) print(future.result()) print("Published messages with retry settings.") diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 
95fda846a95c..c95ea7ed57fe 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -39,9 +39,9 @@ def topic_admin(client): topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - topic = client.get_topic(topic_path) + topic = client.get_topic(request={"topic": topic_path}) except: # noqa - topic = client.create_topic(topic_path) + topic = client.create_topic(request={"name": topic_path}) yield topic.name # Teardown of `topic_admin` is handled in `test_delete()`. @@ -52,13 +52,13 @@ def topic_publish(client): topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) try: - topic = client.get_topic(topic_path) + topic = client.get_topic(request={"topic": topic_path}) except: # noqa - topic = client.create_topic(topic_path) + topic = client.create_topic(request={"name": topic_path}) yield topic.name - client.delete_topic(topic.name) + client.delete_topic(request={"topic": topic.name}) def _make_sleep_patch(): @@ -87,7 +87,7 @@ def eventually_consistent_test(): def test_create(client): topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - client.delete_topic(topic_path) + client.delete_topic(request={"topic": topic_path}) except Exception: pass @@ -95,7 +95,7 @@ def test_create(client): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - assert client.get_topic(topic_path) + assert client.get_topic(request={"topic": topic_path}) eventually_consistent_test() @@ -106,7 +106,7 @@ def test_delete(client, topic_admin): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): with pytest.raises(Exception): - client.get_topic(client.topic_path(PROJECT, TOPIC_ADMIN)) + client.get_topic(request={"topic": client.topic_path(PROJECT, TOPIC_ADMIN)}) eventually_consistent_test() diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py 
b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index 16432c0c3627..8585711f302c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -63,7 +63,7 @@ def pub(project_id, topic_id): ref = dict({"num_messages": 0}) # When you publish a message, the client returns a future. - api_future = client.publish(topic_path, data=data) + api_future = client.publish(topic_path, data) api_future.add_done_callback(get_callback(api_future, data, ref)) # Keep the main thread from exiting while the message future diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py index 6f5cc06c4456..0be087bd2b98 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py @@ -39,13 +39,13 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - publisher_client.create_topic(topic_path) + publisher_client.create_topic(request={"name": topic_path}) except AlreadyExists: pass yield TOPIC - publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) def test_pub(publisher_client, topic, capsys): diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py index 38047422a935..089705af6397 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py @@ -38,12 +38,12 @@ def topic_path(): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - topic = publisher_client.create_topic(topic_path) + topic = publisher_client.create_topic(request={"name": topic_path}) yield topic.name except AlreadyExists: yield topic_path - 
publisher_client.delete_topic(topic_path) + publisher_client.delete_topic(request={"topic": topic_path}) @pytest.fixture(scope="module") @@ -52,18 +52,18 @@ def subscription_path(topic_path): try: subscription = subscriber_client.create_subscription( - subscription_path, topic_path + request={"name": subscription_path, "topic": topic_path} ) yield subscription.name except AlreadyExists: yield subscription_path - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription(request={"subscription": subscription_path}) subscriber_client.close() def _publish_messages(topic_path): - publish_future = publisher_client.publish(topic_path, data=b"Hello World!") + publish_future = publisher_client.publish(topic_path, b"Hello World!") publish_future.result() diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 94e1c5cd48b2..39a05ef37c3c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -36,7 +36,8 @@ def list_subscriptions_in_topic(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) - for subscription in publisher.list_topic_subscriptions(topic_path): + response = publisher.list_topic_subscriptions(request={"topic": topic_path}) + for subscription in response: print(subscription) # [END pubsub_list_topic_subscriptions] @@ -50,12 +51,14 @@ def list_subscriptions_in_project(project_id): # project_id = "your-project-id" subscriber = pubsub_v1.SubscriberClient() - project_path = subscriber.project_path(project_id) + project_path = f"projects/{project_id}" # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. 
with subscriber: - for subscription in subscriber.list_subscriptions(project_path): + for subscription in subscriber.list_subscriptions( + request={"project": project_path} + ): print(subscription.name) # [END pubsub_list_subscriptions] @@ -70,14 +73,17 @@ def create_subscription(project_id, topic_id, subscription_id): # topic_id = "your-topic-id" # subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. with subscriber: - subscription = subscriber.create_subscription(subscription_path, topic_path) + subscription = subscriber.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) print("Subscription created: {}".format(subscription)) # [END pubsub_create_pull_subscription] @@ -103,19 +109,24 @@ def create_subscription_with_dead_letter_topic( # with dead letter policy will forward dead letter messages to. 
# dead_letter_topic_id = "your-dead-letter-topic-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id) dead_letter_policy = DeadLetterPolicy( dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 ) with subscriber: - subscription = subscriber.create_subscription( - subscription_path, topic_path, dead_letter_policy=dead_letter_policy - ) + request = { + "name": subscription_path, + "topic": topic_path, + "dead_letter_policy": dead_letter_policy, + } + subscription = subscriber.create_subscription(request) print("Subscription created: {}".format(subscription.name)) print( @@ -142,8 +153,9 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): # subscription_id = "your-subscription-id" # endpoint = "https://my-test-project.appspot.com/push" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) push_config = pubsub_v1.types.PushConfig(push_endpoint=endpoint) @@ -152,7 +164,11 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): # close the underlying gRPC channel when done. 
with subscriber: subscription = subscriber.create_subscription( - subscription_path, topic_path, push_config + request={ + "name": subscription_path, + "topic": topic_path, + "push_config": push_config, + } ) print("Push subscription created: {}".format(subscription)) @@ -170,13 +186,18 @@ def create_subscription_with_ordering(project_id, topic_id, subscription_id): # topic_id = "your-topic-id" # subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) with subscriber: subscription = subscriber.create_subscription( - subscription_path, topic_path, enable_message_ordering=True + request={ + "name": subscription_path, + "topic": topic_path, + "enable_message_ordering": True, + } ) print("Created subscription with ordering: {}".format(subscription)) # [END pubsub_enable_subscription_ordering] @@ -197,7 +218,7 @@ def delete_subscription(project_id, subscription_id): # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. with subscriber: - subscriber.delete_subscription(subscription_path) + subscriber.delete_subscription(request={"subscription": subscription_path}) print("Subscription deleted: {}".format(subscription_path)) # [END pubsub_delete_subscription] @@ -232,7 +253,9 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. 
with subscriber: - result = subscriber.update_subscription(subscription, update_mask) + result = subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} + ) print("Subscription updated: {}".format(subscription_path)) print("New endpoint for subscription is: {}".format(result.push_config)) @@ -258,12 +281,16 @@ def update_subscription_with_dead_letter_policy( # with dead letter policy will forward dead letter messages to. # dead_letter_topic_id = "your-dead-letter-topic-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - dead_letter_topic_path = subscriber.topic_path(project_id, dead_letter_topic_id) + dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id) - subscription_before_update = subscriber.get_subscription(subscription_path) + subscription_before_update = subscriber.get_subscription( + request={"subscription": subscription_path} + ) print("Before the update: {}".format(subscription_before_update)) # Indicates which fields in the provided subscription to update. @@ -283,7 +310,7 @@ def update_subscription_with_dead_letter_policy( with subscriber: subscription_after_update = subscriber.update_subscription( - subscription, update_mask + request={"subscription": subscription, "update_mask": update_mask} ) print("After the update: {}".format(subscription_after_update)) @@ -305,11 +332,14 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): # TODO(developer): This is an existing subscription with a dead letter policy. 
# subscription_id = "your-subscription-id" + publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() - topic_path = subscriber.topic_path(project_id, topic_id) + topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - subscription_before_update = subscriber.get_subscription(subscription_path) + subscription_before_update = subscriber.get_subscription( + request={"subscription": subscription_path} + ) print("Before removing the policy: {}".format(subscription_before_update)) # Indicates which fields in the provided subscription to update. @@ -328,7 +358,7 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): with subscriber: subscription_after_update = subscriber.update_subscription( - subscription, update_mask + request={"subscription": subscription, "update_mask": update_mask} ) print("After removing the policy: {}".format(subscription_after_update)) @@ -467,7 +497,9 @@ def synchronous_pull(project_id, subscription_id): # close the underlying gRPC channel when done. with subscriber: # The subscriber pulls a specific number of messages. - response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + response = subscriber.pull( + request={"subscription": subscription_path, "max_messages": NUM_MESSAGES} + ) ack_ids = [] for received_message in response.received_messages: @@ -475,7 +507,9 @@ def synchronous_pull(project_id, subscription_id): ack_ids.append(received_message.ack_id) # Acknowledges the received messages so they will not be sent again. - subscriber.acknowledge(subscription_path, ack_ids) + subscriber.acknowledge( + request={"subscription": subscription_path, "ack_ids": ack_ids} + ) print( "Received and acknowledged {} messages. Done.".format( @@ -507,7 +541,9 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): SLEEP_TIME = 10 # The subscriber pulls a specific number of messages. 
- response = subscriber.pull(subscription_path, max_messages=NUM_MESSAGES) + response = subscriber.pull( + request={"subscription": subscription_path, "max_messages": NUM_MESSAGES} + ) multiprocessing.log_to_stderr() logger = multiprocessing.get_logger() @@ -539,7 +575,11 @@ def worker(msg): if process.is_alive(): # `ack_deadline_seconds` must be between 10 to 600. subscriber.modify_ack_deadline( - subscription_path, [ack_id], ack_deadline_seconds=ACK_DEADLINE, + request={ + "subscription": subscription_path, + "ack_ids": [ack_id], + "ack_deadline_seconds": ACK_DEADLINE, + } ) logger.info( "{}: Reset ack deadline for {} for {}s".format( @@ -549,7 +589,9 @@ def worker(msg): # If the processs is finished, acknowledges using `ack_id`. else: - subscriber.acknowledge(subscription_path, [ack_id]) + subscriber.acknowledge( + request={"subscription": subscription_path, "ack_ids": [ack_id]} + ) logger.info( "{}: Acknowledged {}".format( time.strftime("%X", time.gmtime()), msg_data diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 62018e9a937f..37b83b877205 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -44,13 +44,13 @@ def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, TOPIC) try: - topic = publisher_client.get_topic(topic_path) + topic = publisher_client.get_topic(request={"topic": topic_path}) except: # noqa - topic = publisher_client.create_topic(topic_path) + topic = publisher_client.create_topic(request={"name": topic_path}) yield topic.name - publisher_client.delete_topic(topic.name) + publisher_client.delete_topic(request={"topic": topic.name}) @pytest.fixture(scope="module") @@ -58,13 +58,13 @@ def dead_letter_topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) try: - dead_letter_topic = 
publisher_client.get_topic(topic_path) + dead_letter_topic = publisher_client.get_topic(request={"topic": topic_path}) except: # noqa - dead_letter_topic = publisher_client.create_topic(topic_path) + dead_letter_topic = publisher_client.create_topic(request={"name": topic_path}) yield dead_letter_topic.name - publisher_client.delete_topic(dead_letter_topic.name) + publisher_client.delete_topic(request={"topic": dead_letter_topic.name}) @pytest.fixture(scope="module") @@ -79,10 +79,12 @@ def subscription_admin(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name @@ -93,15 +95,17 @@ def subscription_sync(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name - subscriber_client.delete_subscription(subscription.name) + subscriber_client.delete_subscription(request={"subscription": subscription.name}) @pytest.fixture(scope="module") @@ -109,15 +113,17 @@ def subscription_async(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": 
subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name - subscriber_client.delete_subscription(subscription.name) + subscriber_client.delete_subscription(request={"subscription": subscription.name}) @pytest.fixture(scope="module") @@ -125,15 +131,17 @@ def subscription_dlq(subscriber_client, topic): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) try: - subscription = subscriber_client.get_subscription(subscription_path) + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) except: # noqa subscription = subscriber_client.create_subscription( - subscription_path, topic=topic + request={"name": subscription_path, "topic": topic} ) yield subscription.name - subscriber_client.delete_subscription(subscription.name) + subscriber_client.delete_subscription(request={"subscription": subscription.name}) def test_list_in_topic(subscription_admin, capsys): @@ -160,7 +168,9 @@ def test_create(subscriber_client): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass @@ -168,7 +178,9 @@ def test_create(subscriber_client): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) + assert subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) eventually_consistent_test() @@ -180,7 +192,9 @@ def test_create_subscription_with_dead_letter_policy( dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) try: - subscriber_client.delete_subscription(subscription_path) + 
subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass @@ -197,7 +211,9 @@ def test_create_subscription_with_dead_letter_policy( def test_create_push(subscriber_client): subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) try: - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) except Exception: pass @@ -205,7 +221,9 @@ def test_create_push(subscriber_client): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - assert subscriber_client.get_subscription(subscription_path) + assert subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) eventually_consistent_test() @@ -217,7 +235,7 @@ def test_create_subscription_with_ordering(subscriber_client, capsys): assert "enable_message_ordering: true" in out subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ORDERING) - subscriber_client.delete_subscription(subscription_path) + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_update(subscriber_client, subscription_admin, capsys): @@ -246,17 +264,17 @@ def test_delete(subscriber_client, subscription_admin): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): with pytest.raises(Exception): - subscriber_client.get_subscription(subscription_admin) + subscriber_client.get_subscription( + request={"subscription": subscription_admin} + ) eventually_consistent_test() -def _publish_messages(publisher_client, topic): +def _publish_messages(publisher_client, topic, **attrs): for n in range(5): data = u"message {}".format(n).encode("utf-8") - publish_future = publisher_client.publish( - topic, data=data, origin="python-sample" - ) + publish_future = publisher_client.publish(topic, data, **attrs) 
publish_future.result() @@ -275,7 +293,7 @@ def test_receive_with_custom_attributes( publisher_client, topic, subscription_async, capsys ): - _publish_messages(publisher_client, topic) + _publish_messages(publisher_client, topic, origin="python-sample") subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5) diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py new file mode 100644 index 000000000000..b54a7ad375d3 --- /dev/null +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -0,0 +1,206 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class pubsubCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'acknowledge': ('subscription', 'ack_ids', ), + 'create_snapshot': ('name', 'subscription', 'labels', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', ), + 'delete_snapshot': ('snapshot', ), + 'delete_subscription': ('subscription', ), + 'delete_topic': ('topic', ), + 'detach_subscription': ('subscription', ), + 'get_snapshot': ('snapshot', ), + 'get_subscription': ('subscription', ), + 'get_topic': ('topic', ), + 'list_snapshots': ('project', 'page_size', 'page_token', ), + 'list_subscriptions': ('project', 'page_size', 'page_token', ), + 'list_topics': ('project', 'page_size', 'page_token', ), + 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), + 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), + 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), + 'modify_push_config': ('subscription', 'push_config', ), + 'publish': ('topic', 'messages', ), + 'pull': ('subscription', 'max_messages', 'return_immediately', ), + 'seek': ('subscription', 'time', 'snapshot', ), + 'streaming_pull': ('subscription', 
'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), + 'update_snapshot': ('snapshot', 'update_mask', ), + 'update_subscription': ('subscription', 'update_mask', ), + 'update_topic': ('topic', 'update_mask', ), + + 'get_iam_policy': ('resource', 'options', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=pubsubCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the pubsub client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3da2e269a738..c26d140a471c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,19 +22,17 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "1.7.0" +version = "2.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - # google-api-core[grpc] 1.17.0 up to 1.19.1 causes problems with stream - # recovery, thus those versions should not be used. 
- # https://github.com/googleapis/python-pubsub/issues/74 - "google-api-core[grpc] >= 1.14.0, != 1.17.*, != 1.18.*, != 1.19.*", + "google-api-core[grpc] >= 1.22.1, < 2.0.0dev", + "libcst >= 0.3.10", + "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - 'enum34; python_version < "3.4"', ] extras = {} @@ -50,7 +48,9 @@ # Only include packages under the 'google' namespace. Do not include tests, # benchmarks, etc. packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] # Determine which namespaces are needed. @@ -73,12 +73,10 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -87,7 +85,8 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*", + python_requires=">=3.6", + scripts=["scripts/fixup_pubsub_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index f67fbeec5314..46bc8fdd2426 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,37 +3,29 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "c8f63788636c2e3436c8ce6a01ef3b59e3df772a" - } - }, - { - "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"b882b8e6bfcd708042ff00f7adc67ce750817dd0", - "internalRef": "318028816" + "remote": "git@github.com:plamut/python-pubsub.git", + "sha": "c29d7f891c776e1a3fcb1cbfc7f549ca0772f38e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" } } ], diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 0e2c96e42de2..fe1b0838ea29 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -45,234 +45,75 @@ ], ) -# Adjust tests to import the clients directly. +# DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. s.replace( - "tests/unit/gapic/v1/test_publisher_client_v1.py", - "from google.cloud import pubsub_v1", - "from google.cloud.pubsub_v1.gapic import publisher_client", -) - -s.replace( - "tests/unit/gapic/v1/test_publisher_client_v1.py", " pubsub_v1", " publisher_client" -) - -s.replace( - "tests/unit/gapic/v1/test_subscriber_client_v1.py", - "from google.cloud import pubsub_v1", - "from google.cloud.pubsub_v1.gapic import subscriber_client", -) - -s.replace( - "tests/unit/gapic/v1/test_subscriber_client_v1.py", - " pubsub_v1", - " subscriber_client", -) - -# DEFAULT SCOPES are being used. so let's force them in. -s.replace( - "google/cloud/pubsub_v1/gapic/*er_client.py", - "# The name of the interface for this client. 
This is the key used to", - """# The scopes needed to make gRPC calls to all of the methods defined in + "google/pubsub_v1/services/*er/*client.py", + r"DEFAULT_ENDPOINT = 'pubsub\.googleapis\.com'", + """ + # The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', ) - - \g<0>""", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import google.api_core.gapic_v1.method\n", - "\g<0>import google.api_core.path_template\n", -) - -# Doc strings are formatted poorly -s.replace( - "google/cloud/pubsub_v1/proto/pubsub_pb2.py", - 'DESCRIPTOR = _MESSAGESTORAGEPOLICY,\n\s+__module__.*\n\s+,\n\s+__doc__ = """', - "\g<0>A message storage policy.\n\n\n ", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - "subscription \(str\): The subscription whose backlog .*\n(.*\n)+?" - "\s+Format is .*", - """subscription (str): The subscription whose backlog the snapshot retains. - Specifically, the created snapshot is guaranteed to retain: \\ - (a) The existing backlog on the subscription. More precisely, this is \\ - defined as the messages in the subscription's backlog that are \\ - unacknowledged upon the successful completion of the \\ - `CreateSnapshot` request; as well as: \\ - (b) Any messages published to the subscription's topic following the \\ - successful completion of the CreateSnapshot request. 
\\ - - Format is ``projects/{project}/subscriptions/{sub}``.""", -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import functools\n", - "import collections\n" - "from copy import deepcopy\n\g<0>" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "import pkg_resources\n", - "\g<0>import six\n" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "class PublisherClient", - """# TODO: remove conditional import after Python 2 support is dropped -if six.PY2: - from collections import Mapping -else: - from collections.abc import Mapping - - -def _merge_dict(d1, d2): - # Modifies d1 in-place to take values from d2 - # if the nested keys from d2 are present in d1. - # https://stackoverflow.com/a/10704003/4488789 - for k, v2 in d2.items(): - v1 = d1.get(k) # returns None if v1 has no such key - if v1 is None: - raise Exception("{} is not recognized by client_config".format(k)) - if isinstance(v1, Mapping) and isinstance(v2, Mapping): - _merge_dict(v1, v2) - else: - d1[k] = v2 - return d1 - \n\n\g<0>""" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "client_config \(dict\): DEPRECATED.", - "client_config (dict):" -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "# Raise deprecation warnings .*\n.*\n.*\n.*\n.*\n.*\n", - """default_client_config = deepcopy(publisher_client_config.config) - - if client_config is None: - client_config = default_client_config - else: - client_config = _merge_dict(default_client_config, client_config) - """ -) - -s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - "~google.api_core.page_iterator.PageIterator", - "~google.api_core.page_iterator.GRPCIterator" -) + 'https://www.googleapis.com/auth/pubsub', + ) -s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - "~google.api_core.page_iterator.PageIterator", - "~google.api_core.page_iterator.GRPCIterator" -) + SERVICE_ADDRESS = 
"pubsub.googleapis.com:443" + \"""The default address of the service.\""" -# Temporary fixup for 'grpc-google-iam-vi 0.12.4' (before generation). -s.replace( - "google/cloud/pubsub_v1/gapic/transports/*_grpc_transport.py", - "from google.iam.v1 import iam_policy_pb2", - "from google.iam.v1 import iam_policy_pb2_grpc as iam_policy_pb2", + \g<0>""", ) # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream # results. s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"return self\._inner_api_calls\['streaming_pull'\]\(.*", + "google/pubsub_v1/services/subscriber/client.py", + ( + r"# Wrap the RPC method.*\n" + r"\s+# and friendly error.*\n" + r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]" + ), """ - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self.transport.streaming_pull._prefetch_first_result_ = False + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False - \g<0>""" + \g<0>""", ) -# Add missing blank line before Attributes: in generated docstrings -# https://github.com/googleapis/protoc-docs-plugin/pull/31 +# Docstrings of *_iam_policy() methods are formatted poorly and must be fixed +# in order to avoid docstring format warnings in docs. s.replace( - "google/cloud/pubsub_v1/proto/pubsub_pb2.py", - "(\s+)Attributes:", - "\n\g<1>Attributes:" -) - -# Fix incomplete docstring examples. 
-s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"\s+>>> subscription = \{'ack_deadline_seconds': ack_deadline_seconds\}", - textwrap.indent( - """ ->>> subscription_name = 'projects/my-project/subscriptions/my-subscription' ->>> subscription = { -... 'name': subscription_name, -... 'ack_deadline_seconds': ack_deadline_seconds, -... }""", - prefix=" " * 12, - ) + "google/pubsub_v1/services/*er/client.py", + r"(\s+)Args:", + "\n\g<1>Args:" ) - s.replace( - "google/cloud/pubsub_v1/gapic/subscriber_client.py", - r"\s+>>> snapshot = \{'expire_time': expire_time\}", - textwrap.indent( - """ ->>> snapshot_name = 'projects/my-project/snapshots/my-snapshot' ->>> snapshot = { -... 'name': snapshot_name, -... 'expire_time': expire_time, -... }""", - prefix=" " * 12, - ) + "google/pubsub_v1/services/*er/client.py", + r"(\s+)\*\*JSON Example\*\*\s+::", + "\n\g<1>**JSON Example**::\n", ) - s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - r"\s+>>> # TODO: Initialize `topic`:\n\s+>>> topic = \{\}\n", - textwrap.indent( - """ ->>> topic_name = 'projects/my-project/topics/my-topic' ->>> topic_labels = {'source': 'external'} ->>> topic = {'name': topic_name, 'labels': topic_labels} -""", - prefix=" " * 12, - ), + "google/pubsub_v1/services/*er/client.py", + r"(\s+)\*\*YAML Example\*\*\s+::", + "\n\g<1>**YAML Example**::\n", ) - s.replace( - "google/cloud/pubsub_v1/gapic/publisher_client.py", - r"\s+>>> # TODO: Initialize `update_mask`:\n\s+>>> update_mask = \{\}\n", - textwrap.indent( - """ ->>> paths_element = 'labels' ->>> paths = [paths_element] ->>> update_mask = {'paths': paths} -""", - prefix=" " * 12, - ), + "google/pubsub_v1/services/*er/client.py", + r"(\s+)For a description of IAM and its features, see", + "\n\g<0>", ) # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = 
gcp.CommonTemplates().py_library( - unit_cov_level=97, + microgenerator=True, + samples=True, cov_level=99, system_test_external_dependencies=["psutil"], - samples=True, ) -s.move(templated_files) +s.move(templated_files, excludes=[".coveragerc"]) # ---------------------------------------------------------------------------- # Samples templates diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 1694c50229f1..bbedd9a11ff9 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -32,18 +32,19 @@ from google.cloud.pubsub_v1 import exceptions from google.cloud.pubsub_v1 import futures from google.cloud.pubsub_v1 import types +from google.pubsub_v1 import types as gapic_types from test_utils.system import unique_resource_id -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def project(): _, default_project = google.auth.default() yield default_project -@pytest.fixture(scope=u"module") +@pytest.fixture(scope="module") def publisher(): yield pubsub_v1.PublisherClient() @@ -71,15 +72,15 @@ def cleanup(): yield registry # Perform all clean up. - for to_call, argument in registry: - to_call(argument) + for to_call, args, kwargs in registry: + to_call(*args, **kwargs) def test_publish_messages(publisher, topic_path, cleanup): # Make sure the topic gets deleted. - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) futures = [ publisher.publish( @@ -95,7 +96,7 @@ def test_publish_messages(publisher, topic_path, cleanup): def test_publish_large_messages(publisher, topic_path, cleanup): # Make sure the topic gets deleted. 
- cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # Each message should be smaller than 10**7 bytes (the server side limit for # PublishRequest), but all messages combined in a PublishRequest should @@ -110,7 +111,7 @@ def test_publish_large_messages(publisher, topic_path, cleanup): max_latency=2.0, # so that autocommit happens after publishing all messages max_messages=100, ) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) futures = [publisher.publish(topic_path, msg_data, num=str(i)) for i in range(5)] @@ -126,15 +127,17 @@ def test_subscribe_to_messages( publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. - subscriber.create_subscription(subscription_path, topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Publish some messages. futures = [ @@ -169,15 +172,17 @@ def test_subscribe_to_messages_async_callbacks( publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic. 
- publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. - subscriber.create_subscription(subscription_path, topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Publish some messages. futures = [ @@ -219,26 +224,27 @@ def test_creating_subscriptions_with_non_default_settings( publisher, subscriber, project, topic_path, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and a subscription, customize the latter's policy - publisher.create_topic(topic_path) - - msg_retention_duration = {"seconds": 911} - expiration_policy = {"ttl": {"seconds": 90210}} - new_subscription = subscriber.create_subscription( - subscription_path, - topic_path, - ack_deadline_seconds=30, - retain_acked_messages=True, - message_retention_duration=msg_retention_duration, - expiration_policy=expiration_policy, - ) + publisher.create_topic(name=topic_path) + + request = { + "name": subscription_path, + "topic": topic_path, + "ack_deadline_seconds": 30, + "retain_acked_messages": True, + "message_retention_duration": {"seconds": 911}, + "expiration_policy": {"ttl": {"seconds": 90210}}, # 1 day, 3810 seconds + } + new_subscription = subscriber.create_subscription(request) # fetch the subscription and check its settings - project_path = subscriber.project_path(project) - subscriptions = subscriber.list_subscriptions(project_path) + project_path = f"projects/{project}" + subscriptions = subscriber.list_subscriptions(project=project_path) subscriptions = [sub for sub in subscriptions if sub.topic == topic_path] assert 
len(subscriptions) == 1 @@ -248,7 +254,9 @@ def test_creating_subscriptions_with_non_default_settings( assert subscription.ack_deadline_seconds == 30 assert subscription.retain_acked_messages assert subscription.message_retention_duration.seconds == 911 - assert subscription.expiration_policy.ttl.seconds == 90210 + assert subscription.expiration_policy.ttl == datetime.timedelta( + days=1, seconds=3810 + ) def test_listing_project_topics(publisher, project, cleanup): @@ -257,11 +265,11 @@ def test_listing_project_topics(publisher, project, cleanup): for i in range(1, 4) ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) - project_path = publisher.project_path(project) - project_topics = publisher.list_topics(project_path) + project_path = f"projects/{project}" + project_topics = publisher.list_topics(project=project_path) project_topics = set(t.name for t in project_topics) # there might be other topics in the project, thus do a "is subset" check @@ -275,8 +283,8 @@ def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): publisher.topic_path(project, "topic-2" + unique_resource_id(".")), ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) # create subscriptions subscription_paths = [ @@ -287,12 +295,14 @@ def test_listing_project_subscriptions(publisher, subscriber, project, cleanup): ] for i, subscription in enumerate(subscription_paths): topic = topic_paths[i % 2] - cleanup.append((subscriber.delete_subscription, subscription)) - subscriber.create_subscription(subscription, topic) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription}) + ) + subscriber.create_subscription(name=subscription, 
topic=topic) # retrieve subscriptions and check that the list matches the expected - project_path = subscriber.project_path(project) - subscriptions = subscriber.list_subscriptions(project_path) + project_path = f"projects/{project}" + subscriptions = subscriber.list_subscriptions(project=project_path) subscriptions = set(s.name for s in subscriptions) # there might be other subscriptions in the project, thus do a "is subset" check @@ -306,8 +316,8 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): publisher.topic_path(project, "topic-2" + unique_resource_id(".")), ] for topic in topic_paths: - cleanup.append((publisher.delete_topic, topic)) - publisher.create_topic(topic) + cleanup.append((publisher.delete_topic, (), {"topic": topic})) + publisher.create_topic(name=topic) # create subscriptions subscription_paths = [ @@ -318,31 +328,35 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): ] for i, subscription in enumerate(subscription_paths): topic = topic_paths[i % 2] - cleanup.append((subscriber.delete_subscription, subscription)) - subscriber.create_subscription(subscription, topic) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription}) + ) + subscriber.create_subscription(name=subscription, topic=topic) # retrieve subscriptions and check that the list matches the expected - subscriptions = publisher.list_topic_subscriptions(topic_paths[0]) - subscriptions = set(subscriptions) + response = publisher.list_topic_subscriptions(topic=topic_paths[0]) + subscriptions = set(response) assert subscriptions == {subscription_paths[0], subscription_paths[2]} def test_managing_topic_iam_policy(publisher, topic_path, cleanup): - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # create a topic and customize its policy - publisher.create_topic(topic_path) - topic_policy = publisher.get_iam_policy(topic_path) + 
publisher.create_topic(name=topic_path) + topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) topic_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) topic_policy.bindings.add( role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] ) - new_policy = publisher.set_iam_policy(topic_path, topic_policy) + new_policy = publisher.set_iam_policy( + request={"resource": topic_path, "policy": topic_policy} + ) # fetch the topic policy again and check its values - topic_policy = publisher.get_iam_policy(topic_path) + topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) assert topic_policy.bindings == new_policy.bindings assert len(topic_policy.bindings) == 2 @@ -358,22 +372,26 @@ def test_managing_subscription_iam_policy( publisher, subscriber, topic_path, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and a subscription, customize the latter's policy - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) - sub_policy = subscriber.get_iam_policy(subscription_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) + sub_policy = subscriber.get_iam_policy(request={"resource": subscription_path}) sub_policy.bindings.add(role="roles/pubsub.editor", members=["domain:google.com"]) sub_policy.bindings.add( role="roles/pubsub.viewer", members=["group:cloud-logs@google.com"] ) - new_policy = subscriber.set_iam_policy(subscription_path, sub_policy) + new_policy = subscriber.set_iam_policy( + request={"resource": subscription_path, "policy": sub_policy} 
+ ) # fetch the subscription policy again and check its values - sub_policy = subscriber.get_iam_policy(subscription_path) + sub_policy = subscriber.get_iam_policy(request={"resource": subscription_path}) assert sub_policy.bindings == new_policy.bindings assert len(sub_policy.bindings) == 2 @@ -397,17 +415,15 @@ def test_subscriber_not_leaking_open_sockets( # subscriber releases the sockets, too. subscriber = pubsub_v1.SubscriberClient() subscriber_2 = pubsub_v1.SubscriberClient() - cleanup.append((subscriber_2.delete_subscription, subscription_path)) - - def one_arg_close(subscriber): # the cleanup helper expects exactly one argument - subscriber.close() - - cleanup.append((one_arg_close, subscriber_2)) - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append( + (subscriber_2.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((subscriber_2.close, (), {})) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # Create topic before starting to track connection count (any sockets opened # by the publisher client are not counted by this test). - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) current_process = psutil.Process() conn_count_start = len(current_process.connections()) @@ -419,14 +435,14 @@ def one_arg_close(subscriber): # the cleanup helper expects exactly one argumen # Publish a few messages, wait for the publish to succeed. publish_futures = [ - publisher.publish(topic_path, u"message {}".format(i).encode()) + publisher.publish(topic_path, "message {}".format(i).encode()) for i in range(1, 4) ] for future in publish_futures: future.result() # Synchronously pull messages. 
- response = subscriber.pull(subscription_path, max_messages=3) + response = subscriber.pull(subscription=subscription_path, max_messages=3) assert len(response.received_messages) == 3 conn_count_end = len(current_process.connections()) @@ -437,15 +453,17 @@ def test_synchronous_pull_no_deadline_error_if_no_messages( publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and subscribe to it. - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) try: - response = subscriber.pull(subscription_path, max_messages=2) + response = subscriber.pull(subscription=subscription_path, max_messages=2) except core_exceptions.DeadlineExceeded: pytest.fail( "Unexpected DeadlineExceeded error on synchronous pull when no " @@ -460,12 +478,14 @@ def test_streaming_pull_callback_error_propagation( self, publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. 
- cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # publish a messages and wait until published future = publisher.publish(topic_path, b"hello!") @@ -486,17 +506,19 @@ def test_streaming_pull_ack_deadline( self, publisher, subscriber, project, topic_path, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and a subscription, then subscribe to the topic. This # must happen before the messages are published. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # Subscribe to the topic. This must happen before the messages # are published. subscriber.create_subscription( - subscription_path, topic_path, ack_deadline_seconds=45 + name=subscription_path, topic=topic_path, ack_deadline_seconds=45 ) # publish some messages and wait for completion @@ -535,12 +557,14 @@ def test_streaming_pull_max_messages( self, publisher, topic_path, subscriber, subscription_path, cleanup ): # Make sure the topic and subscription get deleted. 
- cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) batch_sizes = (7, 4, 8, 2, 10, 1, 3, 8, 6, 1) # total: 50 _publish_messages(publisher, topic_path, batch_sizes=batch_sizes) @@ -596,12 +620,14 @@ def test_streaming_pull_subscriber_permissions_sufficient( ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # create a topic and subscribe to it - publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # A service account granting only the pubsub.subscriber role must be used. filename = os.path.join( @@ -631,12 +657,14 @@ def test_publisher_role_can_publish_messages( ): # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) # Create a topic and subscribe to it. 
- publisher.create_topic(topic_path) - subscriber.create_subscription(subscription_path, topic_path) + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) # Create a publisher client with only the publisher role only. filename = os.path.join( @@ -646,7 +674,7 @@ def test_publisher_role_can_publish_messages( _publish_messages(publisher_only_client, topic_path, batch_sizes=[2]) - response = subscriber.pull(subscription_path, max_messages=2) + response = subscriber.pull(subscription=subscription_path, max_messages=2) assert len(response.received_messages) == 2 @pytest.mark.skip( @@ -659,14 +687,16 @@ def test_snapshot_seek_subscriber_permissions_sufficient( snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, topic_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - cleanup.append((subscriber.delete_snapshot, snapshot_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((subscriber.delete_snapshot, (), {"snapshot": snapshot_path})) # Create a topic and subscribe to it. - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) subscriber.create_subscription( - subscription_path, topic_path, retain_acked_messages=True + name=subscription_path, topic=topic_path, retain_acked_messages=True ) # A service account granting only the pubsub.subscriber role must be used. @@ -677,20 +707,23 @@ def test_snapshot_seek_subscriber_permissions_sufficient( # Publish two messages and create a snapshot inbetween. 
_publish_messages(publisher, topic_path, batch_sizes=[1]) - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 - subscriber.create_snapshot(snapshot_path, subscription_path) + subscriber.create_snapshot(name=snapshot_path, subscription=subscription_path) _publish_messages(publisher, topic_path, batch_sizes=[1]) - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 # A subscriber-only client should be allowed to seek to a snapshot. - subscriber_only_client.seek(subscription_path, snapshot=snapshot_path) + seek_request = gapic_types.SeekRequest( + subscription=subscription_path, snapshot=snapshot_path + ) + subscriber_only_client.seek(seek_request) # We should receive one message again, since we sought back to a snapshot. - response = subscriber.pull(subscription_path, max_messages=10) + response = subscriber.pull(subscription=subscription_path, max_messages=10) assert len(response.received_messages) == 1 def test_viewer_role_can_list_resources( @@ -699,9 +732,9 @@ def test_viewer_role_can_list_resources( project_path = "projects/" + project # Make sure the created topic gets deleted. - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - publisher.create_topic(topic_path) + publisher.create_topic(name=topic_path) # A service account granting only the pubsub.viewer role must be used. filename = os.path.join( @@ -712,10 +745,17 @@ def test_viewer_role_can_list_resources( # The following operations should not raise permission denied errors. # NOTE: At least one topic exists. 
- topic = next(iter(viewer_only_publisher.list_topics(project_path))) - next(iter(viewer_only_publisher.list_topic_subscriptions(topic.name)), None) - next(iter(viewer_only_subscriber.list_subscriptions(project_path)), None) - next(iter(viewer_only_subscriber.list_snapshots(project_path)), None) + topic = next(iter(viewer_only_publisher.list_topics(project=project_path))) + + next( + iter(viewer_only_publisher.list_topic_subscriptions(topic=topic.name)), None + ) + + next( + iter(viewer_only_subscriber.list_subscriptions(project=project_path)), None + ) + + next(iter(viewer_only_subscriber.list_snapshots(project=project_path)), None) def test_editor_role_can_create_resources( self, project, publisher, topic_path, subscriber, subscription_path, cleanup @@ -724,9 +764,11 @@ def test_editor_role_can_create_resources( snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) # Make sure the created resources get deleted. - cleanup.append((subscriber.delete_snapshot, snapshot_path)) - cleanup.append((subscriber.delete_subscription, subscription_path)) - cleanup.append((publisher.delete_topic, topic_path)) + cleanup.append((subscriber.delete_snapshot, (), {"snapshot": snapshot_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # A service account granting only the pubsub.editor role must be used. filename = os.path.join( @@ -736,9 +778,11 @@ def test_editor_role_can_create_resources( editor_publisher = type(publisher).from_service_account_file(filename) # The following operations should not raise permission denied errors. 
- editor_publisher.create_topic(topic_path) - editor_subscriber.create_subscription(subscription_path, topic_path) - editor_subscriber.create_snapshot(snapshot_path, subscription_path) + editor_publisher.create_topic(name=topic_path) + editor_subscriber.create_subscription(name=subscription_path, topic=topic_path) + editor_subscriber.create_snapshot( + name=snapshot_path, subscription=subscription_path + ) def _publish_messages(publisher, topic_path, batch_sizes): @@ -760,7 +804,7 @@ def _publish_messages(publisher, topic_path, batch_sizes): def _make_messages(count): messages = [ - u"message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) + "message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) ] return messages diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py new file mode 100644 index 000000000000..8b137891791f --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py new file mode 100644 index 000000000000..f159c1cfb90a --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -0,0 +1,3106 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.services.publisher import PublisherAsyncClient +from google.pubsub_v1.services.publisher import PublisherClient +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.services.publisher import transports +from google.pubsub_v1.types import pubsub + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PublisherClient._get_default_mtls_endpoint(None) is None + assert PublisherClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert ( + PublisherClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + PublisherClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + PublisherClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +def test_publisher_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_publisher_client_get_transport_class(): + transport = PublisherClient.get_transport_class() + assert transport == transports.PublisherGrpcTransport + + transport = PublisherClient.get_transport_class("grpc") + assert transport == transports.PublisherGrpcTransport + + +@pytest.mark.parametrize( + 
"client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) +) +@mock.patch.object( + PublisherAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PublisherAsyncClient), +) +def test_publisher_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PublisherClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PublisherClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_publisher_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_publisher_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_publisher_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = PublisherClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", kms_key_name="kms_key_name_value", + ) + + response = client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.Topic() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_create_topic_from_dict(): + test_create_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.Topic() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + ) + + response = await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_create_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + call.return_value = pubsub.Topic() + + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_topic_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_topic(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_create_topic_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_topic( + pubsub.Topic(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_create_topic_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_topic(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_create_topic_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_topic( + pubsub.Topic(), name="name_value", + ) + + +def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", kms_key_name="kms_key_name_value", + ) + + response = client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateTopicRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_update_topic_from_dict(): + test_update_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.UpdateTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + ) + + response = await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_update_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + request.topic.name = "topic.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_topic), "__call__") as call: + call.return_value = pubsub.Topic() + + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + request.topic.name = "topic.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] + + +def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse(message_ids=["message_ids_value"],) + + response = client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.PublishRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + + assert response.message_ids == ["message_ids_value"] + + +def test_publish_from_dict(): + test_publish(request_type=dict) + + +@pytest.mark.asyncio +async def test_publish_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.PublishRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse(message_ids=["message_ids_value"],) + ) + + response = await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + + assert response.message_ids == ["message_ids_value"] + + +def test_publish_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PublishRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.publish), "__call__") as call: + call.return_value = pubsub.PublishResponse() + + client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_publish_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.PublishRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse() + ) + + await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_publish_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.publish( + topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] + + +def test_publish_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.publish( + pubsub.PublishRequest(), + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + +@pytest.mark.asyncio +async def test_publish_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.publish( + topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] + + +@pytest.mark.asyncio +async def test_publish_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.publish( + pubsub.PublishRequest(), + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + +def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name="name_value", kms_key_name="kms_key_name_value", + ) + + response = client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetTopicRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_topic_from_dict(): + test_get_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.GetTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + ) + + response = await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + + assert response.name == "name_value" + + assert response.kms_key_name == "kms_key_name_value" + + +def test_get_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + call.return_value = pubsub.Topic() + + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_get_topic_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_get_topic_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + pubsub.GetTopicRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_get_topic_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_get_topic_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_topic( + pubsub.GetTopicRequest(), topic="topic_value", + ) + + +def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_from_dict(): + test_list_topics(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_topics_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.ListTopicsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + call.return_value = pubsub.ListTopicsResponse() + + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topics_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.ListTopicsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topics), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse() + ) + + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +def test_list_topics_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topics(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +def test_list_topics_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + pubsub.ListTopicsRequest(), project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_topics_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_topics), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topics(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +@pytest.mark.asyncio +async def test_list_topics_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_topics( + pubsub.ListTopicsRequest(), project="project_value", + ) + + +def test_list_topics_pager(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + next_page_token="abc", + ), + pubsub.ListTopicsResponse(topics=[], next_page_token="def",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_topics(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, pubsub.Topic) for i in results) + + +def test_list_topics_pages(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + next_page_token="abc", + ), + pubsub.ListTopicsResponse(topics=[], next_page_token="def",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + RuntimeError, + ) + pages = list(client.list_topics(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topics_async_pager(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topics), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + next_page_token="abc", + ), + pubsub.ListTopicsResponse(topics=[], next_page_token="def",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + RuntimeError, + ) + async_pager = await client.list_topics(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Topic) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topics_async_pages(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topics), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + next_page_token="abc", + ), + pubsub.ListTopicsResponse(topics=[], next_page_token="def",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), + pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topics(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_topic_subscriptions( + transport: str = "grpc", request_type=pubsub.ListTopicSubscriptionsRequest +): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + + response = client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicSubscriptionsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicSubscriptionsPager) + + assert response.subscriptions == ["subscriptions_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_subscriptions_from_dict(): + test_list_topic_subscriptions(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListTopicSubscriptionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) + + assert response.subscriptions == ["subscriptions_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_subscriptions_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.ListTopicSubscriptionsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSubscriptionsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse() + ) + + await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_list_topic_subscriptions_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_subscriptions(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_list_topic_subscriptions_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topic_subscriptions(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), topic="topic_value", + ) + + +def test_list_topic_subscriptions_pager(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), + ) + pager = client.list_topic_subscriptions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_list_topic_subscriptions_pages(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + pages = list(client.list_topic_subscriptions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pager(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + async_pager = await client.list_topic_subscriptions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pages(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_topic_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topic_subscriptions(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_topic_snapshots( + transport: str = "grpc", request_type=pubsub.ListTopicSnapshotsRequest +): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], next_page_token="next_page_token_value", + ) + + response = client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicSnapshotsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicSnapshotsPager) + + assert response.snapshots == ["snapshots_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_snapshots_from_dict(): + test_list_topic_snapshots(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListTopicSnapshotsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], next_page_token="next_page_token_value", + ) + ) + + response = await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) + + assert response.snapshots == ["snapshots_value"] + + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_snapshots_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSnapshotsResponse() + + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse() + ) + + await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_list_topic_snapshots_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListTopicSnapshotsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_snapshots(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_list_topic_snapshots_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topic_snapshots(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), topic="topic_value", + ) + + +def test_list_topic_snapshots_pager(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), + ) + pager = client.list_topic_snapshots(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + +def test_list_topic_snapshots_pages(): + client = PublisherClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_topic_snapshots), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + pages = list(client.list_topic_snapshots(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pager(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_topic_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + async_pager = await client.list_topic_snapshots(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pages(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_topic_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(), str(), str(),], next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListTopicSnapshotsResponse( + snapshots=[str(),], next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topic_snapshots(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRequest): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteTopicRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_topic_from_dict(): + test_delete_topic(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_topic_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.DeleteTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_topic_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + call.return_value = None + + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_topic_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + request.topic = "topic/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_topic), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + + +def test_delete_topic_flattened(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +def test_delete_topic_flattened_error(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_topic( + pubsub.DeleteTopicRequest(), topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_delete_topic_flattened_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_topic), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_topic(topic="topic_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].topic == "topic_value" + + +@pytest.mark.asyncio +async def test_delete_topic_flattened_error_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_topic( + pubsub.DeleteTopicRequest(), topic="topic_value", + ) + + +def test_detach_subscription( + transport: str = "grpc", request_type=pubsub.DetachSubscriptionRequest +): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.DetachSubscriptionResponse() + + response = client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DetachSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_from_dict(): + test_detach_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_detach_subscription_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.DetachSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + + response = await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.DetachSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.detach_subscription), "__call__" + ) as call: + call.return_value = pubsub.DetachSubscriptionResponse() + + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_detach_subscription_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DetachSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.detach_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + + await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = PublisherClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PublisherGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.PublisherGrpcTransport,) + + +def test_publisher_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.PublisherTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_publisher_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PublisherTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_topic", + "update_topic", + "publish", + "get_topic", + "list_topics", + "list_topic_subscriptions", + "list_topic_snapshots", + "delete_topic", + "detach_subscription", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_publisher_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + 
quota_project_id="octopus", + ) + + +def test_publisher_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + PublisherClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +def test_publisher_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.PublisherGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_publisher_host_no_port(): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_publisher_host_with_port(): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:8000" + + +def test_publisher_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_publisher_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.PublisherGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_publisher_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_publisher_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.PublisherGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_publisher_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_publisher_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        mock_cred = mock.Mock()
        transport = transports.PublisherGrpcAsyncIOTransport(
            host="squid.clam.whelk",
            credentials=mock_cred,
            api_mtls_endpoint=api_mtls_endpoint,
            client_cert_source=None,
        )
        # The channel must target the mTLS endpoint (port normalized to 443)
        # and carry the SSL credentials obtained from ADC.
        grpc_create_channel.assert_called_once_with(
            "mtls.squid.clam.whelk:443",
            credentials=mock_cred,
            credentials_file=None,
            scopes=(
                "https://www.googleapis.com/auth/cloud-platform",
                "https://www.googleapis.com/auth/pubsub",
            ),
            ssl_credentials=mock_ssl_cred,
            quota_project_id=None,
        )
        assert transport.grpc_channel == mock_grpc_channel


def test_topic_path():
    """topic_path() renders the canonical projects/{project}/topics/{topic} form."""
    project = "squid"
    topic = "clam"

    expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,)
    actual = PublisherClient.topic_path(project, topic)
    assert expected == actual


def test_parse_topic_path():
    """parse_topic_path() inverts topic_path() back to its components."""
    expected = {
        "project": "whelk",
        "topic": "octopus",
    }
    path = PublisherClient.topic_path(**expected)

    # Check that the path construction is reversible.
    actual = PublisherClient.parse_topic_path(path)
    assert expected == actual


def test_client_withDEFAULT_CLIENT_INFO():
    """A custom client_info is forwarded to _prep_wrapped_messages on both paths."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Constructing the client directly should pass client_info through.
    with mock.patch.object(
        transports.PublisherTransport, "_prep_wrapped_messages"
    ) as prep:
        client = PublisherClient(
            credentials=credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Constructing the transport class directly should do the same.
    with mock.patch.object(
        transports.PublisherTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = PublisherClient.get_transport_class()
        transport = transport_class(
            credentials=credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)


def test_set_iam_policy(transport: str = "grpc"):
    """set_iam_policy forwards the request and returns the stubbed Policy."""
    client = PublisherClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = iam_policy.SetIamPolicyRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = policy.Policy(version=774, etag=b"etag_blob",)

        response = client.set_iam_policy(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py new file mode 100644 index 000000000000..be5460b4868b --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -0,0 +1,4394 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#

import os
import mock

import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule

from google import auth
from google.api_core import client_options
from google.api_core import exceptions
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials
from google.auth.exceptions import MutualTLSChannelError
from google.iam.v1 import iam_policy_pb2 as iam_policy  # type: ignore
from google.iam.v1 import options_pb2 as options  # type: ignore
from google.iam.v1 import policy_pb2 as policy  # type: ignore
from google.oauth2 import service_account
from google.protobuf import duration_pb2 as duration  # type: ignore
from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
from google.pubsub_v1.services.subscriber import SubscriberAsyncClient
from google.pubsub_v1.services.subscriber import SubscriberClient
from google.pubsub_v1.services.subscriber import pagers
from google.pubsub_v1.services.subscriber import transports
from google.pubsub_v1.types import pubsub


def client_cert_source_callback():
    """Dummy client_cert_source: returns fixed (cert, key) byte pairs for mTLS tests."""
    return b"cert bytes", b"key bytes"


# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a non-localhost endpoint so mtls-endpoint derivation is testable."""
    return (
        "foo.googleapis.com"
        if ("localhost" in client.DEFAULT_ENDPOINT)
        else client.DEFAULT_ENDPOINT
    )


def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint inserts '.mtls' only for googleapis.com hosts."""
    api_endpoint = "example.googleapis.com"
    api_mtls_endpoint = "example.mtls.googleapis.com"
    sandbox_endpoint = "example.sandbox.googleapis.com"
    sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
    non_googleapi = "api.example.com"

    assert SubscriberClient._get_default_mtls_endpoint(None) is None
    assert (
        SubscriberClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
    )
    # Already-mtls endpoints are returned unchanged (idempotent).
    assert (
        SubscriberClient._get_default_mtls_endpoint(api_mtls_endpoint)
        == api_mtls_endpoint
    )
    assert (
        SubscriberClient._get_default_mtls_endpoint(sandbox_endpoint)
        == sandbox_mtls_endpoint
    )
    assert (
        SubscriberClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
        == sandbox_mtls_endpoint
    )
    # Non-Google hosts pass through untouched.
    assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi


@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient])
def test_subscriber_client_from_service_account_file(client_class):
    """from_service_account_file/json load credentials and keep the default host."""
    creds = credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client._transport._credentials == creds

        # from_service_account_json is an alias of from_service_account_file.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client._transport._credentials == creds

        assert client._transport._host == "pubsub.googleapis.com:443"


def test_subscriber_client_get_transport_class():
    """get_transport_class defaults to gRPC and resolves the 'grpc' name to it."""
    transport = SubscriberClient.get_transport_class()
    assert transport == transports.SubscriberGrpcTransport

    transport = SubscriberClient.get_transport_class("grpc")
    assert transport == transports.SubscriberGrpcTransport


@pytest.mark.parametrize(
"client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) +) +@mock.patch.object( + SubscriberAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SubscriberAsyncClient), +) +def test_subscriber_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_subscriber_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_subscriber_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_subscriber_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SubscriberClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscription): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + + response = client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.Subscription() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_create_subscription_from_dict(): + test_create_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.Subscription() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + ) + + response = await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_create_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + + await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_subscription_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_subscription( + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].topic == "topic_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + assert args[0].ack_deadline_seconds == 2066 + + +def test_create_subscription_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + +@pytest.mark.asyncio +async def test_create_subscription_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_subscription( + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].topic == "topic_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + assert args[0].ack_deadline_seconds == 2066 + + +@pytest.mark.asyncio +async def test_create_subscription_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + + +def test_get_subscription( + transport: str = "grpc", request_type=pubsub.GetSubscriptionRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + + response = client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetSubscriptionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_get_subscription_from_dict(): + test_get_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.GetSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + ) + + response = await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_get_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_get_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_get_subscription_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +def test_get_subscription_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_subscription( + pubsub.GetSubscriptionRequest(), subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_get_subscription_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +@pytest.mark.asyncio +async def test_get_subscription_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_subscription( + pubsub.GetSubscriptionRequest(), subscription="subscription_value", + ) + + +def test_update_subscription( + transport: str = "grpc", request_type=pubsub.UpdateSubscriptionRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + + response = client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_update_subscription_from_dict(): + test_update_subscription(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_subscription_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.UpdateSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + ) + ) + + response = await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + assert response.ack_deadline_seconds == 2066 + + assert response.retain_acked_messages is True + + assert response.enable_message_ordering is True + + assert response.filter == "filter_value" + + assert response.detached is True + + +def test_update_subscription_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + request.subscription.name = "subscription.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription.name=subscription.name/value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_subscription_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + request.subscription.name = "subscription.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_subscription), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + + await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "subscription.name=subscription.name/value", + ) in kw["metadata"] + + +def test_list_subscriptions( + transport: str = "grpc", request_type=pubsub.ListSubscriptionsRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_from_dict(): + test_list_subscriptions(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_subscriptions_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListSubscriptionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSubscriptionsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListSubscriptionsResponse() + + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_subscriptions_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse() + ) + + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +def test_list_subscriptions_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSubscriptionsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_subscriptions(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +def test_list_subscriptions_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_subscriptions_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSubscriptionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_subscriptions(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +@pytest.mark.asyncio +async def test_list_subscriptions_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), project="project_value", + ) + + +def test_list_subscriptions_pager(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_subscriptions(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in results) + + +def test_list_subscriptions_pages(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_subscriptions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + pages = list(client.list_subscriptions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pager(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + ), + RuntimeError, + ) + async_pager = await client.list_subscriptions(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in responses) + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pages(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_subscriptions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
        call.side_effect = (
            pubsub.ListSubscriptionsResponse(
                subscriptions=[
                    pubsub.Subscription(),
                    pubsub.Subscription(),
                    pubsub.Subscription(),
                ],
                next_page_token="abc",
            ),
            pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",),
            pubsub.ListSubscriptionsResponse(
                subscriptions=[pubsub.Subscription(),], next_page_token="ghi",
            ),
            pubsub.ListSubscriptionsResponse(
                subscriptions=[pubsub.Subscription(), pubsub.Subscription(),],
            ),
            RuntimeError,
        )
        pages = []
        # Iterate page objects (not individual items) off the async pager.
        async for page_ in (await client.list_subscriptions(request={})).pages:
            pages.append(page_)
        # Each page must surface the next_page_token of its raw response;
        # the last page carries the proto3 default (empty string).
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token


def test_delete_subscription(
    transport: str = "grpc", request_type=pubsub.DeleteSubscriptionRequest
):
    """DeleteSubscription forwards the request and returns None (Empty)."""
    client = SubscriberClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.delete_subscription), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        response = client.delete_subscription(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == pubsub.DeleteSubscriptionRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_subscription_from_dict():
    """A plain dict request must be accepted and coerced to the proto type."""
    test_delete_subscription(request_type=dict)


@pytest.mark.asyncio
async def test_delete_subscription_async(transport: str = "grpc_asyncio"):
    """Async variant: the awaited call resolves to None."""
    client = SubscriberAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = pubsub.DeleteSubscriptionRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.delete_subscription), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        # FakeUnaryUnaryCall wraps the value in an awaitable grpc call object.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        response = await client.delete_subscription(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None


def test_delete_subscription_field_headers():
    """The subscription field must be echoed in x-goog-request-params."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.DeleteSubscriptionRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.delete_subscription), "__call__"
    ) as call:
        call.return_value = None

        client.delete_subscription(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_delete_subscription_field_headers_async():
    """Async variant of the routing-header check."""
    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.DeleteSubscriptionRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.delete_subscription), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        await client.delete_subscription(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


def test_delete_subscription_flattened():
    """Flattened (keyword) arguments populate the request message."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.delete_subscription), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.delete_subscription(subscription="subscription_value",)

        # Establish that the underlying call was made with the expected
        # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +def test_delete_subscription_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_subscription(subscription="subscription_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    with pytest.raises(ValueError):
        await client.delete_subscription(
            pubsub.DeleteSubscriptionRequest(), subscription="subscription_value",
        )


def test_modify_ack_deadline(
    transport: str = "grpc", request_type=pubsub.ModifyAckDeadlineRequest
):
    """ModifyAckDeadline forwards the request and returns None (Empty)."""
    client = SubscriberClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.modify_ack_deadline), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        response = client.modify_ack_deadline(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == pubsub.ModifyAckDeadlineRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_modify_ack_deadline_from_dict():
    """A plain dict request must be accepted and coerced to the proto type."""
    test_modify_ack_deadline(request_type=dict)


@pytest.mark.asyncio
async def test_modify_ack_deadline_async(transport: str = "grpc_asyncio"):
    """Async variant: the awaited call resolves to None."""
    client = SubscriberAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = pubsub.ModifyAckDeadlineRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.modify_ack_deadline), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        response = await client.modify_ack_deadline(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None


def test_modify_ack_deadline_field_headers():
    """The subscription field must be echoed in x-goog-request-params."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.ModifyAckDeadlineRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.modify_ack_deadline), "__call__"
    ) as call:
        call.return_value = None

        client.modify_ack_deadline(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_modify_ack_deadline_field_headers_async():
    """Async variant of the routing-header check."""
    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.ModifyAckDeadlineRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.modify_ack_deadline), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        await client.modify_ack_deadline(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


def test_modify_ack_deadline_flattened():
    """Flattened (keyword) arguments populate the request message."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.modify_ack_deadline), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.modify_ack_deadline(
            subscription="subscription_value",
            ack_ids=["ack_ids_value"],
            ack_deadline_seconds=2066,
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].subscription == "subscription_value"

        assert args[0].ack_ids == ["ack_ids_value"]

        assert args[0].ack_deadline_seconds == 2066


def test_modify_ack_deadline_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_ack_deadline( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].ack_ids == ["ack_ids_value"] + + assert args[0].ack_deadline_seconds == 2066 + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    with pytest.raises(ValueError):
        await client.modify_ack_deadline(
            pubsub.ModifyAckDeadlineRequest(),
            subscription="subscription_value",
            ack_ids=["ack_ids_value"],
            ack_deadline_seconds=2066,
        )


def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeRequest):
    """Acknowledge forwards the request and returns None (Empty)."""
    client = SubscriberClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.acknowledge), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        response = client.acknowledge(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == pubsub.AcknowledgeRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_acknowledge_from_dict():
    """A plain dict request must be accepted and coerced to the proto type."""
    test_acknowledge(request_type=dict)


@pytest.mark.asyncio
async def test_acknowledge_async(transport: str = "grpc_asyncio"):
    """Async variant: the awaited call resolves to None."""
    client = SubscriberAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = pubsub.AcknowledgeRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.acknowledge), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        response = await client.acknowledge(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None


def test_acknowledge_field_headers():
    """The subscription field must be echoed in x-goog-request-params."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.AcknowledgeRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.acknowledge), "__call__") as call:
        call.return_value = None

        client.acknowledge(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_acknowledge_field_headers_async():
    """Async variant of the routing-header check."""
    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.AcknowledgeRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.acknowledge), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        await client.acknowledge(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


def test_acknowledge_flattened():
    """Flattened (keyword) arguments populate the request message."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.acknowledge), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.acknowledge(
            subscription="subscription_value", ack_ids=["ack_ids_value"],
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].subscription == "subscription_value"

        assert args[0].ack_ids == ["ack_ids_value"]


def test_acknowledge_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.acknowledge(
            pubsub.AcknowledgeRequest(),
            subscription="subscription_value",
            ack_ids=["ack_ids_value"],
        )


@pytest.mark.asyncio
async def test_acknowledge_flattened_async():
    """Async variant: flattened kwargs populate the request message."""
    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.acknowledge), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
+ call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.acknowledge( + subscription="subscription_value", ack_ids=["ack_ids_value"], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].ack_ids == ["ack_ids_value"] + + +@pytest.mark.asyncio +async def test_acknowledge_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + response = client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.PullRequest() + + # Establish that the response is the type that we expect. 
    assert isinstance(response, pubsub.PullResponse)


def test_pull_from_dict():
    """A plain dict request must be accepted and coerced to the proto type."""
    test_pull(request_type=dict)


@pytest.mark.asyncio
async def test_pull_async(transport: str = "grpc_asyncio"):
    """Async variant: the awaited call resolves to a PullResponse."""
    client = SubscriberAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = pubsub.PullRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._client._transport.pull), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse())

        response = await client.pull(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, pubsub.PullResponse)


def test_pull_field_headers():
    """The subscription field must be echoed in x-goog-request-params."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.PullRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.pull), "__call__") as call:
        call.return_value = pubsub.PullResponse()

        client.pull(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_pull_field_headers_async():
    """Async variant of the routing-header check."""
    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.PullRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._client._transport.pull), "__call__") as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse())

        await client.pull(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


def test_pull_flattened():
    """Flattened (keyword) arguments populate the request message."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.pull), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = pubsub.PullResponse()

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.pull(
            subscription="subscription_value",
            return_immediately=True,
            max_messages=1277,
        )

        # Establish that the underlying call was made with the expected
        # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].return_immediately == True + + assert args[0].max_messages == 1277 + + +def test_pull_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +@pytest.mark.asyncio +async def test_pull_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].return_immediately == True + + assert args[0].max_messages == 1277 + + +@pytest.mark.asyncio +async def test_pull_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
    with pytest.raises(ValueError):
        await client.pull(
            pubsub.PullRequest(),
            subscription="subscription_value",
            return_immediately=True,
            max_messages=1277,
        )


def test_streaming_pull(
    transport: str = "grpc", request_type=pubsub.StreamingPullRequest
):
    """StreamingPull forwards a request iterator and yields responses."""
    client = SubscriberClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    requests = [request]

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client._transport.streaming_pull), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = iter([pubsub.StreamingPullResponse()])

        response = client.streaming_pull(iter(requests))

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        # The stub receives the request *iterator*, not a single request.
        assert next(args[0]) == request

    # Establish that the response is the type that we expect.
    for message in response:
        assert isinstance(message, pubsub.StreamingPullResponse)


def test_streaming_pull_from_dict():
    """A plain dict request must be accepted and coerced to the proto type."""
    test_streaming_pull(request_type=dict)


@pytest.mark.asyncio
async def test_streaming_pull_async(transport: str = "grpc_asyncio"):
    """Async variant: responses are read off an aio StreamStreamCall."""
    client = SubscriberAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = pubsub.StreamingPullRequest()

    requests = [request]

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.streaming_pull), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True)
        call.return_value.read = mock.AsyncMock(
            side_effect=[pubsub.StreamingPullResponse()]
        )

        response = await client.streaming_pull(iter(requests))

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert next(args[0]) == request

    # Establish that the response is the type that we expect.
    message = await response.read()
    assert isinstance(message, pubsub.StreamingPullResponse)


def test_modify_push_config(
    transport: str = "grpc", request_type=pubsub.ModifyPushConfigRequest
):
    """ModifyPushConfig forwards the request and returns None (Empty)."""
    client = SubscriberClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.modify_push_config), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        response = client.modify_push_config(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == pubsub.ModifyPushConfigRequest()

    # Establish that the response is the type that we expect.
    assert response is None


def test_modify_push_config_from_dict():
    """A plain dict request must be accepted and coerced to the proto type."""
    test_modify_push_config(request_type=dict)


@pytest.mark.asyncio
async def test_modify_push_config_async(transport: str = "grpc_asyncio"):
    """Async variant: the awaited call resolves to None."""
    client = SubscriberAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = pubsub.ModifyPushConfigRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.modify_push_config), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        response = await client.modify_push_config(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert response is None


def test_modify_push_config_field_headers():
    """The subscription field must be echoed in x-goog-request-params."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.ModifyPushConfigRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.modify_push_config), "__call__"
    ) as call:
        call.return_value = None

        client.modify_push_config(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


@pytest.mark.asyncio
async def test_modify_push_config_field_headers_async():
    """Async variant of the routing-header check."""
    client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),)

    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = pubsub.ModifyPushConfigRequest()
    request.subscription = "subscription/value"

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._client._transport.modify_push_config), "__call__"
    ) as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)

        await client.modify_push_config(request)

        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request

    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert ("x-goog-request-params", "subscription=subscription/value",) in kw[
        "metadata"
    ]


def test_modify_push_config_flattened():
    """Flattened (keyword) arguments populate the request message."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client._transport.modify_push_config), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = None

        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.modify_push_config(
            subscription="subscription_value",
            push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"),
        )

        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0].subscription == "subscription_value"

        assert args[0].push_config == pubsub.PushConfig(
            push_endpoint="push_endpoint_value"
        )


def test_modify_push_config_flattened_error():
    """Mixing a request object with flattened kwargs raises ValueError."""
    client = SubscriberClient(credentials=credentials.AnonymousCredentials(),)

    # Attempting to call a method with both a request object and flattened
    # fields is an error.
+ with pytest.raises(ValueError): + client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_push_config( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].subscription == "subscription_value" + + assert args[0].push_config == pubsub.PushConfig( + push_endpoint="push_endpoint_value" + ) + + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + + response = client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_get_snapshot_from_dict(): + test_get_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.GetSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot(name="name_value", topic="topic_value",) + ) + + response = await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_get_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +def test_get_snapshot_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +def test_get_snapshot_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_snapshot( + pubsub.GetSnapshotRequest(), snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_snapshot( + pubsub.GetSnapshotRequest(), snapshot="snapshot_value", + ) + + +def test_list_snapshots( + transport: str = "grpc", request_type=pubsub.ListSnapshotsRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListSnapshotsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_from_dict(): + test_list_snapshots(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_snapshots_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.ListSnapshotsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + call.return_value = pubsub.ListSnapshotsResponse() + + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_snapshots_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + request.project = "project/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse() + ) + + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + + +def test_list_snapshots_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_snapshots(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +def test_list_snapshots_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_snapshots( + pubsub.ListSnapshotsRequest(), project="project_value", + ) + + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_snapshots(project="project_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].project == "project_value" + + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_snapshots( + pubsub.ListSnapshotsRequest(), project="project_value", + ) + + +def test_list_snapshots_pager(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), + ) + pager = client.list_snapshots(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, pubsub.Snapshot) for i in results) + + +def test_list_snapshots_pages(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + ), + RuntimeError, + ) + pages = list(client.list_snapshots(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_snapshots_async_pager(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + ), + RuntimeError, + ) + async_pager = await client.list_snapshots(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Snapshot) for i in responses) + + +@pytest.mark.asyncio +async def test_list_snapshots_async_pages(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_snapshots), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_snapshots(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_snapshot( + transport: str = "grpc", request_type=pubsub.CreateSnapshotRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + + response = client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.CreateSnapshotRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_create_snapshot_from_dict(): + test_create_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.CreateSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot(name="name_value", topic="topic_value",) + ) + + response = await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_create_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + + await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_snapshot_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_snapshot( + name="name_value", subscription="subscription_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].subscription == "subscription_value" + + +def test_create_snapshot_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_snapshot( + name="name_value", subscription="subscription_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].subscription == "subscription_value" + + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +def test_update_snapshot( + transport: str = "grpc", request_type=pubsub.UpdateSnapshotRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + + response = client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_update_snapshot_from_dict(): + test_update_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.UpdateSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot(name="name_value", topic="topic_value",) + ) + + response = await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + + assert response.name == "name_value" + + assert response.topic == "topic_value" + + +def test_update_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + request.snapshot.name = "snapshot.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.update_snapshot), "__call__") as call: + call.return_value = pubsub.Snapshot() + + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot.name=snapshot.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + request.snapshot.name = "snapshot.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.update_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + + await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot.name=snapshot.name/value",) in kw[ + "metadata" + ] + + +def test_delete_snapshot( + transport: str = "grpc", request_type=pubsub.DeleteSnapshotRequest +): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteSnapshotRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_snapshot_from_dict(): + test_delete_snapshot(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_snapshot_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = pubsub.DeleteSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_snapshot_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + call.return_value = None + + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_snapshot_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + request.snapshot = "snapshot/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.delete_snapshot), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + + +def test_delete_snapshot_flattened(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +def test_delete_snapshot_flattened_error(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.delete_snapshot), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_snapshot(snapshot="snapshot_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].snapshot == "snapshot_value" + + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_error_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), snapshot="snapshot_value", + ) + + +def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.SeekResponse() + + response = client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.SeekRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_from_dict(): + test_seek(request_type=dict) + + +@pytest.mark.asyncio +async def test_seek_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = pubsub.SeekRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + + response = await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.seek), "__call__") as call: + call.return_value = pubsub.SeekResponse() + + client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_seek_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + request.subscription = "subscription/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._client._transport.seek), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + + await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ + "metadata" + ] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = SubscriberClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.SubscriberGrpcTransport,) + + +def test_subscriber_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.SubscriberTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_subscriber_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SubscriberTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_subscription", + "get_subscription", + "update_subscription", + "list_subscriptions", + "delete_subscription", + "modify_ack_deadline", + "acknowledge", + "pull", + "streaming_pull", + "modify_push_config", + "get_snapshot", + "list_snapshots", + "create_snapshot", + "update_snapshot", + "delete_snapshot", + "seek", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_subscriber_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_subscriber_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + SubscriberClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +def test_subscriber_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.SubscriberGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_subscriber_host_no_port(): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:443" + + +def test_subscriber_host_with_port(): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + ) + assert client._transport._host == "pubsub.googleapis.com:8000" + + +def test_subscriber_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_subscriber_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_subscriber_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_subscriber_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel 
will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_subscriber_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_subscriber_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_snapshot_path(): + project = "squid" + snapshot = "clam" + + expected = "projects/{project}/snapshots/{snapshot}".format( + project=project, snapshot=snapshot, + ) + actual = SubscriberClient.snapshot_path(project, snapshot) + assert expected == actual + + +def test_parse_snapshot_path(): + expected = { + "project": "whelk", + "snapshot": "octopus", + } + path = SubscriberClient.snapshot_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_snapshot_path(path) + assert expected == actual + + +def test_subscription_path(): + project = "squid" + subscription = "clam" + + expected = "projects/{project}/subscriptions/{subscription}".format( + project=project, subscription=subscription, + ) + actual = SubscriberClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "whelk", + "subscription": "octopus", + } + path = SubscriberClient.subscription_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_subscription_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SubscriberTransport, "_prep_wrapped_messages" + ) as prep: + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SubscriberTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SubscriberClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_set_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.set_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py deleted file mode 100644 index ad4f38d8b9de..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_publisher_client_v1.py +++ /dev/null @@ -1,560 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.pubsub_v1.gapic import publisher_client -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestPublisherClient(object): - def test_create_topic(self): - # Setup Expected Response - name_2 = "name2-1052831874" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name_2, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - name = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.create_topic(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
pubsub_pb2.Topic(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - name = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.create_topic(name) - - def test_update_topic(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = {} - update_mask = {} - - response = client.update_topic(topic, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateTopicRequest( - topic=topic, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = {} - update_mask = {} - - with pytest.raises(CustomException): - client.update_topic(topic, update_mask) - - def test_publish(self): - # Setup Expected Response - message_ids_element = 
"messageIdsElement-744837059" - message_ids = [message_ids_element] - expected_response = {"message_ids": message_ids} - expected_response = pubsub_pb2.PublishResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - data = b"-86" - messages_element = {"data": data} - messages = [messages_element] - - response = client.publish(topic, messages) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PublishRequest(topic=topic, messages=messages) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_publish_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - data = b"-86" - messages_element = {"data": data} - messages = [messages_element] - - with pytest.raises(CustomException): - client.publish(topic, messages) - - def test_get_topic(self): - # Setup Expected Response - name = "name3373707" - kms_key_name = "kmsKeyName2094986649" - expected_response = {"name": name, "kms_key_name": kms_key_name} - expected_response = pubsub_pb2.Topic(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = 
client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.get_topic(topic) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetTopicRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_topic_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.get_topic(topic) - - def test_list_topics(self): - # Setup Expected Response - next_page_token = "" - topics_element = {} - topics = [topics_element] - expected_response = {"next_page_token": next_page_token, "topics": topics} - expected_response = pubsub_pb2.ListTopicsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_topics(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.topics[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topics_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
publisher_client.PublisherClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_topics(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_topic_subscriptions(self): - # Setup Expected Response - next_page_token = "" - subscriptions_element = "subscriptionsElement1698708147" - subscriptions = [subscriptions_element] - expected_response = { - "next_page_token": next_page_token, - "subscriptions": subscriptions, - } - expected_response = pubsub_pb2.ListTopicSubscriptionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_subscriptions(topic) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.subscriptions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSubscriptionsRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topic_subscriptions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_subscriptions(topic) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_list_topic_snapshots(self): - # Setup Expected Response - next_page_token = "" - snapshots_element = "snapshotsElement1339034092" - snapshots 
= [snapshots_element] - expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} - expected_response = pubsub_pb2.ListTopicSnapshotsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_snapshots(topic) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.snapshots[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListTopicSnapshotsRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_topic_snapshots_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - paged_list_response = client.list_topic_snapshots(topic) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_topic(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - client.delete_topic(topic) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteTopicRequest(topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_topic_exception(self): - # Mock the API 
response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.delete_topic(topic) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) - - def test_detach_subscription(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.DetachSubscriptionResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.detach_subscription(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DetachSubscriptionRequest( - subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_detach_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = publisher_client.PublisherClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.detach_subscription(subscription) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py b/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py deleted file mode 100644 index b059214d7f5c..000000000000 --- 
a/packages/google-cloud-pubsub/tests/unit/gapic/v1/test_subscriber_client_v1.py +++ /dev/null @@ -1,892 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.pubsub_v1.gapic import subscriber_client -from google.cloud.pubsub_v1.proto import pubsub_pb2 -from google.iam.v1 import iam_policy_pb2 -from google.iam.v1 import policy_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - def stream_stream( - self, method, request_serializer=None, 
response_deserializer=None - ): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestSubscriberClient(object): - def test_create_subscription(self): - # Setup Expected Response - name_2 = "name2-1052831874" - topic_2 = "topic2-1139259102" - ack_deadline_seconds = 2135351438 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name_2, - "topic": topic_2, - "ack_deadline_seconds": ack_deadline_seconds, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - response = client.create_subscription(name, topic) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.Subscription(name=name, topic=topic) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - name = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - topic = client.topic_path("[PROJECT]", "[TOPIC]") - - with pytest.raises(CustomException): - client.create_subscription(name, 
topic) - - def test_get_subscription(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - ack_deadline_seconds = 2135351438 - retain_acked_messages = False - enable_message_ordering = True - filter_ = "filter-1274492040" - detached = True - expected_response = { - "name": name, - "topic": topic, - "ack_deadline_seconds": ack_deadline_seconds, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.get_subscription(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSubscriptionRequest(subscription=subscription) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.get_subscription(subscription) - - def test_update_subscription(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - ack_deadline_seconds_2 = 921632575 - retain_acked_messages = False - enable_message_ordering = True - filter_ = 
"filter-1274492040" - detached = True - expected_response = { - "name": name, - "topic": topic, - "ack_deadline_seconds": ack_deadline_seconds_2, - "retain_acked_messages": retain_acked_messages, - "enable_message_ordering": enable_message_ordering, - "filter": filter_, - "detached": detached, - } - expected_response = pubsub_pb2.Subscription(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - ack_deadline_seconds = 42 - subscription = {"ack_deadline_seconds": ack_deadline_seconds} - paths_element = "ack_deadline_seconds" - paths = [paths_element] - update_mask = {"paths": paths} - - response = client.update_subscription(subscription, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateSubscriptionRequest( - subscription=subscription, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - ack_deadline_seconds = 42 - subscription = {"ack_deadline_seconds": ack_deadline_seconds} - paths_element = "ack_deadline_seconds" - paths = [paths_element] - update_mask = {"paths": paths} - - with pytest.raises(CustomException): - client.update_subscription(subscription, update_mask) - - def test_list_subscriptions(self): - # Setup Expected Response - next_page_token = "" - subscriptions_element = {} - subscriptions = [subscriptions_element] - 
expected_response = { - "next_page_token": next_page_token, - "subscriptions": subscriptions, - } - expected_response = pubsub_pb2.ListSubscriptionsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_subscriptions(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.subscriptions[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListSubscriptionsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_subscriptions_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_subscriptions(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_subscription(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - client.delete_subscription(subscription) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteSubscriptionRequest( - subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - 
def test_delete_subscription_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.delete_subscription(subscription) - - def test_get_snapshot(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - expected_response = {"name": name, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - response = client.get_snapshot(snapshot) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.GetSnapshotRequest(snapshot=snapshot) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - with pytest.raises(CustomException): - client.get_snapshot(snapshot) - - def test_modify_ack_deadline(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - 
create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - ack_deadline_seconds = 2135351438 - - client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ModifyAckDeadlineRequest( - subscription=subscription, - ack_ids=ack_ids, - ack_deadline_seconds=ack_deadline_seconds, - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_modify_ack_deadline_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - ack_deadline_seconds = 2135351438 - - with pytest.raises(CustomException): - client.modify_ack_deadline(subscription, ack_ids, ack_deadline_seconds) - - def test_acknowledge(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - - client.acknowledge(subscription, ack_ids) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.AcknowledgeRequest( - subscription=subscription, ack_ids=ack_ids - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_acknowledge_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as 
create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - ack_ids = [] - - with pytest.raises(CustomException): - client.acknowledge(subscription, ack_ids) - - def test_pull(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.PullResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - max_messages = 496131527 - - response = client.pull(subscription, max_messages) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.PullRequest( - subscription=subscription, max_messages=max_messages - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_pull_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - max_messages = 496131527 - - with pytest.raises(CustomException): - client.pull(subscription, max_messages) - - def test_streaming_pull(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.StreamingPullResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[iter([expected_response])]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch 
as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - stream_ack_deadline_seconds = 1875467245 - request = { - "subscription": subscription, - "stream_ack_deadline_seconds": stream_ack_deadline_seconds, - } - request = pubsub_pb2.StreamingPullRequest(**request) - requests = [request] - - response = client.streaming_pull(requests) - resources = list(response) - assert len(resources) == 1 - assert expected_response == resources[0] - - assert len(channel.requests) == 1 - actual_requests = channel.requests[0][1] - assert len(actual_requests) == 1 - actual_request = list(actual_requests)[0] - assert request == actual_request - - def test_streaming_pull_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - stream_ack_deadline_seconds = 1875467245 - request = { - "subscription": subscription, - "stream_ack_deadline_seconds": stream_ack_deadline_seconds, - } - - request = pubsub_pb2.StreamingPullRequest(**request) - requests = [request] - - with pytest.raises(CustomException): - client.streaming_pull(requests) - - def test_modify_push_config(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - push_config = {} - - client.modify_push_config(subscription, push_config) - - assert len(channel.requests) == 1 - expected_request = 
pubsub_pb2.ModifyPushConfigRequest( - subscription=subscription, push_config=push_config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_modify_push_config_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - push_config = {} - - with pytest.raises(CustomException): - client.modify_push_config(subscription, push_config) - - def test_list_snapshots(self): - # Setup Expected Response - next_page_token = "" - snapshots_element = {} - snapshots = [snapshots_element] - expected_response = {"next_page_token": next_page_token, "snapshots": snapshots} - expected_response = pubsub_pb2.ListSnapshotsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_snapshots(project) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.snapshots[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.ListSnapshotsRequest(project=project) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_snapshots_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
subscriber_client.SubscriberClient() - - # Setup request - project = client.project_path("[PROJECT]") - - paged_list_response = client.list_snapshots(project) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_create_snapshot(self): - # Setup Expected Response - name_2 = "name2-1052831874" - topic = "topic110546223" - expected_response = {"name": name_2, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.create_snapshot(name, subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.CreateSnapshotRequest( - name=name, subscription=subscription - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - name = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.create_snapshot(name, subscription) - - def test_update_snapshot(self): - # Setup Expected Response - name = "name3373707" - topic = "topic110546223" - expected_response = {"name": name, "topic": topic} - expected_response = pubsub_pb2.Snapshot(**expected_response) - - # Mock 
the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - seconds = 123456 - expire_time = {"seconds": seconds} - snapshot = {"expire_time": expire_time} - paths_element = "expire_time" - paths = [paths_element] - update_mask = {"paths": paths} - - response = client.update_snapshot(snapshot, update_mask) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.UpdateSnapshotRequest( - snapshot=snapshot, update_mask=update_mask - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - seconds = 123456 - expire_time = {"seconds": seconds} - snapshot = {"expire_time": expire_time} - paths_element = "expire_time" - paths = [paths_element] - update_mask = {"paths": paths} - - with pytest.raises(CustomException): - client.update_snapshot(snapshot, update_mask) - - def test_delete_snapshot(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - client.delete_snapshot(snapshot) - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.DeleteSnapshotRequest(snapshot=snapshot) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_delete_snapshot_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - snapshot = client.snapshot_path("[PROJECT]", "[SNAPSHOT]") - - with pytest.raises(CustomException): - client.delete_snapshot(snapshot) - - def test_seek(self): - # Setup Expected Response - expected_response = {} - expected_response = pubsub_pb2.SeekResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - response = client.seek(subscription) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = pubsub_pb2.SeekRequest(subscription=subscription) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_seek_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - subscription = client.subscription_path("[PROJECT]", "[SUBSCRIPTION]") - - with pytest.raises(CustomException): - client.seek(subscription) - - def test_set_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = 
ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - policy = {} - - response = client.set_iam_policy(resource, policy) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.SetIamPolicyRequest( - resource=resource, policy=policy - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_set_iam_policy_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - policy = {} - - with pytest.raises(CustomException): - client.set_iam_policy(resource, policy) - - def test_get_iam_policy(self): - # Setup Expected Response - version = 351608024 - etag = b"21" - expected_response = {"version": version, "etag": etag} - expected_response = policy_pb2.Policy(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - - response = client.get_iam_policy(resource) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.GetIamPolicyRequest(resource=resource) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_iam_policy_exception(self): - # Mock the API response - channel = 
ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - - with pytest.raises(CustomException): - client.get_iam_policy(resource) - - def test_test_iam_permissions(self): - # Setup Expected Response - expected_response = {} - expected_response = iam_policy_pb2.TestIamPermissionsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup Request - resource = "resource-341064690" - permissions = [] - - response = client.test_iam_permissions(resource, permissions) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = iam_policy_pb2.TestIamPermissionsRequest( - resource=resource, permissions=permissions - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_test_iam_permissions_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = subscriber_client.SubscriberClient() - - # Setup request - resource = "resource-341064690" - permissions = [] - - with pytest.raises(CustomException): - client.test_iam_permissions(resource, permissions) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index f10b54ee5f79..b03dd99de745 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -21,6 +21,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus from google.cloud.pubsub_v1.publisher._batch.thread import Batch +from google.pubsub_v1 import types as gapic_types def create_batch(status=None, settings=types.BatchSettings()): @@ -44,5 +45,5 @@ def create_batch(status=None, settings=types.BatchSettings()): def test_len(): batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) assert len(batch) == 0 - batch.publish(types.PubsubMessage(data=b"foo")) + batch.publish(gapic_types.PubsubMessage(data=b"foo")) assert len(batch) == 1 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index e9d2b09c0761..cd634f8f813b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -20,6 +20,7 @@ import pytest import google.api_core.exceptions +from google.api_core import gapic_v1 from google.auth import credentials from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types @@ -28,6 +29,7 @@ from google.cloud.pubsub_v1.publisher._batch.base import BatchCancellationReason from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._batch.thread import Batch +from google.pubsub_v1 import types as gapic_types def create_client(): @@ -39,6 +41,7 @@ def create_batch( topic="topic_name", batch_done_callback=None, commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, **batch_settings ): """Return a batch object suitable for testing. 
@@ -49,6 +52,8 @@ def create_batch( the batch is done, either with a success or a failure flag. commit_when_full (bool): Whether to commit the batch when the batch has reached byte-size or number-of-messages limits. + commit_retry (Optional[google.api_core.retry.Retry]): The retry settings + for the batch commit call. batch_settings (Mapping[str, str]): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. @@ -63,6 +68,7 @@ def create_batch( settings, batch_done_callback=batch_done_callback, commit_when_full=commit_when_full, + commit_retry=commit_retry, ) @@ -116,7 +122,7 @@ def test_blocking__commit(): ) # Set up the underlying API publish method to return a PublishResponse. - publish_response = types.PublishResponse(message_ids=["a", "b"]) + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) patch = mock.patch.object( type(batch.client.api), "publish", return_value=publish_response ) @@ -126,11 +132,12 @@ def test_blocking__commit(): # Establish that the underlying API call was made with expected # arguments. publish.assert_called_once_with( - "topic_name", - [ - types.PubsubMessage(data=b"This is my message."), - types.PubsubMessage(data=b"This is another message."), + topic="topic_name", + messages=[ + gapic_types.PubsubMessage(data=b"This is my message."), + gapic_types.PubsubMessage(data=b"This is another message."), ], + retry=gapic_v1.method.DEFAULT, ) # Establish that all of the futures are done, and that they have the @@ -141,15 +148,36 @@ def test_blocking__commit(): assert futures[1].result() == "b" +def test_blocking__commit_custom_retry(): + batch = create_batch(commit_retry=mock.sentinel.custom_retry) + batch.publish({"data": b"This is my message."}) + + # Set up the underlying API publish method to return a PublishResponse. 
+ publish_response = gapic_types.PublishResponse(message_ids=["a"]) + patch = mock.patch.object( + type(batch.client.api), "publish", return_value=publish_response + ) + with patch as publish: + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. + publish.assert_called_once_with( + topic="topic_name", + messages=[gapic_types.PubsubMessage(data=b"This is my message.")], + retry=mock.sentinel.custom_retry, + ) + + def test_client_api_publish_not_blocking_additional_publish_calls(): batch = create_batch(max_messages=1) api_publish_called = threading.Event() - def api_publish_delay(_, messages): + def api_publish_delay(topic="", messages=(), retry=None): api_publish_called.set() time.sleep(1.0) message_ids = [str(i) for i in range(len(messages))] - return types.PublishResponse(message_ids=message_ids) + return gapic_types.PublishResponse(message_ids=message_ids) api_publish_patch = mock.patch.object( type(batch.client.api), "publish", side_effect=api_publish_delay @@ -210,7 +238,7 @@ def test_blocking__commit_wrong_messageid_length(): ) # Set up a PublishResponse that only returns one message ID. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( type(batch.client.api), "publish", return_value=publish_response ) @@ -264,9 +292,9 @@ def test_block__commmit_retry_error(): def test_publish_updating_batch_size(): batch = create_batch(topic="topic_foo") messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + gapic_types.PubsubMessage(data=b"foobarbaz"), + gapic_types.PubsubMessage(data=b"spameggs"), + gapic_types.PubsubMessage(data=b"1335020400"), ) # Publish each of the messages, which should save them to the batch. 
@@ -278,9 +306,9 @@ def test_publish_updating_batch_size(): # The size should have been incremented by the sum of the size # contributions of each message to the PublishRequest. - base_request_size = types.PublishRequest(topic="topic_foo").ByteSize() + base_request_size = gapic_types.PublishRequest(topic="topic_foo")._pb.ByteSize() expected_request_size = base_request_size + sum( - types.PublishRequest(messages=[msg]).ByteSize() for msg in messages + gapic_types.PublishRequest(messages=[msg])._pb.ByteSize() for msg in messages ) assert batch.size == expected_request_size @@ -289,7 +317,7 @@ def test_publish_updating_batch_size(): def test_publish(): batch = create_batch() - message = types.PubsubMessage() + message = gapic_types.PubsubMessage() future = batch.publish(message) assert len(batch.messages) == 1 @@ -299,7 +327,7 @@ def test_publish(): def test_publish_max_messages_zero(): batch = create_batch(topic="topic_foo", max_messages=0) - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") with mock.patch.object(batch, "commit") as commit: future = batch.publish(message) @@ -312,8 +340,8 @@ def test_publish_max_messages_zero(): def test_publish_max_messages_enforced(): batch = create_batch(topic="topic_foo", max_messages=1) - message = types.PubsubMessage(data=b"foobarbaz") - message2 = types.PubsubMessage(data=b"foobarbaz2") + message = gapic_types.PubsubMessage(data=b"foobarbaz") + message2 = gapic_types.PubsubMessage(data=b"foobarbaz2") future = batch.publish(message) future2 = batch.publish(message2) @@ -327,8 +355,8 @@ def test_publish_max_messages_enforced(): def test_publish_max_bytes_enforced(): batch = create_batch(topic="topic_foo", max_bytes=15) - message = types.PubsubMessage(data=b"foobarbaz") - message2 = types.PubsubMessage(data=b"foobarbaz2") + message = gapic_types.PubsubMessage(data=b"foobarbaz") + message2 = gapic_types.PubsubMessage(data=b"foobarbaz2") future = batch.publish(message) 
future2 = batch.publish(message2) @@ -343,9 +371,9 @@ def test_publish_exceed_max_messages(): max_messages = 4 batch = create_batch(max_messages=max_messages) messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + gapic_types.PubsubMessage(data=b"foobarbaz"), + gapic_types.PubsubMessage(data=b"spameggs"), + gapic_types.PubsubMessage(data=b"1335020400"), ) # Publish each of the messages, which should save them to the batch. @@ -359,7 +387,7 @@ def test_publish_exceed_max_messages(): # When a fourth message is published, commit should be called. # No future will be returned in this case. - future = batch.publish(types.PubsubMessage(data=b"last one")) + future = batch.publish(gapic_types.PubsubMessage(data=b"last one")) commit.assert_called_once_with() assert future is None @@ -374,11 +402,11 @@ def test_publish_single_message_size_exceeds_server_size_limit(): max_bytes=1000 * 1000, # way larger than (mocked) server side limit ) - big_message = types.PubsubMessage(data=b"x" * 984) + big_message = gapic_types.PubsubMessage(data=b"x" * 984) - request_size = types.PublishRequest( + request_size = gapic_types.PublishRequest( topic="topic_foo", messages=[big_message] - ).ByteSize() + )._pb.ByteSize() assert request_size == 1001 # sanity check, just above the (mocked) server limit with pytest.raises(exceptions.MessageTooLargeError): @@ -390,13 +418,15 @@ def test_publish_total_messages_size_exceeds_server_size_limit(): batch = create_batch(topic="topic_foo", max_messages=10, max_bytes=1500) messages = ( - types.PubsubMessage(data=b"x" * 500), - types.PubsubMessage(data=b"x" * 600), + gapic_types.PubsubMessage(data=b"x" * 500), + gapic_types.PubsubMessage(data=b"x" * 600), ) # Sanity check - request size is still below BatchSettings.max_bytes, # but it exceeds the server-side size limit. 
- request_size = types.PublishRequest(topic="topic_foo", messages=messages).ByteSize() + request_size = gapic_types.PublishRequest( + topic="topic_foo", messages=messages + )._pb.ByteSize() assert 1000 < request_size < 1500 with mock.patch.object(batch, "commit") as fake_commit: @@ -412,7 +442,7 @@ def test_publish_dict(): future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}}) # There should be one message on the batch. - expected_message = types.PubsubMessage( + expected_message = gapic_types.PubsubMessage( data=b"foobarbaz", attributes={"spam": "eggs"} ) assert batch.messages == [expected_message] @@ -440,9 +470,9 @@ def test_do_not_commit_when_full_when_flag_is_off(): # Set commit_when_full flag to False batch = create_batch(max_messages=max_messages, commit_when_full=False) messages = ( - types.PubsubMessage(data=b"foobarbaz"), - types.PubsubMessage(data=b"spameggs"), - types.PubsubMessage(data=b"1335020400"), + gapic_types.PubsubMessage(data=b"foobarbaz"), + gapic_types.PubsubMessage(data=b"spameggs"), + gapic_types.PubsubMessage(data=b"1335020400"), ) with mock.patch.object(batch, "commit") as commit: @@ -451,7 +481,7 @@ def test_do_not_commit_when_full_when_flag_is_off(): assert len(futures) == 3 # When a fourth message is published, commit should not be called. - future = batch.publish(types.PubsubMessage(data=b"last one")) + future = batch.publish(gapic_types.PubsubMessage(data=b"last one")) assert commit.call_count == 0 assert future is None @@ -471,11 +501,11 @@ def test_batch_done_callback_called_on_success(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") batch.publish(message) # One response for one published message. 
- publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) with mock.patch.object( type(batch.client.api), "publish", return_value=publish_response @@ -491,11 +521,11 @@ def test_batch_done_callback_called_on_publish_failure(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") batch.publish(message) # One response for one published message. - publish_response = types.PublishResponse(message_ids=["a"]) + publish_response = gapic_types.PublishResponse(message_ids=["a"]) # Induce publish error. error = google.api_core.exceptions.InternalServerError("uh oh") @@ -517,11 +547,11 @@ def test_batch_done_callback_called_on_publish_response_invalid(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = types.PubsubMessage(data=b"foobarbaz") + message = gapic_types.PubsubMessage(data=b"foobarbaz") batch.publish(message) # No message ids returned in successful publish response -> invalid. 
- publish_response = types.PublishResponse(message_ids=[]) + publish_response = gapic_types.PublishResponse(message_ids=[]) with mock.patch.object( type(batch.client.api), "publish", return_value=publish_response diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 08e1954e6de3..de5dd0523706 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -18,14 +18,14 @@ from google.auth import credentials from google.cloud.pubsub_v1 import publisher -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.pubsub_v1 import types as gapic_types _ORDERING_KEY = "ordering_key_1" def create_message(): - return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) def create_client(): @@ -172,6 +172,18 @@ def test_basic_publish(): batch.publish.assert_called_once_with(message) +def test_publish_custom_retry(): + client = create_client() + message = create_message() + sequencer = create_ordered_sequencer(client) + + sequencer.publish(message, retry=mock.sentinel.custom_retry) + + assert sequencer._ordered_batches # batch exists + batch = sequencer._ordered_batches[0] + assert batch._commit_retry is mock.sentinel.custom_retry + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 22e24ed064b0..b8aff0d2c92d 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -17,13 +17,13 @@ from google.auth import credentials from google.cloud.pubsub_v1 import publisher -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._batch import base from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer +from google.pubsub_v1 import types as gapic_types def create_message(): - return types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) def create_client(): @@ -89,6 +89,17 @@ def test_basic_publish(): batch.publish.assert_called_once_with(message) +def test_publish_custom_retry(): + client = create_client() + message = create_message() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + + sequencer.publish(message, retry=mock.sentinel.custom_retry) + + assert sequencer._current_batch is not None + assert sequencer._current_batch._commit_retry is mock.sentinel.custom_retry + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py index 26a61663b948..54484520d803 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import types as grpc_types def _run_in_daemon( @@ -66,7 +67,7 @@ def test_no_overflow_no_error(): # there should be no errors 
for data in (b"foo", b"bar", b"baz"): - msg = types.PubsubMessage(data=data) + msg = grpc_types.PubsubMessage(data=data) flow_controller.add(msg) @@ -79,8 +80,8 @@ def test_overflow_no_error_on_ignore(): flow_controller = FlowController(settings) # there should be no overflow errors - flow_controller.add(types.PubsubMessage(data=b"foo")) - flow_controller.add(types.PubsubMessage(data=b"bar")) + flow_controller.add(grpc_types.PubsubMessage(data=b"foo")) + flow_controller.add(grpc_types.PubsubMessage(data=b"bar")) def test_message_count_overflow_error(): @@ -91,9 +92,9 @@ def test_message_count_overflow_error(): ) flow_controller = FlowController(settings) - flow_controller.add(types.PubsubMessage(data=b"foo")) + flow_controller.add(grpc_types.PubsubMessage(data=b"foo")) with pytest.raises(exceptions.FlowControlLimitError) as error: - flow_controller.add(types.PubsubMessage(data=b"bar")) + flow_controller.add(grpc_types.PubsubMessage(data=b"bar")) assert "messages: 2 / 1" in str(error.value) @@ -109,14 +110,14 @@ def test_byte_size_overflow_error(): # Since the message data itself occupies 100 bytes, it means that both # messages combined will exceed the imposed byte limit of 199, but a single # message will not (the message size overhead is way lower than data size). 
- msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) flow_controller.add(msg1) with pytest.raises(exceptions.FlowControlLimitError) as error: flow_controller.add(msg2) - total_size = msg1.ByteSize() + msg2.ByteSize() + total_size = msg1._pb.ByteSize() + msg2._pb.ByteSize() expected_info = "bytes: {} / 199".format(total_size) assert expected_info in str(error.value) @@ -129,9 +130,9 @@ def test_no_error_on_moderate_message_flow(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # The flow control settings will accept two in-flight messages, but not three. # If releasing messages works correctly, the sequence below will not raise errors. @@ -151,14 +152,14 @@ def test_rejected_messages_do_not_increase_total_load(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) flow_controller.add(msg1) for _ in range(5): with pytest.raises(exceptions.FlowControlLimitError): - flow_controller.add(types.PubsubMessage(data=b"z" * 100)) + flow_controller.add(grpc_types.PubsubMessage(data=b"z" * 100)) # After releasing a message we should again be able to add another one, despite # previously trying to add a lot of other messages. 
@@ -174,9 +175,9 @@ def test_incorrectly_releasing_too_many_messages(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # Releasing a message that would make the load negative should result in a warning. with warnings.catch_warnings(record=True) as warned: @@ -196,7 +197,7 @@ def test_incorrectly_releasing_too_many_messages(): error_msg = str(error.value) assert "messages: 2 / 1" in error_msg - total_size = msg2.ByteSize() + msg3.ByteSize() + total_size = msg2._pb.ByteSize() + msg3._pb.ByteSize() expected_size_info = "bytes: {} / 150".format(total_size) assert expected_size_info in error_msg @@ -209,10 +210,10 @@ def test_blocking_on_overflow_until_free_capacity(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) - msg3 = types.PubsubMessage(data=b"z" * 100) - msg4 = types.PubsubMessage(data=b"w" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) + msg3 = grpc_types.PubsubMessage(data=b"z" * 100) + msg4 = grpc_types.PubsubMessage(data=b"w" * 100) # If there is a concurrency bug in FlowController, we do not want to block # the main thread running the tests, thus we delegate all add/release @@ -286,7 +287,7 @@ def test_error_if_mesage_would_block_indefinitely(): ) flow_controller = FlowController(settings) - msg = types.PubsubMessage(data=b"xyz") + msg = grpc_types.PubsubMessage(data=b"xyz") adding_done = threading.Event() error_event = threading.Event() @@ -303,7 +304,7 @@ def test_error_if_mesage_would_block_indefinitely(): error_msg = str(error_info.value) assert "would block forever" in error_msg assert "messages: 1 / 0" in error_msg - 
assert "bytes: {} / 1".format(msg.ByteSize()) in error_msg + assert "bytes: {} / 1".format(msg._pb.ByteSize()) in error_msg def test_threads_posting_large_messages_do_not_starve(): @@ -314,7 +315,7 @@ def test_threads_posting_large_messages_do_not_starve(): ) flow_controller = FlowController(settings) - large_msg = types.PubsubMessage(data=b"x" * 100) # close to entire byte limit + large_msg = grpc_types.PubsubMessage(data=b"x" * 100) # close to entire byte limit adding_initial_done = threading.Event() adding_large_done = threading.Event() @@ -325,14 +326,14 @@ def test_threads_posting_large_messages_do_not_starve(): # Occupy some of the flow capacity, then try to add a large message. Releasing # enough messages should eventually allow the large message to come through, even # if more messages are added after it (those should wait for the large message). - initial_messages = [types.PubsubMessage(data=b"x" * 10)] * 5 + initial_messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 5 _run_in_daemon(flow_controller, "add", initial_messages, adding_initial_done) assert adding_initial_done.wait(timeout=0.1) _run_in_daemon(flow_controller, "add", [large_msg], adding_large_done) # Continuously keep adding more messages after the large one. 
- messages = [types.PubsubMessage(data=b"x" * 10)] * 10 + messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 10 _run_in_daemon(flow_controller, "add", messages, adding_busy_done, action_pause=0.1) # At the same time, gradually keep releasing the messages - the freeed up @@ -372,8 +373,8 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): ) flow_controller = FlowController(settings) - msg1 = types.PubsubMessage(data=b"x" * 100) - msg2 = types.PubsubMessage(data=b"y" * 100) + msg1 = grpc_types.PubsubMessage(data=b"x" * 100) + msg2 = grpc_types.PubsubMessage(data=b"y" * 100) # If there is a concurrency bug in FlowController, we do not want to block # the main thread running the tests, thus we delegate all add/release diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index b58ed133f8bf..bcdbb2f34007 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -15,19 +15,39 @@ from __future__ import absolute_import from __future__ import division +import inspect + from google.auth import credentials import mock import pytest import time -from google.cloud.pubsub_v1.gapic import publisher_client +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer +from google.pubsub_v1 import types as gapic_types +from google.pubsub_v1.services.publisher import client as publisher_client +from google.pubsub_v1.services.publisher.transports.grpc import PublisherGrpcTransport + + +def _assert_retries_equal(retry, retry2): + # Retry instances cannot be 
directly compared, because their predicates are + # different instances of the same function. We thus manually compare their other + # attributes, and then heuristically compare their predicates. + for attr in ("_deadline", "_initial", "_maximum", "_multiplier"): + assert getattr(retry, attr) == getattr(retry2, attr) + + pred = retry._predicate + pred2 = retry2._predicate + assert inspect.getsource(pred) == inspect.getsource(pred2) + assert inspect.getclosurevars(pred) == inspect.getclosurevars(pred2) + def test_init(): creds = mock.Mock(spec=credentials.Credentials) @@ -42,13 +62,13 @@ def test_init(): def test_init_w_custom_transport(): - transport = object() + transport = PublisherGrpcTransport() client = publisher.Client(transport=transport) # A plain client should have an `api` (the underlying GAPIC) and a # batch settings object, which should have the defaults. assert isinstance(client.api, publisher_client.PublisherClient) - assert client.api.transport is transport + assert client.api._transport is transport assert client.batch_settings.max_bytes == 1 * 1000 * 1000 assert client.batch_settings.max_latency == 0.01 assert client.batch_settings.max_messages == 100 @@ -59,7 +79,7 @@ def test_init_w_api_endpoint(): client = publisher.Client(client_options=client_options) assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -69,7 +89,7 @@ def test_init_w_unicode_api_endpoint(): client = publisher.Client(client_options=client_options) assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -78,7 +98,7 @@ def test_init_w_empty_client_options(): client = publisher.Client(client_options={}) 
assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == publisher_client.PublisherClient.SERVICE_ADDRESS @@ -93,7 +113,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client.api.transport.publish._channel + channel = client.api._transport.publish._channel assert channel.target().decode("utf8") == "/foo/bar/" @@ -109,19 +129,6 @@ def test_message_ordering_enabled(): assert client._enable_message_ordering -def test_message_ordering_changes_retry_deadline(): - creds = mock.Mock(spec=credentials.Credentials) - - client = publisher.Client(credentials=creds) - assert client.api._method_configs["Publish"].retry._deadline == 60 - - client = publisher.Client( - publisher_options=types.PublisherOptions(enable_message_ordering=True), - credentials=creds, - ) - assert client.api._method_configs["Publish"].retry._deadline == 2 ** 32 / 1000 - - def test_publish(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) @@ -150,8 +157,10 @@ def test_publish(): # Check mock. 
batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b"spam")), - mock.call(types.PubsubMessage(data=b"foo", attributes={"bar": "baz"})), + mock.call(gapic_types.PubsubMessage(data=b"spam")), + mock.call( + gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ), ] ) @@ -207,6 +216,46 @@ def test_publish_empty_ordering_key_when_message_ordering_enabled(): assert client.publish(topic, b"bytestring body", ordering_key="") is not None +def test_publish_with_ordering_key_uses_extended_retry_deadline(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(enable_message_ordering=True), + ) + + # Use mocks in lieu of the actual batch class. + batch = mock.Mock(spec=client._batch_class) + future = mock.sentinel.future + future.add_done_callback = mock.Mock(spec=["__call__"]) + batch.publish.return_value = future + + topic = "topic/path" + client._set_batch(topic, batch) + + # Actually mock the batch class now. + batch_class = mock.Mock(spec=(), return_value=batch) + client._set_batch_class(batch_class) + + # Publish a message with custom retry settings. + custom_retry = retries.Retry( + initial=1, + maximum=20, + multiplier=3.3, + deadline=999, + predicate=retries.if_exception_type(TimeoutError, KeyboardInterrupt), + ) + future = client.publish(topic, b"foo", ordering_key="first", retry=custom_retry) + assert future is mock.sentinel.future + + # Check the retry settings used for the batch. 
+ batch_class.assert_called_once() + _, kwargs = batch_class.call_args + + batch_commit_retry = kwargs["commit_retry"] + expected_retry = custom_retry.with_deadline(2.0 ** 32) + _assert_retries_equal(batch_commit_retry, expected_retry) + + def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) @@ -225,7 +274,7 @@ def test_publish_attrs_bytestring(): # The attributes should have been sent as text. batch.publish.assert_called_once_with( - types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) ) @@ -262,8 +311,9 @@ def test_publish_new_batch_needed(): settings=client.batch_settings, batch_done_callback=None, commit_when_full=True, + commit_retry=gapic_v1.method.DEFAULT, ) - message_pb = types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) batch1.publish.assert_called_once_with(message_pb) batch2.publish.assert_called_once_with(message_pb) @@ -302,13 +352,21 @@ def test_gapic_instance_method(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) - ct = mock.Mock() - client.api._inner_api_calls["create_topic"] = ct + transport_mock = mock.Mock(create_topic=mock.sentinel) + fake_create_topic_rpc = mock.Mock() + transport_mock._wrapped_methods = { + transport_mock.create_topic: fake_create_topic_rpc + } + patcher = mock.patch.object(client.api, "_transport", new=transport_mock) + + topic = gapic_types.Topic(name="projects/foo/topics/bar") + + with patcher: + client.create_topic(topic) - client.create_topic("projects/foo/topics/bar") - assert ct.call_count == 1 - _, args, _ = ct.mock_calls[0] - assert args[0] == types.Topic(name="projects/foo/topics/bar") + assert fake_create_topic_rpc.call_count == 1 + _, args, _ = fake_create_topic_rpc.mock_calls[0] + assert args[0] == 
gapic_types.Topic(name="projects/foo/topics/bar") def test_gapic_class_method_on_class(): @@ -444,9 +502,9 @@ def test_publish_with_ordering_key(): # Check mock. batch.publish.assert_has_calls( [ - mock.call(types.PubsubMessage(data=b"spam", ordering_key="k1")), + mock.call(gapic_types.PubsubMessage(data=b"spam", ordering_key="k1")), mock.call( - types.PubsubMessage( + gapic_types.PubsubMessage( data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" ) ), diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 43822e96e88c..288e4bd18314 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -15,11 +15,11 @@ import collections import threading -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.pubsub_v1 import types as gapic_types import mock from six.moves import queue @@ -76,7 +76,7 @@ def test_ack(): dispatcher_.ack(items) manager.send.assert_called_once_with( - types.StreamingPullRequest(ack_ids=["ack_id_string"]) + gapic_types.StreamingPullRequest(ack_ids=["ack_id_string"]) ) manager.leaser.remove.assert_called_once_with(items) @@ -98,7 +98,7 @@ def test_ack_no_time(): dispatcher_.ack(items) manager.send.assert_called_once_with( - types.StreamingPullRequest(ack_ids=["ack_id_string"]) + gapic_types.StreamingPullRequest(ack_ids=["ack_id_string"]) ) manager.ack_histogram.add.assert_not_called() @@ -127,7 +127,7 @@ def test_ack_splitting_large_payload(): for call in calls: message = call.args[0] - assert message.ByteSize() <= 524288 # 
server-side limit (2**19) + assert message._pb.ByteSize() <= 524288 # server-side limit (2**19) sent_ack_ids.update(message.ack_ids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been ACK-ed @@ -195,7 +195,7 @@ def test_nack(): dispatcher_.nack(items) manager.send.assert_called_once_with( - types.StreamingPullRequest( + gapic_types.StreamingPullRequest( modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[0] ) ) @@ -211,7 +211,7 @@ def test_modify_ack_deadline(): dispatcher_.modify_ack_deadline(items) manager.send.assert_called_once_with( - types.StreamingPullRequest( + gapic_types.StreamingPullRequest( modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[60] ) ) @@ -238,7 +238,7 @@ def test_modify_ack_deadline_splitting_large_payload(): for call in calls: message = call.args[0] - assert message.ByteSize() <= 524288 # server-side limit (2**19) + assert message._pb.ByteSize() <= 524288 # server-side limit (2**19) sent_ack_ids.update(message.modify_deadline_ack_ids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been MODACK-ed diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 0c8a6d181a59..09f796480eed 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -21,9 +21,9 @@ from google.protobuf import timestamp_pb2 from google.api_core import datetime_helpers -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.pubsub_v1 import types as gapic_types RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc) @@ -36,16 +36,19 @@ def create_message(data, ack_id="ACKID", delivery_attempt=0, ordering_key="", **attrs): with 
mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS - msg = message.Message( - message=types.PubsubMessage( - attributes=attrs, - data=data, - message_id="message_id", - publish_time=timestamp_pb2.Timestamp( - seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 - ), - ordering_key=ordering_key, + gapic_pubsub_message = gapic_types.PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 ), + ordering_key=ordering_key, + ) + msg = message.Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, ack_id=ack_id, delivery_attempt=delivery_attempt, request_queue=queue.Queue(), diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 946e2598ac8f..6fd83d13a515 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -14,14 +14,14 @@ from six.moves import queue -from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold +from google.pubsub_v1 import types as gapic_types def make_message(ack_id, ordering_key): - proto_msg = types.PubsubMessage(data=b"Q", ordering_key=ordering_key) - return message.Message(proto_msg, ack_id, 0, queue.Queue()) + proto_msg = gapic_types.PubsubMessage(data=b"Q", ordering_key=ordering_key) + return message.Message(proto_msg._pb, ack_id, 0, queue.Queue()) def test_init(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 3f2881df6c09..d1bac4335294 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -23,7 +23,6 @@ from google.api_core import bidi from google.api_core import exceptions from google.cloud.pubsub_v1 import types -from google.cloud.pubsub_v1.gapic import subscriber_client_config from google.cloud.pubsub_v1.subscriber import client from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber import scheduler @@ -33,6 +32,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.pubsub_v1 import types as gapic_types import grpc @@ -363,7 +363,7 @@ def test_send_unary(): manager._UNARY_REQUESTS = True manager.send( - types.StreamingPullRequest( + gapic_types.StreamingPullRequest( ack_ids=["ack_id1", "ack_id2"], modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], modify_deadline_seconds=[10, 20, 20], @@ -395,7 +395,7 @@ def test_send_unary_empty(): manager = make_manager() manager._UNARY_REQUESTS = True - manager.send(types.StreamingPullRequest()) + manager.send(gapic_types.StreamingPullRequest()) manager._client.acknowledge.assert_not_called() manager._client.modify_ack_deadline.assert_not_called() @@ -410,7 +410,7 @@ def test_send_unary_api_call_error(caplog): error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error - manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager.send(gapic_types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) assert "The front fell off" in caplog.text @@ -427,7 +427,7 @@ def 
test_send_unary_retry_error(caplog): manager._client.acknowledge.side_effect = error with pytest.raises(exceptions.RetryError): - manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager.send(gapic_types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) assert "RetryError while sending unary RPC" in caplog.text assert "signaled streaming pull manager shutdown" in caplog.text @@ -450,7 +450,7 @@ def test_heartbeat(): manager.heartbeat() - manager._rpc.send.assert_called_once_with(types.StreamingPullRequest()) + manager._rpc.send.assert_called_once_with(gapic_types.StreamingPullRequest()) def test_heartbeat_inactive(): @@ -661,7 +661,7 @@ def test__get_initial_request(): initial_request = manager._get_initial_request(123) - assert isinstance(initial_request, types.StreamingPullRequest) + assert isinstance(initial_request, gapic_types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == ["1", "2"] @@ -674,7 +674,7 @@ def test__get_initial_request_wo_leaser(): initial_request = manager._get_initial_request(123) - assert isinstance(initial_request, types.StreamingPullRequest) + assert isinstance(initial_request, gapic_types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 123 assert initial_request.modify_deadline_ack_ids == [] @@ -686,14 +686,15 @@ def test__on_response_delivery_attempt(): manager._callback = mock.sentinel.callback # Set up the messages. 
- response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="back", - message=types.PubsubMessage(data=b"bar", message_id="2"), + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), delivery_attempt=6, ), ] @@ -717,13 +718,15 @@ def test__on_response_no_leaser_overload(): manager._callback = mock.sentinel.callback # Set up the messages. - response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( - ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + gapic_types.ReceivedMessage( + ack_id="back", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), ), ] ) @@ -754,16 +757,19 @@ def test__on_response_with_leaser_overload(): manager._callback = mock.sentinel.callback # Set up the messages. 
- response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( - ack_id="fack", message=types.PubsubMessage(data=b"foo", message_id="1") + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), ), - types.ReceivedMessage( - ack_id="back", message=types.PubsubMessage(data=b"bar", message_id="2") + gapic_types.ReceivedMessage( + ack_id="back", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), ), - types.ReceivedMessage( - ack_id="zack", message=types.PubsubMessage(data=b"baz", message_id="3") + gapic_types.ReceivedMessage( + ack_id="zack", + message=gapic_types.PubsubMessage(data=b"baz", message_id="3"), ), ] ) @@ -825,23 +831,23 @@ def test__on_response_with_ordering_keys(): manager._callback = mock.sentinel.callback # Set up the messages. - response = types.StreamingPullResponse( + response = gapic_types.StreamingPullResponse( received_messages=[ - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="fack", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"foo", message_id="1", ordering_key="" ), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="back", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"bar", message_id="2", ordering_key="key1" ), ), - types.ReceivedMessage( + gapic_types.ReceivedMessage( ack_id="zack", - message=types.PubsubMessage( + message=gapic_types.PubsubMessage( data=b"baz", message_id="3", ordering_key="key1" ), ), @@ -901,20 +907,6 @@ def test__on_response_with_ordering_keys(): assert manager._messages_on_hold.get() is None -def test_retryable_stream_errors(): - # Make sure the config matches our hard-coded tuple of exceptions. 
- interfaces = subscriber_client_config.config["interfaces"] - retry_codes = interfaces["google.pubsub.v1.Subscriber"]["retry_codes"] - idempotent = retry_codes["idempotent"] - - status_codes = tuple(getattr(grpc.StatusCode, name, None) for name in idempotent) - expected = tuple( - exceptions.exception_class_for_grpc_status(status_code) - for status_code in status_codes - ) - assert set(expected).issubset(set(streaming_pull_manager._RETRYABLE_STREAM_ERRORS)) - - def test__should_recover_true(): manager = make_manager() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 310485279d9e..f75f1dae21b6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -16,9 +16,10 @@ import mock from google.cloud.pubsub_v1 import subscriber -from google.cloud.pubsub_v1.gapic import subscriber_client from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures +from google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1.services.subscriber.transports.grpc import SubscriberGrpcTransport def test_init(): @@ -28,10 +29,10 @@ def test_init(): def test_init_w_custom_transport(): - transport = object() + transport = SubscriberGrpcTransport() client = subscriber.Client(transport=transport) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert client.api.transport is transport + assert client.api._transport is transport def test_init_w_api_endpoint(): @@ -39,7 +40,7 @@ def test_init_w_api_endpoint(): client = subscriber.Client(client_options=client_options) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( + assert 
(client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -49,7 +50,7 @@ def test_init_w_unicode_api_endpoint(): client = subscriber.Client(client_options=client_options) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com" @@ -58,7 +59,7 @@ def test_init_w_empty_client_options(): client = subscriber.Client(client_options={}) assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api.transport._channel._channel.target()).decode( + assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS @@ -73,7 +74,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client.api.transport.pull._channel + channel = client.api._transport.pull._channel assert channel.target().decode("utf8") == "/baz/bacon/" @@ -138,31 +139,32 @@ def test_subscribe_options(manager_open): def test_close(): - mock_transport = mock.NonCallableMock() - client = subscriber.Client(transport=mock_transport) + client = subscriber.Client() + patcher = mock.patch.object(client.api._transport.grpc_channel, "close") - client.close() + with patcher as patched_close: + client.close() - mock_transport.channel.close.assert_called() + patched_close.assert_called() def test_closes_channel_as_context_manager(): - mock_transport = mock.NonCallableMock() - client = subscriber.Client(transport=mock_transport) + client = subscriber.Client() + patcher = mock.patch.object(client.api._transport.grpc_channel, "close") - with client: - pass + with patcher as patched_close: + with client: + pass - mock_transport.channel.close.assert_called() + patched_close.assert_called() def 
test_streaming_pull_gapic_monkeypatch(): - transport = mock.NonCallableMock(spec=["streaming_pull"]) - transport.streaming_pull = mock.Mock(spec=[]) - client = subscriber.Client(transport=transport) + client = subscriber.Client() - client.streaming_pull(requests=iter([])) + with mock.patch("google.api_core.gapic_v1.method.wrap_method"): + client.streaming_pull(requests=iter([])) - assert client.api.transport is transport + transport = client.api._transport assert hasattr(transport.streaming_pull, "_prefetch_first_result_") assert not transport.streaming_pull._prefetch_first_result_ From 2952226e65a457100b7171cee73ef53a73f561f2 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 10 Sep 2020 16:52:14 +0200 Subject: [PATCH 0584/1197] test: fix flaky sequencer unit tests (#187) Patching the client under test should be done on an instance used in a test, not on the instance's class - patching the latter can cause all other instances of the same class to share the patched method, possibly interfering with the patched method's call count. --- .../unit/pubsub_v1/publisher/test_publisher_client.py | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index bcdbb2f34007..1760482ac4a0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -435,9 +435,7 @@ def test_wait_and_commit_sequencers(): # Mock out time so no sleep is actually done. 
with mock.patch.object(time, "sleep"): - with mock.patch.object( - publisher.Client, "_commit_sequencers" - ) as _commit_sequencers: + with mock.patch.object(client, "_commit_sequencers") as _commit_sequencers: assert ( client.publish("topic", b"bytestring body", ordering_key="") is not None ) @@ -456,9 +454,7 @@ def test_stopped_client_does_not_commit_sequencers(): # Mock out time so no sleep is actually done. with mock.patch.object(time, "sleep"): - with mock.patch.object( - publisher.Client, "_commit_sequencers" - ) as _commit_sequencers: + with mock.patch.object(client, "_commit_sequencers") as _commit_sequencers: assert ( client.publish("topic", b"bytestring body", ordering_key="") is not None ) From 3e455743cf5ee6118de0d0f843f601abc8844fa9 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 14 Sep 2020 11:24:19 +0200 Subject: [PATCH 0585/1197] chore: Release v2.0.0 (#189) * chore: release v2.0.0 * Add compatibility notice to 1.7.0 release notes. * Fix typo in pull request URL in CHANGELOG. --- packages/google-cloud-pubsub/CHANGELOG.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index e66a2e481618..270b3f561947 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,8 +5,30 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 2.0.0 + +09-11-2020 05:03 PDT + + +### Implementation Changes + +- Transition the library to microgenerator. ([#158](https://github.com/googleapis/python-pubsub/pull/158)) + This is a **breaking change** that introduces several **method signature changes** and **drops support + for Python 2.7 and 3.5**. + +### Documentation + +- Add samples for using ordering keys. ([#156](https://github.com/googleapis/python-pubsub/pull/156)) +- Remove extra white space in delivery attempt sample. 
([#159](https://github.com/googleapis/python-pubsub/pull/159)) + +### Internal / Testing Changes + +- Fix flaky sequencer unit tests. ([#187](https://github.com/googleapis/python-pubsub/pull/187)) + ## [1.7.0](https://www.github.com/googleapis/python-pubsub/compare/v1.6.1...v1.7.0) (2020-07-13) +This is the last release that supports Python 2.7 and 3.5. + ### New Features - Add support for server-side flow control. ([#143](https://github.com/googleapis/python-pubsub/pull/143)) ([04e261c](https://www.github.com/googleapis/python-pubsub/commit/04e261c602a2919cc75b3efa3dab099fb2cf704c)) From 7006bdd81cc824d67ed2671f6dde3e77c9e74f73 Mon Sep 17 00:00:00 2001 From: Carlos de la Guardia Date: Mon, 14 Sep 2020 12:12:23 -0500 Subject: [PATCH 0586/1197] fix: pass client options to publisher and subscriber clients (#166) (#190) Co-authored-by: Peter Lamut --- .../google/cloud/pubsub_v1/publisher/client.py | 2 +- .../cloud/pubsub_v1/subscriber/client.py | 2 +- .../publisher/test_publisher_client.py | 18 ++++++++++++++++++ .../subscriber/test_subscriber_client.py | 18 ++++++++++++++++++ 4 files changed, 38 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index ea371190cc31..f1e198b1ac90 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -130,7 +130,7 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): target=os.environ.get("PUBSUB_EMULATOR_HOST") ) - client_options = kwargs.pop("client_options", None) + client_options = kwargs.get("client_options", None) if ( client_options and "api_endpoint" in client_options diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 98d6d75c7baf..e0b10c888464 
100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -83,7 +83,7 @@ def __init__(self, **kwargs): ) # api_endpoint wont be applied if 'transport' is passed in. - client_options = kwargs.pop("client_options", None) + client_options = kwargs.get("client_options", None) if ( client_options and "api_endpoint" in client_options diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 1760482ac4a0..3b6aa1477e7b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -103,6 +103,24 @@ def test_init_w_empty_client_options(): ) == publisher_client.PublisherClient.SERVICE_ADDRESS +def test_init_client_options_pass_through(): + def init(self, *args, **kwargs): + self.kwargs = kwargs + + with mock.patch.object(publisher_client.PublisherClient, "__init__", init): + client = publisher.Client( + client_options={ + "quota_project_id": "42", + "scopes": [], + "credentials_file": "file.json", + } + ) + client_options = client.api.kwargs["client_options"] + assert client_options.get("quota_project_id") == "42" + assert client_options.get("scopes") == [] + assert client_options.get("credentials_file") == "file.json" + + def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar/") # NOTE: When the emulator host is set, a custom channel will be used, so diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index f75f1dae21b6..634351757ea1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -64,6 +64,24 @@ def test_init_w_empty_client_options(): ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS +def test_init_client_options_pass_through(): + def init(self, *args, **kwargs): + self.kwargs = kwargs + + with mock.patch.object(subscriber_client.SubscriberClient, "__init__", init): + client = subscriber.Client( + client_options={ + "quota_project_id": "42", + "scopes": [], + "credentials_file": "file.json", + } + ) + client_options = client._api.kwargs["client_options"] + assert client_options.get("quota_project_id") == "42" + assert client_options.get("scopes") == [] + assert client_options.get("credentials_file") == "file.json" + + def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon/") # NOTE: When the emulator host is set, a custom channel will be used, so From e775aa0baa4dfad8f4d2c795cbd0444842d14ff0 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 14 Sep 2020 11:59:50 -0700 Subject: [PATCH 0587/1197] samples: add subscription detachment sample (#152) * samples: add subscription detachment sample * address prad's comments * construct request --- .../samples/snippets/README.rst | 11 ++- .../samples/snippets/iam.py | 88 +++++++++++++------ .../samples/snippets/iam_test.py | 30 ++++--- 3 files changed, 89 insertions(+), 40 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 8c2c31a8e795..699e896e2d9a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -251,8 +251,8 @@ To run this sample: usage: iam.py [-h] - project - {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + project_id + 
{get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions,detach-subscription} ... This application demonstrates how to perform basic operations on IAM @@ -262,8 +262,8 @@ To run this sample: at https://cloud.google.com/pubsub/docs. positional arguments: - project Your Google Cloud project ID - {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} + project_id Your Google Cloud project ID + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions,detach-subscription} get-topic-policy Prints the IAM policy for the given topic. get-subscription-policy Prints the IAM policy for the given subscription. @@ -276,6 +276,9 @@ To run this sample: check-subscription-permissions Checks to which permissions are available on the given subscription. + detach-subscription + Detaches a subscription from a topic and drops all + messages retained in it. optional arguments: -h, --help show this help message and exit diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 71ee5da1b7c8..ce443403f740 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -24,17 +24,17 @@ import argparse -def get_topic_policy(project, topic_id): +def get_topic_policy(project_id, topic_id): """Prints the IAM policy for the given topic.""" # [START pubsub_get_topic_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing topic. 
# project_id = "your-project-id" # topic_id = "your-topic-id" client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) + topic_path = client.topic_path(project_id, topic_id) policy = client.get_iam_policy(request={"resource": topic_path}) @@ -44,17 +44,17 @@ def get_topic_policy(project, topic_id): # [END pubsub_get_topic_policy] -def get_subscription_policy(project, subscription_id): +def get_subscription_policy(project_id, subscription_id): """Prints the IAM policy for the given subscription.""" # [START pubsub_get_subscription_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing subscription. # project_id = "your-project-id" # subscription_id = "your-subscription-id" client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) + subscription_path = client.subscription_path(project_id, subscription_id) policy = client.get_iam_policy(request={"resource": subscription_path}) @@ -66,17 +66,17 @@ def get_subscription_policy(project, subscription_id): # [END pubsub_get_subscription_policy] -def set_topic_policy(project, topic_id): +def set_topic_policy(project_id, topic_id): """Sets the IAM policy for a topic.""" # [START pubsub_set_topic_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing topic. 
# project_id = "your-project-id" # topic_id = "your-topic-id" client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) + topic_path = client.topic_path(project_id, topic_id) policy = client.get_iam_policy(request={"resource": topic_path}) @@ -95,17 +95,17 @@ def set_topic_policy(project, topic_id): # [END pubsub_set_topic_policy] -def set_subscription_policy(project, subscription_id): +def set_subscription_policy(project_id, subscription_id): """Sets the IAM policy for a topic.""" # [START pubsub_set_subscription_policy] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing subscription. # project_id = "your-project-id" # subscription_id = "your-subscription-id" client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) + subscription_path = client.subscription_path(project_id, subscription_id) policy = client.get_iam_policy(request={"resource": subscription_path}) @@ -126,17 +126,17 @@ def set_subscription_policy(project, subscription_id): # [END pubsub_set_subscription_policy] -def check_topic_permissions(project, topic_id): +def check_topic_permissions(project_id, topic_id): """Checks to which permissions are available on the given topic.""" # [START pubsub_test_topic_permissions] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing topic. 
# project_id = "your-project-id" # topic_id = "your-topic-id" client = pubsub_v1.PublisherClient() - topic_path = client.topic_path(project, topic_id) + topic_path = client.topic_path(project_id, topic_id) permissions_to_check = ["pubsub.topics.publish", "pubsub.topics.update"] @@ -150,17 +150,17 @@ def check_topic_permissions(project, topic_id): # [END pubsub_test_topic_permissions] -def check_subscription_permissions(project, subscription_id): +def check_subscription_permissions(project_id, subscription_id): """Checks to which permissions are available on the given subscription.""" # [START pubsub_test_subscription_permissions] from google.cloud import pubsub_v1 - # TODO(developer) + # TODO(developer): Choose an existing subscription. # project_id = "your-project-id" # subscription_id = "your-subscription-id" client = pubsub_v1.SubscriberClient() - subscription_path = client.subscription_path(project, subscription_id) + subscription_path = client.subscription_path(project_id, subscription_id) permissions_to_check = [ "pubsub.subscriptions.consume", @@ -181,11 +181,42 @@ def check_subscription_permissions(project, subscription_id): # [END pubsub_test_subscription_permissions] +def detach_subscription(project_id, subscription_id): + """Detaches a subscription from a topic and drops all messages retained in it.""" + # [START pubsub_detach_subscription] + from google.api_core.exceptions import GoogleAPICallError, RetryError + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing subscription. 
+ # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + publisher_client = pubsub_v1.PublisherClient() + subscriber_client = pubsub_v1.SubscriberClient() + subscription_path = subscriber_client.subscription_path(project_id, subscription_id) + + try: + publisher_client.detach_subscription( + request={"subscription": subscription_path} + ) + except (GoogleAPICallError, RetryError, ValueError, Exception) as err: + print(err) + + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) + if subscription.detached: + print("Subscription is detached.") + else: + print("Subscription is NOT detached.") + # [END pubsub_detach_subscription] + + if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) - parser.add_argument("project", help="Your Google Cloud project ID") + parser.add_argument("project_id", help="Your Google Cloud project ID") subparsers = parser.add_subparsers(dest="command") @@ -219,17 +250,24 @@ def check_subscription_permissions(project, subscription_id): ) check_subscription_permissions_parser.add_argument("subscription_id") + detach_subscription_parser = subparsers.add_parser( + "detach-subscription", help=detach_subscription.__doc__, + ) + detach_subscription_parser.add_argument("subscription_id") + args = parser.parse_args() if args.command == "get-topic-policy": - get_topic_policy(args.project, args.topic_id) + get_topic_policy(args.project_id, args.topic_id) elif args.command == "get-subscription-policy": - get_subscription_policy(args.project, args.subscription_id) + get_subscription_policy(args.project_id, args.subscription_id) elif args.command == "set-topic-policy": - set_topic_policy(args.project, args.topic_id) + set_topic_policy(args.project_id, args.topic_id) elif args.command == "set-subscription-policy": - set_subscription_policy(args.project, args.subscription_id) + 
set_subscription_policy(args.project_id, args.subscription_id) elif args.command == "check-topic-permissions": - check_topic_permissions(args.project, args.topic_id) + check_topic_permissions(args.project_id, args.topic_id) elif args.command == "check-subscription-permissions": - check_subscription_permissions(args.project, args.subscription_id) + check_subscription_permissions(args.project_id, args.subscription_id) + elif args.command == "detach-subscription": + detach_subscription(args.project_id, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 17bfdb256a9f..033b6ccc7b41 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -21,9 +21,9 @@ import iam UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "iam-test-topic-" + UUID -SUBSCRIPTION = "iam-test-subscription-" + UUID +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ID = "iam-test-topic-" + UUID +SUBSCRIPTION_ID = "iam-test-subscription-" + UUID @pytest.fixture(scope="module") @@ -33,7 +33,7 @@ def publisher_client(): @pytest.fixture(scope="module") def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: publisher_client.delete_topic(request={"topic": topic_path}) @@ -56,7 +56,7 @@ def subscriber_client(): @pytest.fixture def subscription(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) + subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) try: subscriber_client.delete_subscription( @@ -75,21 +75,21 @@ def subscription(subscriber_client, topic): def test_get_topic_policy(topic, capsys): - iam.get_topic_policy(PROJECT, TOPIC) + iam.get_topic_policy(PROJECT_ID, TOPIC_ID) out, _ = 
capsys.readouterr() assert topic in out def test_get_subscription_policy(subscription, capsys): - iam.get_subscription_policy(PROJECT, SUBSCRIPTION) + iam.get_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() assert subscription in out def test_set_topic_policy(publisher_client, topic): - iam.set_topic_policy(PROJECT, TOPIC) + iam.set_topic_policy(PROJECT_ID, TOPIC_ID) policy = publisher_client.get_iam_policy(request={"resource": topic}) assert "roles/pubsub.publisher" in str(policy) @@ -97,7 +97,7 @@ def test_set_topic_policy(publisher_client, topic): def test_set_subscription_policy(subscriber_client, subscription): - iam.set_subscription_policy(PROJECT, SUBSCRIPTION) + iam.set_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) policy = subscriber_client.get_iam_policy(request={"resource": subscription}) assert "roles/pubsub.viewer" in str(policy) @@ -105,7 +105,7 @@ def test_set_subscription_policy(subscriber_client, subscription): def test_check_topic_permissions(topic, capsys): - iam.check_topic_permissions(PROJECT, TOPIC) + iam.check_topic_permissions(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() @@ -114,9 +114,17 @@ def test_check_topic_permissions(topic, capsys): def test_check_subscription_permissions(subscription, capsys): - iam.check_subscription_permissions(PROJECT, SUBSCRIPTION) + iam.check_subscription_permissions(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() assert subscription in out assert "pubsub.subscriptions.consume" in out + + +def test_detach_subscription(subscription, capsys): + iam.detach_subscription(PROJECT_ID, SUBSCRIPTION_ID) + + out, _ = capsys.readouterr() + + assert "Subscription is detached." 
in out From 2c288ba4be688546db1e225d1cf96aa9244616fb Mon Sep 17 00:00:00 2001 From: Stephanie Wang Date: Mon, 14 Sep 2020 17:38:02 -0400 Subject: [PATCH 0588/1197] chore: update CODEOWNERS (#193) --- packages/google-cloud-pubsub/.github/CODEOWNERS | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS index cf01548a9f04..a4def22dbd66 100644 --- a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -5,7 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# The python-samples-owners team is the default owner for anything not -# explicitly taken by someone else. +# The @googleapis/api-pubsub is the default owner for changes in this repo +* @googleapis/api-pubsub - /samples/ @anguillanneuf @hongalex @googleapis/python-samples-owners +# The python-samples-reviewers team is the default owner for samples changes +/samples/ @googleapis/python-samples-owners From 8843db8c183b4d3e388b30c45e9b4c5872419bb9 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 16 Sep 2020 16:33:13 +0200 Subject: [PATCH 0589/1197] fix: convert all RPC error types to exceptions (#170) --- .../_protocol/streaming_pull_manager.py | 29 +++++++++++++------ .../subscriber/test_streaming_pull_manager.py | 12 ++++++-- 2 files changed, 29 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 7476e887bcac..3159ba84838f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -54,11 +54,22 @@ """The load threshold 
below which to resume the incoming message stream.""" -def _maybe_wrap_exception(exception): - """Wraps a gRPC exception class, if needed.""" - if isinstance(exception, grpc.RpcError): - return exceptions.from_grpc_error(exception) - return exception +def _wrap_as_exception(maybe_exception): + """Wrap an object as a Python exception, if needed. + + Args: + maybe_exception (Any): The object to wrap, usually a gRPC exception class. + + Returns: + The argument itself if an instance of ``BaseException``, otherwise + the argument represented as an instance of ``Exception`` (sub)class. + """ + if isinstance(maybe_exception, grpc.RpcError): + return exceptions.from_grpc_error(maybe_exception) + elif isinstance(maybe_exception, BaseException): + return maybe_exception + + return Exception(maybe_exception) def _wrap_callback_errors(callback, on_callback_error, message): @@ -656,7 +667,7 @@ def _should_recover(self, exception): Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of retryable / idempotent exceptions. """ - exception = _maybe_wrap_exception(exception) + exception = _wrap_as_exception(exception) # If this is in the list of idempotent exceptions, then we want to # recover. if isinstance(exception, _RETRYABLE_STREAM_ERRORS): @@ -678,7 +689,7 @@ def _should_terminate(self, exception): Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of terminating exceptions. """ - exception = _maybe_wrap_exception(exception) + exception = _wrap_as_exception(exception) if isinstance(exception, _TERMINATING_STREAM_ERRORS): _LOGGER.info("Observed terminating stream error %s", exception) return True @@ -697,9 +708,9 @@ def _on_rpc_done(self, future): background consumer and preventing it from being ``joined()``. 
""" _LOGGER.info("RPC termination has signaled streaming pull manager shutdown.") - future = _maybe_wrap_exception(future) + error = _wrap_as_exception(future) thread = threading.Thread( - name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": future} + name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": error} ) thread.daemon = True thread.start() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index d1bac4335294..d3eb4351bde8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -44,11 +44,13 @@ mock.create_autospec(grpc.RpcError, instance=True), exceptions.GoogleAPICallError, ), + ({"error": "RPC terminated"}, Exception), + ("something broke", Exception), ], ) -def test__maybe_wrap_exception(exception, expected_cls): +def test__wrap_as_exception(exception, expected_cls): assert isinstance( - streaming_pull_manager._maybe_wrap_exception(exception), expected_cls + streaming_pull_manager._wrap_as_exception(exception), expected_cls ) @@ -948,8 +950,12 @@ def test__on_rpc_done(thread): manager._on_rpc_done(mock.sentinel.error) thread.assert_called_once_with( - name=mock.ANY, target=manager.close, kwargs={"reason": mock.sentinel.error} + name=mock.ANY, target=manager.close, kwargs={"reason": mock.ANY} ) + _, kwargs = thread.call_args + reason = kwargs["kwargs"]["reason"] + assert isinstance(reason, Exception) + assert reason.args == (mock.sentinel.error,) # Exception wraps the original error def test_activate_ordering_keys(): From 0dafc64a94016ce4937f4e28634f2bba7ecdd93d Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Fri, 18 Sep 2020 14:10:48 -0700 Subject: [PATCH 0590/1197] feat: regenerate 
the client lib to pick new mtls env (#197) * feat: regenerate the client lib to pick new mtls env * update google-api-core version * revert some files in samples/ and scripts/ --- .../.kokoro/populate-secrets.sh | 43 ++ .../.kokoro/release/common.cfg | 50 +- .../google-cloud-pubsub/.kokoro/trampoline.sh | 15 +- packages/google-cloud-pubsub/docs/conf.py | 2 +- .../services/publisher/async_client.py | 32 +- .../pubsub_v1/services/publisher/client.py | 70 ++- .../services/publisher/transports/base.py | 12 +- .../services/publisher/transports/grpc.py | 46 +- .../publisher/transports/grpc_asyncio.py | 51 +- .../services/subscriber/async_client.py | 40 +- .../pubsub_v1/services/subscriber/client.py | 70 ++- .../services/subscriber/transports/base.py | 18 +- .../services/subscriber/transports/grpc.py | 46 +- .../subscriber/transports/grpc_asyncio.py | 51 +- packages/google-cloud-pubsub/noxfile.py | 4 +- .../scripts/decrypt-secrets.sh | 15 +- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 10 +- .../unit/gapic/pubsub_v1/test_publisher.py | 491 +++++++++--------- .../unit/gapic/pubsub_v1/test_subscriber.py | 491 +++++++++--------- 20 files changed, 881 insertions(+), 678 deletions(-) create mode 100755 packages/google-cloud-pubsub/.kokoro/populate-secrets.sh diff --git a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh new file mode 100755 index 000000000000..f52514257ef0 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? 
== 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg index d9c893b597ee..625c3fdbb3ba 100644 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-pubsub/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline.sh b/packages/google-cloud-pubsub/.kokoro/trampoline.sh index e8c4251f3ed4..f39236e943a8 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 
"${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 7bd17033d3d5..a785da8a80fc 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -29,7 +29,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 6338887a7be0..601fc09d2671 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -49,6 +49,7 @@ class PublisherAsyncClient: DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT topic_path = staticmethod(PublisherClient.topic_path) + parse_topic_path = staticmethod(PublisherClient.parse_topic_path) from_service_account_file = PublisherClient.from_service_account_file from_service_account_json = from_service_account_file @@ -79,16 +80,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. 
It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -308,13 +312,13 @@ async def publish( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, exceptions.Cancelled, exceptions.DeadlineExceeded, + exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, exceptions.Unknown, - exceptions.InternalServerError, + exceptions.Aborted, ), ), default_timeout=60.0, @@ -391,8 +395,8 @@ async def get_topic( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -473,8 +477,8 @@ async def list_topics( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -564,8 +568,8 @@ async def list_topic_subscriptions( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -659,8 +663,8 @@ async def list_topic_snapshots( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 22225b83fbe2..2df57b0caa30 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -16,6 +16,7 @@ # from collections import OrderedDict +from distutils import util import os import re from typing import Callable, Dict, Sequence, Tuple, Type, Union @@ -27,6 +28,7 @@ from google.api_core import retry as retries # 
type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -177,16 +179,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. 
If ``None``, then default info will be used. @@ -202,25 +207,43 @@ def __init__( if client_options is None: client_options = ClientOptions.ClientOptions() - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -244,10 +267,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 9a99a899e43f..fcb3b99a84bc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -139,13 +139,13 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, exceptions.Cancelled, exceptions.DeadlineExceeded, + exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, exceptions.Unknown, - exceptions.InternalServerError, + exceptions.Aborted, ), ), default_timeout=60.0, @@ -159,8 +159,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -174,8 +174,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -189,8 +189,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), 
), default_timeout=60.0, @@ -204,8 +204,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index ad5f95684e37..d3d015f6bd1e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. # +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore @@ -60,6 +60,7 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -80,14 +81,16 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -110,6 +113,11 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -140,6 +148,23 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -205,13 +230,6 @@ def grpc_channel(self) -> grpc.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. 
- if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 5d70e3d626d7..6d30b31fd10f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. # +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -102,6 +104,7 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -123,14 +126,16 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. 
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -153,12 +158,22 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -178,6 +193,23 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. 
super().__init__( @@ -198,13 +230,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index a169e37808bd..bcf3649f9d31 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -53,8 +53,9 @@ class SubscriberAsyncClient: DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT snapshot_path = staticmethod(SubscriberClient.snapshot_path) - + parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) subscription_path = staticmethod(SubscriberClient.subscription_path) + parse_subscription_path = staticmethod(SubscriberClient.parse_subscription_path) from_service_account_file = SubscriberClient.from_service_account_file from_service_account_json = from_service_account_file @@ -85,16 +86,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -241,8 +245,8 @@ async def create_subscription( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -320,8 +324,8 @@ async def get_subscription( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -464,8 +468,8 @@ async def list_subscriptions( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -850,8 +854,8 @@ async def pull( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -919,11 +923,11 @@ def streaming_pull( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, exceptions.DeadlineExceeded, + exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.InternalServerError, + exceptions.Aborted, ), ), default_timeout=900.0, @@ -1098,8 +1102,8 @@ async def get_snapshot( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1185,8 +1189,8 @@ async def list_snapshots( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -1528,8 +1532,8 @@ async def seek( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + 
exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 58a7cd1f9290..1193044c464f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -16,6 +16,7 @@ # from collections import OrderedDict +from distutils import util import os import re from typing import Callable, Dict, Iterable, Iterator, Sequence, Tuple, Type, Union @@ -27,6 +28,7 @@ from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -198,16 +200,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. 
+ use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -223,25 +228,43 @@ def __init__( if client_options is None: client_options = ClientOptions.ClientOptions() - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -265,10 +288,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, client_info=client_info, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 7d7dfc6223f2..600369d83d28 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -118,8 +118,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -133,8 +133,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -159,8 +159,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -207,8 +207,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -221,11 +221,11 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, 
predicate=retries.if_exception_type( - exceptions.Aborted, exceptions.DeadlineExceeded, + exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.InternalServerError, + exceptions.Aborted, ), ), default_timeout=900.0, @@ -250,8 +250,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -265,8 +265,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, @@ -313,8 +313,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.ServiceUnavailable, exceptions.Unknown, + exceptions.ServiceUnavailable, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 721d31d36032..e2f20b1c994f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. 
# +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore @@ -62,6 +62,7 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -82,14 +83,16 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -112,6 +115,11 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -142,6 +150,23 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -207,13 +232,6 @@ def grpc_channel(self) -> grpc.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 0e844728f445..cc2f3a240c83 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. 
# +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -104,6 +106,7 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -125,14 +128,16 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -155,12 +160,22 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -180,6 +195,23 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -200,13 +232,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. 
return self._grpc_channel diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 09e7acbda316..6a59685cd580 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -171,7 +171,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh index ff599eb2af25..21f6d2a26d90 100755 --- a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index c26d140a471c..88dd9d5444cd 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.22.1, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "libcst >= 0.3.10", "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 46bc8fdd2426..a29820d3d21a 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,29 +3,29 @@ { "git": { "name": ".", - "remote": "git@github.com:plamut/python-pubsub.git", - "sha": "c29d7f891c776e1a3fcb1cbfc7f549ca0772f38e" + "remote": "https://github.com/googleapis/python-pubsub.git", + "sha": "89c671aeb4de2c47f45ca1e438b91c440bead958" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" + "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" + "sha": 
"dba48bb9bc6959c232bec9150ac6313b608fe7bd" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "b2c32f1c8a4094f0f47fcf5d10f0b6f2bfb3387d" + "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" } } ], diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index f159c1cfb90a..b7d99542c33c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -149,15 +149,14 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -166,15 +165,14 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -183,95 +181,173 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", "true"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (PublisherClient, transports.PublisherGrpcTransport, "grpc", "false"), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) +) +@mock.patch.object( + PublisherAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PublisherAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_publisher_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + 
is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -298,8 +374,7 @@ def test_publisher_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -329,8 +404,7 @@ def test_publisher_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - 
api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -347,8 +421,7 @@ def test_publisher_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -2353,6 +2426,18 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -2421,6 +2506,17 @@ def test_publisher_base_transport_with_credentials_file(): ) +def test_publisher_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport() + adc.assert_called_once() + + def test_publisher_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(auth, "default") as adc: @@ -2475,191 +2571,108 @@ def test_publisher_host_with_port(): def test_publisher_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.PublisherGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called def test_publisher_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.PublisherGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_publisher_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.PublisherGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_publisher_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.PublisherGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_publisher_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.PublisherGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_publisher_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + 
assert transport.grpc_channel == mock_grpc_channel @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_publisher_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. +def test_publisher_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.PublisherGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, 
+ ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel def test_topic_path(): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index be5460b4868b..89223117a296 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -154,15 +154,14 @@ def test_subscriber_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -171,15 +170,14 @@ def test_subscriber_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -188,95 +186,173 @@ def test_subscriber_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "true"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "false"), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) +) +@mock.patch.object( + SubscriberAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SubscriberAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_subscriber_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + 
is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -303,8 +379,7 @@ def test_subscriber_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -334,8 +409,7 @@ def test_subscriber_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - 
api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -352,8 +426,7 @@ def test_subscriber_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -3609,6 +3682,18 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -3684,6 +3769,17 @@ def test_subscriber_base_transport_with_credentials_file(): ) +def test_subscriber_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport() + adc.assert_called_once() + + def test_subscriber_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(auth, "default") as adc: @@ -3738,191 +3834,108 @@ def test_subscriber_host_with_port(): def test_subscriber_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.SubscriberGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called def test_subscriber_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.SubscriberGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_subscriber_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.SubscriberGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_subscriber_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.SubscriberGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_subscriber_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.SubscriberGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + 
assert transport.grpc_channel == mock_grpc_channel @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_subscriber_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. +def test_subscriber_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.SubscriberGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + 
client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel def test_snapshot_path(): From 7bf435eb0c4f70bd1622dc23cd50fea5e206b33e Mon Sep 17 00:00:00 2001 From: Carlos de la Guardia Date: Mon, 21 Sep 2020 04:02:04 -0500 Subject: [PATCH 0591/1197] docs: use new call syntax in subscriber docs (#203) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [X] Appropriate docs were updated (if necessary) Fixes #198 🦕 --- .../docs/subscriber/index.rst | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst index ed99566cd3bd..bfd2fff266b5 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -38,7 +38,7 @@ to subscribe to, and it must already exist. Once you have that, it is easy: # your application. sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) topic_path = subscriber.topic_path(PROJECT, TOPIC) - subscriber.create_subscription(sub_path, topic_path) + subscriber.create_subscription(request={"name": sub_path, "topic": topic_path}) Once you have created a subscription (or if you already had one), the next step is to pull data from it. @@ -55,13 +55,23 @@ To pull the messages synchronously, use the client's # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. 
subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) - response = subscriber.pull(subscription_path, max_messages=5) + response = subscriber.pull( + request={ + "subscription": subscription_path, + "max_messages": 5, + } + ) for msg in response.received_messages: print("Received message:", msg.message.data) ack_ids = [msg.ack_id for msg in response.received_messages] - subscriber.acknowledge(subscription_path, ack_ids) + subscriber.acknowledge( + request={ + "subscription": subscription_path, + "ack_ids": ack_ids, + } + ) The method returns a :class:`~.pubsub_v1.types.PullResponse` instance that contains a list of received :class:`~.pubsub_v1.types.ReceivedMessage` @@ -76,7 +86,13 @@ be dropped by this client and the backend will try to re-deliver them. ack_ids = [] # TODO: populate with `ack_ids` of the messages to NACK ack_deadline_seconds = 0 - subscriber.modify_ack_deadline(subscription_path, ack_ids, ack_deadline_seconds) + subscriber.modify_ack_deadline( + request={ + "subscription": subscription_path, + "ack_ids": ack_ids, + "ack_deadline_seconds": ack_deadline_seconds, + } + ) Pulling a Subscription Asynchronously From add0f456691549c3245160b3ecd9e1bcb8c3145f Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 21 Sep 2020 20:33:16 +0200 Subject: [PATCH 0592/1197] chore: release v2.1.0 (#204) --- packages/google-cloud-pubsub/CHANGELOG.md | 26 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 270b3f561947..ceb528423da8 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,32 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 2.1.0 + +09-21-2020 02:19 PDT + + +### Implementation Changes + +- Convert all RPC error types to exceptions. 
([#163](https://github.com/googleapis/python-pubsub/issues/163)) ([#170](https://github.com/googleapis/python-pubsub/pull/170)) +- Pass client options to publisher and subscriber clients. ([#166](https://github.com/googleapis/python-pubsub/issues/166)) ([#190](https://github.com/googleapis/python-pubsub/pull/190)) + + +### New Features + +- Regenerate the client lib to pick new mtls env (via synth). ([#197](https://github.com/googleapis/python-pubsub/pull/197)) + + +### Documentation + +- Add subscription detachment sample. ([#152](https://github.com/googleapis/python-pubsub/pull/152)) +- Use new call syntax in subscriber docs. ([#198](https://github.com/googleapis/python-pubsub/issues/198)) ([#203](https://github.com/googleapis/python-pubsub/pull/203)) + + +### Internal / Testing Changes + +- Update CODEOWNERS. ([#193](https://github.com/googleapis/python-pubsub/pull/193)) + ## 2.0.0 09-11-2020 05:03 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 88dd9d5444cd..4b33ecac7d81 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.0.0" +version = "2.1.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 680d9399ada18d128bcb102c2d94b8f4a47599dc Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 25 Sep 2020 12:56:07 -0700 Subject: [PATCH 0593/1197] chore: clean up region tags (#207) --- packages/google-cloud-pubsub/samples/snippets/publisher.py | 4 ++-- .../google-cloud-pubsub/samples/snippets/quickstart/pub.py | 7 ------- .../google-cloud-pubsub/samples/snippets/quickstart/sub.py | 7 ------- 3 files changed, 2 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d1b7602803a8..78dff307dda3 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -134,7 +134,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): def publish_messages_with_error_handler(project_id, topic_id): - # [START pubsub_publish_messages_error_handler] + # [START pubsub_publish_with_error_handler] """Publishes multiple messages to a Pub/Sub topic with an error handler.""" import time @@ -173,7 +173,7 @@ def callback(f): time.sleep(5) print("Published message with error handler.") - # [END pubsub_publish_messages_error_handler] + # [END pubsub_publish_with_error_handler] def publish_messages_with_batch_settings(project_id, topic_id): diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index 8585711f302c..404bcbb77190 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -14,15 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -# [START pubsub_quickstart_pub_all] import argparse import time -# [START pubsub_quickstart_pub_deps] from google.cloud import pubsub_v1 -# [END pubsub_quickstart_pub_deps] - def get_callback(api_future, data, ref): """Wrap message data in the context of the callback function.""" @@ -48,10 +44,8 @@ def callback(api_future): def pub(project_id, topic_id): """Publishes a message to a Pub/Sub topic.""" - # [START pubsub_quickstart_pub_client] # Initialize a Publisher client. 
client = pubsub_v1.PublisherClient() - # [END pubsub_quickstart_pub_client] # Create a fully qualified identifier in the form of # `projects/{project_id}/topics/{topic_id}` topic_path = client.topic_path(project_id, topic_id) @@ -83,4 +77,3 @@ def pub(project_id, topic_id): args = parser.parse_args() pub(args.project_id, args.topic_id) -# [END pubsub_quickstart_pub_all] diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index efe00891593e..492d312f94c3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -14,21 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. -# [START pubsub_quickstart_sub_all] import argparse -# [START pubsub_quickstart_sub_deps] from google.cloud import pubsub_v1 -# [END pubsub_quickstart_sub_deps] - def sub(project_id, subscription_id): """Receives messages from a Pub/Sub subscription.""" - # [START pubsub_quickstart_sub_client] # Initialize a Subscriber client subscriber_client = pubsub_v1.SubscriberClient() - # [END pubsub_quickstart_sub_client] # Create a fully qualified identifier in the form of # `projects/{project_id}/subscriptions/{subscription_id}` subscription_path = subscriber_client.subscription_path(project_id, subscription_id) @@ -66,4 +60,3 @@ def callback(message): args = parser.parse_args() sub(args.project_id, args.subscription_id) -# [END pubsub_quickstart_sub_all] From f4111abee4be2ba3f755f49afb9d1f97b8a32216 Mon Sep 17 00:00:00 2001 From: Ryan Yuan Date: Tue, 29 Sep 2020 07:25:34 +1000 Subject: [PATCH 0594/1197] docs: fix get topic_path in subscriber sample (#210) --- packages/google-cloud-pubsub/docs/subscriber/index.rst | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst 
b/packages/google-cloud-pubsub/docs/subscriber/index.rst index bfd2fff266b5..2c9fd91cefd5 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -36,8 +36,12 @@ to subscribe to, and it must already exist. Once you have that, it is easy: # Substitute PROJECT, SUBSCRIPTION, and TOPIC with appropriate values for # your application. + + # from google.cloud import pubsub + # publisher = pubsub.PublisherClient() + + topic_path = publisher.topic_path(PROJECT, TOPIC) sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) - topic_path = subscriber.topic_path(PROJECT, TOPIC) subscriber.create_subscription(request={"name": sub_path, "topic": topic_path}) Once you have created a subscription (or if you already had one), the next From 99d58ee5ef8f3cb004b63182e994ffa9d92ca6b4 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Fri, 2 Oct 2020 12:46:02 -0400 Subject: [PATCH 0595/1197] chore: bump google-cloud-pubsub version for sample in requirement.txt file (#216) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 42ab449b1ba1..c5daf4f3985e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==1.6.1 +google-cloud-pubsub==2.0.0 From eba593c45c59afcdd22ee28fa99cda172320cf14 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 12 Oct 2020 09:51:52 -0700 Subject: [PATCH 0596/1197] samples: clean up tests (#221) * fix: increase timeout for tests * lint * clean up tests and rewrote sync pull * a few nits --- .../samples/snippets/README.rst | 46 ++--- .../samples/snippets/iam.py | 38 ---- 
.../samples/snippets/iam_test.py | 76 +++----- .../samples/snippets/noxfile.py | 26 ++- .../samples/snippets/publisher.py | 83 +++++--- .../samples/snippets/publisher_test.py | 158 +++++++++------- .../samples/snippets/quickstart/pub.py | 37 +--- .../samples/snippets/quickstart/pub_test.py | 56 ------ .../snippets/quickstart/quickstart_test.py | 92 +++++++++ .../samples/snippets/quickstart/sub.py | 17 +- .../samples/snippets/quickstart/sub_test.py | 102 ---------- .../samples/snippets/subscriber.py | 138 +++++--------- .../samples/snippets/subscriber_test.py | 179 ++++++++++-------- 13 files changed, 448 insertions(+), 600 deletions(-) delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py delete mode 100644 packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 699e896e2d9a..5fdfbde0dc06 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -1,4 +1,3 @@ - .. This file is automatically generated. Do not edit this file directly. Google Cloud Pub/Sub Python Samples @@ -16,11 +15,13 @@ This directory contains samples for Google Cloud Pub/Sub. `Google Cloud Pub/Sub` .. _Google Cloud Pub/Sub: https://cloud.google.com/pubsub/docs + + + Setup ------------------------------------------------------------------------------- - Authentication ++++++++++++++ @@ -31,9 +32,6 @@ credentials for applications. .. _Authentication Getting Started Guide: https://cloud.google.com/docs/authentication/getting-started - - - Install Dependencies ++++++++++++++++++++ @@ -64,15 +62,9 @@ Install Dependencies .. _pip: https://pip.pypa.io/ .. 
_virtualenv: https://virtualenv.pypa.io/ - - - - - Samples ------------------------------------------------------------------------------- - Quickstart (Publisher) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -89,8 +81,6 @@ To run this sample: $ python quickstart/pub.py - - Quickstart (Subscriber) +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -107,8 +97,6 @@ To run this sample: $ python quickstart/sub.py - - Publisher +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -124,10 +112,9 @@ To run this sample: $ python publisher.py - usage: publisher.py [-h] project_id - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys,detach-subscription} ... This application demonstrates how to perform basic operations on topics @@ -138,7 +125,7 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys} + {list,create,delete,publish,publish-with-custom-attributes,publish-with-error-handler,publish-with-batch-settings,publish-with-retry-settings,publish-with-ordering-keys,resume-publish-with-ordering-keys,detach-subscription} list Lists all Pub/Sub topics in the given project. create Create a new Pub/Sub topic. delete Deletes an existing Pub/Sub topic. 
@@ -159,14 +146,15 @@ To run this sample: resume-publish-with-ordering-keys Resume publishing messages with ordering keys when unrecoverable errors occur. + detach-subscription + Detaches a subscription from a topic and drops all + messages retained in it. optional arguments: -h, --help show this help message and exit - - Subscribers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -182,7 +170,6 @@ To run this sample: $ python subscriber.py - usage: subscriber.py [-h] project_id {list-in-topic,list-in-project,create,create-with-dead-letter-policy,create-push,create-with-ordering,delete,update-push,update-dead-letter-policy,remove-dead-letter-policy,receive,receive-custom-attributes,receive-flow-control,receive-synchronously,receive-synchronously-with-lease,listen-for-errors,receive-messages-with-delivery-attempts} @@ -232,8 +219,6 @@ To run this sample: - - Identity and Access Management +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ @@ -249,10 +234,9 @@ To run this sample: $ python iam.py - usage: iam.py [-h] project_id - {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions,detach-subscription} + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} ... This application demonstrates how to perform basic operations on IAM @@ -263,7 +247,7 @@ To run this sample: positional arguments: project_id Your Google Cloud project ID - {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions,detach-subscription} + {get-topic-policy,get-subscription-policy,set-topic-policy,set-subscription-policy,check-topic-permissions,check-subscription-permissions} get-topic-policy Prints the IAM policy for the given topic. 
get-subscription-policy Prints the IAM policy for the given subscription. @@ -276,9 +260,6 @@ To run this sample: check-subscription-permissions Checks to which permissions are available on the given subscription. - detach-subscription - Detaches a subscription from a topic and drops all - messages retained in it. optional arguments: -h, --help show this help message and exit @@ -287,10 +268,6 @@ To run this sample: - - - - The client library ------------------------------------------------------------------------------- @@ -306,5 +283,4 @@ to `browse the source`_ and `report issues`_. https://github.com/GoogleCloudPlatform/google-cloud-python/issues - -.. _Google Cloud SDK: https://cloud.google.com/sdk/ +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index ce443403f740..2cc134ca5da5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -181,37 +181,6 @@ def check_subscription_permissions(project_id, subscription_id): # [END pubsub_test_subscription_permissions] -def detach_subscription(project_id, subscription_id): - """Detaches a subscription from a topic and drops all messages retained in it.""" - # [START pubsub_detach_subscription] - from google.api_core.exceptions import GoogleAPICallError, RetryError - from google.cloud import pubsub_v1 - - # TODO(developer): Choose an existing subscription. 
- # project_id = "your-project-id" - # subscription_id = "your-subscription-id" - - publisher_client = pubsub_v1.PublisherClient() - subscriber_client = pubsub_v1.SubscriberClient() - subscription_path = subscriber_client.subscription_path(project_id, subscription_id) - - try: - publisher_client.detach_subscription( - request={"subscription": subscription_path} - ) - except (GoogleAPICallError, RetryError, ValueError, Exception) as err: - print(err) - - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - if subscription.detached: - print("Subscription is detached.") - else: - print("Subscription is NOT detached.") - # [END pubsub_detach_subscription] - - if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, @@ -250,11 +219,6 @@ def detach_subscription(project_id, subscription_id): ) check_subscription_permissions_parser.add_argument("subscription_id") - detach_subscription_parser = subparsers.add_parser( - "detach-subscription", help=detach_subscription.__doc__, - ) - detach_subscription_parser.add_argument("subscription_id") - args = parser.parse_args() if args.command == "get-topic-policy": @@ -269,5 +233,3 @@ def detach_subscription(project_id, subscription_id): check_topic_permissions(args.project_id, args.topic_id) elif args.command == "check-subscription-permissions": check_subscription_permissions(args.project_id, args.subscription_id) - elif args.command == "detach-subscription": - detach_subscription(args.project_id, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 033b6ccc7b41..a2deb208901e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -15,6 +15,7 @@ import os import uuid +from google.api_core.exceptions import NotFound from 
google.cloud import pubsub_v1 import pytest @@ -32,19 +33,20 @@ def publisher_client(): @pytest.fixture(scope="module") -def topic(publisher_client): +def topic_path(publisher_client): topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: - publisher_client.delete_topic(request={"topic": topic_path}) - except Exception: - pass - - publisher_client.create_topic(request={"name": topic_path}) + topic = publisher_client.get_topic(request={"topic": topic_path}) + except NotFound: + topic = publisher_client.create_topic(request={"name": topic_path}) - yield topic_path + yield topic.name - publisher_client.delete_topic(request={"topic": topic_path}) + try: + publisher_client.delete_topic(request={"topic": topic.name}) + except NotFound: + pass @pytest.fixture(scope="module") @@ -54,77 +56,57 @@ def subscriber_client(): subscriber_client.close() -@pytest.fixture -def subscription(subscriber_client, topic): +@pytest.fixture(scope="module") +def subscription_path(subscriber_client, topic_path): subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + yield subscription.name try: subscriber_client.delete_subscription( request={"subscription": subscription_path} ) - except Exception: + except NotFound: pass - subscriber_client.create_subscription( - request={"name": subscription_path, "topic": topic} - ) - - yield subscription_path - - subscriber_client.delete_subscription(request={"subscription": subscription_path}) - -def test_get_topic_policy(topic, capsys): +def test_get_topic_policy(topic_path, capsys): iam.get_topic_policy(PROJECT_ID, TOPIC_ID) - out, _ = capsys.readouterr() - assert topic in out + assert topic_path in out -def test_get_subscription_policy(subscription, capsys): +def test_get_subscription_policy(subscription_path, capsys): iam.get_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) - out, _ = 
capsys.readouterr() - assert subscription in out + assert subscription_path in out -def test_set_topic_policy(publisher_client, topic): +def test_set_topic_policy(publisher_client, topic_path): iam.set_topic_policy(PROJECT_ID, TOPIC_ID) - - policy = publisher_client.get_iam_policy(request={"resource": topic}) + policy = publisher_client.get_iam_policy(request={"resource": topic_path}) assert "roles/pubsub.publisher" in str(policy) assert "allUsers" in str(policy) -def test_set_subscription_policy(subscriber_client, subscription): +def test_set_subscription_policy(subscriber_client, subscription_path): iam.set_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) - - policy = subscriber_client.get_iam_policy(request={"resource": subscription}) + policy = subscriber_client.get_iam_policy(request={"resource": subscription_path}) assert "roles/pubsub.viewer" in str(policy) assert "allUsers" in str(policy) -def test_check_topic_permissions(topic, capsys): +def test_check_topic_permissions(topic_path, capsys): iam.check_topic_permissions(PROJECT_ID, TOPIC_ID) - out, _ = capsys.readouterr() - - assert topic in out + assert topic_path in out assert "pubsub.topics.publish" in out -def test_check_subscription_permissions(subscription, capsys): +def test_check_subscription_permissions(subscription_path, capsys): iam.check_subscription_permissions(PROJECT_ID, SUBSCRIPTION_ID) - out, _ = capsys.readouterr() - - assert subscription in out + assert subscription_path in out assert "pubsub.subscriptions.consume" in out - - -def test_detach_subscription(subscription, capsys): - iam.detach_subscription(PROJECT_ID, SUBSCRIPTION_ID) - - out, _ = capsys.readouterr() - - assert "Subscription is detached." 
in out diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index ba55d7ce53ca..5660f08be441 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -37,24 +37,22 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -69,12 +67,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -83,7 +81,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -138,7 +136,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) @@ -182,9 +180,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 78dff307dda3..accf71dcac68 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -55,7 +55,7 @@ def create_topic(project_id, topic_id): topic = publisher.create_topic(request={"name": topic_path}) - print("Topic created: {}".format(topic)) + print("Created topic: {}".format(topic.name)) # [END pubsub_quickstart_create_topic] # [END pubsub_create_topic] @@ -101,7 +101,7 @@ def publish_messages(project_id, topic_id): future = publisher.publish(topic_path, data) print(future.result()) - print("Published messages.") + print(f"Published messages to {topic_path}.") # [END pubsub_quickstart_publisher] # [END pubsub_publish] @@ -129,7 +129,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): ) print(future.result()) - print("Published messages with custom attributes.") + print(f"Published messages with custom attributes to {topic_path}.") # [END pubsub_publish_custom_attributes] @@ -172,7 +172,7 @@ def callback(f): while futures: time.sleep(5) - print("Published message with error handler.") + print(f"Published messages with error handler to {topic_path}.") # [END pubsub_publish_with_error_handler] 
@@ -208,7 +208,7 @@ def callback(future): # Non-blocking. Allow the publisher client to batch multiple messages. future.add_done_callback(callback) - print("Published messages with batch settings.") + print(f"Published messages with batch settings to {topic_path}.") # [END pubsub_publisher_batch_settings] @@ -249,7 +249,7 @@ def publish_messages_with_retry_settings(project_id, topic_id): future = publisher.publish(topic=topic_path, data=data, retry=custom_retry) print(future.result()) - print("Published messages with retry settings.") + print(f"Published messages with retry settings to {topic_path}.") # [END pubsub_publisher_retry_settings] @@ -262,15 +262,12 @@ def publish_with_ordering_keys(project_id, topic_id): # project_id = "your-project-id" # topic_id = "your-topic-id" - publisher_options = pubsub_v1.types.PublisherOptions( - enable_message_ordering=True - ) + publisher_options = pubsub_v1.types.PublisherOptions(enable_message_ordering=True) # Sending messages to the same region ensures they are received in order # even when multiple publishers are used. client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} publisher = pubsub_v1.PublisherClient( - publisher_options=publisher_options, - client_options=client_options + publisher_options=publisher_options, client_options=client_options ) # The `topic_path` method creates a fully qualified identifier # in the form `projects/{project_id}/topics/{topic_id}` @@ -286,12 +283,10 @@ def publish_with_ordering_keys(project_id, topic_id): data = message[0].encode("utf-8") ordering_key = message[1] # When you publish a message, the client returns a future. 
- future = publisher.publish( - topic_path, data=data, ordering_key=ordering_key - ) + future = publisher.publish(topic_path, data=data, ordering_key=ordering_key) print(future.result()) - print("Published messages with ordering keys.") + print(f"Published messages with ordering keys to {topic_path}.") # [END pubsub_publish_with_ordering_keys] @@ -304,15 +299,12 @@ def resume_publish_with_ordering_keys(project_id, topic_id): # project_id = "your-project-id" # topic_id = "your-topic-id" - publisher_options = pubsub_v1.types.PublisherOptions( - enable_message_ordering=True - ) + publisher_options = pubsub_v1.types.PublisherOptions(enable_message_ordering=True) # Sending messages to the same region ensures they are received in order # even when multiple publishers are used. client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} publisher = pubsub_v1.PublisherClient( - publisher_options=publisher_options, - client_options=client_options + publisher_options=publisher_options, client_options=client_options ) # The `topic_path` method creates a fully qualified identifier # in the form `projects/{project_id}/topics/{topic_id}` @@ -328,19 +320,48 @@ def resume_publish_with_ordering_keys(project_id, topic_id): data = message[0].encode("utf-8") ordering_key = message[1] # When you publish a message, the client returns a future. - future = publisher.publish( - topic_path, data=data, ordering_key=ordering_key - ) + future = publisher.publish(topic_path, data=data, ordering_key=ordering_key) try: print(future.result()) except RuntimeError: # Resume publish on an ordering key that has had unrecoverable errors. 
publisher.resume_publish(topic_path, ordering_key) - print("Published messages with ordering keys.") + print(f"Resumed publishing messages with ordering keys to {topic_path}.") # [END pubsub_resume_publish_with_ordering_keys] +def detach_subscription(project_id, subscription_id): + """Detaches a subscription from a topic and drops all messages retained in it.""" + # [START pubsub_detach_subscription] + from google.api_core.exceptions import GoogleAPICallError, RetryError + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing subscription. + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + + publisher_client = pubsub_v1.PublisherClient() + subscriber_client = pubsub_v1.SubscriberClient() + subscription_path = subscriber_client.subscription_path(project_id, subscription_id) + + try: + publisher_client.detach_subscription( + request={"subscription": subscription_path} + ) + except (GoogleAPICallError, RetryError, ValueError, Exception) as err: + print(err) + + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) + if subscription.detached: + print(f"{subscription_path} is detached.") + else: + print(f"{subscription_path} is NOT detached.") + # [END pubsub_detach_subscription] + + if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, @@ -356,9 +377,7 @@ def resume_publish_with_ordering_keys(project_id, topic_id): delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_id") - publish_parser = subparsers.add_parser( - "publish", help=publish_messages.__doc__ - ) + publish_parser = subparsers.add_parser("publish", help=publish_messages.__doc__) publish_parser.add_argument("topic_id") publish_with_custom_attributes_parser = subparsers.add_parser( @@ -368,8 +387,7 @@ def resume_publish_with_ordering_keys(project_id, topic_id): 
publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", - help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_id") @@ -396,6 +414,11 @@ def resume_publish_with_ordering_keys(project_id, topic_id): ) resume_publish_with_ordering_keys_parser.add_argument("topic_id") + detach_subscription_parser = subparsers.add_parser( + "detach-subscription", help=detach_subscription.__doc__, + ) + detach_subscription_parser.add_argument("subscription_id") + args = parser.parse_args() if args.command == "list": @@ -418,3 +441,5 @@ def resume_publish_with_ordering_keys(project_id, topic_id): publish_with_ordering_keys(args.project_id, args.topic_id) elif args.command == "resume-publish-with-ordering-keys": resume_publish_with_ordering_keys(args.project_id, args.topic_id) + elif args.command == "detach-subscription": + detach_subscription(args.project_id, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index c95ea7ed57fe..cd81fcaf1bd7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -17,6 +17,7 @@ import uuid import backoff +from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 import mock import pytest @@ -24,41 +25,57 @@ import publisher UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC_ADMIN = "publisher-test-topic-admin-" + UUID -TOPIC_PUBLISH = "publisher-test-topic-publish-" + UUID +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ID = "publisher-test-topic-" + UUID +SUBSCRIPTION_ID = "publisher-test-subscription-" + UUID +# Allow 60s for tests to finish. 
+MAX_TIME = 60 -@pytest.fixture -def client(): +@pytest.fixture(scope="module") +def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture -def topic_admin(client): - topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) +@pytest.fixture(scope="module") +def subscriber_client(): + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + # Close the subscriber client properly during teardown. + subscriber_client.close() - try: - topic = client.get_topic(request={"topic": topic_path}) - except: # noqa - topic = client.create_topic(request={"name": topic_path}) - yield topic.name - # Teardown of `topic_admin` is handled in `test_delete()`. +@pytest.fixture(scope="module") +def topic_path(publisher_client): + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + try: + topic = publisher_client.get_topic(request={"topic": topic_path}) + except NotFound: + topic = publisher_client.create_topic(request={"name": topic_path}) -@pytest.fixture -def topic_publish(client): - topic_path = client.topic_path(PROJECT, TOPIC_PUBLISH) + yield topic.name try: - topic = client.get_topic(request={"topic": topic_path}) - except: # noqa - topic = client.create_topic(request={"name": topic_path}) + publisher_client.delete_topic(request={"topic": topic.name}) + except NotFound: + pass - yield topic.name - client.delete_topic(request={"topic": topic.name}) +@pytest.fixture(scope="module") +def subscription_path(subscriber_client, topic_path): + subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + yield subscription.name + + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass def _make_sleep_patch(): @@ -74,87 +91,92 @@ def new_sleep(period): return mock.patch("time.sleep", new=new_sleep) -def test_list(client, topic_admin, 
capsys): - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - publisher.list_topics(PROJECT) - out, _ = capsys.readouterr() - assert topic_admin in out - - eventually_consistent_test() +def test_create(publisher_client, capsys): + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) - -def test_create(client): - topic_path = client.topic_path(PROJECT, TOPIC_ADMIN) try: - client.delete_topic(request={"topic": topic_path}) - except Exception: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: pass - publisher.create_topic(PROJECT, TOPIC_ADMIN) + publisher.create_topic(PROJECT_ID, TOPIC_ID) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert client.get_topic(request={"topic": topic_path}) + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path}" in out - eventually_consistent_test() +def test_list(topic_path, capsys): + publisher.list_topics(PROJECT_ID) + out, _ = capsys.readouterr() -def test_delete(client, topic_admin): - publisher.delete_topic(PROJECT, TOPIC_ADMIN) + assert topic_path in out - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - with pytest.raises(Exception): - client.get_topic(request={"topic": client.topic_path(PROJECT, TOPIC_ADMIN)}) - eventually_consistent_test() +def test_publish(topic_path, capsys): + publisher.publish_messages(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Published messages to {topic_path}." 
in out -def test_publish(topic_publish, capsys): - publisher.publish_messages(PROJECT, TOPIC_PUBLISH) +def test_publish_with_custom_attributes(topic_path, capsys): + publisher.publish_messages_with_custom_attributes(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with custom attributes to {topic_path}." in out -def test_publish_with_custom_attributes(topic_publish, capsys): - publisher.publish_messages_with_custom_attributes(PROJECT, TOPIC_PUBLISH) +def test_publish_with_batch_settings(topic_path, capsys): + publisher.publish_messages_with_batch_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with batch settings to {topic_path}." in out -def test_publish_with_batch_settings(topic_publish, capsys): - publisher.publish_messages_with_batch_settings(PROJECT, TOPIC_PUBLISH) +def test_publish_with_retry_settings(topic_path, capsys): + publisher.publish_messages_with_retry_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with retry settings to {topic_path}." in out -def test_publish_with_retry_settings(topic_publish, capsys): - publisher.publish_messages_with_retry_settings(PROJECT, TOPIC_PUBLISH) +def test_publish_with_error_handler(topic_path, capsys): + publisher.publish_messages_with_error_handler(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with error handler to {topic_path}." in out -def test_publish_with_error_handler(topic_publish, capsys): - publisher.publish_messages_with_error_handler(PROJECT, TOPIC_PUBLISH) +def test_publish_with_ordering_keys(topic_path, capsys): + publisher.publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published" in out + assert f"Published messages with ordering keys to {topic_path}." 
in out -def test_publish_with_ordering_keys(topic_publish, capsys): - publisher.publish_with_ordering_keys(PROJECT, TOPIC_PUBLISH) +def test_resume_publish_with_error_handler(topic_path, capsys): + publisher.resume_publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() - assert "Published messages with ordering keys." in out + assert f"Resumed publishing messages with ordering keys to {topic_path}." in out -def test_resume_publish_with_error_handler(topic_publish, capsys): - publisher.resume_publish_with_ordering_keys(PROJECT, TOPIC_PUBLISH) +def test_detach_subscription(subscription_path, capsys): + publisher.detach_subscription(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() - assert "Published messages with ordering keys." in out + assert f"{subscription_path} is detached." in out + + +def test_delete(publisher_client): + publisher.delete_topic(PROJECT_ID, TOPIC_ID) + + @backoff.on_exception(backoff.expo, AssertionError, max_time=MAX_TIME) + def eventually_consistent_test(): + with pytest.raises(Exception): + publisher_client.get_topic( + request={"topic": publisher_client.topic_path(PROJECT_ID, TOPIC_ID)} + ) + + eventually_consistent_test() diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index 404bcbb77190..1e4868cb3bd2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -15,56 +15,25 @@ # limitations under the License. 
import argparse -import time from google.cloud import pubsub_v1 -def get_callback(api_future, data, ref): - """Wrap message data in the context of the callback function.""" - - def callback(api_future): - try: - print( - "Published message {} now has message ID {}".format( - data, api_future.result() - ) - ) - ref["num_messages"] += 1 - except Exception: - print( - "A problem occurred when publishing {}: {}\n".format( - data, api_future.exception() - ) - ) - raise - - return callback - - def pub(project_id, topic_id): """Publishes a message to a Pub/Sub topic.""" # Initialize a Publisher client. client = pubsub_v1.PublisherClient() - # Create a fully qualified identifier in the form of - # `projects/{project_id}/topics/{topic_id}` + # Create a fully qualified identifier of form `projects/{project_id}/topics/{topic_id}` topic_path = client.topic_path(project_id, topic_id) # Data sent to Cloud Pub/Sub must be a bytestring. data = b"Hello, World!" - # Keep track of the number of published messages. - ref = dict({"num_messages": 0}) - # When you publish a message, the client returns a future. api_future = client.publish(topic_path, data) - api_future.add_done_callback(get_callback(api_future, data, ref)) + message_id = api_future.result() - # Keep the main thread from exiting while the message future - # gets resolved in the background. 
- while api_future.running(): - time.sleep(0.5) - print("Published {} message(s).".format(ref["num_messages"])) + print(f"Published {data} to {topic_path}: {message_id}") if __name__ == "__main__": diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py deleted file mode 100644 index 0be087bd2b98..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub_test.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import os -import uuid - -from google.api_core.exceptions import AlreadyExists -from google.cloud import pubsub_v1 -import pytest - -import pub # noqa - - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "quickstart-pub-test-topic-" + UUID - - -@pytest.fixture(scope="module") -def publisher_client(): - yield pubsub_v1.PublisherClient() - - -@pytest.fixture(scope="module") -def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - publisher_client.create_topic(request={"name": topic_path}) - except AlreadyExists: - pass - - yield TOPIC - - publisher_client.delete_topic(request={"topic": topic_path}) - - -def test_pub(publisher_client, topic, capsys): - pub.pub(PROJECT, topic) - - out, _ = capsys.readouterr() - - assert "Hello, World!" 
in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py new file mode 100644 index 000000000000..700f57d71b08 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python + +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import uuid + +from google.api_core.exceptions import AlreadyExists +from google.cloud import pubsub_v1 +import pytest + + +UUID = uuid.uuid4().hex +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +TOPIC_ID = "quickstart-sub-test-topic-" + UUID +SUBSCRIPTION_ID = "quickstart-sub-test-topic-sub-" + UUID + + +@pytest.fixture(scope="module") +def publisher_client(): + yield pubsub_v1.PublisherClient() + + +@pytest.fixture(scope="module") +def subscriber_client(): + subscriber_client = pubsub_v1.SubscriberClient() + yield subscriber_client + subscriber_client.close() + + +@pytest.fixture(scope="module") +def topic_path(publisher_client): + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + try: + topic = publisher_client.create_topic(request={"name": topic_path}) + yield topic.name + except AlreadyExists: + yield topic_path + + publisher_client.delete_topic(request={"topic": topic_path}) + + +@pytest.fixture(scope="module") +def subscription_path(subscriber_client, topic_path): + subscription_path = 
subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) + + try: + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + yield subscription.name + except AlreadyExists: + yield subscription_path + + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + subscriber_client.close() + + +def test_pub(topic_path, capsys): + import pub + + pub.pub(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert topic_path in out + assert "Hello, World!" in out + + +def test_sub(publisher_client, topic_path, subscription_path, capsys): + publisher_client.publish(topic_path, b"Hello World!") + + import sub + + sub.sub(PROJECT_ID, SUBSCRIPTION_ID, 10) + + out, _ = capsys.readouterr() + assert f"Listening for messages on {subscription_path}" in out + assert "Received" in out + assert "Acknowledged" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 492d312f94c3..0a7576e23bc8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -19,7 +19,7 @@ from google.cloud import pubsub_v1 -def sub(project_id, subscription_id): +def sub(project_id, subscription_id, timeout=None): """Receives messages from a Pub/Sub subscription.""" # Initialize a Subscriber client subscriber_client = pubsub_v1.SubscriberClient() @@ -28,22 +28,20 @@ def sub(project_id, subscription_id): subscription_path = subscriber_client.subscription_path(project_id, subscription_id) def callback(message): - print( - "Received message {} of message ID {}\n".format(message, message.message_id) - ) + print(f"Received {message}.") # Acknowledge the message. Unack'ed messages will be redelivered. 
message.ack() - print("Acknowledged message {}\n".format(message.message_id)) + print(f"Acknowledged {message.message_id}.") streaming_pull_future = subscriber_client.subscribe( subscription_path, callback=callback ) - print("Listening for messages on {}..\n".format(subscription_path)) + print(f"Listening for messages on {subscription_path}..\n") try: # Calling result() on StreamingPullFuture keeps the main thread from # exiting while messages get processed in the callbacks. - streaming_pull_future.result() + streaming_pull_future.result(timeout=timeout) except: # noqa streaming_pull_future.cancel() @@ -56,7 +54,10 @@ def callback(message): ) parser.add_argument("project_id", help="Google Cloud project ID") parser.add_argument("subscription_id", help="Pub/Sub subscription ID") + parser.add_argument( + "timeout", default=None, nargs="?", const=1, help="Pub/Sub subscription ID" + ) args = parser.parse_args() - sub(args.project_id, args.subscription_id) + sub(args.project_id, args.subscription_id, args.timeout) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py deleted file mode 100644 index 089705af6397..000000000000 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub_test.py +++ /dev/null @@ -1,102 +0,0 @@ -#!/usr/bin/env python - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-import os -import uuid - -from google.api_core.exceptions import AlreadyExists -from google.cloud import pubsub_v1 -import mock -import pytest - -import sub # noqa - - -UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "quickstart-sub-test-topic-" + UUID -SUBSCRIPTION = "quickstart-sub-test-topic-sub-" + UUID - -publisher_client = pubsub_v1.PublisherClient() -subscriber_client = pubsub_v1.SubscriberClient() - - -@pytest.fixture(scope="module") -def topic_path(): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) - - try: - topic = publisher_client.create_topic(request={"name": topic_path}) - yield topic.name - except AlreadyExists: - yield topic_path - - publisher_client.delete_topic(request={"topic": topic_path}) - - -@pytest.fixture(scope="module") -def subscription_path(topic_path): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION) - - try: - subscription = subscriber_client.create_subscription( - request={"name": subscription_path, "topic": topic_path} - ) - yield subscription.name - except AlreadyExists: - yield subscription_path - - subscriber_client.delete_subscription(request={"subscription": subscription_path}) - subscriber_client.close() - - -def _publish_messages(topic_path): - publish_future = publisher_client.publish(topic_path, b"Hello World!") - publish_future.result() - - -def test_sub(monkeypatch, topic_path, subscription_path, capsys): - - real_client = pubsub_v1.SubscriberClient() - mock_client = mock.Mock(spec=pubsub_v1.SubscriberClient, wraps=real_client) - - # Attributes on mock_client_constructor uses the corresponding - # attributes on pubsub_v1.SubscriberClient. 
- mock_client_constructor = mock.create_autospec(pubsub_v1.SubscriberClient) - mock_client_constructor.return_value = mock_client - - monkeypatch.setattr(pubsub_v1, "SubscriberClient", mock_client_constructor) - - def mock_subscribe(subscription_path, callback=None): - real_future = real_client.subscribe(subscription_path, callback=callback) - mock_future = mock.Mock(spec=real_future, wraps=real_future) - - def mock_result(): - return real_future.result(timeout=10) - - mock_future.result.side_effect = mock_result - return mock_future - - mock_client.subscribe.side_effect = mock_subscribe - - _publish_messages(topic_path) - - sub.sub(PROJECT, SUBSCRIPTION) - - out, _ = capsys.readouterr() - assert "Received message" in out - assert "Acknowledged message" in out - - real_client.close() diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 39a05ef37c3c..936b12476290 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -85,7 +85,7 @@ def create_subscription(project_id, topic_id, subscription_id): request={"name": subscription_path, "topic": topic_path} ) - print("Subscription created: {}".format(subscription)) + print(f"Subscription created: {subscription}") # [END pubsub_create_pull_subscription] @@ -128,16 +128,12 @@ def create_subscription_with_dead_letter_topic( } subscription = subscriber.create_subscription(request) - print("Subscription created: {}".format(subscription.name)) + print(f"Subscription created: {subscription.name}") print( - "It will forward dead letter messages to: {}".format( - subscription.dead_letter_policy.dead_letter_topic - ) + f"It will forward dead letter messages to: {subscription.dead_letter_policy.dead_letter_topic}." 
) print( - "After {} delivery attempts.".format( - subscription.dead_letter_policy.max_delivery_attempts - ) + f"After {subscription.dead_letter_policy.max_delivery_attempts} delivery attempts." ) # [END pubsub_dead_letter_create_subscription] @@ -171,8 +167,8 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): } ) - print("Push subscription created: {}".format(subscription)) - print("Endpoint for subscription is: {}".format(endpoint)) + print(f"Push subscription created: {subscription}.") + print(f"Endpoint for subscription is: {endpoint}") # [END pubsub_create_push_subscription] @@ -199,7 +195,7 @@ def create_subscription_with_ordering(project_id, topic_id, subscription_id): "enable_message_ordering": True, } ) - print("Created subscription with ordering: {}".format(subscription)) + print(f"Created subscription with ordering: {subscription}") # [END pubsub_enable_subscription_ordering] @@ -220,7 +216,7 @@ def delete_subscription(project_id, subscription_id): with subscriber: subscriber.delete_subscription(request={"subscription": subscription_path}) - print("Subscription deleted: {}".format(subscription_path)) + print(f"Subscription deleted: {subscription_path}.") # [END pubsub_delete_subscription] @@ -257,8 +253,8 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): request={"subscription": subscription, "update_mask": update_mask} ) - print("Subscription updated: {}".format(subscription_path)) - print("New endpoint for subscription is: {}".format(result.push_config)) + print(f"Subscription updated: {subscription_path}") + print(f"New endpoint for subscription is: {result.push_config}.") # [END pubsub_update_push_configuration] @@ -291,7 +287,7 @@ def update_subscription_with_dead_letter_policy( subscription_before_update = subscriber.get_subscription( request={"subscription": subscription_path} ) - print("Before the update: {}".format(subscription_before_update)) + print(f"Before the update: 
{subscription_before_update}.") # Indicates which fields in the provided subscription to update. update_mask = FieldMask(paths=["dead_letter_policy.max_delivery_attempts"]) @@ -313,7 +309,7 @@ def update_subscription_with_dead_letter_policy( request={"subscription": subscription, "update_mask": update_mask} ) - print("After the update: {}".format(subscription_after_update)) + print(f"After the update: {subscription_after_update}.") # [END pubsub_dead_letter_update_subscription] return subscription_after_update @@ -340,7 +336,7 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): subscription_before_update = subscriber.get_subscription( request={"subscription": subscription_path} ) - print("Before removing the policy: {}".format(subscription_before_update)) + print(f"Before removing the policy: {subscription_before_update}.") # Indicates which fields in the provided subscription to update. update_mask = FieldMask( @@ -361,7 +357,7 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): request={"subscription": subscription, "update_mask": update_mask} ) - print("After removing the policy: {}".format(subscription_after_update)) + print(f"After removing the policy: {subscription_after_update}.") # [END pubsub_dead_letter_remove] return subscription_after_update @@ -385,11 +381,11 @@ def receive_messages(project_id, subscription_id, timeout=None): subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): - print("Received message: {}".format(message)) + print(f"Received {message}.") message.ack() streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) + print(f"Listening for messages on {subscription_path}..\n") # Wrap subscriber in a 'with' block to automatically call close() when done. 
with subscriber: @@ -419,16 +415,16 @@ def receive_messages_with_custom_attributes(project_id, subscription_id, timeout subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): - print("Received message: {}".format(message.data)) + print(f"Received {message.data}.") if message.attributes: print("Attributes:") for key in message.attributes: value = message.attributes.get(key) - print("{}: {}".format(key, value)) + print(f"{key}: {value}") message.ack() streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) + print(f"Listening for messages on {subscription_path}..\n") # Wrap subscriber in a 'with' block to automatically call close() when done. with subscriber: @@ -457,7 +453,7 @@ def receive_messages_with_flow_control(project_id, subscription_id, timeout=None subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): - print("Received message: {}".format(message.data)) + print(f"Received {message.data}.") message.ack() # Limit the subscriber to only have ten outstanding messages at a time. @@ -466,7 +462,7 @@ def callback(message): streaming_pull_future = subscriber.subscribe( subscription_path, callback=callback, flow_control=flow_control ) - print("Listening for messages on {}..\n".format(subscription_path)) + print(f"Listening for messages on {subscription_path}..\n") # Wrap subscriber in a 'with' block to automatically call close() when done. with subscriber: @@ -503,7 +499,7 @@ def synchronous_pull(project_id, subscription_id): ack_ids = [] for received_message in response.received_messages: - print("Received: {}".format(received_message.message.data)) + print(f"Received: {received_message.message.data}.") ack_ids.append(received_message.ack_id) # Acknowledges the received messages so they will not be sent again. 
@@ -512,9 +508,7 @@ def synchronous_pull(project_id, subscription_id): ) print( - "Received and acknowledged {} messages. Done.".format( - len(response.received_messages) - ) + f"Received and acknowledged {len(response.received_messages)} messages from {subscription_path}." ) # [END pubsub_subscriber_sync_pull] @@ -524,11 +518,16 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): # [START pubsub_subscriber_sync_pull_with_lease] import logging import multiprocessing - import random + import sys import time from google.cloud import pubsub_v1 + multiprocessing.log_to_stderr() + logger = multiprocessing.get_logger() + logger.setLevel(logging.INFO) + processes = dict() + # TODO(developer) # project_id = "your-project-id" # subscription_id = "your-subscription-id" @@ -536,77 +535,46 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - NUM_MESSAGES = 2 - ACK_DEADLINE = 30 - SLEEP_TIME = 10 - - # The subscriber pulls a specific number of messages. response = subscriber.pull( - request={"subscription": subscription_path, "max_messages": NUM_MESSAGES} + request={"subscription": subscription_path, "max_messages": 3} ) - multiprocessing.log_to_stderr() - logger = multiprocessing.get_logger() - logger.setLevel(logging.INFO) - - def worker(msg): - """Simulates a long-running process.""" - RUN_TIME = random.randint(1, 60) - logger.info( - "{}: Running {} for {}s".format( - time.strftime("%X", time.gmtime()), msg.message.data, RUN_TIME - ) - ) - time.sleep(RUN_TIME) - - # `processes` stores process as key and ack id and message as values. - processes = dict() + # Start a process for each message based on its size modulo 10. 
for message in response.received_messages: - process = multiprocessing.Process(target=worker, args=(message,)) + process = multiprocessing.Process( + target=time.sleep, args=(sys.getsizeof(message) % 10,) + ) processes[process] = (message.ack_id, message.message.data) process.start() while processes: + # Take a break every second. + if processes: + time.sleep(1) + for process in list(processes): ack_id, msg_data = processes[process] - # If the process is still running, reset the ack deadline as - # specified by ACK_DEADLINE once every while as specified - # by SLEEP_TIME. + # If the process is running, reset the ack deadline. if process.is_alive(): - # `ack_deadline_seconds` must be between 10 to 600. subscriber.modify_ack_deadline( request={ "subscription": subscription_path, "ack_ids": [ack_id], - "ack_deadline_seconds": ACK_DEADLINE, + # Must be between 10 and 600. + "ack_deadline_seconds": 15, } ) - logger.info( - "{}: Reset ack deadline for {} for {}s".format( - time.strftime("%X", time.gmtime()), msg_data, ACK_DEADLINE, - ) - ) + logger.info(f"Reset ack deadline for {msg_data}.") - # If the processs is finished, acknowledges using `ack_id`. + # If the process is complete, acknowledge the message. else: subscriber.acknowledge( request={"subscription": subscription_path, "ack_ids": [ack_id]} ) - logger.info( - "{}: Acknowledged {}".format( - time.strftime("%X", time.gmtime()), msg_data - ) - ) + logger.info(f"Acknowledged {msg_data}.") processes.pop(process) - - # If there are still processes running, sleeps the thread. - if processes: - time.sleep(SLEEP_TIME) - print( - "Received and acknowledged {} messages. Done.".format( - len(response.received_messages) - ) + f"Received and acknowledged {len(response.received_messages)} messages from {subscription_path}." ) # Close the underlying gPRC channel. 
Alternatively, wrap subscriber in @@ -630,11 +598,11 @@ def listen_for_errors(project_id, subscription_id, timeout=None): subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): - print("Received message: {}".format(message)) + print(f"Received {message}.") message.ack() streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) + print(f"Listening for messages on {subscription_path}..\n") # Wrap subscriber in a 'with' block to automatically call close() when done. with subscriber: @@ -644,11 +612,7 @@ def callback(message): streaming_pull_future.result(timeout=timeout) except Exception as e: streaming_pull_future.cancel() - print( - "Listening for messages on {} threw an exception: {}.".format( - subscription_id, e - ) - ) + print(f"Listening for messages on {subscription_path} threw an exception: {e}.") # [END pubsub_subscriber_error_listener] @@ -665,12 +629,12 @@ def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message): - print("Received message: {}".format(message)) - print("With delivery attempts: {}".format(message.delivery_attempt)) + print(f"Received {message}.") + print(f"With delivery attempts: {message.delivery_attempt}.") message.ack() streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) - print("Listening for messages on {}..\n".format(subscription_path)) + print(f"Listening for messages on {subscription_path}..\n") # Wrap subscriber in a 'with' block to automatically call close() when done. 
with subscriber: diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 37b83b877205..5ffeaa5c23a2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -16,22 +16,22 @@ import uuid import backoff +from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 import pytest import subscriber UUID = uuid.uuid4().hex -PROJECT = os.environ["GOOGLE_CLOUD_PROJECT"] +PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = "subscription-test-topic-" + UUID DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID -SUBSCRIPTION_ORDERING = "subscription-test-subscription-ordering-" + UUID -ENDPOINT = "https://{}.appspot.com/push".format(PROJECT) -NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT) +ENDPOINT = "https://{}.appspot.com/push".format(PROJECT_ID) +NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT_ID) @pytest.fixture(scope="module") @@ -41,7 +41,7 @@ def publisher_client(): @pytest.fixture(scope="module") def topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, TOPIC) + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC) try: topic = publisher_client.get_topic(request={"topic": topic_path}) @@ -55,11 +55,11 @@ def topic(publisher_client): @pytest.fixture(scope="module") def dead_letter_topic(publisher_client): - topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) + topic_path = publisher_client.topic_path(PROJECT_ID, DEAD_LETTER_TOPIC) try: dead_letter_topic = publisher_client.get_topic(request={"topic": topic_path}) 
- except: # noqa + except NotFound: dead_letter_topic = publisher_client.create_topic(request={"name": topic_path}) yield dead_letter_topic.name @@ -76,13 +76,15 @@ def subscriber_client(): @pytest.fixture(scope="module") def subscription_admin(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN + ) try: subscription = subscriber_client.get_subscription( request={"subscription": subscription_path} ) - except: # noqa + except NotFound: subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic} ) @@ -92,13 +94,15 @@ def subscription_admin(subscriber_client, topic): @pytest.fixture(scope="module") def subscription_sync(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_SYNC) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_SYNC + ) try: subscription = subscriber_client.get_subscription( request={"subscription": subscription_path} ) - except: # noqa + except NotFound: subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic} ) @@ -110,13 +114,15 @@ def subscription_sync(subscriber_client, topic): @pytest.fixture(scope="module") def subscription_async(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ASYNC) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ASYNC + ) try: subscription = subscriber_client.get_subscription( request={"subscription": subscription_path} ) - except: # noqa + except NotFound: subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic} ) @@ -128,13 +134,15 @@ def subscription_async(subscriber_client, topic): @pytest.fixture(scope="module") def 
subscription_dlq(subscriber_client, topic): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_DLQ + ) try: subscription = subscriber_client.get_subscription( request={"subscription": subscription_path} ) - except: # noqa + except NotFound: subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic} ) @@ -147,7 +155,7 @@ def subscription_dlq(subscriber_client, topic): def test_list_in_topic(subscription_admin, capsys): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - subscriber.list_subscriptions_in_topic(PROJECT, TOPIC) + subscriber.list_subscriptions_in_topic(PROJECT_ID, TOPIC) out, _ = capsys.readouterr() assert subscription_admin in out @@ -157,109 +165,111 @@ def eventually_consistent_test(): def test_list_in_project(subscription_admin, capsys): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): - subscriber.list_subscriptions_in_project(PROJECT) + subscriber.list_subscriptions_in_project(PROJECT_ID) out, _ = capsys.readouterr() assert subscription_admin in out eventually_consistent_test() -def test_create(subscriber_client): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) +def test_create(subscriber_client, subscription_admin, capsys): + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN + ) try: subscriber_client.delete_subscription( request={"subscription": subscription_path} ) - except Exception: + except NotFound: pass - subscriber.create_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN) - - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) + 
subscriber.create_subscription(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert f"{subscription_admin}" in out def test_create_subscription_with_dead_letter_policy( - subscriber_client, publisher_client, topic, dead_letter_topic, capsys + subscriber_client, subscription_dlq, dead_letter_topic, capsys ): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_DLQ) - dead_letter_topic_path = publisher_client.topic_path(PROJECT, DEAD_LETTER_TOPIC) - try: subscriber_client.delete_subscription( - request={"subscription": subscription_path} + request={"subscription": subscription_dlq} ) - except Exception: + except NotFound: pass subscriber.create_subscription_with_dead_letter_topic( - PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC ) out, _ = capsys.readouterr() - assert "Subscription created: " + subscription_path in out - assert "It will forward dead letter messages to: " + dead_letter_topic_path in out + assert f"Subscription created: {subscription_dlq}" in out + assert f"It will forward dead letter messages to: {dead_letter_topic}" in out assert "After 10 delivery attempts." 
in out -def test_create_push(subscriber_client): - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ADMIN) +def test_update_dead_letter_policy(capsys): + _ = subscriber.update_subscription_with_dead_letter_policy( + PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + ) + + out, _ = capsys.readouterr() + assert "max_delivery_attempts: 20" in out + + +def test_create_subscription_with_ordering( + subscriber_client, subscription_admin, capsys +): + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN + ) try: subscriber_client.delete_subscription( request={"subscription": subscription_path} ) - except Exception: + except NotFound: pass - subscriber.create_push_subscription(PROJECT, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) + subscriber.create_subscription_with_ordering(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): - assert subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - - eventually_consistent_test() - - -def test_create_subscription_with_ordering(subscriber_client, capsys): - subscriber.create_subscription_with_ordering(PROJECT, TOPIC, SUBSCRIPTION_ORDERING) out, _ = capsys.readouterr() assert "Created subscription with ordering" in out + assert f"{subscription_admin}" in out assert "enable_message_ordering: true" in out - subscription_path = subscriber_client.subscription_path(PROJECT, SUBSCRIPTION_ORDERING) - subscriber_client.delete_subscription(request={"subscription": subscription_path}) - -def test_update(subscriber_client, subscription_admin, capsys): - subscriber.update_push_subscription( - PROJECT, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT +def test_create_push(subscriber_client, subscription_admin, capsys): + # The scope of `subscription_path` is limited to this function. 
+ subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_push_subscription(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) out, _ = capsys.readouterr() - assert "Subscription updated" in out + assert f"{subscription_admin}" in out -def test_update_dead_letter_policy( - subscriber_client, topic, subscription_dlq, dead_letter_topic, capsys -): - _ = subscriber.update_subscription_with_dead_letter_policy( - PROJECT, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC +def test_update(subscription_admin, capsys): + subscriber.update_push_subscription( + PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT ) out, _ = capsys.readouterr() - assert "max_delivery_attempts: 20" in out + assert "Subscription updated" in out + assert f"{subscription_admin}" in out def test_delete(subscriber_client, subscription_admin): - subscriber.delete_subscription(PROJECT, SUBSCRIPTION_ADMIN) + subscriber.delete_subscription(PROJECT_ID, SUBSCRIPTION_ADMIN) @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): @@ -273,7 +283,7 @@ def eventually_consistent_test(): def _publish_messages(publisher_client, topic, **attrs): for n in range(5): - data = u"message {}".format(n).encode("utf-8") + data = "message {}".format(n).encode("utf-8") publish_future = publisher_client.publish(topic, data, **attrs) publish_future.result() @@ -281,7 +291,7 @@ def _publish_messages(publisher_client, topic, **attrs): def test_receive(publisher_client, topic, subscription_async, capsys): _publish_messages(publisher_client, topic) - subscriber.receive_messages(PROJECT, SUBSCRIPTION_ASYNC, 5) + subscriber.receive_messages(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) out, _ = capsys.readouterr() assert "Listening" in out @@ -295,9 +305,12 @@ def test_receive_with_custom_attributes( 
_publish_messages(publisher_client, topic, origin="python-sample") - subscriber.receive_messages_with_custom_attributes(PROJECT, SUBSCRIPTION_ASYNC, 5) + subscriber.receive_messages_with_custom_attributes( + PROJECT_ID, SUBSCRIPTION_ASYNC, 5 + ) out, _ = capsys.readouterr() + assert subscription_async in out assert "message" in out assert "origin" in out assert "python-sample" in out @@ -307,7 +320,7 @@ def test_receive_with_flow_control(publisher_client, topic, subscription_async, _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_flow_control(PROJECT, SUBSCRIPTION_ASYNC, 5) + subscriber.receive_messages_with_flow_control(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) out, _ = capsys.readouterr() assert "Listening" in out @@ -318,10 +331,12 @@ def test_receive_with_flow_control(publisher_client, topic, subscription_async, def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): _publish_messages(publisher_client, topic) - subscriber.synchronous_pull(PROJECT, SUBSCRIPTION_SYNC) + subscriber.synchronous_pull(PROJECT_ID, SUBSCRIPTION_SYNC) out, _ = capsys.readouterr() - assert "Done." in out + + assert "Received" in out + assert f"{subscription_sync}" in out def test_receive_synchronously_with_lease( @@ -329,42 +344,42 @@ def test_receive_synchronously_with_lease( ): _publish_messages(publisher_client, topic) - subscriber.synchronous_pull_with_lease_management(PROJECT, SUBSCRIPTION_SYNC) + subscriber.synchronous_pull_with_lease_management(PROJECT_ID, SUBSCRIPTION_SYNC) out, _ = capsys.readouterr() - assert "Done." in out + assert f"Received and acknowledged 3 messages from {subscription_sync}." 
in out def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): _publish_messages(publisher_client, topic) - subscriber.listen_for_errors(PROJECT, SUBSCRIPTION_ASYNC, 5) + subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) out, _ = capsys.readouterr() - assert "Listening" in out assert subscription_async in out assert "threw an exception" in out def test_receive_with_delivery_attempts( - publisher_client, topic, subscription_dlq, dead_letter_topic, capsys + publisher_client, topic, subscription_dlq, capsys ): _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_delivery_attempts(PROJECT, SUBSCRIPTION_DLQ, 10) + subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 10) out, _ = capsys.readouterr() - assert "Listening" in out assert subscription_dlq in out - assert "Received message: " in out + assert "Received" in out assert "message 4" in out assert "With delivery attempts: " in out -def test_remove_dead_letter_policy(subscriber_client, subscription_dlq): +def test_remove_dead_letter_policy(subscription_dlq, capsys): subscription_after_update = subscriber.remove_dead_letter_policy( - PROJECT, TOPIC, SUBSCRIPTION_DLQ + PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ ) + out, _ = capsys.readouterr() + assert subscription_dlq in out assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" From 422e7841298e2a5a5ea1c5444ad35827b6db2dee Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 13 Oct 2020 01:48:19 +0200 Subject: [PATCH 0597/1197] chore(deps): update dependency google-cloud-pubsub to v2.1.0 (#191) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index c5daf4f3985e..e5d328183e15 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==2.0.0 +google-cloud-pubsub==2.1.0 From 79a1677d860d923c662659aed69009c503c30415 Mon Sep 17 00:00:00 2001 From: Carlos de la Guardia Date: Mon, 12 Oct 2020 20:23:47 -0500 Subject: [PATCH 0598/1197] fix: make fixup script consistent with migration docs (#208) * Generate code consistent with the upgrade documentation. * Add an option to generate the code using keyword arguments instead of a request. * Generate stylistically consistent code (no spaces in keywords, double quotes for strings). * Reformat the script itself to use the same code styling. --- packages/google-cloud-pubsub/UPGRADING.md | 3 + .../scripts/fixup_pubsub_v1_keywords.py | 278 +++++++++++++----- 2 files changed, 214 insertions(+), 67 deletions(-) diff --git a/packages/google-cloud-pubsub/UPGRADING.md b/packages/google-cloud-pubsub/UPGRADING.md index 9ab5d073a836..3837464fcff3 100644 --- a/packages/google-cloud-pubsub/UPGRADING.md +++ b/packages/google-cloud-pubsub/UPGRADING.md @@ -32,6 +32,9 @@ python3 -m pip install google-cloud-pubsub * The script `fixup_pubsub_v1_keywords.py` is shipped with the library. It expects an input directory (with the code to convert) and an empty destination directory. +Optionally, the `--use-keywords` switch can be added to generate flattened keyword +parameters instead of a request dictionary (see the following section for an +explanation). 
```sh $ scripts/fixup_pubsub_v1_keywords.py --input-directory .samples/ --output-directory samples/ diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index b54a7ad375d3..1c9ec6f8c0fc 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -20,12 +20,11 @@ import libcst as cst import pathlib import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) +from typing import Any, Callable, Dict, List, Sequence, Tuple def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] + predicate: Callable[[Any], bool], iterator: Sequence[Any] ) -> Tuple[List[Any], List[Any]]: """A stable, out-of-place partition.""" results = ([], []) @@ -38,40 +37,128 @@ def partition( class pubsubCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + CTRL_PARAMS: Tuple[str] = ("retry", "timeout", "metadata") METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge': ('subscription', 'ack_ids', ), - 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', ), - 'delete_snapshot': ('snapshot', ), - 'delete_subscription': ('subscription', ), - 'delete_topic': ('topic', ), - 'detach_subscription': ('subscription', ), - 'get_snapshot': ('snapshot', ), - 'get_subscription': ('subscription', ), - 'get_topic': ('topic', ), - 'list_snapshots': ('project', 'page_size', 'page_token', ), - 'list_subscriptions': ('project', 'page_size', 'page_token', ), - 'list_topics': ('project', 'page_size', 'page_token', ), 
- 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), - 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), - 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), - 'modify_push_config': ('subscription', 'push_config', ), - 'publish': ('topic', 'messages', ), - 'pull': ('subscription', 'max_messages', 'return_immediately', ), - 'seek': ('subscription', 'time', 'snapshot', ), - 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), - 'update_snapshot': ('snapshot', 'update_mask', ), - 'update_subscription': ('subscription', 'update_mask', ), - 'update_topic': ('topic', 'update_mask', ), - - 'get_iam_policy': ('resource', 'options', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - + "acknowledge": ( + "subscription", + "ack_ids", + ), + "create_snapshot": ( + "name", + "subscription", + "labels", + ), + "create_subscription": ( + "name", + "topic", + "push_config", + "ack_deadline_seconds", + "retain_acked_messages", + "message_retention_duration", + "labels", + "enable_message_ordering", + "expiration_policy", + "filter", + "dead_letter_policy", + "retry_policy", + "detached", + ), + "create_topic": ( + "name", + "labels", + "message_storage_policy", + "kms_key_name", + ), + "delete_snapshot": ("snapshot",), + "delete_subscription": ("subscription",), + "delete_topic": ("topic",), + "detach_subscription": ("subscription",), + "get_snapshot": ("snapshot",), + "get_subscription": ("subscription",), + "get_topic": ("topic",), + "list_snapshots": ( + "project", + "page_size", + "page_token", + ), + "list_subscriptions": ( + "project", + "page_size", + "page_token", + ), + "list_topics": ( + "project", + "page_size", + "page_token", + ), + "list_topic_snapshots": ( + "topic", + "page_size", + "page_token", + ), + 
"list_topic_subscriptions": ( + "topic", + "page_size", + "page_token", + ), + "modify_ack_deadline": ( + "subscription", + "ack_ids", + "ack_deadline_seconds", + ), + "modify_push_config": ( + "subscription", + "push_config", + ), + "pull": ( + "subscription", + "max_messages", + "return_immediately", + ), + "seek": ( + "subscription", + "time", + "snapshot", + ), + "streaming_pull": ( + "subscription", + "stream_ack_deadline_seconds", + "ack_ids", + "modify_deadline_seconds", + "modify_deadline_ack_ids", + "client_id", + "max_outstanding_messages", + "max_outstanding_bytes", + ), + "update_snapshot": ( + "snapshot", + "update_mask", + ), + "update_subscription": ( + "subscription", + "update_mask", + ), + "update_topic": ( + "topic", + "update_mask", + ), + "get_iam_policy": ( + "resource", + "options", + ), + "set_iam_policy": ( + "resource", + "policy", + ), + "test_iam_permissions": ( + "resource", + "permissions", + ), } + def __init__(self, use_keywords=False): + self._use_keywords = use_keywords + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: try: key = original.func.attr.value @@ -88,35 +175,80 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs + lambda a: not a.keyword.value in self.CTRL_PARAMS, kwargs ) - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + args, ctrl_args = args[: len(kword_params)], args[len(kword_params) :] + ctrl_kwargs.extend( + cst.Arg( + value=a.value, + keyword=cst.Name(value=ctrl), + equal=cst.AssignEqual( + whitespace_before=cst.SimpleWhitespace(""), + whitespace_after=cst.SimpleWhitespace(""), + ), + ) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS) + ) - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - 
cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) + if self._use_keywords: + new_kwargs = [ + cst.Arg( + value=arg.value, + keyword=cst.Name(value=name), + equal=cst.AssignEqual( + whitespace_before=cst.SimpleWhitespace(""), + whitespace_after=cst.SimpleWhitespace(""), + ), ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) + for name, arg in zip(kword_params, args + kwargs) + ] + new_kwargs.extend( + [ + cst.Arg( + value=arg.value, + keyword=cst.Name(value=arg.keyword.value), + equal=cst.AssignEqual( + whitespace_before=cst.SimpleWhitespace(""), + whitespace_after=cst.SimpleWhitespace(""), + ), + ) + for arg in ctrl_kwargs + ] + ) + return updated.with_changes(args=new_kwargs) + else: + request_arg = cst.Arg( + value=cst.Dict( + [ + cst.DictElement( + cst.SimpleString('"{}"'.format(name)), + cst.Element(value=arg.value), + ) + for name, arg in zip(kword_params, args + kwargs) + ] + + [ + cst.DictElement( + cst.SimpleString('"{}"'.format(arg.keyword.value)), + cst.Element(value=arg.value), + ) + for arg in ctrl_kwargs + ] + ), + keyword=cst.Name("request"), + equal=cst.AssignEqual( + whitespace_before=cst.SimpleWhitespace(""), + whitespace_after=cst.SimpleWhitespace(""), + ), + ) - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) + return updated.with_changes(args=[request_arg]) def fix_files( in_dir: pathlib.Path, out_dir: pathlib.Path, + use_keywords: bool = False, *, transformer=pubsubCallTransformer(), ): @@ -129,11 +261,12 @@ def fix_files( pyfile_gen = ( pathlib.Path(os.path.join(root, f)) for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" + for f in files + if os.path.splitext(f)[1] == ".py" ) for fpath in pyfile_gen: - with open(fpath, 'r') as f: + with 
open(fpath, "r") as f: src = f.read() # Parse the code and insert method call fixes. @@ -145,11 +278,11 @@ def fix_files( updated_path.parent.mkdir(parents=True, exist_ok=True) # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: + with open(updated_path, "w") as f: f.write(updated.code) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser( description="""Fix up source that uses the pubsub client library. @@ -164,24 +297,34 @@ def fix_files( These all constitute false negatives. The tool will also detect false positives when an API method shares a name with another method. -""") +""" + ) parser.add_argument( - '-d', - '--input-directory', + "-d", + "--input-directory", required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', + dest="input_dir", + help="the input directory to walk for python files to fix up", ) parser.add_argument( - '-o', - '--output-directory', + "-o", + "--output-directory", required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', + dest="output_dir", + help="the directory to output files fixed via un-flattening", + ) + parser.add_argument( + "-k", + "--use-keywords", + required=False, + action="store_true", + dest="use_keywords", + help="Use keyword arguments instead of constructing a request", ) args = parser.parse_args() input_dir = pathlib.Path(args.input_dir) output_dir = pathlib.Path(args.output_dir) + use_keywords = args.use_keywords if not input_dir.is_dir(): print( f"input directory '{input_dir}' does not exist or is not a directory", @@ -203,4 +346,5 @@ def fix_files( ) sys.exit(-1) - fix_files(input_dir, output_dir) + transformer = pubsubCallTransformer(use_keywords=use_keywords) + fix_files(input_dir, output_dir, use_keywords, transformer=transformer) From 24f0d40b512fb46bde16f8d725dc424c09a3dff2 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 13 Oct 2020 15:41:18 
-0700 Subject: [PATCH 0599/1197] samples: clarify comments in samples (#223) * samples: clarify comments in samples * fix: create a dlq sub instead --- .../samples/snippets/publisher.py | 5 ++-- .../samples/snippets/subscriber.py | 4 ++- .../samples/snippets/subscriber_test.py | 27 ++++++++++++------- 3 files changed, 23 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index accf71dcac68..3aca244c5a45 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -222,12 +222,13 @@ def publish_messages_with_retry_settings(project_id, topic_id): # project_id = "your-project-id" # topic_id = "your-topic-id" - # Configure the retry settings. + # Configure the retry settings. Defaults shown in comments are values applied + # by the library by default, instead of default values in the Retry object. custom_retry = api_core.retry.Retry( initial=0.250, # seconds (default: 0.1) maximum=90.0, # seconds (default: 60.0) multiplier=1.45, # default: 1.3 - deadline=300.0, # seconds (default: 600.0) + deadline=300.0, # seconds (default: 60.0) predicate=api_core.retry.if_exception_type( api_core.exceptions.Aborted, api_core.exceptions.DeadlineExceeded, diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 936b12476290..07da80d93b1f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -612,7 +612,9 @@ def callback(message): streaming_pull_future.result(timeout=timeout) except Exception as e: streaming_pull_future.cancel() - print(f"Listening for messages on {subscription_path} threw an exception: {e}.") + print( + f"Listening for messages on {subscription_path} threw an exception: {e}." 
+ ) # [END pubsub_subscriber_error_listener] diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 5ffeaa5c23a2..d722ebdec8b4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -133,19 +133,26 @@ def subscription_async(subscriber_client, topic): @pytest.fixture(scope="module") -def subscription_dlq(subscriber_client, topic): +def subscription_dlq(subscriber_client, topic, dead_letter_topic): + from google.cloud.pubsub_v1.types import DeadLetterPolicy + subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_DLQ ) try: - subscription = subscriber_client.get_subscription( + subscription = subscriber_client.delete_subscription( request={"subscription": subscription_path} ) except NotFound: - subscription = subscriber_client.create_subscription( - request={"name": subscription_path, "topic": topic} - ) + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) yield subscription.name @@ -210,12 +217,14 @@ def test_create_subscription_with_dead_letter_policy( assert "After 10 delivery attempts." 
in out -def test_update_dead_letter_policy(capsys): +def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): _ = subscriber.update_subscription_with_dead_letter_policy( PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC ) out, _ = capsys.readouterr() + assert dead_letter_topic in out + assert subscription_dlq in out assert "max_delivery_attempts: 20" in out @@ -366,12 +375,10 @@ def test_receive_with_delivery_attempts( ): _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 10) + subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 15) out, _ = capsys.readouterr() - assert subscription_dlq in out - assert "Received" in out - assert "message 4" in out + assert f"Listening for messages on {subscription_dlq}.." in out assert "With delivery attempts: " in out From e41a2b90ab0c3b36af37d03b5ab52e9029b8a89d Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 21 Oct 2020 09:45:12 -0700 Subject: [PATCH 0600/1197] chore: re-run synthtool (#225) --- .../services/publisher/async_client.py | 70 +- .../pubsub_v1/services/publisher/client.py | 114 ++- .../services/publisher/transports/base.py | 10 +- .../services/publisher/transports/grpc.py | 18 +- .../publisher/transports/grpc_asyncio.py | 4 + .../services/subscriber/async_client.py | 113 ++- .../pubsub_v1/services/subscriber/client.py | 125 +++- .../services/subscriber/transports/base.py | 16 +- .../services/subscriber/transports/grpc.py | 18 +- .../subscriber/transports/grpc_asyncio.py | 4 + .../google/pubsub_v1/types/pubsub.py | 24 +- packages/google-cloud-pubsub/synth.metadata | 8 +- .../unit/gapic/pubsub_v1/test_publisher.py | 523 +++++++++---- .../unit/gapic/pubsub_v1/test_subscriber.py | 696 ++++++++++++------ 14 files changed, 1239 insertions(+), 504 deletions(-) diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 601fc09d2671..fdf3aeb37a3a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -48,12 +48,46 @@ class PublisherAsyncClient: DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + subscription_path = staticmethod(PublisherClient.subscription_path) + parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) topic_path = staticmethod(PublisherClient.topic_path) parse_topic_path = staticmethod(PublisherClient.parse_topic_path) + common_billing_account_path = staticmethod( + PublisherClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + PublisherClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(PublisherClient.common_folder_path) + parse_common_folder_path = staticmethod(PublisherClient.parse_common_folder_path) + + common_organization_path = staticmethod(PublisherClient.common_organization_path) + parse_common_organization_path = staticmethod( + PublisherClient.parse_common_organization_path + ) + + common_project_path = staticmethod(PublisherClient.common_project_path) + parse_common_project_path = staticmethod(PublisherClient.parse_common_project_path) + + common_location_path = staticmethod(PublisherClient.common_location_path) + parse_common_location_path = staticmethod( + PublisherClient.parse_common_location_path + ) + from_service_account_file = PublisherClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> PublisherTransport: + """Return the transport used by the client instance. 
+ + Returns: + PublisherTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(PublisherClient).get_transport_class, type(PublisherClient) ) @@ -148,7 +182,8 @@ async def create_topic( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -287,7 +322,8 @@ async def publish( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([topic, messages]): + has_flattened_params = any([topic, messages]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -300,8 +336,9 @@ async def publish( if topic is not None: request.topic = topic - if messages is not None: - request.messages = messages + + if messages: + request.messages.extend(messages) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -312,13 +349,13 @@ async def publish( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.Aborted, exceptions.Cancelled, exceptions.DeadlineExceeded, exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, exceptions.Unknown, - exceptions.Aborted, ), ), default_timeout=60.0, @@ -371,7 +408,8 @@ async def get_topic( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([topic]): + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -395,8 +433,8 @@ async def get_topic( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -453,7 +491,8 @@ async def list_topics( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project]): + has_flattened_params = any([project]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -477,8 +516,8 @@ async def list_topics( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -544,7 +583,8 @@ async def list_topic_subscriptions( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([topic]): + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -568,8 +608,8 @@ async def list_topic_subscriptions( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -639,7 +679,8 @@ async def list_topic_snapshots( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([topic]): + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -663,8 +704,8 @@ async def list_topic_snapshots( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -725,7 +766,8 @@ async def delete_topic( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([topic]): + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 2df57b0caa30..188b3dccb772 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -19,10 +19,10 @@ from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -145,6 +145,30 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> PublisherTransport: + """Return the transport used by the client instance. + + Returns: + PublisherTransport: The transport used by the client instance. 
+ """ + return self._transport + + @staticmethod + def subscription_path(project: str, subscription: str,) -> str: + """Return a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format( + project=project, subscription=subscription, + ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str, str]: + """Parse a subscription path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path + ) + return m.groupdict() if m else {} + @staticmethod def topic_path(project: str, topic: str,) -> str: """Return a fully-qualified topic string.""" @@ -156,12 +180,71 @@ def parse_topic_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its 
component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, PublisherTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, PublisherTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the publisher client. @@ -176,8 +259,8 @@ def __init__( transport (Union[str, ~.PublisherTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -192,10 +275,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -203,9 +286,9 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( @@ -468,8 +551,9 @@ def publish( if topic is not None: request.topic = topic - if messages is not None: - request.messages = messages + + if messages: + request.messages.extend(messages) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index fcb3b99a84bc..fe84ac415e37 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -139,13 +139,13 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.Aborted, exceptions.Cancelled, exceptions.DeadlineExceeded, exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, exceptions.Unknown, - exceptions.Aborted, ), ), default_timeout=60.0, @@ -159,8 +159,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -174,8 +174,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -189,8 +189,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -204,8 +204,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index d3d015f6bd1e..15de8f87d1b6 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -93,10 +93,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -105,6 +105,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -112,6 +114,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -148,6 +151,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -225,12 +229,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. 
return self._grpc_channel @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 6d30b31fd10f..31ad368f8a1e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -150,6 +150,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -157,6 +159,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -193,6 +196,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index bcf3649f9d31..61d79ce8b4d0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -18,7 +18,16 @@ from collections import OrderedDict import functools import re -from typing import Dict, AsyncIterable, AsyncIterator, Sequence, Tuple, Type, Union +from typing import ( + Dict, + AsyncIterable, + Awaitable, + AsyncIterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore 
@@ -56,10 +65,44 @@ class SubscriberAsyncClient: parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) subscription_path = staticmethod(SubscriberClient.subscription_path) parse_subscription_path = staticmethod(SubscriberClient.parse_subscription_path) + topic_path = staticmethod(SubscriberClient.topic_path) + parse_topic_path = staticmethod(SubscriberClient.parse_topic_path) + + common_billing_account_path = staticmethod( + SubscriberClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SubscriberClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(SubscriberClient.common_folder_path) + parse_common_folder_path = staticmethod(SubscriberClient.parse_common_folder_path) + + common_organization_path = staticmethod(SubscriberClient.common_organization_path) + parse_common_organization_path = staticmethod( + SubscriberClient.parse_common_organization_path + ) + + common_project_path = staticmethod(SubscriberClient.common_project_path) + parse_common_project_path = staticmethod(SubscriberClient.parse_common_project_path) + + common_location_path = staticmethod(SubscriberClient.common_location_path) + parse_common_location_path = staticmethod( + SubscriberClient.parse_common_location_path + ) from_service_account_file = SubscriberClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> SubscriberTransport: + """Return the transport used by the client instance. + + Returns: + SubscriberTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(SubscriberClient).get_transport_class, type(SubscriberClient) ) @@ -213,9 +256,8 @@ async def create_subscription( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any( - [name, topic, push_config, ack_deadline_seconds] - ): + has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -245,8 +287,8 @@ async def create_subscription( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -300,7 +342,8 @@ async def get_subscription( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([subscription]): + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -324,8 +367,8 @@ async def get_subscription( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -444,7 +487,8 @@ async def list_subscriptions( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project]): + has_flattened_params = any([project]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -468,8 +512,8 @@ async def list_subscriptions( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -530,7 +574,8 @@ async def delete_subscription( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([subscription]): + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -629,7 +674,8 @@ async def modify_ack_deadline( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([subscription, ack_ids, ack_deadline_seconds]): + has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -642,11 +688,12 @@ async def modify_ack_deadline( if subscription is not None: request.subscription = subscription - if ack_ids is not None: - request.ack_ids = ack_ids if ack_deadline_seconds is not None: request.ack_deadline_seconds = ack_deadline_seconds + if ack_ids: + request.ack_ids.extend(ack_ids) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( @@ -720,7 +767,8 @@ async def acknowledge( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([subscription, ack_ids]): + has_flattened_params = any([subscription, ack_ids]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -733,8 +781,9 @@ async def acknowledge( if subscription is not None: request.subscription = subscription - if ack_ids is not None: - request.ack_ids = ack_ids + + if ack_ids: + request.ack_ids.extend(ack_ids) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -824,9 +873,8 @@ async def pull( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any( - [subscription, return_immediately, max_messages] - ): + has_flattened_params = any([subscription, return_immediately, max_messages]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -854,8 +902,8 @@ async def pull( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -883,7 +931,7 @@ def streaming_pull( retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> AsyncIterable[pubsub.StreamingPullResponse]: + ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: r"""Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack deadline modifications back to the server. 
The server will close @@ -923,11 +971,11 @@ def streaming_pull( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.Aborted, exceptions.DeadlineExceeded, exceptions.InternalServerError, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.Aborted, ), ), default_timeout=900.0, @@ -989,7 +1037,8 @@ async def modify_push_config( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([subscription, push_config]): + has_flattened_params = any([subscription, push_config]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1078,7 +1127,8 @@ async def get_snapshot( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([snapshot]): + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1102,8 +1152,8 @@ async def get_snapshot( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -1165,7 +1215,8 @@ async def list_snapshots( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project]): + has_flattened_params = any([project]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -1189,8 +1240,8 @@ async def list_snapshots( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -1294,7 +1345,8 @@ async def create_snapshot( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, subscription]): + has_flattened_params = any([name, subscription]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1445,7 +1497,8 @@ async def delete_snapshot( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([snapshot]): + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -1532,8 +1585,8 @@ async def seek( multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 1193044c464f..60d44b5792a9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -19,10 +19,20 @@ from distutils import util import os import re -from typing import Callable, Dict, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import ( + Callable, + Dict, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -149,6 +159,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> SubscriberTransport: + """Return the transport used by the client instance. + + Returns: + SubscriberTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod def snapshot_path(project: str, snapshot: str,) -> str: """Return a fully-qualified snapshot string.""" @@ -177,12 +196,82 @@ def parse_subscription_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def topic_path(project: str, topic: str,) -> str: + """Return a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format(project=project, topic=topic,) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str, str]: + """Parse a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + 
def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, SubscriberTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, SubscriberTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the subscriber client. @@ -197,8 +286,8 @@ def __init__( transport (Union[str, ~.SubscriberTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (client_options_lib.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: @@ -213,10 +302,10 @@ def __init__( not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -224,9 +313,9 @@ def __init__( creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. use_client_cert = bool( @@ -802,11 +891,12 @@ def modify_ack_deadline( if subscription is not None: request.subscription = subscription - if ack_ids is not None: - request.ack_ids = ack_ids if ack_deadline_seconds is not None: request.ack_deadline_seconds = ack_deadline_seconds + if ack_ids: + request.ack_ids.extend(ack_ids) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] @@ -890,8 +980,9 @@ def acknowledge( if subscription is not None: request.subscription = subscription - if ack_ids is not None: - request.ack_ids = ack_ids + + if ack_ids: + request.ack_ids.extend(ack_ids) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 600369d83d28..8442fc0feb72 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -118,8 +118,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -133,8 +133,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -159,8 +159,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -207,8 +207,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -221,11 +221,11 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.Aborted, exceptions.DeadlineExceeded, exceptions.InternalServerError, 
exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.Aborted, ), ), default_timeout=900.0, @@ -250,8 +250,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -265,8 +265,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, @@ -313,8 +313,8 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( exceptions.Aborted, - exceptions.Unknown, exceptions.ServiceUnavailable, + exceptions.Unknown, ), ), default_timeout=60.0, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index e2f20b1c994f..5b39bb1d6b85 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -95,10 +95,10 @@ def __init__( for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
Raises: @@ -107,6 +107,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -114,6 +116,7 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -150,6 +153,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" @@ -227,12 +231,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Return the channel from cache. return self._grpc_channel @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index cc2f3a240c83..f64f1a18e9c2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -152,6 +152,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -159,6 +161,7 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", @@ -195,6 +198,7 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 61bb089f5d79..c34cf422d046 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -126,7 +126,7 @@ class Topic(proto.Message): labels = proto.MapField(proto.STRING, proto.STRING, number=2) message_storage_policy = proto.Field( - proto.MESSAGE, number=3, message=MessageStoragePolicy, + proto.MESSAGE, number=3, message="MessageStoragePolicy", ) kms_key_name = proto.Field(proto.STRING, number=5) @@ -213,7 +213,7 @@ class UpdateTopicRequest(proto.Message): policy configured at the project or organization level. 
""" - topic = proto.Field(proto.MESSAGE, number=1, message=Topic,) + topic = proto.Field(proto.MESSAGE, number=1, message="Topic",) update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) @@ -231,7 +231,7 @@ class PublishRequest(proto.Message): topic = proto.Field(proto.STRING, number=1) - messages = proto.RepeatedField(proto.MESSAGE, number=2, message=PubsubMessage,) + messages = proto.RepeatedField(proto.MESSAGE, number=2, message="PubsubMessage",) class PublishResponse(proto.Message): @@ -287,7 +287,7 @@ class ListTopicsResponse(proto.Message): def raw_page(self): return self - topics = proto.RepeatedField(proto.MESSAGE, number=1, message=Topic,) + topics = proto.RepeatedField(proto.MESSAGE, number=1, message="Topic",) next_page_token = proto.Field(proto.STRING, number=2) @@ -772,7 +772,7 @@ class ReceivedMessage(proto.Message): ack_id = proto.Field(proto.STRING, number=1) - message = proto.Field(proto.MESSAGE, number=2, message=PubsubMessage,) + message = proto.Field(proto.MESSAGE, number=2, message="PubsubMessage",) delivery_attempt = proto.Field(proto.INT32, number=3) @@ -801,7 +801,7 @@ class UpdateSubscriptionRequest(proto.Message): specified and non-empty. 
""" - subscription = proto.Field(proto.MESSAGE, number=1, message=Subscription,) + subscription = proto.Field(proto.MESSAGE, number=1, message="Subscription",) update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) @@ -845,7 +845,9 @@ class ListSubscriptionsResponse(proto.Message): def raw_page(self): return self - subscriptions = proto.RepeatedField(proto.MESSAGE, number=1, message=Subscription,) + subscriptions = proto.RepeatedField( + proto.MESSAGE, number=1, message="Subscription", + ) next_page_token = proto.Field(proto.STRING, number=2) @@ -881,7 +883,7 @@ class ModifyPushConfigRequest(proto.Message): subscription = proto.Field(proto.STRING, number=1) - push_config = proto.Field(proto.MESSAGE, number=2, message=PushConfig,) + push_config = proto.Field(proto.MESSAGE, number=2, message="PushConfig",) class PullRequest(proto.Message): @@ -928,7 +930,7 @@ class PullResponse(proto.Message): """ received_messages = proto.RepeatedField( - proto.MESSAGE, number=1, message=ReceivedMessage, + proto.MESSAGE, number=1, message="ReceivedMessage", ) @@ -1092,7 +1094,7 @@ class StreamingPullResponse(proto.Message): """ received_messages = proto.RepeatedField( - proto.MESSAGE, number=1, message=ReceivedMessage, + proto.MESSAGE, number=1, message="ReceivedMessage", ) @@ -1240,7 +1242,7 @@ class ListSnapshotsResponse(proto.Message): def raw_page(self): return self - snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message=Snapshot,) + snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message="Snapshot",) next_page_token = proto.Field(proto.STRING, number=2) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index a29820d3d21a..148d44682fb1 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,28 +4,28 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": 
"89c671aeb4de2c47f45ca1e438b91c440bead958" + "sha": "c957047c84c5586e4a782e9ae297094be6cdba2e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" + "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" + "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "dba48bb9bc6959c232bec9150ac6313b608fe7bd" + "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" } } ], diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index b7d99542c33c..e955e5b82859 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -92,12 +92,12 @@ def test_publisher_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == "pubsub.googleapis.com:443" def test_publisher_client_get_transport_class(): @@ -437,7 +437,7 @@ def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( name="name_value", kms_key_name="kms_key_name_value", @@ -452,6 +452,7 @@ def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): assert args[0] == pubsub.Topic() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) assert response.name == "name_value" @@ -464,19 +465,19 @@ def test_create_topic_from_dict(): @pytest.mark.asyncio -async def test_create_topic_async(transport: str = "grpc_asyncio"): +async def test_create_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.Topic +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.Topic() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) @@ -488,7 +489,7 @@ async def test_create_topic_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.Topic() # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Topic) @@ -498,6 +499,11 @@ async def test_create_topic_async(transport: str = "grpc_asyncio"): assert response.kms_key_name == "kms_key_name_value" +@pytest.mark.asyncio +async def test_create_topic_async_from_dict(): + await test_create_topic_async(request_type=dict) + + def test_create_topic_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -507,7 +513,7 @@ def test_create_topic_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: call.return_value = pubsub.Topic() client.create_topic(request) @@ -532,9 +538,7 @@ async def test_create_topic_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) await client.create_topic(request) @@ -553,7 +557,7 @@ def test_create_topic_flattened(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_topic), "__call__") as call: + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic() @@ -585,9 +589,7 @@ async def test_create_topic_flattened_async(): client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic() @@ -626,7 +628,7 @@ def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_topic), "__call__") as call: + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( name="name_value", kms_key_name="kms_key_name_value", @@ -641,6 +643,7 @@ def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRe assert args[0] == pubsub.UpdateTopicRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) assert response.name == "name_value" @@ -653,19 +656,19 @@ def test_update_topic_from_dict(): @pytest.mark.asyncio -async def test_update_topic_async(transport: str = "grpc_asyncio"): +async def test_update_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.UpdateTopicRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) @@ -677,7 +680,7 @@ async def test_update_topic_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.UpdateTopicRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -687,6 +690,11 @@ async def test_update_topic_async(transport: str = "grpc_asyncio"): assert response.kms_key_name == "kms_key_name_value" +@pytest.mark.asyncio +async def test_update_topic_async_from_dict(): + await test_update_topic_async(request_type=dict) + + def test_update_topic_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -696,7 +704,7 @@ def test_update_topic_field_headers(): request.topic.name = "topic.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_topic), "__call__") as call: + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: call.return_value = pubsub.Topic() client.update_topic(request) @@ -721,9 +729,7 @@ async def test_update_topic_field_headers_async(): request.topic.name = "topic.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) await client.update_topic(request) @@ -748,7 +754,7 @@ def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.publish), "__call__") as call: + with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PublishResponse(message_ids=["message_ids_value"],) @@ -761,6 +767,7 @@ def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): assert args[0] == pubsub.PublishRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) assert response.message_ids == ["message_ids_value"] @@ -771,17 +778,19 @@ def test_publish_from_dict(): @pytest.mark.asyncio -async def test_publish_async(transport: str = "grpc_asyncio"): +async def test_publish_async( + transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.PublishRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.PublishResponse(message_ids=["message_ids_value"],) @@ -793,7 +802,7 @@ async def test_publish_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.PublishRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.PublishResponse) @@ -801,6 +810,11 @@ async def test_publish_async(transport: str = "grpc_asyncio"): assert response.message_ids == ["message_ids_value"] +@pytest.mark.asyncio +async def test_publish_async_from_dict(): + await test_publish_async(request_type=dict) + + def test_publish_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -810,7 +824,7 @@ def test_publish_field_headers(): request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.publish), "__call__") as call: + with mock.patch.object(type(client.transport.publish), "__call__") as call: call.return_value = pubsub.PublishResponse() client.publish(request) @@ -835,7 +849,7 @@ async def test_publish_field_headers_async(): request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + with mock.patch.object(type(client.transport.publish), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.PublishResponse() ) @@ -856,7 +870,7 @@ def test_publish_flattened(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.publish), "__call__") as call: + with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PublishResponse() @@ -894,7 +908,7 @@ async def test_publish_flattened_async(): client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._client._transport.publish), "__call__") as call: + with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PublishResponse() @@ -941,7 +955,7 @@ def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest) request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( name="name_value", kms_key_name="kms_key_name_value", @@ -956,6 +970,7 @@ def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest) assert args[0] == pubsub.GetTopicRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) assert response.name == "name_value" @@ -968,19 +983,19 @@ def test_get_topic_from_dict(): @pytest.mark.asyncio -async def test_get_topic_async(transport: str = "grpc_asyncio"): +async def test_get_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.GetTopicRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) @@ -992,7 +1007,7 @@ async def test_get_topic_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.GetTopicRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -1002,6 +1017,11 @@ async def test_get_topic_async(transport: str = "grpc_asyncio"): assert response.kms_key_name == "kms_key_name_value" +@pytest.mark.asyncio +async def test_get_topic_async_from_dict(): + await test_get_topic_async(request_type=dict) + + def test_get_topic_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -1011,7 +1031,7 @@ def test_get_topic_field_headers(): request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: call.return_value = pubsub.Topic() client.get_topic(request) @@ -1036,9 +1056,7 @@ async def test_get_topic_field_headers_async(): request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) await client.get_topic(request) @@ -1057,7 +1075,7 @@ def test_get_topic_flattened(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_topic), "__call__") as call: + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic() @@ -1089,9 +1107,7 @@ async def test_get_topic_flattened_async(): client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic() @@ -1130,7 +1146,7 @@ def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicsResponse( next_page_token="next_page_token_value", @@ -1145,6 +1161,7 @@ def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequ assert args[0] == pubsub.ListTopicsRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListTopicsPager) assert response.next_page_token == "next_page_token_value" @@ -1155,19 +1172,19 @@ def test_list_topics_from_dict(): @pytest.mark.asyncio -async def test_list_topics_async(transport: str = "grpc_asyncio"): +async def test_list_topics_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ListTopicsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_topics), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicsResponse(next_page_token="next_page_token_value",) @@ -1179,7 +1196,7 @@ async def test_list_topics_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ListTopicsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicsAsyncPager) @@ -1187,6 +1204,11 @@ async def test_list_topics_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_topics_async_from_dict(): + await test_list_topics_async(request_type=dict) + + def test_list_topics_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -1196,7 +1218,7 @@ def test_list_topics_field_headers(): request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: call.return_value = pubsub.ListTopicsResponse() client.list_topics(request) @@ -1221,9 +1243,7 @@ async def test_list_topics_field_headers_async(): request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_topics), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicsResponse() ) @@ -1244,7 +1264,7 @@ def test_list_topics_flattened(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicsResponse() @@ -1276,9 +1296,7 @@ async def test_list_topics_flattened_async(): client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_topics), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicsResponse() @@ -1313,7 +1331,7 @@ def test_list_topics_pager(): client = PublisherClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( pubsub.ListTopicsResponse( @@ -1343,7 +1361,7 @@ def test_list_topics_pages(): client = PublisherClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_topics), "__call__") as call: + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( pubsub.ListTopicsResponse( @@ -1366,9 +1384,7 @@ async def test_list_topics_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topics), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_topics), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1397,9 +1413,7 @@ async def test_list_topics_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topics), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_topics), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1432,7 +1446,7 @@ def test_list_topic_subscriptions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = pubsub.ListTopicSubscriptionsResponse( @@ -1449,6 +1463,7 @@ def test_list_topic_subscriptions( assert args[0] == pubsub.ListTopicSubscriptionsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsPager) assert response.subscriptions == ["subscriptions_value"] @@ -1461,18 +1476,20 @@ def test_list_topic_subscriptions_from_dict(): @pytest.mark.asyncio -async def test_list_topic_subscriptions_async(transport: str = "grpc_asyncio"): +async def test_list_topic_subscriptions_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ListTopicSubscriptionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1488,7 +1505,7 @@ async def test_list_topic_subscriptions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ListTopicSubscriptionsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) @@ -1498,6 +1515,11 @@ async def test_list_topic_subscriptions_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_from_dict(): + await test_list_topic_subscriptions_async(request_type=dict) + + def test_list_topic_subscriptions_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -1508,7 +1530,7 @@ def test_list_topic_subscriptions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: call.return_value = pubsub.ListTopicSubscriptionsResponse() @@ -1535,7 +1557,7 @@ async def test_list_topic_subscriptions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicSubscriptionsResponse() @@ -1558,7 +1580,7 @@ def test_list_topic_subscriptions_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicSubscriptionsResponse() @@ -1592,7 +1614,7 @@ async def test_list_topic_subscriptions_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicSubscriptionsResponse() @@ -1629,7 +1651,7 @@ def test_list_topic_subscriptions_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1664,7 +1686,7 @@ def test_list_topic_subscriptions_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_subscriptions), "__call__" + type(client.transport.list_topic_subscriptions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1691,7 +1713,7 @@ async def test_list_topic_subscriptions_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_subscriptions), + type(client.transport.list_topic_subscriptions), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1725,7 +1747,7 @@ async def test_list_topic_subscriptions_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_subscriptions), + type(client.transport.list_topic_subscriptions), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1763,7 +1785,7 @@ def test_list_topic_snapshots( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = pubsub.ListTopicSnapshotsResponse( @@ -1779,6 +1801,7 @@ def test_list_topic_snapshots( assert args[0] == pubsub.ListTopicSnapshotsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsPager) assert response.snapshots == ["snapshots_value"] @@ -1791,18 +1814,20 @@ def test_list_topic_snapshots_from_dict(): @pytest.mark.asyncio -async def test_list_topic_snapshots_async(transport: str = "grpc_asyncio"): +async def test_list_topic_snapshots_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ListTopicSnapshotsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1817,7 +1842,7 @@ async def test_list_topic_snapshots_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ListTopicSnapshotsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) @@ -1827,6 +1852,11 @@ async def test_list_topic_snapshots_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_from_dict(): + await test_list_topic_snapshots_async(request_type=dict) + + def test_list_topic_snapshots_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -1837,7 +1867,7 @@ def test_list_topic_snapshots_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: call.return_value = pubsub.ListTopicSnapshotsResponse() @@ -1864,7 +1894,7 @@ async def test_list_topic_snapshots_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicSnapshotsResponse() @@ -1887,7 +1917,7 @@ def test_list_topic_snapshots_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicSnapshotsResponse() @@ -1921,7 +1951,7 @@ async def test_list_topic_snapshots_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = pubsub.ListTopicSnapshotsResponse() @@ -1958,7 +1988,7 @@ def test_list_topic_snapshots_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1991,7 +2021,7 @@ def test_list_topic_snapshots_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_topic_snapshots), "__call__" + type(client.transport.list_topic_snapshots), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -2016,7 +2046,7 @@ async def test_list_topic_snapshots_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_snapshots), + type(client.transport.list_topic_snapshots), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -2048,7 +2078,7 @@ async def test_list_topic_snapshots_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_topic_snapshots), + type(client.transport.list_topic_snapshots), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -2081,7 +2111,7 @@ def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -2102,19 +2132,19 @@ def test_delete_topic_from_dict(): @pytest.mark.asyncio -async def test_delete_topic_async(transport: str = "grpc_asyncio"): +async def test_delete_topic_async( + transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.DeleteTopicRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2124,12 +2154,17 @@ async def test_delete_topic_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.DeleteTopicRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_topic_async_from_dict(): + await test_delete_topic_async(request_type=dict) + + def test_delete_topic_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -2139,7 +2174,7 @@ def test_delete_topic_field_headers(): request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: call.return_value = None client.delete_topic(request) @@ -2164,9 +2199,7 @@ async def test_delete_topic_field_headers_async(): request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_topic(request) @@ -2185,7 +2218,7 @@ def test_delete_topic_flattened(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_topic), "__call__") as call: + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2217,9 +2250,7 @@ async def test_delete_topic_flattened_async(): client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_topic), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2261,7 +2292,7 @@ def test_detach_subscription( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.detach_subscription), "__call__" + type(client.transport.detach_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = pubsub.DetachSubscriptionResponse() @@ -2275,6 +2306,7 @@ def test_detach_subscription( assert args[0] == pubsub.DetachSubscriptionRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) @@ -2283,18 +2315,20 @@ def test_detach_subscription_from_dict(): @pytest.mark.asyncio -async def test_detach_subscription_async(transport: str = "grpc_asyncio"): +async def test_detach_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest +): client = PublisherAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.DetachSubscriptionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.detach_subscription), "__call__" + type(client.transport.detach_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2307,12 +2341,17 @@ async def test_detach_subscription_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.DetachSubscriptionRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.DetachSubscriptionResponse) +@pytest.mark.asyncio +async def test_detach_subscription_async_from_dict(): + await test_detach_subscription_async(request_type=dict) + + def test_detach_subscription_field_headers(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) @@ -2323,7 +2362,7 @@ def test_detach_subscription_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.detach_subscription), "__call__" + type(client.transport.detach_subscription), "__call__" ) as call: call.return_value = pubsub.DetachSubscriptionResponse() @@ -2352,7 +2391,7 @@ async def test_detach_subscription_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.detach_subscription), "__call__" + type(client.transport.detach_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.DetachSubscriptionResponse() @@ -2408,7 +2447,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = PublisherClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -2441,7 +2480,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = PublisherClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.PublisherGrpcTransport,) + assert isinstance(client.transport, transports.PublisherGrpcTransport,) def test_publisher_base_transport_error(): @@ -2555,7 +2594,7 @@ def test_publisher_host_no_port(): api_endpoint="pubsub.googleapis.com" ), ) - assert client._transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == "pubsub.googleapis.com:443" def test_publisher_host_with_port(): @@ -2565,7 +2604,7 @@ def test_publisher_host_with_port(): api_endpoint="pubsub.googleapis.com:8000" ), ) - assert client._transport._host == "pubsub.googleapis.com:8000" + assert client.transport._host == "pubsub.googleapis.com:8000" def test_publisher_grpc_transport_channel(): @@ -2577,6 +2616,7 @@ def test_publisher_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_publisher_grpc_asyncio_transport_channel(): @@ -2588,6 +2628,7 @@ def test_publisher_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( @@ -2633,6 +2674,7 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -2675,9 +2717,32 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_topic_path(): +def test_subscription_path(): project = "squid" - topic = "clam" + subscription = "clam" + + expected = "projects/{project}/subscriptions/{subscription}".format( + project=project, subscription=subscription, + ) + actual = 
PublisherClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "whelk", + "subscription": "octopus", + } + path = PublisherClient.subscription_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_subscription_path(path) + assert expected == actual + + +def test_topic_path(): + project = "oyster" + topic = "nudibranch" expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) actual = PublisherClient.topic_path(project, topic) @@ -2686,8 +2751,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "whelk", - "topic": "octopus", + "project": "cuttlefish", + "topic": "mussel", } path = PublisherClient.topic_path(**expected) @@ -2696,6 +2761,107 @@ def test_parse_topic_path(): assert expected == actual +def test_common_billing_account_path(): + billing_account = "winkle" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = PublisherClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nautilus", + } + path = PublisherClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "scallop" + + expected = "folders/{folder}".format(folder=folder,) + actual = PublisherClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "abalone", + } + path = PublisherClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "squid" + + expected = "organizations/{organization}".format(organization=organization,) + actual = PublisherClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "clam", + } + path = PublisherClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "whelk" + + expected = "projects/{project}".format(project=project,) + actual = PublisherClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "octopus", + } + path = PublisherClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "oyster" + location = "nudibranch" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = PublisherClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "cuttlefish", + "location": "mussel", + } + path = PublisherClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() @@ -2727,7 +2893,7 @@ def test_set_iam_policy(transport: str = "grpc"): request = iam_policy.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -2758,9 +2924,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): request = iam_policy.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -2791,7 +2955,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -2816,9 +2980,7 @@ async def test_set_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.set_iam_policy(request) @@ -2836,7 +2998,7 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -2849,6 +3011,23 @@ def test_set_iam_policy_from_dict(): call.assert_called() +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + def test_get_iam_policy(transport: str = "grpc"): client = PublisherClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -2859,7 +3038,7 @@ def test_get_iam_policy(transport: str = "grpc"): request = iam_policy.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -2890,9 +3069,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): request = iam_policy.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -2923,7 +3100,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -2948,9 +3125,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.get_iam_policy(request) @@ -2968,7 +3143,7 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -2981,6 +3156,23 @@ def test_get_iam_policy_from_dict(): call.assert_called() +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + def test_test_iam_permissions(transport: str = "grpc"): client = PublisherClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -2992,7 +3184,7 @@ def test_test_iam_permissions(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -3025,7 +3217,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3056,7 +3248,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3083,7 +3275,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -3105,7 +3297,7 @@ def test_test_iam_permissions_from_dict(): client = PublisherClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3117,3 +3309,24 @@ def test_test_iam_permissions_from_dict(): } ) call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 89223117a296..f837df651833 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -95,12 +95,12 @@ def test_subscriber_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == "pubsub.googleapis.com:443" def test_subscriber_client_get_transport_class(): @@ -443,7 +443,7 @@ def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscr # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_subscription), "__call__" + type(client.transport.create_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription( @@ -465,6 +465,7 @@ def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscr assert args[0] == pubsub.Subscription() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) assert response.name == "name_value" @@ -487,18 +488,20 @@ def test_create_subscription_from_dict(): @pytest.mark.asyncio -async def test_create_subscription_async(transport: str = "grpc_asyncio"): +async def test_create_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.Subscription +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.Subscription() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_subscription), "__call__" + type(client.transport.create_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -519,7 +522,7 @@ async def test_create_subscription_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.Subscription() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -539,6 +542,11 @@ async def test_create_subscription_async(transport: str = "grpc_asyncio"): assert response.detached is True +@pytest.mark.asyncio +async def test_create_subscription_async_from_dict(): + await test_create_subscription_async(request_type=dict) + + def test_create_subscription_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -549,7 +557,7 @@ def test_create_subscription_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_subscription), "__call__" + type(client.transport.create_subscription), "__call__" ) as call: call.return_value = pubsub.Subscription() @@ -576,7 +584,7 @@ async def test_create_subscription_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_subscription), "__call__" + type(client.transport.create_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) @@ -597,7 +605,7 @@ def test_create_subscription_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_subscription), "__call__" + type(client.transport.create_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription() @@ -648,7 +656,7 @@ async def test_create_subscription_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_subscription), "__call__" + type(client.transport.create_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription() @@ -707,9 +715,7 @@ def test_get_subscription( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_subscription), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription( name="name_value", @@ -730,6 +736,7 @@ def test_get_subscription( assert args[0] == pubsub.GetSubscriptionRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) assert response.name == "name_value" @@ -752,19 +759,19 @@ def test_get_subscription_from_dict(): @pytest.mark.asyncio -async def test_get_subscription_async(transport: str = "grpc_asyncio"): +async def test_get_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.GetSubscriptionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_subscription), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Subscription( @@ -784,7 +791,7 @@ async def test_get_subscription_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.GetSubscriptionRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -804,6 +811,11 @@ async def test_get_subscription_async(transport: str = "grpc_asyncio"): assert response.detached is True +@pytest.mark.asyncio +async def test_get_subscription_async_from_dict(): + await test_get_subscription_async(request_type=dict) + + def test_get_subscription_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -813,9 +825,7 @@ def test_get_subscription_field_headers(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._transport.get_subscription), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: call.return_value = pubsub.Subscription() client.get_subscription(request) @@ -842,9 +852,7 @@ async def test_get_subscription_field_headers_async(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_subscription), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) await client.get_subscription(request) @@ -865,9 +873,7 @@ def test_get_subscription_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._transport.get_subscription), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription() @@ -899,9 +905,7 @@ async def test_get_subscription_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_subscription), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription() @@ -943,7 +947,7 @@ def test_update_subscription( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.update_subscription), "__call__" + type(client.transport.update_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription( @@ -965,6 +969,7 @@ def test_update_subscription( assert args[0] == pubsub.UpdateSubscriptionRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) assert response.name == "name_value" @@ -987,18 +992,20 @@ def test_update_subscription_from_dict(): @pytest.mark.asyncio -async def test_update_subscription_async(transport: str = "grpc_asyncio"): +async def test_update_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.UpdateSubscriptionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_subscription), "__call__" + type(client.transport.update_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1019,7 +1026,7 @@ async def test_update_subscription_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.UpdateSubscriptionRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Subscription) @@ -1039,6 +1046,11 @@ async def test_update_subscription_async(transport: str = "grpc_asyncio"): assert response.detached is True +@pytest.mark.asyncio +async def test_update_subscription_async_from_dict(): + await test_update_subscription_async(request_type=dict) + + def test_update_subscription_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -1049,7 +1061,7 @@ def test_update_subscription_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_subscription), "__call__" + type(client.transport.update_subscription), "__call__" ) as call: call.return_value = pubsub.Subscription() @@ -1079,7 +1091,7 @@ async def test_update_subscription_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_subscription), "__call__" + type(client.transport.update_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) @@ -1111,7 +1123,7 @@ def test_list_subscriptions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSubscriptionsResponse( @@ -1127,6 +1139,7 @@ def test_list_subscriptions( assert args[0] == pubsub.ListSubscriptionsRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSubscriptionsPager) assert response.next_page_token == "next_page_token_value" @@ -1137,18 +1150,20 @@ def test_list_subscriptions_from_dict(): @pytest.mark.asyncio -async def test_list_subscriptions_async(transport: str = "grpc_asyncio"): +async def test_list_subscriptions_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ListSubscriptionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1161,7 +1176,7 @@ async def test_list_subscriptions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ListSubscriptionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSubscriptionsAsyncPager) @@ -1169,6 +1184,11 @@ async def test_list_subscriptions_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_subscriptions_async_from_dict(): + await test_list_subscriptions_async(request_type=dict) + + def test_list_subscriptions_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -1179,7 +1199,7 @@ def test_list_subscriptions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: call.return_value = pubsub.ListSubscriptionsResponse() @@ -1206,7 +1226,7 @@ async def test_list_subscriptions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSubscriptionsResponse() @@ -1229,7 +1249,7 @@ def test_list_subscriptions_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSubscriptionsResponse() @@ -1263,7 +1283,7 @@ async def test_list_subscriptions_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSubscriptionsResponse() @@ -1300,7 +1320,7 @@ def test_list_subscriptions_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1340,7 +1360,7 @@ def test_list_subscriptions_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.list_subscriptions), "__call__" + type(client.transport.list_subscriptions), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1372,7 +1392,7 @@ async def test_list_subscriptions_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_subscriptions), + type(client.transport.list_subscriptions), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1411,7 +1431,7 @@ async def test_list_subscriptions_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_subscriptions), + type(client.transport.list_subscriptions), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1454,7 +1474,7 @@ def test_delete_subscription( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_subscription), "__call__" + type(client.transport.delete_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1476,18 +1496,20 @@ def test_delete_subscription_from_dict(): @pytest.mark.asyncio -async def test_delete_subscription_async(transport: str = "grpc_asyncio"): +async def test_delete_subscription_async( + transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.DeleteSubscriptionRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.delete_subscription), "__call__" + type(client.transport.delete_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1498,12 +1520,17 @@ async def test_delete_subscription_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.DeleteSubscriptionRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_subscription_async_from_dict(): + await test_delete_subscription_async(request_type=dict) + + def test_delete_subscription_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -1514,7 +1541,7 @@ def test_delete_subscription_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_subscription), "__call__" + type(client.transport.delete_subscription), "__call__" ) as call: call.return_value = None @@ -1543,7 +1570,7 @@ async def test_delete_subscription_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_subscription), "__call__" + type(client.transport.delete_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1566,7 +1593,7 @@ def test_delete_subscription_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_subscription), "__call__" + type(client.transport.delete_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -1600,7 +1627,7 @@ async def test_delete_subscription_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_subscription), "__call__" + type(client.transport.delete_subscription), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1643,7 +1670,7 @@ def test_modify_ack_deadline( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_ack_deadline), "__call__" + type(client.transport.modify_ack_deadline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1665,18 +1692,20 @@ def test_modify_ack_deadline_from_dict(): @pytest.mark.asyncio -async def test_modify_ack_deadline_async(transport: str = "grpc_asyncio"): +async def test_modify_ack_deadline_async( + transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ModifyAckDeadlineRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_ack_deadline), "__call__" + type(client.transport.modify_ack_deadline), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1687,12 +1716,17 @@ async def test_modify_ack_deadline_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ModifyAckDeadlineRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_modify_ack_deadline_async_from_dict(): + await test_modify_ack_deadline_async(request_type=dict) + + def test_modify_ack_deadline_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -1703,7 +1737,7 @@ def test_modify_ack_deadline_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_ack_deadline), "__call__" + type(client.transport.modify_ack_deadline), "__call__" ) as call: call.return_value = None @@ -1732,7 +1766,7 @@ async def test_modify_ack_deadline_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_ack_deadline), "__call__" + type(client.transport.modify_ack_deadline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1755,7 +1789,7 @@ def test_modify_ack_deadline_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_ack_deadline), "__call__" + type(client.transport.modify_ack_deadline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1800,7 +1834,7 @@ async def test_modify_ack_deadline_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.modify_ack_deadline), "__call__" + type(client.transport.modify_ack_deadline), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1851,7 +1885,7 @@ def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeReq request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.acknowledge), "__call__") as call: + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1872,19 +1906,19 @@ def test_acknowledge_from_dict(): @pytest.mark.asyncio -async def test_acknowledge_async(transport: str = "grpc_asyncio"): +async def test_acknowledge_async( + transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.AcknowledgeRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.acknowledge), "__call__" - ) as call: + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1894,12 +1928,17 @@ async def test_acknowledge_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.AcknowledgeRequest() # Establish that the response is the type that we expect. 
assert response is None +@pytest.mark.asyncio +async def test_acknowledge_async_from_dict(): + await test_acknowledge_async(request_type=dict) + + def test_acknowledge_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -1909,7 +1948,7 @@ def test_acknowledge_field_headers(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.acknowledge), "__call__") as call: + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: call.return_value = None client.acknowledge(request) @@ -1936,9 +1975,7 @@ async def test_acknowledge_field_headers_async(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.acknowledge), "__call__" - ) as call: + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.acknowledge(request) @@ -1959,7 +1996,7 @@ def test_acknowledge_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.acknowledge), "__call__") as call: + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1997,9 +2034,7 @@ async def test_acknowledge_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.acknowledge), "__call__" - ) as call: + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -2044,7 +2079,7 @@ def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.pull), "__call__") as call: + with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PullResponse() @@ -2057,6 +2092,7 @@ def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): assert args[0] == pubsub.PullRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) @@ -2065,17 +2101,19 @@ def test_pull_from_dict(): @pytest.mark.asyncio -async def test_pull_async(transport: str = "grpc_asyncio"): +async def test_pull_async( + transport: str = "grpc_asyncio", request_type=pubsub.PullRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.PullRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) @@ -2085,12 +2123,17 @@ async def test_pull_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.PullRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.PullResponse) +@pytest.mark.asyncio +async def test_pull_async_from_dict(): + await test_pull_async(request_type=dict) + + def test_pull_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -2100,7 +2143,7 @@ def test_pull_field_headers(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.pull), "__call__") as call: + with mock.patch.object(type(client.transport.pull), "__call__") as call: call.return_value = pubsub.PullResponse() client.pull(request) @@ -2127,7 +2170,7 @@ async def test_pull_field_headers_async(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + with mock.patch.object(type(client.transport.pull), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) await client.pull(request) @@ -2148,7 +2191,7 @@ def test_pull_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.pull), "__call__") as call: + with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PullResponse() @@ -2191,7 +2234,7 @@ async def test_pull_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.pull), "__call__") as call: + with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = pubsub.PullResponse() @@ -2245,7 +2288,7 @@ def test_streaming_pull( requests = [request] # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.streaming_pull), "__call__") as call: + with mock.patch.object(type(client.transport.streaming_pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([pubsub.StreamingPullResponse()]) @@ -2267,21 +2310,21 @@ def test_streaming_pull_from_dict(): @pytest.mark.asyncio -async def test_streaming_pull_async(transport: str = "grpc_asyncio"): +async def test_streaming_pull_async( + transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.StreamingPullRequest() + request = request_type() requests = [request] # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.streaming_pull), "__call__" - ) as call: + with mock.patch.object(type(client.transport.streaming_pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) call.return_value.read = mock.AsyncMock( @@ -2301,6 +2344,11 @@ async def test_streaming_pull_async(transport: str = "grpc_asyncio"): assert isinstance(message, pubsub.StreamingPullResponse) +@pytest.mark.asyncio +async def test_streaming_pull_async_from_dict(): + await test_streaming_pull_async(request_type=dict) + + def test_modify_push_config( transport: str = "grpc", request_type=pubsub.ModifyPushConfigRequest ): @@ -2314,7 +2362,7 @@ def test_modify_push_config( # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.modify_push_config), "__call__" + type(client.transport.modify_push_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2336,18 +2384,20 @@ def test_modify_push_config_from_dict(): @pytest.mark.asyncio -async def test_modify_push_config_async(transport: str = "grpc_asyncio"): +async def test_modify_push_config_async( + transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ModifyPushConfigRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_push_config), "__call__" + type(client.transport.modify_push_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2358,12 +2408,17 @@ async def test_modify_push_config_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ModifyPushConfigRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_modify_push_config_async_from_dict(): + await test_modify_push_config_async(request_type=dict) + + def test_modify_push_config_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -2374,7 +2429,7 @@ def test_modify_push_config_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.modify_push_config), "__call__" + type(client.transport.modify_push_config), "__call__" ) as call: call.return_value = None @@ -2403,7 +2458,7 @@ async def test_modify_push_config_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_push_config), "__call__" + type(client.transport.modify_push_config), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2426,7 +2481,7 @@ def test_modify_push_config_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_push_config), "__call__" + type(client.transport.modify_push_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2469,7 +2524,7 @@ async def test_modify_push_config_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_push_config), "__call__" + type(client.transport.modify_push_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2518,7 +2573,7 @@ def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) @@ -2531,6 +2586,7 @@ def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRe assert args[0] == pubsub.GetSnapshotRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) assert response.name == "name_value" @@ -2543,19 +2599,19 @@ def test_get_snapshot_from_dict(): @pytest.mark.asyncio -async def test_get_snapshot_async(transport: str = "grpc_asyncio"): +async def test_get_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.GetSnapshotRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Snapshot(name="name_value", topic="topic_value",) @@ -2567,7 +2623,7 @@ async def test_get_snapshot_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.GetSnapshotRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -2577,6 +2633,11 @@ async def test_get_snapshot_async(transport: str = "grpc_asyncio"): assert response.topic == "topic_value" +@pytest.mark.asyncio +async def test_get_snapshot_async_from_dict(): + await test_get_snapshot_async(request_type=dict) + + def test_get_snapshot_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -2586,7 +2647,7 @@ def test_get_snapshot_field_headers(): request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: call.return_value = pubsub.Snapshot() client.get_snapshot(request) @@ -2611,9 +2672,7 @@ async def test_get_snapshot_field_headers_async(): request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) await client.get_snapshot(request) @@ -2632,7 +2691,7 @@ def test_get_snapshot_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot() @@ -2664,9 +2723,7 @@ async def test_get_snapshot_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot() @@ -2707,7 +2764,7 @@ def test_list_snapshots( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSnapshotsResponse( next_page_token="next_page_token_value", @@ -2722,6 +2779,7 @@ def test_list_snapshots( assert args[0] == pubsub.ListSnapshotsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) assert response.next_page_token == "next_page_token_value" @@ -2732,19 +2790,19 @@ def test_list_snapshots_from_dict(): @pytest.mark.asyncio -async def test_list_snapshots_async(transport: str = "grpc_asyncio"): +async def test_list_snapshots_async( + transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.ListSnapshotsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_snapshots), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSnapshotsResponse(next_page_token="next_page_token_value",) @@ -2756,7 +2814,7 @@ async def test_list_snapshots_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.ListSnapshotsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSnapshotsAsyncPager) @@ -2764,6 +2822,11 @@ async def test_list_snapshots_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_snapshots_async_from_dict(): + await test_list_snapshots_async(request_type=dict) + + def test_list_snapshots_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -2773,7 +2836,7 @@ def test_list_snapshots_field_headers(): request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: call.return_value = pubsub.ListSnapshotsResponse() client.list_snapshots(request) @@ -2798,9 +2861,7 @@ async def test_list_snapshots_field_headers_async(): request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_snapshots), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSnapshotsResponse() ) @@ -2821,7 +2882,7 @@ def test_list_snapshots_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = pubsub.ListSnapshotsResponse() @@ -2853,9 +2914,7 @@ async def test_list_snapshots_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_snapshots), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSnapshotsResponse() @@ -2890,7 +2949,7 @@ def test_list_snapshots_pager(): client = SubscriberClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( pubsub.ListSnapshotsResponse( @@ -2924,7 +2983,7 @@ def test_list_snapshots_pages(): client = SubscriberClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_snapshots), "__call__") as call: + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( pubsub.ListSnapshotsResponse( @@ -2951,9 +3010,7 @@ async def test_list_snapshots_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_snapshots), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -2986,9 +3043,7 @@ async def test_list_snapshots_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_snapshots), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_snapshots), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3024,7 +3079,7 @@ def test_create_snapshot( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) @@ -3037,6 +3092,7 @@ def test_create_snapshot( assert args[0] == pubsub.CreateSnapshotRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) assert response.name == "name_value" @@ -3049,19 +3105,19 @@ def test_create_snapshot_from_dict(): @pytest.mark.asyncio -async def test_create_snapshot_async(transport: str = "grpc_asyncio"): +async def test_create_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.CreateSnapshotRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Snapshot(name="name_value", topic="topic_value",) @@ -3073,7 +3129,7 @@ async def test_create_snapshot_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.CreateSnapshotRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -3083,6 +3139,11 @@ async def test_create_snapshot_async(transport: str = "grpc_asyncio"): assert response.topic == "topic_value" +@pytest.mark.asyncio +async def test_create_snapshot_async_from_dict(): + await test_create_snapshot_async(request_type=dict) + + def test_create_snapshot_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -3092,7 +3153,7 @@ def test_create_snapshot_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: call.return_value = pubsub.Snapshot() client.create_snapshot(request) @@ -3117,9 +3178,7 @@ async def test_create_snapshot_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.create_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) await client.create_snapshot(request) @@ -3138,7 +3197,7 @@ def test_create_snapshot_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot() @@ -3176,9 +3235,7 @@ async def test_create_snapshot_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot() @@ -3225,7 +3282,7 @@ def test_update_snapshot( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) @@ -3238,6 +3295,7 @@ def test_update_snapshot( assert args[0] == pubsub.UpdateSnapshotRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) assert response.name == "name_value" @@ -3250,19 +3308,19 @@ def test_update_snapshot_from_dict(): @pytest.mark.asyncio -async def test_update_snapshot_async(transport: str = "grpc_asyncio"): +async def test_update_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.UpdateSnapshotRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Snapshot(name="name_value", topic="topic_value",) @@ -3274,7 +3332,7 @@ async def test_update_snapshot_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.UpdateSnapshotRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -3284,6 +3342,11 @@ async def test_update_snapshot_async(transport: str = "grpc_asyncio"): assert response.topic == "topic_value" +@pytest.mark.asyncio +async def test_update_snapshot_async_from_dict(): + await test_update_snapshot_async(request_type=dict) + + def test_update_snapshot_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -3293,7 +3356,7 @@ def test_update_snapshot_field_headers(): request.snapshot.name = "snapshot.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.update_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: call.return_value = pubsub.Snapshot() client.update_snapshot(request) @@ -3320,9 +3383,7 @@ async def test_update_snapshot_field_headers_async(): request.snapshot.name = "snapshot.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) await client.update_snapshot(request) @@ -3351,7 +3412,7 @@ def test_delete_snapshot( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3372,19 +3433,19 @@ def test_delete_snapshot_from_dict(): @pytest.mark.asyncio -async def test_delete_snapshot_async(transport: str = "grpc_asyncio"): +async def test_delete_snapshot_async( + transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = pubsub.DeleteSnapshotRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.delete_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3394,12 +3455,17 @@ async def test_delete_snapshot_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.DeleteSnapshotRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_snapshot_async_from_dict(): + await test_delete_snapshot_async(request_type=dict) + + def test_delete_snapshot_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -3409,7 +3475,7 @@ def test_delete_snapshot_field_headers(): request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: call.return_value = None client.delete_snapshot(request) @@ -3434,9 +3500,7 @@ async def test_delete_snapshot_field_headers_async(): request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_snapshot(request) @@ -3455,7 +3519,7 @@ def test_delete_snapshot_flattened(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.delete_snapshot), "__call__") as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3487,9 +3551,7 @@ async def test_delete_snapshot_flattened_async(): client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_snapshot), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3528,7 +3590,7 @@ def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.seek), "__call__") as call: + with mock.patch.object(type(client.transport.seek), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.SeekResponse() @@ -3541,6 +3603,7 @@ def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): assert args[0] == pubsub.SeekRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) @@ -3549,17 +3612,19 @@ def test_seek_from_dict(): @pytest.mark.asyncio -async def test_seek_async(transport: str = "grpc_asyncio"): +async def test_seek_async( + transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest +): client = SubscriberAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = pubsub.SeekRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._client._transport.seek), "__call__") as call: + with mock.patch.object(type(client.transport.seek), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) @@ -3569,12 +3634,17 @@ async def test_seek_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == pubsub.SeekRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.SeekResponse) +@pytest.mark.asyncio +async def test_seek_async_from_dict(): + await test_seek_async(request_type=dict) + + def test_seek_field_headers(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) @@ -3584,7 +3654,7 @@ def test_seek_field_headers(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.seek), "__call__") as call: + with mock.patch.object(type(client.transport.seek), "__call__") as call: call.return_value = pubsub.SeekResponse() client.seek(request) @@ -3611,7 +3681,7 @@ async def test_seek_field_headers_async(): request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._client._transport.seek), "__call__") as call: + with mock.patch.object(type(client.transport.seek), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) await client.seek(request) @@ -3664,7 +3734,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = SubscriberClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -3697,7 +3767,7 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.SubscriberGrpcTransport,) + assert isinstance(client.transport, transports.SubscriberGrpcTransport,) def test_subscriber_base_transport_error(): @@ -3818,7 +3888,7 @@ def test_subscriber_host_no_port(): api_endpoint="pubsub.googleapis.com" ), ) - assert client._transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == "pubsub.googleapis.com:443" def test_subscriber_host_with_port(): @@ -3828,7 +3898,7 @@ def test_subscriber_host_with_port(): api_endpoint="pubsub.googleapis.com:8000" ), ) - assert client._transport._host == "pubsub.googleapis.com:8000" + assert client.transport._host == "pubsub.googleapis.com:8000" def test_subscriber_grpc_transport_channel(): @@ -3840,6 +3910,7 @@ def test_subscriber_grpc_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None def test_subscriber_grpc_asyncio_transport_channel(): @@ -3851,6 +3922,7 @@ def test_subscriber_grpc_asyncio_transport_channel(): ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None 
@pytest.mark.parametrize( @@ -3896,6 +3968,7 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla quota_project_id=None, ) assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( @@ -3962,8 +4035,8 @@ def test_parse_snapshot_path(): def test_subscription_path(): - project = "squid" - subscription = "clam" + project = "oyster" + subscription = "nudibranch" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -3974,8 +4047,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "whelk", - "subscription": "octopus", + "project": "cuttlefish", + "subscription": "mussel", } path = SubscriberClient.subscription_path(**expected) @@ -3984,6 +4057,128 @@ def test_parse_subscription_path(): assert expected == actual +def test_topic_path(): + project = "winkle" + topic = "nautilus" + + expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) + actual = SubscriberClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "scallop", + "topic": "abalone", + } + path = SubscriberClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_topic_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SubscriberClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SubscriberClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + + expected = "folders/{folder}".format(folder=folder,) + actual = SubscriberClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SubscriberClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + + expected = "organizations/{organization}".format(organization=organization,) + actual = SubscriberClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SubscriberClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + + expected = "projects/{project}".format(project=project,) + actual = SubscriberClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SubscriberClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = SubscriberClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SubscriberClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_location_path(path) + assert expected == actual + + def test_client_withDEFAULT_CLIENT_INFO(): client_info = gapic_v1.client_info.ClientInfo() @@ -4015,7 +4210,7 @@ def test_set_iam_policy(transport: str = "grpc"): request = iam_policy.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -4046,9 +4241,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): request = iam_policy.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -4079,7 +4272,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -4104,9 +4297,7 @@ async def test_set_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.set_iam_policy(request) @@ -4124,7 +4315,7 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -4137,6 +4328,23 @@ def test_set_iam_policy_from_dict(): call.assert_called() +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + def test_get_iam_policy(transport: str = "grpc"): client = SubscriberClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -4147,7 +4355,7 @@ def test_get_iam_policy(transport: str = "grpc"): request = iam_policy.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -4178,9 +4386,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): request = iam_policy.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -4211,7 +4417,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -4236,9 +4442,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.get_iam_policy(request) @@ -4256,7 +4460,7 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -4269,6 +4473,23 @@ def test_get_iam_policy_from_dict(): call.assert_called() +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + def test_test_iam_permissions(transport: str = "grpc"): client = SubscriberClient( credentials=credentials.AnonymousCredentials(), transport=transport, @@ -4280,7 +4501,7 @@ def test_test_iam_permissions(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -4313,7 +4534,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4344,7 +4565,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -4371,7 +4592,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -4393,7 +4614,7 @@ def test_test_iam_permissions_from_dict(): client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = iam_policy.TestIamPermissionsResponse() @@ -4405,3 +4626,24 @@ def test_test_iam_permissions_from_dict(): } ) call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() From 51a66aa8afa2d6d522aba862cf52a55b6763c48c Mon Sep 17 00:00:00 2001 From: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Date: Wed, 21 Oct 2020 17:38:09 -0700 Subject: [PATCH 0601/1197] fix: fix mtls issue in handwritten layer (#226) * fix: fix mtls issue in handwritten layer * chore: update scripts * chore: update noxfile.py --- .../.kokoro/docs/common.cfg | 2 +- .../.kokoro/samples/python3.6/common.cfg | 6 +++++ .../.kokoro/samples/python3.7/common.cfg | 6 +++++ .../.kokoro/samples/python3.8/common.cfg | 6 +++++ .../.kokoro/test-samples.sh | 8 ++++++- packages/google-cloud-pubsub/docs/conf.py | 1 + .../cloud/pubsub_v1/publisher/client.py | 21 ++++++++++------- .../cloud/pubsub_v1/subscriber/client.py | 23 +++++++++++-------- .../samples/snippets/noxfile.py | 5 ++++ packages/google-cloud-pubsub/synth.metadata | 8 +++---- .../publisher/test_publisher_client.py | 12 ++++++++-- .../subscriber/test_subscriber_client.py | 12 ++++++++-- 12 files changed, 82 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg index 7815c2d6abb2..b81c7b901136 100644 --- 
a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg index 354ad19efb4c..6b989042284b 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-pubsub/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg index becd0399dfe1..e2cb0f168769 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-pubsub/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg index 685dfdc590c9..cb7a71d5c8b5 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-pubsub/.kokoro/test-samples.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh index 98851b56ba8f..6064e7ad6390 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then git checkout $LATEST_RELEASE fi +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -101,4 +107,4 @@ cd "$ROOT" # Workaround for Kokoro permissions issue: delete secrets rm testing/{test-env.sh,client-secrets.json,service-account.json} -exit "$RTN" \ No newline at end of file +exit "$RTN" diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index a785da8a80fc..48cf7364230e 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -39,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index f1e198b1ac90..f1de9f1f47ed 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -130,15 +130,19 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): target=os.environ.get("PUBSUB_EMULATOR_HOST") ) + # The GAPIC client has mTLS logic to determine the api endpoint and 
the + # ssl credentials to use. Here we create a GAPIC client to help compute the + # api endpoint and ssl credentials. The api endpoint will be used to set + # `self._target`, and ssl credentials will be passed to + # `grpc_helpers.create_channel` to establish a mTLS channel (if ssl + # credentials is not None). client_options = kwargs.get("client_options", None) - if ( - client_options - and "api_endpoint" in client_options - and isinstance(client_options["api_endpoint"], six.string_types) - ): - self._target = client_options["api_endpoint"] - else: - self._target = publisher_client.PublisherClient.SERVICE_ADDRESS + credentials = kwargs.get("credentials", None) + client_for_mtls_info = publisher_client.PublisherClient( + credentials=credentials, client_options=client_options + ) + + self._target = client_for_mtls_info._transport._host # Use a custom channel. # We need this in order to set appropriate default message size and @@ -149,6 +153,7 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): channel = grpc_helpers.create_channel( credentials=kwargs.pop("credentials", None), target=self.target, + ssl_credentials=client_for_mtls_info._transport._ssl_channel_credentials, scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, options={ "grpc.max_send_message_length": -1, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index e0b10c888464..e33a0e2e6043 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -16,7 +16,6 @@ import os import pkg_resources -import six import grpc @@ -82,16 +81,19 @@ def __init__(self, **kwargs): target=os.environ.get("PUBSUB_EMULATOR_HOST") ) - # api_endpoint wont be applied if 'transport' is passed in. 
+ # The GAPIC client has mTLS logic to determine the api endpoint and the + # ssl credentials to use. Here we create a GAPIC client to help compute the + # api endpoint and ssl credentials. The api endpoint will be used to set + # `self._target`, and ssl credentials will be passed to + # `grpc_helpers.create_channel` to establish a mTLS channel (if ssl + # credentials is not None). client_options = kwargs.get("client_options", None) - if ( - client_options - and "api_endpoint" in client_options - and isinstance(client_options["api_endpoint"], six.string_types) - ): - self._target = client_options["api_endpoint"] - else: - self._target = subscriber_client.SubscriberClient.SERVICE_ADDRESS + credentials = kwargs.get("credentials", None) + client_for_mtls_info = subscriber_client.SubscriberClient( + credentials=credentials, client_options=client_options + ) + + self._target = client_for_mtls_info._transport._host # Use a custom channel. # We need this in order to set appropriate default message size and @@ -102,6 +104,7 @@ def __init__(self, **kwargs): channel = grpc_helpers.create_channel( credentials=kwargs.pop("credentials", None), target=self.target, + ssl_credentials=client_for_mtls_info._transport._ssl_channel_credentials, scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, options={ "grpc.max_send_message_length": -1, diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 5660f08be441..f3a90583ad5a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -199,6 +199,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise 
Exception("Unable to detect repository root.") diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 148d44682fb1..347608c98e82 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,28 +4,28 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "c957047c84c5586e4a782e9ae297094be6cdba2e" + "sha": "0bf5d593573afea43bba7de90d2bb40ee0fc101e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" + "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" + "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6abb59097be84599a1d6091fe534a49e5c5cf948" + "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" } } ], diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 3b6aa1477e7b..0f661c2fa653 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -18,6 +18,7 @@ import inspect from google.auth import credentials +import grpc import mock import pytest @@ -81,7 +82,7 @@ def test_init_w_api_endpoint(): assert isinstance(client.api, publisher_client.PublisherClient) assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == "testendpoint.google.com" + ) == "testendpoint.google.com:443" def test_init_w_unicode_api_endpoint(): @@ -91,7 +92,7 @@ def test_init_w_unicode_api_endpoint(): assert 
isinstance(client.api, publisher_client.PublisherClient) assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == "testendpoint.google.com" + ) == "testendpoint.google.com:443" def test_init_w_empty_client_options(): @@ -104,8 +105,13 @@ def test_init_w_empty_client_options(): def test_init_client_options_pass_through(): + mock_ssl_creds = grpc.ssl_channel_credentials() + def init(self, *args, **kwargs): self.kwargs = kwargs + self._transport = mock.Mock() + self._transport._host = "testendpoint.google.com" + self._transport._ssl_channel_credentials = mock_ssl_creds with mock.patch.object(publisher_client.PublisherClient, "__init__", init): client = publisher.Client( @@ -119,6 +125,8 @@ def init(self, *args, **kwargs): assert client_options.get("quota_project_id") == "42" assert client_options.get("scopes") == [] assert client_options.get("credentials_file") == "file.json" + assert client.target == "testendpoint.google.com" + assert client.api.transport._ssl_channel_credentials == mock_ssl_creds def test_init_emulator(monkeypatch): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 634351757ea1..d5628927609e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -13,6 +13,7 @@ # limitations under the License. 
from google.auth import credentials +import grpc import mock from google.cloud.pubsub_v1 import subscriber @@ -42,7 +43,7 @@ def test_init_w_api_endpoint(): assert isinstance(client.api, subscriber_client.SubscriberClient) assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == "testendpoint.google.com" + ) == "testendpoint.google.com:443" def test_init_w_unicode_api_endpoint(): @@ -52,7 +53,7 @@ def test_init_w_unicode_api_endpoint(): assert isinstance(client.api, subscriber_client.SubscriberClient) assert (client.api._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == "testendpoint.google.com" + ) == "testendpoint.google.com:443" def test_init_w_empty_client_options(): @@ -65,8 +66,13 @@ def test_init_w_empty_client_options(): def test_init_client_options_pass_through(): + mock_ssl_creds = grpc.ssl_channel_credentials() + def init(self, *args, **kwargs): self.kwargs = kwargs + self._transport = mock.Mock() + self._transport._host = "testendpoint.google.com" + self._transport._ssl_channel_credentials = mock_ssl_creds with mock.patch.object(subscriber_client.SubscriberClient, "__init__", init): client = subscriber.Client( @@ -80,6 +86,8 @@ def init(self, *args, **kwargs): assert client_options.get("quota_project_id") == "42" assert client_options.get("scopes") == [] assert client_options.get("credentials_file") == "file.json" + assert client.target == "testendpoint.google.com" + assert client.api.transport._ssl_channel_credentials == mock_ssl_creds def test_init_emulator(monkeypatch): From e6457847fbacee330ffc12a14e9c4ee6be7a8fee Mon Sep 17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Tue, 10 Nov 2020 13:02:21 -0500 Subject: [PATCH 0602/1197] docs: document potentially unexpected blocking behavior of publish() method (#214) * docs: Document potentially unexpected blocking behavior of publish() method. 
* Update google/cloud/pubsub_v1/publisher/client.py Co-authored-by: Tres Seaver Co-authored-by: Prad Nelluru --- .../google/cloud/pubsub_v1/publisher/client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index f1de9f1f47ed..6a9418e69560 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -291,6 +291,8 @@ def publish( Add the given message to this object; this will cause it to be published once the batch either has enough messages or a sufficient period of time has elapsed. + This method may block if LimitExceededBehavior.BLOCK is used in the + flow control settings. Example: >>> from google.cloud import pubsub_v1 From 94622f4238170c3131591500e053c22cf5d7b090 Mon Sep 17 00:00:00 2001 From: fayssalmartanigcp <73672393+fayssalmartanigcp@users.noreply.github.com> Date: Tue, 10 Nov 2020 14:59:52 -0500 Subject: [PATCH 0603/1197] feat: Enable server side flow control by default with the option to turn it off (#231) * Enable server side flow control by default with the option to turn it off This change enables sending flow control settings automatically to the server. If flow_control.max_messages > 0 or flow_control.max_bytes > 0, flow control will be enforced at the server side (in addition to the client side). This behavior is enabled by default and users who would like to opt-out of this feature --in case they encouter issues with server side flow control-- can pass in use_legacy_flow_control=True in SubscriberClient.subscribe(). * Enable server side flow control by default with the option to turn it off This change enables sending flow control settings automatically to the server. 
If flow_control.max_messages > 0 or flow_control.max_bytes > 0, flow control will be enforced at the server side (in addition to the client side). This behavior is enabled by default and users who would like to opt-out of this feature --in case they encouter issues with server side flow control-- can pass in use_legacy_flow_control=true in subscriberclient.subscribe(). Co-authored-by: Tianzi Cai --- .../_protocol/streaming_pull_manager.py | 19 ++++++++++++++++--- .../cloud/pubsub_v1/subscriber/client.py | 19 +++++++++++++++++-- .../subscriber/test_streaming_pull_manager.py | 10 ++++++++++ 3 files changed, 43 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 3159ba84838f..e8a4a8caf9d5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -105,6 +105,9 @@ class StreamingPullManager(object): ``projects/{project}/subscriptions/{subscription}``. flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow control settings. + use_legacy_flow_control (bool): Disables enforcing flow control settings + at the Cloud PubSub server and uses the less accurate method of only + enforcing flow control at the client side. scheduler (~google.cloud.pubsub_v1.scheduler.Scheduler): The scheduler to use to process messages. If not provided, a thread pool-based scheduler will be used. 
@@ -115,11 +118,17 @@ class StreamingPullManager(object): RPC instead of over the streaming RPC.""" def __init__( - self, client, subscription, flow_control=types.FlowControl(), scheduler=None + self, + client, + subscription, + flow_control=types.FlowControl(), + scheduler=None, + use_legacy_flow_control=False, ): self._client = client self._subscription = subscription self._flow_control = flow_control + self._use_legacy_flow_control = use_legacy_flow_control self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 self._ack_deadline = 10 @@ -587,8 +596,12 @@ def _get_initial_request(self, stream_ack_deadline_seconds): stream_ack_deadline_seconds=stream_ack_deadline_seconds, subscription=self._subscription, client_id=self._client_id, - max_outstanding_messages=self._flow_control.max_messages, - max_outstanding_bytes=self._flow_control.max_bytes, + max_outstanding_messages=( + 0 if self._use_legacy_flow_control else self._flow_control.max_messages + ), + max_outstanding_bytes=( + 0 if self._use_legacy_flow_control else self._flow_control.max_bytes + ), ) # Return the initial request. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index e33a0e2e6043..937be1552d50 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -157,7 +157,14 @@ def api(self): """The underlying gapic API client.""" return self._api - def subscribe(self, subscription, callback, flow_control=(), scheduler=None): + def subscribe( + self, + subscription, + callback, + flow_control=(), + scheduler=None, + use_legacy_flow_control=False, + ): """Asynchronously start receiving messages on a given subscription. 
This method starts a background thread to begin pulling messages from @@ -179,6 +186,10 @@ def subscribe(self, subscription, callback, flow_control=(), scheduler=None): settings may lead to faster throughput for messages that do not take a long time to process. + The ``use_legacy_flow_control`` argument disables enforcing flow control + settings at the Cloud PubSub server and uses the less accurate method of + only enforcing flow control at the client side. + This method starts the receiver in the background and returns a *Future* representing its execution. Waiting on the future (calling ``result()``) will block forever or until a non-recoverable error @@ -238,7 +249,11 @@ def callback(message): flow_control = types.FlowControl(*flow_control) manager = streaming_pull_manager.StreamingPullManager( - self, subscription, flow_control=flow_control, scheduler=scheduler + self, + subscription, + flow_control=flow_control, + scheduler=scheduler, + use_legacy_flow_control=use_legacy_flow_control, ) future = futures.StreamingPullFuture(manager) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index d3eb4351bde8..242c0804ac58 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -170,6 +170,16 @@ def test_streaming_flow_control(): assert request.max_outstanding_bytes == 1000 +def test_streaming_flow_control_use_legacy_flow_control(): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000), + use_legacy_flow_control=True, + ) + request = manager._get_initial_request(stream_ack_deadline_seconds=10) + assert request.max_outstanding_messages == 0 + assert request.max_outstanding_bytes == 0 + + def 
test_ack_deadline_with_max_duration_per_lease_extension(): manager = make_manager() manager._flow_control = types.FlowControl(max_duration_per_lease_extension=5) From c6285ea7201652b3dc2b85f655a3f4f07a443245 Mon Sep 17 00:00:00 2001 From: danavaziri-ga <74256206+danavaziri-ga@users.noreply.github.com> Date: Mon, 16 Nov 2020 15:21:24 -0600 Subject: [PATCH 0604/1197] feat: Add dead lettering max delivery attempts argument (#236) * Add max_delivery_attempts input to subsciber.py Add functionality so users could set max_delivery_attempts while creating or updating a subscription with dead lettering enabled instead of it's value being set to an arbitrary number. * Make max_delivery_attempts argument optional Make the argument optional and set the value to 5 if the user doesn't set it just like Cloud Pub/Sub does. * Add max_delivery_attempts parameter to create and update subscription with dead lettering calls Added max delivery attempts parameter to calls to update and create subscriber to match the methods in subscriber.py * Add constants and defaults for max_delivery_attempts argument * Fix comments related to added max_delivery_attempts parameter * Fix typo in max_delivery_attempts comments --- .../samples/snippets/subscriber.py | 41 ++++++++++++++----- .../samples/snippets/subscriber_test.py | 9 ++-- 2 files changed, 36 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 07da80d93b1f..2235c0060ec4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -90,7 +90,8 @@ def create_subscription(project_id, topic_id, subscription_id): def create_subscription_with_dead_letter_topic( - project_id, topic_id, subscription_id, dead_letter_topic_id + project_id, topic_id, subscription_id, dead_letter_topic_id, + max_delivery_attempts=5 ): """Create a subscription with dead 
letter policy.""" # [START pubsub_dead_letter_create_subscription] @@ -108,6 +109,9 @@ def create_subscription_with_dead_letter_topic( # TODO(developer): This is an existing dead letter topic that the subscription # with dead letter policy will forward dead letter messages to. # dead_letter_topic_id = "your-dead-letter-topic-id" + # TODO(developer): This is the maximum number of delivery attempts allowed + # for a message before it gets delivered to a dead letter topic. + # max_delivery_attempts = 5 publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() @@ -117,7 +121,8 @@ def create_subscription_with_dead_letter_topic( dead_letter_topic_path = publisher.topic_path(project_id, dead_letter_topic_id) dead_letter_policy = DeadLetterPolicy( - dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=10 + dead_letter_topic=dead_letter_topic_path, + max_delivery_attempts=max_delivery_attempts ) with subscriber: @@ -259,7 +264,8 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): def update_subscription_with_dead_letter_policy( - project_id, topic_id, subscription_id, dead_letter_topic_id + project_id, topic_id, subscription_id, dead_letter_topic_id, + max_delivery_attempts=5 ): """Update a subscription's dead letter policy.""" # [START pubsub_dead_letter_update_subscription] @@ -276,6 +282,9 @@ def update_subscription_with_dead_letter_policy( # TODO(developer): This is an existing dead letter topic that the subscription # with dead letter policy will forward dead letter messages to. # dead_letter_topic_id = "your-dead-letter-topic-id" + # TODO(developer): This is the maximum number of delivery attempts allowed + # for a message before it gets delivered to a dead letter topic. 
+ # max_delivery_attempts = 5 publisher = pubsub_v1.PublisherClient() subscriber = pubsub_v1.SubscriberClient() @@ -290,11 +299,12 @@ def update_subscription_with_dead_letter_policy( print(f"Before the update: {subscription_before_update}.") # Indicates which fields in the provided subscription to update. - update_mask = FieldMask(paths=["dead_letter_policy.max_delivery_attempts"]) + update_mask = FieldMask(paths=["dead_letter_policy"]) # Construct a dead letter policy you expect to have after the update. dead_letter_policy = DeadLetterPolicy( - dead_letter_topic=dead_letter_topic_path, max_delivery_attempts=20 + dead_letter_topic=dead_letter_topic_path, + max_delivery_attempts=max_delivery_attempts ) # Construct the subscription with the dead letter policy you expect to have @@ -339,12 +349,7 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): print(f"Before removing the policy: {subscription_before_update}.") # Indicates which fields in the provided subscription to update. - update_mask = FieldMask( - paths=[ - "dead_letter_policy.dead_letter_topic", - "dead_letter_policy.max_delivery_attempts", - ] - ) + update_mask = FieldMask(paths=["dead_letter_policy"]) # Construct the subscription (without any dead letter policy) that you # expect to have after the update. 
@@ -676,6 +681,12 @@ def callback(message): create_with_dead_letter_policy_parser.add_argument("topic_id") create_with_dead_letter_policy_parser.add_argument("subscription_id") create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_id") + create_with_dead_letter_policy_parser.add_argument( + "max_delivery_attempts", + type=int, + nargs="?", + default=5 + ) create_push_parser = subparsers.add_parser( "create-push", help=create_push_subscription.__doc__ @@ -707,6 +718,12 @@ def callback(message): update_dead_letter_policy_parser.add_argument("topic_id") update_dead_letter_policy_parser.add_argument("subscription_id") update_dead_letter_policy_parser.add_argument("dead_letter_topic_id") + update_dead_letter_policy_parser.add_argument( + "max_delivery_attempts", + type=int, + nargs="?", + default=5 + ) remove_dead_letter_policy_parser = subparsers.add_parser( "remove-dead-letter-policy", help=remove_dead_letter_policy.__doc__ @@ -777,6 +794,7 @@ def callback(message): args.topic_id, args.subscription_id, args.dead_letter_topic_id, + args.max_delivery_attempts, ) elif args.command == "create-push": create_push_subscription( @@ -798,6 +816,7 @@ def callback(message): args.topic_id, args.subscription_id, args.dead_letter_topic_id, + args.max_delivery_attempts, ) elif args.command == "remove-dead-letter-policy": remove_dead_letter_policy(args.project_id, args.topic_id, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index d722ebdec8b4..e69212f8c206 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -32,6 +32,8 @@ SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID ENDPOINT = "https://{}.appspot.com/push".format(PROJECT_ID) NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT_ID) +DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 
+UPDATED_MAX_DELIVERY_ATTEMPTS = 20 @pytest.fixture(scope="module") @@ -214,18 +216,19 @@ def test_create_subscription_with_dead_letter_policy( out, _ = capsys.readouterr() assert f"Subscription created: {subscription_dlq}" in out assert f"It will forward dead letter messages to: {dead_letter_topic}" in out - assert "After 10 delivery attempts." in out + assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." in out def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): _ = subscriber.update_subscription_with_dead_letter_policy( - PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC, + UPDATED_MAX_DELIVERY_ATTEMPTS ) out, _ = capsys.readouterr() assert dead_letter_topic in out assert subscription_dlq in out - assert "max_delivery_attempts: 20" in out + assert f"max_delivery_attempts: {UPDATED_MAX_DELIVERY_ATTEMPTS}" in out def test_create_subscription_with_ordering( From 7586dce977b40ce77d55180472cd7241565adcfa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 23 Nov 2020 16:46:15 -0500 Subject: [PATCH 0605/1197] chore: release 2.2.0 (#234) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 20 ++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index ceb528423da8..0193fb54868d 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,26 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.2.0](https://www.github.com/googleapis/python-pubsub/compare/v2.1.0...v2.2.0) (2020-11-16) + + +### Features + +* Add dead lettering max delivery attempts argument 
([#236](https://www.github.com/googleapis/python-pubsub/issues/236)) ([7687ae5](https://www.github.com/googleapis/python-pubsub/commit/7687ae500bdb9c76e3ffb23302b4f32dc9627d81)) +* Enable server side flow control by default with the option to turn it off ([#231](https://www.github.com/googleapis/python-pubsub/issues/231)) ([94d738c](https://www.github.com/googleapis/python-pubsub/commit/94d738c07c6404a152c6729f5ba4b106b1fe9355)) + + +### Bug Fixes + +* fix mtls issue in handwritten layer ([#226](https://www.github.com/googleapis/python-pubsub/issues/226)) ([09a409c](https://www.github.com/googleapis/python-pubsub/commit/09a409c6240a74dcb46d8f3f86d4fb95a52274a7)) +* make fixup script consistent with migration docs ([#208](https://www.github.com/googleapis/python-pubsub/issues/208)) ([b64e218](https://www.github.com/googleapis/python-pubsub/commit/b64e2187ab0810437575580d6ddb5315ff60e274)) + + +### Documentation + +* document potentially unexpected blocking behavior of publish() method ([#214](https://www.github.com/googleapis/python-pubsub/issues/214)) ([b6d9bd7](https://www.github.com/googleapis/python-pubsub/commit/b6d9bd7c38d4fe597c25b7b5869fd4a1259c7687)) +* fix get topic_path in subscriber sample ([#210](https://www.github.com/googleapis/python-pubsub/issues/210)) ([7228f6c](https://www.github.com/googleapis/python-pubsub/commit/7228f6c9a4c050bf22bb4bc3582b89b04eaa8702)) + ## 2.1.0 09-21-2020 02:19 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 4b33ecac7d81..a50b8f23859a 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.1.0" +version = "2.2.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 72eb13e55579a13ca9dc01b49b48dc1452984845 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim 
<8822365+busunkim96@users.noreply.github.com> Date: Thu, 3 Dec 2020 16:12:03 -0700 Subject: [PATCH 0606/1197] chore: require samples checks (#245) Make samples kokoro sessions required --- .../.github/sync-repo-settings.yaml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 packages/google-cloud-pubsub/.github/sync-repo-settings.yaml diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml new file mode 100644 index 000000000000..af59935321a9 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -0,0 +1,13 @@ +# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings +# Rules for master branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `master` +- pattern: master + requiredStatusCheckContexts: + - 'Kokoro' + - 'cla/google' + - 'Samples - Lint' + - 'Samples - Python 3.6' + - 'Samples - Python 3.7' + - 'Samples - Python 3.8' From 4b49b6c3ff876a3341142343c6057da8f9df1f3b Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 11 Dec 2020 12:28:45 -0800 Subject: [PATCH 0607/1197] samples: fix flaky tests (#233) * fix: reorder tests * fix: increase timeout * fix: use backoff feat: Enable server side flow control by default with the option to turn it off (#231) * Enable server side flow control by default with the option to turn it off This change enables sending flow control settings automatically to the server. If flow_control.max_messages > 0 or flow_control.max_bytes > 0, flow control will be enforced at the server side (in addition to the client side). This behavior is enabled by default and users who would like to opt-out of this feature --in case they encouter issues with server side flow control-- can pass in use_legacy_flow_control=True in SubscriberClient.subscribe(). 
* Enable server side flow control by default with the option to turn it off This change enables sending flow control settings automatically to the server. If flow_control.max_messages > 0 or flow_control.max_bytes > 0, flow control will be enforced at the server side (in addition to the client side). This behavior is enabled by default and users who would like to opt-out of this feature --in case they encouter issues with server side flow control-- can pass in use_legacy_flow_control=true in subscriberclient.subscribe(). Co-authored-by: Tianzi Cai fix: replace AssertionError with NotFound fix: add another pytest fixture in failing test remove backoff * add py version in resource names * keep pulling until response is not None * use fstrings * change scope to session and set retry deadline * lint and increase timeout to 90 for dlq receive --- .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 31 ++--- .../samples/snippets/subscriber_test.py | 121 +++++++++--------- 3 files changed, 78 insertions(+), 76 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index e5d328183e15..84edd80118e2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==2.1.0 +google-cloud-pubsub==2.2.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 2235c0060ec4..aa5771b862db 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -90,8 +90,7 @@ def create_subscription(project_id, topic_id, subscription_id): def create_subscription_with_dead_letter_topic( - project_id, topic_id, subscription_id, dead_letter_topic_id, - max_delivery_attempts=5 + project_id, topic_id, 
subscription_id, dead_letter_topic_id, max_delivery_attempts=5 ): """Create a subscription with dead letter policy.""" # [START pubsub_dead_letter_create_subscription] @@ -122,7 +121,7 @@ def create_subscription_with_dead_letter_topic( dead_letter_policy = DeadLetterPolicy( dead_letter_topic=dead_letter_topic_path, - max_delivery_attempts=max_delivery_attempts + max_delivery_attempts=max_delivery_attempts, ) with subscriber: @@ -264,8 +263,7 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): def update_subscription_with_dead_letter_policy( - project_id, topic_id, subscription_id, dead_letter_topic_id, - max_delivery_attempts=5 + project_id, topic_id, subscription_id, dead_letter_topic_id, max_delivery_attempts=5 ): """Update a subscription's dead letter policy.""" # [START pubsub_dead_letter_update_subscription] @@ -304,7 +302,7 @@ def update_subscription_with_dead_letter_policy( # Construct a dead letter policy you expect to have after the update. dead_letter_policy = DeadLetterPolicy( dead_letter_topic=dead_letter_topic_path, - max_delivery_attempts=max_delivery_attempts + max_delivery_attempts=max_delivery_attempts, ) # Construct the subscription with the dead letter policy you expect to have @@ -483,6 +481,7 @@ def callback(message): def synchronous_pull(project_id, subscription_id): """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] + from google.api_core import retry from google.cloud import pubsub_v1 # TODO(developer) @@ -497,9 +496,11 @@ def synchronous_pull(project_id, subscription_id): # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. with subscriber: - # The subscriber pulls a specific number of messages. + # The subscriber pulls a specific number of messages. The actual + # number of messages pulled may be smaller than max_messages. 
response = subscriber.pull( - request={"subscription": subscription_path, "max_messages": NUM_MESSAGES} + request={"subscription": subscription_path, "max_messages": NUM_MESSAGES}, + retry=retry.Retry(deadline=300), ) ack_ids = [] @@ -526,6 +527,7 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): import sys import time + from google.api_core import retry from google.cloud import pubsub_v1 multiprocessing.log_to_stderr() @@ -541,7 +543,8 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): subscription_path = subscriber.subscription_path(project_id, subscription_id) response = subscriber.pull( - request={"subscription": subscription_path, "max_messages": 3} + request={"subscription": subscription_path, "max_messages": 3}, + retry=retry.Retry(deadline=300), ) # Start a process for each message based on its size modulo 10. @@ -682,10 +685,7 @@ def callback(message): create_with_dead_letter_policy_parser.add_argument("subscription_id") create_with_dead_letter_policy_parser.add_argument("dead_letter_topic_id") create_with_dead_letter_policy_parser.add_argument( - "max_delivery_attempts", - type=int, - nargs="?", - default=5 + "max_delivery_attempts", type=int, nargs="?", default=5 ) create_push_parser = subparsers.add_parser( @@ -719,10 +719,7 @@ def callback(message): update_dead_letter_policy_parser.add_argument("subscription_id") update_dead_letter_policy_parser.add_argument("dead_letter_topic_id") update_dead_letter_policy_parser.add_argument( - "max_delivery_attempts", - type=int, - nargs="?", - default=5 + "max_delivery_attempts", type=int, nargs="?", default=5 ) remove_dead_letter_policy_parser = subparsers.add_parser( diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index e69212f8c206..91a005ee24fd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -13,6 +13,7 @@ # limitations under the License. import os +import sys import uuid import backoff @@ -23,25 +24,26 @@ import subscriber UUID = uuid.uuid4().hex +PY_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] -TOPIC = "subscription-test-topic-" + UUID -DEAD_LETTER_TOPIC = "subscription-test-dead-letter-topic-" + UUID -SUBSCRIPTION_ADMIN = "subscription-test-subscription-admin-" + UUID -SUBSCRIPTION_ASYNC = "subscription-test-subscription-async-" + UUID -SUBSCRIPTION_SYNC = "subscription-test-subscription-sync-" + UUID -SUBSCRIPTION_DLQ = "subscription-test-subscription-dlq-" + UUID -ENDPOINT = "https://{}.appspot.com/push".format(PROJECT_ID) -NEW_ENDPOINT = "https://{}.appspot.com/push2".format(PROJECT_ID) +TOPIC = f"subscription-test-topic-{PY_VERSION}-{UUID}" +DEAD_LETTER_TOPIC = f"subscription-test-dead-letter-topic-{PY_VERSION}-{UUID}" +SUBSCRIPTION_ADMIN = f"subscription-test-subscription-admin-{PY_VERSION}-{UUID}" +SUBSCRIPTION_ASYNC = f"subscription-test-subscription-async-{PY_VERSION}-{UUID}" +SUBSCRIPTION_SYNC = f"subscription-test-subscription-sync-{PY_VERSION}-{UUID}" +SUBSCRIPTION_DLQ = f"subscription-test-subscription-dlq-{PY_VERSION}-{UUID}" +ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push" +NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2" DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 UPDATED_MAX_DELIVERY_ATTEMPTS = 20 -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC) @@ -55,7 +57,7 @@ def topic(publisher_client): publisher_client.delete_topic(request={"topic": topic.name}) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def dead_letter_topic(publisher_client): topic_path = 
publisher_client.topic_path(PROJECT_ID, DEAD_LETTER_TOPIC) @@ -69,14 +71,14 @@ def dead_letter_topic(publisher_client): publisher_client.delete_topic(request={"topic": dead_letter_topic.name}) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def subscriber_client(): subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def subscription_admin(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN @@ -94,7 +96,7 @@ def subscription_admin(subscriber_client, topic): yield subscription.name -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def subscription_sync(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_SYNC @@ -114,7 +116,7 @@ def subscription_sync(subscriber_client, topic): subscriber_client.delete_subscription(request={"subscription": subscription.name}) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def subscription_async(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ASYNC @@ -134,7 +136,7 @@ def subscription_async(subscriber_client, topic): subscriber_client.delete_subscription(request={"subscription": subscription.name}) -@pytest.fixture(scope="module") +@pytest.fixture(scope="session") def subscription_dlq(subscriber_client, topic, dead_letter_topic): from google.cloud.pubsub_v1.types import DeadLetterPolicy @@ -161,6 +163,13 @@ def subscription_dlq(subscriber_client, topic, dead_letter_topic): subscriber_client.delete_subscription(request={"subscription": subscription.name}) +def _publish_messages(publisher_client, topic, **attrs): + for n in range(5): + data = f"message {n}".encode("utf-8") + publish_future = publisher_client.publish(topic, data, **attrs) + publish_future.result() + + def 
test_list_in_topic(subscription_admin, capsys): @backoff.on_exception(backoff.expo, AssertionError, max_time=60) def eventually_consistent_test(): @@ -219,10 +228,25 @@ def test_create_subscription_with_dead_letter_policy( assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." in out +def test_receive_with_delivery_attempts( + publisher_client, topic, dead_letter_topic, subscription_dlq, capsys +): + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 90) + + out, _ = capsys.readouterr() + assert f"Listening for messages on {subscription_dlq}.." in out + assert "With delivery attempts: " in out + + def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): _ = subscriber.update_subscription_with_dead_letter_policy( - PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC, - UPDATED_MAX_DELIVERY_ATTEMPTS + PROJECT_ID, + TOPIC, + SUBSCRIPTION_DLQ, + DEAD_LETTER_TOPIC, + UPDATED_MAX_DELIVERY_ATTEMPTS, ) out, _ = capsys.readouterr() @@ -231,6 +255,16 @@ def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): assert f"max_delivery_attempts: {UPDATED_MAX_DELIVERY_ATTEMPTS}" in out +def test_remove_dead_letter_policy(subscription_dlq, capsys): + subscription_after_update = subscriber.remove_dead_letter_policy( + PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ + ) + + out, _ = capsys.readouterr() + assert subscription_dlq in out + assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" + + def test_create_subscription_with_ordering( subscriber_client, subscription_admin, capsys ): @@ -293,13 +327,6 @@ def eventually_consistent_test(): eventually_consistent_test() -def _publish_messages(publisher_client, topic, **attrs): - for n in range(5): - data = "message {}".format(n).encode("utf-8") - publish_future = publisher_client.publish(topic, data, **attrs) - publish_future.result() - - def test_receive(publisher_client, topic, 
subscription_async, capsys): _publish_messages(publisher_client, topic) @@ -340,6 +367,17 @@ def test_receive_with_flow_control(publisher_client, topic, subscription_async, assert "message" in out +def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): + + _publish_messages(publisher_client, topic) + + subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert subscription_async in out + assert "threw an exception" in out + + def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): _publish_messages(publisher_client, topic) @@ -360,36 +398,3 @@ def test_receive_synchronously_with_lease( out, _ = capsys.readouterr() assert f"Received and acknowledged 3 messages from {subscription_sync}." in out - - -def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): - - _publish_messages(publisher_client, topic) - - subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) - - out, _ = capsys.readouterr() - assert subscription_async in out - assert "threw an exception" in out - - -def test_receive_with_delivery_attempts( - publisher_client, topic, subscription_dlq, capsys -): - _publish_messages(publisher_client, topic) - - subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 15) - - out, _ = capsys.readouterr() - assert f"Listening for messages on {subscription_dlq}.." 
in out - assert "With delivery attempts: " in out - - -def test_remove_dead_letter_policy(subscription_dlq, capsys): - subscription_after_update = subscriber.remove_dead_letter_policy( - PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ - ) - - out, _ = capsys.readouterr() - assert subscription_dlq in out - assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" From 5ff5b497931c35f46fb0456a8a6f7999132713cc Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 15 Dec 2020 19:57:27 -0800 Subject: [PATCH 0608/1197] chore: add noxfile_config to samples dir (#255) --- .../samples/snippets/noxfile_config.py | 38 +++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/snippets/noxfile_config.py diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py new file mode 100644 index 000000000000..32f8b4351c77 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py @@ -0,0 +1,38 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default TEST_CONFIG_OVERRIDE for python repos. + +# You can copy this file into your directory, then it will be inported from +# the noxfile.py. + +# The source of truth: +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py + +TEST_CONFIG_OVERRIDE = { + # You can opt out from the test for specific Python versions. 
+ "ignored_versions": ["2.7"], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": True, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} From 94a7f4d0cd61ed0604dd6d069bae820869170447 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Tue, 15 Dec 2020 21:13:04 -0800 Subject: [PATCH 0609/1197] chore: pick up changes from synthtool (#253) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * chore(docs): update code of conduct of synthtool and templates Source-Author: Christopher Wilcox Source-Date: Thu Oct 22 14:22:01 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 5f6ef0ec5501d33c4667885b37a7685a30d41a76 Source-Link: https://github.com/googleapis/synthtool/commit/5f6ef0ec5501d33c4667885b37a7685a30d41a76 * docs: add proto-plus to intersphinx mapping Source-Author: Tim Swast Source-Date: Tue Oct 27 12:01:14 2020 -0500 Source-Repo: googleapis/synthtool Source-Sha: ea52b8a0bd560f72f376efcf45197fb7c8869120 Source-Link: https://github.com/googleapis/synthtool/commit/ea52b8a0bd560f72f376efcf45197fb7c8869120 * fix(python_library): fix external unit test dependencies I recently submitted https://github.com/googleapis/synthtool/pull/811/files, allowing external dependencies for unit tests. 
This fixes a small missing comma bug Source-Author: Daniel Sanche Source-Date: Thu Oct 29 16:58:01 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 6542bd723403513626f61642fc02ddca528409aa Source-Link: https://github.com/googleapis/synthtool/commit/6542bd723403513626f61642fc02ddca528409aa * chore: add type hint check Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Wed Nov 4 17:36:32 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 3d3e94c4e02370f307a9a200b0c743c3d8d19f29 Source-Link: https://github.com/googleapis/synthtool/commit/3d3e94c4e02370f307a9a200b0c743c3d8d19f29 * chore: add blacken to template Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Nov 5 15:22:03 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 1f1148d3c7a7a52f0c98077f976bd9b3c948ee2b Source-Link: https://github.com/googleapis/synthtool/commit/1f1148d3c7a7a52f0c98077f976bd9b3c948ee2b * fix: address lint issues Source-Author: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Nov 12 11:30:49 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: e89175cf074dccc4babb4eca66ae913696e47a71 Source-Link: https://github.com/googleapis/synthtool/commit/e89175cf074dccc4babb4eca66ae913696e47a71 * docs(python): update intersphinx for grpc and auth * docs(python): update intersphinx for grpc and auth * use https for python intersphinx Co-authored-by: Tim Swast Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Nov 18 14:37:25 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: 9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9 Source-Link: https://github.com/googleapis/synthtool/commit/9a7d9fbb7045c34c9d3d22c1ff766eeae51f04c9 * docs(python): fix intersphinx link for google-auth Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Thu Nov 19 10:16:05 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: a073c873f3928c561bdf87fdfbf1d081d1998984 Source-Link: https://github.com/googleapis/synthtool/commit/a073c873f3928c561bdf87fdfbf1d081d1998984 * chore: add config / docs for 'pre-commit' support Source-Author: Tres Seaver Source-Date: Tue Dec 1 16:01:20 2020 -0500 Source-Repo: googleapis/synthtool Source-Sha: 32af6da519a6b042e3da62008e2a75e991efb6b4 Source-Link: https://github.com/googleapis/synthtool/commit/32af6da519a6b042e3da62008e2a75e991efb6b4 * chore(deps): update precommit hook pre-commit/pre-commit-hooks to v3.3.0 Source-Author: WhiteSource Renovate Source-Date: Wed Dec 2 17:18:24 2020 +0100 Source-Repo: googleapis/synthtool Source-Sha: 69629b64b83c6421d616be2b8e11795738ec8a6c Source-Link: https://github.com/googleapis/synthtool/commit/69629b64b83c6421d616be2b8e11795738ec8a6c * test(python): give filesystem paths to pytest-cov https://pytest-cov.readthedocs.io/en/latest/config.html The pytest-cov docs seem to suggest a filesystem path is expected. 
Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Dec 2 09:28:04 2020 -0700 Source-Repo: googleapis/synthtool Source-Sha: f94318521f63085b9ccb43d42af89f153fb39f15 Source-Link: https://github.com/googleapis/synthtool/commit/f94318521f63085b9ccb43d42af89f153fb39f15 * chore: update noxfile.py.j2 * Update noxfile.py.j2 add changes from @glasnt to the template template to ensure that enforcing type hinting doesn't fail for repos with the sample noxfile (aka all samples repos) See https://github.com/GoogleCloudPlatform/python-docs-samples/pull/4869/files for context * fix typo Source-Author: Leah E. Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Thu Dec 3 13:44:30 2020 -0800 Source-Repo: googleapis/synthtool Source-Sha: 18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1 Source-Link: https://github.com/googleapis/synthtool/commit/18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1 --- .../.pre-commit-config.yaml | 17 ++ .../google-cloud-pubsub/CODE_OF_CONDUCT.md | 123 +++++--- packages/google-cloud-pubsub/CONTRIBUTING.rst | 29 +- packages/google-cloud-pubsub/docs/conf.py | 7 +- .../google/pubsub_v1/__init__.py | 4 +- packages/google-cloud-pubsub/noxfile.py | 11 +- .../samples/snippets/noxfile.py | 60 ++-- .../scripts/fixup_pubsub_v1_keywords.py | 279 +++++------------- .../readme-gen/templates/README.tmpl.rst | 4 +- .../templates/install_deps.tmpl.rst | 4 +- packages/google-cloud-pubsub/synth.metadata | 114 ++++++- 11 files changed, 348 insertions(+), 304 deletions(-) create mode 100644 packages/google-cloud-pubsub/.pre-commit-config.yaml diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml new file mode 100644 index 000000000000..6ad83346e261 --- /dev/null +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: 
https://github.com/pre-commit/pre-commit-hooks + rev: v3.3.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.8.4 + hooks: + - id: flake8 diff --git a/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md b/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md index b3d1f6029849..039f43681204 100644 --- a/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md +++ b/packages/google-cloud-pubsub/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. 
- -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. 
If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 7eb8028d56b0..db4cced2abdb 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. 
nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** @@ -130,6 +111,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. 
code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 48cf7364230e..f81d2979853c 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -345,10 +345,11 @@ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "python": ("https://python.readthedocs.org/en/latest/", None), + "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), - "grpc": ("https://grpc.io/grpc/python/", None), + "grpc": ("https://grpc.github.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 5e7a6cc72449..7ecad97eae61 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -90,6 +90,7 @@ "ModifyPushConfigRequest", "PublishRequest", "PublishResponse", + "PublisherClient", "PubsubMessage", "PullRequest", "PullResponse", @@ -101,11 +102,10 @@ "Snapshot", "StreamingPullRequest", "StreamingPullResponse", - "SubscriberClient", "Subscription", "Topic", "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", - "PublisherClient", + "SubscriberClient", ) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 6a59685cd580..913f271d59c9 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ 
b/packages/google-cloud-pubsub/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -72,16 +72,17 @@ def default(session): # Install all test dependencies, then install this package in-place. session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. session.run( "py.test", "--quiet", - "--cov=google.cloud.pubsub", - "--cov=google.cloud", - "--cov=tests.unit", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index f3a90583ad5a..bca0522ec4d9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -17,6 +17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -37,22 +38,28 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. 
Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -62,17 +69,17 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +88,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -91,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". 
This is used when running the linter to insure that import order is @@ -129,17 +136,30 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + session.run("black", *python_files) # # Sample Tests @@ -149,7 +169,7 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -175,14 +195,14 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -190,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) @@ -213,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 1c9ec6f8c0fc..c00db1ebc77f 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC @@ -20,11 +21,12 @@ import libcst as cst import pathlib import sys -from typing import Any, Callable, Dict, List, Sequence, Tuple +from typing import (Any, Callable, Dict, List, Sequence, Tuple) def partition( - predicate: Callable[[Any], bool], iterator: Sequence[Any] + predicate: Callable[[Any], bool], + iterator: Sequence[Any] ) -> Tuple[List[Any], List[Any]]: """A stable, out-of-place partition.""" results = ([], []) @@ -37,127 +39,39 @@ def partition( class pubsubCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ("retry", "timeout", "metadata") + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - "acknowledge": ( - "subscription", - "ack_ids", - ), - "create_snapshot": ( - "name", - "subscription", - "labels", - ), - "create_subscription": ( - "name", - "topic", - "push_config", - "ack_deadline_seconds", - "retain_acked_messages", - "message_retention_duration", - "labels", - "enable_message_ordering", - "expiration_policy", - "filter", - "dead_letter_policy", - "retry_policy", - "detached", - ), - "create_topic": ( - "name", - "labels", - "message_storage_policy", - "kms_key_name", - ), - "delete_snapshot": ("snapshot",), - "delete_subscription": 
("subscription",), - "delete_topic": ("topic",), - "detach_subscription": ("subscription",), - "get_snapshot": ("snapshot",), - "get_subscription": ("subscription",), - "get_topic": ("topic",), - "list_snapshots": ( - "project", - "page_size", - "page_token", - ), - "list_subscriptions": ( - "project", - "page_size", - "page_token", - ), - "list_topics": ( - "project", - "page_size", - "page_token", - ), - "list_topic_snapshots": ( - "topic", - "page_size", - "page_token", - ), - "list_topic_subscriptions": ( - "topic", - "page_size", - "page_token", - ), - "modify_ack_deadline": ( - "subscription", - "ack_ids", - "ack_deadline_seconds", - ), - "modify_push_config": ( - "subscription", - "push_config", - ), - "pull": ( - "subscription", - "max_messages", - "return_immediately", - ), - "seek": ( - "subscription", - "time", - "snapshot", - ), - "streaming_pull": ( - "subscription", - "stream_ack_deadline_seconds", - "ack_ids", - "modify_deadline_seconds", - "modify_deadline_ack_ids", - "client_id", - "max_outstanding_messages", - "max_outstanding_bytes", - ), - "update_snapshot": ( - "snapshot", - "update_mask", - ), - "update_subscription": ( - "subscription", - "update_mask", - ), - "update_topic": ( - "topic", - "update_mask", - ), - "get_iam_policy": ( - "resource", - "options", - ), - "set_iam_policy": ( - "resource", - "policy", - ), - "test_iam_permissions": ( - "resource", - "permissions", - ), - } + 'acknowledge': ('subscription', 'ack_ids', ), + 'create_snapshot': ('name', 'subscription', 'labels', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', ), + 'delete_snapshot': ('snapshot', ), + 'delete_subscription': ('subscription', ), + 'delete_topic': ('topic', ), + 
'detach_subscription': ('subscription', ), + 'get_snapshot': ('snapshot', ), + 'get_subscription': ('subscription', ), + 'get_topic': ('topic', ), + 'list_snapshots': ('project', 'page_size', 'page_token', ), + 'list_subscriptions': ('project', 'page_size', 'page_token', ), + 'list_topics': ('project', 'page_size', 'page_token', ), + 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), + 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), + 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), + 'modify_push_config': ('subscription', 'push_config', ), + 'publish': ('topic', 'messages', ), + 'pull': ('subscription', 'max_messages', 'return_immediately', ), + 'seek': ('subscription', 'time', 'snapshot', ), + 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), + 'update_snapshot': ('snapshot', 'update_mask', ), + 'update_subscription': ('subscription', 'update_mask', ), + 'update_topic': ('topic', 'update_mask', ), + + 'get_iam_policy': ('resource', 'options', ), + 'set_iam_policy': ('resource', 'policy', ), + 'test_iam_permissions': ('resource', 'permissions', ), - def __init__(self, use_keywords=False): - self._use_keywords = use_keywords + } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: try: @@ -175,80 +89,35 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, kwargs + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs ) - args, ctrl_args = args[: len(kword_params)], args[len(kword_params) :] - ctrl_kwargs.extend( - cst.Arg( - value=a.value, - keyword=cst.Name(value=ctrl), - equal=cst.AssignEqual( - whitespace_before=cst.SimpleWhitespace(""), - whitespace_after=cst.SimpleWhitespace(""), - ), - ) - for a, 
ctrl in zip(ctrl_args, self.CTRL_PARAMS) - ) + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - if self._use_keywords: - new_kwargs = [ - cst.Arg( - value=arg.value, - keyword=cst.Name(value=name), - equal=cst.AssignEqual( - whitespace_before=cst.SimpleWhitespace(""), - whitespace_after=cst.SimpleWhitespace(""), - ), + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) ) - for name, arg in zip(kword_params, args + kwargs) - ] - new_kwargs.extend( - [ - cst.Arg( - value=arg.value, - keyword=cst.Name(value=arg.keyword.value), - equal=cst.AssignEqual( - whitespace_before=cst.SimpleWhitespace(""), - whitespace_after=cst.SimpleWhitespace(""), - ), - ) - for arg in ctrl_kwargs - ] - ) - return updated.with_changes(args=new_kwargs) - else: - request_arg = cst.Arg( - value=cst.Dict( - [ - cst.DictElement( - cst.SimpleString('"{}"'.format(name)), - cst.Element(value=arg.value), - ) - for name, arg in zip(kword_params, args + kwargs) - ] - + [ - cst.DictElement( - cst.SimpleString('"{}"'.format(arg.keyword.value)), - cst.Element(value=arg.value), - ) - for arg in ctrl_kwargs - ] - ), - keyword=cst.Name("request"), - equal=cst.AssignEqual( - whitespace_before=cst.SimpleWhitespace(""), - whitespace_after=cst.SimpleWhitespace(""), - ), - ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. 
+ for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) - return updated.with_changes(args=[request_arg]) + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) def fix_files( in_dir: pathlib.Path, out_dir: pathlib.Path, - use_keywords: bool = False, *, transformer=pubsubCallTransformer(), ): @@ -261,12 +130,11 @@ def fix_files( pyfile_gen = ( pathlib.Path(os.path.join(root, f)) for root, _, files in os.walk(in_dir) - for f in files - if os.path.splitext(f)[1] == ".py" + for f in files if os.path.splitext(f)[1] == ".py" ) for fpath in pyfile_gen: - with open(fpath, "r") as f: + with open(fpath, 'r') as f: src = f.read() # Parse the code and insert method call fixes. @@ -278,11 +146,11 @@ def fix_files( updated_path.parent.mkdir(parents=True, exist_ok=True) # Generate the updated source file at the corresponding path. - with open(updated_path, "w") as f: + with open(updated_path, 'w') as f: f.write(updated.code) -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser( description="""Fix up source that uses the pubsub client library. @@ -297,34 +165,24 @@ def fix_files( These all constitute false negatives. The tool will also detect false positives when an API method shares a name with another method. 
-""" - ) +""") parser.add_argument( - "-d", - "--input-directory", + '-d', + '--input-directory', required=True, - dest="input_dir", - help="the input directory to walk for python files to fix up", + dest='input_dir', + help='the input directory to walk for python files to fix up', ) parser.add_argument( - "-o", - "--output-directory", + '-o', + '--output-directory', required=True, - dest="output_dir", - help="the directory to output files fixed via un-flattening", - ) - parser.add_argument( - "-k", - "--use-keywords", - required=False, - action="store_true", - dest="use_keywords", - help="Use keyword arguments instead of constructing a request", + dest='output_dir', + help='the directory to output files fixed via un-flattening', ) args = parser.parse_args() input_dir = pathlib.Path(args.input_dir) output_dir = pathlib.Path(args.output_dir) - use_keywords = args.use_keywords if not input_dir.is_dir(): print( f"input directory '{input_dir}' does not exist or is not a directory", @@ -346,5 +204,4 @@ def fix_files( ) sys.exit(-1) - transformer = pubsubCallTransformer(use_keywords=use_keywords) - fix_files(input_dir, output_dir, use_keywords, transformer=transformer) + fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst index df252dd6ebe9..4fd239765b0a 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/README.tmpl.rst @@ -6,7 +6,7 @@ =============================================================================== .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-pubsub&page=editor&open_in_editor={{folder}}/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst This directory contains samples for {{product.name}}. {{product.description}} @@ -46,7 +46,7 @@ Samples {% if not sample.hide_cloudshell_button %} .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst {% endif %} diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst index 7a47efe21cff..a0406dba8c84 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -5,14 +5,14 @@ Install Dependencies .. code-block:: bash - $ git clone https://github.com/googleapis/python-pubsub.git + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. 
Samples are compatible with Python 2.7 and 3.4+. .. code-block:: bash diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 347608c98e82..88336808d62c 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,28 +4,36 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "0bf5d593573afea43bba7de90d2bb40ee0fc101e" + "sha": "f3f4e5add32128bf238b2fbab3bcf0f5b23269d3" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "53eb2512a55caabcbad1898225080a2a3dfcb6aa", + "internalRef": "346818879" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "901ddd44e9ef7887ee681b9183bbdea99437fdcc" + "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" } } ], @@ -39,5 +47,103 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + 
".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "docs/multiprocessing.rst", + "google/cloud/pubsub_v1/proto/pubsub.proto", + "google/pubsub/__init__.py", + "google/pubsub/py.typed", + "google/pubsub_v1/__init__.py", + "google/pubsub_v1/py.typed", + "google/pubsub_v1/services/__init__.py", + "google/pubsub_v1/services/publisher/__init__.py", + "google/pubsub_v1/services/publisher/async_client.py", + "google/pubsub_v1/services/publisher/client.py", + "google/pubsub_v1/services/publisher/pagers.py", + "google/pubsub_v1/services/publisher/transports/__init__.py", + "google/pubsub_v1/services/publisher/transports/base.py", + "google/pubsub_v1/services/publisher/transports/grpc.py", + "google/pubsub_v1/services/publisher/transports/grpc_asyncio.py", + "google/pubsub_v1/services/subscriber/__init__.py", + "google/pubsub_v1/services/subscriber/async_client.py", + 
"google/pubsub_v1/services/subscriber/client.py", + "google/pubsub_v1/services/subscriber/pagers.py", + "google/pubsub_v1/services/subscriber/transports/__init__.py", + "google/pubsub_v1/services/subscriber/transports/base.py", + "google/pubsub_v1/services/subscriber/transports/grpc.py", + "google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py", + "google/pubsub_v1/types/__init__.py", + "google/pubsub_v1/types/pubsub.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/noxfile.py", + "scripts/decrypt-secrets.sh", + "scripts/fixup_pubsub_v1_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/pubsub_v1/__init__.py", + "tests/unit/gapic/pubsub_v1/test_publisher.py", + "tests/unit/gapic/pubsub_v1/test_subscriber.py" ] } \ No newline at end of file From 20a55f329f817914771de5d2634f8ca2dff47789 Mon Sep 17 00:00:00 2001 From: HemangChothani <50404902+HemangChothani@users.noreply.github.com> Date: Mon, 21 Dec 2020 16:39:25 -0500 Subject: [PATCH 0610/1197] fix: pass anonymous credentials for emulator (#250) --- .../google/cloud/pubsub_v1/publisher/client.py | 2 ++ .../google/cloud/pubsub_v1/subscriber/client.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 6a9418e69560..81cfdea100d5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -26,6 +26,7 @@ from google.api_core import gapic_v1 from 
google.api_core import grpc_helpers +from google.auth.credentials import AnonymousCredentials from google.oauth2 import service_account from google.cloud.pubsub_v1 import _gapic @@ -129,6 +130,7 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): kwargs["channel"] = grpc.insecure_channel( target=os.environ.get("PUBSUB_EMULATOR_HOST") ) + kwargs["credentials"] = AnonymousCredentials() # The GAPIC client has mTLS logic to determine the api endpoint and the # ssl credentials to use. Here we create a GAPIC client to help compute the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 937be1552d50..a1f93e52ee25 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -20,6 +20,7 @@ import grpc from google.api_core import grpc_helpers +from google.auth.credentials import AnonymousCredentials from google.oauth2 import service_account from google.cloud.pubsub_v1 import _gapic @@ -80,6 +81,7 @@ def __init__(self, **kwargs): kwargs["channel"] = grpc.insecure_channel( target=os.environ.get("PUBSUB_EMULATOR_HOST") ) + kwargs["credentials"] = AnonymousCredentials() # The GAPIC client has mTLS logic to determine the api endpoint and the # ssl credentials to use. 
Here we create a GAPIC client to help compute the From faf4fe42fcf54959901e044c95be8c7c8c3d9248 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 21 Dec 2020 13:47:17 -0800 Subject: [PATCH 0611/1197] fix: flaky samples tests (#263) * chore: turn off type hints * fix: try get instead of delete --- packages/google-cloud-pubsub/samples/snippets/noxfile_config.py | 2 +- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py index 32f8b4351c77..98c66af09ca2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py @@ -25,7 +25,7 @@ "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": True, + "enforce_type_hints": False, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. 
You can also use your own string diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 91a005ee24fd..b9f0f1784cd0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -145,7 +145,7 @@ def subscription_dlq(subscriber_client, topic, dead_letter_topic): ) try: - subscription = subscriber_client.delete_subscription( + subscription = subscriber_client.get_subscription( request={"subscription": subscription_path} ) except NotFound: From d1b3740d725d548dd8b44bdb222bf23b2f2111d4 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 23 Dec 2020 09:27:44 -0800 Subject: [PATCH 0612/1197] fix: remove grpc send/recieve limits (#259) --- .../google-cloud-pubsub/google/pubsub_v1/__init__.py | 4 ++-- .../services/publisher/transports/__init__.py | 1 - .../pubsub_v1/services/publisher/transports/grpc.py | 10 +++++++++- .../services/publisher/transports/grpc_asyncio.py | 8 ++++++++ .../services/subscriber/transports/__init__.py | 1 - .../pubsub_v1/services/subscriber/transports/grpc.py | 10 +++++++++- .../services/subscriber/transports/grpc_asyncio.py | 8 ++++++++ .../google/pubsub_v1/types/__init__.py | 1 - packages/google-cloud-pubsub/synth.metadata | 6 +++--- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 8 ++++++++ .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 8 ++++++++ 11 files changed, 55 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 7ecad97eae61..5e7a6cc72449 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -90,7 +90,6 @@ "ModifyPushConfigRequest", "PublishRequest", "PublishResponse", - "PublisherClient", "PubsubMessage", "PullRequest", "PullResponse", @@ 
-102,10 +101,11 @@ "Snapshot", "StreamingPullRequest", "StreamingPullResponse", + "SubscriberClient", "Subscription", "Topic", "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", - "SubscriberClient", + "PublisherClient", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index bf7dc8f2b26f..9eec7fc5de2e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = PublisherGrpcTransport _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport - __all__ = ( "PublisherTransport", "PublisherGrpcTransport", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 15de8f87d1b6..3661714b2751 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -150,6 +150,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -168,6 +172,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -194,7 +202,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. 
Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 31ad368f8a1e..c8945f8e3cb9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -195,6 +195,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -213,6 +217,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index 08282e11d4a3..0246cfeca2e2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = SubscriberGrpcTransport _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport - __all__ = ( "SubscriberTransport", "SubscriberGrpcTransport", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 5b39bb1d6b85..7b86cb63188b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -152,6 +152,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -170,6 +174,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._stubs = {} # type: Dict[str, Callable] @@ -196,7 +204,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + address (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index f64f1a18e9c2..307391dc205a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -197,6 +197,10 @@ def __init__( ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) self._ssl_channel_credentials = ssl_credentials else: @@ -215,6 +219,10 @@ def __init__( ssl_credentials=ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) # Run the base constructor. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 915c3f2e22fb..3b21183de7cb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -61,7 +61,6 @@ SeekResponse, ) - __all__ = ( "MessageStoragePolicy", "Topic", diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 88336808d62c..31f1bfd4444e 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "f3f4e5add32128bf238b2fbab3bcf0f5b23269d3" + "sha": "258414727827e56d2678ada28b9bbd88b5c6aa6f" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "53eb2512a55caabcbad1898225080a2a3dfcb6aa", - "internalRef": "346818879" + "sha": "0509f4bc8c86495f50a92f536bddc625797eda63", + "internalRef": "347398860" } }, { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index e955e5b82859..79fc0c60cb7d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -2672,6 +2672,10 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -2713,6 +2717,10 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, 
quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index f837df651833..184b54026e1e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -3966,6 +3966,10 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel assert transport._ssl_channel_credentials == mock_ssl_cred @@ -4007,6 +4011,10 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): ), ssl_credentials=mock_ssl_cred, quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], ) assert transport.grpc_channel == mock_grpc_channel From 29c804c8dabb841a749db1834d79d861245d52b3 Mon Sep 17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Mon, 28 Dec 2020 12:03:18 -0500 Subject: [PATCH 0613/1197] fix: Modify synth.py to update grpc transport options (#266) * fix: Modify synth.py to update grpc transport options. 
* fix: tweak regex replace * chore: regen other docs * chore: make replace required * chore: remove unused import Co-authored-by: Bu Sun Kim --- packages/google-cloud-pubsub/.flake8 | 1 + .../.pre-commit-config.yaml | 2 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 11 +- .../google/cloud/pubsub_v1/proto/pubsub.proto | 33 +- .../google/cloud/pubsub_v1/proto/schema.proto | 289 ++ .../cloud/pubsub_v1/publisher/client.py | 47 +- .../cloud/pubsub_v1/subscriber/client.py | 54 +- .../google/pubsub/__init__.py | 32 + .../google/pubsub_v1/__init__.py | 28 + .../services/publisher/async_client.py | 8 +- .../pubsub_v1/services/publisher/client.py | 19 +- .../services/publisher/transports/grpc.py | 8 +- .../publisher/transports/grpc_asyncio.py | 8 +- .../services/schema_service/__init__.py | 24 + .../services/schema_service/async_client.py | 858 ++++++ .../services/schema_service/client.py | 1050 +++++++ .../services/schema_service/pagers.py | 148 + .../schema_service/transports/__init__.py | 35 + .../schema_service/transports/base.py | 227 ++ .../schema_service/transports/grpc.py | 480 +++ .../schema_service/transports/grpc_asyncio.py | 493 ++++ .../services/subscriber/async_client.py | 4 +- .../pubsub_v1/services/subscriber/client.py | 4 +- .../services/subscriber/transports/grpc.py | 6 +- .../subscriber/transports/grpc_asyncio.py | 6 +- .../google/pubsub_v1/types/__init__.py | 28 + .../google/pubsub_v1/types/pubsub.py | 34 + .../google/pubsub_v1/types/schema.py | 249 ++ .../scripts/fixup_pubsub_v1_keywords.py | 8 +- packages/google-cloud-pubsub/synth.metadata | 112 +- packages/google-cloud-pubsub/synth.py | 22 +- .../unit/gapic/pubsub_v1/test_publisher.py | 102 +- .../gapic/pubsub_v1/test_schema_service.py | 2587 +++++++++++++++++ .../unit/gapic/pubsub_v1/test_subscriber.py | 2 + .../publisher/test_publisher_client.py | 4 +- .../subscriber/test_subscriber_client.py | 4 +- 36 files changed, 6763 insertions(+), 264 deletions(-) create mode 100644 
packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index ed9316381c9c..29227d4cf419 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 6ad83346e261..a9024b15d725 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -2,7 +2,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.3.0 + rev: v3.4.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index db4cced2abdb..290271295ffb 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -202,25 +202,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-pubsub/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. 
Reasons for this include: +3.6. Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 909863eb98b4..9bc678e3ae7c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -24,6 +24,7 @@ import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; +import "google/pubsub/v1/schema.proto"; option cc_enable_arenas = true; option csharp_namespace = "Google.Cloud.PubSub.V1"; @@ -42,8 +43,8 @@ service Publisher { "https://www.googleapis.com/auth/cloud-platform," "https://www.googleapis.com/auth/pubsub"; - // Creates the given topic with the given name. See the [resource name rules]( - // https://cloud.google.com/pubsub/docs/admin#resource_names). + // Creates the given topic with the given name. See the [resource name rules] + // (https://cloud.google.com/pubsub/docs/admin#resource_names). rpc CreateTopic(Topic) returns (Topic) { option (google.api.http) = { put: "/v1/{name=projects/*/topics/*}" @@ -143,6 +144,21 @@ message MessageStoragePolicy { repeated string allowed_persistence_regions = 1; } +// Settings for validating messages published against a schema. +message SchemaSettings { + // Required. The name of the schema that messages published should be + // validated against. Format is `projects/{project}/schemas/{schema}`. The + // value of this field will be `_deleted-schema_` if the schema has been + // deleted. + string schema = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; + + // The encoding of messages validated against `schema`. 
+ Encoding encoding = 2; +} + // A topic resource. message Topic { option (google.api.resource) = { @@ -173,6 +189,15 @@ message Topic { // // The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*`. string kms_key_name = 5; + + // Settings for validating messages published against a schema. + // + // EXPERIMENTAL: Schema support is in development and may not work yet. + SchemaSettings schema_settings = 6; + + // Reserved for future use. This field is set only in responses from the + // server; it is ignored if it is set in any requests. + bool satisfies_pzs = 7; } // A message that is published by publishers and consumed by subscribers. The @@ -587,8 +612,8 @@ service Subscriber { } // Seeks an existing subscription to a point in time or to a given snapshot, - // whichever is provided in the request. Snapshots are used in [Seek]( - // https://cloud.google.com/pubsub/docs/replay-overview) operations, which + // whichever is provided in the request. Snapshots are used in [Seek] + // (https://cloud.google.com/pubsub/docs/replay-overview) operations, which // allow you to manage message acknowledgments in bulk. That is, you can set // the acknowledgment state of messages in an existing subscription to the // state captured by a snapshot. Note that both the subscription and the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto new file mode 100644 index 000000000000..ae402ac4de2e --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto @@ -0,0 +1,289 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.pubsub.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/empty.proto"; + +option cc_enable_arenas = true; +option csharp_namespace = "Google.Cloud.PubSub.V1"; +option go_package = "google.golang.org/genproto/googleapis/pubsub/v1;pubsub"; +option java_multiple_files = true; +option java_outer_classname = "SchemaProto"; +option java_package = "com.google.pubsub.v1"; +option php_namespace = "Google\\Cloud\\PubSub\\V1"; +option ruby_package = "Google::Cloud::PubSub::V1"; + +// Service for doing schema-related operations. +// +// EXPERIMENTAL: The Schema service is in development and may not work yet. + +service SchemaService { + option (google.api.default_host) = "pubsub.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/pubsub"; + + // Creates a schema. + rpc CreateSchema(CreateSchemaRequest) returns (Schema) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/schemas" + body: "schema" + }; + option (google.api.method_signature) = "parent,schema,schema_id"; + } + + // Gets a schema. + rpc GetSchema(GetSchemaRequest) returns (Schema) { + option (google.api.http) = { + get: "/v1/{name=projects/*/schemas/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists schemas in a project. 
+ rpc ListSchemas(ListSchemasRequest) returns (ListSchemasResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*}/schemas" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a schema. + rpc DeleteSchema(DeleteSchemaRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/schemas/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Validates a schema. + rpc ValidateSchema(ValidateSchemaRequest) returns (ValidateSchemaResponse) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/schemas:validate" + body: "*" + }; + option (google.api.method_signature) = "parent,schema"; + } + + // Validates a message against a schema. + rpc ValidateMessage(ValidateMessageRequest) + returns (ValidateMessageResponse) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/schemas:validateMessage" + body: "*" + }; + } +} + +// A schema resource. +message Schema { + option (google.api.resource) = { + type: "pubsub.googleapis.com/Schema" + pattern: "projects/{project}/schemas/{schema}" + }; + + // Possible schema definition types. + enum Type { + // Default value. This value is unused. + TYPE_UNSPECIFIED = 0; + + // A Protocol Buffer schema definition. + PROTOCOL_BUFFER = 1; + + // An Avro schema definition. + AVRO = 2; + } + + // Required. Name of the schema. + // Format is `projects/{project}/schemas/{schema}`. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // The type of the schema definition. + Type type = 2; + + // The definition of the schema. This should contain a string representing + // the full definition of the schema that is a valid schema definition of + // the type specified in `type`. + string definition = 3; +} + +// Request for the CreateSchema method. +message CreateSchemaRequest { + // Required. The name of the project in which to create the schema. + // Format is `projects/{project-id}`. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "pubsub.googleapis.com/Schema" + } + ]; + + // Required. The schema object to create. + // + // This schema's `name` parameter is ignored. The schema object returned + // by CreateSchema will have a `name` made using the given `parent` and + // `schema_id`. + Schema schema = 2 [(google.api.field_behavior) = REQUIRED]; + + // The ID to use for the schema, which will become the final component of + // the schema's resource name. + // + // See https://cloud.google.com/pubsub/docs/admin#resource_names for resource + // name constraints. + string schema_id = 3; +} + +// View of Schema object fields to be returned by GetSchema and ListSchemas. +enum SchemaView { + // The default / unset value. + // The API will default to the BASIC view. + SCHEMA_VIEW_UNSPECIFIED = 0; + + // Include the name and type of the schema, but not the definition. + BASIC = 1; + + // Include all Schema object fields. + FULL = 2; +} + +// Request for the GetSchema method. +message GetSchemaRequest { + // Required. The name of the schema to get. + // Format is `projects/{project}/schemas/{schema}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; + + // The set of fields to return in the response. If not set, returns a Schema + // with `name` and `type`, but not `definition`. Set to `FULL` to retrieve all + // fields. + SchemaView view = 2; +} + +// Request for the `ListSchemas` method. +message ListSchemasRequest { + // Required. The name of the project in which to list schemas. + // Format is `projects/{project-id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // The set of Schema fields to return in the response. 
If not set, returns + // Schemas with `name` and `type`, but not `definition`. Set to `FULL` to + // retrieve all fields. + SchemaView view = 2; + + // Maximum number of schemas to return. + int32 page_size = 3; + + // The value returned by the last `ListSchemasResponse`; indicates that + // this is a continuation of a prior `ListSchemas` call, and that the + // system should return the next page of data. + string page_token = 4; +} + +// Response for the `ListSchemas` method. +message ListSchemasResponse { + // The resulting schemas. + repeated Schema schemas = 1; + + // If not empty, indicates that there may be more schemas that match the + // request; this value should be passed in a new `ListSchemasRequest`. + string next_page_token = 2; +} + +// Request for the `DeleteSchema` method. +message DeleteSchemaRequest { + // Required. Name of the schema to delete. + // Format is `projects/{project}/schemas/{schema}`. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; +} + +// Request for the `ValidateSchema` method. +message ValidateSchemaRequest { + // Required. The name of the project in which to validate schemas. + // Format is `projects/{project-id}`. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // Required. The schema object to validate. + Schema schema = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// Response for the `ValidateSchema` method. +message ValidateSchemaResponse {} + +// Request for the `ValidateMessage` method. +message ValidateMessageRequest { + // Required. The name of the project in which to validate schemas. + // Format is `projects/{project-id}`. 
+ string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + oneof schema_spec { + // Name of the schema against which to validate. + // + // Format is `projects/{project}/schemas/{schema}`. + string name = 2 [ + (google.api.resource_reference) = { type: "pubsub.googleapis.com/Schema" } + ]; + + // Ad-hoc schema against which to validate + Schema schema = 3; + } + + // Message to validate against the provided `schema_spec`. + bytes message = 4; + + // The encoding expected for messages + Encoding encoding = 5; +} + +// Response for the `ValidateMessage` method. +message ValidateMessageResponse {} + +// Possible encoding types for messages. +enum Encoding { + // Unspecified + ENCODING_UNSPECIFIED = 0; + + // JSON encoding + JSON = 1; + + // Binary encoding, as defined by the schema type. For some schema types, + // binary encoding may not be available. + BINARY = 2; +} diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 81cfdea100d5..9ad4a9474c44 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -21,11 +21,9 @@ import threading import time -import grpc import six from google.api_core import gapic_v1 -from google.api_core import grpc_helpers from google.auth.credentials import AnonymousCredentials from google.oauth2 import service_account @@ -39,9 +37,6 @@ from google.cloud.pubsub_v1.publisher.flow_controller import FlowController from google.pubsub_v1 import types as gapic_types from google.pubsub_v1.services.publisher import client as publisher_client -from google.pubsub_v1.services.publisher.transports import ( - grpc as publisher_grpc_transport, -) __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ 
-127,46 +122,11 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): # If so, create a grpc insecure channel with the emulator host # as the target. if os.environ.get("PUBSUB_EMULATOR_HOST"): - kwargs["channel"] = grpc.insecure_channel( - target=os.environ.get("PUBSUB_EMULATOR_HOST") - ) + kwargs["client_options"] = { + "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST") + } kwargs["credentials"] = AnonymousCredentials() - # The GAPIC client has mTLS logic to determine the api endpoint and the - # ssl credentials to use. Here we create a GAPIC client to help compute the - # api endpoint and ssl credentials. The api endpoint will be used to set - # `self._target`, and ssl credentials will be passed to - # `grpc_helpers.create_channel` to establish a mTLS channel (if ssl - # credentials is not None). - client_options = kwargs.get("client_options", None) - credentials = kwargs.get("credentials", None) - client_for_mtls_info = publisher_client.PublisherClient( - credentials=credentials, client_options=client_options - ) - - self._target = client_for_mtls_info._transport._host - - # Use a custom channel. - # We need this in order to set appropriate default message size and - # keepalive options. - if "transport" not in kwargs: - channel = kwargs.pop("channel", None) - if channel is None: - channel = grpc_helpers.create_channel( - credentials=kwargs.pop("credentials", None), - target=self.target, - ssl_credentials=client_for_mtls_info._transport._ssl_channel_credentials, - scopes=publisher_client.PublisherClient._DEFAULT_SCOPES, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - # cannot pass both 'channel' and 'credentials' - kwargs.pop("credentials", None) - transport = publisher_grpc_transport.PublisherGrpcTransport(channel=channel) - kwargs["transport"] = transport - # For a transient failure, retry publishing the message infinitely. 
self.publisher_options = types.PublisherOptions(*publisher_options) self._enable_message_ordering = self.publisher_options[0] @@ -174,6 +134,7 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): # Add the metrics headers, and instantiate the underlying GAPIC # client. self.api = publisher_client.PublisherClient(**kwargs) + self._target = self.api._transport._host self._batch_class = thread.Batch self.batch_settings = types.BatchSettings(*batch_settings) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index a1f93e52ee25..f306d2d99954 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -17,9 +17,6 @@ import os import pkg_resources -import grpc - -from google.api_core import grpc_helpers from google.auth.credentials import AnonymousCredentials from google.oauth2 import service_account @@ -28,9 +25,6 @@ from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager from google.pubsub_v1.services.subscriber import client as subscriber_client -from google.pubsub_v1.services.subscriber.transports import ( - grpc as subscriber_grpc_transport, -) __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ -78,52 +72,14 @@ def __init__(self, **kwargs): # If so, create a grpc insecure channel with the emulator host # as the target. if os.environ.get("PUBSUB_EMULATOR_HOST"): - kwargs["channel"] = grpc.insecure_channel( - target=os.environ.get("PUBSUB_EMULATOR_HOST") - ) + kwargs["client_options"] = { + "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST") + } kwargs["credentials"] = AnonymousCredentials() - # The GAPIC client has mTLS logic to determine the api endpoint and the - # ssl credentials to use. 
Here we create a GAPIC client to help compute the - # api endpoint and ssl credentials. The api endpoint will be used to set - # `self._target`, and ssl credentials will be passed to - # `grpc_helpers.create_channel` to establish a mTLS channel (if ssl - # credentials is not None). - client_options = kwargs.get("client_options", None) - credentials = kwargs.get("credentials", None) - client_for_mtls_info = subscriber_client.SubscriberClient( - credentials=credentials, client_options=client_options - ) - - self._target = client_for_mtls_info._transport._host - - # Use a custom channel. - # We need this in order to set appropriate default message size and - # keepalive options. - if "transport" not in kwargs: - channel = kwargs.pop("channel", None) - if channel is None: - channel = grpc_helpers.create_channel( - credentials=kwargs.pop("credentials", None), - target=self.target, - ssl_credentials=client_for_mtls_info._transport._ssl_channel_credentials, - scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - "grpc.keepalive_time_ms": 30000, - }.items(), - ) - # cannot pass both 'channel' and 'credentials' - kwargs.pop("credentials", None) - transport = subscriber_grpc_transport.SubscriberGrpcTransport( - channel=channel - ) - kwargs["transport"] = transport - - # Add the metrics headers, and instantiate the underlying GAPIC - # client. + # Instantiate the underlying GAPIC client. 
self._api = subscriber_client.SubscriberClient(**kwargs) + self._target = self._api._transport._host @classmethod def from_service_account_file(cls, filename, **kwargs): diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index f441543ccd16..b146fadef214 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -17,6 +17,10 @@ from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient from google.pubsub_v1.services.publisher.client import PublisherClient +from google.pubsub_v1.services.schema_service.async_client import ( + SchemaServiceAsyncClient, +) +from google.pubsub_v1.services.schema_service.client import SchemaServiceClient from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient from google.pubsub_v1.services.subscriber.client import SubscriberClient from google.pubsub_v1.types.pubsub import AcknowledgeRequest @@ -52,6 +56,7 @@ from google.pubsub_v1.types.pubsub import PushConfig from google.pubsub_v1.types.pubsub import ReceivedMessage from google.pubsub_v1.types.pubsub import RetryPolicy +from google.pubsub_v1.types.pubsub import SchemaSettings from google.pubsub_v1.types.pubsub import SeekRequest from google.pubsub_v1.types.pubsub import SeekResponse from google.pubsub_v1.types.pubsub import Snapshot @@ -62,20 +67,38 @@ from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest from google.pubsub_v1.types.pubsub import UpdateTopicRequest +from google.pubsub_v1.types.schema import CreateSchemaRequest +from google.pubsub_v1.types.schema import DeleteSchemaRequest +from google.pubsub_v1.types.schema import Encoding +from google.pubsub_v1.types.schema import GetSchemaRequest +from google.pubsub_v1.types.schema import ListSchemasRequest +from google.pubsub_v1.types.schema import 
ListSchemasResponse +from google.pubsub_v1.types.schema import Schema +from google.pubsub_v1.types.schema import SchemaView +from google.pubsub_v1.types.schema import ValidateMessageRequest +from google.pubsub_v1.types.schema import ValidateMessageResponse +from google.pubsub_v1.types.schema import ValidateSchemaRequest +from google.pubsub_v1.types.schema import ValidateSchemaResponse __all__ = ( "AcknowledgeRequest", + "CreateSchemaRequest", "CreateSnapshotRequest", "DeadLetterPolicy", + "DeleteSchemaRequest", "DeleteSnapshotRequest", "DeleteSubscriptionRequest", "DeleteTopicRequest", "DetachSubscriptionRequest", "DetachSubscriptionResponse", + "Encoding", "ExpirationPolicy", + "GetSchemaRequest", "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", + "ListSchemasRequest", + "ListSchemasResponse", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", @@ -99,6 +122,11 @@ "PushConfig", "ReceivedMessage", "RetryPolicy", + "Schema", + "SchemaServiceAsyncClient", + "SchemaServiceClient", + "SchemaSettings", + "SchemaView", "SeekRequest", "SeekResponse", "Snapshot", @@ -111,4 +139,8 @@ "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", + "ValidateMessageRequest", + "ValidateMessageResponse", + "ValidateSchemaRequest", + "ValidateSchemaResponse", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 5e7a6cc72449..d5a1de488ff7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -16,6 +16,7 @@ # from .services.publisher import PublisherClient +from .services.schema_service import SchemaServiceClient from .services.subscriber import SubscriberClient from .types.pubsub import AcknowledgeRequest from .types.pubsub import CreateSnapshotRequest @@ -50,6 +51,7 @@ from .types.pubsub import PushConfig from .types.pubsub import ReceivedMessage from 
.types.pubsub import RetryPolicy +from .types.pubsub import SchemaSettings from .types.pubsub import SeekRequest from .types.pubsub import SeekResponse from .types.pubsub import Snapshot @@ -60,21 +62,39 @@ from .types.pubsub import UpdateSnapshotRequest from .types.pubsub import UpdateSubscriptionRequest from .types.pubsub import UpdateTopicRequest +from .types.schema import CreateSchemaRequest +from .types.schema import DeleteSchemaRequest +from .types.schema import Encoding +from .types.schema import GetSchemaRequest +from .types.schema import ListSchemasRequest +from .types.schema import ListSchemasResponse +from .types.schema import Schema +from .types.schema import SchemaView +from .types.schema import ValidateMessageRequest +from .types.schema import ValidateMessageResponse +from .types.schema import ValidateSchemaRequest +from .types.schema import ValidateSchemaResponse __all__ = ( "AcknowledgeRequest", + "CreateSchemaRequest", "CreateSnapshotRequest", "DeadLetterPolicy", + "DeleteSchemaRequest", "DeleteSnapshotRequest", "DeleteSubscriptionRequest", "DeleteTopicRequest", "DetachSubscriptionRequest", "DetachSubscriptionResponse", + "Encoding", "ExpirationPolicy", + "GetSchemaRequest", "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", + "ListSchemasRequest", + "ListSchemasResponse", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", @@ -96,6 +116,10 @@ "PushConfig", "ReceivedMessage", "RetryPolicy", + "Schema", + "SchemaServiceClient", + "SchemaSettings", + "SchemaView", "SeekRequest", "SeekResponse", "Snapshot", @@ -107,5 +131,9 @@ "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", + "ValidateMessageRequest", + "ValidateMessageResponse", + "ValidateSchemaRequest", + "ValidateSchemaResponse", "PublisherClient", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 
fdf3aeb37a3a..810bf532e7bf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -48,6 +48,8 @@ class PublisherAsyncClient: DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + schema_path = staticmethod(PublisherClient.schema_path) + parse_schema_path = staticmethod(PublisherClient.parse_schema_path) subscription_path = staticmethod(PublisherClient.subscription_path) parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) topic_path = staticmethod(PublisherClient.topic_path) @@ -149,9 +151,9 @@ async def create_topic( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: - r"""Creates the given topic with the given name. See the `resource - name - rules `__. + r"""Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
Args: request (:class:`~.pubsub.Topic`): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 188b3dccb772..d2c8f853f312 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -154,6 +154,19 @@ def transport(self) -> PublisherTransport: """ return self._transport + @staticmethod + def schema_path(project: str, schema: str,) -> str: + """Return a fully-qualified schema string.""" + return "projects/{project}/schemas/{schema}".format( + project=project, schema=schema, + ) + + @staticmethod + def parse_schema_path(path: str) -> Dict[str, str]: + """Parse a schema path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def subscription_path(project: str, subscription: str,) -> str: """Return a fully-qualified subscription string.""" @@ -366,9 +379,9 @@ def create_topic( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: - r"""Creates the given topic with the given name. See the `resource - name - rules `__. + r"""Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 3661714b2751..d1212ecea03e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -153,6 +153,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) self._ssl_channel_credentials = ssl_credentials @@ -175,6 +176,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) @@ -245,9 +247,9 @@ def grpc_channel(self) -> grpc.Channel: def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: r"""Return a callable for the create topic method over gRPC. - Creates the given topic with the given name. See the `resource - name - rules `__. + Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). 
Returns: Callable[[~.Topic], diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index c8945f8e3cb9..2b15178eff37 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -198,6 +198,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) self._ssl_channel_credentials = ssl_credentials @@ -220,6 +221,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) @@ -249,9 +251,9 @@ def grpc_channel(self) -> aio.Channel: def create_topic(self) -> Callable[[pubsub.Topic], Awaitable[pubsub.Topic]]: r"""Return a callable for the create topic method over gRPC. - Creates the given topic with the given name. See the `resource - name - rules `__. + Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). Returns: Callable[[~.Topic], diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py new file mode 100644 index 000000000000..d93cff56ff55 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import SchemaServiceClient +from .async_client import SchemaServiceAsyncClient + +__all__ = ( + "SchemaServiceClient", + "SchemaServiceAsyncClient", +) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py new file mode 100644 index 000000000000..4a6d1db93f31 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -0,0 +1,858 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .client import SchemaServiceClient + + +class SchemaServiceAsyncClient: + """ Service for doing schema-related operations. + EXPERIMENTAL: The Schema service is in development and may not + work yet. 
+ """ + + _client: SchemaServiceClient + + DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + + schema_path = staticmethod(SchemaServiceClient.schema_path) + parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path) + + common_billing_account_path = staticmethod( + SchemaServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + SchemaServiceClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(SchemaServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + SchemaServiceClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + SchemaServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + SchemaServiceClient.parse_common_organization_path + ) + + common_project_path = staticmethod(SchemaServiceClient.common_project_path) + parse_common_project_path = staticmethod( + SchemaServiceClient.parse_common_project_path + ) + + common_location_path = staticmethod(SchemaServiceClient.common_location_path) + parse_common_location_path = staticmethod( + SchemaServiceClient.parse_common_location_path + ) + + from_service_account_file = SchemaServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SchemaServiceTransport: + """Return the transport used by the client instance. + + Returns: + SchemaServiceTransport: The transport used by the client instance. 
+ """ + return self._client.transport + + get_transport_class = functools.partial( + type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the schema service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SchemaServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = SchemaServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + async def create_schema( + self, + request: gp_schema.CreateSchemaRequest = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + schema_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Creates a schema. + + Args: + request (:class:`~.gp_schema.CreateSchemaRequest`): + The request object. Request for the CreateSchema method. + parent (:class:`str`): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`~.gp_schema.Schema`): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` + made using the given ``parent`` and ``schema_id``. + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (:class:`str`): + The ID to use for the schema, which will become the + final component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/admin#resource_names + for resource name constraints. + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.gp_schema.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema, schema_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gp_schema.CreateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_schema( + self, + request: schema.GetSchemaRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a schema. + + Args: + request (:class:`~.schema.GetSchemaRequest`): + The request object. Request for the GetSchema method. + name (:class:`str`): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema.GetSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_schemas( + self, + request: schema.ListSchemasRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasAsyncPager: + r"""Lists schemas in a project. + + Args: + request (:class:`~.schema.ListSchemasRequest`): + The request object. 
Request for the `ListSchemas` + method. + parent (:class:`str`): + Required. The name of the project in which to list + schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSchemasAsyncPager: + Response for the ``ListSchemas`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema.ListSchemasRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_schemas, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListSchemasAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_schema( + self, + request: schema.DeleteSchemaRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a schema. + + Args: + request (:class:`~.schema.DeleteSchemaRequest`): + The request object. Request for the `DeleteSchema` + method. + name (:class:`str`): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema.DeleteSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def validate_schema( + self, + request: gp_schema.ValidateSchemaRequest = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Validates a schema. + + Args: + request (:class:`~.gp_schema.ValidateSchemaRequest`): + The request object. Request for the `ValidateSchema` + method. + parent (:class:`str`): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`~.gp_schema.Schema`): + Required. The schema object to + validate. + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gp_schema.ValidateSchemaResponse: + Response for the ``ValidateSchema`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = gp_schema.ValidateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.validate_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def validate_message( + self, + request: schema.ValidateMessageRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ValidateMessageResponse: + r"""Validates a message against a schema. + + Args: + request (:class:`~.schema.ValidateMessageRequest`): + The request object. Request for the `ValidateMessage` + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.ValidateMessageResponse: + Response for the ``ValidateMessage`` method. + """ + # Create or coerce a protobuf request object. + + request = schema.ValidateMessageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.validate_message, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+                **JSON Example**
+                ::
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+                **YAML Example**
+                ::
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy.SetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.set_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + async def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+                **JSON Example**
+                ::
+                    {
+                      "bindings": [
+                        {
+                          "role": "roles/resourcemanager.organizationAdmin",
+                          "members": [
+                            "user:mike@example.com",
+                            "group:admins@example.com",
+                            "domain:google.com",
+                            "serviceAccount:my-project-id@appspot.gserviceaccount.com"
+                          ]
+                        },
+                        {
+                          "role": "roles/resourcemanager.organizationViewer",
+                          "members": ["user:eve@example.com"],
+                          "condition": {
+                            "title": "expirable access",
+                            "description": "Does not grant access after Sep 2020",
+                            "expression": "request.time <
+                            timestamp('2020-10-01T00:00:00.000Z')",
+                          }
+                        }
+                      ]
+                    }
+                **YAML Example**
+                ::
+                    bindings:
+                    - members:
+                      - user:mike@example.com
+                      - group:admins@example.com
+                      - domain:google.com
+                      - serviceAccount:my-project-id@appspot.gserviceaccount.com
+                      role: roles/resourcemanager.organizationAdmin
+                    - members:
+                      - user:eve@example.com
+                      role: roles/resourcemanager.organizationViewer
+                      condition:
+                        title: expirable access
+                        description: Does not grant access after Sep 2020
+                        expression: request.time < timestamp('2020-10-01T00:00:00.000Z')
+                For a description of IAM and its features, see the `IAM
+                developer's
+                guide <https://cloud.google.com/iam/docs>`__.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy.GetIamPolicyRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_iam_policy,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + async def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ("SchemaServiceAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py new file mode 100644 index 000000000000..0468ff993c90 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -0,0 +1,1050 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SchemaServiceGrpcTransport +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport + + +class SchemaServiceClientMeta(type): + """Metaclass for the SchemaService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] + _transport_registry["grpc"] = SchemaServiceGrpcTransport + _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[SchemaServiceTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. 
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class SchemaServiceClient(metaclass=SchemaServiceClientMeta):
+    """ Service for doing schema-related operations.
+    EXPERIMENTAL: The Schema service is in development and may not
+    work yet.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Convert api endpoint to mTLS endpoint.
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+        )
+
+        m = mtls_endpoint_re.match(api_endpoint)
+        name, mtls, sandbox, googledomain = m.groups()
+        if mtls or not googledomain:
+            return api_endpoint
+
+        if sandbox:
+            return api_endpoint.replace(
+                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+            )
+
+        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
+    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
+    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
+        DEFAULT_ENDPOINT
+    )
+
+    @classmethod
+    def from_service_account_file(cls, filename: str, *args, **kwargs):
+        """Creates an instance of this client using the provided credentials
+        file.
+
+        Args:
+            filename (str): The path to the service account private key json
+                file.
+            args: Additional arguments to pass to the constructor.
+            kwargs: Additional arguments to pass to the constructor.
+
+        Returns:
+            {@api.name}: The constructed client.
+        """
+        credentials = service_account.Credentials.from_service_account_file(filename)
+        kwargs["credentials"] = credentials
+        return cls(*args, **kwargs)
+
+    from_service_account_json = from_service_account_file
+
+    @property
+    def transport(self) -> SchemaServiceTransport:
+        """Return the transport used by the client instance.
+
+        Returns:
+            SchemaServiceTransport: The transport used by the client instance.
+        """
+        return self._transport
+
+    @staticmethod
+    def schema_path(project: str, schema: str,) -> str:
+        """Return a fully-qualified schema string."""
+        return "projects/{project}/schemas/{schema}".format(
+            project=project, schema=schema,
+        )
+
+    @staticmethod
+    def parse_schema_path(path: str) -> Dict[str, str]:
+        """Parse a schema path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/schemas/(?P<schema>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path:
str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(
+        self,
+        *,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, SchemaServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+    ) -> None:
+        """Instantiate the schema service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, ~.SchemaServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SchemaServiceTransport): + # transport is a SchemaServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def create_schema( + self, + request: gp_schema.CreateSchemaRequest = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + schema_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Creates a schema. + + Args: + request (:class:`~.gp_schema.CreateSchemaRequest`): + The request object. Request for the CreateSchema method. + parent (:class:`str`): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`~.gp_schema.Schema`): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` + made using the given ``parent`` and ``schema_id``. + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (:class:`str`): + The ID to use for the schema, which will become the + final component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/admin#resource_names + for resource name constraints. + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gp_schema.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema, schema_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gp_schema.CreateSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gp_schema.CreateSchemaRequest): + request = gp_schema.CreateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_schema( + self, + request: schema.GetSchemaRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a schema. 
+ + Args: + request (:class:`~.schema.GetSchemaRequest`): + The request object. Request for the GetSchema method. + name (:class:`str`): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema.GetSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.GetSchemaRequest): + request = schema.GetSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_schemas( + self, + request: schema.ListSchemasRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasPager: + r"""Lists schemas in a project. + + Args: + request (:class:`~.schema.ListSchemasRequest`): + The request object. Request for the `ListSchemas` + method. + parent (:class:`str`): + Required. The name of the project in which to list + schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListSchemasPager: + Response for the ``ListSchemas`` method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema.ListSchemasRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, schema.ListSchemasRequest): + request = schema.ListSchemasRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schemas] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemasPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_schema( + self, + request: schema.DeleteSchemaRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a schema. + + Args: + request (:class:`~.schema.DeleteSchemaRequest`): + The request object. Request for the `DeleteSchema` + method. + name (:class:`str`): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema.DeleteSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.DeleteSchemaRequest): + request = schema.DeleteSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def validate_schema( + self, + request: gp_schema.ValidateSchemaRequest = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Validates a schema. + + Args: + request (:class:`~.gp_schema.ValidateSchemaRequest`): + The request object. Request for the `ValidateSchema` + method. + parent (:class:`str`): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ schema (:class:`~.gp_schema.Schema`): + Required. The schema object to + validate. + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gp_schema.ValidateSchemaResponse: + Response for the ``ValidateSchema`` method. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gp_schema.ValidateSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gp_schema.ValidateSchemaRequest): + request = gp_schema.ValidateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def validate_message( + self, + request: schema.ValidateMessageRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ValidateMessageResponse: + r"""Validates a message against a schema. + + Args: + request (:class:`~.schema.ValidateMessageRequest`): + The request object. Request for the `ValidateMessage` + method. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.ValidateMessageResponse: + Response for the ``ValidateMessage`` method. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a schema.ValidateMessageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.ValidateMessageRequest): + request = schema.ValidateMessageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_message] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: iam_policy.SetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Sets the IAM access control policy on the specified + function. 
Replaces any existing policy. + Args: + request (:class:`~.iam_policy.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: iam_policy.GetIamPolicyRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy.Policy: + r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Args: + request (:class:`~.iam_policy.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy.TestIamPermissionsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Args: + request (:class:`~.iam_policy.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
class ListSchemasPager:
    """A pager over the results of ``list_schemas`` requests.

    Thinly wraps an initial :class:`~.schema.ListSchemasResponse` and
    provides an ``__iter__`` method that walks its ``schemas`` field,
    issuing further ``ListSchemas`` requests whenever the current response
    carries a ``next_page_token``.

    All the usual :class:`~.schema.ListSchemasResponse` attributes are
    available on the pager; when multiple requests have been made, only the
    most recent response is retained and used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[..., schema.ListSchemasResponse],
        request: schema.ListSchemasRequest,
        response: schema.ListSchemasResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`~.schema.ListSchemasRequest`):
                The initial request object.
            response (:class:`~.schema.ListSchemasResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation does not leak to callers.
        self._request = schema.ListSchemasRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    def pages(self) -> Iterable[schema.ListSchemasResponse]:
        # Yield the initial page, then keep requesting while the server
        # indicates more pages via next_page_token.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __iter__(self) -> Iterable[schema.Schema]:
        # Flatten the pages into a stream of individual schemas.
        for page in self.pages:
            for item in page.schemas:
                yield item

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
class ListSchemasAsyncPager:
    """An async pager over the results of ``list_schemas`` requests.

    Thinly wraps an initial :class:`~.schema.ListSchemasResponse` and
    provides an ``__aiter__`` method that walks its ``schemas`` field,
    issuing further ``ListSchemas`` requests whenever the current response
    carries a ``next_page_token``.

    All the usual :class:`~.schema.ListSchemasResponse` attributes are
    available on the pager; when multiple requests have been made, only the
    most recent response is retained and used for attribute lookup.
    """

    def __init__(
        self,
        method: Callable[..., Awaitable[schema.ListSchemasResponse]],
        request: schema.ListSchemasRequest,
        response: schema.ListSchemasResponse,
        *,
        metadata: Sequence[Tuple[str, str]] = ()
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (:class:`~.schema.ListSchemasRequest`):
                The initial request object.
            response (:class:`~.schema.ListSchemasResponse`):
                The initial response object.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.
        """
        self._method = method
        # Copy the request so page-token mutation does not leak to callers.
        self._request = schema.ListSchemasRequest(request)
        self._response = response
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attributes to the most recent response.
        return getattr(self._response, name)

    @property
    async def pages(self) -> AsyncIterable[schema.ListSchemasResponse]:
        # Yield the initial page, then keep awaiting further pages while
        # the server indicates more via next_page_token.
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = await self._method(self._request, metadata=self._metadata)
            self._response = page
            yield page

    def __aiter__(self) -> AsyncIterable[schema.Schema]:
        # Flatten the async page stream into individual schemas.
        async def _flatten():
            async for page in self.pages:
                for item in page.schemas:
                    yield item

        return _flatten()

    def __repr__(self) -> str:
        return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
# -*- coding: utf-8 -*-

# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from collections import OrderedDict
from typing import Dict, Type

from .base import SchemaServiceTransport
from .grpc import SchemaServiceGrpcTransport
from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport


# Registry mapping transport names to implementations; an OrderedDict so
# that iteration order (and hence the default transport choice) is stable.
_transport_registry = OrderedDict(
    [
        ("grpc", SchemaServiceGrpcTransport),
        ("grpc_asyncio", SchemaServiceGrpcAsyncIOTransport),
    ]
)  # type: Dict[str, Type[SchemaServiceTransport]]

__all__ = (
    "SchemaServiceTransport",
    "SchemaServiceGrpcTransport",
    "SchemaServiceGrpcAsyncIOTransport",
)
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import abc
import typing
import pkg_resources

from google import auth  # type: ignore
from google.api_core import exceptions  # type: ignore
from google.api_core import gapic_v1  # type: ignore
from google.api_core import retry as retries  # type: ignore
from google.auth import credentials  # type: ignore

from google.iam.v1 import iam_policy_pb2 as iam_policy  # type: ignore
from google.iam.v1 import policy_pb2 as policy  # type: ignore
from google.protobuf import empty_pb2 as empty  # type: ignore
from google.pubsub_v1.types import schema
from google.pubsub_v1.types import schema as gp_schema


# Report the installed library version in the user-agent header; fall back
# to an empty ClientInfo when the distribution is not installed (e.g. when
# running from a source checkout).
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution("google-pubsub",).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


class SchemaServiceTransport(abc.ABC):
    """Abstract transport class for SchemaService.

    Concrete subclasses (gRPC, gRPC-asyncio) implement the per-RPC
    properties below; this base class handles credential resolution and
    pre-wraps each RPC with retry/timeout defaults.
    """

    # OAuth scopes requested when credentials are resolved from the
    # environment or loaded from a credentials file.
    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/cloud-platform",
        "https://www.googleapis.com/auth/pubsub",
    )

    def __init__(
        self,
        *,
        host: str = "pubsub.googleapis.com",
        credentials: credentials.Credentials = None,
        credentials_file: typing.Optional[str] = None,
        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
        quota_project_id: typing.Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

        # If no credentials are provided, then determine the appropriate
        # defaults. Explicit credentials and a credentials file are
        # mutually exclusive.
        if credentials and credentials_file:
            raise exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        # Resolution order: explicit file, then explicit credentials, then
        # application default credentials from the environment.
        if credentials_file is not None:
            credentials, _ = auth.load_credentials_from_file(
                credentials_file, scopes=scopes, quota_project_id=quota_project_id
            )

        elif credentials is None:
            credentials, _ = auth.default(
                scopes=scopes, quota_project_id=quota_project_id
            )

        # Save the credentials.
        self._credentials = credentials

        # Lifted into its own function so it can be stubbed out during tests.
        self._prep_wrapped_messages(client_info)

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods: each RPC callable is wrapped once
        # with retry/timeout defaults and user-agent metadata, keyed by the
        # (subclass-provided) callable itself.
        self._wrapped_methods = {
            self.create_schema: gapic_v1.method.wrap_method(
                self.create_schema, default_timeout=None, client_info=client_info,
            ),
            self.get_schema: gapic_v1.method.wrap_method(
                self.get_schema, default_timeout=None, client_info=client_info,
            ),
            self.list_schemas: gapic_v1.method.wrap_method(
                self.list_schemas, default_timeout=None, client_info=client_info,
            ),
            self.delete_schema: gapic_v1.method.wrap_method(
                self.delete_schema, default_timeout=None, client_info=client_info,
            ),
            self.validate_schema: gapic_v1.method.wrap_method(
                self.validate_schema, default_timeout=None, client_info=client_info,
            ),
            self.validate_message: gapic_v1.method.wrap_method(
                self.validate_message, default_timeout=None, client_info=client_info,
            ),
        }

    # Each property below returns the transport-specific callable for one
    # RPC; concrete transports override these with real stubs. The Union
    # return types cover both sync and async transports.

    @property
    def create_schema(
        self,
    ) -> typing.Callable[
        [gp_schema.CreateSchemaRequest],
        typing.Union[gp_schema.Schema, typing.Awaitable[gp_schema.Schema]],
    ]:
        raise NotImplementedError()

    @property
    def get_schema(
        self,
    ) -> typing.Callable[
        [schema.GetSchemaRequest],
        typing.Union[schema.Schema, typing.Awaitable[schema.Schema]],
    ]:
        raise NotImplementedError()

    @property
    def list_schemas(
        self,
    ) -> typing.Callable[
        [schema.ListSchemasRequest],
        typing.Union[
            schema.ListSchemasResponse, typing.Awaitable[schema.ListSchemasResponse]
        ],
    ]:
        raise NotImplementedError()

    @property
    def delete_schema(
        self,
    ) -> typing.Callable[
        [schema.DeleteSchemaRequest],
        typing.Union[empty.Empty, typing.Awaitable[empty.Empty]],
    ]:
        raise NotImplementedError()

    @property
    def validate_schema(
        self,
    ) -> typing.Callable[
        [gp_schema.ValidateSchemaRequest],
        typing.Union[
            gp_schema.ValidateSchemaResponse,
            typing.Awaitable[gp_schema.ValidateSchemaResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def validate_message(
        self,
    ) -> typing.Callable[
        [schema.ValidateMessageRequest],
        typing.Union[
            schema.ValidateMessageResponse,
            typing.Awaitable[schema.ValidateMessageResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def set_iam_policy(
        self,
    ) -> typing.Callable[
        [iam_policy.SetIamPolicyRequest],
        typing.Union[policy.Policy, typing.Awaitable[policy.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def get_iam_policy(
        self,
    ) -> typing.Callable[
        [iam_policy.GetIamPolicyRequest],
        typing.Union[policy.Policy, typing.Awaitable[policy.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(
        self,
    ) -> typing.Callable[
        [iam_policy.TestIamPermissionsRequest],
        typing.Union[
            iam_policy.TestIamPermissionsResponse,
            typing.Awaitable[iam_policy.TestIamPermissionsResponse],
        ],
    ]:
        raise NotImplementedError()


__all__ = ("SchemaServiceTransport",)
+# + +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO + + +class SchemaServiceGrpcTransport(SchemaServiceTransport): + """gRPC backend transport for SchemaService. + + Service for doing schema-related operations. + EXPERIMENTAL: The Schema service is in development and may not + work yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. 
+ """ + self._ssl_channel_credentials = ssl_channel_credentials + + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_schema( + self, + ) -> Callable[[gp_schema.CreateSchemaRequest], gp_schema.Schema]: + r"""Return a callable for the create schema method over gRPC. + + Creates a schema. + + Returns: + Callable[[~.CreateSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CreateSchema", + request_serializer=gp_schema.CreateSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["create_schema"] + + @property + def get_schema(self) -> Callable[[schema.GetSchemaRequest], schema.Schema]: + r"""Return a callable for the get schema method over gRPC. + + Gets a schema. + + Returns: + Callable[[~.GetSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/GetSchema", + request_serializer=schema.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[[schema.ListSchemasRequest], schema.ListSchemasResponse]: + r"""Return a callable for the list schemas method over gRPC. + + Lists schemas in a project. + + Returns: + Callable[[~.ListSchemasRequest], + ~.ListSchemasResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemas", + request_serializer=schema.ListSchemasRequest.serialize, + response_deserializer=schema.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty.Empty]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a schema. + + Returns: + Callable[[~.DeleteSchemaRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchema", + request_serializer=schema.DeleteSchemaRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_schema"] + + @property + def validate_schema( + self, + ) -> Callable[[gp_schema.ValidateSchemaRequest], gp_schema.ValidateSchemaResponse]: + r"""Return a callable for the validate schema method over gRPC. + + Validates a schema. + + Returns: + Callable[[~.ValidateSchemaRequest], + ~.ValidateSchemaResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_schema" not in self._stubs: + self._stubs["validate_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateSchema", + request_serializer=gp_schema.ValidateSchemaRequest.serialize, + response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, + ) + return self._stubs["validate_schema"] + + @property + def validate_message( + self, + ) -> Callable[[schema.ValidateMessageRequest], schema.ValidateMessageResponse]: + r"""Return a callable for the validate message method over gRPC. + + Validates a message against a schema. + + Returns: + Callable[[~.ValidateMessageRequest], + ~.ValidateMessageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "validate_message" not in self._stubs: + self._stubs["validate_message"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateMessage", + request_serializer=schema.ValidateMessageRequest.serialize, + response_deserializer=schema.ValidateMessageResponse.deserialize, + ) + return self._stubs["validate_message"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SchemaServiceGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..3455c207bdb8 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -0,0 +1,493 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import SchemaServiceGrpcTransport + + +class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): + """gRPC AsyncIO backend transport for SchemaService. + + Service for doing schema-related operations. + EXPERIMENTAL: The Schema service is in development and may not + work yet. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._ssl_channel_credentials = ssl_channel_credentials
+
+ if channel:
+ # Sanity check: Ensure that channel and credentials are not both
+ # provided.
+ credentials = False
+
+ # If a channel was explicitly provided, set it. 
+ self._grpc_channel = channel + self._ssl_channel_credentials = None + elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Run the base constructor. 
+ super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_schema( + self, + ) -> Callable[[gp_schema.CreateSchemaRequest], Awaitable[gp_schema.Schema]]: + r"""Return a callable for the create schema method over gRPC. + + Creates a schema. + + Returns: + Callable[[~.CreateSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_schema" not in self._stubs: + self._stubs["create_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CreateSchema", + request_serializer=gp_schema.CreateSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["create_schema"] + + @property + def get_schema( + self, + ) -> Callable[[schema.GetSchemaRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the get schema method over gRPC. + + Gets a schema. + + Returns: + Callable[[~.GetSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_schema" not in self._stubs: + self._stubs["get_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/GetSchema", + request_serializer=schema.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["get_schema"] + + @property + def list_schemas( + self, + ) -> Callable[[schema.ListSchemasRequest], Awaitable[schema.ListSchemasResponse]]: + r"""Return a callable for the list schemas method over gRPC. + + Lists schemas in a project. + + Returns: + Callable[[~.ListSchemasRequest], + Awaitable[~.ListSchemasResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schemas" not in self._stubs: + self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemas", + request_serializer=schema.ListSchemasRequest.serialize, + response_deserializer=schema.ListSchemasResponse.deserialize, + ) + return self._stubs["list_schemas"] + + @property + def delete_schema( + self, + ) -> Callable[[schema.DeleteSchemaRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a schema. + + Returns: + Callable[[~.DeleteSchemaRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_schema" not in self._stubs: + self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchema", + request_serializer=schema.DeleteSchemaRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_schema"] + + @property + def validate_schema( + self, + ) -> Callable[ + [gp_schema.ValidateSchemaRequest], Awaitable[gp_schema.ValidateSchemaResponse] + ]: + r"""Return a callable for the validate schema method over gRPC. + + Validates a schema. + + Returns: + Callable[[~.ValidateSchemaRequest], + Awaitable[~.ValidateSchemaResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "validate_schema" not in self._stubs: + self._stubs["validate_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateSchema", + request_serializer=gp_schema.ValidateSchemaRequest.serialize, + response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, + ) + return self._stubs["validate_schema"] + + @property + def validate_message( + self, + ) -> Callable[ + [schema.ValidateMessageRequest], Awaitable[schema.ValidateMessageResponse] + ]: + r"""Return a callable for the validate message method over gRPC. + + Validates a message against a schema. + + Returns: + Callable[[~.ValidateMessageRequest], + Awaitable[~.ValidateMessageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "validate_message" not in self._stubs: + self._stubs["validate_message"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ValidateMessage", + request_serializer=schema.ValidateMessageRequest.serialize, + response_deserializer=schema.ValidateMessageResponse.deserialize, + ) + return self._stubs["validate_message"] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy.TestIamPermissionsRequest], + Awaitable[iam_policy.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + +__all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 61d79ce8b4d0..76d5204be277 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1547,8 +1547,8 @@ async def seek( ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request. Snapshots are - used in - `Seek `__ + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 60d44b5792a9..0e87d6a2d79c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1686,8 +1686,8 @@ def seek( ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request. 
Snapshots are - used in - `Seek `__ + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 7b86cb63188b..1be01d024437 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -155,6 +155,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) self._ssl_channel_credentials = ssl_credentials @@ -177,6 +178,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) @@ -720,8 +722,8 @@ def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request. Snapshots are - used in - `Seek `__ + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 307391dc205a..fa89c11bce2c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -200,6 +200,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) self._ssl_channel_credentials = ssl_credentials @@ -222,6 +223,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) @@ -736,8 +738,8 @@ def seek(self) -> Callable[[pubsub.SeekRequest], Awaitable[pubsub.SeekResponse]] Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request. Snapshots are - used in - `Seek `__ + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 3b21183de7cb..afff7e6df285 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -15,8 +15,23 @@ # limitations under the License. 
# +from .schema import ( + Schema, + CreateSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + DeleteSchemaRequest, + ValidateSchemaRequest, + ValidateSchemaResponse, + ValidateMessageRequest, + ValidateMessageResponse, + SchemaView, + Encoding, +) from .pubsub import ( MessageStoragePolicy, + SchemaSettings, Topic, PubsubMessage, GetTopicRequest, @@ -62,7 +77,20 @@ ) __all__ = ( + "Schema", + "CreateSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "DeleteSchemaRequest", + "ValidateSchemaRequest", + "ValidateSchemaResponse", + "ValidateMessageRequest", + "ValidateMessageResponse", + "SchemaView", + "Encoding", "MessageStoragePolicy", + "SchemaSettings", "Topic", "PubsubMessage", "GetTopicRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index c34cf422d046..39563bb58dc7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -21,12 +21,14 @@ from google.protobuf import duration_pb2 as duration # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.pubsub_v1.types import schema as gp_schema __protobuf__ = proto.module( package="google.pubsub.v1", manifest={ "MessageStoragePolicy", + "SchemaSettings", "Topic", "PubsubMessage", "GetTopicRequest", @@ -92,6 +94,25 @@ class MessageStoragePolicy(proto.Message): allowed_persistence_regions = proto.RepeatedField(proto.STRING, number=1) +class SchemaSettings(proto.Message): + r"""Settings for validating messages published against a schema. + + Attributes: + schema (str): + Required. The name of the schema that messages published + should be validated against. Format is + ``projects/{project}/schemas/{schema}``. 
The value of this + field will be ``_deleted-schema_`` if the schema has been + deleted. + encoding (~.gp_schema.Encoding): + The encoding of messages validated against ``schema``. + """ + + schema = proto.Field(proto.STRING, number=1) + + encoding = proto.Field(proto.ENUM, number=2, enum=gp_schema.Encoding,) + + class Topic(proto.Message): r"""A topic resource. @@ -119,6 +140,15 @@ class Topic(proto.Message): The expected format is ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. + schema_settings (~.pubsub.SchemaSettings): + Settings for validating messages published + against a schema. + EXPERIMENTAL: Schema support is in development + and may not work yet. + satisfies_pzs (bool): + Reserved for future use. This field is set + only in responses from the server; it is ignored + if it is set in any requests. """ name = proto.Field(proto.STRING, number=1) @@ -131,6 +161,10 @@ class Topic(proto.Message): kms_key_name = proto.Field(proto.STRING, number=5) + schema_settings = proto.Field(proto.MESSAGE, number=6, message="SchemaSettings",) + + satisfies_pzs = proto.Field(proto.BOOL, number=7) + class PubsubMessage(proto.Message): r"""A message that is published by publishers and consumed by diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py new file mode 100644 index 000000000000..2efa667c15ff --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -0,0 +1,249 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.pubsub.v1", + manifest={ + "SchemaView", + "Encoding", + "Schema", + "CreateSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "DeleteSchemaRequest", + "ValidateSchemaRequest", + "ValidateSchemaResponse", + "ValidateMessageRequest", + "ValidateMessageResponse", + }, +) + + +class SchemaView(proto.Enum): + r"""View of Schema object fields to be returned by GetSchema and + ListSchemas. + """ + SCHEMA_VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 + + +class Encoding(proto.Enum): + r"""Possible encoding types for messages.""" + ENCODING_UNSPECIFIED = 0 + JSON = 1 + BINARY = 2 + + +class Schema(proto.Message): + r"""A schema resource. + + Attributes: + name (str): + Required. Name of the schema. Format is + ``projects/{project}/schemas/{schema}``. + type_ (~.gp_schema.Schema.Type): + The type of the schema definition. + definition (str): + The definition of the schema. This should contain a string + representing the full definition of the schema that is a + valid schema definition of the type specified in ``type``. + """ + + class Type(proto.Enum): + r"""Possible schema definition types.""" + TYPE_UNSPECIFIED = 0 + PROTOCOL_BUFFER = 1 + AVRO = 2 + + name = proto.Field(proto.STRING, number=1) + + type_ = proto.Field(proto.ENUM, number=2, enum=Type,) + + definition = proto.Field(proto.STRING, number=3) + + +class CreateSchemaRequest(proto.Message): + r"""Request for the CreateSchema method. + + Attributes: + parent (str): + Required. 
The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + schema (~.gp_schema.Schema): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` made + using the given ``parent`` and ``schema_id``. + schema_id (str): + The ID to use for the schema, which will become the final + component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/admin#resource_names + for resource name constraints. + """ + + parent = proto.Field(proto.STRING, number=1) + + schema = proto.Field(proto.MESSAGE, number=2, message="Schema",) + + schema_id = proto.Field(proto.STRING, number=3) + + +class GetSchemaRequest(proto.Message): + r"""Request for the GetSchema method. + + Attributes: + name (str): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + view (~.gp_schema.SchemaView): + The set of fields to return in the response. If not set, + returns a Schema with ``name`` and ``type``, but not + ``definition``. Set to ``FULL`` to retrieve all fields. + """ + + name = proto.Field(proto.STRING, number=1) + + view = proto.Field(proto.ENUM, number=2, enum="SchemaView",) + + +class ListSchemasRequest(proto.Message): + r"""Request for the ``ListSchemas`` method. + + Attributes: + parent (str): + Required. The name of the project in which to list schemas. + Format is ``projects/{project-id}``. + view (~.gp_schema.SchemaView): + The set of Schema fields to return in the response. If not + set, returns Schemas with ``name`` and ``type``, but not + ``definition``. Set to ``FULL`` to retrieve all fields. + page_size (int): + Maximum number of schemas to return. + page_token (str): + The value returned by the last ``ListSchemasResponse``; + indicates that this is a continuation of a prior + ``ListSchemas`` call, and that the system should return the + next page of data. 
+ """ + + parent = proto.Field(proto.STRING, number=1) + + view = proto.Field(proto.ENUM, number=2, enum="SchemaView",) + + page_size = proto.Field(proto.INT32, number=3) + + page_token = proto.Field(proto.STRING, number=4) + + +class ListSchemasResponse(proto.Message): + r"""Response for the ``ListSchemas`` method. + + Attributes: + schemas (Sequence[~.gp_schema.Schema]): + The resulting schemas. + next_page_token (str): + If not empty, indicates that there may be more schemas that + match the request; this value should be passed in a new + ``ListSchemasRequest``. + """ + + @property + def raw_page(self): + return self + + schemas = proto.RepeatedField(proto.MESSAGE, number=1, message="Schema",) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteSchemaRequest(proto.Message): + r"""Request for the ``DeleteSchema`` method. + + Attributes: + name (str): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ValidateSchemaRequest(proto.Message): + r"""Request for the ``ValidateSchema`` method. + + Attributes: + parent (str): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + schema (~.gp_schema.Schema): + Required. The schema object to validate. + """ + + parent = proto.Field(proto.STRING, number=1) + + schema = proto.Field(proto.MESSAGE, number=2, message="Schema",) + + +class ValidateSchemaResponse(proto.Message): + r"""Response for the ``ValidateSchema`` method.""" + + +class ValidateMessageRequest(proto.Message): + r"""Request for the ``ValidateMessage`` method. + + Attributes: + parent (str): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + name (str): + Name of the schema against which to validate. + + Format is ``projects/{project}/schemas/{schema}``. 
+ schema (~.gp_schema.Schema): + Ad-hoc schema against which to validate + message (bytes): + Message to validate against the provided ``schema_spec``. + encoding (~.gp_schema.Encoding): + The encoding expected for messages + """ + + parent = proto.Field(proto.STRING, number=1) + + name = proto.Field(proto.STRING, number=2, oneof="schema_spec") + + schema = proto.Field( + proto.MESSAGE, number=3, oneof="schema_spec", message="Schema", + ) + + message = proto.Field(proto.BYTES, number=4) + + encoding = proto.Field(proto.ENUM, number=5, enum="Encoding",) + + +class ValidateMessageResponse(proto.Message): + r"""Response for the ``ValidateMessage`` method.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index c00db1ebc77f..071e3665febc 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,16 +42,20 @@ class pubsubCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'acknowledge': ('subscription', 'ack_ids', ), + 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', ), + 'delete_schema': ('name', ), 'delete_snapshot': ('snapshot', ), 'delete_subscription': ('subscription', ), 'delete_topic': ('topic', ), 'detach_subscription': 
('subscription', ), + 'get_schema': ('name', 'view', ), 'get_snapshot': ('snapshot', ), 'get_subscription': ('subscription', ), 'get_topic': ('topic', ), + 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), 'list_snapshots': ('project', 'page_size', 'page_token', ), 'list_subscriptions': ('project', 'page_size', 'page_token', ), 'list_topics': ('project', 'page_size', 'page_token', ), @@ -66,6 +70,8 @@ class pubsubCallTransformer(cst.CSTTransformer): 'update_snapshot': ('snapshot', 'update_mask', ), 'update_subscription': ('subscription', 'update_mask', ), 'update_topic': ('topic', 'update_mask', ), + 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), + 'validate_schema': ('parent', 'schema', ), 'get_iam_policy': ('resource', 'options', ), 'set_iam_policy': ('resource', 'policy', ), diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 31f1bfd4444e..4718c28f5dcc 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,37 +3,37 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "258414727827e56d2678ada28b9bbd88b5c6aa6f" + "remote": "git@github.com:googleapis/python-pubsub", + "sha": "aa45340d999f845c67396e8740e96f8a8caafd16" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0509f4bc8c86495f50a92f536bddc625797eda63", - "internalRef": "347398860" + "sha": "57fc4a8a94a5bd015a83fb0f0a1707f62254b2cd", + "internalRef": "348813319" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" } }, 
{ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "18c5dbdb4ac8cf75d4d8174e7b4558f48e76f8a1" + "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" } } ], @@ -47,103 +47,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - 
"docs/_templates/layout.html", - "docs/conf.py", - "docs/multiprocessing.rst", - "google/cloud/pubsub_v1/proto/pubsub.proto", - "google/pubsub/__init__.py", - "google/pubsub/py.typed", - "google/pubsub_v1/__init__.py", - "google/pubsub_v1/py.typed", - "google/pubsub_v1/services/__init__.py", - "google/pubsub_v1/services/publisher/__init__.py", - "google/pubsub_v1/services/publisher/async_client.py", - "google/pubsub_v1/services/publisher/client.py", - "google/pubsub_v1/services/publisher/pagers.py", - "google/pubsub_v1/services/publisher/transports/__init__.py", - "google/pubsub_v1/services/publisher/transports/base.py", - "google/pubsub_v1/services/publisher/transports/grpc.py", - "google/pubsub_v1/services/publisher/transports/grpc_asyncio.py", - "google/pubsub_v1/services/subscriber/__init__.py", - "google/pubsub_v1/services/subscriber/async_client.py", - "google/pubsub_v1/services/subscriber/client.py", - "google/pubsub_v1/services/subscriber/pagers.py", - "google/pubsub_v1/services/subscriber/transports/__init__.py", - "google/pubsub_v1/services/subscriber/transports/base.py", - "google/pubsub_v1/services/subscriber/transports/grpc.py", - "google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py", - "google/pubsub_v1/types/__init__.py", - "google/pubsub_v1/types/pubsub.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_pubsub_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/pubsub_v1/__init__.py", - "tests/unit/gapic/pubsub_v1/test_publisher.py", - 
"tests/unit/gapic/pubsub_v1/test_subscriber.py" ] } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index fe1b0838ea29..fb42355e073e 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -14,6 +14,7 @@ """This script is used to synthesize generated parts of this library.""" +import re import textwrap import synthtool as s @@ -63,6 +64,21 @@ \g<0>""", ) +# Modify GRPC options in transports. +count = s.replace( + ["google/pubsub_v1/services/*/transports/grpc*", "tests/unit/gapic/pubsub_v1/*"], + "options=\[.*?\]", + """options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ]""", + flags=re.MULTILINE | re.DOTALL, +) + +if count < 18: + raise Exception("Expected replacements for gRPC channel options not made.") + # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream # results. s.replace( @@ -83,11 +99,7 @@ # Docstrings of *_iam_policy() methods are formatted poorly and must be fixed # in order to avoid docstring format warnings in docs. 
-s.replace( - "google/pubsub_v1/services/*er/client.py", - r"(\s+)Args:", - "\n\g<1>Args:" -) +s.replace("google/pubsub_v1/services/*er/client.py", r"(\s+)Args:", "\n\g<1>Args:") s.replace( "google/pubsub_v1/services/*er/client.py", r"(\s+)\*\*JSON Example\*\*\s+::", diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 79fc0c60cb7d..d1c34d474ba7 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -43,6 +43,7 @@ from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.services.publisher import transports from google.pubsub_v1.types import pubsub +from google.pubsub_v1.types import schema def client_cert_source_callback(): @@ -440,7 +441,7 @@ def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( - name="name_value", kms_key_name="kms_key_name_value", + name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, ) response = client.create_topic(request) @@ -459,6 +460,8 @@ def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + def test_create_topic_from_dict(): test_create_topic(request_type=dict) @@ -480,7 +483,11 @@ async def test_create_topic_async( with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + ) ) response = await client.create_topic(request) @@ -498,6 +505,8 @@ async def test_create_topic_async( assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + @pytest.mark.asyncio async def test_create_topic_async_from_dict(): @@ -631,7 +640,7 @@ def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRe with mock.patch.object(type(client.transport.update_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( - name="name_value", kms_key_name="kms_key_name_value", + name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, ) response = client.update_topic(request) @@ -650,6 +659,8 @@ def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRe assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + def test_update_topic_from_dict(): test_update_topic(request_type=dict) @@ -671,7 +682,11 @@ async def test_update_topic_async( with mock.patch.object(type(client.transport.update_topic), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + ) ) response = await client.update_topic(request) @@ -689,6 +704,8 @@ async def test_update_topic_async( assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + @pytest.mark.asyncio async def test_update_topic_async_from_dict(): @@ -958,7 +975,7 @@ def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest) with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( - name="name_value", kms_key_name="kms_key_name_value", + name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, ) response = client.get_topic(request) @@ -977,6 +994,8 @@ def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest) assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + def test_get_topic_from_dict(): test_get_topic(request_type=dict) @@ -998,7 +1017,11 @@ async def test_get_topic_async( with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Topic(name="name_value", kms_key_name="kms_key_name_value",) + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + ) ) response = await client.get_topic(request) @@ -1016,6 +1039,8 @@ async def test_get_topic_async( assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + @pytest.mark.asyncio async def test_get_topic_async_from_dict(): @@ -2675,6 +2700,7 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) assert transport.grpc_channel == mock_grpc_channel @@ -2720,14 +2746,38 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) assert transport.grpc_channel == mock_grpc_channel -def test_subscription_path(): +def test_schema_path(): project = "squid" - subscription = "clam" + schema = "clam" + + expected = "projects/{project}/schemas/{schema}".format( + project=project, schema=schema, + ) + actual = PublisherClient.schema_path(project, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "whelk", + "schema": "octopus", + } + path = PublisherClient.schema_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_schema_path(path) + assert expected == actual + + +def test_subscription_path(): + project = "oyster" + subscription = "nudibranch" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -2738,8 +2788,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "whelk", - "subscription": "octopus", + "project": "cuttlefish", + "subscription": "mussel", } path = PublisherClient.subscription_path(**expected) @@ -2749,8 +2799,8 @@ def test_parse_subscription_path(): def test_topic_path(): - project = "oyster" - topic = "nudibranch" + project = "winkle" + topic = "nautilus" expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) actual = PublisherClient.topic_path(project, topic) @@ -2759,8 +2809,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "cuttlefish", - "topic": "mussel", + "project": "scallop", + "topic": "abalone", } path = PublisherClient.topic_path(**expected) @@ -2770,7 +2820,7 @@ def test_parse_topic_path(): def test_common_billing_account_path(): - billing_account = "winkle" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -2781,7 +2831,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "nautilus", + "billing_account": "clam", } path = PublisherClient.common_billing_account_path(**expected) @@ -2791,7 +2841,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "scallop" + folder = "whelk" expected = "folders/{folder}".format(folder=folder,) actual = PublisherClient.common_folder_path(folder) @@ -2800,7 +2850,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "abalone", + "folder": "octopus", } path = 
PublisherClient.common_folder_path(**expected) @@ -2810,7 +2860,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "squid" + organization = "oyster" expected = "organizations/{organization}".format(organization=organization,) actual = PublisherClient.common_organization_path(organization) @@ -2819,7 +2869,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "clam", + "organization": "nudibranch", } path = PublisherClient.common_organization_path(**expected) @@ -2829,7 +2879,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "whelk" + project = "cuttlefish" expected = "projects/{project}".format(project=project,) actual = PublisherClient.common_project_path(project) @@ -2838,7 +2888,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "octopus", + "project": "mussel", } path = PublisherClient.common_project_path(**expected) @@ -2848,8 +2898,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "oyster" - location = "nudibranch" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -2860,8 +2910,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "cuttlefish", - "location": "mussel", + "project": "scallop", + "location": "abalone", } path = PublisherClient.common_location_path(**expected) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py new file mode 100644 index 000000000000..79cc71324b02 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -0,0 +1,2587 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore +from google.iam.v1 import options_pb2 as options # type: ignore +from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.oauth2 import service_account +from google.pubsub_v1.services.schema_service import SchemaServiceAsyncClient +from google.pubsub_v1.services.schema_service import SchemaServiceClient +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.services.schema_service import transports +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SchemaServiceClient._get_default_mtls_endpoint(None) is None + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + SchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +def test_schema_service_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + + assert client.transport._host == "pubsub.googleapis.com:443" + + +def test_schema_service_client_get_transport_class(): + transport = SchemaServiceClient.get_transport_class() + assert transport == transports.SchemaServiceGrpcTransport + + transport = SchemaServiceClient.get_transport_class("grpc") + assert transport == 
transports.SchemaServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def test_schema_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "true"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "false"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_schema_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_schema_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_schema_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_schema_service_client_client_options_from_dict(): + with mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = SchemaServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_create_schema( + transport: str = "grpc", 
request_type=gp_schema.CreateSchemaRequest +): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + ) + + response = client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == gp_schema.CreateSchemaRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, gp_schema.Schema) + + assert response.name == "name_value" + + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + + assert response.definition == "definition_value" + + +def test_create_schema_from_dict(): + test_create_schema(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_schema_async( + transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + ) + ) + + response = await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == gp_schema.CreateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + + assert response.name == "name_value" + + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + + assert response.definition == "definition_value" + + +@pytest.mark.asyncio +async def test_create_schema_async_from_dict(): + await test_create_schema_async(request_type=dict) + + +def test_create_schema_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CreateSchemaRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_schema_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gp_schema.CreateSchemaRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_schema_flattened(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_schema( + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].schema == gp_schema.Schema(name="name_value") + + assert args[0].schema_id == "schema_id_value" + + +def test_create_schema_flattened_error(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_schema( + gp_schema.CreateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_schema_flattened_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_schema( + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].schema == gp_schema.Schema(name="name_value") + + assert args[0].schema_id == "schema_id_value" + + +@pytest.mark.asyncio +async def test_create_schema_flattened_error_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_schema( + gp_schema.CreateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + + +def test_get_schema(transport: str = "grpc", request_type=schema.GetSchemaRequest): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + ) + + response = client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.GetSchemaRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, schema.Schema) + + assert response.name == "name_value" + + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + + assert response.definition == "definition_value" + + +def test_get_schema_from_dict(): + test_get_schema(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_schema_async( + transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + ) + ) + + response = await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.GetSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + + assert response.name == "name_value" + + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + + assert response.definition == "definition_value" + + +@pytest.mark.asyncio +async def test_get_schema_async_from_dict(): + await test_get_schema_async(request_type=dict) + + +def test_get_schema_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.GetSchemaRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = schema.Schema() + + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_schema_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.GetSchemaRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_schema_flattened(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_schema(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_schema_flattened_error(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_schema( + schema.GetSchemaRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_schema_flattened_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_schema(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_schema_flattened_error_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_schema( + schema.GetSchemaRequest(), name="name_value", + ) + + +def test_list_schemas(transport: str = "grpc", request_type=schema.ListSchemasRequest): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.ListSchemasRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, pagers.ListSchemasPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_schemas_from_dict(): + test_list_schemas(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_schemas_async( + transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest +): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.ListSchemasRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSchemasAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_schemas_async_from_dict(): + await test_list_schemas_async(request_type=dict) + + +def test_list_schemas_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemasRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = schema.ListSchemasResponse() + + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schemas_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemasRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse() + ) + + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_schemas_flattened(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemasResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_schemas(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_schemas_flattened_error(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema.ListSchemasRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_schemas_flattened_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemasResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_schemas(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_schemas_flattened_error_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_schemas( + schema.ListSchemasRequest(), parent="parent_value", + ) + + +def test_list_schemas_pager(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + next_page_token="abc", + ), + schema.ListSchemasResponse(schemas=[], next_page_token="def",), + schema.ListSchemasResponse( + schemas=[schema.Schema(),], next_page_token="ghi", + ), + schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_schemas(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + +def test_list_schemas_pages(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemasResponse( + schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + next_page_token="abc", + ), + schema.ListSchemasResponse(schemas=[], next_page_token="def",), + schema.ListSchemasResponse( + schemas=[schema.Schema(),], next_page_token="ghi", + ), + schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), + RuntimeError, + ) + pages = list(client.list_schemas(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_schemas_async_pager(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + next_page_token="abc", + ), + schema.ListSchemasResponse(schemas=[], next_page_token="def",), + schema.ListSchemasResponse( + schemas=[schema.Schema(),], next_page_token="ghi", + ), + schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), + RuntimeError, + ) + async_pager = await client.list_schemas(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schema.Schema) for i in responses) + + +@pytest.mark.asyncio +async def test_list_schemas_async_pages(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_schemas), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + next_page_token="abc", + ), + schema.ListSchemasResponse(schemas=[], next_page_token="def",), + schema.ListSchemasResponse( + schemas=[schema.Schema(),], next_page_token="ghi", + ), + schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_schemas(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_delete_schema( + transport: str = "grpc", request_type=schema.DeleteSchemaRequest +): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_schema_from_dict(): + test_delete_schema(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_schema_async( + transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_schema_async_from_dict(): + await test_delete_schema_async(request_type=dict) + + +def test_delete_schema_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = None + + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schema_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_schema_flattened(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_schema(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_schema_flattened_error(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schema( + schema.DeleteSchemaRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_schema_flattened_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_schema(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_schema_flattened_error_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_schema( + schema.DeleteSchemaRequest(), name="name_value", + ) + + +def test_validate_schema( + transport: str = "grpc", request_type=gp_schema.ValidateSchemaRequest +): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.ValidateSchemaResponse() + + response = client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == gp_schema.ValidateSchemaRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +def test_validate_schema_from_dict(): + test_validate_schema(request_type=dict) + + +@pytest.mark.asyncio +async def test_validate_schema_async( + transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + + response = await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == gp_schema.ValidateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +@pytest.mark.asyncio +async def test_validate_schema_async_from_dict(): + await test_validate_schema_async(request_type=dict) + + +def test_validate_schema_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.ValidateSchemaRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value = gp_schema.ValidateSchemaResponse() + + client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_validate_schema_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.ValidateSchemaRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + + await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_validate_schema_flattened(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.ValidateSchemaResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.validate_schema( + parent="parent_value", schema=gp_schema.Schema(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].schema == gp_schema.Schema(name="name_value") + + +def test_validate_schema_flattened_error(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.validate_schema( + gp_schema.ValidateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_validate_schema_flattened_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.ValidateSchemaResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.validate_schema( + parent="parent_value", schema=gp_schema.Schema(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].schema == gp_schema.Schema(name="name_value") + + +@pytest.mark.asyncio +async def test_validate_schema_flattened_error_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.validate_schema( + gp_schema.ValidateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +def test_validate_message( + transport: str = "grpc", request_type=schema.ValidateMessageRequest +): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ValidateMessageResponse() + + response = client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.ValidateMessageRequest() + + # Establish that the response is the type that we expect. + + assert isinstance(response, schema.ValidateMessageResponse) + + +def test_validate_message_from_dict(): + test_validate_message(request_type=dict) + + +@pytest.mark.asyncio +async def test_validate_message_async( + transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest +): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + + response = await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.ValidateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.ValidateMessageResponse) + + +@pytest.mark.asyncio +async def test_validate_message_async_from_dict(): + await test_validate_message_async(request_type=dict) + + +def test_validate_message_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ValidateMessageRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value = schema.ValidateMessageResponse() + + client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_validate_message_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ValidateMessageRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + + await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = SchemaServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SchemaServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SchemaServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client.transport, transports.SchemaServiceGrpcTransport,) + + +def test_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.SchemaServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_schema_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SchemaServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_schema", + "get_schema", + "list_schemas", + "delete_schema", + "validate_schema", + "validate_message", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_schema_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_schema_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport() + adc.assert_called_once() + + +def test_schema_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + SchemaServiceClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +def test_schema_service_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.SchemaServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_schema_service_host_no_port(): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com" + ), + ) + assert client.transport._host == "pubsub.googleapis.com:443" + + +def test_schema_service_host_with_port(): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="pubsub.googleapis.com:8000" + ), + ) + assert client.transport._host == "pubsub.googleapis.com:8000" + + +def test_schema_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that channel is used if provided. + transport = transports.SchemaServiceGrpcTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_schema_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that channel is used if provided. 
+ transport = transports.SchemaServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + 
transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_schema_path(): + project = "squid" + schema = "clam" + + expected = "projects/{project}/schemas/{schema}".format( + project=project, schema=schema, + ) + actual = SchemaServiceClient.schema_path(project, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "whelk", + "schema": "octopus", + } + path = SchemaServiceClient.schema_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_schema_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "oyster" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = SchemaServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = SchemaServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "cuttlefish" + + expected = "folders/{folder}".format(folder=folder,) + actual = SchemaServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = SchemaServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "winkle" + + expected = "organizations/{organization}".format(organization=organization,) + actual = SchemaServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = SchemaServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "scallop" + + expected = "projects/{project}".format(project=project,) + actual = SchemaServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = SchemaServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "squid" + location = "clam" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = SchemaServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = SchemaServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.SchemaServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = SchemaServiceClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_set_iam_policy(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_set_iam_policy_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_set_iam_policy_from_dict(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 184b54026e1e..8dec1c27d15b 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -3969,6 +3969,7 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) assert transport.grpc_channel == mock_grpc_channel @@ -4014,6 +4015,7 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), ], ) assert transport.grpc_channel == mock_grpc_channel diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 0f661c2fa653..5a95a23e24d4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -130,7 +130,7 @@ def init(self, *args, **kwargs): def test_init_emulator(monkeypatch): - monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar/") + monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. 
client = publisher.Client() @@ -140,7 +140,7 @@ def test_init_emulator(monkeypatch): # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. channel = client.api._transport.publish._channel - assert channel.target().decode("utf8") == "/foo/bar/" + assert channel.target().decode("utf8") == "/foo/bar:123" def test_message_ordering_enabled(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index d5628927609e..3262600e7b4f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -91,7 +91,7 @@ def init(self, *args, **kwargs): def test_init_emulator(monkeypatch): - monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon/") + monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. client = subscriber.Client() @@ -101,7 +101,7 @@ def test_init_emulator(monkeypatch): # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. channel = client.api._transport.pull._channel - assert channel.target().decode("utf8") == "/baz/bacon/" + assert channel.target().decode("utf8") == "/baz/bacon:123" def test_class_method_factory(): From e2b95936103fee92bce7fe112438ca75ded3f585 Mon Sep 17 00:00:00 2001 From: dpcollins-google <40498610+dpcollins-google@users.noreply.github.com> Date: Wed, 30 Dec 2020 15:12:52 -0500 Subject: [PATCH 0614/1197] fix: Don't open the google.cloud package by adding pubsub.py (#269) Instead, create a subpackage, google.cloud.pubsub and add an __init__.py file to it. 
--- .../google/cloud/{pubsub.py => pubsub/__init__.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename packages/google-cloud-pubsub/google/cloud/{pubsub.py => pubsub/__init__.py} (100%) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py similarity index 100% rename from packages/google-cloud-pubsub/google/cloud/pubsub.py rename to packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py From a7baf9745398921e5f1fda0c5fc538ca58326306 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 28 Jan 2021 01:20:38 +0100 Subject: [PATCH 0615/1197] fix: client version missing from the user agent header (#275) * fix: missing client version in user agent header * Blacken --- .../services/publisher/async_client.py | 4 ++- .../pubsub_v1/services/publisher/client.py | 4 ++- .../services/publisher/transports/base.py | 4 ++- .../services/schema_service/async_client.py | 4 ++- .../services/schema_service/client.py | 4 ++- .../schema_service/transports/base.py | 4 ++- .../services/subscriber/async_client.py | 4 ++- .../pubsub_v1/services/subscriber/client.py | 4 ++- .../services/subscriber/transports/base.py | 4 ++- packages/google-cloud-pubsub/synth.metadata | 14 ++++----- packages/google-cloud-pubsub/synth.py | 17 +++++++++++ .../publisher/test_publisher_client.py | 29 ++++++++++++++++--- .../subscriber/test_subscriber_client.py | 23 ++++++++++++++- 13 files changed, 98 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 810bf532e7bf..0597b6e880d8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1138,7 +1138,9 @@ async def test_iam_permissions( try: DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index d2c8f853f312..bc18586bb18a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1315,7 +1315,9 @@ def test_iam_permissions( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index fe84ac415e37..a8d07de3ffc4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -33,7 +33,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 
4a6d1db93f31..c333e76c29c7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -849,7 +849,9 @@ async def test_iam_permissions( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 0468ff993c90..90fbce9f4a06 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1041,7 +1041,9 @@ def test_iam_permissions( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 01cf04c9e3cf..bb7528cb4abd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -34,7 +34,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + 
"google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 76d5204be277..bd0191193f1f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1874,7 +1874,9 @@ async def test_iam_permissions( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 0e87d6a2d79c..b80af2d81132 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -2014,7 +2014,9 @@ def test_iam_permissions( try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 8442fc0feb72..8bd0d5a19b90 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -33,7 +33,9 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution("google-pubsub",).version, + client_library_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 4718c28f5dcc..d4b5ca201d78 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,37 +3,37 @@ { "git": { "name": ".", - "remote": "git@github.com:googleapis/python-pubsub", - "sha": "aa45340d999f845c67396e8740e96f8a8caafd16" + "remote": "git@github.com:plamut/python-pubsub.git", + "sha": "a4eab77decdd7ea0d421b56a784e8a673a5595ec" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "57fc4a8a94a5bd015a83fb0f0a1707f62254b2cd", - "internalRef": "348813319" + "sha": "61ab0348bd228c942898aee291d677f0afdb888c", + "internalRef": "352069361" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6b026e1443948dcfc0b9e3289c85e940eb70f694" + "sha": "56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f" } } ], diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index fb42355e073e..d2ade4b278ae 100644 --- a/packages/google-cloud-pubsub/synth.py +++ 
b/packages/google-cloud-pubsub/synth.py @@ -97,6 +97,23 @@ \g<0>""", ) +# Make sure that client library version is present in user agent header. +s.replace( + [ + "google/pubsub_v1/services/publisher/async_client.py", + "google/pubsub_v1/services/publisher/client.py", + "google/pubsub_v1/services/publisher/transports/base.py", + "google/pubsub_v1/services/schema_service/async_client.py", + "google/pubsub_v1/services/schema_service/client.py", + "google/pubsub_v1/services/schema_service/transports/base.py", + "google/pubsub_v1/services/subscriber/async_client.py", + "google/pubsub_v1/services/subscriber/client.py", + "google/pubsub_v1/services/subscriber/transports/base.py", + ], + r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-pubsub['"]""", + "client_library_version=\g<1>'google-cloud-pubsub'", +) + # Docstrings of *_iam_policy() methods are formatted poorly and must be fixed # in order to avoid docstring format warnings in docs. s.replace("google/pubsub_v1/services/*er/client.py", r"(\s+)Args:", "\n\g<1>Args:") diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 5a95a23e24d4..71b432aa7551 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -26,6 +26,7 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types @@ -62,6 +63,26 @@ def test_init(): assert client.batch_settings.max_messages == 100 +def test_init_default_client_info(): + creds = mock.Mock(spec=credentials.Credentials) + client = publisher.Client(credentials=creds) + + installed_version = publisher.client.__version__ + 
expected_client_info = f"gccl/{installed_version}" + + for wrapped_method in client.api.transport._wrapped_methods.values(): + user_agent = next( + ( + header_value + for header, header_value in wrapped_method._metadata + if header == METRICS_METADATA_KEY + ), + None, + ) + assert user_agent is not None + assert expected_client_info in user_agent + + def test_init_w_custom_transport(): transport = PublisherGrpcTransport() client = publisher.Client(transport=transport) @@ -86,7 +107,7 @@ def test_init_w_api_endpoint(): def test_init_w_unicode_api_endpoint(): - client_options = {"api_endpoint": u"testendpoint.google.com"} + client_options = {"api_endpoint": "testendpoint.google.com"} client = publisher.Client(client_options=client_options) assert isinstance(client.api, publisher_client.PublisherClient) @@ -219,7 +240,7 @@ def test_publish_data_not_bytestring_error(): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(TypeError): - client.publish(topic, u"This is a text string.") + client.publish(topic, "This is a text string.") with pytest.raises(TypeError): client.publish(topic, 42) @@ -300,7 +321,7 @@ def test_publish_attrs_bytestring(): # The attributes should have been sent as text. 
batch.publish.assert_called_once_with( - gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) ) @@ -339,7 +360,7 @@ def test_publish_new_batch_needed(): commit_when_full=True, commit_retry=gapic_v1.method.DEFAULT, ) - message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) batch1.publish.assert_called_once_with(message_pb) batch2.publish.assert_called_once_with(message_pb) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 3262600e7b4f..780c20de4da2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -16,6 +16,7 @@ import grpc import mock +from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY from google.cloud.pubsub_v1 import subscriber from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures @@ -29,6 +30,26 @@ def test_init(): assert isinstance(client.api, subscriber_client.SubscriberClient) +def test_init_default_client_info(): + creds = mock.Mock(spec=credentials.Credentials) + client = subscriber.Client(credentials=creds) + + installed_version = subscriber.client.__version__ + expected_client_info = f"gccl/{installed_version}" + + for wrapped_method in client.api.transport._wrapped_methods.values(): + user_agent = next( + ( + header_value + for header, header_value in wrapped_method._metadata + if header == METRICS_METADATA_KEY + ), + None, + ) + assert user_agent is not None + assert expected_client_info in user_agent + + def test_init_w_custom_transport(): transport = SubscriberGrpcTransport() client = 
subscriber.Client(transport=transport) @@ -47,7 +68,7 @@ def test_init_w_api_endpoint(): def test_init_w_unicode_api_endpoint(): - client_options = {"api_endpoint": u"testendpoint.google.com"} + client_options = {"api_endpoint": "testendpoint.google.com"} client = subscriber.Client(client_options=client_options) assert isinstance(client.api, subscriber_client.SubscriberClient) From 8a56abff6cef51799133c1c49da3e03c3653b873 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 29 Jan 2021 01:43:31 +0100 Subject: [PATCH 0616/1197] feat: surface SchemaServiceClient in google.cloud.pubsub (#281) --- .../google-cloud-pubsub/google/cloud/pubsub/__init__.py | 2 ++ .../google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py index 3dc5fea84f8f..2a6994231cfe 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub/__init__.py @@ -19,6 +19,7 @@ from google.cloud.pubsub_v1 import PublisherClient from google.cloud.pubsub_v1 import SubscriberClient +from google.cloud.pubsub_v1 import SchemaServiceClient from google.cloud.pubsub_v1 import types @@ -26,4 +27,5 @@ "types", "PublisherClient", "SubscriberClient", + "SchemaServiceClient", ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py index 67bec51b248b..99bc3e9c8503 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/__init__.py @@ -17,6 +17,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import subscriber +from google.pubsub_v1.services import schema_service class PublisherClient(publisher.Client): @@ -27,4 +28,8 @@ class 
SubscriberClient(subscriber.Client): __doc__ = subscriber.Client.__doc__ -__all__ = ("types", "PublisherClient", "SubscriberClient") +class SchemaServiceClient(schema_service.client.SchemaServiceClient): + __doc__ = schema_service.client.SchemaServiceClient.__doc__ + + +__all__ = ("types", "PublisherClient", "SubscriberClient", "SchemaServiceClient") From f758d3b94c172a14d824c3e6b6e3fcf5e5d16316 Mon Sep 17 00:00:00 2001 From: Justin Beckwith Date: Mon, 8 Feb 2021 12:36:39 -0800 Subject: [PATCH 0617/1197] build: migrate to flakybot (#284) --- packages/google-cloud-pubsub/.kokoro/test-samples.sh | 8 ++++---- packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh index 6064e7ad6390..9a86e0c69d19 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -87,11 +87,11 @@ for file in samples/**/requirements.txt; do python3.6 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot fi if [[ $EXIT -ne 0 ]]; then diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh index 719bcd5ba84d..4af6cdc26dbc 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) From 043b08d615ad374e8a852b149a4ac949ea63b177 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 8 Feb 2021 18:02:21 -0500 Subject: [PATCH 0618/1197] chore: release 2.3.0 (#283) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 17 +++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0193fb54868d..3928cb016820 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,23 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.3.0](https://www.github.com/googleapis/python-pubsub/compare/v2.2.0...v2.3.0) (2021-02-08) + + +### Features + +* surface SchemaServiceClient in google.cloud.pubsub ([#281](https://www.github.com/googleapis/python-pubsub/issues/281)) ([8751bcc](https://www.github.com/googleapis/python-pubsub/commit/8751bcc5eb782df55769b48253629a3bde3d4661)) + + +### Bug Fixes + +* 
client version missing from the user agent header ([#275](https://www.github.com/googleapis/python-pubsub/issues/275)) ([b112f4f](https://www.github.com/googleapis/python-pubsub/commit/b112f4fcbf6f2bce8dcf37871bdc540b11f54fe3)) +* Don't open the google.cloud package by adding pubsub.py ([#269](https://www.github.com/googleapis/python-pubsub/issues/269)) ([542d79d](https://www.github.com/googleapis/python-pubsub/commit/542d79d7c5fb7403016150ba477485756cd4097b)) +* flaky samples tests ([#263](https://www.github.com/googleapis/python-pubsub/issues/263)) ([3d6a29d](https://www.github.com/googleapis/python-pubsub/commit/3d6a29de07cc09be663c90a3333f4cd33633994f)) +* Modify synth.py to update grpc transport options ([#266](https://www.github.com/googleapis/python-pubsub/issues/266)) ([41dcd30](https://www.github.com/googleapis/python-pubsub/commit/41dcd30636168f3dd1248f1d99170d531fc9bcb8)) +* pass anonymous credentials for emulator ([#250](https://www.github.com/googleapis/python-pubsub/issues/250)) ([8eed8e1](https://www.github.com/googleapis/python-pubsub/commit/8eed8e16019510dc8b20fb6b009d61a7ac532d26)) +* remove grpc send/recieve limits ([#259](https://www.github.com/googleapis/python-pubsub/issues/259)) ([fd2840c](https://www.github.com/googleapis/python-pubsub/commit/fd2840c10f92b03da7f4b40ac69c602220757c0a)) + ## [2.2.0](https://www.github.com/googleapis/python-pubsub/compare/v2.1.0...v2.2.0) (2020-11-16) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index a50b8f23859a..0e0ffdb05b9e 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.2.0" +version = "2.3.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 86cb20ad590bc4b50553c27446a68c3654a5cbd8 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 17 Feb 2021 04:00:42 
-0800 Subject: [PATCH 0619/1197] docs: update samples in documentation (#254) --- packages/google-cloud-pubsub/README.rst | 12 ++++++----- .../docs/publisher/index.rst | 4 ++-- .../docs/subscriber/index.rst | 20 +++++++++++++++---- 3 files changed, 25 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 926e51f1e2bd..9db987f2e292 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -111,7 +111,8 @@ messages to it topic='MY_TOPIC_NAME', # Set this to something appropriate. ) publisher.create_topic(topic_name) - publisher.publish(topic_name, b'My first message!', spam='eggs') + future = publisher.publish(topic_name, b'My first message!', spam='eggs') + future.result() To learn more, consult the `publishing documentation`_. @@ -129,23 +130,24 @@ the topic, and subscribe to that, passing a callback function. import os from google.cloud import pubsub_v1 - subscriber = pubsub_v1.SubscriberClient() topic_name = 'projects/{project_id}/topics/{topic}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. ) + subscription_name = 'projects/{project_id}/subscriptions/{sub}'.format( project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), sub='MY_SUBSCRIPTION_NAME', # Set this to something appropriate. 
) - subscriber.create_subscription( - name=subscription_name, topic=topic_name) def callback(message): print(message.data) message.ack() - future = subscriber.subscribe(subscription_name, callback) + with pubsub_v1.SubscriberClient() as subscriber: + subscriber.create_subscription( + name=subscription_name, topic=topic_name) + future = subscriber.subscribe(subscription_name, callback) The future returned by the call to ``subscriber.subscribe`` can be used to block the current thread until a given condition obtains: diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index cd2e5cbea777..6810f023289e 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -33,7 +33,7 @@ Therefore, a very basic publishing call looks like: .. code-block:: python topic = 'projects/{project}/topics/{topic}' - publish_client.publish(topic, b'This is my message.') + future = publish_client.publish(topic, b'This is my message.') .. note:: @@ -52,7 +52,7 @@ If you want to include attributes, simply add keyword arguments: .. code-block:: python topic = 'projects/{project}/topics/{topic}' - publish_client.publish(topic, b'This is my message.', foo='bar') + future = publish_client.publish(topic, b'This is my message.', foo='bar') Batching diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst index 2c9fd91cefd5..06f1658a4286 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -12,8 +12,9 @@ Instantiating a subscriber client is straightforward: .. code-block:: python from google.cloud import pubsub - subscriber = pubsub.SubscriberClient() + with pubsub.SubscriberClient() as subscriber: + # ... Creating a Subscription ----------------------- @@ -41,8 +42,10 @@ to subscribe to, and it must already exist. 
Once you have that, it is easy: # publisher = pubsub.PublisherClient() topic_path = publisher.topic_path(PROJECT, TOPIC) - sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) - subscriber.create_subscription(request={"name": sub_path, "topic": topic_path}) + + with pubsub.SubscriberClient() as subscriber: + sub_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) + subscriber.create_subscription(request={"name": sub_path, "topic": topic_path}) Once you have created a subscription (or if you already had one), the next step is to pull data from it. @@ -56,6 +59,8 @@ To pull the messages synchronously, use the client's .. code-block:: python + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) @@ -88,6 +93,8 @@ be dropped by this client and the backend will try to re-deliver them. .. code-block:: python + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + ack_ids = [] # TODO: populate with `ack_ids` of the messages to NACK ack_deadline_seconds = 0 subscriber.modify_ack_deadline( @@ -109,6 +116,8 @@ each message received. .. code-block:: python + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) @@ -147,6 +156,8 @@ Here is an example: do_something_with(message) # Replace this with your actual logic. message.ack() # Asynchronously acknowledge the message. + # Wrap the following code in `with pubsub.SubscriberClient() as subscriber:` + # Substitute PROJECT and SUBSCRIPTION with appropriate values for your # application. subscription_path = subscriber.subscription_path(PROJECT, SUBSCRIPTION) @@ -177,7 +188,8 @@ thread will be set on the future. 
try: future.result() except Exception as ex: - subscription.close() + # Close the subscriber if not using a context manager. + subscriber.close() raise Finally, you can use From 4dc32979e8003655fc86e1e36044c503802f3d15 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 17 Feb 2021 17:44:22 +0100 Subject: [PATCH 0620/1197] feat: add graceful streaming pull shutdown (#292) * Remove streaming pull's _UNARY_REQUESTS flag This internal flag has been hardcoded to True for several years and we do not utilize the "False" case anywhere, nor do we mention it in the docs or expose it to the end users. * Keep the leaser running even if inactive manager The leaser thread should not terminate implicitly when the streaming pull manager's consumer thread becomes inactive, as there might still be messages being processed inside the scheduler and we want to keep extending these messages' ACK deadlines until the scheduler has been shut down. NOTE: The manager's streaming pull RPC does not need to be active, since all mod-ACK requests are sent using a separate unary request instead of over the stream. * Add an option to block on scheduler shutdown * Simplify default thread pool executor factory The library only supports Python 3.6+, thus we don't need the conditional anymore. * Prevent implicit heartbeater shutdown For consistency with the leaser, the heartbeater should not terminate implicitly when the manager stops the background stream, but should instead wait for the manager to stop it explicitly. * NACK all messages not yet dispatched on shutdown * Keep dispatching requests after stream shutdown After the streaming pull manager shuts down the consumer thread and becomes "inactive", there might still be requests waiting in the queue to be dispatched, thus the dispatcher should not implicitly enter the no-op mode of operation. 
* Add system test for blocking shutdown --- .../subscriber/_protocol/dispatcher.py | 3 - .../subscriber/_protocol/heartbeater.py | 9 +- .../pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../_protocol/streaming_pull_manager.py | 82 +++++++++++------- .../cloud/pubsub_v1/subscriber/futures.py | 15 +++- .../cloud/pubsub_v1/subscriber/scheduler.py | 59 +++++++++---- packages/google-cloud-pubsub/tests/system.py | 82 +++++++++++++++++- .../pubsub_v1/subscriber/test_dispatcher.py | 31 +++++-- .../subscriber/test_futures_subscriber.py | 12 ++- .../pubsub_v1/subscriber/test_heartbeater.py | 41 +++++++-- .../unit/pubsub_v1/subscriber/test_leaser.py | 24 ++++-- .../pubsub_v1/subscriber/test_scheduler.py | 85 +++++++++++++++++-- .../subscriber/test_streaming_pull_manager.py | 58 +++++++++---- 13 files changed, 390 insertions(+), 113 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 7a89508446f8..382c5c38a2c2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -99,9 +99,6 @@ def dispatch_callback(self, items): ValueError: If ``action`` isn't one of the expected actions "ack", "drop", "lease", "modify_ack_deadline" or "nack". 
""" - if not self._manager.is_active: - return - batched_commands = collections.defaultdict(list) for item in items: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 9cd84a1e2397..fef158965c57 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -35,10 +35,11 @@ def __init__(self, manager, period=_DEFAULT_PERIOD): self._period = period def heartbeat(self): - """Periodically send heartbeats.""" - while self._manager.is_active and not self._stop_event.is_set(): - self._manager.heartbeat() - _LOGGER.debug("Sent heartbeat.") + """Periodically send streaming pull heartbeats. + """ + while not self._stop_event.is_set(): + if self._manager.heartbeat(): + _LOGGER.debug("Sent heartbeat.") self._stop_event.wait(timeout=self._period) _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 5830680da8eb..4a19792fc901 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -126,7 +126,7 @@ def maintain_leases(self): ack IDs, then waits for most of that time (but with jitter), and repeats. """ - while self._manager.is_active and not self._stop_event.is_set(): + while not self._stop_event.is_set(): # Determine the appropriate duration for the lease. This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e8a4a8caf9d5..8a3ff0e87cbf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -16,6 +16,7 @@ import collections import functools +import itertools import logging import threading import uuid @@ -113,10 +114,6 @@ class StreamingPullManager(object): scheduler will be used. """ - _UNARY_REQUESTS = True - """If set to True, this class will make requests over a separate unary - RPC instead of over the streaming RPC.""" - def __init__( self, client, @@ -292,6 +289,9 @@ def activate_ordering_keys(self, ordering_keys): activate. May be empty. """ with self._pause_resume_lock: + if self._scheduler is None: + return # We are shutting down, don't try to dispatch any more messages. + self._messages_on_hold.activate_ordering_keys( ordering_keys, self._schedule_message_on_hold ) @@ -421,37 +421,36 @@ def send(self, request): If a RetryError occurs, the manager shutdown is triggered, and the error is re-raised. """ - if self._UNARY_REQUESTS: - try: - self._send_unary_request(request) - except exceptions.GoogleAPICallError: - _LOGGER.debug( - "Exception while sending unary RPC. This is typically " - "non-fatal as stream requests are best-effort.", - exc_info=True, - ) - except exceptions.RetryError as exc: - _LOGGER.debug( - "RetryError while sending unary RPC. Waiting on a transient " - "error resolution for too long, will now trigger shutdown.", - exc_info=False, - ) - # The underlying channel has been suffering from a retryable error - # for too long, time to give up and shut the streaming pull down. 
- self._on_rpc_done(exc) - raise - - else: - self._rpc.send(request) + try: + self._send_unary_request(request) + except exceptions.GoogleAPICallError: + _LOGGER.debug( + "Exception while sending unary RPC. This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, + ) + except exceptions.RetryError as exc: + _LOGGER.debug( + "RetryError while sending unary RPC. Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. + self._on_rpc_done(exc) + raise def heartbeat(self): """Sends an empty request over the streaming pull RPC. - This always sends over the stream, regardless of if - ``self._UNARY_REQUESTS`` is set or not. + Returns: + bool: If a heartbeat request has actually been sent. """ if self._rpc is not None and self._rpc.is_active: self._rpc.send(gapic_types.StreamingPullRequest()) + return True + + return False def open(self, callback, on_callback_error): """Begin consuming messages. @@ -513,7 +512,7 @@ def open(self, callback, on_callback_error): # Start the stream heartbeater thread. self._heartbeater.start() - def close(self, reason=None): + def close(self, reason=None, await_msg_callbacks=False): """Stop consuming messages and shutdown all helper threads. This method is idempotent. Additional calls will have no effect. @@ -522,6 +521,15 @@ def close(self, reason=None): reason (Any): The reason to close this. If None, this is considered an "intentional" shutdown. This is passed to the callbacks specified via :meth:`add_close_callback`. + + await_msg_callbacks (bool): + If ``True``, the method will wait until all scheduler threads terminate + and only then proceed with the shutdown with the remaining shutdown + tasks, + + If ``False`` (default), the method will shut down the scheduler in a + non-blocking fashion, i.e. 
it will not wait for the currently executing + scheduler threads to terminate. """ with self._closing: if self._closed: @@ -535,7 +543,9 @@ def close(self, reason=None): # Shutdown all helper threads _LOGGER.debug("Stopping scheduler.") - self._scheduler.shutdown() + dropped_messages = self._scheduler.shutdown( + await_msg_callbacks=await_msg_callbacks + ) self._scheduler = None # Leaser and dispatcher reference each other through the shared @@ -549,11 +559,23 @@ def close(self, reason=None): # because the consumer gets shut down first. _LOGGER.debug("Stopping leaser.") self._leaser.stop() + + total = len(dropped_messages) + len( + self._messages_on_hold._messages_on_hold + ) + _LOGGER.debug(f"NACK-ing all not-yet-dispatched messages (total: {total}).") + messages_to_nack = itertools.chain( + dropped_messages, self._messages_on_hold._messages_on_hold + ) + for msg in messages_to_nack: + msg.nack() + _LOGGER.debug("Stopping dispatcher.") self._dispatcher.stop() self._dispatcher = None # dispatcher terminated, OK to dispose the leaser reference now self._leaser = None + _LOGGER.debug("Stopping heartbeater.") self._heartbeater.stop() self._heartbeater = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index f9fdd76abc87..cefe1aa91844 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -43,12 +43,23 @@ def _on_close_callback(self, manager, result): else: self.set_exception(result) - def cancel(self): + def cancel(self, await_msg_callbacks=False): """Stops pulling messages and shutdowns the background thread consuming messages. 
+ + Args: + await_msg_callbacks (bool): + If ``True``, the method will block until the background stream and its + helper threads have has been terminated, as well as all currently + executing message callbacks are done processing. + + If ``False`` (default), the method returns immediately after the + background stream and its helper threads have has been terminated, but + some of the message callback threads might still be running at that + point. """ self._cancelled = True - return self._manager.close() + return self._manager.close(await_msg_callbacks=await_msg_callbacks) def cancelled(self): """ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index ef2ef59cb6bf..2690c1fc6872 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -20,7 +20,6 @@ import abc import concurrent.futures -import sys import six from six.moves import queue @@ -58,19 +57,29 @@ def schedule(self, callback, *args, **kwargs): raise NotImplementedError @abc.abstractmethod - def shutdown(self): + def shutdown(self, await_msg_callbacks=False): """Shuts down the scheduler and immediately end all pending callbacks. + + Args: + await_msg_callbacks (bool): + If ``True``, the method will block until all currently executing + callbacks are done processing. If ``False`` (default), the + method will not wait for the currently running callbacks to complete. + + Returns: + List[pubsub_v1.subscriber.message.Message]: + The messages submitted to the scheduler that were not yet dispatched + to their callbacks. + It is assumed that each message was submitted to the scheduler as the + first positional argument to the provided callback. 
""" raise NotImplementedError def _make_default_thread_pool_executor(): - # Python 2.7 and 3.6+ have the thread_name_prefix argument, which is useful - # for debugging. - executor_kwargs = {} - if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): - executor_kwargs["thread_name_prefix"] = "ThreadPoolExecutor-ThreadScheduler" - return concurrent.futures.ThreadPoolExecutor(max_workers=10, **executor_kwargs) + return concurrent.futures.ThreadPoolExecutor( + max_workers=10, thread_name_prefix="ThreadPoolExecutor-ThreadScheduler" + ) class ThreadScheduler(Scheduler): @@ -110,15 +119,35 @@ def schedule(self, callback, *args, **kwargs): """ self._executor.submit(callback, *args, **kwargs) - def shutdown(self): - """Shuts down the scheduler and immediately end all pending callbacks. + def shutdown(self, await_msg_callbacks=False): + """Shut down the scheduler and immediately end all pending callbacks. + + Args: + await_msg_callbacks (bool): + If ``True``, the method will block until all currently executing + executor threads are done processing. If ``False`` (default), the + method will not wait for the currently running threads to complete. + + Returns: + List[pubsub_v1.subscriber.message.Message]: + The messages submitted to the scheduler that were not yet dispatched + to their callbacks. + It is assumed that each message was submitted to the scheduler as the + first positional argument to the provided callback. """ - # Drop all pending item from the executor. Without this, the executor - # will block until all pending items are complete, which is - # undesirable. + dropped_messages = [] + + # Drop all pending item from the executor. Without this, the executor will also + # try to process any pending work items before termination, which is undesirable. + # + # TODO: Replace the logic below by passing `cancel_futures=True` to shutdown() + # once we only need to support Python 3.9+. 
try: while True: - self._executor._work_queue.get(block=False) + work_item = self._executor._work_queue.get(block=False) + dropped_messages.append(work_item.args[0]) except queue.Empty: pass - self._executor.shutdown() + + self._executor.shutdown(wait=await_msg_callbacks) + return dropped_messages diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index bbedd9a11ff9..05a91a420e07 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import concurrent.futures import datetime import itertools import operator as op @@ -609,6 +610,78 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + def test_streaming_pull_blocking_shutdown( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + + # The ACK-s are only persisted if *all* messages published in the same batch + # are ACK-ed. We thus publish each message in its own batch so that the backend + # treats all messages' ACKs independently of each other. + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) + _publish_messages(publisher, topic_path, batch_sizes=[1] * 10) + + # Artificially delay message processing, gracefully shutdown the streaming pull + # in the meantime, then verify that those messages were nevertheless processed. 
+ processed_messages = [] + + def callback(message): + time.sleep(15) + processed_messages.append(message.data) + message.ack() + + # Flow control limits should exceed the number of worker threads, so that some + # of the messages will be blocked on waiting for free scheduler threads. + flow_control = pubsub_v1.types.FlowControl(max_messages=5) + executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) + scheduler = pubsub_v1.subscriber.scheduler.ThreadScheduler(executor=executor) + subscription_future = subscriber.subscribe( + subscription_path, + callback=callback, + flow_control=flow_control, + scheduler=scheduler, + ) + + try: + subscription_future.result(timeout=10) # less than the sleep in callback + except exceptions.TimeoutError: + subscription_future.cancel(await_msg_callbacks=True) + + # The shutdown should have waited for the already executing callbacks to finish. + assert len(processed_messages) == 3 + + # The messages that were not processed should have been NACK-ed and we should + # receive them again quite soon. + all_done = threading.Barrier(7 + 1, timeout=5)  # +1 because of the main thread + remaining = [] + + def callback2(message): + remaining.append(message.data) + message.ack() + all_done.wait() + + subscription_future = subscriber.subscribe( + subscription_path, callback=callback2 + ) + + try: + all_done.wait() + except threading.BrokenBarrierError:  # PRAGMA: no cover + pytest.fail("The remaining messages have not been re-delivered in time.") + finally: + subscription_future.cancel(await_msg_callbacks=False) + + # There should be 7 messages left that were not yet processed and none of them + # should be a message that should have already been successfully processed in the + # first streaming pull.
+ assert len(remaining) == 7 + assert not (set(processed_messages) & set(remaining)) # no re-delivery + @pytest.mark.skipif( "KOKORO_GFILE_DIR" not in os.environ, @@ -790,8 +863,8 @@ def _publish_messages(publisher, topic_path, batch_sizes): publish_futures = [] msg_counter = itertools.count(start=1) - for batch_size in batch_sizes: - msg_batch = _make_messages(count=batch_size) + for batch_num, batch_size in enumerate(batch_sizes, start=1): + msg_batch = _make_messages(count=batch_size, batch_num=batch_num) for msg in msg_batch: future = publisher.publish(topic_path, msg, seq_num=str(next(msg_counter))) publish_futures.append(future) @@ -802,9 +875,10 @@ def _publish_messages(publisher, topic_path, batch_sizes): future.result(timeout=30) -def _make_messages(count): +def _make_messages(count, batch_num): messages = [ - "message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) + f"message {i}/{count} of batch {batch_num}".encode("utf-8") + for i in range(1, count + 1) ] return messages diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 288e4bd18314..47c62bab63a6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -29,14 +29,14 @@ @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest(0, 0, 0, ""), "ack"), - (requests.DropRequest(0, 0, ""), "drop"), - (requests.LeaseRequest(0, 0, ""), "lease"), - (requests.ModAckRequest(0, 0), "modify_ack_deadline"), - (requests.NackRequest(0, 0, ""), "nack"), + (requests.AckRequest("0", 0, 0, ""), "ack"), + (requests.DropRequest("0", 0, ""), "drop"), + (requests.LeaseRequest("0", 0, ""), "lease"), + (requests.ModAckRequest("0", 0), "modify_ack_deadline"), + (requests.NackRequest("0", 0, ""), "nack"), ], ) -def test_dispatch_callback(item, 
method_name): +def test_dispatch_callback_active_manager(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -50,16 +50,29 @@ def test_dispatch_callback(item, method_name): method.assert_called_once_with([item]) -def test_dispatch_callback_inactive(): +@pytest.mark.parametrize( + "item,method_name", + [ + (requests.AckRequest("0", 0, 0, ""), "ack"), + (requests.DropRequest("0", 0, ""), "drop"), + (requests.LeaseRequest("0", 0, ""), "lease"), + (requests.ModAckRequest("0", 0), "modify_ack_deadline"), + (requests.NackRequest("0", 0, ""), "nack"), + ], +) +def test_dispatch_callback_inactive_manager(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0, "")]) + items = [item] - manager.send.assert_not_called() + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([item]) def test_ack(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 909337cc88c7..62a3ea1da1bb 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -69,10 +69,18 @@ def test__on_close_callback_future_already_done(self): result = future.result() assert result == "foo" # on close callback was a no-op - def test_cancel(self): + def test_cancel_default_nonblocking_manager_shutdown(self): future = self.make_future() future.cancel() - future._manager.close.assert_called_once() + future._manager.close.assert_called_once_with(await_msg_callbacks=False) + assert 
future.cancelled() + + def test_cancel_blocking_manager_shutdown(self): + future = self.make_future() + + future.cancel(await_msg_callbacks=True) + + future._manager.close.assert_called_once_with(await_msg_callbacks=True) assert future.cancelled() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 8f5049691a9d..1a52af231cc5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -22,22 +22,44 @@ import pytest -def test_heartbeat_inactive(caplog): - caplog.set_level(logging.INFO) +def test_heartbeat_inactive_manager_active_rpc(caplog): + caplog.set_level(logging.DEBUG) + + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + manager.is_active = False + manager.heartbeat.return_value = True # because of active rpc + + heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_event_as_done(heartbeater_) + + heartbeater_.heartbeat() + + assert "Sent heartbeat" in caplog.text + assert "exiting" in caplog.text + + +def test_heartbeat_inactive_manager_inactive_rpc(caplog): + caplog.set_level(logging.DEBUG) + manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False + manager.heartbeat.return_value = False # because of inactive rpc heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() + assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text def test_heartbeat_stopped(caplog): - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -47,17 +69,18 @@ def test_heartbeat_stopped(caplog): heartbeater_.heartbeat() + assert 
"Sent heartbeat" not in caplog.text assert "exiting" in caplog.text -def make_sleep_mark_manager_as_inactive(heartbeater): - # Make sleep mark the manager as inactive so that heartbeat() +def make_sleep_mark_event_as_done(heartbeater): + # Make sleep actually trigger the done event so that heartbeat() # exits at the end of the first run. - def trigger_inactive(timeout): + def trigger_done(timeout): assert timeout - heartbeater._manager.is_active = False + heartbeater._stop_event.set() - heartbeater._stop_event.wait = trigger_inactive + heartbeater._stop_event.wait = trigger_done def test_heartbeat_once(): @@ -65,7 +88,7 @@ def test_heartbeat_once(): streaming_pull_manager.StreamingPullManager, instance=True ) heartbeater_ = heartbeater.Heartbeater(manager) - make_sleep_mark_manager_as_inactive(heartbeater_) + make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 17409cb3fdb8..2ecc0b9f3ce1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -88,15 +88,21 @@ def create_manager(flow_control=types.FlowControl()): return manager -def test_maintain_leases_inactive(caplog): +def test_maintain_leases_inactive_manager(caplog): caplog.set_level(logging.INFO) manager = create_manager() manager.is_active = False leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + leaser_.add( + [requests.LeaseRequest(ack_id="my_ack_ID", byte_size=42, ordering_key="")] + ) leaser_.maintain_leases() + # Leases should still be maintained even if the manager is inactive. 
+ manager.dispatcher.modify_ack_deadline.assert_called() assert "exiting" in caplog.text @@ -112,20 +118,20 @@ def test_maintain_leases_stopped(caplog): assert "exiting" in caplog.text -def make_sleep_mark_manager_as_inactive(leaser): - # Make sleep mark the manager as inactive so that maintain_leases +def make_sleep_mark_event_as_done(leaser): + # Make sleep actually trigger the done event so that maintain_leases() # exits at the end of the first run. - def trigger_inactive(timeout): + def trigger_done(timeout): assert 0 < timeout < 10 - leaser._manager.is_active = False + leaser._stop_event.set() - leaser._stop_event.wait = trigger_inactive + leaser._stop_event.wait = trigger_done def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) leaser_.add( [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] ) @@ -140,7 +146,7 @@ def test_maintain_leases_ack_ids(): def test_maintain_leases_no_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) leaser_.maintain_leases() @@ -151,7 +157,7 @@ def test_maintain_leases_no_ack_ids(): def test_maintain_leases_outdated_items(time): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) # Add and start expiry timer at the beginning of the timeline.
time.return_value = 0 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 774d0d63e2a2..ede7c6b2d7a4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -14,6 +14,7 @@ import concurrent.futures import threading +import time import mock from six.moves import queue @@ -38,19 +39,89 @@ def test_constructor_options(): assert scheduler_._executor == mock.sentinel.executor -def test_schedule(): +def test_schedule_executes_submitted_items(): called_with = [] - called = threading.Event() + callback_done_twice = threading.Barrier(3) # 3 == 2x callback + 1x main thread def callback(*args, **kwargs): - called_with.append((args, kwargs)) - called.set() + called_with.append((args, kwargs)) # appends are thread-safe + callback_done_twice.wait() scheduler_ = scheduler.ThreadScheduler() scheduler_.schedule(callback, "arg1", kwarg1="meep") + scheduler_.schedule(callback, "arg2", kwarg2="boop") - called.wait() - scheduler_.shutdown() + callback_done_twice.wait(timeout=3.0) + result = scheduler_.shutdown() - assert called_with == [(("arg1",), {"kwarg1": "meep"})] + assert result == [] # no scheduled items dropped + + expected_calls = [(("arg1",), {"kwarg1": "meep"}), (("arg2",), {"kwarg2": "boop"})] + assert sorted(called_with) == expected_calls + + +def test_shutdown_nonblocking_by_default(): + called_with = [] + at_least_one_called = threading.Event() + at_least_one_completed = threading.Event() + + def callback(message): + called_with.append(message) # appends are thread-safe + at_least_one_called.set() + time.sleep(1.0) + at_least_one_completed.set() + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + 
scheduler_.schedule(callback, "message_2") + + at_least_one_called.wait() + dropped = scheduler_.shutdown() + + assert len(called_with) == 1 + assert called_with[0] in {"message_1", "message_2"} + + assert len(dropped) == 1 + assert dropped[0] in {"message_1", "message_2"} + assert dropped[0] != called_with[0] # the dropped message was not the processed one + + err_msg = ( + "Shutdown should not have waited " + "for the already running callbacks to complete." + ) + assert not at_least_one_completed.is_set(), err_msg + + +def test_shutdown_blocking_awaits_running_callbacks(): + called_with = [] + at_least_one_called = threading.Event() + at_least_one_completed = threading.Event() + + def callback(message): + called_with.append(message) # appends are thread-safe + at_least_one_called.set() + time.sleep(1.0) + at_least_one_completed.set() + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + scheduler_.schedule(callback, "message_2") + + at_least_one_called.wait() + dropped = scheduler_.shutdown(await_msg_callbacks=True) + + assert len(called_with) == 1 + assert called_with[0] in {"message_1", "message_2"} + + # The work items that have not been started yet should still be dropped. + assert len(dropped) == 1 + assert dropped[0] in {"message_1", "message_2"} + assert dropped[0] != called_with[0] # the dropped message was not the processed one + + err_msg = "Shutdown did not wait for the already running callbacks to complete." 
+ assert at_least_one_completed.is_set(), err_msg diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 242c0804ac58..a6454f853412 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools import logging import threading import time @@ -372,7 +373,6 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): def test_send_unary(): manager = make_manager() - manager._UNARY_REQUESTS = True manager.send( gapic_types.StreamingPullRequest( @@ -405,7 +405,6 @@ def test_send_unary(): def test_send_unary_empty(): manager = make_manager() - manager._UNARY_REQUESTS = True manager.send(gapic_types.StreamingPullRequest()) @@ -417,7 +416,6 @@ def test_send_unary_api_call_error(caplog): caplog.set_level(logging.DEBUG) manager = make_manager() - manager._UNARY_REQUESTS = True error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error @@ -431,7 +429,6 @@ def test_send_unary_retry_error(caplog): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() - manager._UNARY_REQUESTS = True error = exceptions.RetryError( "Too long a transient error", cause=Exception("Out of time!") @@ -445,24 +442,15 @@ def test_send_unary_retry_error(caplog): assert "signaled streaming pull manager shutdown" in caplog.text -def test_send_streaming(): - manager = make_manager() - manager._UNARY_REQUESTS = False - manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - - manager.send(mock.sentinel.request) - - manager._rpc.send.assert_called_once_with(mock.sentinel.request) - - def 
test_heartbeat(): manager = make_manager() manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager._rpc.is_active = True - manager.heartbeat() + result = manager.heartbeat() manager._rpc.send.assert_called_once_with(gapic_types.StreamingPullRequest()) + assert result def test_heartbeat_inactive(): @@ -472,7 +460,8 @@ def test_heartbeat_inactive(): manager.heartbeat() - manager._rpc.send.assert_not_called() + result = manager._rpc.send.assert_not_called() + assert not result @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) @@ -632,14 +621,14 @@ def _do_work(self): while not self._stop: try: self._manager.leaser.add([mock.Mock()]) - except Exception as exc: + except Exception as exc: # pragma: NO COVER self._error_callback(exc) time.sleep(0.1) # also try to interact with the leaser after the stop flag has been set try: self._manager.leaser.remove([mock.Mock()]) - except Exception as exc: + except Exception as exc: # pragma: NO COVER self._error_callback(exc) @@ -666,6 +655,27 @@ def test_close_callbacks(): callback.assert_called_once_with(manager, "meep") +def test_close_nacks_internally_queued_messages(): + nacked_messages = [] + + def fake_nack(self): + nacked_messages.append(self.data) + + MockMsg = functools.partial(mock.create_autospec, message.Message, instance=True) + messages = [MockMsg(data=b"msg1"), MockMsg(data=b"msg2"), MockMsg(data=b"msg3")] + for msg in messages: + msg.nack = stdlib_types.MethodType(fake_nack, msg) + + manager, _, _, _, _, _ = make_running_manager() + dropped_by_scheduler = messages[:2] + manager._scheduler.shutdown.return_value = dropped_by_scheduler + manager._messages_on_hold._messages_on_hold.append(messages[2]) + + manager.close() + + assert sorted(nacked_messages) == [b"msg1", b"msg2", b"msg3"] + + def test__get_initial_request(): manager = make_manager() manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) @@ -979,3 +989,15 @@ def test_activate_ordering_keys(): 
manager._messages_on_hold.activate_ordering_keys.assert_called_once_with( ["key1", "key2"], mock.ANY ) + + +def test_activate_ordering_keys_stopped_scheduler(): + manager = make_manager() + manager._messages_on_hold = mock.create_autospec( + messages_on_hold.MessagesOnHold, instance=True + ) + manager._scheduler = None + + manager.activate_ordering_keys(["key1", "key2"]) + + manager._messages_on_hold.activate_ordering_keys.assert_not_called() From 89b50481f8368424671f47eb029eee363aee0675 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 22 Feb 2021 22:40:48 +0100 Subject: [PATCH 0621/1197] chore: release v2.4.0 (#296) --- packages/google-cloud-pubsub/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 3928cb016820..eac6b55b35f5 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,21 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 2.4.0 + +02-22-2021 05:02 PST + + +### Implementation Changes + +### New Features + +- Add graceful streaming pull shutdown. ([#292](https://github.com/googleapis/python-pubsub/pull/292)) + +### Documentation + +- Update samples with using the subscriber client as a context manager. 
([#254](https://github.com/googleapis/python-pubsub/pull/254)) + ## [2.3.0](https://www.github.com/googleapis/python-pubsub/compare/v2.2.0...v2.3.0) (2021-02-08) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 0e0ffdb05b9e..8658542f46ef 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.3.0" +version = "2.4.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 82d06248096d97736ed9104f244c92ca619a9ec3 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 25 Feb 2021 15:00:52 -0800 Subject: [PATCH 0622/1197] samples: use flaky in tests (#298) * samples: use flaky in tests * update requirements-test.txt * fix import order * mark test flaky --- .../samples/snippets/quickstart/quickstart_test.py | 2 ++ .../samples/snippets/requirements-test.txt | 1 + .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 4 ++++ 3 files changed, 7 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py index 700f57d71b08..bdb24a145fc1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py @@ -17,6 +17,7 @@ import os import uuid +from flaky import flaky from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 import pytest @@ -79,6 +80,7 @@ def test_pub(topic_path, capsys): assert "Hello, World!" 
in out +@flaky(max_runs=3, min_passes=1) def test_sub(publisher_client, topic_path, subscription_path, capsys): publisher_client.publish(topic_path, b"Hello World!") diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index adf26b9f98bb..cf5d6325d1e7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,3 +1,4 @@ backoff==1.10.0 pytest==5.3.2 mock==3.0.5 +flaky==3.7.0 \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index b9f0f1784cd0..1e4880d5d480 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -17,6 +17,7 @@ import uuid import backoff +from flaky import flaky from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 import pytest @@ -228,6 +229,7 @@ def test_create_subscription_with_dead_letter_policy( assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." 
in out +@flaky(max_runs=3, min_passes=1) def test_receive_with_delivery_attempts( publisher_client, topic, dead_letter_topic, subscription_dlq, capsys ): @@ -255,6 +257,7 @@ def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): assert f"max_delivery_attempts: {UPDATED_MAX_DELIVERY_ATTEMPTS}" in out +@flaky(max_runs=3, min_passes=1) def test_remove_dead_letter_policy(subscription_dlq, capsys): subscription_after_update = subscriber.remove_dead_letter_policy( PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ @@ -389,6 +392,7 @@ def test_receive_synchronously(publisher_client, topic, subscription_sync, capsy assert f"{subscription_sync}" in out +@flaky(max_runs=3, min_passes=1) def test_receive_synchronously_with_lease( publisher_client, topic, subscription_sync, capsys ): From 18418f1bead20cf523fe06963b09fdbd37581b20 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Mon, 1 Mar 2021 19:05:08 -0800 Subject: [PATCH 0623/1197] fix(sample): mitigate flakiness in subscriber_test (#304) * fix(sample): mitigate flakiness in subscriber_test fixes #289 fixes #288 I think there were few problems with the tests. 1. google.api_core.exceptions.Unknown is not retried. 2. number of messages published in test_receive_synchronously_with_lease was 5, but the sample code is fetching 3 messages in bulk. 3. assertion in test_receive_synchronously_with_lease was too strict. 4. failure in subscriber_test was incorrectly considered a failure in teardown of quickstart_test. I hope these changes will mitigate the flakiness. 
* lint * also retry NotFound for dlq subscription * lint * ignore NotFound on deletion * lint --- .../samples/snippets/subscriber_test.py | 56 +++++++++++++------ 1 file changed, 38 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 1e4880d5d480..39e52afff79c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -19,6 +19,7 @@ import backoff from flaky import flaky from google.api_core.exceptions import NotFound +from google.api_core.exceptions import Unknown from google.cloud import pubsub_v1 import pytest @@ -39,12 +40,12 @@ UPDATED_MAX_DELIVERY_ATTEMPTS = 20 -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def publisher_client(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC) @@ -58,7 +59,7 @@ def topic(publisher_client): publisher_client.delete_topic(request={"topic": topic.name}) -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def dead_letter_topic(publisher_client): topic_path = publisher_client.topic_path(PROJECT_ID, DEAD_LETTER_TOPIC) @@ -72,14 +73,14 @@ def dead_letter_topic(publisher_client): publisher_client.delete_topic(request={"topic": dead_letter_topic.name}) -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def subscriber_client(): subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def subscription_admin(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN @@ -97,7 +98,7 @@ def subscription_admin(subscriber_client, topic): yield subscription.name 
-@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def subscription_sync(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_SYNC @@ -114,10 +115,18 @@ def subscription_sync(subscriber_client, topic): yield subscription.name - subscriber_client.delete_subscription(request={"subscription": subscription.name}) + @backoff.on_exception(backoff.expo, Unknown, max_time=300) + def delete_subscription(): + try: + subscriber_client.delete_subscription(request={"subscription": subscription.name}) + except NotFound: + print("When Unknown error happens, the server might have" + " successfully deleted the subscription under the cover, so" + " we ignore NotFound") + delete_subscription() -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def subscription_async(subscriber_client, topic): subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ASYNC @@ -137,7 +146,7 @@ def subscription_async(subscriber_client, topic): subscriber_client.delete_subscription(request={"subscription": subscription.name}) -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def subscription_dlq(subscriber_client, topic, dead_letter_topic): from google.cloud.pubsub_v1.types import DeadLetterPolicy @@ -164,8 +173,8 @@ def subscription_dlq(subscriber_client, topic, dead_letter_topic): subscriber_client.delete_subscription(request={"subscription": subscription.name}) -def _publish_messages(publisher_client, topic, **attrs): - for n in range(5): +def _publish_messages(publisher_client, topic, message_num=5, **attrs): + for n in range(message_num): data = f"message {n}".encode("utf-8") publish_future = publisher_client.publish(topic, data, **attrs) publish_future.result() @@ -229,13 +238,18 @@ def test_create_subscription_with_dead_letter_policy( assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." 
in out -@flaky(max_runs=3, min_passes=1) def test_receive_with_delivery_attempts( publisher_client, topic, dead_letter_topic, subscription_dlq, capsys ): - _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 90) + # The dlq subscription raises 404 before it's ready. + @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=300) + def run_sample(): + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 90) + + run_sample() out, _ = capsys.readouterr() assert f"Listening for messages on {subscription_dlq}.." in out @@ -392,13 +406,19 @@ def test_receive_synchronously(publisher_client, topic, subscription_sync, capsy assert f"{subscription_sync}" in out -@flaky(max_runs=3, min_passes=1) def test_receive_synchronously_with_lease( publisher_client, topic, subscription_sync, capsys ): - _publish_messages(publisher_client, topic) + @backoff.on_exception(backoff.expo, Unknown, max_time=300) + def run_sample(): + _publish_messages(publisher_client, topic, message_num=3) + subscriber.synchronous_pull_with_lease_management(PROJECT_ID, SUBSCRIPTION_SYNC) - subscriber.synchronous_pull_with_lease_management(PROJECT_ID, SUBSCRIPTION_SYNC) + run_sample() out, _ = capsys.readouterr() - assert f"Received and acknowledged 3 messages from {subscription_sync}." in out + + # Sometimes the subscriber only gets 1 or 2 messages and test fails. + # I think it's ok to consider those cases as passing. + assert "Received and acknowledged" in out + assert f"messages from {subscription_sync}." 
in out From 4ab4f608cf6814e670fb44222af31d7e2c1ae998 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 2 Mar 2021 14:02:21 +0100 Subject: [PATCH 0624/1197] fix: SSL error when using the client with the emulator (#297) * fix: use insecure gRPC channel with the emulator * Regenerate the library --- packages/google-cloud-pubsub/.coveragerc | 31 +-- .../.github/header-checker-lint.yml | 15 ++ packages/google-cloud-pubsub/.gitignore | 4 +- packages/google-cloud-pubsub/.kokoro/build.sh | 26 +- .../.kokoro/docs/docs-presubmit.cfg | 11 + packages/google-cloud-pubsub/.trampolinerc | 1 + packages/google-cloud-pubsub/CONTRIBUTING.rst | 22 +- packages/google-cloud-pubsub/LICENSE | 7 +- packages/google-cloud-pubsub/MANIFEST.in | 4 +- .../docs/_static/custom.css | 7 +- .../services/publisher/async_client.py | 52 ++-- .../pubsub_v1/services/publisher/client.py | 123 ++++++---- .../pubsub_v1/services/publisher/pagers.py | 59 +++-- .../services/publisher/transports/grpc.py | 23 +- .../publisher/transports/grpc_asyncio.py | 23 +- .../services/schema_service/async_client.py | 41 ++-- .../services/schema_service/client.py | 92 ++++--- .../services/schema_service/pagers.py | 27 ++- .../schema_service/transports/grpc.py | 23 +- .../schema_service/transports/grpc_asyncio.py | 23 +- .../services/subscriber/async_client.py | 125 ++++++---- .../pubsub_v1/services/subscriber/client.py | 227 +++++++++++------- .../pubsub_v1/services/subscriber/pagers.py | 43 ++-- .../services/subscriber/transports/grpc.py | 23 +- .../subscriber/transports/grpc_asyncio.py | 23 +- .../google/pubsub_v1/types/pubsub.py | 70 +++--- .../google/pubsub_v1/types/schema.py | 16 +- packages/google-cloud-pubsub/noxfile.py | 29 ++- .../samples/snippets/noxfile.py | 2 +- packages/google-cloud-pubsub/synth.metadata | 12 +- packages/google-cloud-pubsub/synth.py | 44 ++++ .../unit/gapic/pubsub_v1/test_publisher.py | 213 +++++++++------- .../gapic/pubsub_v1/test_schema_service.py | 214 ++++++++++------- 
.../unit/gapic/pubsub_v1/test_subscriber.py | 213 +++++++++------- 34 files changed, 1177 insertions(+), 691 deletions(-) create mode 100644 packages/google-cloud-pubsub/.github/header-checker-lint.yml diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index dd39c8546c41..580a30e10c4a 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -1,35 +1,18 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] fail_under = 100 show_missing = True +omit = + google/pubsub/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-pubsub/.github/header-checker-lint.yml b/packages/google-cloud-pubsub/.github/header-checker-lint.yml new file mode 100644 index 000000000000..fc281c05bd55 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore index b9daa52f118d..b4243ced74e4 100644 --- a/packages/google-cloud-pubsub/.gitignore +++ b/packages/google-cloud-pubsub/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index 95bc0a438942..8286412b63b9 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-pubsub +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-pubsub" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. 
export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg index 1118107829b7..2c532d9db771 100644 --- a/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg +++ b/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/build.sh" +} + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/packages/google-cloud-pubsub/.trampolinerc b/packages/google-cloud-pubsub/.trampolinerc index 995ee29111e1..383b6ec89fbc 100644 --- a/packages/google-cloud-pubsub/.trampolinerc +++ b/packages/google-cloud-pubsub/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 290271295ffb..6dbc9d2d6cbe 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. 
For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/packages/google-cloud-pubsub/LICENSE b/packages/google-cloud-pubsub/LICENSE index a8ee855de2aa..d64569567334 100644 --- a/packages/google-cloud-pubsub/LICENSE +++ b/packages/google-cloud-pubsub/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index e9e29d12033d..e783f4c6209b 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! 
include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/packages/google-cloud-pubsub/docs/_static/custom.css b/packages/google-cloud-pubsub/docs/_static/custom.css index 0abaf229fce3..bcd37bbd3c4a 100644 --- a/packages/google-cloud-pubsub/docs/_static/custom.css +++ b/packages/google-cloud-pubsub/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 0597b6e880d8..f8572a448862 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -78,6 +78,7 @@ class PublisherAsyncClient: PublisherClient.parse_common_location_path ) + from_service_account_info = PublisherClient.from_service_account_info from_service_account_file = PublisherClient.from_service_account_file from_service_account_json = from_service_account_file @@ -156,7 +157,7 @@ async def create_topic( (https://cloud.google.com/pubsub/docs/admin#resource_names). Args: - request (:class:`~.pubsub.Topic`): + request (:class:`google.pubsub_v1.types.Topic`): The request object. A topic resource. name (:class:`str`): Required. The name of the topic. It must have the format @@ -167,6 +168,7 @@ async def create_topic( plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. 
+ This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -178,7 +180,7 @@ async def create_topic( sent along with the request as metadata. Returns: - ~.pubsub.Topic: + google.pubsub_v1.types.Topic: A topic resource. """ # Create or coerce a protobuf request object. @@ -237,7 +239,7 @@ async def update_topic( properties of a topic are not modifiable. Args: - request (:class:`~.pubsub.UpdateTopicRequest`): + request (:class:`google.pubsub_v1.types.UpdateTopicRequest`): The request object. Request for the UpdateTopic method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -247,7 +249,7 @@ async def update_topic( sent along with the request as metadata. Returns: - ~.pubsub.Topic: + google.pubsub_v1.types.Topic: A topic resource. """ # Create or coerce a protobuf request object. @@ -296,16 +298,17 @@ async def publish( the topic does not exist. Args: - request (:class:`~.pubsub.PublishRequest`): + request (:class:`google.pubsub_v1.types.PublishRequest`): The request object. Request for the Publish method. topic (:class:`str`): Required. The messages in the request will be published on this topic. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - messages (:class:`Sequence[~.pubsub.PubsubMessage]`): + messages (:class:`Sequence[google.pubsub_v1.types.PubsubMessage]`): Required. The messages to publish. This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this @@ -318,8 +321,8 @@ async def publish( sent along with the request as metadata. Returns: - ~.pubsub.PublishResponse: - Response for the ``Publish`` method. + google.pubsub_v1.types.PublishResponse: + Response for the Publish method. """ # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have @@ -388,11 +391,12 @@ async def get_topic( r"""Gets the configuration of a topic. Args: - request (:class:`~.pubsub.GetTopicRequest`): + request (:class:`google.pubsub_v1.types.GetTopicRequest`): The request object. Request for the GetTopic method. topic (:class:`str`): Required. The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -404,7 +408,7 @@ async def get_topic( sent along with the request as metadata. Returns: - ~.pubsub.Topic: + google.pubsub_v1.types.Topic: A topic resource. """ # Create or coerce a protobuf request object. @@ -467,11 +471,12 @@ async def list_topics( r"""Lists matching topics. Args: - request (:class:`~.pubsub.ListTopicsRequest`): + request (:class:`google.pubsub_v1.types.ListTopicsRequest`): The request object. Request for the `ListTopics` method. project (:class:`str`): Required. The name of the project in which to list topics. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -483,8 +488,8 @@ async def list_topics( sent along with the request as metadata. Returns: - ~.pagers.ListTopicsAsyncPager: - Response for the ``ListTopics`` method. + google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager: + Response for the ListTopics method. Iterating over this object will yield results and resolve additional pages automatically. @@ -557,13 +562,14 @@ async def list_topic_subscriptions( topic. Args: - request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + request (:class:`google.pubsub_v1.types.ListTopicSubscriptionsRequest`): The request object. Request for the `ListTopicSubscriptions` method. topic (:class:`str`): Required. The name of the topic that subscriptions are attached to. 
Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -575,8 +581,8 @@ async def list_topic_subscriptions( sent along with the request as metadata. Returns: - ~.pagers.ListTopicSubscriptionsAsyncPager: - Response for the ``ListTopicSubscriptions`` method. + google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager: + Response for the ListTopicSubscriptions method. Iterating over this object will yield results and resolve additional pages automatically. @@ -653,13 +659,14 @@ async def list_topic_snapshots( in an existing subscription to the state captured by a snapshot. Args: - request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + request (:class:`google.pubsub_v1.types.ListTopicSnapshotsRequest`): The request object. Request for the `ListTopicSnapshots` method. topic (:class:`str`): Required. The name of the topic that snapshots are attached to. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -671,8 +678,8 @@ async def list_topic_snapshots( sent along with the request as metadata. Returns: - ~.pagers.ListTopicSnapshotsAsyncPager: - Response for the ``ListTopicSnapshots`` method. + google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager: + Response for the ListTopicSnapshots method. Iterating over this object will yield results and resolve additional pages automatically. @@ -749,12 +756,13 @@ async def delete_topic( field is set to ``_deleted-topic_``. Args: - request (:class:`~.pubsub.DeleteTopicRequest`): + request (:class:`google.pubsub_v1.types.DeleteTopicRequest`): The request object. Request for the `DeleteTopic` method. topic (:class:`str`): Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. 
+ This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -823,7 +831,7 @@ async def detach_subscription( will stop. Args: - request (:class:`~.pubsub.DetachSubscriptionRequest`): + request (:class:`google.pubsub_v1.types.DetachSubscriptionRequest`): The request object. Request for the DetachSubscription method. @@ -834,7 +842,7 @@ async def detach_subscription( sent along with the request as metadata. Returns: - ~.pubsub.DetachSubscriptionResponse: + google.pubsub_v1.types.DetachSubscriptionResponse: Response for the DetachSubscription method. Reserved for future use. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index bc18586bb18a..f74e85a0fd15 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -17,6 +17,7 @@ from collections import OrderedDict from distutils import util +import functools import os import re from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union @@ -37,6 +38,8 @@ from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub +import grpc + from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PublisherGrpcTransport from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport @@ -124,6 +127,23 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. 
+ + Returns: + PublisherClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -137,7 +157,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + PublisherClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -269,10 +289,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.PublisherTransport]): The + transport (Union[str, PublisherTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -308,21 +328,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -360,12 +376,21 @@ def __init__( self._transport = transport else: Transport = type(self).get_transport_class(transport) + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(Transport, type(self)._transport_registry["grpc"]): + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + Transport = functools.partial(Transport, channel=channel) + self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -385,9 +410,9 @@ def create_topic( Args: - request (:class:`~.pubsub.Topic`): + request (google.pubsub_v1.types.Topic): The request object. A topic resource. - name (:class:`str`): + name (str): Required. The name of the topic. It must have the format ``"projects/{project}/topics/{topic}"``. 
``{topic}`` must start with a letter, and contain only letters @@ -396,6 +421,7 @@ def create_topic( plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -407,7 +433,7 @@ def create_topic( sent along with the request as metadata. Returns: - ~.pubsub.Topic: + google.pubsub_v1.types.Topic: A topic resource. """ # Create or coerce a protobuf request object. @@ -462,7 +488,7 @@ def update_topic( Args: - request (:class:`~.pubsub.UpdateTopicRequest`): + request (google.pubsub_v1.types.UpdateTopicRequest): The request object. Request for the UpdateTopic method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -472,7 +498,7 @@ def update_topic( sent along with the request as metadata. Returns: - ~.pubsub.Topic: + google.pubsub_v1.types.Topic: A topic resource. """ # Create or coerce a protobuf request object. @@ -517,16 +543,17 @@ def publish( Args: - request (:class:`~.pubsub.PublishRequest`): + request (google.pubsub_v1.types.PublishRequest): The request object. Request for the Publish method. - topic (:class:`str`): + topic (str): Required. The messages in the request will be published on this topic. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - messages (:class:`Sequence[~.pubsub.PubsubMessage]`): + messages (Sequence[google.pubsub_v1.types.PubsubMessage]): Required. The messages to publish. This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this @@ -539,8 +566,8 @@ def publish( sent along with the request as metadata. Returns: - ~.pubsub.PublishResponse: - Response for the ``Publish`` method. + google.pubsub_v1.types.PublishResponse: + Response for the Publish method. 
""" # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -564,9 +591,8 @@ def publish( if topic is not None: request.topic = topic - - if messages: - request.messages.extend(messages) + if messages is not None: + request.messages = messages # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -597,11 +623,12 @@ def get_topic( Args: - request (:class:`~.pubsub.GetTopicRequest`): + request (google.pubsub_v1.types.GetTopicRequest): The request object. Request for the GetTopic method. - topic (:class:`str`): + topic (str): Required. The name of the topic to get. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -613,7 +640,7 @@ def get_topic( sent along with the request as metadata. Returns: - ~.pubsub.Topic: + google.pubsub_v1.types.Topic: A topic resource. """ # Create or coerce a protobuf request object. @@ -668,11 +695,12 @@ def list_topics( Args: - request (:class:`~.pubsub.ListTopicsRequest`): + request (google.pubsub_v1.types.ListTopicsRequest): The request object. Request for the `ListTopics` method. - project (:class:`str`): + project (str): Required. The name of the project in which to list topics. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -684,8 +712,8 @@ def list_topics( sent along with the request as metadata. Returns: - ~.pagers.ListTopicsPager: - Response for the ``ListTopics`` method. + google.pubsub_v1.services.publisher.pagers.ListTopicsPager: + Response for the ListTopics method. Iterating over this object will yield results and resolve additional pages automatically. 
@@ -750,13 +778,14 @@ def list_topic_subscriptions( Args: - request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): The request object. Request for the `ListTopicSubscriptions` method. - topic (:class:`str`): + topic (str): Required. The name of the topic that subscriptions are attached to. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -768,8 +797,8 @@ def list_topic_subscriptions( sent along with the request as metadata. Returns: - ~.pagers.ListTopicSubscriptionsPager: - Response for the ``ListTopicSubscriptions`` method. + google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager: + Response for the ListTopicSubscriptions method. Iterating over this object will yield results and resolve additional pages automatically. @@ -838,13 +867,14 @@ def list_topic_snapshots( Args: - request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): The request object. Request for the `ListTopicSnapshots` method. - topic (:class:`str`): + topic (str): Required. The name of the topic that snapshots are attached to. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -856,8 +886,8 @@ def list_topic_snapshots( sent along with the request as metadata. Returns: - ~.pagers.ListTopicSnapshotsPager: - Response for the ``ListTopicSnapshots`` method. + google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager: + Response for the ListTopicSnapshots method. Iterating over this object will yield results and resolve additional pages automatically. 
@@ -926,12 +956,13 @@ def delete_topic( Args: - request (:class:`~.pubsub.DeleteTopicRequest`): + request (google.pubsub_v1.types.DeleteTopicRequest): The request object. Request for the `DeleteTopic` method. - topic (:class:`str`): + topic (str): Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -996,7 +1027,7 @@ def detach_subscription( Args: - request (:class:`~.pubsub.DetachSubscriptionRequest`): + request (google.pubsub_v1.types.DetachSubscriptionRequest): The request object. Request for the DetachSubscription method. @@ -1007,7 +1038,7 @@ def detach_subscription( sent along with the request as metadata. Returns: - ~.pubsub.DetachSubscriptionResponse: + google.pubsub_v1.types.DetachSubscriptionResponse: Response for the DetachSubscription method. Reserved for future use. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index 52242ff17796..e8836d410bc6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.pubsub_v1.types import pubsub @@ -24,7 +33,7 @@ class ListTopicsPager: """A pager for iterating through ``list_topics`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListTopicsResponse` object, and + :class:`google.pubsub_v1.types.ListTopicsResponse` object, and provides an ``__iter__`` method to iterate through its ``topics`` field. 
@@ -33,7 +42,7 @@ class ListTopicsPager: through the ``topics`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListTopicsResponse` + All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListTopicsRequest`): + request (google.pubsub_v1.types.ListTopicsRequest): The initial request object. - response (:class:`~.pubsub.ListTopicsResponse`): + response (google.pubsub_v1.types.ListTopicsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListTopicsAsyncPager: """A pager for iterating through ``list_topics`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListTopicsResponse` object, and + :class:`google.pubsub_v1.types.ListTopicsResponse` object, and provides an ``__aiter__`` method to iterate through its ``topics`` field. @@ -95,7 +104,7 @@ class ListTopicsAsyncPager: through the ``topics`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListTopicsResponse` + All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListTopicsRequest`): + request (google.pubsub_v1.types.ListTopicsRequest): The initial request object. - response (:class:`~.pubsub.ListTopicsResponse`): + response (google.pubsub_v1.types.ListTopicsResponse): The initial response object. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -152,7 +161,7 @@ class ListTopicSubscriptionsPager: """A pager for iterating through ``list_topic_subscriptions`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListTopicSubscriptionsResponse` object, and + :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and provides an ``__iter__`` method to iterate through its ``subscriptions`` field. @@ -161,7 +170,7 @@ class ListTopicSubscriptionsPager: through the ``subscriptions`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListTopicSubscriptionsResponse` + All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -179,9 +188,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): The initial request object. - response (:class:`~.pubsub.ListTopicSubscriptionsResponse`): + response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -214,7 +223,7 @@ class ListTopicSubscriptionsAsyncPager: """A pager for iterating through ``list_topic_subscriptions`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListTopicSubscriptionsResponse` object, and + :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and provides an ``__aiter__`` method to iterate through its ``subscriptions`` field. @@ -223,7 +232,7 @@ class ListTopicSubscriptionsAsyncPager: through the ``subscriptions`` field on the corresponding responses. 
- All the usual :class:`~.pubsub.ListTopicSubscriptionsResponse` + All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -241,9 +250,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListTopicSubscriptionsRequest`): + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): The initial request object. - response (:class:`~.pubsub.ListTopicSubscriptionsResponse`): + response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -280,7 +289,7 @@ class ListTopicSnapshotsPager: """A pager for iterating through ``list_topic_snapshots`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListTopicSnapshotsResponse` object, and + :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and provides an ``__iter__`` method to iterate through its ``snapshots`` field. @@ -289,7 +298,7 @@ class ListTopicSnapshotsPager: through the ``snapshots`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListTopicSnapshotsResponse` + All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -307,9 +316,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): The initial request object. 
- response (:class:`~.pubsub.ListTopicSnapshotsResponse`): + response (google.pubsub_v1.types.ListTopicSnapshotsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -342,7 +351,7 @@ class ListTopicSnapshotsAsyncPager: """A pager for iterating through ``list_topic_snapshots`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListTopicSnapshotsResponse` object, and + :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and provides an ``__aiter__`` method to iterate through its ``snapshots`` field. @@ -351,7 +360,7 @@ class ListTopicSnapshotsAsyncPager: through the ``snapshots`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListTopicSnapshotsResponse` + All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -369,9 +378,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListTopicSnapshotsRequest`): + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): The initial request object. - response (:class:`~.pubsub.ListTopicSnapshotsResponse`): + response (google.pubsub_v1.types.ListTopicSnapshotsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index d1212ecea03e..670a08bb7eed 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -61,6 +61,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -91,6 +92,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -107,6 +112,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
@@ -116,11 +126,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -165,12 +170,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 2b15178eff37..ea6e0483710d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -105,6 +105,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -136,6 +137,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -152,6 +157,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -161,11 +171,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -210,12 +215,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index c333e76c29c7..542053f39101 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -82,6 +82,7 @@ class SchemaServiceAsyncClient: SchemaServiceClient.parse_common_location_path ) + from_service_account_info = SchemaServiceClient.from_service_account_info from_service_account_file = SchemaServiceClient.from_service_account_file from_service_account_json = from_service_account_file @@ -160,20 +161,22 @@ async def create_schema( r"""Creates a schema. Args: - request (:class:`~.gp_schema.CreateSchemaRequest`): + request (:class:`google.pubsub_v1.types.CreateSchemaRequest`): The request object. Request for the CreateSchema method. parent (:class:`str`): Required. The name of the project in which to create the schema. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - schema (:class:`~.gp_schema.Schema`): + schema (:class:`google.pubsub_v1.types.Schema`): Required. The schema object to create. This schema's ``name`` parameter is ignored. The schema object returned by CreateSchema will have a ``name`` made using the given ``parent`` and ``schema_id``. + This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -184,6 +187,7 @@ async def create_schema( See https://cloud.google.com/pubsub/docs/admin#resource_names for resource name constraints. + This corresponds to the ``schema_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -195,7 +199,7 @@ async def create_schema( sent along with the request as metadata. Returns: - ~.gp_schema.Schema: + google.pubsub_v1.types.Schema: A schema resource. """ # Create or coerce a protobuf request object. @@ -252,11 +256,12 @@ async def get_schema( r"""Gets a schema. Args: - request (:class:`~.schema.GetSchemaRequest`): + request (:class:`google.pubsub_v1.types.GetSchemaRequest`): The request object. Request for the GetSchema method. name (:class:`str`): Required. The name of the schema to get. Format is ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -268,7 +273,7 @@ async def get_schema( sent along with the request as metadata. Returns: - ~.schema.Schema: + google.pubsub_v1.types.Schema: A schema resource. """ # Create or coerce a protobuf request object. @@ -321,12 +326,13 @@ async def list_schemas( r"""Lists schemas in a project. Args: - request (:class:`~.schema.ListSchemasRequest`): + request (:class:`google.pubsub_v1.types.ListSchemasRequest`): The request object. Request for the `ListSchemas` method. parent (:class:`str`): Required. The name of the project in which to list schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -338,8 +344,8 @@ async def list_schemas( sent along with the request as metadata. Returns: - ~.pagers.ListSchemasAsyncPager: - Response for the ``ListSchemas`` method. + google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager: + Response for the ListSchemas method. 
Iterating over this object will yield results and resolve additional pages automatically. @@ -401,12 +407,13 @@ async def delete_schema( r"""Deletes a schema. Args: - request (:class:`~.schema.DeleteSchemaRequest`): + request (:class:`google.pubsub_v1.types.DeleteSchemaRequest`): The request object. Request for the `DeleteSchema` method. name (:class:`str`): Required. Name of the schema to delete. Format is ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -467,18 +474,20 @@ async def validate_schema( r"""Validates a schema. Args: - request (:class:`~.gp_schema.ValidateSchemaRequest`): + request (:class:`google.pubsub_v1.types.ValidateSchemaRequest`): The request object. Request for the `ValidateSchema` method. parent (:class:`str`): Required. The name of the project in which to validate schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - schema (:class:`~.gp_schema.Schema`): + schema (:class:`google.pubsub_v1.types.Schema`): Required. The schema object to validate. + This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -490,8 +499,8 @@ async def validate_schema( sent along with the request as metadata. Returns: - ~.gp_schema.ValidateSchemaResponse: - Response for the ``ValidateSchema`` method. + google.pubsub_v1.types.ValidateSchemaResponse: + Response for the ValidateSchema method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -544,7 +553,7 @@ async def validate_message( r"""Validates a message against a schema. Args: - request (:class:`~.schema.ValidateMessageRequest`): + request (:class:`google.pubsub_v1.types.ValidateMessageRequest`): The request object. 
Request for the `ValidateMessage` method. @@ -555,8 +564,8 @@ async def validate_message( sent along with the request as metadata. Returns: - ~.schema.ValidateMessageResponse: - Response for the ``ValidateMessage`` method. + google.pubsub_v1.types.ValidateMessageResponse: + Response for the ValidateMessage method. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 90fbce9f4a06..d569af8f34e0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -114,6 +114,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -126,7 +142,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + SchemaServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -231,10 +247,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.SchemaServiceTransport]): The + transport (Union[str, SchemaServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -270,21 +286,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -327,7 +339,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -346,30 +358,33 @@ def create_schema( r"""Creates a schema. Args: - request (:class:`~.gp_schema.CreateSchemaRequest`): + request (google.pubsub_v1.types.CreateSchemaRequest): The request object. Request for the CreateSchema method. - parent (:class:`str`): + parent (str): Required. The name of the project in which to create the schema. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - schema (:class:`~.gp_schema.Schema`): + schema (google.pubsub_v1.types.Schema): Required. The schema object to create. This schema's ``name`` parameter is ignored. The schema object returned by CreateSchema will have a ``name`` made using the given ``parent`` and ``schema_id``. + This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - schema_id (:class:`str`): + schema_id (str): The ID to use for the schema, which will become the final component of the schema's resource name. See https://cloud.google.com/pubsub/docs/admin#resource_names for resource name constraints. + This corresponds to the ``schema_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -381,7 +396,7 @@ def create_schema( sent along with the request as metadata. Returns: - ~.gp_schema.Schema: + google.pubsub_v1.types.Schema: A schema resource. """ # Create or coerce a protobuf request object. @@ -439,11 +454,12 @@ def get_schema( r"""Gets a schema. 
Args: - request (:class:`~.schema.GetSchemaRequest`): + request (google.pubsub_v1.types.GetSchemaRequest): The request object. Request for the GetSchema method. - name (:class:`str`): + name (str): Required. The name of the schema to get. Format is ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -455,7 +471,7 @@ def get_schema( sent along with the request as metadata. Returns: - ~.schema.Schema: + google.pubsub_v1.types.Schema: A schema resource. """ # Create or coerce a protobuf request object. @@ -509,12 +525,13 @@ def list_schemas( r"""Lists schemas in a project. Args: - request (:class:`~.schema.ListSchemasRequest`): + request (google.pubsub_v1.types.ListSchemasRequest): The request object. Request for the `ListSchemas` method. - parent (:class:`str`): + parent (str): Required. The name of the project in which to list schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -526,8 +543,8 @@ def list_schemas( sent along with the request as metadata. Returns: - ~.pagers.ListSchemasPager: - Response for the ``ListSchemas`` method. + google.pubsub_v1.services.schema_service.pagers.ListSchemasPager: + Response for the ListSchemas method. Iterating over this object will yield results and resolve additional pages automatically. @@ -590,12 +607,13 @@ def delete_schema( r"""Deletes a schema. Args: - request (:class:`~.schema.DeleteSchemaRequest`): + request (google.pubsub_v1.types.DeleteSchemaRequest): The request object. Request for the `DeleteSchema` method. - name (:class:`str`): + name (str): Required. Name of the schema to delete. Format is ``projects/{project}/schemas/{schema}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -657,18 +675,20 @@ def validate_schema( r"""Validates a schema. Args: - request (:class:`~.gp_schema.ValidateSchemaRequest`): + request (google.pubsub_v1.types.ValidateSchemaRequest): The request object. Request for the `ValidateSchema` method. - parent (:class:`str`): + parent (str): Required. The name of the project in which to validate schemas. Format is ``projects/{project-id}``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - schema (:class:`~.gp_schema.Schema`): + schema (google.pubsub_v1.types.Schema): Required. The schema object to validate. + This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -680,8 +700,8 @@ def validate_schema( sent along with the request as metadata. Returns: - ~.gp_schema.ValidateSchemaResponse: - Response for the ``ValidateSchema`` method. + google.pubsub_v1.types.ValidateSchemaResponse: + Response for the ValidateSchema method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -735,7 +755,7 @@ def validate_message( r"""Validates a message against a schema. Args: - request (:class:`~.schema.ValidateMessageRequest`): + request (google.pubsub_v1.types.ValidateMessageRequest): The request object. Request for the `ValidateMessage` method. @@ -746,8 +766,8 @@ def validate_message( sent along with the request as metadata. Returns: - ~.schema.ValidateMessageResponse: - Response for the ``ValidateMessage`` method. + google.pubsub_v1.types.ValidateMessageResponse: + Response for the ValidateMessage method. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index e4da22697cc6..2712f37c64c7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.pubsub_v1.types import schema @@ -24,7 +33,7 @@ class ListSchemasPager: """A pager for iterating through ``list_schemas`` requests. This class thinly wraps an initial - :class:`~.schema.ListSchemasResponse` object, and + :class:`google.pubsub_v1.types.ListSchemasResponse` object, and provides an ``__iter__`` method to iterate through its ``schemas`` field. @@ -33,7 +42,7 @@ class ListSchemasPager: through the ``schemas`` field on the corresponding responses. - All the usual :class:`~.schema.ListSchemasResponse` + All the usual :class:`google.pubsub_v1.types.ListSchemasResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.schema.ListSchemasRequest`): + request (google.pubsub_v1.types.ListSchemasRequest): The initial request object. - response (:class:`~.schema.ListSchemasResponse`): + response (google.pubsub_v1.types.ListSchemasResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -86,7 +95,7 @@ class ListSchemasAsyncPager: """A pager for iterating through ``list_schemas`` requests. This class thinly wraps an initial - :class:`~.schema.ListSchemasResponse` object, and + :class:`google.pubsub_v1.types.ListSchemasResponse` object, and provides an ``__aiter__`` method to iterate through its ``schemas`` field. @@ -95,7 +104,7 @@ class ListSchemasAsyncPager: through the ``schemas`` field on the corresponding responses. - All the usual :class:`~.schema.ListSchemasResponse` + All the usual :class:`google.pubsub_v1.types.ListSchemasResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.schema.ListSchemasRequest`): + request (google.pubsub_v1.types.ListSchemasRequest): The initial request object. - response (:class:`~.schema.ListSchemasResponse`): + response (google.pubsub_v1.types.ListSchemasResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 02f91d3587ef..3a724e0734cd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -63,6 +63,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -93,6 +94,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -109,6 +114,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
@@ -118,11 +128,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -167,12 +172,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 3455c207bdb8..080bcc6f5b7c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -138,6 +139,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -154,6 +159,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -163,11 +173,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -212,12 +217,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index bd0191193f1f..7bfaeb9034fc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -91,6 +91,7 @@ class SubscriberAsyncClient: SubscriberClient.parse_common_location_path ) + from_service_account_info = SubscriberClient.from_service_account_info from_service_account_file = SubscriberClient.from_service_account_file from_service_account_json = from_service_account_file @@ -182,7 +183,7 @@ async def create_subscription( request. Args: - request (:class:`~.pubsub.Subscription`): + request (:class:`google.pubsub_v1.types.Subscription`): The request object. A subscription resource. name (:class:`str`): Required. The name of the subscription. It must have the @@ -194,6 +195,7 @@ async def create_subscription( (``~``), plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -203,14 +205,16 @@ async def create_subscription( ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been deleted. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- push_config (:class:`~.pubsub.PushConfig`): + push_config (:class:`google.pubsub_v1.types.PushConfig`): If push delivery is used with this subscription, this field is used to configure it. An empty ``pushConfig`` signifies that the subscriber will pull and ack messages using API methods. + This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -239,6 +243,7 @@ async def create_subscription( If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message. + This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -250,7 +255,7 @@ async def create_subscription( sent along with the request as metadata. Returns: - ~.pubsub.Subscription: + google.pubsub_v1.types.Subscription: A subscription resource. """ # Create or coerce a protobuf request object. @@ -319,12 +324,13 @@ async def get_subscription( r"""Gets the configuration details of a subscription. Args: - request (:class:`~.pubsub.GetSubscriptionRequest`): + request (:class:`google.pubsub_v1.types.GetSubscriptionRequest`): The request object. Request for the GetSubscription method. subscription (:class:`str`): Required. The name of the subscription to get. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -336,7 +342,7 @@ async def get_subscription( sent along with the request as metadata. Returns: - ~.pubsub.Subscription: + google.pubsub_v1.types.Subscription: A subscription resource. """ # Create or coerce a protobuf request object. @@ -402,7 +408,7 @@ async def update_subscription( modifiable. Args: - request (:class:`~.pubsub.UpdateSubscriptionRequest`): + request (:class:`google.pubsub_v1.types.UpdateSubscriptionRequest`): The request object. 
Request for the UpdateSubscription method. @@ -413,7 +419,7 @@ async def update_subscription( sent along with the request as metadata. Returns: - ~.pubsub.Subscription: + google.pubsub_v1.types.Subscription: A subscription resource. """ # Create or coerce a protobuf request object. @@ -460,12 +466,13 @@ async def list_subscriptions( r"""Lists matching subscriptions. Args: - request (:class:`~.pubsub.ListSubscriptionsRequest`): + request (:class:`google.pubsub_v1.types.ListSubscriptionsRequest`): The request object. Request for the `ListSubscriptions` method. project (:class:`str`): Required. The name of the project in which to list subscriptions. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -477,8 +484,8 @@ async def list_subscriptions( sent along with the request as metadata. Returns: - ~.pagers.ListSubscriptionsAsyncPager: - Response for the ``ListSubscriptions`` method. + google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager: + Response for the ListSubscriptions method. Iterating over this object will yield results and resolve additional pages automatically. @@ -555,12 +562,13 @@ async def delete_subscription( topic unless the same topic is specified. Args: - request (:class:`~.pubsub.DeleteSubscriptionRequest`): + request (:class:`google.pubsub_v1.types.DeleteSubscriptionRequest`): The request object. Request for the DeleteSubscription method. subscription (:class:`str`): Required. The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -635,12 +643,13 @@ async def modify_ack_deadline( used for subsequent messages. Args: - request (:class:`~.pubsub.ModifyAckDeadlineRequest`): + request (:class:`google.pubsub_v1.types.ModifyAckDeadlineRequest`): The request object. 
Request for the ModifyAckDeadline method. subscription (:class:`str`): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -661,6 +670,7 @@ async def modify_ack_deadline( minimum deadline you can specify is 0 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). + This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -741,12 +751,13 @@ async def acknowledge( error. Args: - request (:class:`~.pubsub.AcknowledgeRequest`): + request (:class:`google.pubsub_v1.types.AcknowledgeRequest`): The request object. Request for the Acknowledge method. subscription (:class:`str`): Required. The subscription whose message is being acknowledged. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -754,6 +765,7 @@ async def acknowledge( Required. The acknowledgment ID for the messages being acknowledged that was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. + This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -828,12 +840,13 @@ async def pull( pending for the given subscription. Args: - request (:class:`~.pubsub.PullRequest`): + request (:class:`google.pubsub_v1.types.PullRequest`): The request object. Request for the `Pull` method. subscription (:class:`str`): Required. The subscription from which messages should be pulled. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -847,6 +860,7 @@ async def pull( discouraged because it adversely impacts the performance of ``Pull`` operations. We recommend that users do not set this field. + This corresponds to the ``return_immediately`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -856,6 +870,7 @@ async def pull( Must be a positive integer. The Pub/Sub system may return fewer than the number specified. + This corresponds to the ``max_messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -867,8 +882,8 @@ async def pull( sent along with the request as metadata. Returns: - ~.pubsub.PullResponse: - Response for the ``Pull`` method. + google.pubsub_v1.types.PullResponse: + Response for the Pull method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -942,7 +957,7 @@ def streaming_pull( configuring the underlying RPC channel. Args: - requests (AsyncIterator[`~.pubsub.StreamingPullRequest`]): + requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): The request object AsyncIterator. Request for the `StreamingPull` streaming RPC method. This request is used to establish the initial stream as well as to stream acknowledgements @@ -955,10 +970,9 @@ def streaming_pull( sent along with the request as metadata. Returns: - AsyncIterable[~.pubsub.StreamingPullResponse]: - Response for the ``StreamingPull`` method. This response - is used to stream messages from the server to the - client. + AsyncIterable[google.pubsub_v1.types.StreamingPullResponse]: + Response for the StreamingPull method. This response is used to stream + messages from the server to the client. """ @@ -1007,16 +1021,17 @@ async def modify_push_config( call regardless of changes to the ``PushConfig``. Args: - request (:class:`~.pubsub.ModifyPushConfigRequest`): + request (:class:`google.pubsub_v1.types.ModifyPushConfigRequest`): The request object. 
Request for the ModifyPushConfig method. subscription (:class:`str`): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - push_config (:class:`~.pubsub.PushConfig`): + push_config (:class:`google.pubsub_v1.types.PushConfig`): Required. The push configuration for future deliveries. An empty ``pushConfig`` indicates that the Pub/Sub @@ -1024,6 +1039,7 @@ async def modify_push_config( subscription and allow messages to be pulled and acknowledged - effectively pausing the subscription if ``Pull`` or ``StreamingPull`` is not called. + This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1099,11 +1115,12 @@ async def get_snapshot( subscription to the state captured by a snapshot. Args: - request (:class:`~.pubsub.GetSnapshotRequest`): + request (:class:`google.pubsub_v1.types.GetSnapshotRequest`): The request object. Request for the GetSnapshot method. snapshot (:class:`str`): Required. The name of the snapshot to get. Format is ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1115,13 +1132,13 @@ async def get_snapshot( sent along with the request as metadata. Returns: - ~.pubsub.Snapshot: + google.pubsub_v1.types.Snapshot: A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. """ # Create or coerce a protobuf request object. @@ -1188,12 +1205,13 @@ async def list_snapshots( in an existing subscription to the state captured by a snapshot. Args: - request (:class:`~.pubsub.ListSnapshotsRequest`): + request (:class:`google.pubsub_v1.types.ListSnapshotsRequest`): The request object. Request for the `ListSnapshots` method. project (:class:`str`): Required. The name of the project in which to list snapshots. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1205,8 +1223,8 @@ async def list_snapshots( sent along with the request as metadata. Returns: - ~.pagers.ListSnapshotsAsyncPager: - Response for the ``ListSnapshots`` method. + google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager: + Response for the ListSnapshots method. Iterating over this object will yield results and resolve additional pages automatically. @@ -1297,7 +1315,7 @@ async def create_snapshot( request. Args: - request (:class:`~.pubsub.CreateSnapshotRequest`): + request (:class:`google.pubsub_v1.types.CreateSnapshotRequest`): The request object. Request for the `CreateSnapshot` method. name (:class:`str`): @@ -1308,6 +1326,7 @@ async def create_snapshot( requests, you must specify a name. See the resource name rules. Format is ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1322,6 +1341,7 @@ async def create_snapshot( published to the subscription's topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -1333,13 +1353,13 @@ async def create_snapshot( sent along with the request as metadata. Returns: - ~.pubsub.Snapshot: + google.pubsub_v1.types.Snapshot: A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. """ # Create or coerce a protobuf request object. @@ -1405,7 +1425,7 @@ async def update_snapshot( snapshot. Args: - request (:class:`~.pubsub.UpdateSnapshotRequest`): + request (:class:`google.pubsub_v1.types.UpdateSnapshotRequest`): The request object. Request for the UpdateSnapshot method. @@ -1416,13 +1436,13 @@ async def update_snapshot( sent along with the request as metadata. Returns: - ~.pubsub.Snapshot: + google.pubsub_v1.types.Snapshot: A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. """ # Create or coerce a protobuf request object. @@ -1478,12 +1498,13 @@ async def delete_snapshot( the same subscription is specified. Args: - request (:class:`~.pubsub.DeleteSnapshotRequest`): + request (:class:`google.pubsub_v1.types.DeleteSnapshotRequest`): The request object. Request for the `DeleteSnapshot` method. 
snapshot (:class:`str`): Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1556,7 +1577,7 @@ async def seek( same topic. Args: - request (:class:`~.pubsub.SeekRequest`): + request (:class:`google.pubsub_v1.types.SeekRequest`): The request object. Request for the `Seek` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1566,10 +1587,8 @@ async def seek( sent along with the request as metadata. Returns: - ~.pubsub.SeekResponse: - Response for the ``Seek`` method (this response is - empty). - + google.pubsub_v1.types.SeekResponse: + Response for the Seek method (this response is empty). """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index b80af2d81132..67956a53db8a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -17,6 +17,7 @@ from collections import OrderedDict from distutils import util +import functools import os import re from typing import ( @@ -49,6 +50,8 @@ from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.types import pubsub +import grpc + from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO from .transports.grpc import SubscriberGrpcTransport from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport @@ -138,6 +141,23 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -151,7 +171,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + SubscriberClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -283,10 +303,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.SubscriberTransport]): The + transport (Union[str, SubscriberTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -322,21 +342,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -374,12 +390,21 @@ def __init__( self._transport = transport else: Transport = type(self).get_transport_class(transport) + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(Transport, type(self)._transport_registry["grpc"]): + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + Transport = functools.partial(Transport, channel=channel) + self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -412,9 +437,9 @@ def create_subscription( Args: - request (:class:`~.pubsub.Subscription`): + request (google.pubsub_v1.types.Subscription): The request object. A subscription resource. - name (:class:`str`): + name (str): Required. The name of the subscription. 
It must have the format ``"projects/{project}/subscriptions/{subscription}"``. @@ -424,27 +449,30 @@ def create_subscription( (``~``), plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - topic (:class:`str`): + topic (str): Required. The name of the topic from which this subscription is receiving messages. Format is ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been deleted. + This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - push_config (:class:`~.pubsub.PushConfig`): + push_config (google.pubsub_v1.types.PushConfig): If push delivery is used with this subscription, this field is used to configure it. An empty ``pushConfig`` signifies that the subscriber will pull and ack messages using API methods. + This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_deadline_seconds (:class:`int`): + ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the @@ -469,6 +497,7 @@ def create_subscription( If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message. + This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -480,7 +509,7 @@ def create_subscription( sent along with the request as metadata. Returns: - ~.pubsub.Subscription: + google.pubsub_v1.types.Subscription: A subscription resource. """ # Create or coerce a protobuf request object. 
@@ -541,12 +570,13 @@ def get_subscription( Args: - request (:class:`~.pubsub.GetSubscriptionRequest`): + request (google.pubsub_v1.types.GetSubscriptionRequest): The request object. Request for the GetSubscription method. - subscription (:class:`str`): + subscription (str): Required. The name of the subscription to get. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -558,7 +588,7 @@ def get_subscription( sent along with the request as metadata. Returns: - ~.pubsub.Subscription: + google.pubsub_v1.types.Subscription: A subscription resource. """ # Create or coerce a protobuf request object. @@ -616,7 +646,7 @@ def update_subscription( Args: - request (:class:`~.pubsub.UpdateSubscriptionRequest`): + request (google.pubsub_v1.types.UpdateSubscriptionRequest): The request object. Request for the UpdateSubscription method. @@ -627,7 +657,7 @@ def update_subscription( sent along with the request as metadata. Returns: - ~.pubsub.Subscription: + google.pubsub_v1.types.Subscription: A subscription resource. """ # Create or coerce a protobuf request object. @@ -670,12 +700,13 @@ def list_subscriptions( Args: - request (:class:`~.pubsub.ListSubscriptionsRequest`): + request (google.pubsub_v1.types.ListSubscriptionsRequest): The request object. Request for the `ListSubscriptions` method. - project (:class:`str`): + project (str): Required. The name of the project in which to list subscriptions. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -687,8 +718,8 @@ def list_subscriptions( sent along with the request as metadata. Returns: - ~.pagers.ListSubscriptionsPager: - Response for the ``ListSubscriptions`` method. 
+ google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager: + Response for the ListSubscriptions method. Iterating over this object will yield results and resolve additional pages automatically. @@ -757,12 +788,13 @@ def delete_subscription( Args: - request (:class:`~.pubsub.DeleteSubscriptionRequest`): + request (google.pubsub_v1.types.DeleteSubscriptionRequest): The request object. Request for the DeleteSubscription method. - subscription (:class:`str`): + subscription (str): Required. The subscription to delete. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -833,21 +865,22 @@ def modify_ack_deadline( Args: - request (:class:`~.pubsub.ModifyAckDeadlineRequest`): + request (google.pubsub_v1.types.ModifyAckDeadlineRequest): The request object. Request for the ModifyAckDeadline method. - subscription (:class:`str`): + subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_ids (:class:`Sequence[str]`): + ack_ids (Sequence[str]): Required. List of acknowledgment IDs. This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_deadline_seconds (:class:`int`): + ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this request was sent to the Pub/Sub system. For example, if the value is 10, the new ack deadline will @@ -859,6 +892,7 @@ def modify_ack_deadline( minimum deadline you can specify is 0 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). + This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -891,12 +925,11 @@ def modify_ack_deadline( if subscription is not None: request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids if ack_deadline_seconds is not None: request.ack_deadline_seconds = ack_deadline_seconds - if ack_ids: - request.ack_ids.extend(ack_ids) - # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] @@ -935,19 +968,21 @@ def acknowledge( Args: - request (:class:`~.pubsub.AcknowledgeRequest`): + request (google.pubsub_v1.types.AcknowledgeRequest): The request object. Request for the Acknowledge method. - subscription (:class:`str`): + subscription (str): Required. The subscription whose message is being acknowledged. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_ids (:class:`Sequence[str]`): + ack_ids (Sequence[str]): Required. The acknowledgment ID for the messages being acknowledged that was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. + This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -980,9 +1015,8 @@ def acknowledge( if subscription is not None: request.subscription = subscription - - if ack_ids: - request.ack_ids.extend(ack_ids) + if ack_ids is not None: + request.ack_ids = ack_ids # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1018,16 +1052,17 @@ def pull( Args: - request (:class:`~.pubsub.PullRequest`): + request (google.pubsub_v1.types.PullRequest): The request object. Request for the `Pull` method. - subscription (:class:`str`): + subscription (str): Required. The subscription from which messages should be pulled. Format is ``projects/{project}/subscriptions/{sub}``. 
+ This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - return_immediately (:class:`bool`): + return_immediately (bool): Optional. If this field set to true, the system will respond immediately even if it there are no messages available to return in the ``Pull`` response. Otherwise, @@ -1037,15 +1072,17 @@ def pull( discouraged because it adversely impacts the performance of ``Pull`` operations. We recommend that users do not set this field. + This corresponds to the ``return_immediately`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - max_messages (:class:`int`): + max_messages (int): Required. The maximum number of messages to return for this request. Must be a positive integer. The Pub/Sub system may return fewer than the number specified. + This corresponds to the ``max_messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1057,8 +1094,8 @@ def pull( sent along with the request as metadata. Returns: - ~.pubsub.PullResponse: - Response for the ``Pull`` method. + google.pubsub_v1.types.PullResponse: + Response for the Pull method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -1124,7 +1161,7 @@ def streaming_pull( Args: - requests (Iterator[`~.pubsub.StreamingPullRequest`]): + requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): The request object iterator. Request for the `StreamingPull` streaming RPC method. This request is used to establish the initial stream as well as to stream acknowledgements @@ -1137,10 +1174,9 @@ def streaming_pull( sent along with the request as metadata. Returns: - Iterable[~.pubsub.StreamingPullResponse]: - Response for the ``StreamingPull`` method. This response - is used to stream messages from the server to the - client. 
+ Iterable[google.pubsub_v1.types.StreamingPullResponse]: + Response for the StreamingPull method. This response is used to stream + messages from the server to the client. """ @@ -1179,16 +1215,17 @@ def modify_push_config( Args: - request (:class:`~.pubsub.ModifyPushConfigRequest`): + request (google.pubsub_v1.types.ModifyPushConfigRequest): The request object. Request for the ModifyPushConfig method. - subscription (:class:`str`): + subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - push_config (:class:`~.pubsub.PushConfig`): + push_config (google.pubsub_v1.types.PushConfig): Required. The push configuration for future deliveries. An empty ``pushConfig`` indicates that the Pub/Sub @@ -1196,6 +1233,7 @@ def modify_push_config( subscription and allow messages to be pulled and acknowledged - effectively pausing the subscription if ``Pull`` or ``StreamingPull`` is not called. + This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1267,11 +1305,12 @@ def get_snapshot( Args: - request (:class:`~.pubsub.GetSnapshotRequest`): + request (google.pubsub_v1.types.GetSnapshotRequest): The request object. Request for the GetSnapshot method. - snapshot (:class:`str`): + snapshot (str): Required. The name of the snapshot to get. Format is ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1283,13 +1322,13 @@ def get_snapshot( sent along with the request as metadata. Returns: - ~.pubsub.Snapshot: + google.pubsub_v1.types.Snapshot: A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message - acknowledgments in bulk. 
That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. """ # Create or coerce a protobuf request object. @@ -1348,12 +1387,13 @@ def list_snapshots( Args: - request (:class:`~.pubsub.ListSnapshotsRequest`): + request (google.pubsub_v1.types.ListSnapshotsRequest): The request object. Request for the `ListSnapshots` method. - project (:class:`str`): + project (str): Required. The name of the project in which to list snapshots. Format is ``projects/{project-id}``. + This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1365,8 +1405,8 @@ def list_snapshots( sent along with the request as metadata. Returns: - ~.pagers.ListSnapshotsPager: - Response for the ``ListSnapshots`` method. + google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager: + Response for the ListSnapshots method. Iterating over this object will yield results and resolve additional pages automatically. @@ -1449,10 +1489,10 @@ def create_snapshot( Args: - request (:class:`~.pubsub.CreateSnapshotRequest`): + request (google.pubsub_v1.types.CreateSnapshotRequest): The request object. Request for the `CreateSnapshot` method. - name (:class:`str`): + name (str): Required. User-provided name for this snapshot. If the name is not provided in the request, the server will assign a random name for this snapshot on the same @@ -1460,10 +1500,11 @@ def create_snapshot( requests, you must specify a name. See the resource name rules. Format is ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- subscription (:class:`str`): + subscription (str): Required. The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to retain: (a) The existing backlog on the @@ -1474,6 +1515,7 @@ def create_snapshot( published to the subscription's topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. + This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1485,13 +1527,13 @@ def create_snapshot( sent along with the request as metadata. Returns: - ~.pubsub.Snapshot: + google.pubsub_v1.types.Snapshot: A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. """ # Create or coerce a protobuf request object. @@ -1553,7 +1595,7 @@ def update_snapshot( Args: - request (:class:`~.pubsub.UpdateSnapshotRequest`): + request (google.pubsub_v1.types.UpdateSnapshotRequest): The request object. Request for the UpdateSnapshot method. @@ -1564,13 +1606,13 @@ def update_snapshot( sent along with the request as metadata. Returns: - ~.pubsub.Snapshot: + google.pubsub_v1.types.Snapshot: A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. 
+ [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. """ # Create or coerce a protobuf request object. @@ -1622,12 +1664,13 @@ def delete_snapshot( Args: - request (:class:`~.pubsub.DeleteSnapshotRequest`): + request (google.pubsub_v1.types.DeleteSnapshotRequest): The request object. Request for the `DeleteSnapshot` method. - snapshot (:class:`str`): + snapshot (str): Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. + This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1696,7 +1739,7 @@ def seek( Args: - request (:class:`~.pubsub.SeekRequest`): + request (google.pubsub_v1.types.SeekRequest): The request object. Request for the `Seek` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1706,10 +1749,8 @@ def seek( sent along with the request as metadata. Returns: - ~.pubsub.SeekResponse: - Response for the ``Seek`` method (this response is - empty). - + google.pubsub_v1.types.SeekResponse: + Response for the Seek method (this response is empty). """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index 713184d790ee..b7ec9f6e3676 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.pubsub_v1.types import pubsub @@ -24,7 +33,7 @@ class ListSubscriptionsPager: """A pager for iterating through ``list_subscriptions`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListSubscriptionsResponse` object, and + :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and provides an ``__iter__`` method to iterate through its ``subscriptions`` field. @@ -33,7 +42,7 @@ class ListSubscriptionsPager: through the ``subscriptions`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListSubscriptionsResponse` + All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListSubscriptionsRequest`): + request (google.pubsub_v1.types.ListSubscriptionsRequest): The initial request object. - response (:class:`~.pubsub.ListSubscriptionsResponse`): + response (google.pubsub_v1.types.ListSubscriptionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListSubscriptionsAsyncPager: """A pager for iterating through ``list_subscriptions`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListSubscriptionsResponse` object, and + :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and provides an ``__aiter__`` method to iterate through its ``subscriptions`` field. 
@@ -95,7 +104,7 @@ class ListSubscriptionsAsyncPager: through the ``subscriptions`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListSubscriptionsResponse` + All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListSubscriptionsRequest`): + request (google.pubsub_v1.types.ListSubscriptionsRequest): The initial request object. - response (:class:`~.pubsub.ListSubscriptionsResponse`): + response (google.pubsub_v1.types.ListSubscriptionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -152,7 +161,7 @@ class ListSnapshotsPager: """A pager for iterating through ``list_snapshots`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListSnapshotsResponse` object, and + :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and provides an ``__iter__`` method to iterate through its ``snapshots`` field. @@ -161,7 +170,7 @@ class ListSnapshotsPager: through the ``snapshots`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListSnapshotsResponse` + All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -179,9 +188,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListSnapshotsRequest`): + request (google.pubsub_v1.types.ListSnapshotsRequest): The initial request object. 
- response (:class:`~.pubsub.ListSnapshotsResponse`): + response (google.pubsub_v1.types.ListSnapshotsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -214,7 +223,7 @@ class ListSnapshotsAsyncPager: """A pager for iterating through ``list_snapshots`` requests. This class thinly wraps an initial - :class:`~.pubsub.ListSnapshotsResponse` object, and + :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and provides an ``__aiter__`` method to iterate through its ``snapshots`` field. @@ -223,7 +232,7 @@ class ListSnapshotsAsyncPager: through the ``snapshots`` field on the corresponding responses. - All the usual :class:`~.pubsub.ListSnapshotsResponse` + All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -241,9 +250,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.pubsub.ListSnapshotsRequest`): + request (google.pubsub_v1.types.ListSnapshotsRequest): The initial request object. - response (:class:`~.pubsub.ListSnapshotsResponse`): + response (google.pubsub_v1.types.ListSnapshotsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 1be01d024437..83815049e1ef 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -63,6 +63,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -93,6 +94,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -109,6 +114,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
@@ -118,11 +128,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -167,12 +172,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index fa89c11bce2c..d5efabf2fd52 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -138,6 +139,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -154,6 +159,11 @@ def __init__( """ self._ssl_channel_credentials = ssl_channel_credentials + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -163,11 +173,6 @@ def __init__( self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -212,12 +217,18 @@ def __init__( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + # create a new channel. The provided one is ignored. 
self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, + ssl_credentials=self._ssl_channel_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 39563bb58dc7..001eea88a59b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -104,7 +104,7 @@ class SchemaSettings(proto.Message): ``projects/{project}/schemas/{schema}``. The value of this field will be ``_deleted-schema_`` if the schema has been deleted. - encoding (~.gp_schema.Encoding): + encoding (google.pubsub_v1.types.Encoding): The encoding of messages validated against ``schema``. """ @@ -126,10 +126,10 @@ class Topic(proto.Message): (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. - labels (Sequence[~.pubsub.Topic.LabelsEntry]): + labels (Sequence[google.pubsub_v1.types.Topic.LabelsEntry]): See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). - message_storage_policy (~.pubsub.MessageStoragePolicy): + message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy): Policy constraining the set of Google Cloud Platform regions where messages published to the topic may be stored. If not present, then no @@ -140,7 +140,7 @@ class Topic(proto.Message): The expected format is ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. - schema_settings (~.pubsub.SchemaSettings): + schema_settings (google.pubsub_v1.types.SchemaSettings): Settings for validating messages published against a schema. EXPERIMENTAL: Schema support is in development @@ -182,7 +182,7 @@ class PubsubMessage(proto.Message): The message data field. 
If this field is empty, the message must contain at least one attribute. - attributes (Sequence[~.pubsub.PubsubMessage.AttributesEntry]): + attributes (Sequence[google.pubsub_v1.types.PubsubMessage.AttributesEntry]): Attributes for this message. If this field is empty, the message must contain non-empty data. This can be used to filter messages on the @@ -194,7 +194,7 @@ class PubsubMessage(proto.Message): ``PubsubMessage`` via a ``Pull`` call or a push delivery. It must not be populated by the publisher in a ``Publish`` call. - publish_time (~.timestamp.Timestamp): + publish_time (google.protobuf.timestamp_pb2.Timestamp): The time at which the message was published, populated by the server when it receives the ``Publish`` call. It must not be populated by the publisher in a ``Publish`` call. @@ -236,9 +236,9 @@ class UpdateTopicRequest(proto.Message): r"""Request for the UpdateTopic method. Attributes: - topic (~.pubsub.Topic): + topic (google.pubsub_v1.types.Topic): Required. The updated topic object. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Indicates which fields in the provided topic to update. Must be specified and non-empty. Note that if ``update_mask`` contains "message_storage_policy" but the @@ -259,7 +259,7 @@ class PublishRequest(proto.Message): topic (str): Required. The messages in the request will be published on this topic. Format is ``projects/{project}/topics/{topic}``. - messages (Sequence[~.pubsub.PubsubMessage]): + messages (Sequence[google.pubsub_v1.types.PubsubMessage]): Required. The messages to publish. """ @@ -309,7 +309,7 @@ class ListTopicsResponse(proto.Message): r"""Response for the ``ListTopics`` method. Attributes: - topics (Sequence[~.pubsub.Topic]): + topics (Sequence[google.pubsub_v1.types.Topic]): The resulting topics. 
next_page_token (str): If not empty, indicates that there may be more topics that @@ -468,7 +468,7 @@ class Subscription(proto.Message): ``projects/{project}/topics/{topic}``. The value of this field will be ``_deleted-topic_`` if the topic has been deleted. - push_config (~.pubsub.PushConfig): + push_config (google.pubsub_v1.types.PushConfig): If push delivery is used with this subscription, this field is used to configure it. An empty ``pushConfig`` signifies that the subscriber will pull and ack messages using API @@ -503,7 +503,7 @@ class Subscription(proto.Message): of the ``message_retention_duration`` window. This must be true if you would like to [Seek to a timestamp] (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). - message_retention_duration (~.duration.Duration): + message_retention_duration (google.protobuf.duration_pb2.Duration): How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this @@ -511,7 +511,7 @@ class Subscription(proto.Message): thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes. - labels (Sequence[~.pubsub.Subscription.LabelsEntry]): + labels (Sequence[google.pubsub_v1.types.Subscription.LabelsEntry]): See Creating and managing labels. @@ -520,7 +520,7 @@ class Subscription(proto.Message): in ``PubsubMessage`` will be delivered to the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they may be delivered in any order. - expiration_policy (~.pubsub.ExpirationPolicy): + expiration_policy (google.pubsub_v1.types.ExpirationPolicy): A policy that specifies the conditions for this subscription's expiration. 
A subscription is considered active as long as any connected subscriber is successfully @@ -536,7 +536,7 @@ class Subscription(proto.Message): ``attributes`` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out. - dead_letter_policy (~.pubsub.DeadLetterPolicy): + dead_letter_policy (google.pubsub_v1.types.DeadLetterPolicy): A policy that specifies the conditions for dead lettering messages in this subscription. If dead_letter_policy is not set, dead lettering is disabled. @@ -546,7 +546,7 @@ class Subscription(proto.Message): service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription. - retry_policy (~.pubsub.RetryPolicy): + retry_policy (google.pubsub_v1.types.RetryPolicy): A policy that specifies how Pub/Sub retries message delivery for this subscription. @@ -613,12 +613,12 @@ class RetryPolicy(proto.Message): backoff. Attributes: - minimum_backoff (~.duration.Duration): + minimum_backoff (google.protobuf.duration_pb2.Duration): The minimum delay between consecutive deliveries of a given message. Value should be between 0 and 600 seconds. Defaults to 10 seconds. - maximum_backoff (~.duration.Duration): + maximum_backoff (google.protobuf.duration_pb2.Duration): The maximum delay between consecutive deliveries of a given message. Value should be between 0 and 600 seconds. Defaults to 600 @@ -679,7 +679,7 @@ class ExpirationPolicy(proto.Message): expiration (i.e., automatic resource deletion). Attributes: - ttl (~.duration.Duration): + ttl (google.protobuf.duration_pb2.Duration): Specifies the "time-to-live" duration for an associated resource. The resource expires if it is not active for a period of ``ttl``. The definition of "activity" depends on @@ -700,7 +700,7 @@ class PushConfig(proto.Message): A URL locating the endpoint to which messages should be pushed. For example, a Webhook endpoint might use ``https://example.com/push``. 
- attributes (Sequence[~.pubsub.PushConfig.AttributesEntry]): + attributes (Sequence[google.pubsub_v1.types.PushConfig.AttributesEntry]): Endpoint configuration attributes that can be used to control different aspects of the message delivery. @@ -730,7 +730,7 @@ class PushConfig(proto.Message): .. raw:: html
attributes { "x-goog-version": "v1" } 
- oidc_token (~.pubsub.PushConfig.OidcToken): + oidc_token (google.pubsub_v1.types.PushConfig.OidcToken): If specified, Pub/Sub will generate and attach an OIDC JWT token as an ``Authorization`` header in the HTTP request for every pushed message. @@ -781,7 +781,7 @@ class ReceivedMessage(proto.Message): ack_id (str): This ID can be used to acknowledge the received message. - message (~.pubsub.PubsubMessage): + message (google.pubsub_v1.types.PubsubMessage): The message. delivery_attempt (int): The approximate number of times that Cloud Pub/Sub has @@ -827,9 +827,9 @@ class UpdateSubscriptionRequest(proto.Message): r"""Request for the UpdateSubscription method. Attributes: - subscription (~.pubsub.Subscription): + subscription (google.pubsub_v1.types.Subscription): Required. The updated subscription object. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Indicates which fields in the provided subscription to update. Must be specified and non-empty. @@ -867,7 +867,7 @@ class ListSubscriptionsResponse(proto.Message): r"""Response for the ``ListSubscriptions`` method. Attributes: - subscriptions (Sequence[~.pubsub.Subscription]): + subscriptions (Sequence[google.pubsub_v1.types.Subscription]): The subscriptions that match the request. next_page_token (str): If not empty, indicates that there may be more subscriptions @@ -905,7 +905,7 @@ class ModifyPushConfigRequest(proto.Message): subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. - push_config (~.pubsub.PushConfig): + push_config (google.pubsub_v1.types.PushConfig): Required. The push configuration for future deliveries. An empty ``pushConfig`` indicates that the Pub/Sub system @@ -955,7 +955,7 @@ class PullResponse(proto.Message): r"""Response for the ``Pull`` method. 
Attributes: - received_messages (Sequence[~.pubsub.ReceivedMessage]): + received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. The list will be empty if there are no more messages available in the backlog. For JSON, the response can be entirely empty. The Pub/Sub system may @@ -1122,7 +1122,7 @@ class StreamingPullResponse(proto.Message): stream messages from the server to the client. Attributes: - received_messages (Sequence[~.pubsub.ReceivedMessage]): + received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. This will not be empty. """ @@ -1154,7 +1154,7 @@ class CreateSnapshotRequest(proto.Message): topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. - labels (Sequence[~.pubsub.CreateSnapshotRequest.LabelsEntry]): + labels (Sequence[google.pubsub_v1.types.CreateSnapshotRequest.LabelsEntry]): See Creating and managing labels. @@ -1171,9 +1171,9 @@ class UpdateSnapshotRequest(proto.Message): r"""Request for the UpdateSnapshot method. Attributes: - snapshot (~.pubsub.Snapshot): + snapshot (google.pubsub_v1.types.Snapshot): Required. The updated snapshot object. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Indicates which fields in the provided snapshot to update. Must be specified and non-empty. @@ -1197,7 +1197,7 @@ class Snapshot(proto.Message): topic (str): The name of the topic from which this snapshot is retaining messages. - expire_time (~.timestamp.Timestamp): + expire_time (google.protobuf.timestamp_pb2.Timestamp): The snapshot is guaranteed to exist up until this time. A newly-created snapshot expires no later than 7 days from the time of its creation. Its exact lifetime is determined at @@ -1211,7 +1211,7 @@ class Snapshot(proto.Message): expire in 4 days. 
The service will refuse to create a snapshot that would expire in less than 1 hour after creation. - labels (Sequence[~.pubsub.Snapshot.LabelsEntry]): + labels (Sequence[google.pubsub_v1.types.Snapshot.LabelsEntry]): See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). """ @@ -1264,7 +1264,7 @@ class ListSnapshotsResponse(proto.Message): r"""Response for the ``ListSnapshots`` method. Attributes: - snapshots (Sequence[~.pubsub.Snapshot]): + snapshots (Sequence[google.pubsub_v1.types.Snapshot]): The resulting snapshots. next_page_token (str): If not empty, indicates that there may be more snapshot that @@ -1299,7 +1299,7 @@ class SeekRequest(proto.Message): Attributes: subscription (str): Required. The subscription to affect. - time (~.timestamp.Timestamp): + time (google.protobuf.timestamp_pb2.Timestamp): The time to seek to. Messages retained in the subscription that were published before this time are marked as acknowledged, and messages retained in the subscription that diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 2efa667c15ff..e4f71d1328a6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -60,7 +60,7 @@ class Schema(proto.Message): name (str): Required. Name of the schema. Format is ``projects/{project}/schemas/{schema}``. - type_ (~.gp_schema.Schema.Type): + type_ (google.pubsub_v1.types.Schema.Type): The type of the schema definition. definition (str): The definition of the schema. This should contain a string @@ -88,7 +88,7 @@ class CreateSchemaRequest(proto.Message): parent (str): Required. The name of the project in which to create the schema. Format is ``projects/{project-id}``. - schema (~.gp_schema.Schema): + schema (google.pubsub_v1.types.Schema): Required. The schema object to create. This schema's ``name`` parameter is ignored. 
The schema @@ -117,7 +117,7 @@ class GetSchemaRequest(proto.Message): name (str): Required. The name of the schema to get. Format is ``projects/{project}/schemas/{schema}``. - view (~.gp_schema.SchemaView): + view (google.pubsub_v1.types.SchemaView): The set of fields to return in the response. If not set, returns a Schema with ``name`` and ``type``, but not ``definition``. Set to ``FULL`` to retrieve all fields. @@ -135,7 +135,7 @@ class ListSchemasRequest(proto.Message): parent (str): Required. The name of the project in which to list schemas. Format is ``projects/{project-id}``. - view (~.gp_schema.SchemaView): + view (google.pubsub_v1.types.SchemaView): The set of Schema fields to return in the response. If not set, returns Schemas with ``name`` and ``type``, but not ``definition``. Set to ``FULL`` to retrieve all fields. @@ -161,7 +161,7 @@ class ListSchemasResponse(proto.Message): r"""Response for the ``ListSchemas`` method. Attributes: - schemas (Sequence[~.gp_schema.Schema]): + schemas (Sequence[google.pubsub_v1.types.Schema]): The resulting schemas. next_page_token (str): If not empty, indicates that there may be more schemas that @@ -197,7 +197,7 @@ class ValidateSchemaRequest(proto.Message): parent (str): Required. The name of the project in which to validate schemas. Format is ``projects/{project-id}``. - schema (~.gp_schema.Schema): + schema (google.pubsub_v1.types.Schema): Required. The schema object to validate. """ @@ -221,11 +221,11 @@ class ValidateMessageRequest(proto.Message): Name of the schema against which to validate. Format is ``projects/{project}/schemas/{schema}``. - schema (~.gp_schema.Schema): + schema (google.pubsub_v1.types.Schema): Ad-hoc schema against which to validate message (bytes): Message to validate against the provided ``schema_spec``. 
- encoding (~.gp_schema.Encoding): + encoding (google.pubsub_v1.types.Encoding): The encoding expected for messages """ diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 913f271d59c9..6b178ad185c4 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -30,6 +30,17 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -75,12 +86,14 @@ def default(session): session.install( "mock", "pytest", "pytest-cov", ) + session.install("-e", ".") # Run py.test against the unit tests. session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -127,9 +140,21 @@ def system(session): # Run py.test against the system tests. 
if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index bca0522ec4d9..97bf7da80e39 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -85,7 +85,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index d4b5ca201d78..8d6a7126f441 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,36 +4,36 @@ "git": { "name": ".", "remote": "git@github.com:plamut/python-pubsub.git", - "sha": "a4eab77decdd7ea0d421b56a784e8a673a5595ec" + "sha": "12307d8f12d96974130c7a49bd1eba4d62956f21" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "61ab0348bd228c942898aee291d677f0afdb888c", - "internalRef": "352069361" + "sha": "8b3d36daaf5561496b7d4075fba4f2c52d18ca1c", + "internalRef": "359285402" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f" + "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f" + "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "56ddc68f36b32341e9f22c2c59b4ce6aa3ba635f" + "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012" } } ], diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index d2ade4b278ae..2c0bc560735f 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -79,6 +79,50 @@ if count < 18: raise Exception("Expected replacements for gRPC channel options not made.") +# If the emulator is used, force an insecure gRPC channel to avoid SSL errors. +clients_to_patch = [ + "google/pubsub_v1/services/publisher/client.py", + "google/pubsub_v1/services/subscriber/client.py", +] +err_msg = "Expected replacements for gRPC channel to use with the emulator not made." 
+ +count = s.replace( + clients_to_patch, + r"import os", + "import functools\n\g<0>" +) + +if count < len(clients_to_patch): + raise Exception(err_msg) + +count = s.replace( + clients_to_patch, + r"from google\.pubsub_v1\.types import pubsub", + "\g<0>\n\nimport grpc" +) + +if count < len(clients_to_patch): + raise Exception(err_msg) + +count = s.replace( + clients_to_patch, + r"Transport = type\(self\)\.get_transport_class\(transport\)", + """\g<0> + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(Transport, type(self)._transport_registry["grpc"]): + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + Transport = functools.partial(Transport, channel=channel) + + """, +) + +if count < len(clients_to_patch): + raise Exception(err_msg) + # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream # results. s.replace( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index d1c34d474ba7..cf24f5273627 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -85,7 +85,20 @@ def test__get_default_mtls_endpoint(): assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +def test_publisher_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = PublisherClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "pubsub.googleapis.com:443" + + 
+@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) def test_publisher_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( @@ -103,7 +116,10 @@ def test_publisher_client_from_service_account_file(client_class): def test_publisher_client_get_transport_class(): transport = PublisherClient.get_transport_class() - assert transport == transports.PublisherGrpcTransport + available_transports = [ + transports.PublisherGrpcTransport, + ] + assert transport in available_transports transport = PublisherClient.get_transport_class("grpc") assert transport == transports.PublisherGrpcTransport @@ -150,7 +166,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -166,7 +182,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -182,7 +198,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -210,7 +226,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -259,29 +275,25 @@ def test_publisher_client_mtls_env_auto( 
client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -290,66 +302,53 @@ def test_publisher_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -375,7 +374,7 @@ def test_publisher_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -405,7 +404,7 @@ def test_publisher_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + 
client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -422,7 +421,7 @@ def test_publisher_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -2492,7 +2491,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -2612,6 +2611,52 @@ def test_publisher_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], +) +def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_publisher_host_no_port(): client = PublisherClient( credentials=credentials.AnonymousCredentials(), @@ -2633,7 +2678,7 @@ def test_publisher_host_with_port(): def test_publisher_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.PublisherGrpcTransport( @@ -2645,7 +2690,7 @@ def test_publisher_grpc_transport_channel(): def test_publisher_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.PublisherGrpcAsyncIOTransport( @@ -2656,6 +2701,8 @@ def test_publisher_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], @@ -2665,7 +2712,7 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2707,6 +2754,8 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], @@ -2719,7 +2768,7 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 79cc71324b02..7f2f9d055246 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -88,8 +88,21 @@ def test__get_default_mtls_endpoint(): ) +def test_schema_service_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + 
factory.return_value = creds + info = {"valid": True} + client = SchemaServiceClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "pubsub.googleapis.com:443" + + @pytest.mark.parametrize( - "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] ) def test_schema_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -108,7 +121,10 @@ def test_schema_service_client_from_service_account_file(client_class): def test_schema_service_client_get_transport_class(): transport = SchemaServiceClient.get_transport_class() - assert transport == transports.SchemaServiceGrpcTransport + available_transports = [ + transports.SchemaServiceGrpcTransport, + ] + assert transport in available_transports transport = SchemaServiceClient.get_transport_class("grpc") assert transport == transports.SchemaServiceGrpcTransport @@ -159,7 +175,7 @@ def test_schema_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -175,7 +191,7 @@ def test_schema_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -191,7 +207,7 @@ def test_schema_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -219,7 +235,7 @@ def test_schema_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + 
client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -270,29 +286,25 @@ def test_schema_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -301,66 +313,53 @@ def test_schema_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -386,7 +385,7 @@ def test_schema_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -416,7 +415,7 @@ def test_schema_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, 
+ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -435,7 +434,7 @@ def test_schema_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -1847,6 +1846,55 @@ def test_schema_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_schema_service_host_no_port(): client = SchemaServiceClient( credentials=credentials.AnonymousCredentials(), @@ -1868,7 +1916,7 @@ def test_schema_service_host_with_port(): def test_schema_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.SchemaServiceGrpcTransport( @@ -1880,7 +1928,7 @@ def test_schema_service_grpc_transport_channel(): def test_schema_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.SchemaServiceGrpcAsyncIOTransport( @@ -1891,6 +1939,8 @@ def test_schema_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -1903,7 +1953,7 @@ def test_schema_service_transport_channel_mtls_with_client_cert_source(transport "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -1945,6 +1995,8 @@ def test_schema_service_transport_channel_mtls_with_client_cert_source(transport assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -1960,7 +2012,7 @@ def test_schema_service_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 8dec1c27d15b..2eb3503cd983 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -87,7 +87,20 @@ def test__get_default_mtls_endpoint(): assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) +def test_subscriber_client_from_service_account_info(): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value 
= creds + info = {"valid": True} + client = SubscriberClient.from_service_account_info(info) + assert client.transport._credentials == creds + + assert client.transport._host == "pubsub.googleapis.com:443" + + +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) def test_subscriber_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( @@ -105,7 +118,10 @@ def test_subscriber_client_from_service_account_file(client_class): def test_subscriber_client_get_transport_class(): transport = SubscriberClient.get_transport_class() - assert transport == transports.SubscriberGrpcTransport + available_transports = [ + transports.SubscriberGrpcTransport, + ] + assert transport in available_transports transport = SubscriberClient.get_transport_class("grpc") assert transport == transports.SubscriberGrpcTransport @@ -154,7 +170,7 @@ def test_subscriber_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -170,7 +186,7 @@ def test_subscriber_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -186,7 +202,7 @@ def test_subscriber_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -214,7 +230,7 @@ def test_subscriber_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ 
-263,29 +279,25 @@ def test_subscriber_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
@@ -294,66 +306,53 @@ def test_subscriber_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -379,7 +378,7 @@ def test_subscriber_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -409,7 +408,7 @@ def test_subscriber_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + 
client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -426,7 +425,7 @@ def test_subscriber_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -3754,7 +3753,7 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -3881,6 +3880,52 @@ def test_subscriber_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], +) +def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_subscriber_host_no_port(): client = SubscriberClient( credentials=credentials.AnonymousCredentials(), @@ -3902,7 +3947,7 @@ def test_subscriber_host_with_port(): def test_subscriber_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.SubscriberGrpcTransport( @@ -3914,7 +3959,7 @@ def test_subscriber_grpc_transport_channel(): def test_subscriber_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.SubscriberGrpcAsyncIOTransport( @@ -3925,6 +3970,8 @@ def test_subscriber_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], @@ -3934,7 +3981,7 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3976,6 +4023,8 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], @@ -3988,7 +4037,7 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel From 6288c55dc653738431f2366875cb344d591fc6a3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 3 Mar 2021 02:43:28 -0800 Subject: [PATCH 0625/1197] chore: re-generate GAPIC layer with changes from from googleapis (via synth) (#267) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* chore: update gapic-generator-python to 0.40.11 PiperOrigin-RevId: 359562873 Source-Author: Google APIs Source-Date: Thu Feb 25 10:52:32 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 07932bb995e7dc91b43620ea8402c6668c7d102c Source-Link: https://github.com/googleapis/googleapis/commit/07932bb995e7dc91b43620ea8402c6668c7d102c * feat: Added client libraries for Google Workspace Marketplace API PiperOrigin-RevId: 359658993 Source-Author: Google APIs Source-Date: Thu Feb 25 18:10:16 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: ee4bac5de3e4bfc4bbca87a6b256f1a58dcfadd0 Source-Link: https://github.com/googleapis/googleapis/commit/ee4bac5de3e4bfc4bbca87a6b256f1a58dcfadd0 * fix: Update to PHP micro-generator beta02; fixes some bazel build issues. PiperOrigin-RevId: 359844338 Source-Author: Google APIs Source-Date: Fri Feb 26 14:51:50 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 9adc11e84218d5e234392e7fe81676e778895525 Source-Link: https://github.com/googleapis/googleapis/commit/9adc11e84218d5e234392e7fe81676e778895525 --- .../google/pubsub_v1/__init__.py | 4 +- .../services/publisher/async_client.py | 32 ++- .../services/schema_service/async_client.py | 32 ++- .../services/subscriber/async_client.py | 32 ++- packages/google-cloud-pubsub/synth.metadata | 119 +++++++- .../tests/unit/gapic/pubsub_v1/__init__.py | 15 + .../unit/gapic/pubsub_v1/test_publisher.py | 158 ++++++++++- .../gapic/pubsub_v1/test_schema_service.py | 106 ++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 260 +++++++++++++++++- 9 files changed, 740 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index d5a1de488ff7..8152d60f39f5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -110,6 +110,7 @@ "ModifyPushConfigRequest", "PublishRequest", "PublishResponse", + 
"PublisherClient", "PubsubMessage", "PullRequest", "PullResponse", @@ -125,7 +126,6 @@ "Snapshot", "StreamingPullRequest", "StreamingPullResponse", - "SubscriberClient", "Subscription", "Topic", "UpdateSnapshotRequest", @@ -135,5 +135,5 @@ "ValidateMessageResponse", "ValidateSchemaRequest", "ValidateSchemaResponse", - "PublisherClient", + "SubscriberClient", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index f8572a448862..d2752db2eb8d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -78,8 +78,36 @@ class PublisherAsyncClient: PublisherClient.parse_common_location_path ) - from_service_account_info = PublisherClient.from_service_account_info - from_service_account_file = PublisherClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherAsyncClient: The constructed client. + """ + return PublisherClient.from_service_account_info.__func__(PublisherAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherAsyncClient: The constructed client. 
+ """ + return PublisherClient.from_service_account_file.__func__(PublisherAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 542053f39101..39de60587f89 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -82,8 +82,36 @@ class SchemaServiceAsyncClient: SchemaServiceClient.parse_common_location_path ) - from_service_account_info = SchemaServiceClient.from_service_account_info - from_service_account_file = SchemaServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceAsyncClient: The constructed client. + """ + return SchemaServiceClient.from_service_account_info.__func__(SchemaServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceAsyncClient: The constructed client. 
+ """ + return SchemaServiceClient.from_service_account_file.__func__(SchemaServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 7bfaeb9034fc..0f77f314b50f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -91,8 +91,36 @@ class SubscriberAsyncClient: SubscriberClient.parse_common_location_path ) - from_service_account_info = SubscriberClient.from_service_account_info - from_service_account_file = SubscriberClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberAsyncClient: The constructed client. + """ + return SubscriberClient.from_service_account_info.__func__(SubscriberAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberAsyncClient: The constructed client. 
+ """ + return SubscriberClient.from_service_account_file.__func__(SubscriberAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 8d6a7126f441..4f68f6796a2f 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,16 +3,16 @@ { "git": { "name": ".", - "remote": "git@github.com:plamut/python-pubsub.git", - "sha": "12307d8f12d96974130c7a49bd1eba4d62956f21" + "remote": "https://github.com/googleapis/python-pubsub.git", + "sha": "83db67239d3521457138699109f766d574a0a2c4" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "8b3d36daaf5561496b7d4075fba4f2c52d18ca1c", - "internalRef": "359285402" + "sha": "9adc11e84218d5e234392e7fe81676e778895525", + "internalRef": "359844338" } }, { @@ -47,5 +47,116 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".coveragerc", + ".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + 
".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + "docs/_templates/layout.html", + "docs/conf.py", + "docs/multiprocessing.rst", + "google/cloud/pubsub_v1/proto/pubsub.proto", + "google/cloud/pubsub_v1/proto/schema.proto", + "google/pubsub/__init__.py", + "google/pubsub/py.typed", + "google/pubsub_v1/__init__.py", + "google/pubsub_v1/py.typed", + "google/pubsub_v1/services/__init__.py", + "google/pubsub_v1/services/publisher/__init__.py", + "google/pubsub_v1/services/publisher/async_client.py", + "google/pubsub_v1/services/publisher/client.py", + "google/pubsub_v1/services/publisher/pagers.py", + "google/pubsub_v1/services/publisher/transports/__init__.py", + "google/pubsub_v1/services/publisher/transports/base.py", + "google/pubsub_v1/services/publisher/transports/grpc.py", + "google/pubsub_v1/services/publisher/transports/grpc_asyncio.py", + "google/pubsub_v1/services/schema_service/__init__.py", + "google/pubsub_v1/services/schema_service/async_client.py", + "google/pubsub_v1/services/schema_service/client.py", + "google/pubsub_v1/services/schema_service/pagers.py", + "google/pubsub_v1/services/schema_service/transports/__init__.py", + "google/pubsub_v1/services/schema_service/transports/base.py", + 
"google/pubsub_v1/services/schema_service/transports/grpc.py", + "google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py", + "google/pubsub_v1/services/subscriber/__init__.py", + "google/pubsub_v1/services/subscriber/async_client.py", + "google/pubsub_v1/services/subscriber/client.py", + "google/pubsub_v1/services/subscriber/pagers.py", + "google/pubsub_v1/services/subscriber/transports/__init__.py", + "google/pubsub_v1/services/subscriber/transports/base.py", + "google/pubsub_v1/services/subscriber/transports/grpc.py", + "google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py", + "google/pubsub_v1/types/__init__.py", + "google/pubsub_v1/types/pubsub.py", + "google/pubsub_v1/types/schema.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/noxfile.py", + "scripts/decrypt-secrets.sh", + "scripts/fixup_pubsub_v1_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/pubsub_v1/__init__.py", + "tests/unit/gapic/pubsub_v1/test_publisher.py", + "tests/unit/gapic/pubsub_v1/test_schema_service.py", + "tests/unit/gapic/pubsub_v1/test_subscriber.py" ] } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py index 8b137891791f..42ffdf2bc43d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index cf24f5273627..5434ae944dda 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -85,15 +85,17 @@ def test__get_default_mtls_endpoint(): assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -def test_publisher_client_from_service_account_info(): +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) +def test_publisher_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = PublisherClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "pubsub.googleapis.com:443" @@ -107,9 +109,11 @@ def test_publisher_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") 
assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "pubsub.googleapis.com:443" @@ -466,6 +470,22 @@ def test_create_topic_from_dict(): test_create_topic(request_type=dict) +def test_create_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + client.create_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.Topic() + + @pytest.mark.asyncio async def test_create_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.Topic @@ -665,6 +685,22 @@ def test_update_topic_from_dict(): test_update_topic(request_type=dict) +def test_update_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + client.update_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateTopicRequest() + + @pytest.mark.asyncio async def test_update_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest @@ -793,6 +829,22 @@ def test_publish_from_dict(): test_publish(request_type=dict) +def test_publish_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + client.publish() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.PublishRequest() + + @pytest.mark.asyncio async def test_publish_async( transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest @@ -1000,6 +1052,22 @@ def test_get_topic_from_dict(): test_get_topic(request_type=dict) +def test_get_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + client.get_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetTopicRequest() + + @pytest.mark.asyncio async def test_get_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest @@ -1195,6 +1263,22 @@ def test_list_topics_from_dict(): test_list_topics(request_type=dict) +def test_list_topics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + client.list_topics() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicsRequest() + + @pytest.mark.asyncio async def test_list_topics_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest @@ -1499,6 +1583,24 @@ def test_list_topic_subscriptions_from_dict(): test_list_topic_subscriptions(request_type=dict) +def test_list_topic_subscriptions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + client.list_topic_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicSubscriptionsRequest() + + @pytest.mark.asyncio async def test_list_topic_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest @@ -1837,6 +1939,24 @@ def test_list_topic_snapshots_from_dict(): test_list_topic_snapshots(request_type=dict) +def test_list_topic_snapshots_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + client.list_topic_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListTopicSnapshotsRequest() + + @pytest.mark.asyncio async def test_list_topic_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest @@ -2155,6 +2275,22 @@ def test_delete_topic_from_dict(): test_delete_topic(request_type=dict) +def test_delete_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + client.delete_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteTopicRequest() + + @pytest.mark.asyncio async def test_delete_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest @@ -2338,6 +2474,24 @@ def test_detach_subscription_from_dict(): test_detach_subscription(request_type=dict) +def test_detach_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + client.detach_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DetachSubscriptionRequest() + + @pytest.mark.asyncio async def test_detach_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 7f2f9d055246..62a3aa7300a1 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -88,15 +88,19 @@ def test__get_default_mtls_endpoint(): ) -def test_schema_service_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] +) +def test_schema_service_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = SchemaServiceClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "pubsub.googleapis.com:443" @@ -112,9 +116,11 @@ def test_schema_service_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == 
"pubsub.googleapis.com:443" @@ -483,6 +489,22 @@ def test_create_schema_from_dict(): test_create_schema(request_type=dict) +def test_create_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + client.create_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == gp_schema.CreateSchemaRequest() + + @pytest.mark.asyncio async def test_create_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest @@ -706,6 +728,22 @@ def test_get_schema_from_dict(): test_get_schema(request_type=dict) +def test_get_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + client.get_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.GetSchemaRequest() + + @pytest.mark.asyncio async def test_get_schema_async( transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest @@ -901,6 +939,22 @@ def test_list_schemas_from_dict(): test_list_schemas(request_type=dict) +def test_list_schemas_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + client.list_schemas() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.ListSchemasRequest() + + @pytest.mark.asyncio async def test_list_schemas_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest @@ -1203,6 +1257,22 @@ def test_delete_schema_from_dict(): test_delete_schema(request_type=dict) +def test_delete_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + client.delete_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.DeleteSchemaRequest() + + @pytest.mark.asyncio async def test_delete_schema_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest @@ -1384,6 +1454,22 @@ def test_validate_schema_from_dict(): test_validate_schema(request_type=dict) +def test_validate_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + client.validate_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == gp_schema.ValidateSchemaRequest() + + @pytest.mark.asyncio async def test_validate_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest @@ -1583,6 +1669,22 @@ def test_validate_message_from_dict(): test_validate_message(request_type=dict) +def test_validate_message_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + client.validate_message() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == schema.ValidateMessageRequest() + + @pytest.mark.asyncio async def test_validate_message_async( transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 2eb3503cd983..5931d609d3af 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -87,15 +87,17 @@ def test__get_default_mtls_endpoint(): assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -def test_subscriber_client_from_service_account_info(): +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) +def test_subscriber_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( 
service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = SubscriberClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "pubsub.googleapis.com:443" @@ -109,9 +111,11 @@ def test_subscriber_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "pubsub.googleapis.com:443" @@ -486,6 +490,24 @@ def test_create_subscription_from_dict(): test_create_subscription(request_type=dict) +def test_create_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + client.create_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.Subscription() + + @pytest.mark.asyncio async def test_create_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.Subscription @@ -757,6 +779,22 @@ def test_get_subscription_from_dict(): test_get_subscription(request_type=dict) +def test_get_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + client.get_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetSubscriptionRequest() + + @pytest.mark.asyncio async def test_get_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest @@ -990,6 +1028,24 @@ def test_update_subscription_from_dict(): test_update_subscription(request_type=dict) +def test_update_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + client.update_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateSubscriptionRequest() + + @pytest.mark.asyncio async def test_update_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest @@ -1148,6 +1204,24 @@ def test_list_subscriptions_from_dict(): test_list_subscriptions(request_type=dict) +def test_list_subscriptions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + client.list_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListSubscriptionsRequest() + + @pytest.mark.asyncio async def test_list_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest @@ -1494,6 +1568,24 @@ def test_delete_subscription_from_dict(): test_delete_subscription(request_type=dict) +def test_delete_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + client.delete_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteSubscriptionRequest() + + @pytest.mark.asyncio async def test_delete_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest @@ -1690,6 +1782,24 @@ def test_modify_ack_deadline_from_dict(): test_modify_ack_deadline(request_type=dict) +def test_modify_ack_deadline_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + client.modify_ack_deadline() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ModifyAckDeadlineRequest() + + @pytest.mark.asyncio async def test_modify_ack_deadline_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest @@ -1904,6 +2014,22 @@ def test_acknowledge_from_dict(): test_acknowledge(request_type=dict) +def test_acknowledge_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + client.acknowledge() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.AcknowledgeRequest() + + @pytest.mark.asyncio async def test_acknowledge_async( transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest @@ -2099,6 +2225,22 @@ def test_pull_from_dict(): test_pull(request_type=dict) +def test_pull_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.pull), "__call__") as call: + client.pull() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.PullRequest() + + @pytest.mark.asyncio async def test_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.PullRequest @@ -2382,6 +2524,24 @@ def test_modify_push_config_from_dict(): test_modify_push_config(request_type=dict) +def test_modify_push_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + client.modify_push_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ModifyPushConfigRequest() + + @pytest.mark.asyncio async def test_modify_push_config_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest @@ -2597,6 +2757,22 @@ def test_get_snapshot_from_dict(): test_get_snapshot(request_type=dict) +def test_get_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + client.get_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.GetSnapshotRequest() + + @pytest.mark.asyncio async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest @@ -2788,6 +2964,22 @@ def test_list_snapshots_from_dict(): test_list_snapshots(request_type=dict) +def test_list_snapshots_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + client.list_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.ListSnapshotsRequest() + + @pytest.mark.asyncio async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest @@ -3103,6 +3295,22 @@ def test_create_snapshot_from_dict(): test_create_snapshot(request_type=dict) +def test_create_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + client.create_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.CreateSnapshotRequest() + + @pytest.mark.asyncio async def test_create_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest @@ -3306,6 +3514,22 @@ def test_update_snapshot_from_dict(): test_update_snapshot(request_type=dict) +def test_update_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + client.update_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.UpdateSnapshotRequest() + + @pytest.mark.asyncio async def test_update_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest @@ -3431,6 +3655,22 @@ def test_delete_snapshot_from_dict(): test_delete_snapshot(request_type=dict) +def test_delete_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + client.delete_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.DeleteSnapshotRequest() + + @pytest.mark.asyncio async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest @@ -3610,6 +3850,22 @@ def test_seek_from_dict(): test_seek(request_type=dict) +def test_seek_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + client.seek() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == pubsub.SeekRequest() + + @pytest.mark.asyncio async def test_seek_async( transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest From e62a9a05239c16a8af01d2d3e58bb3898242221b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 3 Mar 2021 12:51:37 -0800 Subject: [PATCH 0626/1197] test: install pyopenssl for mtls testing (#260) Source-Author: arithmetic1728 <58957152+arithmetic1728@users.noreply.github.com> Source-Date: Tue Mar 2 12:27:56 2021 -0800 Source-Repo: googleapis/synthtool Source-Sha: 0780323da96d5a53925fe0547757181fe76e8f1e Source-Link: https://github.com/googleapis/synthtool/commit/0780323da96d5a53925fe0547757181fe76e8f1e --- packages/google-cloud-pubsub/noxfile.py | 3 +++ packages/google-cloud-pubsub/synth.metadata | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 6b178ad185c4..eae1a6458700 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ 
-123,6 +123,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 4f68f6796a2f..71975f6325e7 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "83db67239d3521457138699109f766d574a0a2c4" + "sha": "970924ec144a8dd2556fa7e1b55c14088935d5fd" } }, { @@ -19,21 +19,21 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012" + "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" } } ], From a4f4ec8b27aca2d61033564174b144d0d17c259b Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 4 Mar 2021 10:44:28 -0800 Subject: [PATCH 0627/1197] docs: remove EXPERIMENTAL tag from Schema service (via synth) (#307) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* fix: Update to PHP micro-generator beta02; fixes some bazel build issues. PiperOrigin-RevId: 359844338 Source-Author: Google APIs Source-Date: Fri Feb 26 14:51:50 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 9adc11e84218d5e234392e7fe81676e778895525 Source-Link: https://github.com/googleapis/googleapis/commit/9adc11e84218d5e234392e7fe81676e778895525 * docs: Remove experimental note for schema APIs PiperOrigin-RevId: 360714464 Source-Author: Google APIs Source-Date: Wed Mar 3 11:13:45 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 479acf6990eada1213f8666f1c795a018f8496f9 Source-Link: https://github.com/googleapis/googleapis/commit/479acf6990eada1213f8666f1c795a018f8496f9 * chore(release): update Java microgenerator to 1.0.1 Committer: @miraleung PiperOrigin-RevId: 360805639 Source-Author: Google APIs Source-Date: Wed Mar 3 18:34:03 2021 -0800 Source-Repo: googleapis/googleapis Source-Sha: 3befd26ca55723d3e8111909331eac1249837987 Source-Link: https://github.com/googleapis/googleapis/commit/3befd26ca55723d3e8111909331eac1249837987 --- .../google/cloud/pubsub_v1/proto/pubsub.proto | 2 -- .../google/cloud/pubsub_v1/proto/schema.proto | 3 --- packages/google-cloud-pubsub/google/pubsub_v1/__init__.py | 4 ++-- .../pubsub_v1/services/schema_service/async_client.py | 5 +---- .../google/pubsub_v1/services/schema_service/client.py | 5 +---- .../pubsub_v1/services/schema_service/transports/grpc.py | 4 +--- .../services/schema_service/transports/grpc_asyncio.py | 4 +--- .../google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 2 -- packages/google-cloud-pubsub/synth.metadata | 6 +++--- 9 files changed, 9 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 9bc678e3ae7c..0da37dd7dad1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -191,8 +191,6 @@ message Topic { string kms_key_name = 5; // Settings for validating messages published against a schema. - // - // EXPERIMENTAL: Schema support is in development and may not work yet. SchemaSettings schema_settings = 6; // Reserved for future use. This field is set only in responses from the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto index ae402ac4de2e..1ace7ef3b0d7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/schema.proto @@ -32,9 +32,6 @@ option php_namespace = "Google\\Cloud\\PubSub\\V1"; option ruby_package = "Google::Cloud::PubSub::V1"; // Service for doing schema-related operations. -// -// EXPERIMENTAL: The Schema service is in development and may not work yet. - service SchemaService { option (google.api.default_host) = "pubsub.googleapis.com"; option (google.api.oauth_scopes) = diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 8152d60f39f5..fce58c7ad522 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -118,7 +118,6 @@ "ReceivedMessage", "RetryPolicy", "Schema", - "SchemaServiceClient", "SchemaSettings", "SchemaView", "SeekRequest", @@ -126,6 +125,7 @@ "Snapshot", "StreamingPullRequest", "StreamingPullResponse", + "SubscriberClient", "Subscription", "Topic", "UpdateSnapshotRequest", @@ -135,5 +135,5 @@ "ValidateMessageResponse", "ValidateSchemaRequest", "ValidateSchemaResponse", - "SubscriberClient", + "SchemaServiceClient", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 39de60587f89..9aeb62990ebe 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -40,10 +40,7 @@ class SchemaServiceAsyncClient: - """ Service for doing schema-related operations. - EXPERIMENTAL: The Schema service is in development and may not - work yet. - """ + """Service for doing schema-related operations.""" _client: SchemaServiceClient diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index d569af8f34e0..5f65a0388233 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -75,10 +75,7 @@ def get_transport_class(cls, label: str = None,) -> Type[SchemaServiceTransport] class SchemaServiceClient(metaclass=SchemaServiceClientMeta): - """ Service for doing schema-related operations. - EXPERIMENTAL: The Schema service is in development and may not - work yet. - """ + """Service for doing schema-related operations.""" @staticmethod def _get_default_mtls_endpoint(api_endpoint): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 3a724e0734cd..51fff0e85103 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -38,9 +38,7 @@ class SchemaServiceGrpcTransport(SchemaServiceTransport): """gRPC backend transport for SchemaService. - Service for doing schema-related operations. 
- EXPERIMENTAL: The Schema service is in development and may not - work yet. + Service for doing schema-related operations. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 080bcc6f5b7c..d8c7cac097a8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -40,9 +40,7 @@ class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): """gRPC AsyncIO backend transport for SchemaService. - Service for doing schema-related operations. - EXPERIMENTAL: The Schema service is in development and may not - work yet. + Service for doing schema-related operations. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 001eea88a59b..6fb73e8c98c9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -143,8 +143,6 @@ class Topic(proto.Message): schema_settings (google.pubsub_v1.types.SchemaSettings): Settings for validating messages published against a schema. - EXPERIMENTAL: Schema support is in development - and may not work yet. satisfies_pzs (bool): Reserved for future use. 
This field is set only in responses from the server; it is ignored diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 71975f6325e7..a0d4cbdb6cb1 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,15 +4,15 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "970924ec144a8dd2556fa7e1b55c14088935d5fd" + "sha": "6e43f4ca8411c6625bac1720172807632dc4b500" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "9adc11e84218d5e234392e7fe81676e778895525", - "internalRef": "359844338" + "sha": "3befd26ca55723d3e8111909331eac1249837987", + "internalRef": "360805639" } }, { From a26155758ee8784632a99b05d23cb4433bf64663 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Mar 2021 12:21:05 +0100 Subject: [PATCH 0628/1197] chore(deps): update dependency google-cloud-pubsub to v2.4.0 (#286) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 84edd80118e2..f0bf9618269b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1 @@ -google-cloud-pubsub==2.2.0 +google-cloud-pubsub==2.4.0 From fd798f5b146411ceb2532c55e49779ac9b4f3698 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 5 Mar 2021 14:42:02 -0700 Subject: [PATCH 0629/1197] chore: add constraints file (#272) Add constraints file to test lower bounds. These files will not be used until the noxfile is changed in googleapis/synthtool#869. 
--- .../google-cloud-pubsub/testing/constraints-3.10.txt | 0 .../google-cloud-pubsub/testing/constraints-3.11.txt | 0 .../google-cloud-pubsub/testing/constraints-3.6.txt | 11 +++++++++++ .../google-cloud-pubsub/testing/constraints-3.7.txt | 0 .../google-cloud-pubsub/testing/constraints-3.8.txt | 0 .../google-cloud-pubsub/testing/constraints-3.9.txt | 0 6 files changed, 11 insertions(+) create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.10.txt create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.11.txt create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.6.txt create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.7.txt create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.8.txt create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.9.txt diff --git a/packages/google-cloud-pubsub/testing/constraints-3.10.txt b/packages/google-cloud-pubsub/testing/constraints-3.10.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.11.txt b/packages/google-cloud-pubsub/testing/constraints-3.11.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt new file mode 100644 index 000000000000..89c937ce8f9a --- /dev/null +++ b/packages/google-cloud-pubsub/testing/constraints-3.6.txt @@ -0,0 +1,11 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.2 +libcst==0.3.10 +proto-plus==1.7.1 +grpc-google-iam-v1==0.12.3 \ No newline at end of file diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.8.txt b/packages/google-cloud-pubsub/testing/constraints-3.8.txt new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.9.txt b/packages/google-cloud-pubsub/testing/constraints-3.9.txt new file mode 100644 index 000000000000..e69de29bb2d1 From dfe801f5a55e3ff7e403bb4aef50e52f6a120fb4 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Mon, 8 Mar 2021 09:49:43 -0800 Subject: [PATCH 0630/1197] fix(sample): bump the max_time to 10 mins for a flaky test (#311) fixes #291 --- .../google-cloud-pubsub/samples/snippets/subscriber_test.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 39e52afff79c..b9dd38ec3603 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -243,7 +243,8 @@ def test_receive_with_delivery_attempts( ): # The dlq subscription raises 404 before it's ready. - @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=300) + # We keep retrying up to 10 minutes for mitigating the flakiness. 
+ @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=600) def run_sample(): _publish_messages(publisher_client, topic) From 1c5a3d63ffdd48a6a73e35acc114ce5f8b7223ea Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 9 Mar 2021 10:27:31 -0800 Subject: [PATCH 0631/1197] samples: add schema samples (#306) * samples: create, delete, get, list samples * add publish and subscribe with avro schema samples * add publish and subscribe with proto schema samples * add protoc compiled class * address leah's and peter's comments * avoid using google.auth.default() for testing * use choice in argparse * update python to python3 in readme --- .../samples/snippets/README.rst | 52 +- .../samples/snippets/requirements.txt | 1 + .../samples/snippets/resources/us-states.avsc | 18 + .../snippets/resources/us-states.proto | 8 + .../samples/snippets/schema.py | 490 ++++++++++++++++++ .../samples/snippets/schema_test.py | 258 +++++++++ .../snippets/utilities/us_states_pb2.py | 77 +++ 7 files changed, 899 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-pubsub/samples/snippets/resources/us-states.avsc create mode 100644 packages/google-cloud-pubsub/samples/snippets/resources/us-states.proto create mode 100644 packages/google-cloud-pubsub/samples/snippets/schema.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/schema_test.py create mode 100644 packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py diff --git a/packages/google-cloud-pubsub/samples/snippets/README.rst b/packages/google-cloud-pubsub/samples/snippets/README.rst index 5fdfbde0dc06..5c1d4be68513 100644 --- a/packages/google-cloud-pubsub/samples/snippets/README.rst +++ b/packages/google-cloud-pubsub/samples/snippets/README.rst @@ -78,7 +78,7 @@ To run this sample: .. code-block:: bash - $ python quickstart/pub.py + $ python3 quickstart/pub.py Quickstart (Subscriber) @@ -94,7 +94,7 @@ To run this sample: .. 
code-block:: bash - $ python quickstart/sub.py + $ python3 quickstart/sub.py Publisher @@ -110,7 +110,7 @@ To run this sample: .. code-block:: bash - $ python publisher.py + $ python3 publisher.py usage: publisher.py [-h] project_id @@ -168,7 +168,7 @@ To run this sample: .. code-block:: bash - $ python subscriber.py + $ python3 subscriber.py usage: subscriber.py [-h] project_id @@ -232,7 +232,7 @@ To run this sample: .. code-block:: bash - $ python iam.py + $ python3 iam.py usage: iam.py [-h] project_id @@ -266,6 +266,48 @@ To run this sample: +Schema ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com//googleapis/python-pubsub&page=editor&open_in_editor=samples/snippets/schema.py,samples/snippets/README.rst + + + + +To run this sample: + +.. code-block:: bash + + $ python3 schema.py + + usage: schema.py [-h] + project_id + {create-avro,create-proto,get,list,delete,create-topic,publish-avro,publish-proto,receive-avro,receive-proto} + ... + + This application demonstrates how to perform basic schema operations + using the Cloud Pub/Sub API. + + For more information, see the README.md under /pubsub and the documentation + at https://cloud.google.com/pubsub/docs. + + positional arguments: + project_id Your Google Cloud project ID + {create-avro,create-proto,get,list,delete,create-topic,publish-avro,publish-proto,receive-avro,receive-proto} + create-avro Create a schema resource from an Avro schema file formatted in JSON. + create-proto Create a schema resource from a protobuf schema file. + get Get a schema resource. + list List schema resources. + delete Delete a schema resource. + create-topic Create a topic resource with a schema. + publish-avro Publish a BINARY or JSON encoded message to a topic configured with an Avro schema.
+ publish-proto Publish a BINARY or JSON encoded message to a topic configured with a protobuf schema. + receive-avro Receive and decode messages sent to a topic with an Avro schema. + receive-proto Receive and decode messages sent to a topic with a protobuf schema. + + optional arguments: + -h, --help show this help message and exit The client library diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f0bf9618269b..08606769bbb7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1 +1,2 @@ google-cloud-pubsub==2.4.0 +avro==1.10.1 diff --git a/packages/google-cloud-pubsub/samples/snippets/resources/us-states.avsc b/packages/google-cloud-pubsub/samples/snippets/resources/us-states.avsc new file mode 100644 index 000000000000..7521882c7df7 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/resources/us-states.avsc @@ -0,0 +1,18 @@ +{ + "type":"record", + "name":"State", + "namespace":"utilities", + "doc":"A list of states in the United States of America.", + "fields":[ + { + "name":"name", + "type":"string", + "doc":"The common name of the state." + }, + { + "name":"post_abbr", + "type":"string", + "doc":"The postal code abbreviation of the state." 
+ } + ] +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/resources/us-states.proto b/packages/google-cloud-pubsub/samples/snippets/resources/us-states.proto new file mode 100644 index 000000000000..576c2ea1c908 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/resources/us-states.proto @@ -0,0 +1,8 @@ +syntax = "proto3"; + +package utilities; + +message StateProto { + string name = 1; + string post_abbr = 2; +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py new file mode 100644 index 000000000000..37f9bba55427 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This application demonstrates how to perform basic schema operations +using the Cloud Pub/Sub API. + +For more information, see the README.md under /pubsub and the documentation +at https://cloud.google.com/pubsub/docs/schemas. 
+""" + +import argparse + + +def create_avro_schema(project_id, schema_id, avsc_file): + """Create a schema resource from a JSON-formatted Avro schema file.""" + # [START pubsub_create_avro_schema] + from google.api_core.exceptions import AlreadyExists + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + + project_path = f"projects/{project_id}" + + # Read a JSON-formatted Avro schema file as a string. + with open(avsc_file, "rb") as f: + avsc_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema(name=schema_path, type_=Schema.Type.AVRO, definition=avsc_source) + + try: + result = schema_client.create_schema( + request={"parent": project_path, "schema": schema, "schema_id": schema_id} + ) + print(f"Created a schema using an Avro schema file:\n{result}") + except AlreadyExists: + print(f"{schema_id} already exists.") + # [END pubsub_create_avro_schema] + + +def create_proto_schema(project_id, schema_id, proto_file): + """Create a schema resource from a protobuf schema file.""" + # [START pubsub_create_proto_schema] + from google.api_core.exceptions import AlreadyExists + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # proto_file = "path/to/a/proto/file/(.proto)/formatted/in/protocol/buffers" + + project_path = f"projects/{project_id}" + + # Read a protobuf schema file as a string. 
+ with open(proto_file, "rb") as f: + proto_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema( + name=schema_path, type_=Schema.Type.PROTOCOL_BUFFER, definition=proto_source + ) + + try: + result = schema_client.create_schema( + request={"parent": project_path, "schema": schema, "schema_id": schema_id} + ) + print(f"Created a schema using a protobuf schema file:\n{result}") + except AlreadyExists: + print(f"{schema_id} already exists.") + # [END pubsub_create_proto_schema] + + +def get_schema(project_id, schema_id): + """Get a schema resource.""" + # [START pubsub_get_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + try: + result = schema_client.get_schema(request={"name": schema_path}) + print(f"Got a schema:\n{result}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_get_schema] + + +def list_schemas(project_id): + """List schema resources.""" + # [START pubsub_list_schemas] + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. 
+ # project_id = "your-project-id" + + project_path = f"projects/{project_id}" + schema_client = SchemaServiceClient() + + for schema in schema_client.list_schemas(request={"parent": project_path}): + print(schema) + + print("Listed schemas.") + # [END pubsub_list_schemas] + + +def delete_schema(project_id, schema_id): + """Delete a schema resource.""" + # [START pubsub_delete_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + try: + schema_client.delete_schema(request={"name": schema_path}) + print(f"Deleted a schema:\n{schema_path}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_delete_schema] + + +def create_topic_with_schema(project_id, topic_id, schema_id, message_encoding): + """Create a topic resource with a schema.""" + # [START pubsub_create_topic_with_schema] + from google.api_core.exceptions import AlreadyExists, InvalidArgument + from google.cloud.pubsub import PublisherClient, SchemaServiceClient + from google.pubsub_v1.types import Encoding + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # schema_id = "your-schema-id" + # Choose either BINARY or JSON as valid message encoding in this topic. 
+ # message_encoding = "BINARY" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + if message_encoding == "BINARY": + encoding = Encoding.BINARY + elif message_encoding == "JSON": + encoding = Encoding.JSON + else: + encoding = Encoding.ENCODING_UNSPECIFIED + + try: + response = publisher_client.create_topic( + request={ + "name": topic_path, + "schema_settings": {"schema": schema_path, "encoding": encoding}, + } + ) + print(f"Created a topic:\n{response}") + + except AlreadyExists: + print(f"{topic_id} already exists.") + except InvalidArgument: + print("Please choose either BINARY or JSON as a valid message encoding type.") + # [END pubsub_create_topic_with_schema] + + +def publish_avro_records(project_id, topic_id, avsc_file): + """Publish a BINARY or JSON encoded message to a topic configured with an Avro schema.""" + # [START pubsub_publish_avro_records] + from avro.io import BinaryEncoder, DatumWriter + import avro + import io + import json + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import PublisherClient + from google.pubsub_v1.types import Encoding + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + # Prepare to write Avro records to the binary output stream. + avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) + writer = DatumWriter(avro_schema) + bout = io.BytesIO() + + # Prepare some data using a Python dictionary that matches the Avro schema + record = {"name": "Alaska", "post_abbr": "AK"} + + try: + # Get the topic encoding type.
+ topic = publisher_client.get_topic(request={"topic": topic_path}) + encoding = topic.schema_settings.encoding + + # Encode the data according to the message serialization type. + if encoding == Encoding.BINARY: + encoder = BinaryEncoder(bout) + writer.write(record, encoder) + data = bout.getvalue() + print(f"Preparing a binary-encoded message:\n{data}") + elif encoding == Encoding.JSON: + data = json.dumps(record).encode("utf-8") + print(f"Preparing a JSON-encoded message:\n{data}") + else: + print(f"No encoding specified in {topic_path}. Abort.") + exit(0) + + future = publisher_client.publish(topic_path, data) + print(f"Published message ID: {future.result()}") + + except NotFound: + print(f"{topic_id} not found.") + # [END pubsub_publish_avro_records] + + +def publish_proto_messages(project_id, topic_id): + """Publish a BINARY or JSON encoded message to a topic configured with a protobuf schema.""" + # [START pubsub_publish_proto_messages] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import PublisherClient + from google.protobuf.json_format import MessageToJson + from google.pubsub_v1.types import Encoding + + from utilities import us_states_pb2 + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + try: + # Get the topic encoding type. + topic = publisher_client.get_topic(request={"topic": topic_path}) + encoding = topic.schema_settings.encoding + + # Instantiate a protoc-generated class defined in `us-states.proto`. + state = us_states_pb2.StateProto() + state.name = "Alaska" + state.post_abbr = "AK" + + # Encode the data according to the message serialization type. 
+ if encoding == Encoding.BINARY: + data = state.SerializeToString() + print(f"Preparing a binary-encoded message:\n{data}") + elif encoding == Encoding.JSON: + json_object = MessageToJson(state) + data = str(json_object).encode("utf-8") + print(f"Preparing a JSON-encoded message:\n{data}") + else: + print(f"No encoding specified in {topic_path}. Abort.") + exit(0) + + future = publisher_client.publish(topic_path, data) + print(f"Published message ID: {future.result()}") + + except NotFound: + print(f"{topic_id} not found.") + # [END pubsub_publish_proto_messages] + + +def subscribe_with_avro_schema(project_id, subscription_id, avsc_file, timeout=None): + """Receive and decode messages sent to a topic with an Avro schema.""" + # [START pubsub_subscribe_avro_records] + import avro + from avro.io import BinaryDecoder, DatumReader + from concurrent.futures import TimeoutError + import io + import json + from google.cloud.pubsub import SubscriberClient + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + # Number of seconds the subscriber listens for messages + # timeout = 5.0 + + subscriber = SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) + + def callback(message): + # Get the message serialization type. + encoding = message.attributes.get("googclient_schemaencoding") + # Deserialize the message data accordingly. 
+ if encoding == "BINARY": + bout = io.BytesIO(message.data) + decoder = BinaryDecoder(bout) + reader = DatumReader(avro_schema) + message_data = reader.read(decoder) + print(f"Received a binary-encoded message:\n{message_data}") + elif encoding == "JSON": + message_data = json.loads(message.data) + print(f"Received a JSON-encoded message:\n{message_data}") + else: + print(f"Received a message with no encoding:\n{message}") + + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception occurs first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_subscribe_avro_records] + + +def subscribe_with_proto_schema(project_id, subscription_id, timeout): + """Receive and decode messages sent to a topic with a protobuf schema.""" + # [START pubsub_subscribe_proto_messages] + from concurrent.futures import TimeoutError + from google.cloud.pubsub import SubscriberClient + from google.protobuf.json_format import Parse + + from utilities import us_states_pb2 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber listens for messages + # timeout = 5.0 + + subscriber = SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + # Instantiate a protoc-generated class defined in `us-states.proto`. + state = us_states_pb2.StateProto() + + def callback(message): + # Get the message serialization type. + encoding = message.attributes.get("googclient_schemaencoding") + # Deserialize the message data accordingly.
+ if encoding == "BINARY": + state.ParseFromString(message.data) + print(f"Received a binary-encoded message:\n{state}") + elif encoding == "JSON": + Parse(message.data, state) + print(f"Received a JSON-encoded message:\n{state}") + else: + print(f"Received a message with no encoding:\n{message}") + + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception occurs first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + # [END pubsub_subscribe_proto_messages] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + ) + parser.add_argument("project_id", help="Your Google Cloud project ID") + + subparsers = parser.add_subparsers(dest="command") + + create_avro_schema_parser = subparsers.add_parser( + "create-avro", help=create_avro_schema.__doc__ + ) + create_avro_schema_parser.add_argument("schema_id") + create_avro_schema_parser.add_argument("avsc_file") + + create_proto_schema_parser = subparsers.add_parser( + "create-proto", help=create_proto_schema.__doc__ + ) + create_proto_schema_parser.add_argument("schema_id") + create_proto_schema_parser.add_argument("proto_file") + + get_schema_parser = subparsers.add_parser("get", help=get_schema.__doc__) + get_schema_parser.add_argument("schema_id") + + list_schemas_parser = subparsers.add_parser("list", help=list_schemas.__doc__) + + delete_schema_parser = subparsers.add_parser("delete", help=delete_schema.__doc__) + delete_schema_parser.add_argument("schema_id") + + create_topic_with_schema_parser = subparsers.add_parser( + "create-topic", help=create_topic_with_schema.__doc__ + )
+ create_topic_with_schema_parser.add_argument("topic_id") + create_topic_with_schema_parser.add_argument("schema_id") + create_topic_with_schema_parser.add_argument( + "message_encoding", choices=["BINARY", "JSON"] + ) + + publish_avro_records_parser = subparsers.add_parser( + "publish-avro", help=publish_avro_records.__doc__ + ) + publish_avro_records_parser.add_argument("topic_id") + publish_avro_records_parser.add_argument("avsc_file") + + publish_proto_messages_parser = subparsers.add_parser( + "publish-proto", help=publish_proto_messages.__doc__ + ) + publish_proto_messages_parser.add_argument("topic_id") + + subscribe_with_avro_schema_parser = subparsers.add_parser( + "receive-avro", help=subscribe_with_avro_schema.__doc__ + ) + subscribe_with_avro_schema_parser.add_argument("subscription_id") + subscribe_with_avro_schema_parser.add_argument("avsc_file") + subscribe_with_avro_schema_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) + + subscribe_with_proto_schema_parser = subparsers.add_parser( + "receive-proto", help=subscribe_with_proto_schema.__doc__ + ) + subscribe_with_proto_schema_parser.add_argument("subscription_id") + subscribe_with_proto_schema_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) + + args = parser.parse_args() + + if args.command == "create-avro": + create_avro_schema(args.project_id, args.schema_id, args.avsc_file) + if args.command == "create-proto": + create_proto_schema(args.project_id, args.schema_id, args.proto_file) + if args.command == "get": + get_schema(args.project_id, args.schema_id) + if args.command == "list": + list_schemas(args.project_id) + if args.command == "delete": + delete_schema(args.project_id, args.schema_id) + if args.command == "create-topic": + create_topic_with_schema( + args.project_id, args.topic_id, args.schema_id, args.message_encoding + ) + if args.command == "publish-avro": + publish_avro_records(args.project_id, args.topic_id, args.avsc_file) + if args.command == "publish-proto": + publish_proto_messages(args.project_id, args.topic_id) + if args.command == "receive-avro": + subscribe_with_avro_schema( + args.project_id, args.subscription_id, args.avsc_file, args.timeout + ) + if args.command == "receive-proto": + subscribe_with_proto_schema(args.project_id, args.subscription_id, args.timeout) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py new file mode 100644 index 000000000000..5447701875b8 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -0,0 +1,258 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +from google.api_core.exceptions import NotFound +from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient +from google.pubsub_v1.types import Encoding +import pytest + +import schema + +UUID = uuid.uuid4().hex +try: + PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] +except KeyError: + raise KeyError("Need to set GOOGLE_CLOUD_PROJECT as an environment variable.") +AVRO_TOPIC_ID = f"schema-test-avro-topic-{UUID}" +PROTO_TOPIC_ID = f"schema-test-proto-topic-{UUID}" +AVRO_SUBSCRIPTION_ID = f"schema-test-avro-subscription-{UUID}" +PROTO_SUBSCRIPTION_ID = f"schema-test-proto-subscription-{UUID}" +AVRO_SCHEMA_ID = f"schema-test-avro-schema-{UUID}" +PROTO_SCHEMA_ID = f"schema-test-proto-schema-{UUID}" +AVSC_FILE = "resources/us-states.avsc" +PROTO_FILE = "resources/us-states.proto" + + +@pytest.fixture(scope="module") +def schema_client(): + schema_client = SchemaServiceClient() + yield schema_client + + +@pytest.fixture(scope="module") +def avro_schema(schema_client): + avro_schema_path = schema_client.schema_path(PROJECT_ID, AVRO_SCHEMA_ID) + + yield avro_schema_path + + try: + schema_client.delete_schema(request={"name": avro_schema_path}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def proto_schema(schema_client): + proto_schema_path = schema_client.schema_path(PROJECT_ID, PROTO_SCHEMA_ID) + + yield proto_schema_path + + try: + schema_client.delete_schema(request={"name": proto_schema_path}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def publisher_client(): + yield PublisherClient() + + +@pytest.fixture(scope="module") +def avro_topic(publisher_client, avro_schema): + from google.pubsub_v1.types import Encoding + + avro_topic_path = publisher_client.topic_path(PROJECT_ID, AVRO_TOPIC_ID) + + try: + avro_topic = publisher_client.get_topic(request={"topic": avro_topic_path}) + except NotFound: + avro_topic = publisher_client.create_topic( + request={ + "name": 
avro_topic_path, + "schema_settings": { + "schema": avro_schema, + "encoding": Encoding.BINARY, + }, + } + ) + + yield avro_topic.name + + publisher_client.delete_topic(request={"topic": avro_topic.name}) + + +@pytest.fixture(scope="module") +def proto_topic(publisher_client, proto_schema): + proto_topic_path = publisher_client.topic_path(PROJECT_ID, PROTO_TOPIC_ID) + + try: + proto_topic = publisher_client.get_topic(request={"topic": proto_topic_path}) + except NotFound: + proto_topic = publisher_client.create_topic( + request={ + "name": proto_topic_path, + "schema_settings": { + "schema": proto_schema, + "encoding": Encoding.BINARY, + }, + } + ) + + yield proto_topic.name + + publisher_client.delete_topic(request={"topic": proto_topic.name}) + + +@pytest.fixture(scope="module") +def subscriber_client(): + subscriber_client = SubscriberClient() + yield subscriber_client + subscriber_client.close() + + +@pytest.fixture(scope="module") +def avro_subscription(subscriber_client, avro_topic): + avro_subscription_path = subscriber_client.subscription_path( + PROJECT_ID, AVRO_SUBSCRIPTION_ID + ) + + try: + avro_subscription = subscriber_client.get_subscription( + request={"subscription": avro_subscription_path} + ) + except NotFound: + avro_subscription = subscriber_client.create_subscription( + request={"name": avro_subscription_path, "topic": avro_topic} + ) + + yield avro_subscription.name + + subscriber_client.delete_subscription( + request={"subscription": avro_subscription.name} + ) + + +@pytest.fixture(scope="module") +def proto_subscription(subscriber_client, proto_topic): + proto_subscription_path = subscriber_client.subscription_path( + PROJECT_ID, PROTO_SUBSCRIPTION_ID + ) + + try: + proto_subscription = subscriber_client.get_subscription( + request={"subscription": proto_subscription_path} + ) + except NotFound: + proto_subscription = subscriber_client.create_subscription( + request={"name": proto_subscription_path, "topic": proto_topic} + ) + + yield 
proto_subscription.name + + subscriber_client.delete_subscription( + request={"subscription": proto_subscription.name} + ) + + +def test_create_avro_schema(schema_client, avro_schema, capsys): + try: + schema_client.delete_schema(request={"name": avro_schema}) + except NotFound: + pass + + schema.create_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_FILE) + + out, _ = capsys.readouterr() + assert "Created a schema using an Avro schema file:" in out + assert f"{avro_schema}" in out + + +def test_create_proto_schema(schema_client, proto_schema, capsys): + try: + schema_client.delete_schema(request={"name": proto_schema}) + except NotFound: + pass + + schema.create_proto_schema(PROJECT_ID, PROTO_SCHEMA_ID, PROTO_FILE) + + out, _ = capsys.readouterr() + assert "Created a schema using a protobuf schema file:" in out + assert f"{proto_schema}" in out + + +def test_get_schema(avro_schema, capsys): + schema.get_schema(PROJECT_ID, AVRO_SCHEMA_ID) + out, _ = capsys.readouterr() + assert "Got a schema" in out + assert f"{avro_schema}" in out + + +def test_list_schemas(capsys): + schema.list_schemas(PROJECT_ID) + out, _ = capsys.readouterr() + assert "Listed schemas." 
in out + + +def test_create_topic_with_schema(avro_schema, capsys): + schema.create_topic_with_schema(PROJECT_ID, AVRO_TOPIC_ID, AVRO_SCHEMA_ID, "BINARY") + out, _ = capsys.readouterr() + assert "Created a topic" in out + assert f"{AVRO_TOPIC_ID}" in out + assert f"{avro_schema}" in out + assert "BINARY" in out or "2" in out + + +def test_publish_avro_records(avro_schema, avro_topic, capsys): + schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) + out, _ = capsys.readouterr() + assert "Preparing a binary-encoded message" in out + assert "Published message ID" in out + + +def test_subscribe_with_avro_schema(avro_schema, avro_topic, avro_subscription, capsys): + schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) + + schema.subscribe_with_avro_schema(PROJECT_ID, AVRO_SUBSCRIPTION_ID, AVSC_FILE, 9) + out, _ = capsys.readouterr() + assert "Received a binary-encoded message:" in out + + +def test_publish_proto_records(proto_topic, capsys): + schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) + out, _ = capsys.readouterr() + assert "Preparing a binary-encoded message" in out + assert "Published message ID" in out + + +def test_subscribe_with_proto_schema( + proto_schema, proto_topic, proto_subscription, capsys +): + schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) + + schema.subscribe_with_proto_schema(PROJECT_ID, PROTO_SUBSCRIPTION_ID, 9) + out, _ = capsys.readouterr() + assert "Received a binary-encoded message" in out + + +def test_delete_schema(proto_schema, capsys): + schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID) + out, _ = capsys.readouterr() + assert "Deleted a schema" in out + assert f"{proto_schema}" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py b/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py new file mode 100644 index 000000000000..0b0c325dc728 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py @@ 
-0,0 +1,77 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: us-states.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='us-states.proto', + package='utilities', + syntax='proto3', + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\x0fus-states.proto\x12\tutilities\"-\n\nStateProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpost_abbr\x18\x02 \x01(\tb\x06proto3' +) + + + + +_STATEPROTO = _descriptor.Descriptor( + name='StateProto', + full_name='utilities.StateProto', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='utilities.StateProto.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='post_abbr', full_name='utilities.StateProto.post_abbr', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], 
+ serialized_start=30, + serialized_end=75, +) + +DESCRIPTOR.message_types_by_name['StateProto'] = _STATEPROTO +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +StateProto = _reflection.GeneratedProtocolMessageType('StateProto', (_message.Message,), { + 'DESCRIPTOR' : _STATEPROTO, + '__module__' : 'us_states_pb2' + # @@protoc_insertion_point(class_scope:utilities.StateProto) + }) +_sym_db.RegisterMessage(StateProto) + + +# @@protoc_insertion_point(module_scope) From 7090de9268859c7747c749d5962c1698cb03696b Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 9 Mar 2021 17:42:27 -0800 Subject: [PATCH 0632/1197] revert: add graceful streaming pull shutdown (#315) This reverts commit 00874fef4929a3a9e6ee0a8950f252b1851359c2. --- .../subscriber/_protocol/dispatcher.py | 3 + .../subscriber/_protocol/heartbeater.py | 9 +- .../pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../_protocol/streaming_pull_manager.py | 82 +++++++----------- .../cloud/pubsub_v1/subscriber/futures.py | 15 +--- .../cloud/pubsub_v1/subscriber/scheduler.py | 59 ++++--------- packages/google-cloud-pubsub/tests/system.py | 82 +----------------- .../pubsub_v1/subscriber/test_dispatcher.py | 31 ++----- .../subscriber/test_futures_subscriber.py | 12 +-- .../pubsub_v1/subscriber/test_heartbeater.py | 41 ++------- .../unit/pubsub_v1/subscriber/test_leaser.py | 24 ++---- .../pubsub_v1/subscriber/test_scheduler.py | 85 ++----------------- .../subscriber/test_streaming_pull_manager.py | 58 ++++--------- 13 files changed, 113 insertions(+), 390 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 382c5c38a2c2..7a89508446f8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -99,6 +99,9 @@ def dispatch_callback(self, 
items): ValueError: If ``action`` isn't one of the expected actions "ack", "drop", "lease", "modify_ack_deadline" or "nack". """ + if not self._manager.is_active: + return + batched_commands = collections.defaultdict(list) for item in items: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index fef158965c57..9cd84a1e2397 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -35,11 +35,10 @@ def __init__(self, manager, period=_DEFAULT_PERIOD): self._period = period def heartbeat(self): - """Periodically send streaming pull heartbeats. - """ - while not self._stop_event.is_set(): - if self._manager.heartbeat(): - _LOGGER.debug("Sent heartbeat.") + """Periodically send heartbeats.""" + while self._manager.is_active and not self._stop_event.is_set(): + self._manager.heartbeat() + _LOGGER.debug("Sent heartbeat.") self._stop_event.wait(timeout=self._period) _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 4a19792fc901..5830680da8eb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -126,7 +126,7 @@ def maintain_leases(self): ack IDs, then waits for most of that time (but with jitter), and repeats. """ - while not self._stop_event.is_set(): + while self._manager.is_active and not self._stop_event.is_set(): # Determine the appropriate duration for the lease. 
This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 8a3ff0e87cbf..e8a4a8caf9d5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -16,7 +16,6 @@ import collections import functools -import itertools import logging import threading import uuid @@ -114,6 +113,10 @@ class StreamingPullManager(object): scheduler will be used. """ + _UNARY_REQUESTS = True + """If set to True, this class will make requests over a separate unary + RPC instead of over the streaming RPC.""" + def __init__( self, client, @@ -289,9 +292,6 @@ def activate_ordering_keys(self, ordering_keys): activate. May be empty. """ with self._pause_resume_lock: - if self._scheduler is None: - return # We are shutting down, don't try to dispatch any more messages. - self._messages_on_hold.activate_ordering_keys( ordering_keys, self._schedule_message_on_hold ) @@ -421,36 +421,37 @@ def send(self, request): If a RetryError occurs, the manager shutdown is triggered, and the error is re-raised. """ - try: - self._send_unary_request(request) - except exceptions.GoogleAPICallError: - _LOGGER.debug( - "Exception while sending unary RPC. This is typically " - "non-fatal as stream requests are best-effort.", - exc_info=True, - ) - except exceptions.RetryError as exc: - _LOGGER.debug( - "RetryError while sending unary RPC. 
Waiting on a transient " - "error resolution for too long, will now trigger shutdown.", - exc_info=False, - ) - # The underlying channel has been suffering from a retryable error - # for too long, time to give up and shut the streaming pull down. - self._on_rpc_done(exc) - raise + if self._UNARY_REQUESTS: + try: + self._send_unary_request(request) + except exceptions.GoogleAPICallError: + _LOGGER.debug( + "Exception while sending unary RPC. This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, + ) + except exceptions.RetryError as exc: + _LOGGER.debug( + "RetryError while sending unary RPC. Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. + self._on_rpc_done(exc) + raise + + else: + self._rpc.send(request) def heartbeat(self): """Sends an empty request over the streaming pull RPC. - Returns: - bool: If a heartbeat request has actually been sent. + This always sends over the stream, regardless of if + ``self._UNARY_REQUESTS`` is set or not. """ if self._rpc is not None and self._rpc.is_active: self._rpc.send(gapic_types.StreamingPullRequest()) - return True - - return False def open(self, callback, on_callback_error): """Begin consuming messages. @@ -512,7 +513,7 @@ def open(self, callback, on_callback_error): # Start the stream heartbeater thread. self._heartbeater.start() - def close(self, reason=None, await_msg_callbacks=False): + def close(self, reason=None): """Stop consuming messages and shutdown all helper threads. This method is idempotent. Additional calls will have no effect. @@ -521,15 +522,6 @@ def close(self, reason=None, await_msg_callbacks=False): reason (Any): The reason to close this. If None, this is considered an "intentional" shutdown. This is passed to the callbacks specified via :meth:`add_close_callback`. 
- - await_msg_callbacks (bool): - If ``True``, the method will wait until all scheduler threads terminate - and only then proceed with the shutdown with the remaining shutdown - tasks, - - If ``False`` (default), the method will shut down the scheduler in a - non-blocking fashion, i.e. it will not wait for the currently executing - scheduler threads to terminate. """ with self._closing: if self._closed: @@ -543,9 +535,7 @@ def close(self, reason=None, await_msg_callbacks=False): # Shutdown all helper threads _LOGGER.debug("Stopping scheduler.") - dropped_messages = self._scheduler.shutdown( - await_msg_callbacks=await_msg_callbacks - ) + self._scheduler.shutdown() self._scheduler = None # Leaser and dispatcher reference each other through the shared @@ -559,23 +549,11 @@ def close(self, reason=None, await_msg_callbacks=False): # because the consumer gets shut down first. _LOGGER.debug("Stopping leaser.") self._leaser.stop() - - total = len(dropped_messages) + len( - self._messages_on_hold._messages_on_hold - ) - _LOGGER.debug(f"NACK-ing all not-yet-dispatched messages (total: {total}).") - messages_to_nack = itertools.chain( - dropped_messages, self._messages_on_hold._messages_on_hold - ) - for msg in messages_to_nack: - msg.nack() - _LOGGER.debug("Stopping dispatcher.") self._dispatcher.stop() self._dispatcher = None # dispatcher terminated, OK to dispose the leaser reference now self._leaser = None - _LOGGER.debug("Stopping heartbeater.") self._heartbeater.stop() self._heartbeater = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index cefe1aa91844..f9fdd76abc87 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -43,23 +43,12 @@ def _on_close_callback(self, manager, result): else: self.set_exception(result) - def cancel(self, 
await_msg_callbacks=False): + def cancel(self): """Stops pulling messages and shutdowns the background thread consuming messages. - - Args: - await_msg_callbacks (bool): - If ``True``, the method will block until the background stream and its - helper threads have has been terminated, as well as all currently - executing message callbacks are done processing. - - If ``False`` (default), the method returns immediately after the - background stream and its helper threads have has been terminated, but - some of the message callback threads might still be running at that - point. """ self._cancelled = True - return self._manager.close(await_msg_callbacks=await_msg_callbacks) + return self._manager.close() def cancelled(self): """ diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 2690c1fc6872..ef2ef59cb6bf 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -20,6 +20,7 @@ import abc import concurrent.futures +import sys import six from six.moves import queue @@ -57,29 +58,19 @@ def schedule(self, callback, *args, **kwargs): raise NotImplementedError @abc.abstractmethod - def shutdown(self, await_msg_callbacks=False): + def shutdown(self): """Shuts down the scheduler and immediately end all pending callbacks. - - Args: - await_msg_callbacks (bool): - If ``True``, the method will block until all currently executing - callbacks are done processing. If ``False`` (default), the - method will not wait for the currently running callbacks to complete. - - Returns: - List[pubsub_v1.subscriber.message.Message]: - The messages submitted to the scheduler that were not yet dispatched - to their callbacks. - It is assumed that each message was submitted to the scheduler as the - first positional argument to the provided callback. 
""" raise NotImplementedError def _make_default_thread_pool_executor(): - return concurrent.futures.ThreadPoolExecutor( - max_workers=10, thread_name_prefix="ThreadPoolExecutor-ThreadScheduler" - ) + # Python 2.7 and 3.6+ have the thread_name_prefix argument, which is useful + # for debugging. + executor_kwargs = {} + if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): + executor_kwargs["thread_name_prefix"] = "ThreadPoolExecutor-ThreadScheduler" + return concurrent.futures.ThreadPoolExecutor(max_workers=10, **executor_kwargs) class ThreadScheduler(Scheduler): @@ -119,35 +110,15 @@ def schedule(self, callback, *args, **kwargs): """ self._executor.submit(callback, *args, **kwargs) - def shutdown(self, await_msg_callbacks=False): - """Shut down the scheduler and immediately end all pending callbacks. - - Args: - await_msg_callbacks (bool): - If ``True``, the method will block until all currently executing - executor threads are done processing. If ``False`` (default), the - method will not wait for the currently running threads to complete. - - Returns: - List[pubsub_v1.subscriber.message.Message]: - The messages submitted to the scheduler that were not yet dispatched - to their callbacks. - It is assumed that each message was submitted to the scheduler as the - first positional argument to the provided callback. + def shutdown(self): + """Shuts down the scheduler and immediately end all pending callbacks. """ - dropped_messages = [] - - # Drop all pending item from the executor. Without this, the executor will also - # try to process any pending work items before termination, which is undesirable. - # - # TODO: Replace the logic below by passing `cancel_futures=True` to shutdown() - # once we only need to support Python 3.9+. + # Drop all pending item from the executor. Without this, the executor + # will block until all pending items are complete, which is + # undesirable. 
try: while True: - work_item = self._executor._work_queue.get(block=False) - dropped_messages.append(work_item.args[0]) + self._executor._work_queue.get(block=False) except queue.Empty: pass - - self._executor.shutdown(wait=await_msg_callbacks) - return dropped_messages + self._executor.shutdown() diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 05a91a420e07..bbedd9a11ff9 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -14,7 +14,6 @@ from __future__ import absolute_import -import concurrent.futures import datetime import itertools import operator as op @@ -610,78 +609,6 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown - def test_streaming_pull_blocking_shutdown( - self, publisher, topic_path, subscriber, subscription_path, cleanup - ): - # Make sure the topic and subscription get deleted. - cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - cleanup.append( - (subscriber.delete_subscription, (), {"subscription": subscription_path}) - ) - - # The ACK-s are only persisted if *all* messages published in the same batch - # are ACK-ed. We thus publish each message in its own batch so that the backend - # treats all messages' ACKs independently of each other. - publisher.create_topic(name=topic_path) - subscriber.create_subscription(name=subscription_path, topic=topic_path) - _publish_messages(publisher, topic_path, batch_sizes=[1] * 10) - - # Artificially delay message processing, gracefully shutdown the streaming pull - # in the meantime, then verify that those messages were nevertheless processed. 
- processed_messages = [] - - def callback(message): - time.sleep(15) - processed_messages.append(message.data) - message.ack() - - # Flow control limits should exceed the number of worker threads, so that some - # of the messages will be blocked on waiting for free scheduler threads. - flow_control = pubsub_v1.types.FlowControl(max_messages=5) - executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) - scheduler = pubsub_v1.subscriber.scheduler.ThreadScheduler(executor=executor) - subscription_future = subscriber.subscribe( - subscription_path, - callback=callback, - flow_control=flow_control, - scheduler=scheduler, - ) - - try: - subscription_future.result(timeout=10) # less than the sleep in callback - except exceptions.TimeoutError: - subscription_future.cancel(await_msg_callbacks=True) - - # The shutdown should have waited for the already executing callbacks to finish. - assert len(processed_messages) == 3 - - # The messages that were not processed should have been NACK-ed and we should - # receive them again quite soon. - all_done = threading.Barrier(7 + 1, timeout=5) # +1 because of the main thread - remaining = [] - - def callback2(message): - remaining.append(message.data) - message.ack() - all_done.wait() - - subscription_future = subscriber.subscribe( - subscription_path, callback=callback2 - ) - - try: - all_done.wait() - except threading.BrokenBarrierError: # PRAGMA: no cover - pytest.fail("The remaining messages have not been re-delivered in time.") - finally: - subscription_future.cancel(await_msg_callbacks=False) - - # There should be 7 messages left that were not yet processed and none of them - # should be a message that should have already been sucessfully processed in the - # first streaming pull. 
- assert len(remaining) == 7 - assert not (set(processed_messages) & set(remaining)) # no re-delivery - @pytest.mark.skipif( "KOKORO_GFILE_DIR" not in os.environ, @@ -863,8 +790,8 @@ def _publish_messages(publisher, topic_path, batch_sizes): publish_futures = [] msg_counter = itertools.count(start=1) - for batch_num, batch_size in enumerate(batch_sizes, start=1): - msg_batch = _make_messages(count=batch_size, batch_num=batch_num) + for batch_size in batch_sizes: + msg_batch = _make_messages(count=batch_size) for msg in msg_batch: future = publisher.publish(topic_path, msg, seq_num=str(next(msg_counter))) publish_futures.append(future) @@ -875,10 +802,9 @@ def _publish_messages(publisher, topic_path, batch_sizes): future.result(timeout=30) -def _make_messages(count, batch_num): +def _make_messages(count): messages = [ - f"message {i}/{count} of batch {batch_num}".encode("utf-8") - for i in range(1, count + 1) + "message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) ] return messages diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 47c62bab63a6..288e4bd18314 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -29,14 +29,14 @@ @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest("0", 0, 0, ""), "ack"), - (requests.DropRequest("0", 0, ""), "drop"), - (requests.LeaseRequest("0", 0, ""), "lease"), - (requests.ModAckRequest("0", 0), "modify_ack_deadline"), - (requests.NackRequest("0", 0, ""), "nack"), + (requests.AckRequest(0, 0, 0, ""), "ack"), + (requests.DropRequest(0, 0, ""), "drop"), + (requests.LeaseRequest(0, 0, ""), "lease"), + (requests.ModAckRequest(0, 0), "modify_ack_deadline"), + (requests.NackRequest(0, 0, ""), "nack"), ], ) -def 
test_dispatch_callback_active_manager(item, method_name): +def test_dispatch_callback(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -50,29 +50,16 @@ def test_dispatch_callback_active_manager(item, method_name): method.assert_called_once_with([item]) -@pytest.mark.parametrize( - "item,method_name", - [ - (requests.AckRequest("0", 0, 0, ""), "ack"), - (requests.DropRequest("0", 0, ""), "drop"), - (requests.LeaseRequest("0", 0, ""), "lease"), - (requests.ModAckRequest("0", 0), "modify_ack_deadline"), - (requests.NackRequest("0", 0, ""), "nack"), - ], -) -def test_dispatch_callback_inactive_manager(item, method_name): +def test_dispatch_callback_inactive(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [item] + dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0, "")]) - with mock.patch.object(dispatcher_, method_name) as method: - dispatcher_.dispatch_callback(items) - - method.assert_called_once_with([item]) + manager.send.assert_not_called() def test_ack(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 62a3ea1da1bb..909337cc88c7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -69,18 +69,10 @@ def test__on_close_callback_future_already_done(self): result = future.result() assert result == "foo" # on close callback was a no-op - def test_cancel_default_nonblocking_manager_shutdown(self): + def test_cancel(self): future = self.make_future() future.cancel() - future._manager.close.assert_called_once_with(await_msg_callbacks=False) - assert 
future.cancelled() - - def test_cancel_blocking_manager_shutdown(self): - future = self.make_future() - - future.cancel(await_msg_callbacks=True) - - future._manager.close.assert_called_once_with(await_msg_callbacks=True) + future._manager.close.assert_called_once() assert future.cancelled() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 1a52af231cc5..8f5049691a9d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -22,44 +22,22 @@ import pytest -def test_heartbeat_inactive_manager_active_rpc(caplog): - caplog.set_level(logging.DEBUG) - - manager = mock.create_autospec( - streaming_pull_manager.StreamingPullManager, instance=True - ) - manager.is_active = False - manager.heartbeat.return_value = True # because of active rpc - - heartbeater_ = heartbeater.Heartbeater(manager) - make_sleep_mark_event_as_done(heartbeater_) - - heartbeater_.heartbeat() - - assert "Sent heartbeat" in caplog.text - assert "exiting" in caplog.text - - -def test_heartbeat_inactive_manager_inactive_rpc(caplog): - caplog.set_level(logging.DEBUG) - +def test_heartbeat_inactive(caplog): + caplog.set_level(logging.INFO) manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False - manager.heartbeat.return_value = False # because of inactive rpc heartbeater_ = heartbeater.Heartbeater(manager) - make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() - assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text def test_heartbeat_stopped(caplog): - caplog.set_level(logging.DEBUG) + caplog.set_level(logging.INFO) manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -69,18 +47,17 @@ def 
test_heartbeat_stopped(caplog): heartbeater_.heartbeat() - assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text -def make_sleep_mark_event_as_done(heartbeater): - # Make sleep actually trigger the done event so that heartbeat() +def make_sleep_mark_manager_as_inactive(heartbeater): + # Make sleep mark the manager as inactive so that heartbeat() # exits at the end of the first run. - def trigger_done(timeout): + def trigger_inactive(timeout): assert timeout - heartbeater._stop_event.set() + heartbeater._manager.is_active = False - heartbeater._stop_event.wait = trigger_done + heartbeater._stop_event.wait = trigger_inactive def test_heartbeat_once(): @@ -88,7 +65,7 @@ def test_heartbeat_once(): streaming_pull_manager.StreamingPullManager, instance=True ) heartbeater_ = heartbeater.Heartbeater(manager) - make_sleep_mark_event_as_done(heartbeater_) + make_sleep_mark_manager_as_inactive(heartbeater_) heartbeater_.heartbeat() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 2ecc0b9f3ce1..17409cb3fdb8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -88,21 +88,15 @@ def create_manager(flow_control=types.FlowControl()): return manager -def test_maintain_leases_inactive_manager(caplog): +def test_maintain_leases_inactive(caplog): caplog.set_level(logging.INFO) manager = create_manager() manager.is_active = False leaser_ = leaser.Leaser(manager) - make_sleep_mark_event_as_done(leaser_) - leaser_.add( - [requests.LeaseRequest(ack_id="my_ack_ID", byte_size=42, ordering_key="")] - ) leaser_.maintain_leases() - # Leases should still be maintained even if the manager is inactive. 
- manager.dispatcher.modify_ack_deadline.assert_called() assert "exiting" in caplog.text @@ -118,20 +112,20 @@ def test_maintain_leases_stopped(caplog): assert "exiting" in caplog.text -def make_sleep_mark_event_as_done(leaser): - # Make sleep actually trigger the done event so that heartbeat() +def make_sleep_mark_manager_as_inactive(leaser): + # Make sleep mark the manager as inactive so that maintain_leases # exits at the end of the first run. - def trigger_done(timeout): + def trigger_inactive(timeout): assert 0 < timeout < 10 - leaser._stop_event.set() + leaser._manager.is_active = False - leaser._stop_event.wait = trigger_done + leaser._stop_event.wait = trigger_inactive def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_event_as_done(leaser_) + make_sleep_mark_manager_as_inactive(leaser_) leaser_.add( [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] ) @@ -146,7 +140,7 @@ def test_maintain_leases_ack_ids(): def test_maintain_leases_no_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_event_as_done(leaser_) + make_sleep_mark_manager_as_inactive(leaser_) leaser_.maintain_leases() @@ -157,7 +151,7 @@ def test_maintain_leases_no_ack_ids(): def test_maintain_leases_outdated_items(time): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_event_as_done(leaser_) + make_sleep_mark_manager_as_inactive(leaser_) # Add and start expiry timer at the beginning of the timeline. 
time.return_value = 0 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index ede7c6b2d7a4..774d0d63e2a2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -14,7 +14,6 @@ import concurrent.futures import threading -import time import mock from six.moves import queue @@ -39,89 +38,19 @@ def test_constructor_options(): assert scheduler_._executor == mock.sentinel.executor -def test_schedule_executes_submitted_items(): +def test_schedule(): called_with = [] - callback_done_twice = threading.Barrier(3) # 3 == 2x callback + 1x main thread + called = threading.Event() def callback(*args, **kwargs): - called_with.append((args, kwargs)) # appends are thread-safe - callback_done_twice.wait() + called_with.append((args, kwargs)) + called.set() scheduler_ = scheduler.ThreadScheduler() scheduler_.schedule(callback, "arg1", kwarg1="meep") - scheduler_.schedule(callback, "arg2", kwarg2="boop") - callback_done_twice.wait(timeout=3.0) - result = scheduler_.shutdown() + called.wait() + scheduler_.shutdown() - assert result == [] # no scheduled items dropped - - expected_calls = [(("arg1",), {"kwarg1": "meep"}), (("arg2",), {"kwarg2": "boop"})] - assert sorted(called_with) == expected_calls - - -def test_shutdown_nonblocking_by_default(): - called_with = [] - at_least_one_called = threading.Event() - at_least_one_completed = threading.Event() - - def callback(message): - called_with.append(message) # appends are thread-safe - at_least_one_called.set() - time.sleep(1.0) - at_least_one_completed.set() - - executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) - scheduler_ = scheduler.ThreadScheduler(executor=executor) - - scheduler_.schedule(callback, "message_1") - scheduler_.schedule(callback, "message_2") - - 
at_least_one_called.wait() - dropped = scheduler_.shutdown() - - assert len(called_with) == 1 - assert called_with[0] in {"message_1", "message_2"} - - assert len(dropped) == 1 - assert dropped[0] in {"message_1", "message_2"} - assert dropped[0] != called_with[0] # the dropped message was not the processed one - - err_msg = ( - "Shutdown should not have waited " - "for the already running callbacks to complete." - ) - assert not at_least_one_completed.is_set(), err_msg - - -def test_shutdown_blocking_awaits_running_callbacks(): - called_with = [] - at_least_one_called = threading.Event() - at_least_one_completed = threading.Event() - - def callback(message): - called_with.append(message) # appends are thread-safe - at_least_one_called.set() - time.sleep(1.0) - at_least_one_completed.set() - - executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) - scheduler_ = scheduler.ThreadScheduler(executor=executor) - - scheduler_.schedule(callback, "message_1") - scheduler_.schedule(callback, "message_2") - - at_least_one_called.wait() - dropped = scheduler_.shutdown(await_msg_callbacks=True) - - assert len(called_with) == 1 - assert called_with[0] in {"message_1", "message_2"} - - # The work items that have not been started yet should still be dropped. - assert len(dropped) == 1 - assert dropped[0] in {"message_1", "message_2"} - assert dropped[0] != called_with[0] # the dropped message was not the processed one - - err_msg = "Shutdown did not wait for the already running callbacks to complete." 
- assert at_least_one_completed.is_set(), err_msg + assert called_with == [(("arg1",), {"kwarg1": "meep"})] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index a6454f853412..242c0804ac58 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import functools import logging import threading import time @@ -373,6 +372,7 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): def test_send_unary(): manager = make_manager() + manager._UNARY_REQUESTS = True manager.send( gapic_types.StreamingPullRequest( @@ -405,6 +405,7 @@ def test_send_unary(): def test_send_unary_empty(): manager = make_manager() + manager._UNARY_REQUESTS = True manager.send(gapic_types.StreamingPullRequest()) @@ -416,6 +417,7 @@ def test_send_unary_api_call_error(caplog): caplog.set_level(logging.DEBUG) manager = make_manager() + manager._UNARY_REQUESTS = True error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error @@ -429,6 +431,7 @@ def test_send_unary_retry_error(caplog): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() + manager._UNARY_REQUESTS = True error = exceptions.RetryError( "Too long a transient error", cause=Exception("Out of time!") @@ -442,15 +445,24 @@ def test_send_unary_retry_error(caplog): assert "signaled streaming pull manager shutdown" in caplog.text +def test_send_streaming(): + manager = make_manager() + manager._UNARY_REQUESTS = False + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + + manager.send(mock.sentinel.request) + + 
manager._rpc.send.assert_called_once_with(mock.sentinel.request) + + def test_heartbeat(): manager = make_manager() manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager._rpc.is_active = True - result = manager.heartbeat() + manager.heartbeat() manager._rpc.send.assert_called_once_with(gapic_types.StreamingPullRequest()) - assert result def test_heartbeat_inactive(): @@ -460,8 +472,7 @@ def test_heartbeat_inactive(): manager.heartbeat() - result = manager._rpc.send.assert_not_called() - assert not result + manager._rpc.send.assert_not_called() @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) @@ -621,14 +632,14 @@ def _do_work(self): while not self._stop: try: self._manager.leaser.add([mock.Mock()]) - except Exception as exc: # pragma: NO COVER + except Exception as exc: self._error_callback(exc) time.sleep(0.1) # also try to interact with the leaser after the stop flag has been set try: self._manager.leaser.remove([mock.Mock()]) - except Exception as exc: # pragma: NO COVER + except Exception as exc: self._error_callback(exc) @@ -655,27 +666,6 @@ def test_close_callbacks(): callback.assert_called_once_with(manager, "meep") -def test_close_nacks_internally_queued_messages(): - nacked_messages = [] - - def fake_nack(self): - nacked_messages.append(self.data) - - MockMsg = functools.partial(mock.create_autospec, message.Message, instance=True) - messages = [MockMsg(data=b"msg1"), MockMsg(data=b"msg2"), MockMsg(data=b"msg3")] - for msg in messages: - msg.nack = stdlib_types.MethodType(fake_nack, msg) - - manager, _, _, _, _, _ = make_running_manager() - dropped_by_scheduler = messages[:2] - manager._scheduler.shutdown.return_value = dropped_by_scheduler - manager._messages_on_hold._messages_on_hold.append(messages[2]) - - manager.close() - - assert sorted(nacked_messages) == [b"msg1", b"msg2", b"msg3"] - - def test__get_initial_request(): manager = make_manager() manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) 
@@ -989,15 +979,3 @@ def test_activate_ordering_keys(): manager._messages_on_hold.activate_ordering_keys.assert_called_once_with( ["key1", "key2"], mock.ANY ) - - -def test_activate_ordering_keys_stopped_scheduler(): - manager = make_manager() - manager._messages_on_hold = mock.create_autospec( - messages_on_hold.MessagesOnHold, instance=True - ) - manager._scheduler = None - - manager.activate_ordering_keys(["key1", "key2"]) - - manager._messages_on_hold.activate_ordering_keys.assert_not_called() From b2eeb163a07f392f6ec136050d69664c4dd495e2 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 10 Mar 2021 08:10:33 -0800 Subject: [PATCH 0633/1197] samples: downgrade lib version in requirements.txt (#317) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 08606769bbb7..cee03a6a771c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.4.0 +google-cloud-pubsub==2.3.0 avro==1.10.1 From c9b204e51c4e18ac5474eb923df53597fa8d6f11 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 11 Mar 2021 09:58:23 -0800 Subject: [PATCH 0634/1197] samples: mark test flaky (#322) --- packages/google-cloud-pubsub/samples/snippets/subscriber_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index b9dd38ec3603..b1a23cd103b1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -257,6 +257,7 @@ def run_sample(): assert "With delivery attempts: " in out +@flaky(max_runs=3, min_passes=1) def 
test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): _ = subscriber.update_subscription_with_dead_letter_policy( PROJECT_ID, From eb33c85da76536115aa503668c50f4b8f35f3126 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 12 Mar 2021 14:48:26 -0800 Subject: [PATCH 0635/1197] fix(sample): mark test flaky (#326) --- packages/google-cloud-pubsub/samples/snippets/subscriber_test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index b1a23cd103b1..be9642cd2e61 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -238,6 +238,7 @@ def test_create_subscription_with_dead_letter_policy( assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." in out +@flaky(max_runs=3, min_passes=1) def test_receive_with_delivery_attempts( publisher_client, topic, dead_letter_topic, subscription_dlq, capsys ): From 861caaba41e3ecf9d270e66bc2f07046ca633933 Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Mon, 15 Mar 2021 11:28:06 -0700 Subject: [PATCH 0636/1197] fix(sample): retry InternalServerError (#329) fixes #321 --- .../samples/snippets/subscriber_test.py | 24 ++++++++++++------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index be9642cd2e61..de54598a54e5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -18,6 +18,7 @@ import backoff from flaky import flaky +from google.api_core.exceptions import InternalServerError from google.api_core.exceptions import NotFound from google.api_core.exceptions import Unknown from google.cloud import pubsub_v1 @@ -245,7 +246,7 @@ def 
test_receive_with_delivery_attempts( # The dlq subscription raises 404 before it's ready. # We keep retrying up to 10 minutes for mitigating the flakiness. - @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=600) + @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=120) def run_sample(): _publish_messages(publisher_client, topic) @@ -260,13 +261,20 @@ def run_sample(): @flaky(max_runs=3, min_passes=1) def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): - _ = subscriber.update_subscription_with_dead_letter_policy( - PROJECT_ID, - TOPIC, - SUBSCRIPTION_DLQ, - DEAD_LETTER_TOPIC, - UPDATED_MAX_DELIVERY_ATTEMPTS, - ) + + # We saw internal server error that suggests to retry. + + @backoff.on_exception(backoff.expo, (Unknown, InternalServerError), max_time=60) + def run_sample(): + subscriber.update_subscription_with_dead_letter_policy( + PROJECT_ID, + TOPIC, + SUBSCRIPTION_DLQ, + DEAD_LETTER_TOPIC, + UPDATED_MAX_DELIVERY_ATTEMPTS, + ) + + run_sample() out, _ = capsys.readouterr() assert dead_letter_topic in out From 0f5573cb51ff2efdc1cb67146f5e2c03ee637280 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Tue, 16 Mar 2021 17:18:19 -0400 Subject: [PATCH 0637/1197] fix: Remove EXPERIMENTAL tag for ordering keys in types.py (#323) Co-authored-by: Tianzi Cai --- packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index b875f3cd25ab..677e4774fd84 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -107,9 +107,6 @@ class LimitExceededBehavior(str, enum.Enum): PublisherOptions.__doc__ = "The options for the publisher client." PublisherOptions.enable_message_ordering.__doc__ = ( "Whether to order messages in a batch by a supplied ordering key." 
- "EXPERIMENTAL: Message ordering is an alpha feature that requires " - "special permissions to use. Please contact the Cloud Pub/Sub team for " - "more information." ) PublisherOptions.flow_control.__doc__ = ( "Flow control settings for message publishing by the client. By default " From c5ead8345c28a5c8a425127fd53cf7b07175df6e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 16 Mar 2021 22:18:41 +0100 Subject: [PATCH 0638/1197] chore(deps): update precommit hook pycqa/flake8 to v3.9.0 (#328) --- packages/google-cloud-pubsub/.pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index a9024b15d725..32302e4883a1 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 From 79d8ed3a6b75fc2e97c810d5fb772e95516d8bd6 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 17 Mar 2021 12:38:01 +0100 Subject: [PATCH 0639/1197] chore: remove Python 2 compatibility code (#302) Closes #181. **PR checklist:** - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- .../cloud/pubsub_v1/publisher/_batch/base.py | 5 +---- .../cloud/pubsub_v1/publisher/_batch/thread.py | 5 +---- .../pubsub_v1/publisher/_sequencer/base.py | 5 +---- .../google/cloud/pubsub_v1/publisher/client.py | 8 +++----- .../subscriber/_protocol/helper_threads.py | 3 +-- .../pubsub_v1/subscriber/_protocol/leaser.py | 4 +--- .../_protocol/streaming_pull_manager.py | 3 +-- .../cloud/pubsub_v1/subscriber/scheduler.py | 17 +++++------------ packages/google-cloud-pubsub/tests/system.py | 15 +++++++-------- .../pubsub_v1/subscriber/test_dispatcher.py | 2 +- .../pubsub_v1/subscriber/test_helper_threads.py | 2 +- .../unit/pubsub_v1/subscriber/test_message.py | 4 ++-- .../subscriber/test_messages_on_hold.py | 2 +- .../unit/pubsub_v1/subscriber/test_scheduler.py | 2 +- 14 files changed, 27 insertions(+), 50 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 212a4b2774c0..6a503c098d17 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -17,11 +17,8 @@ import abc import enum -import six - -@six.add_metaclass(abc.ABCMeta) -class Batch(object): +class Batch(metaclass=abc.ABCMeta): """The base batching class for Pub/Sub publishing. 
Although the :class:`~.pubsub_v1.publisher.batch.thread.Batch` class, based diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index fc4e6ba6d4f7..36dd3b946fce 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -18,8 +18,6 @@ import threading import time -import six - import google.api_core.exceptions from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher import exceptions @@ -287,8 +285,7 @@ def _commit(self): # IDs. We are trusting that there is a 1:1 mapping, and raise # an exception if not. self._status = base.BatchStatus.SUCCESS - zip_iter = six.moves.zip(response.message_ids, self._futures) - for message_id, future in zip_iter: + for message_id, future in zip(response.message_ids, self._futures): future.set_result(message_id) else: # Sanity check: If the number of message IDs is not equal to diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index 3cfa809f7ba8..4abf4b070cf5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -16,11 +16,8 @@ import abc -import six - -@six.add_metaclass(abc.ABCMeta) -class Sequencer(object): +class Sequencer(metaclass=abc.ABCMeta): """The base class for sequencers for Pub/Sub publishing. A sequencer sequences messages to be published. 
""" diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 9ad4a9474c44..cf69a46be6f9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -21,8 +21,6 @@ import threading import time -import six - from google.api_core import gapic_v1 from google.auth.credentials import AnonymousCredentials from google.oauth2 import service_account @@ -295,7 +293,7 @@ def publish( """ # Sanity check: Is the data being sent as a bytestring? # If it is literally anything else, complain loudly about it. - if not isinstance(data, six.binary_type): + if not isinstance(data, bytes): raise TypeError( "Data being published to Pub/Sub must be sent as a bytestring." ) @@ -308,9 +306,9 @@ def publish( # Coerce all attributes to text strings. for k, v in copy.copy(attrs).items(): - if isinstance(v, six.text_type): + if isinstance(v, str): continue - if isinstance(v, six.binary_type): + if isinstance(v, bytes): attrs[k] = v.decode("utf-8") continue raise TypeError( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index 80ad58738cd1..661df7927014 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -13,11 +13,10 @@ # limitations under the License. 
import logging +import queue import time import uuid -from six.moves import queue - __all__ = ("QueueCallbackWorker", "STOP") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 5830680da8eb..adb1650d2a71 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -21,8 +21,6 @@ import threading import time -import six - from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -144,7 +142,7 @@ def maintain_leases(self): cutoff = time.time() - self._manager.flow_control.max_lease_duration to_drop = [ requests.DropRequest(ack_id, item.size, item.ordering_key) - for ack_id, item in six.iteritems(leased_messages) + for ack_id, item in leased_messages.items() if item.sent_time < cutoff ] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e8a4a8caf9d5..de333c5393ae 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -21,7 +21,6 @@ import uuid import grpc -import six from google.api_core import bidi from google.api_core import exceptions @@ -406,7 +405,7 @@ def _send_unary_request(self, request): deadline = request.modify_deadline_seconds[n] deadline_to_ack_ids[deadline].append(ack_id) - for deadline, ack_ids in six.iteritems(deadline_to_ack_ids): + for deadline, ack_ids in deadline_to_ack_ids.items(): self._client.modify_ack_deadline( subscription=self._subscription, ack_ids=ack_ids, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index ef2ef59cb6bf..84f494eb9fc0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -20,14 +20,10 @@ import abc import concurrent.futures -import sys +import queue -import six -from six.moves import queue - -@six.add_metaclass(abc.ABCMeta) -class Scheduler(object): +class Scheduler(metaclass=abc.ABCMeta): """Abstract base class for schedulers. Schedulers are used to schedule callbacks asynchronously. @@ -65,12 +61,9 @@ def shutdown(self): def _make_default_thread_pool_executor(): - # Python 2.7 and 3.6+ have the thread_name_prefix argument, which is useful - # for debugging. - executor_kwargs = {} - if sys.version_info[:2] == (2, 7) or sys.version_info >= (3, 6): - executor_kwargs["thread_name_prefix"] = "ThreadPoolExecutor-ThreadScheduler" - return concurrent.futures.ThreadPoolExecutor(max_workers=10, **executor_kwargs) + return concurrent.futures.ThreadPoolExecutor( + max_workers=10, thread_name_prefix="ThreadPoolExecutor-ThreadScheduler" + ) class ThreadScheduler(Scheduler): diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index bbedd9a11ff9..512d75a5cfa7 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -24,7 +24,6 @@ import mock import pytest -import six import google.auth from google.api_core import exceptions as core_exceptions @@ -86,12 +85,12 @@ def test_publish_messages(publisher, topic_path, cleanup): publisher.publish( topic_path, b"The hail in Wales falls mainly on the snails.", num=str(i) ) - for i in six.moves.range(500) + for i in range(500) ] for future in futures: result = future.result() - assert isinstance(result, six.string_types) + assert isinstance(result, str) def test_publish_large_messages(publisher, topic_path, cleanup): @@ 
-120,7 +119,7 @@ def test_publish_large_messages(publisher, topic_path, cleanup): # be no "InvalidArgument: request_size is too large" error. for future in futures: result = future.result(timeout=10) - assert isinstance(result, six.string_types) # the message ID + assert isinstance(result, str) # the message ID def test_subscribe_to_messages( @@ -142,7 +141,7 @@ def test_subscribe_to_messages( # Publish some messages. futures = [ publisher.publish(topic_path, b"Wooooo! The claaaaaw!", num=str(index)) - for index in six.moves.range(50) + for index in range(50) ] # Make sure the publish completes. @@ -154,7 +153,7 @@ def test_subscribe_to_messages( # that we got everything at least once. callback = AckCallback() future = subscriber.subscribe(subscription_path, callback) - for second in six.moves.range(10): + for second in range(10): time.sleep(1) # The callback should have fired at least fifty times, but it @@ -187,7 +186,7 @@ def test_subscribe_to_messages_async_callbacks( # Publish some messages. futures = [ publisher.publish(topic_path, b"Wooooo! The claaaaaw!", num=str(index)) - for index in six.moves.range(2) + for index in range(2) ] # Make sure the publish completes. @@ -200,7 +199,7 @@ def test_subscribe_to_messages_async_callbacks( # Actually open the subscription and hold it open for a few seconds. future = subscriber.subscribe(subscription_path, callback) - for second in six.moves.range(5): + for second in range(5): time.sleep(4) # The callback should have fired at least two times, but it may diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 288e4bd18314..097ff46afb35 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -13,6 +13,7 @@ # limitations under the License. 
import collections +import queue import threading from google.cloud.pubsub_v1.subscriber._protocol import dispatcher @@ -22,7 +23,6 @@ from google.pubsub_v1 import types as gapic_types import mock -from six.moves import queue import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 6e1bcc813c89..9ebd37f4fbdc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -13,7 +13,7 @@ # limitations under the License. import mock -from six.moves import queue +import queue from google.cloud.pubsub_v1.subscriber._protocol import helper_threads diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 09f796480eed..75580c71d20b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -13,16 +13,16 @@ # limitations under the License. 
import datetime +import queue import time import mock import pytz -from six.moves import queue -from google.protobuf import timestamp_pb2 from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.protobuf import timestamp_pb2 from google.pubsub_v1 import types as gapic_types diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 6fd83d13a515..797430e0780a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from six.moves import queue +import queue from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 774d0d63e2a2..2ed1ea55ad57 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -13,10 +13,10 @@ # limitations under the License. 
import concurrent.futures +import queue import threading import mock -from six.moves import queue from google.cloud.pubsub_v1.subscriber import scheduler From bff13dc127274e5b09973c96fa3960c8640d679b Mon Sep 17 00:00:00 2001 From: Takashi Matsuo Date: Thu, 18 Mar 2021 13:37:16 -0700 Subject: [PATCH 0640/1197] testing(sample): add presubmit builds against head (#337) * testing(sample): introducing a default wrapper for sample test runner This should be non-destructive change. I'll add a commit for actually adding a new type of periodic build. * fixes the script path * preserve the test runner implementation and move it back * fix a bug, also add a new wrapper * add config files for periodic builds against head * fix license years * add executable bit --- .../samples/python3.6/periodic-head.cfg | 11 ++ .../samples/python3.7/periodic-head.cfg | 11 ++ .../samples/python3.8/periodic-head.cfg | 11 ++ .../.kokoro/test-samples-against-head.sh | 28 +++++ .../.kokoro/test-samples-impl.sh | 102 ++++++++++++++++++ .../.kokoro/test-samples.sh | 96 +++-------------- 6 files changed, 179 insertions(+), 80 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg create mode 100755 packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh create mode 100755 packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: 
"True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh new file mode 100755 index 000000000000..8f2b723fc7ba --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-pubsub + +exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh new file mode 100755 index 000000000000..cf5de74c17a5 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. 
+ file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh index 9a86e0c69d19..9753da38773c 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-pubsub # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. 
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. 
- file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. 
\n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh From 505bc9514c83f2843fc18aa97973fe4054c9ec6a Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 19 Mar 2021 14:39:30 -0700 Subject: [PATCH 0641/1197] chore(samples): fix typo --- packages/google-cloud-pubsub/samples/snippets/schema_test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index 5447701875b8..f0a4470f7e2b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -88,7 +88,7 @@ def avro_topic(publisher_client, avro_schema): "name": avro_topic_path, "schema_settings": { "schema": avro_schema, - "encoding": Encoding.BINAARY, + "encoding": Encoding.BINARY, }, } ) From be165bce6f228ceea59faacfd5d141b3ad77ed32 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 24 Mar 2021 09:36:58 +0100 Subject: [PATCH 0642/1197] fix: move await_msg_callbacks flag to subscribe() method (#320) * Revert "revert: add graceful streaming pull shutdown (#315)" This reverts commit 16bf58823020c6b20e03a21b8b1de46eda3a2340. * Move await_msg_callbacks to subscribe() method This is to keep the StreamingPullFuture's surface intact for compatibility with PubSub Lite client. 
* Make streaming pull close() method non-blocking * Add a blocking streaming pull shutdown sample * Refine docs on awaiting callbacks on shutdown --- .../subscriber/_protocol/dispatcher.py | 3 - .../subscriber/_protocol/heartbeater.py | 9 +- .../pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../_protocol/streaming_pull_manager.py | 108 +++++++++++++----- .../cloud/pubsub_v1/subscriber/client.py | 13 +++ .../cloud/pubsub_v1/subscriber/scheduler.py | 49 ++++++-- .../samples/snippets/subscriber.py | 57 +++++++++ .../samples/snippets/subscriber_test.py | 47 ++++++++ packages/google-cloud-pubsub/tests/system.py | 86 +++++++++++++- .../pubsub_v1/subscriber/test_dispatcher.py | 31 +++-- .../pubsub_v1/subscriber/test_heartbeater.py | 41 +++++-- .../unit/pubsub_v1/subscriber/test_leaser.py | 24 ++-- .../pubsub_v1/subscriber/test_scheduler.py | 85 ++++++++++++-- .../subscriber/test_streaming_pull_manager.py | 82 +++++++++---- .../subscriber/test_subscriber_client.py | 2 + 15 files changed, 533 insertions(+), 106 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 7a89508446f8..382c5c38a2c2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -99,9 +99,6 @@ def dispatch_callback(self, items): ValueError: If ``action`` isn't one of the expected actions "ack", "drop", "lease", "modify_ack_deadline" or "nack". 
""" - if not self._manager.is_active: - return - batched_commands = collections.defaultdict(list) for item in items: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 9cd84a1e2397..fef158965c57 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -35,10 +35,11 @@ def __init__(self, manager, period=_DEFAULT_PERIOD): self._period = period def heartbeat(self): - """Periodically send heartbeats.""" - while self._manager.is_active and not self._stop_event.is_set(): - self._manager.heartbeat() - _LOGGER.debug("Sent heartbeat.") + """Periodically send streaming pull heartbeats. + """ + while not self._stop_event.is_set(): + if self._manager.heartbeat(): + _LOGGER.debug("Sent heartbeat.") self._stop_event.wait(timeout=self._period) _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index adb1650d2a71..c1f8b46d29a7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -124,7 +124,7 @@ def maintain_leases(self): ack IDs, then waits for most of that time (but with jitter), and repeats. """ - while self._manager.is_active and not self._stop_event.is_set(): + while not self._stop_event.is_set(): # Determine the appropriate duration for the lease. This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index de333c5393ae..ac940de268a7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -16,6 +16,7 @@ import collections import functools +import itertools import logging import threading import uuid @@ -36,6 +37,7 @@ from google.pubsub_v1 import types as gapic_types _LOGGER = logging.getLogger(__name__) +_REGULAR_SHUTDOWN_THREAD_NAME = "Thread-RegularStreamShutdown" _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, @@ -110,12 +112,20 @@ class StreamingPullManager(object): scheduler (~google.cloud.pubsub_v1.scheduler.Scheduler): The scheduler to use to process messages. If not provided, a thread pool-based scheduler will be used. + await_callbacks_on_shutdown (bool): + If ``True``, the shutdown thread will wait until all scheduler threads + terminate and only then proceed with shutting down the remaining running + helper threads. + + If ``False`` (default), the shutdown thread will shut the scheduler down, + but it will not wait for the currently executing scheduler threads to + terminate. + + This setting affects when the on close callbacks get invoked, and + consequently, when the StreamingPullFuture associated with the stream gets + resolved. 
""" - _UNARY_REQUESTS = True - """If set to True, this class will make requests over a separate unary - RPC instead of over the streaming RPC.""" - def __init__( self, client, @@ -123,11 +133,13 @@ def __init__( flow_control=types.FlowControl(), scheduler=None, use_legacy_flow_control=False, + await_callbacks_on_shutdown=False, ): self._client = client self._subscription = subscription self._flow_control = flow_control self._use_legacy_flow_control = use_legacy_flow_control + self._await_callbacks_on_shutdown = await_callbacks_on_shutdown self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 self._ack_deadline = 10 @@ -291,6 +303,9 @@ def activate_ordering_keys(self, ordering_keys): activate. May be empty. """ with self._pause_resume_lock: + if self._scheduler is None: + return # We are shutting down, don't try to dispatch any more messages. + self._messages_on_hold.activate_ordering_keys( ordering_keys, self._schedule_message_on_hold ) @@ -420,37 +435,36 @@ def send(self, request): If a RetryError occurs, the manager shutdown is triggered, and the error is re-raised. """ - if self._UNARY_REQUESTS: - try: - self._send_unary_request(request) - except exceptions.GoogleAPICallError: - _LOGGER.debug( - "Exception while sending unary RPC. This is typically " - "non-fatal as stream requests are best-effort.", - exc_info=True, - ) - except exceptions.RetryError as exc: - _LOGGER.debug( - "RetryError while sending unary RPC. Waiting on a transient " - "error resolution for too long, will now trigger shutdown.", - exc_info=False, - ) - # The underlying channel has been suffering from a retryable error - # for too long, time to give up and shut the streaming pull down. - self._on_rpc_done(exc) - raise - - else: - self._rpc.send(request) + try: + self._send_unary_request(request) + except exceptions.GoogleAPICallError: + _LOGGER.debug( + "Exception while sending unary RPC. 
This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, + ) + except exceptions.RetryError as exc: + _LOGGER.debug( + "RetryError while sending unary RPC. Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. + self._on_rpc_done(exc) + raise def heartbeat(self): """Sends an empty request over the streaming pull RPC. - This always sends over the stream, regardless of if - ``self._UNARY_REQUESTS`` is set or not. + Returns: + bool: If a heartbeat request has actually been sent. """ if self._rpc is not None and self._rpc.is_active: self._rpc.send(gapic_types.StreamingPullRequest()) + return True + + return False def open(self, callback, on_callback_error): """Begin consuming messages. @@ -517,11 +531,29 @@ def close(self, reason=None): This method is idempotent. Additional calls will have no effect. + The method does not block, it delegates the shutdown operations to a background + thread. + Args: - reason (Any): The reason to close this. If None, this is considered + reason (Any): The reason to close this. If ``None``, this is considered an "intentional" shutdown. This is passed to the callbacks specified via :meth:`add_close_callback`. """ + thread = threading.Thread( + name=_REGULAR_SHUTDOWN_THREAD_NAME, + daemon=True, + target=self._shutdown, + kwargs={"reason": reason}, + ) + thread.start() + + def _shutdown(self, reason=None): + """Run the actual shutdown sequence (stop the stream and all helper threads). + + Args: + reason (Any): The reason to close the stream. If ``None``, this is + considered an "intentional" shutdown. 
+ """ with self._closing: if self._closed: return @@ -534,7 +566,9 @@ def close(self, reason=None): # Shutdown all helper threads _LOGGER.debug("Stopping scheduler.") - self._scheduler.shutdown() + dropped_messages = self._scheduler.shutdown( + await_msg_callbacks=self._await_callbacks_on_shutdown + ) self._scheduler = None # Leaser and dispatcher reference each other through the shared @@ -548,11 +582,23 @@ def close(self, reason=None): # because the consumer gets shut down first. _LOGGER.debug("Stopping leaser.") self._leaser.stop() + + total = len(dropped_messages) + len( + self._messages_on_hold._messages_on_hold + ) + _LOGGER.debug(f"NACK-ing all not-yet-dispatched messages (total: {total}).") + messages_to_nack = itertools.chain( + dropped_messages, self._messages_on_hold._messages_on_hold + ) + for msg in messages_to_nack: + msg.nack() + _LOGGER.debug("Stopping dispatcher.") self._dispatcher.stop() self._dispatcher = None # dispatcher terminated, OK to dispose the leaser reference now self._leaser = None + _LOGGER.debug("Stopping heartbeater.") self._heartbeater.stop() self._heartbeater = None @@ -722,7 +768,7 @@ def _on_rpc_done(self, future): _LOGGER.info("RPC termination has signaled streaming pull manager shutdown.") error = _wrap_as_exception(future) thread = threading.Thread( - name=_RPC_ERROR_THREAD_NAME, target=self.close, kwargs={"reason": error} + name=_RPC_ERROR_THREAD_NAME, target=self._shutdown, kwargs={"reason": error} ) thread.daemon = True thread.start() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index f306d2d99954..51bdc106ca8d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -122,6 +122,7 @@ def subscribe( flow_control=(), scheduler=None, use_legacy_flow_control=False, + await_callbacks_on_shutdown=False, ): 
"""Asynchronously start receiving messages on a given subscription. @@ -199,6 +200,17 @@ def callback(message): *scheduler* to use when executing the callback. This controls how callbacks are executed concurrently. This object must not be shared across multiple SubscriberClients. + await_callbacks_on_shutdown (bool): + If ``True``, after canceling the returned future, the latter's + ``result()`` method will block until the background stream and its + helper threads have been terminated, and all currently executing message + callbacks are done processing. + + If ``False`` (default), the returned future's ``result()`` method will + not block after canceling the future. The method will instead return + immediately after the background stream and its helper threads have been + terminated, but some of the message callback threads might still be + running at that point. Returns: A :class:`~google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture` @@ -212,6 +224,7 @@ def callback(message): flow_control=flow_control, scheduler=scheduler, use_legacy_flow_control=use_legacy_flow_control, + await_callbacks_on_shutdown=await_callbacks_on_shutdown, ) future = futures.StreamingPullFuture(manager) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 84f494eb9fc0..dd623517c6c6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -54,8 +54,21 @@ def schedule(self, callback, *args, **kwargs): raise NotImplementedError @abc.abstractmethod - def shutdown(self): + def shutdown(self, await_msg_callbacks=False): """Shuts down the scheduler and immediately end all pending callbacks. + + Args: + await_msg_callbacks (bool): + If ``True``, the method will block until all currently executing + callbacks are done processing. 
If ``False`` (default), the + method will not wait for the currently running callbacks to complete. + + Returns: + List[pubsub_v1.subscriber.message.Message]: + The messages submitted to the scheduler that were not yet dispatched + to their callbacks. + It is assumed that each message was submitted to the scheduler as the + first positional argument to the provided callback. """ raise NotImplementedError @@ -103,15 +116,35 @@ def schedule(self, callback, *args, **kwargs): """ self._executor.submit(callback, *args, **kwargs) - def shutdown(self): - """Shuts down the scheduler and immediately end all pending callbacks. + def shutdown(self, await_msg_callbacks=False): + """Shut down the scheduler and immediately end all pending callbacks. + + Args: + await_msg_callbacks (bool): + If ``True``, the method will block until all currently executing + executor threads are done processing. If ``False`` (default), the + method will not wait for the currently running threads to complete. + + Returns: + List[pubsub_v1.subscriber.message.Message]: + The messages submitted to the scheduler that were not yet dispatched + to their callbacks. + It is assumed that each message was submitted to the scheduler as the + first positional argument to the provided callback. """ - # Drop all pending item from the executor. Without this, the executor - # will block until all pending items are complete, which is - # undesirable. + dropped_messages = [] + + # Drop all pending item from the executor. Without this, the executor will also + # try to process any pending work items before termination, which is undesirable. + # + # TODO: Replace the logic below by passing `cancel_futures=True` to shutdown() + # once we only need to support Python 3.9+. 
try: while True: - self._executor._work_queue.get(block=False) + work_item = self._executor._work_queue.get(block=False) + dropped_messages.append(work_item.args[0]) except queue.Empty: pass - self._executor.shutdown() + + self._executor.shutdown(wait=await_msg_callbacks) + return dropped_messages diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index aa5771b862db..112c5a96af30 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -478,6 +478,50 @@ def callback(message): # [END pubsub_subscriber_flow_settings] +def receive_messages_with_blocking_shutdown(project_id, subscription_id, timeout=5.0): + """Shuts down a pull subscription by awaiting message callbacks to complete.""" + # [START pubsub_subscriber_blocking_shutdown] + import time + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message): + print(f"Received {message.data}.") + time.sleep(timeout + 5.0) # Pocess longer than streaming pull future timeout. + message.ack() + print(f"Done processing the message {message.data}.") + + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback, await_callbacks_on_shutdown=True, + ) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. 
+ streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() + print("Streaming pull future canceled.") + streaming_pull_future.result() # Blocks until shutdown complete. + print("Done waiting for the stream shutdown.") + + # The "Done waiting..." message is only printed *after* the processing of all + # received messages has completed. + # [END pubsub_subscriber_blocking_shutdown] + + def synchronous_pull(project_id, subscription_id): """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] @@ -749,6 +793,15 @@ def callback(message): "timeout", default=None, type=float, nargs="?" ) + receive_with_blocking_shutdown_parser = subparsers.add_parser( + "receive-blocking-shutdown", + help=receive_messages_with_blocking_shutdown.__doc__, + ) + receive_with_blocking_shutdown_parser.add_argument("subscription_id") + receive_with_blocking_shutdown_parser.add_argument( + "timeout", default=None, type=float, nargs="?" + ) + synchronous_pull_parser = subparsers.add_parser( "receive-synchronously", help=synchronous_pull.__doc__ ) @@ -827,6 +880,10 @@ def callback(message): receive_messages_with_flow_control( args.project_id, args.subscription_id, args.timeout ) + elif args.command == "receive-blocking-shutdown": + receive_messages_with_blocking_shutdown( + args.project_id, args.subscription_id, args.timeout + ) elif args.command == "receive-synchronously": synchronous_pull(args.project_id, args.subscription_id) elif args.command == "receive-synchronously-with-lease": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index de54598a54e5..8d034949dc6b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -13,6 +13,7 @@ # limitations under the License. 
import os +import re import sys import uuid @@ -395,6 +396,52 @@ def test_receive_with_flow_control(publisher_client, topic, subscription_async, assert "message" in out +def test_receive_with_blocking_shutdown( + publisher_client, topic, subscription_async, capsys +): + _publish_messages(publisher_client, topic, message_num=3) + + subscriber.receive_messages_with_blocking_shutdown( + PROJECT_ID, SUBSCRIPTION_ASYNC, timeout=5.0 + ) + + out, _ = capsys.readouterr() + out_lines = out.splitlines() + + msg_received_lines = [ + i for i, line in enumerate(out_lines) + if re.search(r".*received.*message.*", line, flags=re.IGNORECASE) + ] + msg_done_lines = [ + i for i, line in enumerate(out_lines) + if re.search(r".*done processing.*message.*", line, flags=re.IGNORECASE) + ] + stream_canceled_lines = [ + i for i, line in enumerate(out_lines) + if re.search(r".*streaming pull future canceled.*", line, flags=re.IGNORECASE) + ] + shutdown_done_waiting_lines = [ + i for i, line in enumerate(out_lines) + if re.search(r".*done waiting.*stream shutdown.*", line, flags=re.IGNORECASE) + ] + + assert "Listening" in out + assert subscription_async in out + + assert len(stream_canceled_lines) == 1 + assert len(shutdown_done_waiting_lines) == 1 + assert len(msg_received_lines) == 3 + assert len(msg_done_lines) == 3 + + # The stream should have been canceled *after* receiving messages, but before + # message processing was done. + assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] + + # Yet, waiting on the stream shutdown should have completed *after* the processing + # of received messages has ended. 
+ assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] + + def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): _publish_messages(publisher_client, topic) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 512d75a5cfa7..181632d79834 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import concurrent.futures import datetime import itertools import operator as op @@ -608,6 +609,82 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + def test_streaming_pull_blocking_shutdown( + self, publisher, topic_path, subscriber, subscription_path, cleanup + ): + # Make sure the topic and subscription get deleted. + cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) + cleanup.append( + (subscriber.delete_subscription, (), {"subscription": subscription_path}) + ) + + # The ACK-s are only persisted if *all* messages published in the same batch + # are ACK-ed. We thus publish each message in its own batch so that the backend + # treats all messages' ACKs independently of each other. + publisher.create_topic(name=topic_path) + subscriber.create_subscription(name=subscription_path, topic=topic_path) + _publish_messages(publisher, topic_path, batch_sizes=[1] * 10) + + # Artificially delay message processing, gracefully shutdown the streaming pull + # in the meantime, then verify that those messages were nevertheless processed. + processed_messages = [] + + def callback(message): + time.sleep(15) + processed_messages.append(message.data) + message.ack() + + # Flow control limits should exceed the number of worker threads, so that some + # of the messages will be blocked on waiting for free scheduler threads. 
+ flow_control = pubsub_v1.types.FlowControl(max_messages=5) + executor = concurrent.futures.ThreadPoolExecutor(max_workers=3) + scheduler = pubsub_v1.subscriber.scheduler.ThreadScheduler(executor=executor) + subscription_future = subscriber.subscribe( + subscription_path, + callback=callback, + flow_control=flow_control, + scheduler=scheduler, + await_callbacks_on_shutdown=True, + ) + + try: + subscription_future.result(timeout=10) # less than the sleep in callback + except exceptions.TimeoutError: + subscription_future.cancel() + subscription_future.result() # block until shutdown completes + + # Blocking om shutdown should have waited for the already executing + # callbacks to finish. + assert len(processed_messages) == 3 + + # The messages that were not processed should have been NACK-ed and we should + # receive them again quite soon. + all_done = threading.Barrier(7 + 1, timeout=5) # +1 because of the main thread + remaining = [] + + def callback2(message): + remaining.append(message.data) + message.ack() + all_done.wait() + + subscription_future = subscriber.subscribe( + subscription_path, callback=callback2, await_callbacks_on_shutdown=False + ) + + try: + all_done.wait() + except threading.BrokenBarrierError: # PRAGMA: no cover + pytest.fail("The remaining messages have not been re-delivered in time.") + finally: + subscription_future.cancel() + subscription_future.result() # block until shutdown completes + + # There should be 7 messages left that were not yet processed and none of them + # should be a message that should have already been sucessfully processed in the + # first streaming pull. 
+ assert len(remaining) == 7 + assert not (set(processed_messages) & set(remaining)) # no re-delivery + @pytest.mark.skipif( "KOKORO_GFILE_DIR" not in os.environ, @@ -789,8 +866,8 @@ def _publish_messages(publisher, topic_path, batch_sizes): publish_futures = [] msg_counter = itertools.count(start=1) - for batch_size in batch_sizes: - msg_batch = _make_messages(count=batch_size) + for batch_num, batch_size in enumerate(batch_sizes, start=1): + msg_batch = _make_messages(count=batch_size, batch_num=batch_num) for msg in msg_batch: future = publisher.publish(topic_path, msg, seq_num=str(next(msg_counter))) publish_futures.append(future) @@ -801,9 +878,10 @@ def _publish_messages(publisher, topic_path, batch_sizes): future.result(timeout=30) -def _make_messages(count): +def _make_messages(count, batch_num): messages = [ - "message {}/{}".format(i, count).encode("utf-8") for i in range(1, count + 1) + f"message {i}/{count} of batch {batch_num}".encode("utf-8") + for i in range(1, count + 1) ] return messages diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 097ff46afb35..84e04df1b99c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -29,14 +29,14 @@ @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest(0, 0, 0, ""), "ack"), - (requests.DropRequest(0, 0, ""), "drop"), - (requests.LeaseRequest(0, 0, ""), "lease"), - (requests.ModAckRequest(0, 0), "modify_ack_deadline"), - (requests.NackRequest(0, 0, ""), "nack"), + (requests.AckRequest("0", 0, 0, ""), "ack"), + (requests.DropRequest("0", 0, ""), "drop"), + (requests.LeaseRequest("0", 0, ""), "lease"), + (requests.ModAckRequest("0", 0), "modify_ack_deadline"), + (requests.NackRequest("0", 0, ""), "nack"), ], ) -def test_dispatch_callback(item, 
method_name): +def test_dispatch_callback_active_manager(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -50,16 +50,29 @@ def test_dispatch_callback(item, method_name): method.assert_called_once_with([item]) -def test_dispatch_callback_inactive(): +@pytest.mark.parametrize( + "item,method_name", + [ + (requests.AckRequest("0", 0, 0, ""), "ack"), + (requests.DropRequest("0", 0, ""), "drop"), + (requests.LeaseRequest("0", 0, ""), "lease"), + (requests.ModAckRequest("0", 0), "modify_ack_deadline"), + (requests.NackRequest("0", 0, ""), "nack"), + ], +) +def test_dispatch_callback_inactive_manager(item, method_name): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - dispatcher_.dispatch_callback([requests.AckRequest(0, 0, 0, "")]) + items = [item] - manager.send.assert_not_called() + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([item]) def test_ack(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 8f5049691a9d..1a52af231cc5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -22,22 +22,44 @@ import pytest -def test_heartbeat_inactive(caplog): - caplog.set_level(logging.INFO) +def test_heartbeat_inactive_manager_active_rpc(caplog): + caplog.set_level(logging.DEBUG) + + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + manager.is_active = False + manager.heartbeat.return_value = True # because of active rpc + + heartbeater_ = heartbeater.Heartbeater(manager) + 
make_sleep_mark_event_as_done(heartbeater_) + + heartbeater_.heartbeat() + + assert "Sent heartbeat" in caplog.text + assert "exiting" in caplog.text + + +def test_heartbeat_inactive_manager_inactive_rpc(caplog): + caplog.set_level(logging.DEBUG) + manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) manager.is_active = False + manager.heartbeat.return_value = False # because of inactive rpc heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() + assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text def test_heartbeat_stopped(caplog): - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) @@ -47,17 +69,18 @@ def test_heartbeat_stopped(caplog): heartbeater_.heartbeat() + assert "Sent heartbeat" not in caplog.text assert "exiting" in caplog.text -def make_sleep_mark_manager_as_inactive(heartbeater): - # Make sleep mark the manager as inactive so that heartbeat() +def make_sleep_mark_event_as_done(heartbeater): + # Make sleep actually trigger the done event so that heartbeat() # exits at the end of the first run. 
- def trigger_inactive(timeout): + def trigger_done(timeout): assert timeout - heartbeater._manager.is_active = False + heartbeater._stop_event.set() - heartbeater._stop_event.wait = trigger_inactive + heartbeater._stop_event.wait = trigger_done def test_heartbeat_once(): @@ -65,7 +88,7 @@ def test_heartbeat_once(): streaming_pull_manager.StreamingPullManager, instance=True ) heartbeater_ = heartbeater.Heartbeater(manager) - make_sleep_mark_manager_as_inactive(heartbeater_) + make_sleep_mark_event_as_done(heartbeater_) heartbeater_.heartbeat() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 17409cb3fdb8..2ecc0b9f3ce1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -88,15 +88,21 @@ def create_manager(flow_control=types.FlowControl()): return manager -def test_maintain_leases_inactive(caplog): +def test_maintain_leases_inactive_manager(caplog): caplog.set_level(logging.INFO) manager = create_manager() manager.is_active = False leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + leaser_.add( + [requests.LeaseRequest(ack_id="my_ack_ID", byte_size=42, ordering_key="")] + ) leaser_.maintain_leases() + # Leases should still be maintained even if the manager is inactive. + manager.dispatcher.modify_ack_deadline.assert_called() assert "exiting" in caplog.text @@ -112,20 +118,20 @@ def test_maintain_leases_stopped(caplog): assert "exiting" in caplog.text -def make_sleep_mark_manager_as_inactive(leaser): - # Make sleep mark the manager as inactive so that maintain_leases +def make_sleep_mark_event_as_done(leaser): + # Make sleep actually trigger the done event so that heartbeat() # exits at the end of the first run. 
- def trigger_inactive(timeout): + def trigger_done(timeout): assert 0 < timeout < 10 - leaser._manager.is_active = False + leaser._stop_event.set() - leaser._stop_event.wait = trigger_inactive + leaser._stop_event.wait = trigger_done def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) leaser_.add( [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] ) @@ -140,7 +146,7 @@ def test_maintain_leases_ack_ids(): def test_maintain_leases_no_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) leaser_.maintain_leases() @@ -151,7 +157,7 @@ def test_maintain_leases_no_ack_ids(): def test_maintain_leases_outdated_items(time): manager = create_manager() leaser_ = leaser.Leaser(manager) - make_sleep_mark_manager_as_inactive(leaser_) + make_sleep_mark_event_as_done(leaser_) # Add and start expiry timer at the beginning of the timeline. 
time.return_value = 0 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 2ed1ea55ad57..82a6719d74c3 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -15,6 +15,7 @@ import concurrent.futures import queue import threading +import time import mock @@ -38,19 +39,89 @@ def test_constructor_options(): assert scheduler_._executor == mock.sentinel.executor -def test_schedule(): +def test_schedule_executes_submitted_items(): called_with = [] - called = threading.Event() + callback_done_twice = threading.Barrier(3) # 3 == 2x callback + 1x main thread def callback(*args, **kwargs): - called_with.append((args, kwargs)) - called.set() + called_with.append((args, kwargs)) # appends are thread-safe + callback_done_twice.wait() scheduler_ = scheduler.ThreadScheduler() scheduler_.schedule(callback, "arg1", kwarg1="meep") + scheduler_.schedule(callback, "arg2", kwarg2="boop") - called.wait() - scheduler_.shutdown() + callback_done_twice.wait(timeout=3.0) + result = scheduler_.shutdown() - assert called_with == [(("arg1",), {"kwarg1": "meep"})] + assert result == [] # no scheduled items dropped + + expected_calls = [(("arg1",), {"kwarg1": "meep"}), (("arg2",), {"kwarg2": "boop"})] + assert sorted(called_with) == expected_calls + + +def test_shutdown_nonblocking_by_default(): + called_with = [] + at_least_one_called = threading.Event() + at_least_one_completed = threading.Event() + + def callback(message): + called_with.append(message) # appends are thread-safe + at_least_one_called.set() + time.sleep(1.0) + at_least_one_completed.set() + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + 
scheduler_.schedule(callback, "message_2") + + at_least_one_called.wait() + dropped = scheduler_.shutdown() + + assert len(called_with) == 1 + assert called_with[0] in {"message_1", "message_2"} + + assert len(dropped) == 1 + assert dropped[0] in {"message_1", "message_2"} + assert dropped[0] != called_with[0] # the dropped message was not the processed one + + err_msg = ( + "Shutdown should not have waited " + "for the already running callbacks to complete." + ) + assert not at_least_one_completed.is_set(), err_msg + + +def test_shutdown_blocking_awaits_running_callbacks(): + called_with = [] + at_least_one_called = threading.Event() + at_least_one_completed = threading.Event() + + def callback(message): + called_with.append(message) # appends are thread-safe + at_least_one_called.set() + time.sleep(1.0) + at_least_one_completed.set() + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + scheduler_.schedule(callback, "message_2") + + at_least_one_called.wait() + dropped = scheduler_.shutdown(await_msg_callbacks=True) + + assert len(called_with) == 1 + assert called_with[0] in {"message_1", "message_2"} + + # The work items that have not been started yet should still be dropped. + assert len(dropped) == 1 + assert dropped[0] in {"message_1", "message_2"} + assert dropped[0] != called_with[0] # the dropped message was not the processed one + + err_msg = "Shutdown did not wait for the already running callbacks to complete." 
+ assert at_least_one_completed.is_set(), err_msg diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 242c0804ac58..9930e8f14e09 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import functools import logging import threading import time @@ -372,7 +373,6 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): def test_send_unary(): manager = make_manager() - manager._UNARY_REQUESTS = True manager.send( gapic_types.StreamingPullRequest( @@ -405,7 +405,6 @@ def test_send_unary(): def test_send_unary_empty(): manager = make_manager() - manager._UNARY_REQUESTS = True manager.send(gapic_types.StreamingPullRequest()) @@ -417,7 +416,6 @@ def test_send_unary_api_call_error(caplog): caplog.set_level(logging.DEBUG) manager = make_manager() - manager._UNARY_REQUESTS = True error = exceptions.GoogleAPICallError("The front fell off") manager._client.acknowledge.side_effect = error @@ -431,7 +429,6 @@ def test_send_unary_retry_error(caplog): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() - manager._UNARY_REQUESTS = True error = exceptions.RetryError( "Too long a transient error", cause=Exception("Out of time!") @@ -445,24 +442,15 @@ def test_send_unary_retry_error(caplog): assert "signaled streaming pull manager shutdown" in caplog.text -def test_send_streaming(): - manager = make_manager() - manager._UNARY_REQUESTS = False - manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) - - manager.send(mock.sentinel.request) - - manager._rpc.send.assert_called_once_with(mock.sentinel.request) - - def 
test_heartbeat(): manager = make_manager() manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager._rpc.is_active = True - manager.heartbeat() + result = manager.heartbeat() manager._rpc.send.assert_called_once_with(gapic_types.StreamingPullRequest()) + assert result def test_heartbeat_inactive(): @@ -472,7 +460,8 @@ def test_heartbeat_inactive(): manager.heartbeat() - manager._rpc.send.assert_not_called() + result = manager._rpc.send.assert_not_called() + assert not result @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) @@ -543,8 +532,8 @@ def test_open_has_been_closed(): manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) -def make_running_manager(): - manager = make_manager() +def make_running_manager(**kwargs): + manager = make_manager(**kwargs) manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_active = True manager._dispatcher = mock.create_autospec(dispatcher.Dispatcher, instance=True) @@ -632,14 +621,14 @@ def _do_work(self): while not self._stop: try: self._manager.leaser.add([mock.Mock()]) - except Exception as exc: + except Exception as exc: # pragma: NO COVER self._error_callback(exc) time.sleep(0.1) # also try to interact with the leaser after the stop flag has been set try: self._manager.leaser.remove([mock.Mock()]) - except Exception as exc: + except Exception as exc: # pragma: NO COVER self._error_callback(exc) @@ -666,6 +655,45 @@ def test_close_callbacks(): callback.assert_called_once_with(manager, "meep") +def test_close_blocking_scheduler_shutdown(): + manager, _, _, _, _, _ = make_running_manager(await_callbacks_on_shutdown=True) + scheduler = manager._scheduler + + manager.close() + + scheduler.shutdown.assert_called_once_with(await_msg_callbacks=True) + + +def test_close_nonblocking_scheduler_shutdown(): + manager, _, _, _, _, _ = make_running_manager(await_callbacks_on_shutdown=False) + scheduler = manager._scheduler + + manager.close() 
+ + scheduler.shutdown.assert_called_once_with(await_msg_callbacks=False) + + +def test_close_nacks_internally_queued_messages(): + nacked_messages = [] + + def fake_nack(self): + nacked_messages.append(self.data) + + MockMsg = functools.partial(mock.create_autospec, message.Message, instance=True) + messages = [MockMsg(data=b"msg1"), MockMsg(data=b"msg2"), MockMsg(data=b"msg3")] + for msg in messages: + msg.nack = stdlib_types.MethodType(fake_nack, msg) + + manager, _, _, _, _, _ = make_running_manager() + dropped_by_scheduler = messages[:2] + manager._scheduler.shutdown.return_value = dropped_by_scheduler + manager._messages_on_hold._messages_on_hold.append(messages[2]) + + manager.close() + + assert sorted(nacked_messages) == [b"msg1", b"msg2", b"msg3"] + + def test__get_initial_request(): manager = make_manager() manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) @@ -960,7 +988,7 @@ def test__on_rpc_done(thread): manager._on_rpc_done(mock.sentinel.error) thread.assert_called_once_with( - name=mock.ANY, target=manager.close, kwargs={"reason": mock.ANY} + name=mock.ANY, target=manager._shutdown, kwargs={"reason": mock.ANY} ) _, kwargs = thread.call_args reason = kwargs["kwargs"]["reason"] @@ -979,3 +1007,15 @@ def test_activate_ordering_keys(): manager._messages_on_hold.activate_ordering_keys.assert_called_once_with( ["key1", "key2"], mock.ANY ) + + +def test_activate_ordering_keys_stopped_scheduler(): + manager = make_manager() + manager._messages_on_hold = mock.create_autospec( + messages_on_hold.MessagesOnHold, instance=True + ) + manager._scheduler = None + + manager.activate_ordering_keys(["key1", "key2"]) + + manager._messages_on_hold.activate_ordering_keys.assert_not_called() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 780c20de4da2..6dad4b12a1b8 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -172,12 +172,14 @@ def test_subscribe_options(manager_open): callback=mock.sentinel.callback, flow_control=flow_control, scheduler=scheduler, + await_callbacks_on_shutdown=mock.sentinel.await_callbacks, ) assert isinstance(future, futures.StreamingPullFuture) assert future._manager._subscription == "sub_name_a" assert future._manager.flow_control == flow_control assert future._manager._scheduler == scheduler + assert future._manager._await_callbacks_on_shutdown is mock.sentinel.await_callbacks manager_open.assert_called_once_with( mock.ANY, callback=mock.sentinel.callback, From 96f302e349df2f6ea0e7fafcbdf97333d5de7ecd Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 30 Mar 2021 18:39:49 +0200 Subject: [PATCH 0643/1197] chore: release 2.4.1 (#347) * chore: release 2.4.1 * Cleanup release notes. * Update CHANGELOG.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 21 +++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 22 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index eac6b55b35f5..480676a0c2a7 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,27 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.4.1](https://www.github.com/googleapis/python-pubsub/compare/v2.4.0...v2.4.1) (2021-03-30) + +### Bug Fixes + +* Move `await_msg_callbacks` flag to `subscribe()` method, fixing a regression in Pub/Sub Lite client. 
+ ([#320](https://www.github.com/googleapis/python-pubsub/issues/320)) ([d40d027](https://www.github.com/googleapis/python-pubsub/commit/d40d02713c8c189937ae5c21d099b88a3131a59f)) +* SSL error when using the client with the emulator. ([#297](https://www.github.com/googleapis/python-pubsub/issues/297)) ([83db672](https://www.github.com/googleapis/python-pubsub/commit/83db67239d3521457138699109f766d574a0a2c4)) + +### Implementation Changes + +* (samples) Bump the max_time to 10 minutes for a flaky test. ([#311](https://www.github.com/googleapis/python-pubsub/issues/311)) ([e2678d4](https://www.github.com/googleapis/python-pubsub/commit/e2678d47c08e6b03782d2d744a4e630b933fdd51)), closes [#291](https://www.github.com/googleapis/python-pubsub/issues/291) +* (samples) Mark delivery attempts test as flaky. ([#326](https://www.github.com/googleapis/python-pubsub/issues/326)) ([5a97ef1](https://www.github.com/googleapis/python-pubsub/commit/5a97ef1bb7512fe814a8f72a43b3e9698434cd8d)) +* (samples) Mitigate flakiness in subscriber_tests. ([#304](https://www.github.com/googleapis/python-pubsub/issues/304)) ([271a385](https://www.github.com/googleapis/python-pubsub/commit/271a3856d835967f18f6becdae5ad53d585d0ccf)) +* (samples) Retry `InternalServerError` in dead letter policy test. ([#329](https://www.github.com/googleapis/python-pubsub/issues/329)) ([34c9b11](https://www.github.com/googleapis/python-pubsub/commit/34c9b11ae697c280f32642c3101b7f7da971f589)), closes [#321](https://www.github.com/googleapis/python-pubsub/issues/321) + +### Documentation + +* Remove EXPERIMENTAL tag for ordering keys in `types.py`. ([#323](https://www.github.com/googleapis/python-pubsub/issues/323)) ([659cd7a](https://www.github.com/googleapis/python-pubsub/commit/659cd7ae2784245d4217fbc722dac04bd3222d32)) +* Remove EXPERIMENTAL tag from `Schema` service (via synth). 
([#307](https://www.github.com/googleapis/python-pubsub/issues/307)) ([ad85202](https://www.github.com/googleapis/python-pubsub/commit/ad852028836520db779c5cc33689ffd7e5458a7d)) + + ## 2.4.0 02-22-2021 05:02 PST diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8658542f46ef..3b2fa8450073 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.4.0" +version = "2.4.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 14766225e29472d43317059954bc0661f90fea68 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 30 Mar 2021 19:22:52 +0200 Subject: [PATCH 0644/1197] chore(deps): update dependency avro to v1.10.2 (#334) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index cee03a6a771c..b78dbc486d2f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.3.0 -avro==1.10.1 +avro==1.10.2 From 82e93a9393ea770c6d97e0c6ac7d66c315f23551 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Tue, 30 Mar 2021 16:51:08 -0400 Subject: [PATCH 0645/1197] fix: Remove EXPERIMENTAL tag for ordering keys in publisher/client.py (#324) * Remove EXPERIMENTAL tag for ordering keys in publisher/client.py * fix(sample): mark test flaky (#326) * fix(sample): retry InternalServerError (#329) fixes #321 Co-authored-by: Tianzi Cai Co-authored-by: Takashi Matsuo --- .../google/cloud/pubsub_v1/publisher/client.py | 2 -- 1 file changed, 2 deletions(-) diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index cf69a46be6f9..d3efc317d0f6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -269,8 +269,6 @@ def publish( ordering_key: A string that identifies related messages for which publish order should be respected. Message ordering must be enabled for this client to use this feature. - EXPERIMENTAL: This feature is currently available in a closed - alpha. Please contact the Cloud Pub/Sub team to use it. retry (Optional[google.api_core.retry.Retry]): Designation of what errors, if any, should be retried. If `ordering_key` is specified, the total retry deadline will be changed to "infinity". From 50f0f6c4d655083e500b1dfaad17069588c8f5c0 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 31 Mar 2021 19:25:24 +0200 Subject: [PATCH 0646/1197] chore: emit a warning if return_immediately is set (#355) This flag is deprecated and should always be set to `False` when pulling messages synchronously. 
--- .../services/subscriber/async_client.py | 7 +++ .../pubsub_v1/services/subscriber/client.py | 7 +++ packages/google-cloud-pubsub/synth.py | 31 ++++++++++++- .../subscriber/test_subscriber_client.py | 43 +++++++++++++++++++ 4 files changed, 87 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 0f77f314b50f..2801b92a0745 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -28,6 +28,7 @@ Type, Union, ) +import warnings import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore @@ -935,6 +936,12 @@ async def pull( if max_messages is not None: request.max_messages = max_messages + if request.return_immediately: + warnings.warn( + "The return_immediately flag is deprecated and should be set to False.", + category=DeprecationWarning, + ) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 67956a53db8a..e2fbde7119f3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -31,6 +31,7 @@ Type, Union, ) +import warnings import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -1124,6 +1125,12 @@ def pull( if max_messages is not None: request.max_messages = max_messages + if request.return_immediately: + warnings.warn( + "The return_immediately flag is deprecated and should be set to False.", + category=DeprecationWarning, + ) + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.pull] diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 2c0bc560735f..c2140f369d62 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -76,7 +76,7 @@ flags=re.MULTILINE | re.DOTALL, ) -if count < 18: +if count < 15: raise Exception("Expected replacements for gRPC channel options not made.") # If the emulator is used, force an insecure gRPC channel to avoid SSL errors. @@ -141,6 +141,35 @@ \g<0>""", ) +# Emit deprecation warning if return_immediately flag is set with synchronous pull. +count = s.replace( + "google/pubsub_v1/services/subscriber/*client.py", + r"import pkg_resources", + "import warnings\n\g<0>", +) +count = s.replace( + "google/pubsub_v1/services/subscriber/*client.py", + r""" + ([^\n\S]+(?:async\ )?def\ pull\(.*?->\ pubsub\.PullResponse:.*?) 
+ ((?P[^\n\S]+)\#\ Wrap\ the\ RPC\ method) + """, + textwrap.dedent( + """ + \g<1> + \gif request.return_immediately: + \g warnings.warn( + \g "The return_immediately flag is deprecated and should be set to False.", + \g category=DeprecationWarning, + \g ) + + \g<2>""" + ), + flags=re.MULTILINE | re.DOTALL | re.VERBOSE, +) + +if count != 2: + raise Exception("Too many or too few replacements in pull() methods.") + # Make sure that client library version is present in user agent header. s.replace( [ diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 6dad4b12a1b8..79bd213e779e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -12,9 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import warnings + from google.auth import credentials import grpc import mock +import pytest from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY from google.cloud.pubsub_v1 import subscriber @@ -217,3 +220,43 @@ def test_streaming_pull_gapic_monkeypatch(): transport = client.api._transport assert hasattr(transport.streaming_pull, "_prefetch_first_result_") assert not transport.streaming_pull._prefetch_first_result_ + + +def test_sync_pull_warning_if_return_immediately(): + client = subscriber.Client() + subscription_path = "projects/foo/subscriptions/bar" + + with mock.patch.object( + client.api._transport, "_wrapped_methods" + ), warnings.catch_warnings(record=True) as warned: + client.pull(subscription=subscription_path, return_immediately=True) + + # Setting the deprecated return_immediately flag to True should emit a warning. 
+ assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "return_immediately" in warning_msg + assert "deprecated" in warning_msg + + +@pytest.mark.asyncio +async def test_sync_pull_warning_if_return_immediately_async(): + from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient + + client = SubscriberAsyncClient() + subscription_path = "projects/foo/subscriptions/bar" + + patcher = mock.patch( + "google.pubsub_v1.services.subscriber.async_client.gapic_v1.method_async.wrap_method", + new=mock.AsyncMock, + ) + + with patcher, warnings.catch_warnings(record=True) as warned: + await client.pull(subscription=subscription_path, return_immediately=True) + + # Setting the deprecated return_immediately flag to True should emit a warning. + assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "return_immediately" in warning_msg + assert "deprecated" in warning_msg From ca6134d8e73dd67a2cc6e2642c62c2b6101aad93 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 31 Mar 2021 19:26:09 +0200 Subject: [PATCH 0647/1197] chore(deps): update dependency google-cloud-pubsub to v2.4.1 (#356) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index b78dbc486d2f..3ef4481069e2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.3.0 +google-cloud-pubsub==2.4.1 avro==1.10.2 From 3e1ed132390bfa85609af36419f8a8caf5d0f9f0 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 2 Apr 2021 00:57:47 +0200 Subject: [PATCH 0648/1197] docs: fix `create_topic()` call in 
README (#360) --- packages/google-cloud-pubsub/README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 9db987f2e292..945c0beb10e1 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -110,7 +110,7 @@ messages to it project_id=os.getenv('GOOGLE_CLOUD_PROJECT'), topic='MY_TOPIC_NAME', # Set this to something appropriate. ) - publisher.create_topic(topic_name) + publisher.create_topic(name=topic_name) future = publisher.publish(topic_name, b'My first message!', spam='eggs') future.result() From afec81e9d2e79e34aea5955a8dcba97d1253d2dd Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 2 Apr 2021 09:26:10 +0200 Subject: [PATCH 0649/1197] chore: regenerate GAPIC layer with latest changes (#345) --- .../.kokoro/test-samples-against-head.sh | 2 +- .../google/pubsub_v1/__init__.py | 4 +- .../services/publisher/async_client.py | 9 + .../services/publisher/transports/base.py | 27 ++- .../services/publisher/transports/grpc.py | 102 ++++------ .../publisher/transports/grpc_asyncio.py | 110 ++++------- .../schema_service/transports/base.py | 18 +- .../schema_service/transports/grpc.py | 102 ++++------ .../schema_service/transports/grpc_asyncio.py | 110 ++++------- .../services/subscriber/async_client.py | 16 ++ .../services/subscriber/transports/base.py | 34 +++- .../services/subscriber/transports/grpc.py | 102 ++++------ .../subscriber/transports/grpc_asyncio.py | 110 ++++------- .../google/pubsub_v1/types/__init__.py | 176 +++++++++--------- packages/google-cloud-pubsub/noxfile.py | 3 + packages/google-cloud-pubsub/renovate.json | 3 +- packages/google-cloud-pubsub/synth.metadata | 125 +------------ 17 files changed, 414 insertions(+), 639 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh index 
8f2b723fc7ba..18e4ef2133d1 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index fce58c7ad522..d5a1de488ff7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -110,7 +110,6 @@ "ModifyPushConfigRequest", "PublishRequest", "PublishResponse", - "PublisherClient", "PubsubMessage", "PullRequest", "PullResponse", @@ -118,6 +117,7 @@ "ReceivedMessage", "RetryPolicy", "Schema", + "SchemaServiceClient", "SchemaSettings", "SchemaView", "SeekRequest", @@ -135,5 +135,5 @@ "ValidateMessageResponse", "ValidateSchemaRequest", "ValidateSchemaResponse", - "SchemaServiceClient", + "PublisherClient", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index d2752db2eb8d..d644364b2c7a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -238,6 +238,7 @@ async def create_topic( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -293,6 +294,7 @@ async def update_topic( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -390,6 +392,7 @@ async def publish( 
exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -470,6 +473,7 @@ async def get_topic( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -554,6 +558,7 @@ async def list_topics( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -647,6 +652,7 @@ async def list_topic_subscriptions( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -744,6 +750,7 @@ async def list_topic_snapshots( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -828,6 +835,7 @@ async def delete_topic( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -888,6 +896,7 @@ async def detach_subscription( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index a8d07de3ffc4..b1111a8411ba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -75,10 +75,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -86,6 +86,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -95,20 +98,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -119,6 +119,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -130,6 +131,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -149,6 +151,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -164,6 +167,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -179,6 +183,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -194,6 +199,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -209,6 +215,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -220,6 +227,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -231,6 +239,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 670a08bb7eed..0b9ddc8e2bb6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -110,7 +110,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -118,71 +120,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -191,17 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -215,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index ea6e0483710d..6a3a096f0805 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -65,7 +65,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If @@ -143,10 +143,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -155,7 +155,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -163,71 +165,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. 
The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -236,17 +217,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index bb7528cb4abd..fec2169ae016 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -76,10 +76,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. 
""" # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -87,6 +87,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -96,20 +99,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 51fff0e85103..ed9822e0174e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -110,7 +110,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -118,71 +120,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -191,17 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -215,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index d8c7cac097a8..c7cb3ac6396c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -65,7 +65,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -143,10 +143,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you're developing your own client library. Raises: @@ -155,7 +155,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -163,71 +165,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -236,17 +217,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 2801b92a0745..7dbc3c5edc98 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -324,6 +324,7 @@ async def create_subscription( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -405,6 +406,7 @@ async def get_subscription( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -464,6 +466,7 @@ async def update_subscription( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -551,6 +554,7 @@ async def list_subscriptions( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ 
-635,6 +639,7 @@ async def delete_subscription( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -742,6 +747,7 @@ async def modify_ack_deadline( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -835,6 +841,7 @@ async def acknowledge( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -955,6 +962,7 @@ async def pull( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1026,6 +1034,7 @@ def streaming_pull( exceptions.ResourceExhausted, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=DEFAULT_CLIENT_INFO, @@ -1114,6 +1123,7 @@ async def modify_push_config( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1207,6 +1217,7 @@ async def get_snapshot( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1296,6 +1307,7 @@ async def list_snapshots( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1426,6 +1438,7 @@ async def create_snapshot( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1493,6 +1506,7 @@ async def update_snapshot( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), 
default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1577,6 +1591,7 @@ async def delete_snapshot( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, @@ -1642,6 +1657,7 @@ async def seek( exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 8bd0d5a19b90..b8a1b97b39bb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -75,10 +75,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -86,6 +86,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: @@ -95,20 +98,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -123,6 +123,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -138,6 +139,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -149,6 +151,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -164,6 +167,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -175,6 +179,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -186,6 +191,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, 
client_info=client_info, @@ -197,6 +203,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -212,6 +219,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -229,6 +237,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ResourceExhausted, exceptions.ServiceUnavailable, ), + deadline=900.0, ), default_timeout=900.0, client_info=client_info, @@ -240,6 +249,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -255,6 +265,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -270,6 +281,7 @@ def _prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -281,6 +293,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -292,6 +305,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -303,6 +317,7 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, @@ -318,6 +333,7 @@ def 
_prep_wrapped_messages(self, client_info): exceptions.ServiceUnavailable, exceptions.Unknown, ), + deadline=60.0, ), default_timeout=60.0, client_info=client_info, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 83815049e1ef..b3f26f1f0828 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -112,7 +112,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -120,71 +122,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -193,17 +174,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -217,7 +189,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index d5efabf2fd52..bc385d317d4a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -67,7 +67,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. 
These credentials identify this application to the service. If @@ -145,10 +145,10 @@ def __init__( ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -157,7 +157,9 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} if api_mtls_endpoint: warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) @@ -165,71 +167,50 @@ def __init__( warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - ssl_credentials = SslCredentials().ssl_credentials - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ], - ) - self._ssl_channel_credentials = ssl_credentials else: - host = host if ":" in host else host + ":443" + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. 
The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, + scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -238,17 +219,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index afff7e6df285..2894f6668160 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -15,122 +15,122 @@ # limitations under the License. 
# -from .schema import ( - Schema, - CreateSchemaRequest, - GetSchemaRequest, - ListSchemasRequest, - ListSchemasResponse, - DeleteSchemaRequest, - ValidateSchemaRequest, - ValidateSchemaResponse, - ValidateMessageRequest, - ValidateMessageResponse, - SchemaView, - Encoding, -) from .pubsub import ( - MessageStoragePolicy, - SchemaSettings, - Topic, - PubsubMessage, - GetTopicRequest, - UpdateTopicRequest, - PublishRequest, - PublishResponse, - ListTopicsRequest, - ListTopicsResponse, - ListTopicSubscriptionsRequest, - ListTopicSubscriptionsResponse, - ListTopicSnapshotsRequest, - ListTopicSnapshotsResponse, + AcknowledgeRequest, + CreateSnapshotRequest, + DeadLetterPolicy, + DeleteSnapshotRequest, + DeleteSubscriptionRequest, DeleteTopicRequest, DetachSubscriptionRequest, DetachSubscriptionResponse, - Subscription, - RetryPolicy, - DeadLetterPolicy, ExpirationPolicy, - PushConfig, - ReceivedMessage, + GetSnapshotRequest, GetSubscriptionRequest, - UpdateSubscriptionRequest, + GetTopicRequest, + ListSnapshotsRequest, + ListSnapshotsResponse, ListSubscriptionsRequest, ListSubscriptionsResponse, - DeleteSubscriptionRequest, + ListTopicSnapshotsRequest, + ListTopicSnapshotsResponse, + ListTopicsRequest, + ListTopicsResponse, + ListTopicSubscriptionsRequest, + ListTopicSubscriptionsResponse, + MessageStoragePolicy, + ModifyAckDeadlineRequest, ModifyPushConfigRequest, + PublishRequest, + PublishResponse, + PubsubMessage, PullRequest, PullResponse, - ModifyAckDeadlineRequest, - AcknowledgeRequest, + PushConfig, + ReceivedMessage, + RetryPolicy, + SchemaSettings, + SeekRequest, + SeekResponse, + Snapshot, StreamingPullRequest, StreamingPullResponse, - CreateSnapshotRequest, + Subscription, + Topic, UpdateSnapshotRequest, - Snapshot, - GetSnapshotRequest, - ListSnapshotsRequest, - ListSnapshotsResponse, - DeleteSnapshotRequest, - SeekRequest, - SeekResponse, + UpdateSubscriptionRequest, + UpdateTopicRequest, +) +from .schema import ( + CreateSchemaRequest, + 
DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + Schema, + ValidateMessageRequest, + ValidateMessageResponse, + ValidateSchemaRequest, + ValidateSchemaResponse, + Encoding, + SchemaView, ) __all__ = ( - "Schema", - "CreateSchemaRequest", - "GetSchemaRequest", - "ListSchemasRequest", - "ListSchemasResponse", - "DeleteSchemaRequest", - "ValidateSchemaRequest", - "ValidateSchemaResponse", - "ValidateMessageRequest", - "ValidateMessageResponse", - "SchemaView", - "Encoding", - "MessageStoragePolicy", - "SchemaSettings", - "Topic", - "PubsubMessage", - "GetTopicRequest", - "UpdateTopicRequest", - "PublishRequest", - "PublishResponse", - "ListTopicsRequest", - "ListTopicsResponse", - "ListTopicSubscriptionsRequest", - "ListTopicSubscriptionsResponse", - "ListTopicSnapshotsRequest", - "ListTopicSnapshotsResponse", + "AcknowledgeRequest", + "CreateSnapshotRequest", + "DeadLetterPolicy", + "DeleteSnapshotRequest", + "DeleteSubscriptionRequest", "DeleteTopicRequest", "DetachSubscriptionRequest", "DetachSubscriptionResponse", - "Subscription", - "RetryPolicy", - "DeadLetterPolicy", "ExpirationPolicy", - "PushConfig", - "ReceivedMessage", + "GetSnapshotRequest", "GetSubscriptionRequest", - "UpdateSubscriptionRequest", + "GetTopicRequest", + "ListSnapshotsRequest", + "ListSnapshotsResponse", "ListSubscriptionsRequest", "ListSubscriptionsResponse", - "DeleteSubscriptionRequest", + "ListTopicSnapshotsRequest", + "ListTopicSnapshotsResponse", + "ListTopicsRequest", + "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", + "MessageStoragePolicy", + "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", + "PublishRequest", + "PublishResponse", + "PubsubMessage", "PullRequest", "PullResponse", - "ModifyAckDeadlineRequest", - "AcknowledgeRequest", + "PushConfig", + "ReceivedMessage", + "RetryPolicy", + "SchemaSettings", + "SeekRequest", + "SeekResponse", + "Snapshot", "StreamingPullRequest", 
"StreamingPullResponse", - "CreateSnapshotRequest", + "Subscription", + "Topic", "UpdateSnapshotRequest", - "Snapshot", - "GetSnapshotRequest", - "ListSnapshotsRequest", - "ListSnapshotsResponse", - "DeleteSnapshotRequest", - "SeekRequest", - "SeekResponse", + "UpdateSubscriptionRequest", + "UpdateTopicRequest", + "CreateSchemaRequest", + "DeleteSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "Schema", + "ValidateMessageRequest", + "ValidateMessageResponse", + "ValidateSchemaRequest", + "ValidateSchemaResponse", + "Encoding", + "SchemaView", ) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index eae1a6458700..de9e95dca1b6 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -41,6 +41,9 @@ "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json index 4fa949311b20..f08bc22c9a55 100644 --- a/packages/google-cloud-pubsub/renovate.json +++ b/packages/google-cloud-pubsub/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index a0d4cbdb6cb1..aac4bdee0c3e 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,37 +3,37 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "6e43f4ca8411c6625bac1720172807632dc4b500" + "remote": "git@github.com:plamut/python-pubsub.git", + "sha": "0a662a6daad0517fb0e01732c9dd7f9d1852924c" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": 
"3befd26ca55723d3e8111909331eac1249837987", - "internalRef": "360805639" + "sha": "6598bb829c9e9a534be674649ffd1b4671a821f9", + "internalRef": "364449524" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" + "sha": "c84c8f156e09702e1c8946bfb9746e6f5892cf27" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" + "sha": "c84c8f156e09702e1c8946bfb9746e6f5892cf27" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0780323da96d5a53925fe0547757181fe76e8f1e" + "sha": "c84c8f156e09702e1c8946bfb9746e6f5892cf27" } } ], @@ -47,116 +47,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", 
- ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/multiprocessing.rst", - "google/cloud/pubsub_v1/proto/pubsub.proto", - "google/cloud/pubsub_v1/proto/schema.proto", - "google/pubsub/__init__.py", - "google/pubsub/py.typed", - "google/pubsub_v1/__init__.py", - "google/pubsub_v1/py.typed", - "google/pubsub_v1/services/__init__.py", - "google/pubsub_v1/services/publisher/__init__.py", - "google/pubsub_v1/services/publisher/async_client.py", - "google/pubsub_v1/services/publisher/client.py", - "google/pubsub_v1/services/publisher/pagers.py", - "google/pubsub_v1/services/publisher/transports/__init__.py", - "google/pubsub_v1/services/publisher/transports/base.py", - "google/pubsub_v1/services/publisher/transports/grpc.py", - "google/pubsub_v1/services/publisher/transports/grpc_asyncio.py", - "google/pubsub_v1/services/schema_service/__init__.py", - "google/pubsub_v1/services/schema_service/async_client.py", - "google/pubsub_v1/services/schema_service/client.py", - "google/pubsub_v1/services/schema_service/pagers.py", - "google/pubsub_v1/services/schema_service/transports/__init__.py", - "google/pubsub_v1/services/schema_service/transports/base.py", - "google/pubsub_v1/services/schema_service/transports/grpc.py", - "google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py", - "google/pubsub_v1/services/subscriber/__init__.py", - 
"google/pubsub_v1/services/subscriber/async_client.py", - "google/pubsub_v1/services/subscriber/client.py", - "google/pubsub_v1/services/subscriber/pagers.py", - "google/pubsub_v1/services/subscriber/transports/__init__.py", - "google/pubsub_v1/services/subscriber/transports/base.py", - "google/pubsub_v1/services/subscriber/transports/grpc.py", - "google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py", - "google/pubsub_v1/types/__init__.py", - "google/pubsub_v1/types/pubsub.py", - "google/pubsub_v1/types/schema.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_pubsub_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/pubsub_v1/__init__.py", - "tests/unit/gapic/pubsub_v1/test_publisher.py", - "tests/unit/gapic/pubsub_v1/test_schema_service.py", - "tests/unit/gapic/pubsub_v1/test_subscriber.py" ] } \ No newline at end of file From ff5028a7c420c436fc8d71146d30f08ebf28e313 Mon Sep 17 00:00:00 2001 From: Jim Fulton Date: Tue, 6 Apr 2021 15:35:05 -0600 Subject: [PATCH 0650/1197] chore: pass explicit credentials in all unit tests creating clients (#369) * Removed duplicated tests * Pass mock credentials. To allow unit tests to run without connecting to the google APIs. * Used a fixture to be DRY wrt test credentials. * Used a fixture to be DRY wrt test credentials. * Used a fixture to be DRY wrt test credentials. * Document the fixture. 
--- .../tests/unit/pubsub_v1/conftest.py | 12 +++ .../publisher/test_publisher_client.py | 95 ++++++------------- .../subscriber/test_subscriber_client.py | 55 ++++------- 3 files changed, 62 insertions(+), 100 deletions(-) create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py new file mode 100644 index 000000000000..2a7220e93304 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py @@ -0,0 +1,12 @@ +import google.auth.credentials +import mock +import pytest + + +@pytest.fixture +def creds(): + """ + Provide test creds to unit tests so that they can run without + GOOGLE_APPLICATION_CREDENTIALS set. + """ + yield mock.Mock(spec=google.auth.credentials.Credentials) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 71b432aa7551..0aacee5ee84e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -17,7 +17,6 @@ import inspect -from google.auth import credentials import grpc import mock @@ -51,8 +50,7 @@ def _assert_retries_equal(retry, retry2): assert inspect.getclosurevars(pred) == inspect.getclosurevars(pred2) -def test_init(): - creds = mock.Mock(spec=credentials.Credentials) +def test_init(creds): client = publisher.Client(credentials=creds) # A plain client should have an `api` (the underlying GAPIC) and a @@ -63,8 +61,7 @@ def test_init(): assert client.batch_settings.max_messages == 100 -def test_init_default_client_info(): - creds = mock.Mock(spec=credentials.Credentials) +def test_init_default_client_info(creds): client = publisher.Client(credentials=creds) installed_version = 
publisher.client.__version__ @@ -83,8 +80,8 @@ def test_init_default_client_info(): assert expected_client_info in user_agent -def test_init_w_custom_transport(): - transport = PublisherGrpcTransport() +def test_init_w_custom_transport(creds): + transport = PublisherGrpcTransport(credentials=creds) client = publisher.Client(transport=transport) # A plain client should have an `api` (the underlying GAPIC) and a @@ -96,9 +93,9 @@ def test_init_w_custom_transport(): assert client.batch_settings.max_messages == 100 -def test_init_w_api_endpoint(): +def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} - client = publisher.Client(client_options=client_options) + client = publisher.Client(client_options=client_options, credentials=creds) assert isinstance(client.api, publisher_client.PublisherClient) assert (client.api._transport.grpc_channel._channel.target()).decode( @@ -106,18 +103,8 @@ def test_init_w_api_endpoint(): ) == "testendpoint.google.com:443" -def test_init_w_unicode_api_endpoint(): - client_options = {"api_endpoint": "testendpoint.google.com"} - client = publisher.Client(client_options=client_options) - - assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api._transport.grpc_channel._channel.target()).decode( - "utf-8" - ) == "testendpoint.google.com:443" - - -def test_init_w_empty_client_options(): - client = publisher.Client(client_options={}) +def test_init_w_empty_client_options(creds): + client = publisher.Client(client_options={}, credentials=creds) assert isinstance(client.api, publisher_client.PublisherClient) assert (client.api._transport.grpc_channel._channel.target()).decode( @@ -164,8 +151,7 @@ def test_init_emulator(monkeypatch): assert channel.target().decode("utf8") == "/foo/bar:123" -def test_message_ordering_enabled(): - creds = mock.Mock(spec=credentials.Credentials) +def test_message_ordering_enabled(creds): client = publisher.Client(credentials=creds) assert not 
client._enable_message_ordering @@ -176,8 +162,7 @@ def test_message_ordering_enabled(): assert client._enable_message_ordering -def test_publish(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish(creds): client = publisher.Client(credentials=creds) future1 = mock.sentinel.future1 @@ -212,8 +197,7 @@ def test_publish(): ) -def test_publish_error_exceeding_flow_control_limits(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_error_exceeding_flow_control_limits(creds): publisher_options = types.PublisherOptions( flow_control=types.PublishFlowControl( message_limit=10, @@ -235,8 +219,7 @@ def test_publish_error_exceeding_flow_control_limits(): future2.result() -def test_publish_data_not_bytestring_error(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_data_not_bytestring_error(creds): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(TypeError): @@ -245,16 +228,14 @@ def test_publish_data_not_bytestring_error(): client.publish(topic, 42) -def test_publish_message_ordering_not_enabled_error(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_message_ordering_not_enabled_error(creds): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(ValueError): client.publish(topic, b"bytestring body", ordering_key="ABC") -def test_publish_empty_ordering_key_when_message_ordering_enabled(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_empty_ordering_key_when_message_ordering_enabled(creds): client = publisher.Client( publisher_options=types.PublisherOptions(enable_message_ordering=True), credentials=creds, @@ -263,8 +244,7 @@ def test_publish_empty_ordering_key_when_message_ordering_enabled(): assert client.publish(topic, b"bytestring body", ordering_key="") is not None -def test_publish_with_ordering_key_uses_extended_retry_deadline(): - creds = mock.Mock(spec=credentials.Credentials) +def 
test_publish_with_ordering_key_uses_extended_retry_deadline(creds): client = publisher.Client( credentials=creds, publisher_options=types.PublisherOptions(enable_message_ordering=True), @@ -303,8 +283,7 @@ def test_publish_with_ordering_key_uses_extended_retry_deadline(): _assert_retries_equal(batch_commit_retry, expected_retry) -def test_publish_attrs_bytestring(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_attrs_bytestring(creds): client = publisher.Client(credentials=creds) # Use a mock in lieu of the actual batch class. @@ -325,8 +304,7 @@ def test_publish_attrs_bytestring(): ) -def test_publish_new_batch_needed(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_new_batch_needed(creds): client = publisher.Client(credentials=creds) # Use mocks in lieu of the actual batch class. @@ -365,16 +343,14 @@ def test_publish_new_batch_needed(): batch2.publish.assert_called_once_with(message_pb) -def test_publish_attrs_type_error(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_attrs_type_error(creds): client = publisher.Client(credentials=creds) topic = "topic/path" with pytest.raises(TypeError): client.publish(topic, b"foo", answer=42) -def test_stop(): - creds = mock.Mock(spec=credentials.Credentials) +def test_stop(creds): client = publisher.Client(credentials=creds) batch1 = mock.Mock(spec=client._batch_class) @@ -395,8 +371,7 @@ def test_stop(): client.stop() -def test_gapic_instance_method(): - creds = mock.Mock(spec=credentials.Credentials) +def test_gapic_instance_method(creds): client = publisher.Client(credentials=creds) transport_mock = mock.Mock(create_topic=mock.sentinel) @@ -432,15 +407,13 @@ def test_class_method_factory(): assert isinstance(client, publisher.Client) -def test_gapic_class_method_on_instance(): - creds = mock.Mock(spec=credentials.Credentials) +def test_gapic_class_method_on_instance(creds): client = publisher.Client(credentials=creds) answer = client.topic_path("foo", 
"bar") assert answer == "projects/foo/topics/bar" -def test_commit_thread_created_on_publish(): - creds = mock.Mock(spec=credentials.Credentials) +def test_commit_thread_created_on_publish(creds): # Max latency is not infinite so a commit thread is created. batch_settings = types.BatchSettings(max_latency=600) client = publisher.Client(batch_settings=batch_settings, credentials=creds) @@ -463,8 +436,7 @@ def test_commit_thread_created_on_publish(): _start_commit_thread.assert_called_once() -def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(): - creds = mock.Mock(spec=credentials.Credentials) +def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(creds): # Max latency is infinite so a commit thread is not created. batch_settings = types.BatchSettings(max_latency=float("inf")) client = publisher.Client(batch_settings=batch_settings, credentials=creds) @@ -473,8 +445,7 @@ def test_commit_thread_not_created_on_publish_if_max_latency_is_inf(): assert client._commit_thread is None -def test_wait_and_commit_sequencers(): - creds = mock.Mock(spec=credentials.Credentials) +def test_wait_and_commit_sequencers(creds): # Max latency is infinite so a commit thread is not created. # We don't want a commit thread to interfere with this test. batch_settings = types.BatchSettings(max_latency=float("inf")) @@ -492,8 +463,7 @@ def test_wait_and_commit_sequencers(): assert _commit_sequencers.call_count == 1 -def test_stopped_client_does_not_commit_sequencers(): - creds = mock.Mock(spec=credentials.Credentials) +def test_stopped_client_does_not_commit_sequencers(creds): # Max latency is infinite so a commit thread is not created. # We don't want a commit thread to interfere with this test. 
batch_settings = types.BatchSettings(max_latency=float("inf")) @@ -515,8 +485,7 @@ def test_stopped_client_does_not_commit_sequencers(): assert _commit_sequencers.call_count == 0 -def test_publish_with_ordering_key(): - creds = mock.Mock(spec=credentials.Credentials) +def test_publish_with_ordering_key(creds): publisher_options = types.PublisherOptions(enable_message_ordering=True) client = publisher.Client(publisher_options=publisher_options, credentials=creds) @@ -555,8 +524,7 @@ def test_publish_with_ordering_key(): ) -def test_ordered_sequencer_cleaned_up(): - creds = mock.Mock(spec=credentials.Credentials) +def test_ordered_sequencer_cleaned_up(creds): # Max latency is infinite so a commit thread is not created. # We don't want a commit thread to interfere with this test. batch_settings = types.BatchSettings(max_latency=float("inf")) @@ -584,8 +552,7 @@ def test_ordered_sequencer_cleaned_up(): assert len(client._sequencers) == 0 -def test_resume_publish(): - creds = mock.Mock(spec=credentials.Credentials) +def test_resume_publish(creds): publisher_options = types.PublisherOptions(enable_message_ordering=True) client = publisher.Client(publisher_options=publisher_options, credentials=creds) @@ -598,8 +565,7 @@ def test_resume_publish(): assert sequencer.unpause.called_once() -def test_resume_publish_no_sequencer_found(): - creds = mock.Mock(spec=credentials.Credentials) +def test_resume_publish_no_sequencer_found(creds): publisher_options = types.PublisherOptions(enable_message_ordering=True) client = publisher.Client(publisher_options=publisher_options, credentials=creds) @@ -608,8 +574,7 @@ def test_resume_publish_no_sequencer_found(): client.resume_publish("topic", "ord_key") -def test_resume_publish_ordering_keys_not_enabled(): - creds = mock.Mock(spec=credentials.Credentials) +def test_resume_publish_ordering_keys_not_enabled(creds): publisher_options = types.PublisherOptions(enable_message_ordering=False) client = 
publisher.Client(publisher_options=publisher_options, credentials=creds) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 79bd213e779e..364417f25773 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -14,7 +14,6 @@ import warnings -from google.auth import credentials import grpc import mock import pytest @@ -27,14 +26,12 @@ from google.pubsub_v1.services.subscriber.transports.grpc import SubscriberGrpcTransport -def test_init(): - creds = mock.Mock(spec=credentials.Credentials) +def test_init(creds): client = subscriber.Client(credentials=creds) assert isinstance(client.api, subscriber_client.SubscriberClient) -def test_init_default_client_info(): - creds = mock.Mock(spec=credentials.Credentials) +def test_init_default_client_info(creds): client = subscriber.Client(credentials=creds) installed_version = subscriber.client.__version__ @@ -53,16 +50,16 @@ def test_init_default_client_info(): assert expected_client_info in user_agent -def test_init_w_custom_transport(): - transport = SubscriberGrpcTransport() +def test_init_w_custom_transport(creds): + transport = SubscriberGrpcTransport(credentials=creds) client = subscriber.Client(transport=transport) assert isinstance(client.api, subscriber_client.SubscriberClient) assert client.api._transport is transport -def test_init_w_api_endpoint(): +def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} - client = subscriber.Client(client_options=client_options) + client = subscriber.Client(client_options=client_options, credentials=creds) assert isinstance(client.api, subscriber_client.SubscriberClient) assert (client.api._transport.grpc_channel._channel.target()).decode( @@ -70,18 +67,8 @@ def 
test_init_w_api_endpoint(): ) == "testendpoint.google.com:443" -def test_init_w_unicode_api_endpoint(): - client_options = {"api_endpoint": "testendpoint.google.com"} - client = subscriber.Client(client_options=client_options) - - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api._transport.grpc_channel._channel.target()).decode( - "utf-8" - ) == "testendpoint.google.com:443" - - -def test_init_w_empty_client_options(): - client = subscriber.Client(client_options={}) +def test_init_w_empty_client_options(creds): + client = subscriber.Client(client_options={}, credentials=creds) assert isinstance(client.api, subscriber_client.SubscriberClient) assert (client.api._transport.grpc_channel._channel.target()).decode( @@ -144,8 +131,7 @@ def test_class_method_factory(): "StreamingPullManager.open", autospec=True, ) -def test_subscribe(manager_open): - creds = mock.Mock(spec=credentials.Credentials) +def test_subscribe(manager_open, creds): client = subscriber.Client(credentials=creds) future = client.subscribe("sub_name_a", callback=mock.sentinel.callback) @@ -164,8 +150,7 @@ def test_subscribe(manager_open): "StreamingPullManager.open", autospec=True, ) -def test_subscribe_options(manager_open): - creds = mock.Mock(spec=credentials.Credentials) +def test_subscribe_options(manager_open, creds): client = subscriber.Client(credentials=creds) flow_control = types.FlowControl(max_bytes=42) scheduler = mock.sentinel.scheduler @@ -190,8 +175,8 @@ def test_subscribe_options(manager_open): ) -def test_close(): - client = subscriber.Client() +def test_close(creds): + client = subscriber.Client(credentials=creds) patcher = mock.patch.object(client.api._transport.grpc_channel, "close") with patcher as patched_close: @@ -200,8 +185,8 @@ def test_close(): patched_close.assert_called() -def test_closes_channel_as_context_manager(): - client = subscriber.Client() +def test_closes_channel_as_context_manager(creds): + client = 
subscriber.Client(credentials=creds) patcher = mock.patch.object(client.api._transport.grpc_channel, "close") with patcher as patched_close: @@ -211,8 +196,8 @@ def test_closes_channel_as_context_manager(): patched_close.assert_called() -def test_streaming_pull_gapic_monkeypatch(): - client = subscriber.Client() +def test_streaming_pull_gapic_monkeypatch(creds): + client = subscriber.Client(credentials=creds) with mock.patch("google.api_core.gapic_v1.method.wrap_method"): client.streaming_pull(requests=iter([])) @@ -222,8 +207,8 @@ def test_streaming_pull_gapic_monkeypatch(): assert not transport.streaming_pull._prefetch_first_result_ -def test_sync_pull_warning_if_return_immediately(): - client = subscriber.Client() +def test_sync_pull_warning_if_return_immediately(creds): + client = subscriber.Client(credentials=creds) subscription_path = "projects/foo/subscriptions/bar" with mock.patch.object( @@ -240,10 +225,10 @@ def test_sync_pull_warning_if_return_immediately(): @pytest.mark.asyncio -async def test_sync_pull_warning_if_return_immediately_async(): +async def test_sync_pull_warning_if_return_immediately_async(creds): from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient - client = SubscriberAsyncClient() + client = SubscriberAsyncClient(credentials=creds) subscription_path = "projects/foo/subscriptions/bar" patcher = mock.patch( From af3b725545636b03f0e3121ac1c560c55c13ebb2 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 9 Apr 2021 07:53:38 +0200 Subject: [PATCH 0651/1197] chore: bump test coverage to 100% (#364) * Exclude unreachable test lines from coverage * Skip abstract methods and unused code in coverage * Assure batch ERROR status is actually set in test One of the tests attempted to set an ERROR status on a batch, but that batch was a mock, meaning that setting a status had no effect. If there was a bug in the corresponding code path, the test would probably not catch it, as that code patch would not be taken. 
* Skip code branch that is currently not taken * Silence deprecation warning in two gapic tests The tests for flattened arguments to the sync pull() method hit a code path that emits a deprecation warning, but that warning is expected and should not clutter the test output. * Raise required unit test coverage to 100% * Remove unused param in publish flow controller --- packages/google-cloud-pubsub/.coveragerc | 1 + .../cloud/pubsub_v1/publisher/_batch/base.py | 14 ++--- .../pubsub_v1/publisher/_sequencer/base.py | 6 +-- .../cloud/pubsub_v1/publisher/client.py | 8 --- .../pubsub_v1/publisher/flow_controller.py | 9 +--- .../cloud/pubsub_v1/subscriber/scheduler.py | 6 +-- packages/google-cloud-pubsub/synth.py | 54 ++++++++++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 25 +++++---- .../pubsub_v1/publisher/batch/test_base.py | 8 ++- .../pubsub_v1/publisher/batch/test_thread.py | 2 +- .../sequencer/test_unordered_sequencer.py | 13 +++-- .../publisher/test_flow_controller.py | 34 ++++++------ .../publisher/test_publisher_client.py | 2 +- .../unit/pubsub_v1/subscriber/test_message.py | 3 -- .../subscriber/test_subscriber_client.py | 2 +- .../tests/unit/pubsub_v1/test__gapic.py | 4 +- 16 files changed, 120 insertions(+), 71 deletions(-) diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 580a30e10c4a..a48e62f2e583 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -5,6 +5,7 @@ branch = True fail_under = 100 show_missing = True omit = + google/cloud/__init__.py google/pubsub/__init__.py exclude_lines = # Re-enable the standard pragma diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 6a503c098d17..812e0e0e216d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -50,7 +50,7 @@ def __len__(self): @staticmethod @abc.abstractmethod - def make_lock(): + def make_lock(): # pragma: NO COVER """Return a lock in the chosen concurrency model. Returns: @@ -60,7 +60,7 @@ def make_lock(): @property @abc.abstractmethod - def messages(self): + def messages(self): # pragma: NO COVER """Return the messages currently in the batch. Returns: @@ -70,7 +70,7 @@ def messages(self): @property @abc.abstractmethod - def size(self): + def size(self): # pragma: NO COVER """Return the total size of all of the messages currently in the batch. The size includes any overhead of the actual ``PublishRequest`` that is @@ -84,7 +84,7 @@ def size(self): @property @abc.abstractmethod - def settings(self): + def settings(self): # pragma: NO COVER """Return the batch settings. Returns: @@ -95,7 +95,7 @@ def settings(self): @property @abc.abstractmethod - def status(self): + def status(self): # pragma: NO COVER """Return the status of this batch. Returns: @@ -106,7 +106,7 @@ def status(self): """ raise NotImplementedError - def cancel(self, cancellation_reason): + def cancel(self, cancellation_reason): # pragma: NO COVER """Complete pending futures with an exception. This method must be called before publishing starts (ie: while the @@ -119,7 +119,7 @@ def cancel(self, cancellation_reason): raise NotImplementedError @abc.abstractmethod - def publish(self, message): + def publish(self, message): # pragma: NO COVER """Publish a single message. 
Add the given message to this object; this will cause it to be diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index 4abf4b070cf5..c14b2975d055 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -24,7 +24,7 @@ class Sequencer(metaclass=abc.ABCMeta): @staticmethod @abc.abstractmethod - def is_finished(self): + def is_finished(self): # pragma: NO COVER """ Whether the sequencer is finished and should be cleaned up. Returns: @@ -34,7 +34,7 @@ def is_finished(self): @staticmethod @abc.abstractmethod - def unpause(self, message): + def unpause(self, message): # pragma: NO COVER """ Unpauses this sequencer. Raises: @@ -45,7 +45,7 @@ def unpause(self, message): @staticmethod @abc.abstractmethod - def publish(self, message, retry=None): + def publish(self, message, retry=None): # pragma: NO COVER """ Publish message for this ordering key. Args: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index d3efc317d0f6..fc89a2adf8cc 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -49,14 +49,6 @@ _raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() -def _set_nested_value(container, value, keys): - current = container - for key in keys[:-1]: - current = current.setdefault(key, {}) - current[keys[-1]] = value - return container - - @_gapic.add_methods(publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) class Client(object): """A publisher client for Google Cloud Pub/Sub. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py index 300e273aabbc..fa3fac6d3fef 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -258,7 +258,7 @@ def _would_overflow(self, message): return size_overflow or msg_count_overflow - def _load_info(self, message_count=None, total_bytes=None, reserved_bytes=None): + def _load_info(self, message_count=None, total_bytes=None): """Return the current flow control load information. The caller can optionally adjust some of the values to fit its reporting @@ -271,8 +271,6 @@ def _load_info(self, message_count=None, total_bytes=None, reserved_bytes=None): The value to override the current message count with. total_bytes (Optional[int]): The value to override the current total bytes with. - reserved_bytes (Optional[int]): - The value to override the current number of reserved bytes with. 
Returns: str @@ -285,13 +283,10 @@ def _load_info(self, message_count=None, total_bytes=None, reserved_bytes=None): if total_bytes is None: total_bytes = self._total_bytes - if reserved_bytes is None: - reserved_bytes = self._reserved_bytes - return msg.format( message_count, self._settings.message_limit, total_bytes, self._settings.byte_limit, - reserved_bytes, + self._reserved_bytes, ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index dd623517c6c6..a11ca490b590 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -31,7 +31,7 @@ class Scheduler(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def queue(self): + def queue(self): # pragma: NO COVER """Queue: A concurrency-safe queue specific to the underlying concurrency implementation. @@ -40,7 +40,7 @@ def queue(self): raise NotImplementedError @abc.abstractmethod - def schedule(self, callback, *args, **kwargs): + def schedule(self, callback, *args, **kwargs): # pragma: NO COVER """Schedule the callback to be called asynchronously. Args: @@ -54,7 +54,7 @@ def schedule(self, callback, *args, **kwargs): raise NotImplementedError @abc.abstractmethod - def shutdown(self, await_msg_callbacks=False): + def shutdown(self, await_msg_callbacks=False): # pragma: NO COVER """Shuts down the scheduler and immediately end all pending callbacks. Args: diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index c2140f369d62..2ad5d20de04d 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -142,7 +142,7 @@ ) # Emit deprecation warning if return_immediately flag is set with synchronous pull. 
-count = s.replace( +s.replace( "google/pubsub_v1/services/subscriber/*client.py", r"import pkg_resources", "import warnings\n\g<0>", @@ -170,6 +170,48 @@ if count != 2: raise Exception("Too many or too few replacements in pull() methods.") +# Silence deprecation warnings in pull() method flattened parameter tests. +s.replace( + "tests/unit/gapic/pubsub_v1/test_subscriber.py", + "import mock", + "\g<0>\nimport warnings", +) +count = s.replace( + "tests/unit/gapic/pubsub_v1/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) + \s+(client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags = re.MULTILINE | re.DOTALL, +) + +if count < 1: + raise Exception("Catch warnings replacement failed.") + +count = s.replace( + "tests/unit/gapic/pubsub_v1/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) + \s+response = (await client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags = re.MULTILINE | re.DOTALL, +) + +if count < 1: + raise Exception("Catch warnings replacement failed.") + # Make sure that client library version is present in user agent header. s.replace( [ @@ -206,13 +248,21 @@ "\n\g<0>", ) +# The namespace package declaration in google/cloud/__init__.py should be excluded +# from coverage. 
+s.replace( + ".coveragerc", + r"((?P[^\n\S]+)google/pubsub/__init__\.py)", + "\ggoogle/cloud/__init__.py\n\g<0>", +) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = gcp.CommonTemplates().py_library( microgenerator=True, samples=True, - cov_level=99, + cov_level=100, system_test_external_dependencies=["psutil"], ) s.move(templated_files, excludes=[".coveragerc"]) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 5931d609d3af..96f03eb1510a 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -17,6 +17,7 @@ import os import mock +import warnings import grpc from grpc.experimental import aio @@ -2338,11 +2339,13 @@ def test_pull_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.pull( - subscription="subscription_value", - return_immediately=True, - max_messages=1277, - ) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) # Establish that the underlying call was made with the expected # request object values. @@ -2382,11 +2385,13 @@ async def test_pull_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.pull( - subscription="subscription_value", - return_immediately=True, - max_messages=1277, - ) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + await client.pull( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) # Establish that the underlying call was made with the expected # request object values. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index b03dd99de745..3ded77b00d86 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -24,12 +24,11 @@ from google.pubsub_v1 import types as gapic_types -def create_batch(status=None, settings=types.BatchSettings()): +def create_batch(status, settings=types.BatchSettings()): """Create a batch object, which does not commit. Args: - status (str): If provided, the batch's internal status will be set - to the provided status. + status (str): The batch's internal status will be set to the provided status. 
Returns: ~.pubsub_v1.publisher.batch.thread.Batch: The batch object @@ -37,8 +36,7 @@ def create_batch(status=None, settings=types.BatchSettings()): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) batch = Batch(client, "topic_name", settings) - if status: - batch._status = status + batch._status = status return batch diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index cd634f8f813b..1f1850ad222d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -188,7 +188,7 @@ def api_publish_delay(topic="", messages=(), retry=None): start = datetime.datetime.now() event_set = api_publish_called.wait(timeout=1.0) - if not event_set: + if not event_set: # pragma: NO COVER pytest.fail("API publish was not called in time") batch.publish({"data": b"second message"}) end = datetime.datetime.now() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index b8aff0d2c92d..04a89e19bb9a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -17,6 +17,7 @@ from google.auth import credentials from google.cloud.pubsub_v1 import publisher +from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher._batch import base from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer from google.pubsub_v1 import types as gapic_types @@ -119,13 +120,19 @@ def test_publish_batch_full(): def test_publish_after_batch_error(): client = create_client() 
message = create_message() - batch = mock.Mock(spec=client._batch_class) + + batch = client._batch_class( + client, "topic_name", types.BatchSettings(max_latency=float("inf")) + ) + batch._messages.append(mock.Mock(name="message")) # Make batch truthy (non-empty). sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") sequencer._set_batch(batch) - sequencer.commit() - batch.commit.assert_called_once() + with mock.patch.object(batch, "commit") as fake_batch_commit: + sequencer.commit() + + fake_batch_commit.assert_called_once() # Simulate publish RPC failing. batch._set_status(base.BatchStatus.ERROR) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py index 54484520d803..5e9d6c3ae3f5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -47,7 +47,7 @@ def run_me(): time.sleep(action_pause) method(msg) except Exception: - if error_event is not None: + if error_event is not None: # pragma: NO COVER error_event.set() else: all_done_event.set() @@ -229,27 +229,29 @@ def test_blocking_on_overflow_until_free_capacity(): # Adding a message with free capacity should not block. _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) if not adding_1_done.wait(timeout=0.1): - pytest.fail("Adding a message with enough flow capacity blocked or errored.") + pytest.fail( # pragma: NO COVER + "Adding a message with enough flow capacity blocked or errored." + ) # Adding messages when there is not enough capacity should block, even if # added through multiple threads. 
_run_in_daemon(flow_controller, "add", [msg2], adding_2_done) if adding_2_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER _run_in_daemon(flow_controller, "add", [msg3], adding_3_done) if adding_3_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER _run_in_daemon(flow_controller, "add", [msg4], adding_4_done) if adding_4_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER # After releasing one message, there should be room for a new message, which # should result in unblocking one of the waiting threads. _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): - pytest.fail("Releasing a message blocked or errored.") + pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER done_status = [ adding_2_done.wait(timeout=0.1), @@ -267,7 +269,7 @@ def test_blocking_on_overflow_until_free_capacity(): _run_in_daemon(flow_controller, "release", [added_msg], releasing_x_done) if not releasing_x_done.wait(timeout=0.1): - pytest.fail("Releasing messages blocked or errored.") + pytest.fail("Releasing messages blocked or errored.") # pragma: NO COVER released_count = sum( ( @@ -345,24 +347,24 @@ def test_threads_posting_large_messages_do_not_starve(): # Sanity check - releasing should have completed by now. if not releasing_busy_done.wait(timeout=1.1): - pytest.fail("Releasing messages blocked or errored.") + pytest.fail("Releasing messages blocked or errored.") # pragma: NO COVER # Enough messages released, the large message should have come through in # the meantime. 
if not adding_large_done.wait(timeout=0.1): - pytest.fail("A thread adding a large message starved.") + pytest.fail("A thread adding a large message starved.") # pragma: NO COVER if adding_busy_done.wait(timeout=0.1): - pytest.fail("Adding multiple small messages did not block.") + pytest.fail("Adding multiple small messages did not block.") # pragma: NO COVER # Releasing the large message should unblock adding the remaining "busy" messages # that have not been added yet. _run_in_daemon(flow_controller, "release", [large_msg], releasing_large_done) if not releasing_large_done.wait(timeout=0.1): - pytest.fail("Releasing a message blocked or errored.") + pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER if not adding_busy_done.wait(timeout=1.0): - pytest.fail("Adding messages blocked or errored.") + pytest.fail("Adding messages blocked or errored.") # pragma: NO COVER def test_warning_on_internal_reservation_stats_error_when_unblocking(): @@ -387,13 +389,15 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): # Adding a message with free capacity should not block. _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) if not adding_1_done.wait(timeout=0.1): - pytest.fail("Adding a message with enough flow capacity blocked or errored.") + pytest.fail( # pragma: NO COVER + "Adding a message with enough flow capacity blocked or errored." + ) # Adding messages when there is not enough capacity should block, even if # added through multiple threads. 
_run_in_daemon(flow_controller, "add", [msg2], adding_2_done) if adding_2_done.wait(timeout=0.1): - pytest.fail("Adding a message on overflow did not block.") + pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER # Intentionally corrupt internal stats reservation = next(iter(flow_controller._byte_reservations.values()), None) @@ -403,7 +407,7 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): with warnings.catch_warnings(record=True) as warned: _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): - pytest.fail("Releasing a message blocked or errored.") + pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER matches = [warning for warning in warned if warning.category is RuntimeWarning] assert len(matches) == 1 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 0aacee5ee84e..3db5d60cd7a5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -74,7 +74,7 @@ def test_init_default_client_info(creds): for header, header_value in wrapped_method._metadata if header == METRICS_METADATA_KEY ), - None, + None, # pragma: NO COVER ) assert user_agent is not None assert expected_client_info in user_agent diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 75580c71d20b..e0c03849102c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -116,9 +116,6 @@ def check_call_types(mock, *args, **kwargs): assert len(call_args) == len(args) for n, 
argtype in enumerate(args): assert isinstance(call_args[n], argtype) - for argname, argtype in kwargs: - assert argname in call_kwargs - assert isinstance(call_kwargs[argname], argtype) def test_ack(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 364417f25773..dbeb7b343734 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -44,7 +44,7 @@ def test_init_default_client_info(creds): for header, header_value in wrapped_method._metadata if header == METRICS_METADATA_KEY ), - None, + None, # pragma: NO COVER ) assert user_agent is not None assert expected_client_info in user_agent diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py index 5478aee18213..cb63850a7af7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py @@ -32,7 +32,7 @@ def class_method(cls): return "source class class method" @classmethod - def blacklisted_method(cls): + def blacklisted_method(cls): # pragma: NO COVER return "source class blacklisted method" @@ -42,7 +42,7 @@ class Foo(object): def __init__(self): self.api = SourceClass() - def method(self): + def method(self): # pragma: NO COVER return "foo class instance method" foo = Foo() From 7e254b2f7d530d9104e66af164e7cd677985f46f Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 9 Apr 2021 15:21:30 +0200 Subject: [PATCH 0652/1197] chore: fix streaming pull close unit test flakiness (#361) * chore: fix streaming pull close test flakiness * Store shutdown thread on the manager instance --- .../_protocol/streaming_pull_manager.py | 5 +++-- 
.../subscriber/test_streaming_pull_manager.py | 21 +++++++++++++++++++ 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index ac940de268a7..e244e871d672 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -148,6 +148,7 @@ def __init__( self._closing = threading.Lock() self._closed = False self._close_callbacks = [] + self._regular_shutdown_thread = None # Created on intentional shutdown. # Generate a random client id tied to this object. All streaming pull # connections (initial and re-connects) will then use the same client @@ -539,13 +540,13 @@ def close(self, reason=None): an "intentional" shutdown. This is passed to the callbacks specified via :meth:`add_close_callback`. """ - thread = threading.Thread( + self._regular_shutdown_thread = threading.Thread( name=_REGULAR_SHUTDOWN_THREAD_NAME, daemon=True, target=self._shutdown, kwargs={"reason": reason}, ) - thread.start() + self._regular_shutdown_thread.start() def _shutdown(self, reason=None): """Run the actual shutdown sequence (stop the stream and all helper threads). 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 9930e8f14e09..25ab4f0ae343 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -550,6 +550,19 @@ def make_running_manager(**kwargs): ) +def await_manager_shutdown(manager, timeout=None): + # NOTE: This method should be called after manager.close(), i.e. after the shutdown + # thread has been created and started. + shutdown_thread = manager._regular_shutdown_thread + + if shutdown_thread is None: # pragma: NO COVER + raise Exception("Shutdown thread does not exist on the manager instance.") + + shutdown_thread.join(timeout=timeout) + if shutdown_thread.is_alive(): # pragma: NO COVER + pytest.fail("Shutdown not completed in time.") + + def test_close(): ( manager, @@ -561,6 +574,7 @@ def test_close(): ) = make_running_manager() manager.close() + await_manager_shutdown(manager, timeout=3) consumer.stop.assert_called_once() leaser.stop.assert_called_once() @@ -583,6 +597,7 @@ def test_close_inactive_consumer(): consumer.is_active = False manager.close() + await_manager_shutdown(manager, timeout=3) consumer.stop.assert_not_called() leaser.stop.assert_called_once() @@ -596,6 +611,7 @@ def test_close_idempotent(): manager.close() manager.close() + await_manager_shutdown(manager, timeout=3) assert scheduler.shutdown.call_count == 1 @@ -640,6 +656,7 @@ def test_close_no_dispatcher_error(): dispatcher.start() manager.close() + await_manager_shutdown(manager, timeout=3) error_callback.assert_not_called() @@ -651,6 +668,7 @@ def test_close_callbacks(): manager.add_close_callback(callback) manager.close(reason="meep") + await_manager_shutdown(manager, timeout=3) callback.assert_called_once_with(manager, "meep") @@ -660,6 
+678,7 @@ def test_close_blocking_scheduler_shutdown(): scheduler = manager._scheduler manager.close() + await_manager_shutdown(manager, timeout=3) scheduler.shutdown.assert_called_once_with(await_msg_callbacks=True) @@ -669,6 +688,7 @@ def test_close_nonblocking_scheduler_shutdown(): scheduler = manager._scheduler manager.close() + await_manager_shutdown(manager, timeout=3) scheduler.shutdown.assert_called_once_with(await_msg_callbacks=False) @@ -690,6 +710,7 @@ def fake_nack(self): manager._messages_on_hold._messages_on_hold.append(messages[2]) manager.close() + await_manager_shutdown(manager, timeout=3) assert sorted(nacked_messages) == [b"msg1", b"msg2", b"msg3"] From 326dd7739db7c736e658188f3492faa3263d7de2 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 13 Apr 2021 10:33:18 +0200 Subject: [PATCH 0653/1197] docs: add additional info on use_legacy_flow_control parameter (#301) * docs: add more details on use_legacy_flow_control * asdsa * Update google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py * Update google/cloud/pubsub_v1/subscriber/client.py * Remove "less accurate" from flow control docs * Reword parameter description. Co-authored-by: Tianzi Cai --- .../subscriber/_protocol/streaming_pull_manager.py | 7 ++++--- .../google/cloud/pubsub_v1/subscriber/client.py | 8 ++++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e244e871d672..2112ce0db65a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -106,9 +106,10 @@ class StreamingPullManager(object): ``projects/{project}/subscriptions/{subscription}``. 
flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow control settings. - use_legacy_flow_control (bool): Disables enforcing flow control settings - at the Cloud PubSub server and uses the less accurate method of only - enforcing flow control at the client side. + use_legacy_flow_control (bool): + If set to ``True``, flow control at the Cloud Pub/Sub server is disabled, + though client-side flow control is still enabled. If set to ``False`` + (default), both server-side and client-side flow control are enabled. scheduler (~google.cloud.pubsub_v1.scheduler.Scheduler): The scheduler to use to process messages. If not provided, a thread pool-based scheduler will be used. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 51bdc106ca8d..b137fbc5ffdd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -146,8 +146,8 @@ def subscribe( a long time to process. The ``use_legacy_flow_control`` argument disables enforcing flow control - settings at the Cloud PubSub server and uses the less accurate method of - only enforcing flow control at the client side. + settings at the Cloud Pub/Sub server, and only the client side flow control + will be enforced. This method starts the receiver in the background and returns a *Future* representing its execution. Waiting on the future (calling @@ -200,6 +200,10 @@ def callback(message): *scheduler* to use when executing the callback. This controls how callbacks are executed concurrently. This object must not be shared across multiple SubscriberClients. + use_legacy_flow_control (bool): + If set to ``True``, flow control at the Cloud Pub/Sub server is disabled, + though client-side flow control is still enabled. 
If set to ``False`` + (default), both server-side and client-side flow control are enabled. await_callbacks_on_shutdown (bool): If ``True``, after canceling the returned future, the latter's ``result()`` method will block until the background stream and its From 4e5be7b6295dad8296f79af3ee1bcc350dd27e90 Mon Sep 17 00:00:00 2001 From: Jim Fulton Date: Tue, 13 Apr 2021 11:51:28 -0600 Subject: [PATCH 0654/1197] chore: add missing licence header (#377) --- .../tests/unit/pubsub_v1/conftest.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py index 2a7220e93304..64729a6b5a76 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import google.auth.credentials import mock import pytest From 3749c5ea612684f3155dd4f1a8ca763d7a2047e7 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 14 Apr 2021 08:19:15 -0700 Subject: [PATCH 0655/1197] test: require 100% unit test coverage (via synth) (#359) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. 
* feat: add RPC Priority request options PiperOrigin-RevId: 364449524 Source-Author: Google APIs Source-Date: Mon Mar 22 17:39:37 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 6598bb829c9e9a534be674649ffd1b4671a821f9 Source-Link: https://github.com/googleapis/googleapis/commit/6598bb829c9e9a534be674649ffd1b4671a821f9 * chore(deps): update precommit hook pycqa/flake8 to v3.9.0 [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Type | Update | Change | |---|---|---|---| | [pycqa/flake8](https://gitlab.com/pycqa/flake8) | repository | minor | `3.8.4` -> `3.9.0` | --- ### Release Notes
pycqa/flake8 ### [`v3.9.0`](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0) [Compare Source](https://gitlab.com/pycqa/flake8/compare/3.8.4...3.9.0)
--- ### Renovate configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool). Source-Author: WhiteSource Renovate Source-Date: Tue Mar 23 17:38:03 2021 +0100 Source-Repo: googleapis/synthtool Source-Sha: f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 Source-Link: https://github.com/googleapis/synthtool/commit/f5c5904fb0c6aa3b3730eadf4e5a4485afc65726 * test(python): use constraints files to check dependency lower bounds Use a constraints file when installing dependencies for system and unit tests nox sessions. https://pip.pypa.io/en/stable/user_guide/#constraints-files > Constraints files are requirements files that **only control which version of a requirement is installed, not whether it is installed or not**. Their syntax and contents is nearly identical to Requirements Files. There is one key difference: Including a package in a constraints file does not trigger installation of the package. ``` testing ├── constraints-3.10.txt ├── constraints-3.11.txt ├── constraints-3.6.txt ├── constraints-3.7.txt ├── constraints-3.8.txt └── constraints-3.9.txt ``` Going forward, one constraints file (currently 3.6) will be populated with every library requirement and extra listed in the `setup.py`. The constraints file will pin each requirement to the lower bound. This ensures that library maintainers will see test failures if they forget to update a lower bound on a dependency. 
See https://github.com/googleapis/python-bigquery/pull/263 for an example Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Tue Mar 23 10:52:02 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 86ed43d4f56e6404d068e62e497029018879c771 Source-Link: https://github.com/googleapis/synthtool/commit/86ed43d4f56e6404d068e62e497029018879c771 * build(python): update docfx job to use new plugin Source-Author: Dan Lee <71398022+dandhlee@users.noreply.github.com> Source-Date: Tue Mar 30 19:36:37 2021 -0400 Source-Repo: googleapis/synthtool Source-Sha: 4501974ad08b5d693311457e2ea4ce845676e329 Source-Link: https://github.com/googleapis/synthtool/commit/4501974ad08b5d693311457e2ea4ce845676e329 * chore: Add license headers for python config files Source-Author: Anthonios Partheniou Source-Date: Tue Apr 6 11:32:03 2021 -0400 Source-Repo: googleapis/synthtool Source-Sha: 5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc Source-Link: https://github.com/googleapis/synthtool/commit/5b5bf6d519b2d658d9f2e483d9f6f3d0ba8ee6bc * chore: add constraints file check for python samples This is the sibling PR to https://github.com/GoogleCloudPlatform/python-docs-samples/pull/5611 and this is the issue opened for it https://github.com/GoogleCloudPlatform/python-docs-samples/issues/5549 If you look at the files in [this example repo](https://github.com/leahecole/testrepo-githubapp/pull/31/files), you'll see that renovate successfully opened a PR on three constraints files in `samples` directories and subdirectories, and properly ignored `constraints` files at the root level cc @tswast TODO: - [x] update renovate to check for samples/constraints.txt dependency updates - [x] run lint locally to double check that I'm not introducing lint error Source-Author: Leah E. 
Cole <6719667+leahecole@users.noreply.github.com> Source-Date: Fri Apr 9 22:50:04 2021 -0700 Source-Repo: googleapis/synthtool Source-Sha: 0a071b3460344886297a304253bf924aa68ddb7e Source-Link: https://github.com/googleapis/synthtool/commit/0a071b3460344886297a304253bf924aa68ddb7e --- .../.github/header-checker-lint.yml | 2 +- .../.pre-commit-config.yaml | 14 ++ packages/google-cloud-pubsub/docs/conf.py | 13 ++ packages/google-cloud-pubsub/noxfile.py | 28 ++-- packages/google-cloud-pubsub/renovate.json | 5 +- .../samples/snippets/noxfile.py | 10 +- packages/google-cloud-pubsub/synth.metadata | 126 +++++++++++++++++- 7 files changed, 179 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/header-checker-lint.yml b/packages/google-cloud-pubsub/.github/header-checker-lint.yml index fc281c05bd55..6fe78aa7987a 100644 --- a/packages/google-cloud-pubsub/.github/header-checker-lint.yml +++ b/packages/google-cloud-pubsub/.github/header-checker-lint.yml @@ -1,6 +1,6 @@ {"allowedCopyrightHolders": ["Google LLC"], "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], "sourceFileExtensions": [ "ts", "js", diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 32302e4883a1..8912e9b5d7d7 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -1,3 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks repos: diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index f81d2979853c..e2d55cde22fa 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -1,4 +1,17 @@ # -*- coding: utf-8 -*- +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# # google-cloud-pubsub documentation build configuration file # diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index de9e95dca1b6..6ccdf5e63358 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -84,13 +87,15 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -117,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -141,8 +149,10 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
- session.install("mock", "pytest", "google-cloud-testutils", "psutil") - session.install("-e", ".") + session.install( + "mock", "pytest", "google-cloud-testutils", "psutil", "-c", constraints_path + ) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: @@ -171,7 +181,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -203,9 +213,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json index f08bc22c9a55..c04895563e69 100644 --- a/packages/google-cloud-pubsub/renovate.json +++ b/packages/google-cloud-pubsub/renovate.json @@ -2,5 +2,8 @@ "extends": [ "config:base", ":preserveSemverRanges" ], - "ignorePaths": [".pre-commit-config.yaml"] + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 97bf7da80e39..956cdf4f9250 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -172,10 +172,16 @@ def 
blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): - session.install("-r", "requirements.txt") + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") if os.path.exists("requirements-test.txt"): - session.install("-r", "requirements-test.txt") + if os.path.exists("constraints-test.txt"): + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") + else: + session.install("-r", "requirements-test.txt") if INSTALL_LIBRARY_FROM_SOURCE: session.install("-e", _get_repo_root()) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index aac4bdee0c3e..1a189e8e6ea4 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,8 +3,8 @@ { "git": { "name": ".", - "remote": "git@github.com:plamut/python-pubsub.git", - "sha": "0a662a6daad0517fb0e01732c9dd7f9d1852924c" + "remote": "https://github.com/googleapis/python-pubsub.git", + "sha": "9f451a1fe0ac5cb2fb13d72b0436e0b521a4fecb" } }, { @@ -19,21 +19,21 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "c84c8f156e09702e1c8946bfb9746e6f5892cf27" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "c84c8f156e09702e1c8946bfb9746e6f5892cf27" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "c84c8f156e09702e1c8946bfb9746e6f5892cf27" + "sha": "0a071b3460344886297a304253bf924aa68ddb7e" } } ], @@ -47,5 +47,121 @@ "generator": "bazel" } } + ], + "generatedFiles": [ + ".coveragerc", + 
".flake8", + ".github/CONTRIBUTING.md", + ".github/ISSUE_TEMPLATE/bug_report.md", + ".github/ISSUE_TEMPLATE/feature_request.md", + ".github/ISSUE_TEMPLATE/support_request.md", + ".github/PULL_REQUEST_TEMPLATE.md", + ".github/header-checker-lint.yml", + ".github/release-please.yml", + ".github/snippet-bot.yml", + ".gitignore", + ".kokoro/build.sh", + ".kokoro/continuous/common.cfg", + ".kokoro/continuous/continuous.cfg", + ".kokoro/docker/docs/Dockerfile", + ".kokoro/docker/docs/fetch_gpg_keys.sh", + ".kokoro/docs/common.cfg", + ".kokoro/docs/docs-presubmit.cfg", + ".kokoro/docs/docs.cfg", + ".kokoro/populate-secrets.sh", + ".kokoro/presubmit/common.cfg", + ".kokoro/presubmit/presubmit.cfg", + ".kokoro/publish-docs.sh", + ".kokoro/release.sh", + ".kokoro/release/common.cfg", + ".kokoro/release/release.cfg", + ".kokoro/samples/lint/common.cfg", + ".kokoro/samples/lint/continuous.cfg", + ".kokoro/samples/lint/periodic.cfg", + ".kokoro/samples/lint/presubmit.cfg", + ".kokoro/samples/python3.6/common.cfg", + ".kokoro/samples/python3.6/continuous.cfg", + ".kokoro/samples/python3.6/periodic-head.cfg", + ".kokoro/samples/python3.6/periodic.cfg", + ".kokoro/samples/python3.6/presubmit.cfg", + ".kokoro/samples/python3.7/common.cfg", + ".kokoro/samples/python3.7/continuous.cfg", + ".kokoro/samples/python3.7/periodic-head.cfg", + ".kokoro/samples/python3.7/periodic.cfg", + ".kokoro/samples/python3.7/presubmit.cfg", + ".kokoro/samples/python3.8/common.cfg", + ".kokoro/samples/python3.8/continuous.cfg", + ".kokoro/samples/python3.8/periodic-head.cfg", + ".kokoro/samples/python3.8/periodic.cfg", + ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/test-samples-against-head.sh", + ".kokoro/test-samples-impl.sh", + ".kokoro/test-samples.sh", + ".kokoro/trampoline.sh", + ".kokoro/trampoline_v2.sh", + ".pre-commit-config.yaml", + ".trampolinerc", + "CODE_OF_CONDUCT.md", + "CONTRIBUTING.rst", + "LICENSE", + "MANIFEST.in", + "docs/_static/custom.css", + 
"docs/_templates/layout.html", + "docs/conf.py", + "docs/multiprocessing.rst", + "google/cloud/pubsub_v1/proto/pubsub.proto", + "google/cloud/pubsub_v1/proto/schema.proto", + "google/pubsub/__init__.py", + "google/pubsub/py.typed", + "google/pubsub_v1/__init__.py", + "google/pubsub_v1/py.typed", + "google/pubsub_v1/services/__init__.py", + "google/pubsub_v1/services/publisher/__init__.py", + "google/pubsub_v1/services/publisher/async_client.py", + "google/pubsub_v1/services/publisher/client.py", + "google/pubsub_v1/services/publisher/pagers.py", + "google/pubsub_v1/services/publisher/transports/__init__.py", + "google/pubsub_v1/services/publisher/transports/base.py", + "google/pubsub_v1/services/publisher/transports/grpc.py", + "google/pubsub_v1/services/publisher/transports/grpc_asyncio.py", + "google/pubsub_v1/services/schema_service/__init__.py", + "google/pubsub_v1/services/schema_service/async_client.py", + "google/pubsub_v1/services/schema_service/client.py", + "google/pubsub_v1/services/schema_service/pagers.py", + "google/pubsub_v1/services/schema_service/transports/__init__.py", + "google/pubsub_v1/services/schema_service/transports/base.py", + "google/pubsub_v1/services/schema_service/transports/grpc.py", + "google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py", + "google/pubsub_v1/services/subscriber/__init__.py", + "google/pubsub_v1/services/subscriber/async_client.py", + "google/pubsub_v1/services/subscriber/client.py", + "google/pubsub_v1/services/subscriber/pagers.py", + "google/pubsub_v1/services/subscriber/transports/__init__.py", + "google/pubsub_v1/services/subscriber/transports/base.py", + "google/pubsub_v1/services/subscriber/transports/grpc.py", + "google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py", + "google/pubsub_v1/types/__init__.py", + "google/pubsub_v1/types/pubsub.py", + "google/pubsub_v1/types/schema.py", + "mypy.ini", + "noxfile.py", + "renovate.json", + "samples/AUTHORING_GUIDE.md", + 
"samples/CONTRIBUTING.md", + "samples/snippets/noxfile.py", + "scripts/decrypt-secrets.sh", + "scripts/fixup_pubsub_v1_keywords.py", + "scripts/readme-gen/readme_gen.py", + "scripts/readme-gen/templates/README.tmpl.rst", + "scripts/readme-gen/templates/auth.tmpl.rst", + "scripts/readme-gen/templates/auth_api_key.tmpl.rst", + "scripts/readme-gen/templates/install_deps.tmpl.rst", + "scripts/readme-gen/templates/install_portaudio.tmpl.rst", + "setup.cfg", + "testing/.gitignore", + "tests/unit/gapic/pubsub_v1/__init__.py", + "tests/unit/gapic/pubsub_v1/test_publisher.py", + "tests/unit/gapic/pubsub_v1/test_schema_service.py", + "tests/unit/gapic/pubsub_v1/test_subscriber.py" ] } \ No newline at end of file From 19a2e35d0f4797a0a7fc5c34fe244bc0f7d6a549 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 15 Apr 2021 16:28:00 +0200 Subject: [PATCH 0656/1197] test: fix flaky test for blocking pull shutdown (#378) If a test is run in a suite with other system tests, the messages are not always published in batch sizes as desired, which can affect how ACKs are handled on the backend (the server requires all messages published in a single batch to be ACK-ed in order to accept the ACKs). If a publisher client instance is shared between the tests, the batching can apparently be affected, thus we create a new client instance before each test. Since these tests are slow system tests, the overhead should not be significant. 
--- packages/google-cloud-pubsub/tests/system.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 181632d79834..8ef3dca9fd23 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -44,12 +44,12 @@ def project(): yield default_project -@pytest.fixture(scope="module") +@pytest.fixture() def publisher(): yield pubsub_v1.PublisherClient() -@pytest.fixture(scope="module") +@pytest.fixture() def subscriber(): yield pubsub_v1.SubscriberClient() From 1b87174cf656c4515cf43dd3e47951e71354bb33 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 15 Apr 2021 08:11:22 -0700 Subject: [PATCH 0657/1197] chore: generate PyPI token in secrets manager, fix spacing in docs (via synth) (#384) * changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. * feat: add RPC Priority request options PiperOrigin-RevId: 364449524 Source-Author: Google APIs Source-Date: Mon Mar 22 17:39:37 2021 -0700 Source-Repo: googleapis/googleapis Source-Sha: 6598bb829c9e9a534be674649ffd1b4671a821f9 Source-Link: https://github.com/googleapis/googleapis/commit/6598bb829c9e9a534be674649ffd1b4671a821f9 * docs(python): add empty lines between methods Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Apr 14 14:41:09 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 721339ab60a6eb63b889978b3d9b295dcb3be370 Source-Link: https://github.com/googleapis/synthtool/commit/721339ab60a6eb63b889978b3d9b295dcb3be370 * build: use PyPI API token in secret manager Migrate python libraries onto the PyPI API token stored in secret manager. A PyPI API token is limited in scope to uploading new releases. 
https://pypi.org/help/#apitoken Verified that this works with [build](https://fusion2.corp.google.com/invocations/14bae126-83fa-4328-8da9-d390ed99315c/targets/cloud-devrel%2Fclient-libraries%2Fpython%2Fgoogleapis%2Fpython-vision%2Frelease%2Frelease;config=default/log) on https://github.com/googleapis/python-vision/pull/136 Source-Author: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Source-Date: Wed Apr 14 17:46:06 2021 -0600 Source-Repo: googleapis/synthtool Source-Sha: 043cc620d6a6111816d9e09f2a97208565fde958 Source-Link: https://github.com/googleapis/synthtool/commit/043cc620d6a6111816d9e09f2a97208565fde958 --- packages/google-cloud-pubsub/.kokoro/release.sh | 4 ++-- .../google-cloud-pubsub/.kokoro/release/common.cfg | 14 ++------------ .../google-cloud-pubsub/docs/_static/custom.css | 13 ++++++++++++- packages/google-cloud-pubsub/synth.metadata | 8 ++++---- 4 files changed, 20 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index 321ef575ce7a..b3a2d20a8ef6 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password") +TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") cd github/python-pubsub python3 setup.py sdist bdist_wheel -twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/* +twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg index 625c3fdbb3ba..1648dd9ad75b 100644 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -23,18 +23,8 @@ env_vars: { value: "github/python-pubsub/.kokoro/release.sh" } -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} \ No newline at end of file + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" +} diff --git a/packages/google-cloud-pubsub/docs/_static/custom.css b/packages/google-cloud-pubsub/docs/_static/custom.css index bcd37bbd3c4a..b0a295464b23 100644 --- a/packages/google-cloud-pubsub/docs/_static/custom.css +++ b/packages/google-cloud-pubsub/docs/_static/custom.css @@ -1,9 +1,20 @@ div#python2-eol { border-color: red; border-width: medium; -} +} /* Ensure minimum width for 'Parameters' / 'Returns' column */ dl.field-list > dt { min-width: 100px } + +/* Insert space between methods for readability */ +dl.method { + padding-top: 10px; + padding-bottom: 10px +} + +/* Insert empty space between classes */ +dl.class { + padding-bottom: 50px +} diff --git 
a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 1a189e8e6ea4..02da2833c73c 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "9f451a1fe0ac5cb2fb13d72b0436e0b521a4fecb" + "sha": "b8352f91c63e0cb7d64c4d0e557651248cd301b5" } }, { @@ -19,21 +19,21 @@ "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" + "sha": "043cc620d6a6111816d9e09f2a97208565fde958" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" + "sha": "043cc620d6a6111816d9e09f2a97208565fde958" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "0a071b3460344886297a304253bf924aa68ddb7e" + "sha": "043cc620d6a6111816d9e09f2a97208565fde958" } } ], From 4568abc5a351d3fee947b321f9f8d9cdcece4a7e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 15 Apr 2021 17:12:20 +0200 Subject: [PATCH 0658/1197] chore(deps): update dependency pytest to v6 (#381) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index cf5d6325d1e7..5b835fe723c7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.10.0 -pytest==5.3.2 +pytest==6.2.3 mock==3.0.5 flaky==3.7.0 \ No newline at end of file From 100a67746ef23e3da5646724c4c1ca46bdb632a3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 16 Apr 2021 10:28:02 +0200 Subject: [PATCH 
0659/1197] chore(deps): update dependency mock to v4 (#380) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [mock](http://mock.readthedocs.org/en/latest/) | `==3.0.5` -> `==4.0.3` | [![age](https://badges.renovateapi.com/packages/pypi/mock/4.0.3/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/mock/4.0.3/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/mock/4.0.3/compatibility-slim/3.0.5)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/mock/4.0.3/confidence-slim/3.0.5)](https://docs.renovatebot.com/merge-confidence/) | --- ### Configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-pubsub). 
--- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 5b835fe723c7..36304988755b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.10.0 pytest==6.2.3 -mock==3.0.5 +mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 9c3febe6e7338ec5b90f9b64b2e24a8a8ef9b84b Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Fri, 16 Apr 2021 17:37:33 -0400 Subject: [PATCH 0660/1197] chore: prevent normalization of semver versioning (#382) * chore: prevent normalization of semver versioning * chore: update workaround to make sic work --- packages/google-cloud-pubsub/setup.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3b2fa8450073..3f6e4ac1da48 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -17,6 +17,20 @@ import setuptools +# Disable version normalization performed by setuptools.setup() +try: + # Try the approach of using sic(), added in setuptools 46.1.0 + from setuptools import sic +except ImportError: + # Try the approach of replacing packaging.version.Version + sic = lambda v: v + try: + # setuptools >=39.0.0 uses packaging from setuptools.extern + from setuptools.extern import packaging + except ImportError: + # setuptools <39.0.0 uses packaging from pkg_resources.extern + from pkg_resources.extern import packaging + packaging.version.Version = packaging.version.LegacyVersion # Package metadata. 
@@ -61,7 +75,7 @@ setuptools.setup( name=name, - version=version, + version=sic(version), description=description, long_description=readme, author="Google LLC", From 29f72fc278ef8530fe6d698edd49167deeb3a670 Mon Sep 17 00:00:00 2001 From: hannahrogers-google <52459909+hannahrogers-google@users.noreply.github.com> Date: Wed, 21 Apr 2021 13:31:11 -0700 Subject: [PATCH 0661/1197] fix: do not crash if distribution cannot be found when extracting semver (#393) * fix: no longer crash if pubsub distribution cannot be found wqwhen extracting semver * doc: add context to distribution not found exception --- .../google/cloud/pubsub_v1/publisher/client.py | 7 ++++++- .../google/cloud/pubsub_v1/subscriber/client.py | 7 ++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index fc89a2adf8cc..7e6801de0fa9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -36,7 +36,12 @@ from google.pubsub_v1 import types as gapic_types from google.pubsub_v1.services.publisher import client as publisher_client -__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version +try: + __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version +except pkg_resources.DistributionNotFound: + # Distribution might not be available if we are not running from within a + # PIP package. 
+ __version__ = "0.0" _LOGGER = logging.getLogger(__name__) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index b137fbc5ffdd..376530caaf8a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -27,7 +27,12 @@ from google.pubsub_v1.services.subscriber import client as subscriber_client -__version__ = pkg_resources.get_distribution("google-cloud-pubsub").version +try: + __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version +except pkg_resources.DistributionNotFound: + # Distribution might not be available if we are not running from within + # a PIP package. + __version__ = "0.0" _BLACKLISTED_METHODS = ( "publish", From 809856d60af8f38c6bdc8beb91814aa309d26378 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 26 Apr 2021 10:41:53 -0400 Subject: [PATCH 0662/1197] chore(revert): revert preventing normalization (#396) --- packages/google-cloud-pubsub/setup.py | 16 +--------------- 1 file changed, 1 insertion(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3f6e4ac1da48..3b2fa8450073 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -17,20 +17,6 @@ import setuptools -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - 
packaging.version.Version = packaging.version.LegacyVersion # Package metadata. @@ -75,7 +61,7 @@ setuptools.setup( name=name, - version=sic(version), + version=version, description=description, long_description=readme, author="Google LLC", From 85631c38e40a256a3d2f5e34976039e86a81a319 Mon Sep 17 00:00:00 2001 From: "google-cloud-policy-bot[bot]" <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> Date: Tue, 4 May 2021 11:02:31 +0200 Subject: [PATCH 0663/1197] chore: add SECURITY.md (#401) Co-authored-by: google-cloud-policy-bot[bot] <80869356+google-cloud-policy-bot[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/SECURITY.md | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 packages/google-cloud-pubsub/SECURITY.md diff --git a/packages/google-cloud-pubsub/SECURITY.md b/packages/google-cloud-pubsub/SECURITY.md new file mode 100644 index 000000000000..8b58ae9c01ae --- /dev/null +++ b/packages/google-cloud-pubsub/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
From a32b098f9f1ec53b8fbf7812bf76ef86bc4f807d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 May 2021 07:48:01 +0200 Subject: [PATCH 0664/1197] chore(deps): update dependency pytest to v6.2.4 (#408) [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [pytest](https://docs.pytest.org/en/latest/) ([source](https://togithub.com/pytest-dev/pytest), [changelog](https://docs.pytest.org/en/stable/changelog.html)) | `==6.2.3` -> `==6.2.4` | [![age](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/compatibility-slim/6.2.3)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/pytest/6.2.4/confidence-slim/6.2.3)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
pytest-dev/pytest ### [`v6.2.4`](https://togithub.com/pytest-dev/pytest/releases/6.2.4) [Compare Source](https://togithub.com/pytest-dev/pytest/compare/6.2.3...6.2.4) ### pytest 6.2.4 (2021-05-04) #### Bug Fixes - [#​8539](https://togithub.com/pytest-dev/pytest/issues/8539): Fixed assertion rewriting on Python 3.10.
--- ### Configuration :date: **Schedule**: At any time (no schedule defined). :vertical_traffic_light: **Automerge**: Disabled by config. Please merge this manually once you are satisfied. :recycle: **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. :no_bell: **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-pubsub). --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 36304988755b..357643112ecf 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.10.0 -pytest==6.2.3 +pytest==6.2.4 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 4c7380c29a875caa37740e02097b8dfee6309203 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 6 May 2021 22:12:54 +0200 Subject: [PATCH 0665/1197] fix: Memory leak when publishing messages. (#406) * fix: publisher memory leak If publish threads are marked as daemonic, the leak seemingly disappears. * Add additional link for the fix context The additional linked comment explains which `CPython` issue is the root cause of this. 
* fix a typo --- .../google/cloud/pubsub_v1/publisher/_batch/thread.py | 6 ++++-- .../google/cloud/pubsub_v1/publisher/client.py | 7 ++++++- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 36dd3b946fce..3f9a17f740e6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -207,9 +207,11 @@ def commit(self): def _start_commit_thread(self): """Start a new thread to actually handle the commit.""" - + # NOTE: If the thread is *not* a daemon, a memory leak exists due to a CPython issue. + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 commit_thread = threading.Thread( - name="Thread-CommitBatchPublisher", target=self._commit + name="Thread-CommitBatchPublisher", target=self._commit, daemon=True ) commit_thread.start() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 7e6801de0fa9..4703cc3c456f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -375,8 +375,13 @@ def _ensure_commit_timer_runs_no_lock(self): def _start_commit_thread(self): """Start a new thread to actually wait and commit the sequencers.""" + # NOTE: If the thread is *not* a daemon, a memory leak exists due to a CPython issue. 
+ # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 self._commit_thread = threading.Thread( - name="Thread-PubSubBatchCommitter", target=self._wait_and_commit_sequencers + name="Thread-PubSubBatchCommitter", + target=self._wait_and_commit_sequencers, + daemon=True, ) self._commit_thread.start() From 73d25009a954aa9038660109d0071c0f2f181cfd Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 10 May 2021 18:14:57 +0200 Subject: [PATCH 0666/1197] chore: release v2.4.2 (#409) --- packages/google-cloud-pubsub/CHANGELOG.md | 31 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 480676a0c2a7..b040b2f5f55e 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,37 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 2.4.2 + +05-06-2021 23:50 PDT + + +### Implementation Changes + +- Fix memory leak when publishing messages. ([#406](https://github.com/googleapis/python-pubsub/pull/406)) +- Do not crash if distribution cannot be found when extracting semantic version. ([#393](https://github.com/googleapis/python-pubsub/pull/393)) +- Emit a warning if `return_immediately` is set with synchronous pull. ([#355](https://github.com/googleapis/python-pubsub/pull/355)) +- Regenerate GAPIC layer with latest changes, use explicit default timeouts. ([#345](https://github.com/googleapis/python-pubsub/pull/345)) + + +### Documentation + +- Add additional info on `use_legacy_flow_control` parameter. ([#301](https://github.com/googleapis/python-pubsub/pull/301)) +- Remove EXPERIMENTAL tag for ordering keys in `publisher/client.py`. ([#324](https://github.com/googleapis/python-pubsub/pull/324)) +- Fix `create_topic()` call in README. 
([#360](https://github.com/googleapis/python-pubsub/pull/360)) +- Generate PyPI token in secrets manager, fix spacing in docs (via synth). ([#384](https://github.com/googleapis/python-pubsub/pull/384)) +- Add `SECURITY.md`. ([#401](https://github.com/googleapis/python-pubsub/pull/401)) + + +### Internal / Testing Changes + +- Require 100% unit test coverage (via synth). ([#359](https://github.com/googleapis/python-pubsub/pull/359)) +- Bump test coverage to 100%. ([#364](https://github.com/googleapis/python-pubsub/pull/364)) +- Fix streaming pull close unit test flakiness. ([#361](https://github.com/googleapis/python-pubsub/pull/361)) +- Pass explicit credentials in all unit tests creating clients. ([#369](https://github.com/googleapis/python-pubsub/pull/369)) +- Fix flaky test for blocking pull shutdown. ([#378](https://github.com/googleapis/python-pubsub/pull/378)) +- Add missing licence header. ([#377](https://github.com/googleapis/python-pubsub/pull/377)) + ## [2.4.1](https://www.github.com/googleapis/python-pubsub/compare/v2.4.0...v2.4.1) (2021-03-30) ### Bug Fixes diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 3b2fa8450073..b4a3374c8ff9 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.4.1" +version = "2.4.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 6bcbaf0dcaaa7ca883dd1e667305091624eaf326 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 11 May 2021 10:29:42 +0200 Subject: [PATCH 0667/1197] chore(deps): update dependency google-cloud-pubsub to v2.4.2 (#410) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt 
b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 3ef4481069e2..e45aa73e94a8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.4.1 +google-cloud-pubsub==2.4.2 avro==1.10.2 From b2dcadd4d438c1b3fa4c1a36c607db776698e286 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Sat, 15 May 2021 16:32:38 +0200 Subject: [PATCH 0668/1197] feat: make publish futures compatible with concurrent.futures.as_completed() (#397) * feat: make futures compatible with as_completed() The futures implementation is adjusted to work well with the built-in function with the same name in `concurrent.futures` package. * Fix two unit tests in pre-Python 3.8 If setting a result/exception on a concurrent.futures.Future object, an exception is raised only in Python3.8+, thus we conditionally disable two unit tests. This behavior change is fine, though, because users should never use the set_result() and set_exception() methods directly. * Cover missing code line with a test * Use double underscore for internal cancelled flag * Prefix manager reference with double underscore * Remove Future's completed parameter altogether This parameter is unlikely to be used by any 3rd party code, but even if it is, it's better to cause a loud error rather than silently changing its effect to a no-op. 
--- .../google/cloud/pubsub_v1/futures.py | 160 ++---------------- .../pubsub_v1/publisher/_batch/thread.py | 2 +- .../cloud/pubsub_v1/publisher/futures.py | 22 ++- .../cloud/pubsub_v1/subscriber/futures.py | 14 +- .../publisher/test_futures_publisher.py | 8 + .../subscriber/test_futures_subscriber.py | 8 +- .../subscriber/test_subscriber_client.py | 12 +- .../tests/unit/pubsub_v1/test_futures.py | 102 +++++++---- 8 files changed, 127 insertions(+), 201 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index ba861e40c653..4dc72fdaac3d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -14,14 +14,12 @@ from __future__ import absolute_import -import threading -import uuid +import concurrent.futures import google.api_core.future -from google.cloud.pubsub_v1.publisher import exceptions -class Future(google.api_core.future.Future): +class Future(concurrent.futures.Future, google.api_core.future.Future): """Encapsulation of the asynchronous execution of an action. This object is returned from asychronous Pub/Sub calls, and is the @@ -29,158 +27,32 @@ class Future(google.api_core.future.Future): This object should not be created directly, but is returned by other methods in this library. - - Args: - completed (Optional[Any]): An event, with the same interface as - :class:`threading.Event`. This is provided so that callers - with different concurrency models (e.g. ``threading`` or - ``multiprocessing``) can supply an event that is compatible - with that model. The ``wait()`` and ``set()`` methods will be - used. If this argument is not provided, then a new - :class:`threading.Event` will be created and used. 
""" - # This could be a sentinel object or None, but the sentinel object's ID - # can change if the process is forked, and None has the possibility of - # actually being a result. - _SENTINEL = uuid.uuid4() - - def __init__(self, completed=None): - self._result = self._SENTINEL - self._exception = self._SENTINEL - self._callbacks = [] - if completed is None: - completed = threading.Event() - self._completed = completed - - def cancel(self): - """Actions in Pub/Sub generally may not be canceled. - - This method always returns False. - """ - return False - - def cancelled(self): - """Actions in Pub/Sub generally may not be canceled. - - This method always returns False. - """ - return False - def running(self): - """Actions in Pub/Sub generally may not be canceled. + """Return ``True`` if the associated Pub/Sub action has not yet completed. - Returns: - bool: ``True`` if this method has not yet completed, or - ``False`` if it has completed. + Returns: bool: """ return not self.done() - def done(self): - """Return True the future is done, False otherwise. - - This still returns True in failure cases; checking :meth:`result` or - :meth:`exception` is the canonical way to assess success or failure. - """ - return self._exception != self._SENTINEL or self._result != self._SENTINEL - - def result(self, timeout=None): - """Resolve the future and return a value where appropriate. - - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - concurrent.futures.TimeoutError: If the request times out. - Exception: For undefined exceptions in the underlying - call execution. - """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._result - raise err - - def exception(self, timeout=None): - """Return the exception raised by the call, if any. 
- - Args: - timeout (Union[int, float]): The number of seconds before this call - times out and raises TimeoutError. - - Raises: - concurrent.futures.TimeoutError: If the request times out. - - Returns: - Exception: The exception raised by the call, if any. - """ - # Wait until the future is done. - if not self._completed.wait(timeout=timeout): - raise exceptions.TimeoutError("Timed out waiting for result.") - - # If the batch completed successfully, this should return None. - if self._result != self._SENTINEL: - return None - - # Okay, this batch had an error; this should return it. - return self._exception - - def add_done_callback(self, callback): - """Attach the provided callable to the future. - - The provided function is called, with this future as its only argument, - when the future finishes running. - - Args: - callback (Callable): The function to call. - - Returns: - None - """ - if self.done(): - return callback(self) - self._callbacks.append(callback) + def set_running_or_notify_cancel(self): + raise NotImplementedError( + "Only used by executors from `concurrent.futures` package." + ) def set_result(self, result): - """Set the result of the future to the provided result. + """Set the return value of work associated with the future. - Args: - result (Any): The result + Do not use this method, it should only be used internally by the library and its + unit tests. """ - # Sanity check: A future can only complete once. - if self.done(): - raise RuntimeError("set_result can only be called once.") - - # Set the result and trigger the future. - self._result = result - self._trigger() + return super().set_result(result=result) def set_exception(self, exception): - """Set the result of the future to the given exception. - - Args: - exception (:exc:`Exception`): The exception raised. - """ - # Sanity check: A future can only complete once. 
- if self.done(): - raise RuntimeError("set_exception can only be called once.") - - # Set the exception and trigger the future. - self._exception = exception - self._trigger() - - def _trigger(self): - """Trigger all callbacks registered to this Future. - - This method is called internally by the batch once the batch - completes. + """Set the result of the future as being the given exception. - Args: - message_id (str): The message ID, as a string. + Do not use this method, it should only be used internally by the library and its + unit tests. """ - self._completed.set() - for callback in self._callbacks: - callback(self) + return super().set_exception(exception=exception) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 3f9a17f740e6..b3936c2159da 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -380,7 +380,7 @@ def publish(self, message): # Track the future on this batch (so that the result of the # future can be set). - future = futures.Future(completed=threading.Event()) + future = futures.Future() self._futures.append(future) # Try to commit, but it must be **without** the lock held, since diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index fa8a79998617..04748e8542eb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -25,6 +25,20 @@ class Future(futures.Future): ID, unless an error occurs. """ + def cancel(self): + """Actions in Pub/Sub generally may not be canceled. + + This method always returns ``False``. 
+ """ + return False + + def cancelled(self): + """Actions in Pub/Sub generally may not be canceled. + + This method always returns ``False``. + """ + return False + def result(self, timeout=None): """Return the message ID or raise an exception. @@ -43,10 +57,4 @@ def result(self, timeout=None): Exception: For undefined exceptions in the underlying call execution. """ - # Attempt to get the exception if there is one. - # If there is not one, then we know everything worked, and we can - # return an appropriate value. - err = self.exception(timeout=timeout) - if err is None: - return self._result - raise err + return super().result(timeout=timeout) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index f9fdd76abc87..97a911076db4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -28,9 +28,9 @@ class StreamingPullFuture(futures.Future): def __init__(self, manager): super(StreamingPullFuture, self).__init__() - self._manager = manager - self._manager.add_close_callback(self._on_close_callback) - self._cancelled = False + self.__manager = manager + self.__manager.add_close_callback(self._on_close_callback) + self.__cancelled = False def _on_close_callback(self, manager, result): if self.done(): @@ -47,12 +47,14 @@ def cancel(self): """Stops pulling messages and shutdowns the background thread consuming messages. """ - self._cancelled = True - return self._manager.close() + # NOTE: We circumvent the base future's self._state to track the cancellation + # state, as this state has different meaning with streaming pull futures. + self.__cancelled = True + return self.__manager.close() def cancelled(self): """ returns: bool: ``True`` if the subscription has been cancelled. 
""" - return self._cancelled + return self.__cancelled diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py index eb32d05185b6..45bc48542479 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_futures_publisher.py @@ -20,6 +20,14 @@ class TestFuture(object): + def test_cancel(self): + future = futures.Future() + assert future.cancel() is False + + def test_cancelled(self): + future = futures.Future() + assert future.cancelled() is False + def test_result_on_success(self): future = futures.Future() future.set_result("570307942214048") diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 909337cc88c7..5411674c0082 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -31,13 +31,12 @@ def make_future(self): def test_default_state(self): future = self.make_future() + manager = future._StreamingPullFuture__manager assert future.running() assert not future.done() assert not future.cancelled() - future._manager.add_close_callback.assert_called_once_with( - future._on_close_callback - ) + manager.add_close_callback.assert_called_once_with(future._on_close_callback) def test__on_close_callback_success(self): future = self.make_future() @@ -71,8 +70,9 @@ def test__on_close_callback_future_already_done(self): def test_cancel(self): future = self.make_future() + manager = future._StreamingPullFuture__manager future.cancel() - future._manager.close.assert_called_once() + manager.close.assert_called_once() assert future.cancelled() diff --git 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index dbeb7b343734..7624c9212cda 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -137,7 +137,8 @@ def test_subscribe(manager_open, creds): future = client.subscribe("sub_name_a", callback=mock.sentinel.callback) assert isinstance(future, futures.StreamingPullFuture) - assert future._manager._subscription == "sub_name_a" + manager = future._StreamingPullFuture__manager + assert manager._subscription == "sub_name_a" manager_open.assert_called_once_with( mock.ANY, callback=mock.sentinel.callback, @@ -164,10 +165,11 @@ def test_subscribe_options(manager_open, creds): ) assert isinstance(future, futures.StreamingPullFuture) - assert future._manager._subscription == "sub_name_a" - assert future._manager.flow_control == flow_control - assert future._manager._scheduler == scheduler - assert future._manager._await_callbacks_on_shutdown is mock.sentinel.await_callbacks + manager = future._StreamingPullFuture__manager + assert manager._subscription == "sub_name_a" + assert manager.flow_control == flow_control + assert manager._scheduler == scheduler + assert manager._await_callbacks_on_shutdown is mock.sentinel.await_callbacks manager_open.assert_called_once_with( mock.ANY, callback=mock.sentinel.callback, diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py index 11349d5d480a..2b26289c49e8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py @@ -12,7 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import concurrent.futures +import sys import threading +import time import mock import pytest @@ -25,36 +28,6 @@ def _future(*args, **kwargs): return futures.Future(*args, **kwargs) -def test_constructor_defaults(): - with mock.patch.object(threading, "Event", autospec=True) as Event: - future = _future() - - assert future._result == futures.Future._SENTINEL - assert future._exception == futures.Future._SENTINEL - assert future._callbacks == [] - assert future._completed is Event.return_value - - Event.assert_called_once_with() - - -def test_constructor_explicit_completed(): - completed = mock.sentinel.completed - future = _future(completed=completed) - - assert future._result == futures.Future._SENTINEL - assert future._exception == futures.Future._SENTINEL - assert future._callbacks == [] - assert future._completed is completed - - -def test_cancel(): - assert _future().cancel() is False - - -def test_cancelled(): - assert _future().cancelled() is False - - def test_running(): future = _future() assert future.running() is True @@ -112,8 +85,8 @@ def test_add_done_callback_pending_batch(): future = _future() callback = mock.Mock() future.add_done_callback(callback) - assert len(future._callbacks) == 1 - assert callback in future._callbacks + assert len(future._done_callbacks) == 1 + assert callback in future._done_callbacks assert callback.call_count == 0 @@ -134,15 +107,76 @@ def test_trigger(): callback.assert_called_once_with(future) +def test_set_running_or_notify_cancel_not_implemented_error(): + future = _future() + with pytest.raises(NotImplementedError) as exc_info: + future.set_running_or_notify_cancel() + + assert exc_info.value.args + error_msg = exc_info.value.args[0] + assert "used by executors" in error_msg + assert "concurrent.futures" in error_msg + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="InvalidStateError is only available in Python 3.8+", +) def test_set_result_once_only(): future = _future() future.set_result("12345") - 
 with pytest.raises(RuntimeError): + with pytest.raises(concurrent.futures.InvalidStateError): future.set_result("67890") +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="InvalidStateError is only available in Python 3.8+", +) def test_set_exception_once_only(): future = _future() future.set_exception(ValueError("wah wah")) - with pytest.raises(RuntimeError): + with pytest.raises(concurrent.futures.InvalidStateError): future.set_exception(TypeError("other wah wah")) + + +def test_as_completed_compatibility(): + all_futures = {i: _future() for i in range(6)} + done_futures = [] + + def resolve_future(future_idx, delay=0): + time.sleep(delay) + future = all_futures[future_idx] + if future_idx % 2 == 0: + future.set_result(f"{future_idx}: I'm done!") + else: + future.set_exception(Exception(f"Future {future_idx} errored")) + + all_futures[2].set_result("2: I'm done!") + + # Start marking the futures as completed (either with success or error) at + # different times and check that the "as completed" order is correct. + for future_idx, delay in ((0, 0.8), (3, 0.6), (1, 0.4), (5, 0.2)): + threading.Thread( + target=resolve_future, args=(future_idx, delay), daemon=True + ).start() + + try: + # Use a loop instead of a list comprehension to gather futures completed + # before the timeout error occurs. + for future in concurrent.futures.as_completed(all_futures.values(), timeout=1): + done_futures.append(future) + except concurrent.futures.TimeoutError: + pass + else: # pragma: NO COVER + pytest.fail("Not all Futures should have been recognized as completed.") + + # NOTE: Future 4 was never resolved. 
+ expected = [ + all_futures[2], + all_futures[5], + all_futures[1], + all_futures[3], + all_futures[0], + ] + assert done_futures == expected From 3f857686cf4b49f364f6e556a983b4d4e3c07f39 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 18 May 2021 13:38:05 +0200 Subject: [PATCH 0669/1197] fix: scheduler errors when executor in shutdown (#399) --- .../cloud/pubsub_v1/subscriber/scheduler.py | 12 ++++- .../pubsub_v1/subscriber/test_scheduler.py | 46 +++++++++++++++++++ 2 files changed, 57 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index a11ca490b590..b8f2b592cc73 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -21,6 +21,7 @@ import abc import concurrent.futures import queue +import warnings class Scheduler(metaclass=abc.ABCMeta): @@ -114,7 +115,14 @@ def schedule(self, callback, *args, **kwargs): Returns: None """ - self._executor.submit(callback, *args, **kwargs) + try: + self._executor.submit(callback, *args, **kwargs) + except RuntimeError: + warnings.warn( + "Scheduling a callback after executor shutdown.", + category=RuntimeWarning, + stacklevel=2, + ) def shutdown(self, await_msg_callbacks=False): """Shut down the scheduler and immediately end all pending callbacks. @@ -142,6 +150,8 @@ def shutdown(self, await_msg_callbacks=False): try: while True: work_item = self._executor._work_queue.get(block=False) + if work_item is None: # Executor in shutdown mode. 
+ continue dropped_messages.append(work_item.args[0]) except queue.Empty: pass diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 82a6719d74c3..0545c967c41b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -16,6 +16,7 @@ import queue import threading import time +import warnings import mock @@ -61,6 +62,24 @@ def callback(*args, **kwargs): assert sorted(called_with) == expected_calls +def test_schedule_after_executor_shutdown_warning(): + def callback(*args, **kwargs): + pass + + scheduler_ = scheduler.ThreadScheduler() + + scheduler_.schedule(callback, "arg1", kwarg1="meep") + scheduler_._executor.shutdown() + + with warnings.catch_warnings(record=True) as warned: + scheduler_.schedule(callback, "arg2", kwarg2="boop") + + assert len(warned) == 1 + assert issubclass(warned[0].category, RuntimeWarning) + warning_msg = str(warned[0].message) + assert "after executor shutdown" in warning_msg + + def test_shutdown_nonblocking_by_default(): called_with = [] at_least_one_called = threading.Event() @@ -125,3 +144,30 @@ def callback(message): err_msg = "Shutdown did not wait for the already running callbacks to complete." assert at_least_one_completed.is_set(), err_msg + + +def test_shutdown_handles_executor_queue_sentinels(): + at_least_one_called = threading.Event() + + def callback(_): + at_least_one_called.set() + time.sleep(1.0) + + executor = concurrent.futures.ThreadPoolExecutor(max_workers=1) + scheduler_ = scheduler.ThreadScheduler(executor=executor) + + scheduler_.schedule(callback, "message_1") + scheduler_.schedule(callback, "message_2") + scheduler_.schedule(callback, "message_3") + + # Simulate executor shutdown from another thread. 
+ executor._work_queue.put(None) + executor._work_queue.put(None) + + at_least_one_called.wait() + dropped = scheduler_.shutdown(await_msg_callbacks=True) + + assert len(set(dropped)) == 2 # Also test for item uniqueness. + for msg in dropped: + assert msg is not None + assert msg.startswith("message_") From a73a37f84e97cf56593e0e08871efdc59e69899b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 18 May 2021 14:10:05 +0000 Subject: [PATCH 0670/1197] chore: release 2.5.0 (#412) :robot: I have created a release \*beep\* \*boop\* --- ## [2.5.0](https://www.github.com/googleapis/python-pubsub/compare/v2.4.2...v2.5.0) (2021-05-18) ### Features * make publish futures compatible with concurrent.futures.as_completed() ([#397](https://www.github.com/googleapis/python-pubsub/issues/397)) ([e29a2c0](https://www.github.com/googleapis/python-pubsub/commit/e29a2c0ac6c5d2ebf2311646e552a02f184cfedc)) ### Bug Fixes * scheduler errors when executor in shutdown ([#399](https://www.github.com/googleapis/python-pubsub/issues/399)) ([39a83d3](https://www.github.com/googleapis/python-pubsub/commit/39a83d3eef196e88478ad8362201a2ab12e9f681)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index b040b2f5f55e..ae2f35fd612c 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.5.0](https://www.github.com/googleapis/python-pubsub/compare/v2.4.2...v2.5.0) (2021-05-18) + + +### Features + +* Make publish futures compatible with `concurrent.futures.as_completed()`. ([#397](https://www.github.com/googleapis/python-pubsub/issues/397)) ([e29a2c0](https://www.github.com/googleapis/python-pubsub/commit/e29a2c0ac6c5d2ebf2311646e552a02f184cfedc)) + + +### Bug Fixes + +* Scheduler errors when executor in shutdown. ([#399](https://www.github.com/googleapis/python-pubsub/issues/399)) ([39a83d3](https://www.github.com/googleapis/python-pubsub/commit/39a83d3eef196e88478ad8362201a2ab12e9f681)) + ## 2.4.2 05-06-2021 23:50 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b4a3374c8ff9..4d459063e786 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.4.2" +version = "2.5.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 3860043ade30ce542df9399061d049676d1d926b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 19 May 2021 18:04:18 +0200 Subject: [PATCH 0671/1197] chore(deps): update dependency google-cloud-pubsub to v2.5.0 (#415) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt 
b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index e45aa73e94a8..aaf5991b51df 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.4.2 +google-cloud-pubsub==2.5.0 avro==1.10.2 From 76ba811df8882d00a07fa4fd8b085c8ed9463b5d Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 26 May 2021 16:28:02 +0200 Subject: [PATCH 0672/1197] fix: ACK deadline set for received messages can be too low (#416) Fixes #413. This PR makes sure that the ACK deadline set for the received messages is always consistent with what the leaser uses internally when extending the ACK deadlines for the leased messages. See the issue description and a [comment](https://github.com/googleapis/python-pubsub/issues/413#issuecomment-844995852) explaining a possible sequence of events that lead to a bug. **PR checklist** - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- .../subscriber/_protocol/histogram.py | 37 ++--- .../pubsub_v1/subscriber/_protocol/leaser.py | 4 +- .../_protocol/streaming_pull_manager.py | 63 ++++++--- .../google/cloud/pubsub_v1/types.py | 3 +- .../pubsub_v1/subscriber/test_histogram.py | 19 +-- .../unit/pubsub_v1/subscriber/test_leaser.py | 2 +- .../subscriber/test_streaming_pull_manager.py | 127 +++++++++++++++--- 7 files changed, 191 insertions(+), 64 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py index 29ee6fc61685..0a4a81746bb8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py @@ -15,6 +15,10 @@ from __future__ import absolute_import, division +MIN_ACK_DEADLINE = 10 +MAX_ACK_DEADLINE = 600 + + class Histogram(object): """Representation of a single histogram. @@ -27,8 +31,9 @@ class Histogram(object): are free to use a different formula. The precision of data stored is to the nearest integer. Additionally, - values outside the range of ``10 <= x <= 600`` are stored as ``10`` or - ``600``, since these are the boundaries of leases in the actual API. + values outside the range of ``MIN_ACK_DEADLINE <= x <= MAX_ACK_DEADLINE`` are stored + as ``MIN_ACK_DEADLINE`` or ``MAX_ACK_DEADLINE``, since these are the boundaries of + leases in the actual API. """ def __init__(self, data=None): @@ -83,41 +88,43 @@ def __repr__(self): def max(self): """Return the maximum value in this histogram. - If there are no values in the histogram at all, return 600. 
+ If there are no values in the histogram at all, return ``MAX_ACK_DEADLINE``. Returns: int: The maximum value in the histogram. """ if len(self._data) == 0: - return 600 + return MAX_ACK_DEADLINE return next(iter(reversed(sorted(self._data.keys())))) @property def min(self): """Return the minimum value in this histogram. - If there are no values in the histogram at all, return 10. + If there are no values in the histogram at all, return ``MIN_ACK_DEADLINE``. Returns: int: The minimum value in the histogram. """ if len(self._data) == 0: - return 10 + return MIN_ACK_DEADLINE return next(iter(sorted(self._data.keys()))) def add(self, value): """Add the value to this histogram. Args: - value (int): The value. Values outside of ``10 <= x <= 600`` - will be raised to ``10`` or reduced to ``600``. + value (int): The value. Values outside of + ``MIN_ACK_DEADLINE <= x <= MAX_ACK_DEADLINE`` + will be raised to ``MIN_ACK_DEADLINE`` or reduced to + ``MAX_ACK_DEADLINE``. """ # If the value is out of bounds, bring it in bounds. value = int(value) - if value < 10: - value = 10 - if value > 600: - value = 600 + if value < MIN_ACK_DEADLINE: + value = MIN_ACK_DEADLINE + elif value > MAX_ACK_DEADLINE: + value = MAX_ACK_DEADLINE # Add the value to the histogram's data dictionary. self._data.setdefault(value, 0) @@ -129,7 +136,7 @@ def percentile(self, percent): Args: percent (Union[int, float]): The precentile being sought. The - default consumer implementations use consistently use ``99``. + default consumer implementations consistently use ``99``. Returns: int: The value corresponding to the requested percentile. @@ -150,5 +157,5 @@ def percentile(self, percent): return k # The only way to get here is if there was no data. - # In this case, just return 10 seconds. - return 10 + # In this case, just return the shortest possible deadline. 
+ return MIN_ACK_DEADLINE diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index c1f8b46d29a7..8fd067aaf6d4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -128,7 +128,9 @@ def maintain_leases(self): # Determine the appropriate duration for the lease. This is # based off of how long previous messages have taken to ack, with # a sensible default and within the ranges allowed by Pub/Sub. - deadline = self._manager.ack_deadline + # Also update the deadline currently used if enough new ACK data has been + # gathered since the last deadline update. + deadline = self._manager._obtain_ack_deadline(maybe_update=True) _LOGGER.debug("The current deadline value is %d seconds.", deadline) # Make a copy of the leased messages. This is needed because it's diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 2112ce0db65a..da027fcbed1e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -143,7 +143,7 @@ def __init__( self._await_callbacks_on_shutdown = await_callbacks_on_shutdown self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 - self._ack_deadline = 10 + self._ack_deadline = histogram.MIN_ACK_DEADLINE self._rpc = None self._callback = None self._closing = threading.Lock() @@ -179,6 +179,11 @@ def __init__( # currently on hold. self._pause_resume_lock = threading.Lock() + # A lock protecting the current ACK deadline used in the lease management. 
This + # value can be potentially updated both by the leaser thread and by the message + # consumer thread when invoking the internal _on_response() callback. + self._ack_deadline_lock = threading.Lock() + # The threads created in ``.open()``. self._dispatcher = None self._leaser = None @@ -223,29 +228,49 @@ def ack_histogram(self): @property def ack_deadline(self): - """Return the current ack deadline based on historical time-to-ack. - - This method is "sticky". It will only perform the computations to - check on the right ack deadline if the histogram has gained a - significant amount of new information. + """Return the current ACK deadline based on historical data without updating it. Returns: int: The ack deadline. """ - target_size = min( - self._last_histogram_size * 2, self._last_histogram_size + 100 - ) - hist_size = len(self.ack_histogram) + return self._obtain_ack_deadline(maybe_update=False) + + def _obtain_ack_deadline(self, maybe_update: bool) -> int: + """The actual `ack_deadline` implementation. + + This method is "sticky". It will only perform the computations to check on the + right ACK deadline if explicitly requested AND if the histogram with past + time-to-ack data has gained a significant amount of new information. + + Args: + maybe_update (bool): + If ``True``, also update the current ACK deadline before returning it if + enough new ACK data has been gathered. - if hist_size > target_size: - self._last_histogram_size = hist_size - self._ack_deadline = self.ack_histogram.percentile(percent=99) + Returns: + int: The current ACK deadline in seconds to use. 
+ """ + with self._ack_deadline_lock: + if not maybe_update: + return self._ack_deadline - if self.flow_control.max_duration_per_lease_extension > 0: - self._ack_deadline = min( - self._ack_deadline, self.flow_control.max_duration_per_lease_extension + target_size = min( + self._last_histogram_size * 2, self._last_histogram_size + 100 ) - return self._ack_deadline + hist_size = len(self.ack_histogram) + + if hist_size > target_size: + self._last_histogram_size = hist_size + self._ack_deadline = self.ack_histogram.percentile(percent=99) + + if self.flow_control.max_duration_per_lease_extension > 0: + # The setting in flow control could be too low, adjust if needed. + flow_control_setting = max( + self.flow_control.max_duration_per_lease_extension, + histogram.MIN_ACK_DEADLINE, + ) + self._ack_deadline = min(self._ack_deadline, flow_control_setting) + return self._ack_deadline @property def load(self): @@ -490,7 +515,7 @@ def open(self, callback, on_callback_error): ) # Create the RPC - stream_ack_deadline_seconds = self.ack_histogram.percentile(99) + stream_ack_deadline_seconds = self.ack_deadline get_initial_request = functools.partial( self._get_initial_request, stream_ack_deadline_seconds @@ -688,7 +713,7 @@ def _on_response(self, response): # modack the messages we received, as this tells the server that we've # received them. 
items = [ - requests.ModAckRequest(message.ack_id, self._ack_histogram.percentile(99)) + requests.ModAckRequest(message.ack_id, self.ack_deadline) for message in received_messages ] self._dispatcher.modify_ack_deadline(items) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 677e4774fd84..d72541a3b57b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -152,7 +152,8 @@ class LimitExceededBehavior(str, enum.Enum): FlowControl.max_duration_per_lease_extension.__doc__ = ( "The max amount of time in seconds for a single lease extension attempt. " "Bounds the delay before a message redelivery if the subscriber " - "fails to extend the deadline." + "fails to extend the deadline. Must be between 10 and 600 (inclusive). Ignored " + "if set to 0." ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py index d3c15cdcee9c..aacdc305044f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_histogram.py @@ -33,7 +33,7 @@ def test_contains(): def test_max(): histo = histogram.Histogram() - assert histo.max == 600 + assert histo.max == histogram.MAX_ACK_DEADLINE histo.add(120) assert histo.max == 120 histo.add(150) @@ -44,7 +44,7 @@ def test_max(): def test_min(): histo = histogram.Histogram() - assert histo.min == 10 + assert histo.min == histogram.MIN_ACK_DEADLINE histo.add(60) assert histo.min == 60 histo.add(30) @@ -63,20 +63,23 @@ def test_add(): def test_add_lower_limit(): histo = histogram.Histogram() - histo.add(5) - assert 5 not in histo - assert 10 in histo + low_value = histogram.MIN_ACK_DEADLINE - 1 + histo.add(low_value) + assert low_value not in histo + 
assert histogram.MIN_ACK_DEADLINE in histo def test_add_upper_limit(): histo = histogram.Histogram() - histo.add(12000) - assert 12000 not in histo - assert 600 in histo + high_value = histogram.MAX_ACK_DEADLINE + 1 + histo.add(high_value) + assert high_value not in histo + assert histogram.MAX_ACK_DEADLINE in histo def test_percentile(): histo = histogram.Histogram() + assert histo.percentile(42) == histogram.MIN_ACK_DEADLINE # default when empty [histo.add(i) for i in range(101, 201)] assert histo.percentile(100) == 200 assert histo.percentile(101) == 200 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 2ecc0b9f3ce1..f389e5205fe7 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -84,7 +84,7 @@ def create_manager(flow_control=types.FlowControl()): manager.is_active = True manager.flow_control = flow_control manager.ack_histogram = histogram.Histogram() - manager.ack_deadline = 10 + manager._obtain_ack_deadline.return_value = 10 return manager diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 25ab4f0ae343..8e4f6daf0852 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -139,13 +139,78 @@ def fake_add(self, items): leaser.add = stdlib_types.MethodType(fake_add, leaser) -def test_ack_deadline(): +def test__obtain_ack_deadline_no_custom_flow_control_setting(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + manager = make_manager() - assert manager.ack_deadline == 10 - manager.ack_histogram.add(20) 
- assert manager.ack_deadline == 20 - manager.ack_histogram.add(10) - assert manager.ack_deadline == 20 + + # Make sure that max_duration_per_lease_extension is disabled. + manager._flow_control = types.FlowControl(max_duration_per_lease_extension=0) + + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + + # When we get some historical data, the deadline is adjusted. + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 2) + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE * 2 + + # Adding just a single additional data point does not yet change the deadline. + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE) + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE * 2 + + +def test__obtain_ack_deadline_with_max_duration_per_lease_extension(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + max_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE + 1 + ) + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 3) # make p99 value large + + # The deadline configured in flow control should prevail. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + 1 + + +def test__obtain_ack_deadline_with_max_duration_per_lease_extension_too_low(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + max_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE - 1 + ) + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 3) # make p99 value large + + # The deadline configured in flow control should be adjusted to the minimum allowed. 
+ deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + + +def test__obtain_ack_deadline_no_value_update(): + manager = make_manager() + + # Make sure that max_duration_per_lease_extension is disabled. + manager._flow_control = types.FlowControl(max_duration_per_lease_extension=0) + + manager.ack_histogram.add(21) + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == 21 + + for _ in range(5): + manager.ack_histogram.add(35) # Gather some new ACK data. + + deadline = manager._obtain_ack_deadline(maybe_update=False) + assert deadline == 21 # still the same + + # Accessing the value through the ack_deadline property has no side effects either. + assert manager.ack_deadline == 21 + + # Updating the ack deadline is reflected on ack_deadline wrapper, too. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert manager.ack_deadline == deadline == 35 def test_client_id(): @@ -181,17 +246,6 @@ def test_streaming_flow_control_use_legacy_flow_control(): assert request.max_outstanding_bytes == 0 -def test_ack_deadline_with_max_duration_per_lease_extension(): - manager = make_manager() - manager._flow_control = types.FlowControl(max_duration_per_lease_extension=5) - - assert manager.ack_deadline == 5 - for _ in range(5): - manager.ack_histogram.add(20) - - assert manager.ack_deadline == 5 - - def test_maybe_pause_consumer_wo_consumer_set(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) @@ -476,7 +530,10 @@ def test_heartbeat_inactive(): def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): manager = make_manager() - manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) + with mock.patch.object( + type(manager), "ack_deadline", new=mock.PropertyMock(return_value=18) + ): + manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) heartbeater.assert_called_once_with(manager) 
+    # Actually run the method and check that the correct MODACK value is used.
+ with mock.patch.object( + type(manager), "ack_deadline", new=mock.PropertyMock(return_value=18) + ): + manager._on_response(response) + + dispatcher.modify_ack_deadline.assert_called_once_with( + [requests.ModAckRequest("ack_1", 18), requests.ModAckRequest("ack_2", 18)] + ) + + def test__on_response_no_leaser_overload(): manager, _, dispatcher, leaser, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback From cf846957b820d767b26596a19a7ff4c075067045 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 11 Jun 2021 07:12:14 +0200 Subject: [PATCH 0673/1197] docs: explain that future.cancel() is non-blocking (#420) --- .../google/cloud/pubsub_v1/subscriber/futures.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 97a911076db4..18298b956313 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -46,6 +46,11 @@ def _on_close_callback(self, manager, result): def cancel(self): """Stops pulling messages and shutdowns the background thread consuming messages. + + .. versionchanged:: 2.4.1 + The method does not block anymore, it just triggers the shutdown and returns + immediately. To block until the background stream is terminated, call + :meth:`result()` after cancelling the future. """ # NOTE: We circumvent the base future's self._state to track the cancellation # state, as this state has different meaning with streaming pull futures. 
From 1f3ad4c68c43ecfeecbe70dc6660400c135c0e96 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 11 Jun 2021 11:12:37 -0700 Subject: [PATCH 0674/1197] test: presubmit against Lite samples (#425) * test: presubmit against Lite samples * mark new presubmit test required --- .../.github/sync-repo-settings.yaml | 1 + .../presubmit-against-pubsublite-samples.sh | 105 ++++++++++++++++++ .../.kokoro/presubmit/common.cfg | 8 +- .../presubmit-against-pubsublite-samples.cfg | 11 ++ .../.kokoro/presubmit/presubmit.cfg | 7 +- 5 files changed, 128 insertions(+), 4 deletions(-) create mode 100755 packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh create mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index af59935321a9..109a0b4ef249 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -6,6 +6,7 @@ branchProtectionRules: - pattern: master requiredStatusCheckContexts: - 'Kokoro' + - 'Kokoro - Against Pub/Sub Lite samples' - 'cla/google' - 'Samples - Lint' - 'Samples - Python 3.6' diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh new file mode 100755 index 000000000000..0962f2c812e7 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh @@ -0,0 +1,105 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+RTN=0 +ROOT=$(pwd) + +# Clone googleapis/python-pubsublite +git clone https://github.com/googleapis/python-pubsublite.git + +# Find all requirements.txt in the Pub/Sub Lite samples directory (may break on whitespace). +for file in python-pubsublite/samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use pytest to execute tests for py-3.6 + python3.6 -m venv py-3.6 + source py-3.6/bin/activate + # Install python-pubsublite samples tests requirements. + python -m pip install -r requirements.txt -q + python -m pip install -r requirements-test.txt -q + # Install python-pubsub from source. + python -m pip install -e "$ROOT" -q + python -m pytest quickstart_test.py + deactivate py-3.6 + rm -rf py-3.6/ + + EXIT=$? + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. 
\n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg index a812af888d39..7d78fd1f769d 100644 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg @@ -21,7 +21,9 @@ env_vars: { key: "TRAMPOLINE_IMAGE" value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" } + +# Obtain environment variables for running Pub/Sub Lite samples tests env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/build.sh" -} + key: "SECRET_MANAGER_KEYS" + value: "python-docs-samples-test-env" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg new file mode 100644 index 000000000000..0ad289456ff7 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg index 8f43917d92fe..9b689c788b9b 100644 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg @@ -1 +1,6 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file +# Format: 
//devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/build.sh" +} \ No newline at end of file From c2cbf91a07f31f5e7f487b8eca7a05c373de0904 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 14 Jun 2021 10:12:02 +0200 Subject: [PATCH 0675/1197] docs: block until the streaming pull shuts down (#424) Fixes #423. If subscriber client is used as a context manager, we need to block until the shutdown is complete before leaving the `with` block. See the issue description for more details. **PR checklist:** - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- .../google/cloud/pubsub_v1/subscriber/client.py | 3 ++- .../samples/snippets/quickstart/sub.py | 3 ++- .../samples/snippets/schema.py | 6 ++++-- .../samples/snippets/subscriber.py | 15 ++++++++++----- 4 files changed, 18 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 376530caaf8a..567840859404 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -188,7 +188,8 @@ def callback(message): try: future.result() except KeyboardInterrupt: - future.cancel() + future.cancel() # Trigger the shutdown. + future.result() # Block until the shutdown is complete. Args: subscription (str): The name of the subscription. 
The diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 0a7576e23bc8..7a5732d20464 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -43,7 +43,8 @@ def callback(message): # exiting while messages get processed in the callbacks. streaming_pull_future.result(timeout=timeout) except: # noqa - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. subscriber_client.close() diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 37f9bba55427..92c56d9acc5b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -343,7 +343,8 @@ def callback(message): # unless an exception occurs first. streaming_pull_future.result(timeout=timeout) except TimeoutError: - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. # [END pubsub_subscribe_avro_records] @@ -393,7 +394,8 @@ def callback(message): # unless an exception occurs first. streaming_pull_future.result(timeout=timeout) except TimeoutError: - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. 
# [END pubsub_subscribe_proto_messages] diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 112c5a96af30..d01860cf851a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -397,7 +397,8 @@ def callback(message): # unless an exception is encountered first. streaming_pull_future.result(timeout=timeout) except TimeoutError: - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. # [END pubsub_subscriber_async_pull] # [END pubsub_quickstart_subscriber] @@ -436,7 +437,8 @@ def callback(message): # unless an exception is encountered first. streaming_pull_future.result(timeout=timeout) except TimeoutError: - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. # [END pubsub_subscriber_async_pull_custom_attributes] @@ -474,7 +476,8 @@ def callback(message): # unless an exception is encountered first. streaming_pull_future.result(timeout=timeout) except TimeoutError: - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. # [END pubsub_subscriber_flow_settings] @@ -663,10 +666,11 @@ def callback(message): try: streaming_pull_future.result(timeout=timeout) except Exception as e: - streaming_pull_future.cancel() print( f"Listening for messages on {subscription_path} threw an exception: {e}." ) + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. 
# [END pubsub_subscriber_error_listener] @@ -697,7 +701,8 @@ def callback(message): try: streaming_pull_future.result(timeout=timeout) except TimeoutError: - streaming_pull_future.cancel() + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. # [END pubsub_dead_letter_delivery_attempt] From 5245b0a920696c0e668d2b36d40a7c9c7f2f1404 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 15 Jun 2021 09:58:44 -0700 Subject: [PATCH 0676/1197] test: gather status code after pytest (#427) --- .../.kokoro/presubmit-against-pubsublite-samples.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh index 0962f2c812e7..a93980bc36d0 100755 --- a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh @@ -84,11 +84,11 @@ for file in python-pubsublite/samples/**/requirements.txt; do # Install python-pubsub from source. python -m pip install -e "$ROOT" -q python -m pytest quickstart_test.py + EXIT=$? + deactivate py-3.6 rm -rf py-3.6/ - EXIT=$? - if [[ $EXIT -ne 0 ]]; then RTN=1 echo -e "\n Testing failed: Nox returned a non-zero exit code. 
\n" From 00b202e953753648adb3b0c2220cdcddf86869ea Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 15 Jun 2021 20:02:16 +0200 Subject: [PATCH 0677/1197] feat: support customizable retry and timeout settings on the publisher client (#299) * feat: allow retry and timeout settings on publisher client * build: update generated code and update doc * Propagate publish timeout to the RPC call * test publisher client * Fix timeout parameter type in docstrings * Apply timeout changes to async publisher client * Introduce TimeoutType type alias * Update PublisherOptions docs * Use type alias for timeout in upgrading guide * Widen timeout types in generated publisher clients * Use TimeoutType in handwritten code * Remove redundant backslash * Use DEFAULT as a defualt timeout in base sequencer * Do not accept plain None as a valid timeout Using no timeout is not a good idea, but if one really wants to, they can pass it in as ConstantTimeout(None). As a side effect, the logic of converting a constant into a COnstantTimeout instance can be removed, as this is already handled in api-core for int and float values. 
As a side effect, the logic of converting a constant into a ConstantTimeout instance can be removed, as this is already handled in api-core for int and float values.
errors, if any, should be retried when committing
""" def __init__( @@ -83,6 +86,7 @@ def __init__( batch_done_callback=None, commit_when_full=True, commit_retry=gapic_v1.method.DEFAULT, + commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, ): self._client = client self._topic = topic @@ -106,6 +110,7 @@ def __init__( self._size = self._base_request_size self._commit_retry = commit_retry + self._commit_timeout = commit_timeout @staticmethod def make_lock(): @@ -261,7 +266,10 @@ def _commit(self): try: # Performs retries for errors defined by the retry configuration. response = self._client.api.publish( - topic=self._topic, messages=self._messages, retry=self._commit_retry + topic=self._topic, + messages=self._messages, + retry=self._commit_retry, + timeout=self._commit_timeout, ) except google.api_core.exceptions.GoogleAPIError as exc: # We failed to publish, even after retries, so set the exception on diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index c14b2975d055..60a7d269ce4f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -16,6 +16,9 @@ import abc +from google.api_core import gapic_v1 +from google.pubsub_v1 import types as gapic_types + class Sequencer(metaclass=abc.ABCMeta): """The base class for sequencers for Pub/Sub publishing. A sequencer @@ -45,7 +48,12 @@ def unpause(self, message): # pragma: NO COVER @staticmethod @abc.abstractmethod - def publish(self, message, retry=None): # pragma: NO COVER + def publish( + self, + message, + retry=None, + timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + ): # pragma: NO COVER """ Publish message for this ordering key. Args: @@ -53,6 +61,8 @@ def publish(self, message, retry=None): # pragma: NO COVER The Pub/Sub message. 
retry (Optional[google.api_core.retry.Retry]): The retry settings to apply when publishing the message. + timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply when publishing the message. Returns: A class instance that conforms to Python Standard library's diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index f7c0be08447b..83dd0c921268 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -21,6 +21,7 @@ from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base from google.cloud.pubsub_v1.publisher._batch import base as batch_base +from google.pubsub_v1 import types as gapic_types class _OrderedSequencerStatus(str, enum.Enum): @@ -226,13 +227,19 @@ def unpause(self): raise RuntimeError("Ordering key is not paused.") self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES - def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): + def _create_batch( + self, + commit_retry=gapic_v1.method.DEFAULT, + commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + ): """ Create a new batch using the client's batch class and other stored settings. Args: commit_retry (Optional[google.api_core.retry.Retry]): The retry settings to apply when publishing the batch. + commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply when publishing the batch. 
""" return self._client._batch_class( client=self._client, @@ -241,9 +248,15 @@ def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): batch_done_callback=self._batch_done_callback, commit_when_full=False, commit_retry=commit_retry, + commit_timeout=commit_timeout, ) - def publish(self, message, retry=gapic_v1.method.DEFAULT): + def publish( + self, + message, + retry=gapic_v1.method.DEFAULT, + timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + ): """ Publish message for this ordering key. Args: @@ -251,6 +264,8 @@ def publish(self, message, retry=gapic_v1.method.DEFAULT): The Pub/Sub message. retry (Optional[google.api_core.retry.Retry]): The retry settings to apply when publishing the message. + timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply when publishing the message. Returns: A class instance that conforms to Python Standard library's @@ -287,13 +302,15 @@ def publish(self, message, retry=gapic_v1.method.DEFAULT): ), "Publish is only allowed in accepting-messages state." 
if not self._ordered_batches: - new_batch = self._create_batch(commit_retry=retry) + new_batch = self._create_batch( + commit_retry=retry, commit_timeout=timeout + ) self._ordered_batches.append(new_batch) batch = self._ordered_batches[-1] future = batch.publish(message) while future is None: - batch = self._create_batch(commit_retry=retry) + batch = self._create_batch(commit_retry=retry, commit_timeout=timeout) self._ordered_batches.append(batch) future = batch.publish(message) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index d343ed945b1b..76dd1cad72b9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -15,6 +15,7 @@ from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher._sequencer import base +from google.pubsub_v1 import types as gapic_types class UnorderedSequencer(base.Sequencer): @@ -77,13 +78,19 @@ def unpause(self): """ Not relevant for this class. """ raise NotImplementedError - def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): + def _create_batch( + self, + commit_retry=gapic_v1.method.DEFAULT, + commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + ): """ Create a new batch using the client's batch class and other stored settings. Args: commit_retry (Optional[google.api_core.retry.Retry]): The retry settings to apply when publishing the batch. + commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply when publishing the batch. 
""" return self._client._batch_class( client=self._client, @@ -92,9 +99,15 @@ def _create_batch(self, commit_retry=gapic_v1.method.DEFAULT): batch_done_callback=None, commit_when_full=True, commit_retry=commit_retry, + commit_timeout=commit_timeout, ) - def publish(self, message, retry=gapic_v1.method.DEFAULT): + def publish( + self, + message, + retry=gapic_v1.method.DEFAULT, + timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + ): """ Batch message into existing or new batch. Args: @@ -102,6 +115,8 @@ def publish(self, message, retry=gapic_v1.method.DEFAULT): The Pub/Sub message. retry (Optional[google.api_core.retry.Retry]): The retry settings to apply when publishing the message. + timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply when publishing the message. Returns: ~google.api_core.future.Future: An object conforming to @@ -119,7 +134,7 @@ def publish(self, message, retry=gapic_v1.method.DEFAULT): raise RuntimeError("Unordered sequencer already stopped.") if not self._current_batch: - newbatch = self._create_batch(commit_retry=retry) + newbatch = self._create_batch(commit_retry=retry, commit_timeout=timeout) self._current_batch = newbatch batch = self._current_batch @@ -129,7 +144,7 @@ def publish(self, message, retry=gapic_v1.method.DEFAULT): future = batch.publish(message) # batch is full, triggering commit_when_full if future is None: - batch = self._create_batch(commit_retry=retry) + batch = self._create_batch(commit_retry=retry, commit_timeout=timeout) # At this point, we lose track of the old batch, but we don't # care since it's already committed (because it was full.) 
self._current_batch = batch diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 4703cc3c456f..e358326530e2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -230,7 +230,13 @@ def resume_publish(self, topic, ordering_key): sequencer.unpause() def publish( - self, topic, data, ordering_key="", retry=gapic_v1.method.DEFAULT, **attrs + self, + topic, + data, + ordering_key="", + retry=gapic_v1.method.DEFAULT, + timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + **attrs ): """Publish a single message. @@ -269,6 +275,12 @@ def publish( retry (Optional[google.api_core.retry.Retry]): Designation of what errors, if any, should be retried. If `ordering_key` is specified, the total retry deadline will be changed to "infinity". + If given, it overides any retry passed into the client through + the ``publisher_options`` argument. + timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout for the RPC request. Can be used to override any timeout + passed in through ``publisher_options`` when instantiating the client. + attrs (Mapping[str, str]): A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) @@ -331,6 +343,12 @@ def publish( def on_publish_done(future): self._flow_controller.release(message) + if retry is gapic_v1.method.DEFAULT: # if custom retry not passed in + retry = self.publisher_options.retry + + if timeout is gapic_v1.method.DEFAULT: # if custom timeout not passed in + timeout = self.publisher_options.timeout + with self._batch_lock: if self._is_stopped: raise RuntimeError("Cannot publish on a stopped publisher.") @@ -347,7 +365,7 @@ def on_publish_done(future): # Delegate the publishing to the sequencer. 
sequencer = self._get_or_create_sequencer(topic, ordering_key) - future = sequencer.publish(message, retry=retry) + future = sequencer.publish(message, retry=retry, timeout=timeout) future.add_done_callback(on_publish_done) # Create a timer thread if necessary to enforce the batching diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index d72541a3b57b..5fc7dd581ce6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -22,6 +22,7 @@ import proto from google.api import http_pb2 +from google.api_core import gapic_v1 from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.iam.v1.logging import audit_data_pb2 @@ -98,11 +99,13 @@ class LimitExceededBehavior(str, enum.Enum): # This class is used when creating a publisher client to pass in options # to enable/disable features. PublisherOptions = collections.namedtuple( - "PublisherConfig", ["enable_message_ordering", "flow_control"] + "PublisherOptions", ["enable_message_ordering", "flow_control", "retry", "timeout"] ) PublisherOptions.__new__.__defaults__ = ( False, # enable_message_ordering: False PublishFlowControl(), # default flow control settings + gapic_v1.method.DEFAULT, # use default api_core value for retry + gapic_v1.method.DEFAULT, # use default api_core value for timeout ) PublisherOptions.__doc__ = "The options for the publisher client." PublisherOptions.enable_message_ordering.__doc__ = ( @@ -112,6 +115,14 @@ class LimitExceededBehavior(str, enum.Enum): "Flow control settings for message publishing by the client. By default " "the publisher client does not do any throttling." ) +PublisherOptions.retry.__doc__ = ( + "Retry settings for message publishing by the client. This should be " + "an instance of :class:`google.api_core.retry.Retry`." 
+) +PublisherOptions.timeout.__doc__ = ( + "Timeout settings for message publishing by the client. It should be compatible " + "with :class:`~.pubsub_v1.types.TimeoutType`." +) # Define the type class and default values for flow control settings. # diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index d644364b2c7a..041391c5718c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -25,6 +25,7 @@ from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore +from google.api_core import timeout as timeouts # type: ignore from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,7 +33,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub - +from google.pubsub_v1.types import TimeoutType from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport from .client import PublisherClient @@ -177,7 +178,7 @@ async def create_topic( *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource @@ -203,7 +204,8 @@ async def create_topic( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -261,7 +263,7 @@ async def update_topic( request: pubsub.UpdateTopicRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Updates an existing topic. Note that certain @@ -270,10 +272,10 @@ async def update_topic( Args: request (:class:`google.pubsub_v1.types.UpdateTopicRequest`): The request object. Request for the UpdateTopic method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -321,7 +323,7 @@ async def publish( topic: str = None, messages: Sequence[pubsub.PubsubMessage] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if @@ -343,10 +345,10 @@ async def publish( This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -416,7 +418,7 @@ async def get_topic( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. @@ -431,10 +433,10 @@ async def get_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -497,7 +499,7 @@ async def list_topics( *, project: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. @@ -512,10 +514,10 @@ async def list_topics( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -588,7 +590,7 @@ async def list_topic_subscriptions( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsAsyncPager: r"""Lists the names of the attached subscriptions on this @@ -606,10 +608,10 @@ async def list_topic_subscriptions( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -682,7 +684,7 @@ async def list_topic_snapshots( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsAsyncPager: r"""Lists the names of the snapshots on this topic. Snapshots are @@ -704,10 +706,10 @@ async def list_topic_snapshots( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -780,7 +782,7 @@ async def delete_topic( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the topic with the given name. 
Returns ``NOT_FOUND`` if @@ -801,10 +803,10 @@ async def delete_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ @@ -857,7 +859,7 @@ async def detach_subscription( request: pubsub.DetachSubscriptionRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. All messages retained @@ -870,10 +872,10 @@ async def detach_subscription( request (:class:`google.pubsub_v1.types.DetachSubscriptionRequest`): The request object. Request for the DetachSubscription method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -921,7 +923,7 @@ async def set_iam_policy( request: iam_policy.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy.Policy: r"""Sets the IAM access control policy on the specified @@ -932,7 +934,8 @@ async def set_iam_policy( method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: @@ -1027,7 +1030,7 @@ async def get_iam_policy( request: iam_policy.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy.Policy: r"""Gets the IAM access control policy for a function. @@ -1039,7 +1042,8 @@ async def get_iam_policy( method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: @@ -1134,7 +1138,7 @@ async def test_iam_permissions( request: iam_policy.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control @@ -1146,7 +1150,8 @@ async def test_iam_permissions( `TestIamPermissions` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
Returns: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index f74e85a0fd15..3f249b01bb49 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -27,6 +27,7 @@ from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore +from google.api_core import timeout as timeouts # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -37,6 +38,7 @@ from google.iam.v1 import policy_pb2 as policy # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub +from google.pubsub_v1.types import TimeoutType import grpc @@ -401,7 +403,7 @@ def create_topic( *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource @@ -428,7 +430,8 @@ def create_topic( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -480,7 +483,7 @@ def update_topic( request: pubsub.UpdateTopicRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Updates an existing topic. 
Note that certain @@ -493,7 +496,8 @@ def update_topic( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -535,7 +539,7 @@ def publish( topic: str = None, messages: Sequence[pubsub.PubsubMessage] = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if @@ -561,7 +565,8 @@ def publish( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -616,7 +621,7 @@ def get_topic( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. @@ -635,7 +640,8 @@ def get_topic( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -688,7 +694,7 @@ def list_topics( *, project: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsPager: r"""Lists matching topics. 
@@ -707,7 +713,8 @@ def list_topics( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -770,7 +777,7 @@ def list_topic_subscriptions( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsPager: r"""Lists the names of the attached subscriptions on this @@ -789,10 +796,10 @@ def list_topic_subscriptions( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -855,7 +862,7 @@ def list_topic_snapshots( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsPager: r"""Lists the names of the snapshots on this topic. Snapshots are @@ -881,7 +888,8 @@ def list_topic_snapshots( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -944,7 +952,7 @@ def delete_topic( *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if @@ -969,7 +977,8 @@ def delete_topic( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ @@ -1016,7 +1025,7 @@ def detach_subscription( request: pubsub.DetachSubscriptionRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. All messages retained @@ -1033,7 +1042,8 @@ def detach_subscription( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1075,7 +1085,7 @@ def set_iam_policy( request: iam_policy.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy.Policy: r"""Sets the IAM access control policy on the specified @@ -1087,7 +1097,8 @@ def set_iam_policy( method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: @@ -1185,7 +1196,7 @@ def get_iam_policy( request: iam_policy.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy.Policy: r"""Gets the IAM access control policy for a function. @@ -1196,9 +1207,8 @@ def get_iam_policy( request (:class:`~.iam_policy.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: @@ -1296,7 +1306,7 @@ def test_iam_permissions( request: iam_policy.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control @@ -1307,9 +1317,8 @@ def test_iam_permissions( request (:class:`~.iam_policy.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. + timeout (TimeoutType): + The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
Returns: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 2894f6668160..ebc8b53994de 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import Union from .pubsub import ( AcknowledgeRequest, @@ -76,7 +76,16 @@ SchemaView, ) +TimeoutType = Union[ + int, + float, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", +] +"""The type of the timeout parameter of publisher client methods.""" + __all__ = ( + "TimeoutType", "AcknowledgeRequest", "CreateSnapshotRequest", "DeadLetterPolicy", diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 2ad5d20de04d..41b63e89e7e8 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -248,6 +248,52 @@ "\n\g<0>", ) +# Allow timeout to be an instance of google.api_core.timeout.* +s.replace( + "google/pubsub_v1/types/__init__.py", + r"from \.pubsub import \(", + "from typing import Union\n\n\g<0>" +) +s.replace( + "google/pubsub_v1/types/__init__.py", + r"__all__ = \(\n", + textwrap.dedent('''\ + TimeoutType = Union[ + int, + float, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", + ] + """The type of the timeout parameter of publisher client methods.""" + + \g<0> "TimeoutType",''') +) + +s.replace( + "google/pubsub_v1/services/publisher/*client.py", + r"from google.api_core import retry as retries.*\n", + "\g<0>from google.api_core import timeout as timeouts # type: ignore\n" +) +s.replace( + 
"google/pubsub_v1/services/publisher/*client.py", + r"from google\.pubsub_v1\.types import pubsub", + "\g<0>\nfrom google.pubsub_v1.types import TimeoutType", +) + +s.replace( + "google/pubsub_v1/services/publisher/*client.py", + r"(\s+)timeout: float = None.*\n", + "\g<1>timeout: TimeoutType = gapic_v1.method.DEFAULT,", +) +s.replace( + "google/pubsub_v1/services/publisher/*client.py", + r"([^\S\r\n]+)timeout \(float\): (.*)\n", + ( + "\g<1>timeout (TimeoutType):\n" + "\g<1> \g<2>\n" + ), +) + # The namespace package declaration in google/cloud/__init__.py should be excluded # from coverage. s.replace( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 1f1850ad222d..abf5ec76f8bf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -42,6 +42,7 @@ def create_batch( batch_done_callback=None, commit_when_full=True, commit_retry=gapic_v1.method.DEFAULT, + commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, **batch_settings ): """Return a batch object suitable for testing. @@ -54,6 +55,8 @@ def create_batch( has reached byte-size or number-of-messages limits. commit_retry (Optional[google.api_core.retry.Retry]): The retry settings for the batch commit call. + commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): + The timeout to apply to the batch commit call. batch_settings (Mapping[str, str]): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. 
@@ -69,6 +72,7 @@ def create_batch( batch_done_callback=batch_done_callback, commit_when_full=commit_when_full, commit_retry=commit_retry, + commit_timeout=commit_timeout, ) @@ -138,6 +142,7 @@ def test_blocking__commit(): gapic_types.PubsubMessage(data=b"This is another message."), ], retry=gapic_v1.method.DEFAULT, + timeout=gapic_v1.method.DEFAULT, ) # Establish that all of the futures are done, and that they have the @@ -166,6 +171,29 @@ def test_blocking__commit_custom_retry(): topic="topic_name", messages=[gapic_types.PubsubMessage(data=b"This is my message.")], retry=mock.sentinel.custom_retry, + timeout=gapic_v1.method.DEFAULT, + ) + + +def test_blocking__commit_custom_timeout(): + batch = create_batch(commit_timeout=mock.sentinel.custom_timeout) + batch.publish({"data": b"This is my message."}) + + # Set up the underlying API publish method to return a PublishResponse. + publish_response = gapic_types.PublishResponse(message_ids=["a"]) + patch = mock.patch.object( + type(batch.client.api), "publish", return_value=publish_response + ) + with patch as publish: + batch._commit() + + # Establish that the underlying API call was made with expected + # arguments. 
+ publish.assert_called_once_with( + topic="topic_name", + messages=[gapic_types.PubsubMessage(data=b"This is my message.")], + retry=gapic_v1.method.DEFAULT, + timeout=mock.sentinel.custom_timeout, ) @@ -173,7 +201,7 @@ def test_client_api_publish_not_blocking_additional_publish_calls(): batch = create_batch(max_messages=1) api_publish_called = threading.Event() - def api_publish_delay(topic="", messages=(), retry=None): + def api_publish_delay(topic="", messages=(), retry=None, timeout=None): api_publish_called.set() time.sleep(1.0) message_ids = [str(i) for i in range(len(messages))] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index de5dd0523706..09795d37b236 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -184,6 +184,18 @@ def test_publish_custom_retry(): assert batch._commit_retry is mock.sentinel.custom_retry +def test_publish_custom_timeout(): + client = create_client() + message = create_message() + sequencer = create_ordered_sequencer(client) + + sequencer.publish(message, timeout=mock.sentinel.custom_timeout) + + assert sequencer._ordered_batches # batch exists + batch = sequencer._ordered_batches[0] + assert batch._commit_timeout is mock.sentinel.custom_timeout + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 04a89e19bb9a..486cba5f77da 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -101,6 +101,17 @@ def test_publish_custom_retry(): assert sequencer._current_batch._commit_retry is mock.sentinel.custom_retry +def test_publish_custom_timeout(): + client = create_client() + message = create_message() + sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") + + sequencer.publish(message, timeout=mock.sentinel.custom_timeout) + + assert sequencer._current_batch is not None + assert sequencer._current_batch._commit_timeout is mock.sentinel.custom_timeout + + def test_publish_batch_full(): client = create_client() message = create_message() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 3db5d60cd7a5..161f9e33bb66 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -337,6 +337,7 @@ def test_publish_new_batch_needed(creds): batch_done_callback=None, commit_when_full=True, commit_retry=gapic_v1.method.DEFAULT, + commit_timeout=gapic_v1.method.DEFAULT, ) message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) batch1.publish.assert_called_once_with(message_pb) @@ -350,6 +351,44 @@ def test_publish_attrs_type_error(creds): client.publish(topic, b"foo", answer=42) +def test_publish_custom_retry_overrides_configured_retry(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(retry=mock.sentinel.publish_retry), + ) + + topic = "topic/path" + client._flow_controller = mock.Mock() + fake_sequencer = mock.Mock() + client._get_or_create_sequencer = mock.Mock(return_value=fake_sequencer) + client.publish(topic, b"hello!", retry=mock.sentinel.custom_retry) + + 
fake_sequencer.publish.assert_called_once_with( + mock.ANY, retry=mock.sentinel.custom_retry, timeout=mock.ANY + ) + message = fake_sequencer.publish.call_args.args[0] + assert message.data == b"hello!" + + +def test_publish_custom_timeout_overrides_configured_timeout(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(timeout=mock.sentinel.publish_timeout), + ) + + topic = "topic/path" + client._flow_controller = mock.Mock() + fake_sequencer = mock.Mock() + client._get_or_create_sequencer = mock.Mock(return_value=fake_sequencer) + client.publish(topic, b"hello!", timeout=mock.sentinel.custom_timeout) + + fake_sequencer.publish.assert_called_once_with( + mock.ANY, retry=mock.ANY, timeout=mock.sentinel.custom_timeout + ) + message = fake_sequencer.publish.call_args.args[0] + assert message.data == b"hello!" + + def test_stop(creds): client = publisher.Client(credentials=creds) From eb96b469d09e40f4f7e37217af3629e757ea0504 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 17 Jun 2021 08:50:15 -0700 Subject: [PATCH 0678/1197] sample: add publish flow control sample and other nits (#429) * sample: add publish flow control sample and other nits * restore changes in untouched files * address peter's comments --- .../samples/snippets/publisher.py | 126 +++++++++++++----- .../samples/snippets/publisher_test.py | 7 + 2 files changed, 101 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 3aca244c5a45..e976b6b606cc 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -55,7 +55,7 @@ def create_topic(project_id, topic_id): topic = publisher.create_topic(request={"name": topic_path}) - print("Created topic: {}".format(topic.name)) + print(f"Created topic: {topic.name}") # [END pubsub_quickstart_create_topic] # [END 
pubsub_create_topic] @@ -74,7 +74,7 @@ def delete_topic(project_id, topic_id): publisher.delete_topic(request={"topic": topic_path}) - print("Topic deleted: {}".format(topic_path)) + print(f"Topic deleted: {topic_path}") # [END pubsub_delete_topic] @@ -94,7 +94,7 @@ def publish_messages(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = "Message number {}".format(n) + data = f"Message number {n}" # Data must be a bytestring data = data.encode("utf-8") # When you publish a message, the client returns a future. @@ -120,7 +120,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = "Message number {}".format(n) + data = f"Message number {n}" # Data must be a bytestring data = data.encode("utf-8") # Add two attributes, origin and username, to the message @@ -136,8 +136,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): def publish_messages_with_error_handler(project_id, topic_id): # [START pubsub_publish_with_error_handler] """Publishes multiple messages to a Pub/Sub topic with an error handler.""" - import time - + from concurrent import futures from google.cloud import pubsub_v1 # TODO(developer) @@ -146,31 +145,28 @@ def publish_messages_with_error_handler(project_id, topic_id): publisher = pubsub_v1.PublisherClient() topic_path = publisher.topic_path(project_id, topic_id) + publish_futures = [] - futures = dict() - - def get_callback(f, data): - def callback(f): + def get_callback(publish_future, data): + def callback(publish_future): try: - print(f.result()) - futures.pop(data) - except: # noqa - print("Please handle {} for {}.".format(f.exception(), data)) + # Wait 100 ms for the publish call to succeed. 
+ print(publish_future.result(timeout=0.1)) + except futures.TimeoutError: + print(f"Publishing {data} timed out.") return callback for i in range(10): data = str(i) - futures.update({data: None}) # When you publish a message, the client returns a future. - future = publisher.publish(topic_path, data.encode("utf-8")) - futures[data] = future - # Publish failures shall be handled in the callback function. - future.add_done_callback(get_callback(future, data)) + publish_future = publisher.publish(topic_path, data.encode("utf-8")) + # Non-blocking. Publish failures are handled in the callback function. + publish_future.add_done_callback(get_callback(publish_future, data)) + publish_futures.append(publish_future) # Wait for all the publish futures to resolve before exiting. - while futures: - time.sleep(5) + futures.wait(publish_futures, return_when=futures.ALL_COMPLETED) print(f"Published messages with error handler to {topic_path}.") # [END pubsub_publish_with_error_handler] @@ -179,21 +175,23 @@ def callback(f): def publish_messages_with_batch_settings(project_id, topic_id): """Publishes multiple messages to a Pub/Sub topic with batch settings.""" # [START pubsub_publisher_batch_settings] + from concurrent import futures from google.cloud import pubsub_v1 # TODO(developer) # project_id = "your-project-id" # topic_id = "your-topic-id" - # Configure the batch to publish as soon as there is ten messages, - # one kilobyte of data, or one second has passed. + # Configure the batch to publish as soon as there are 10 messages + # or 1 KiB of data, or 1 second has passed. batch_settings = pubsub_v1.types.BatchSettings( max_messages=10, # default 100 - max_bytes=1024, # default 1 MB + max_bytes=1024, # default 1 MiB max_latency=1, # default 10 ms ) publisher = pubsub_v1.PublisherClient(batch_settings) topic_path = publisher.topic_path(project_id, topic_id) + publish_futures = [] # Resolve the publish future in a separate thread. 
def callback(future): @@ -201,17 +199,69 @@ def callback(future): print(message_id) for n in range(1, 10): - data = "Message number {}".format(n) + data = f"Message number {n}" # Data must be a bytestring data = data.encode("utf-8") - future = publisher.publish(topic_path, data) + publish_future = publisher.publish(topic_path, data) # Non-blocking. Allow the publisher client to batch multiple messages. - future.add_done_callback(callback) + publish_future.add_done_callback(callback) + publish_futures.append(publish_future) + + futures.wait(publish_futures, return_when=futures.ALL_COMPLETED) print(f"Published messages with batch settings to {topic_path}.") # [END pubsub_publisher_batch_settings] +def publish_messages_with_flow_control_settings(project_id, topic_id): + """Publishes messages to a Pub/Sub topic with flow control settings.""" + # [START pubsub_publisher_flow_control] + from concurrent import futures + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.types import ( + LimitExceededBehavior, + PublisherOptions, + PublishFlowControl, + ) + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + # Configure how many messages the publisher client can hold in memory + # and what to do when messages exceed the limit. + flow_control_settings = PublishFlowControl( + message_limit=100, # 100 messages + byte_limit=10 * 1024 * 1024, # 10 MiB + limit_exceeded_behavior=LimitExceededBehavior.BLOCK, + ) + publisher = pubsub_v1.PublisherClient( + publisher_options=PublisherOptions(flow_control=flow_control_settings) + ) + topic_path = publisher.topic_path(project_id, topic_id) + publish_futures = [] + + # Resolve the publish future in a separate thread. + def callback(publish_future): + message_id = publish_future.result() + print(message_id) + + # Publish 1000 messages in quick succession to trigger flow control. 
+ for n in range(1, 1000): + data = f"Message number {n}" + # Data must be a bytestring + data = data.encode("utf-8") + publish_future = publisher.publish(topic_path, data) + # Non-blocking. Allow the publisher client to batch messages. + publish_future.add_done_callback(callback) + publish_futures.append(publish_future) + + futures.wait(publish_futures, return_when=futures.ALL_COMPLETED) + + print(f"Published messages with flow control settings to {topic_path}.") + # [END pubsub_publisher_flow_control] + + def publish_messages_with_retry_settings(project_id, topic_id): """Publishes messages with custom retry settings.""" # [START pubsub_publisher_retry_settings] @@ -244,7 +294,7 @@ def publish_messages_with_retry_settings(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = "Message number {}".format(n) + data = f"Message number {n}" # Data must be a bytestring data = data.encode("utf-8") future = publisher.publish(topic=topic_path, data=data, retry=custom_retry) @@ -365,7 +415,8 @@ def detach_subscription(project_id, subscription_id): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") @@ -388,7 +439,8 @@ def detach_subscription(project_id, subscription_id): publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", + help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_id") @@ -398,6 +450,12 @@ def detach_subscription(project_id, subscription_id): ) publish_with_batch_settings_parser.add_argument("topic_id") + 
publish_with_flow_control_settings_parser = subparsers.add_parser( + "publish-with-flow-control", + help=publish_messages_with_flow_control_settings.__doc__, + ) + publish_with_flow_control_settings_parser.add_argument("topic_id") + publish_with_retry_settings_parser = subparsers.add_parser( "publish-with-retry-settings", help=publish_messages_with_retry_settings.__doc__, @@ -405,7 +463,8 @@ def detach_subscription(project_id, subscription_id): publish_with_retry_settings_parser.add_argument("topic_id") publish_with_ordering_keys_parser = subparsers.add_parser( - "publish-with-ordering-keys", help=publish_with_ordering_keys.__doc__, + "publish-with-ordering-keys", + help=publish_with_ordering_keys.__doc__, ) publish_with_ordering_keys_parser.add_argument("topic_id") @@ -416,7 +475,8 @@ def detach_subscription(project_id, subscription_id): resume_publish_with_ordering_keys_parser.add_argument("topic_id") detach_subscription_parser = subparsers.add_parser( - "detach-subscription", help=detach_subscription.__doc__, + "detach-subscription", + help=detach_subscription.__doc__, ) detach_subscription_parser.add_argument("subscription_id") @@ -436,6 +496,8 @@ def detach_subscription(project_id, subscription_id): publish_messages_with_error_handler(args.project_id, args.topic_id) elif args.command == "publish-with-batch-settings": publish_messages_with_batch_settings(args.project_id, args.topic_id) + elif args.command == "publish-with-flow-control": + publish_messages_with_flow_control_settings(args.project_id, args.topic_id) elif args.command == "publish-with-retry-settings": publish_messages_with_retry_settings(args.project_id, args.topic_id) elif args.command == "publish-with-ordering-keys": diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index cd81fcaf1bd7..51cb20a47604 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -134,6 +134,13 @@ def test_publish_with_batch_settings(topic_path, capsys): assert f"Published messages with batch settings to {topic_path}." in out +def test_publish_with_flow_control_settings(topic_path, capsys): + publisher.publish_messages_with_flow_control_settings(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Published messages with flow control settings to {topic_path}." in out + + def test_publish_with_retry_settings(topic_path, capsys): publisher.publish_messages_with_retry_settings(PROJECT_ID, TOPIC_ID) From 09991c07e9031be823cf8a063317d8d7b8bfd94d Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 17 Jun 2021 22:55:48 +0200 Subject: [PATCH 0679/1197] fix: threads can skip the line in publisher flow controller (#422) * Add publisher flow controller test for FIFO order * Simplify _run_in_daemon() test helper * Fix message slots not acquired in FIFO order * Unify the logic for distributing any free capacity * Use OrderedDict for the FIFO queue This allows to hold the queue of threads and their reservation data in a single structure, no need for the separate deque and reservations dict. --- .../pubsub_v1/publisher/flow_controller.py | 122 +++++++++++------- .../publisher/test_flow_controller.py | 92 ++++++++----- 2 files changed, 137 insertions(+), 77 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py index fa3fac6d3fef..f899f4d0859e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from collections import deque +from collections import OrderedDict import logging import threading import warnings @@ -24,12 +24,21 @@ _LOGGER = logging.getLogger(__name__) -class _QuantityReservation(object): - """A (partial) reservation of a quantifiable resource.""" +class _QuantityReservation: + """A (partial) reservation of quantifiable resources.""" - def __init__(self, reserved, needed): - self.reserved = reserved - self.needed = needed + def __init__(self, bytes_reserved: int, bytes_needed: int, has_slot: bool): + self.bytes_reserved = bytes_reserved + self.bytes_needed = bytes_needed + self.has_slot = has_slot + + def __repr__(self): + return ( + f"{type(self).__name__}(" + f"bytes_reserved={self.bytes_reserved}, " + f"bytes_needed={self.bytes_needed}, " + f"has_slot={self.has_slot})" + ) class FlowController(object): @@ -48,14 +57,13 @@ def __init__(self, settings): self._message_count = 0 self._total_bytes = 0 - # A FIFO queue of threads blocked on adding a message, from first to last. + # A FIFO queue of threads blocked on adding a message that also tracks their + # reservations of available flow control bytes and message slots. # Only relevant if the configured limit exceeded behavior is BLOCK. - self._waiting = deque() + self._waiting = OrderedDict() - # Reservations of available flow control bytes by the waiting threads. - # Each value is a _QuantityReservation instance. - self._byte_reservations = dict() self._reserved_bytes = 0 + self._reserved_slots = 0 # The lock is used to protect all internal state (message and byte count, # waiting threads to add, etc.). 
@@ -131,11 +139,13 @@ def add(self, message): current_thread = threading.current_thread() while self._would_overflow(message): - if current_thread not in self._byte_reservations: - self._waiting.append(current_thread) - self._byte_reservations[current_thread] = _QuantityReservation( - reserved=0, needed=message._pb.ByteSize() + if current_thread not in self._waiting: + reservation = _QuantityReservation( + bytes_reserved=0, + bytes_needed=message._pb.ByteSize(), + has_slot=False, ) + self._waiting[current_thread] = reservation # Will be placed last. _LOGGER.debug( "Blocking until there is enough free capacity in the flow - " @@ -152,9 +162,9 @@ def add(self, message): # Message accepted, increase the load and remove thread stats. self._message_count += 1 self._total_bytes += message._pb.ByteSize() - self._reserved_bytes -= self._byte_reservations[current_thread].reserved - del self._byte_reservations[current_thread] - self._waiting.remove(current_thread) + self._reserved_bytes -= self._waiting[current_thread].bytes_reserved + self._reserved_slots -= 1 + del self._waiting[current_thread] def release(self, message): """Release a mesage from flow control. @@ -180,39 +190,52 @@ def release(self, message): self._message_count = max(0, self._message_count) self._total_bytes = max(0, self._total_bytes) - self._distribute_available_bytes() + self._distribute_available_capacity() # If at least one thread waiting to add() can be unblocked, wake them up. if self._ready_to_unblock(): _LOGGER.debug("Notifying threads waiting to add messages to flow.") self._has_capacity.notify_all() - def _distribute_available_bytes(self): - """Distribute availalbe free capacity among the waiting threads in FIFO order. + def _distribute_available_capacity(self): + """Distribute available capacity among the waiting threads in FIFO order. The method assumes that the caller has obtained ``_operational_lock``. 
""" - available = self._settings.byte_limit - self._total_bytes - self._reserved_bytes + available_slots = ( + self._settings.message_limit - self._message_count - self._reserved_slots + ) + available_bytes = ( + self._settings.byte_limit - self._total_bytes - self._reserved_bytes + ) + + for reservation in self._waiting.values(): + if available_slots <= 0 and available_bytes <= 0: + break # Santa is now empty-handed, better luck next time. - for thread in self._waiting: - if available <= 0: - break + # Distribute any free slots. + if available_slots > 0 and not reservation.has_slot: + reservation.has_slot = True + self._reserved_slots += 1 + available_slots -= 1 - reservation = self._byte_reservations[thread] - still_needed = reservation.needed - reservation.reserved + # Distribute any free bytes. + if available_bytes <= 0: + continue - # Sanity check for any internal inconsistencies. - if still_needed < 0: + bytes_still_needed = reservation.bytes_needed - reservation.bytes_reserved + + if bytes_still_needed < 0: # Sanity check for any internal inconsistencies. msg = "Too many bytes reserved: {} / {}".format( - reservation.reserved, reservation.needed + reservation.bytes_reserved, reservation.bytes_needed ) warnings.warn(msg, category=RuntimeWarning) - still_needed = 0 + bytes_still_needed = 0 - can_give = min(still_needed, available) - reservation.reserved += can_give + can_give = min(bytes_still_needed, available_bytes) + reservation.bytes_reserved += can_give self._reserved_bytes += can_give - available -= can_give + available_bytes -= can_give def _ready_to_unblock(self): """Determine if any of the threads waiting to add a message can proceed. @@ -225,10 +248,10 @@ def _ready_to_unblock(self): if self._waiting: # It's enough to only check the head of the queue, because FIFO # distribution of any free capacity. 
- reservation = self._byte_reservations[self._waiting[0]] + first_reservation = next(iter(self._waiting.values())) return ( - reservation.reserved >= reservation.needed - and self._message_count < self._settings.message_limit + first_reservation.bytes_reserved >= first_reservation.bytes_needed + and first_reservation.has_slot ) return False @@ -245,16 +268,22 @@ def _would_overflow(self, message): Returns: bool """ - reservation = self._byte_reservations.get(threading.current_thread()) + reservation = self._waiting.get(threading.current_thread()) if reservation: - enough_reserved = reservation.reserved >= reservation.needed + enough_reserved = reservation.bytes_reserved >= reservation.bytes_needed + has_slot = reservation.has_slot else: enough_reserved = False + has_slot = False bytes_taken = self._total_bytes + self._reserved_bytes + message._pb.ByteSize() size_overflow = bytes_taken > self._settings.byte_limit and not enough_reserved - msg_count_overflow = self._message_count + 1 > self._settings.message_limit + + msg_count_overflow = not has_slot and ( + (self._message_count + self._reserved_slots + 1) + > self._settings.message_limit + ) return size_overflow or msg_count_overflow @@ -275,18 +304,15 @@ def _load_info(self, message_count=None, total_bytes=None): Returns: str """ - msg = "messages: {} / {}, bytes: {} / {} (reserved: {})" - if message_count is None: message_count = self._message_count if total_bytes is None: total_bytes = self._total_bytes - return msg.format( - message_count, - self._settings.message_limit, - total_bytes, - self._settings.byte_limit, - self._reserved_bytes, + return ( + f"messages: {message_count} / {self._settings.message_limit} " + f"(reserved: {self._reserved_slots}), " + f"bytes: {total_bytes} / {self._settings.byte_limit} " + f"(reserved: {self._reserved_bytes})" ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py index 5e9d6c3ae3f5..ee923a435dec 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -16,10 +16,14 @@ import threading import time +from typing import Callable +from typing import Sequence +from typing import Union import warnings import pytest +import google from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher.flow_controller import FlowController @@ -27,25 +31,20 @@ def _run_in_daemon( - flow_controller, - action, - messages, - all_done_event, - error_event=None, - action_pause=None, + action: Callable[["google.cloud.pubsub_v1.types.PubsubMessage"], None], + messages: Sequence["google.cloud.pubsub_v1.types.PubsubMessage"], + all_done_event: threading.Event, + error_event: threading.Event = None, + action_pause: Union[int, float] = None, ): - """Run flow controller action (add or remove messages) in a daemon thread. - """ - assert action in ("add", "release") + """Run flow controller action (add or remove messages) in a daemon thread.""" def run_me(): - method = getattr(flow_controller, action) - try: for msg in messages: if action_pause is not None: time.sleep(action_pause) - method(msg) + action(msg) except Exception: if error_event is not None: # pragma: NO COVER error_event.set() @@ -227,7 +226,7 @@ def test_blocking_on_overflow_until_free_capacity(): releasing_x_done = threading.Event() # Adding a message with free capacity should not block. - _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) + _run_in_daemon(flow_controller.add, [msg1], adding_1_done) if not adding_1_done.wait(timeout=0.1): pytest.fail( # pragma: NO COVER "Adding a message with enough flow capacity blocked or errored." 
@@ -235,21 +234,21 @@ def test_blocking_on_overflow_until_free_capacity(): # Adding messages when there is not enough capacity should block, even if # added through multiple threads. - _run_in_daemon(flow_controller, "add", [msg2], adding_2_done) + _run_in_daemon(flow_controller.add, [msg2], adding_2_done) if adding_2_done.wait(timeout=0.1): pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER - _run_in_daemon(flow_controller, "add", [msg3], adding_3_done) + _run_in_daemon(flow_controller.add, [msg3], adding_3_done) if adding_3_done.wait(timeout=0.1): pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER - _run_in_daemon(flow_controller, "add", [msg4], adding_4_done) + _run_in_daemon(flow_controller.add, [msg4], adding_4_done) if adding_4_done.wait(timeout=0.1): pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER # After releasing one message, there should be room for a new message, which # should result in unblocking one of the waiting threads. - _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) + _run_in_daemon(flow_controller.release, [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER @@ -266,7 +265,7 @@ def test_blocking_on_overflow_until_free_capacity(): # Release another message and verify that yet another thread gets unblocked. 
added_msg = [msg2, msg3, msg4][done_status.index(True)] - _run_in_daemon(flow_controller, "release", [added_msg], releasing_x_done) + _run_in_daemon(flow_controller.release, [added_msg], releasing_x_done) if not releasing_x_done.wait(timeout=0.1): pytest.fail("Releasing messages blocked or errored.") # pragma: NO COVER @@ -293,7 +292,7 @@ def test_error_if_mesage_would_block_indefinitely(): adding_done = threading.Event() error_event = threading.Event() - _run_in_daemon(flow_controller, "add", [msg], adding_done, error_event=error_event) + _run_in_daemon(flow_controller.add, [msg], adding_done, error_event=error_event) assert error_event.wait(timeout=0.1), "No error on adding too large a message." @@ -329,20 +328,20 @@ def test_threads_posting_large_messages_do_not_starve(): # enough messages should eventually allow the large message to come through, even # if more messages are added after it (those should wait for the large message). initial_messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 5 - _run_in_daemon(flow_controller, "add", initial_messages, adding_initial_done) + _run_in_daemon(flow_controller.add, initial_messages, adding_initial_done) assert adding_initial_done.wait(timeout=0.1) - _run_in_daemon(flow_controller, "add", [large_msg], adding_large_done) + _run_in_daemon(flow_controller.add, [large_msg], adding_large_done) # Continuously keep adding more messages after the large one. messages = [grpc_types.PubsubMessage(data=b"x" * 10)] * 10 - _run_in_daemon(flow_controller, "add", messages, adding_busy_done, action_pause=0.1) + _run_in_daemon(flow_controller.add, messages, adding_busy_done, action_pause=0.1) # At the same time, gradually keep releasing the messages - the freeed up # capacity should be consumed by the large message, not the other small messages # being added after it. 
_run_in_daemon( - flow_controller, "release", messages, releasing_busy_done, action_pause=0.1 + flow_controller.release, messages, releasing_busy_done, action_pause=0.1 ) # Sanity check - releasing should have completed by now. @@ -359,7 +358,7 @@ def test_threads_posting_large_messages_do_not_starve(): # Releasing the large message should unblock adding the remaining "busy" messages # that have not been added yet. - _run_in_daemon(flow_controller, "release", [large_msg], releasing_large_done) + _run_in_daemon(flow_controller.release, [large_msg], releasing_large_done) if not releasing_large_done.wait(timeout=0.1): pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER @@ -367,6 +366,41 @@ def test_threads_posting_large_messages_do_not_starve(): pytest.fail("Adding messages blocked or errored.") # pragma: NO COVER +def test_blocked_messages_are_accepted_in_fifo_order(): + settings = types.PublishFlowControl( + message_limit=1, + byte_limit=1_000_000, # Unlimited for practical purposes in the test. + limit_exceeded_behavior=types.LimitExceededBehavior.BLOCK, + ) + flow_controller = FlowController(settings) + + # It's OK if the message instance is shared, as the flow controller is only concerned + # with byte sizes and counts, and not with particular message instances. + message = grpc_types.PubsubMessage(data=b"x") + + adding_done_events = [threading.Event() for _ in range(10)] + releasing_done_events = [threading.Event() for _ in adding_done_events] + + # Add messages. The first one will be accepted, and the rest should queue behind. + for adding_done in adding_done_events: + _run_in_daemon(flow_controller.add, [message], adding_done) + time.sleep(0.1) + + if not adding_done_events[0].wait(timeout=0.1): # pragma: NO COVER + pytest.fail("The first message unexpectedly got blocked on adding.") + + # For each message, check that it has indeed been added to the flow controller.
+ # Then release it to make room for the next message in line, and repeat the check. + enumeration = enumerate(zip(adding_done_events, releasing_done_events)) + for i, (adding_done, releasing_done) in enumeration: + if not adding_done.wait(timeout=0.1): # pragma: NO COVER + pytest.fail(f"Queued message still blocked on adding (i={i}).") + + _run_in_daemon(flow_controller.release, [message], releasing_done) + if not releasing_done.wait(timeout=0.1): # pragma: NO COVER + pytest.fail(f"Queued message was not released in time (i={i}).") + + def test_warning_on_internal_reservation_stats_error_when_unblocking(): settings = types.PublishFlowControl( message_limit=1, @@ -387,7 +421,7 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): releasing_1_done = threading.Event() # Adding a message with free capacity should not block. - _run_in_daemon(flow_controller, "add", [msg1], adding_1_done) + _run_in_daemon(flow_controller.add, [msg1], adding_1_done) if not adding_1_done.wait(timeout=0.1): pytest.fail( # pragma: NO COVER "Adding a message with enough flow capacity blocked or errored." @@ -395,17 +429,17 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): # Adding messages when there is not enough capacity should block, even if # added through multiple threads. - _run_in_daemon(flow_controller, "add", [msg2], adding_2_done) + _run_in_daemon(flow_controller.add, [msg2], adding_2_done) if adding_2_done.wait(timeout=0.1): pytest.fail("Adding a message on overflow did not block.") # pragma: NO COVER # Intentionally corrupt internal stats - reservation = next(iter(flow_controller._byte_reservations.values()), None) + reservation = next(iter(flow_controller._waiting.values()), None) assert reservation is not None, "No messages blocked by flow controller." 
- reservation.reserved = reservation.needed + 1 + reservation.bytes_reserved = reservation.bytes_needed + 1 with warnings.catch_warnings(record=True) as warned: - _run_in_daemon(flow_controller, "release", [msg1], releasing_1_done) + _run_in_daemon(flow_controller.release, [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER From 71c7cd411a919ea517a6eb931de39f6631c3a137 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 21 Jun 2021 12:34:46 +0200 Subject: [PATCH 0680/1197] chore: release 2.6.0 (#428) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 19 +++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index ae2f35fd612c..fc50e0c135be 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,25 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.6.0](https://www.github.com/googleapis/python-pubsub/compare/v2.5.0...v2.6.0) (2021-06-17) + + +### Features + +* support customizable retry and timeout settings on the publisher client ([#299](https://www.github.com/googleapis/python-pubsub/issues/299)) ([7597604](https://www.github.com/googleapis/python-pubsub/commit/7597604b41fa3a1e9bf34addc35c8647dde007cc)) + + +### Bug Fixes + +* ACK deadline set for received messages can be too low ([#416](https://www.github.com/googleapis/python-pubsub/issues/416)) ([e907f6e](https://www.github.com/googleapis/python-pubsub/commit/e907f6e05f59f64a3b08df3304e92ec960997be6)) +* threads can skip the line in publisher flow controller ([#422](https://www.github.com/googleapis/python-pubsub/issues/422)) 
([ef89f55](https://www.github.com/googleapis/python-pubsub/commit/ef89f55a41044e9ad26b91132b4b1be9c7b2c127)) + + +### Documentation + +* block until the streaming pull shuts down ([#424](https://www.github.com/googleapis/python-pubsub/issues/424)) ([d0d0b70](https://www.github.com/googleapis/python-pubsub/commit/d0d0b704642df8dee893d3f585aeb666e19696fb)) +* explain that future.cancel() is non-blocking ([#420](https://www.github.com/googleapis/python-pubsub/issues/420)) ([c825789](https://www.github.com/googleapis/python-pubsub/commit/c825789bdff310f44cbb132a723e99d1e6331d8f)) + ## [2.5.0](https://www.github.com/googleapis/python-pubsub/compare/v2.4.2...v2.5.0) (2021-05-18) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 4d459063e786..04f8e66fb874 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.5.0" +version = "2.6.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From ef5dce1b41b2487d4eab8599e8193be9e99c30ed Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 22 Jun 2021 10:10:59 +0200 Subject: [PATCH 0681/1197] chore(deps): update dependency google-cloud-pubsub to v2.6.0 (#431) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index aaf5991b51df..718aae46a8b7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.5.0 +google-cloud-pubsub==2.6.0 avro==1.10.2 From 6e9aa1f37ee9e144e288440e87dfbdbc2b2b6074 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 23 Jun 2021 
08:37:43 -0700 Subject: [PATCH 0682/1197] samples: publish with error handler and flow control (#433) --- packages/google-cloud-pubsub/samples/snippets/publisher.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index e976b6b606cc..d50c9b9db1fa 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -150,8 +150,8 @@ def publish_messages_with_error_handler(project_id, topic_id): def get_callback(publish_future, data): def callback(publish_future): try: - # Wait 100 ms for the publish call to succeed. - print(publish_future.result(timeout=0.1)) + # Wait 60 seconds for the publish call to succeed. + print(publish_future.result(timeout=60)) except futures.TimeoutError: print(f"Publishing {data} timed out.") @@ -246,7 +246,8 @@ def callback(publish_future): message_id = publish_future.result() print(message_id) - # Publish 1000 messages in quick succession to trigger flow control. + # Publishing 1000 messages in quick succession may be constrained by + # publisher flow control.
for n in range(1, 1000): data = f"Message number {n}" # Data must be a bytestring From 0442e06c4aacdb95179549f364bc530782ef39ed Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 23 Jun 2021 17:38:07 +0200 Subject: [PATCH 0683/1197] tests: mitigate flaky snippets tests (#432) * test: add additional debug info to snippet test * Reduce the sleep timeout in message callback * Auto retry flaky delete schema test * More flaky tests whack-a-mole --- .../samples/snippets/schema_test.py | 2 ++ .../samples/snippets/subscriber.py | 2 +- .../samples/snippets/subscriber_test.py | 30 +++++++++++-------- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index f0a4470f7e2b..1e0dc8f1a906 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -17,6 +17,7 @@ import os import uuid +from flaky import flaky from google.api_core.exceptions import NotFound from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient from google.pubsub_v1.types import Encoding @@ -251,6 +252,7 @@ def test_subscribe_with_proto_schema( assert "Received a binary-encoded message" in out +@flaky(max_runs=3, min_passes=1) def test_delete_schema(proto_schema, capsys): schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID) out, _ = capsys.readouterr() diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index d01860cf851a..0114142969be 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -499,7 +499,7 @@ def receive_messages_with_blocking_shutdown(project_id, subscription_id, timeout def callback(message): print(f"Received {message.data}.") - time.sleep(timeout + 5.0) # Pocess longer than streaming 
pull future timeout. + time.sleep(timeout + 3.0) # Process longer than streaming pull future timeout. message.ack() print(f"Done processing the message {message.data}.") diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 8d034949dc6b..20355fe2b4d0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -425,21 +425,26 @@ def test_receive_with_blocking_shutdown( if re.search(r".*done waiting.*stream shutdown.*", line, flags=re.IGNORECASE) ] - assert "Listening" in out - assert subscription_async in out + try: + assert "Listening" in out + assert subscription_async in out - assert len(stream_canceled_lines) == 1 - assert len(shutdown_done_waiting_lines) == 1 - assert len(msg_received_lines) == 3 - assert len(msg_done_lines) == 3 + assert len(stream_canceled_lines) == 1 + assert len(shutdown_done_waiting_lines) == 1 + assert len(msg_received_lines) == 3 + assert len(msg_done_lines) == 3 - # The stream should have been canceled *after* receiving messages, but before - # message processing was done. - assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] + # The stream should have been canceled *after* receiving messages, but before + # message processing was done. + assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] - # Yet, waiting on the stream shutdown should have completed *after* the processing - # of received messages has ended. - assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] + # Yet, waiting on the stream shutdown should have completed *after* + # the processing of received messages has ended. + assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] + except AssertionError: # pragma: NO COVER + from pprint import pprint + pprint(out_lines) # To make debugging of possible flakiness easier.
+ raise def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): @@ -464,6 +469,7 @@ def test_receive_synchronously(publisher_client, topic, subscription_sync, capsy assert f"{subscription_sync}" in out +@flaky(max_runs=3, min_passes=1) def test_receive_synchronously_with_lease( publisher_client, topic, subscription_sync, capsys ): From 9f7ab44eff3e51593cbd6ee1420bf0de89342bb1 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 23 Jun 2021 09:43:17 -0700 Subject: [PATCH 0684/1197] samples(test): fix flaky sync pull test (#434) * samples(test): flaky sync pull test * address peter's comment --- .../samples/snippets/subscriber_test.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 20355fe2b4d0..bf26a79b905b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -15,6 +15,7 @@ import os import re import sys +import time import uuid import backoff @@ -475,7 +476,13 @@ def test_receive_synchronously_with_lease( ): @backoff.on_exception(backoff.expo, Unknown, max_time=300) def run_sample(): - _publish_messages(publisher_client, topic, message_num=3) + _publish_messages(publisher_client, topic, message_num=10) + # Pausing 10s to allow the subscriber to establish the connection + # because sync pull often returns fewer messages than requested. + # The intention is to fix flaky tests reporting errors like + # `google.api_core.exceptions.Unknown: None Stream removed` as + # in https://github.com/googleapis/python-pubsub/issues/341. 
+ time.sleep(10) subscriber.synchronous_pull_with_lease_management(PROJECT_ID, SUBSCRIPTION_SYNC) run_sample() From d7a0d3f5d0c3e6637e46925b05d671c93d302ffd Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 1 Jul 2021 19:42:19 +0200 Subject: [PATCH 0685/1197] chore: require grpcio >= 1.38.1 (#435) --- packages/google-cloud-pubsub/setup.py | 1 + packages/google-cloud-pubsub/testing/constraints-3.6.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 04f8e66fb874..6f8ab50a901f 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -29,6 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 5 - Production/Stable" dependencies = [ + "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "libcst >= 0.3.10", "proto-plus >= 1.7.1", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt index 89c937ce8f9a..3d58c3f9cfcd 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.6.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.6.txt @@ -5,6 +5,7 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 +grpcio==1.38.1 google-api-core==1.22.2 libcst==0.3.10 proto-plus==1.7.1 From 21bb7e7b5f2aa608647acec16935b5ea1721fa77 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 9 Jul 2021 10:08:42 +0200 Subject: [PATCH 0686/1197] docs: include docs for Pager classes (#438) --- packages/google-cloud-pubsub/docs/publisher/api/pagers.rst | 6 ++++++ packages/google-cloud-pubsub/docs/publisher/index.rst | 1 + packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst | 6 ++++++ packages/google-cloud-pubsub/docs/subscriber/index.rst | 1 + 4 files changed, 14 insertions(+) create mode 100644 
packages/google-cloud-pubsub/docs/publisher/api/pagers.rst create mode 100644 packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst diff --git a/packages/google-cloud-pubsub/docs/publisher/api/pagers.rst b/packages/google-cloud-pubsub/docs/publisher/api/pagers.rst new file mode 100644 index 000000000000..3bbfff33ca87 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/publisher/api/pagers.rst @@ -0,0 +1,6 @@ +Pagers +====== + +.. automodule:: google.pubsub_v1.services.publisher.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index 6810f023289e..0e7a9b50bccf 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -172,3 +172,4 @@ API Reference api/client api/futures + api/pagers diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst b/packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst new file mode 100644 index 000000000000..367c65ca71d4 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst @@ -0,0 +1,6 @@ +Pagers +====== + +.. 
automodule:: google.pubsub_v1.services.subscriber.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/subscriber/index.rst index 06f1658a4286..aa21cd37bce7 100644 --- a/packages/google-cloud-pubsub/docs/subscriber/index.rst +++ b/packages/google-cloud-pubsub/docs/subscriber/index.rst @@ -232,4 +232,5 @@ API Reference api/client api/message api/futures + api/pagers api/scheduler From f62d34a9edc255cbb2a08b32c5d3b6356e1e67ba Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 12 Jul 2021 17:05:41 +0200 Subject: [PATCH 0687/1197] chore: release v2.6.1 (#437) --- packages/google-cloud-pubsub/CHANGELOG.md | 17 +++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index fc50e0c135be..22e60fd4f1ba 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,23 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 2.6.1 + +07-05-2021 10:33 PDT + +### Dependencies + +- Fix possible crash by requiring `grpcio >= 1.38.1`. ([#414](https://github.com/googleapis/python-pubsub/issues/414)) ([7037a28](https://github.com/googleapis/python-pubsub/pull/435/commits/7037a28090aa4efa01808231721716bca80bb0b7)) + +### Documentation + +- Adjust samples for publishing with error handler and flow control. ([#433](https://github.com/googleapis/python-pubsub/pull/433)) + +### Internal / Testing Changes + +- Fix flaky sync pull sample test. ([#434](https://github.com/googleapis/python-pubsub/pull/434)) +- Mitigate flaky snippets tests. 
([#432](https://github.com/googleapis/python-pubsub/pull/432)) + ## [2.6.0](https://www.github.com/googleapis/python-pubsub/compare/v2.5.0...v2.6.0) (2021-06-17) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6f8ab50a901f..2e1d57520d2a 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.6.0" +version = "2.6.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 7645b20b47efec04a1deaabfba316d655af99ab3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Jul 2021 09:32:22 +0200 Subject: [PATCH 0688/1197] chore(deps): update dependency google-cloud-pubsub to v2.6.1 (#442) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.6.0` -> `==2.6.1` | [![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.6.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.6.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.6.1/compatibility-slim/2.6.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.6.1/confidence-slim/2.6.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.6.1`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​261) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.6.0...v2.6.1) 07-05-2021 10:33 PDT ##### Dependencies - Fix possible crash by requiring `grpcio >= 1.38.1`. ([#​414](https://togithub.com/googleapis/python-pubsub/issues/414)) ([7037a28](https://togithub.com/googleapis/python-pubsub/pull/435/commits/7037a28090aa4efa01808231721716bca80bb0b7)) ##### Documentation - Adjust samples for publishing with error handler and flow control. ([#​433](https://togithub.com/googleapis/python-pubsub/pull/433)) ##### Internal / Testing Changes - Fix flaky sync pull sample test. ([#​434](https://togithub.com/googleapis/python-pubsub/pull/434)) - Mitigate flaky snippets tests. ([#​432](https://togithub.com/googleapis/python-pubsub/pull/432))
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-pubsub). --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 718aae46a8b7..00c912dcedbf 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.6.0 +google-cloud-pubsub==2.6.1 avro==1.10.2 From d32e01649872ffad5873e299d7da019cbbf2442e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 14 Jul 2021 10:34:50 +0200 Subject: [PATCH 0689/1197] chore(deps): update dependency backoff to v1.11.0 (#443) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 357643112ecf..bfaedbea0ac3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==1.10.0 +backoff==1.11.0 pytest==6.2.4 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 6d78c508b31d73293526f58c674b967f87815c2b Mon Sep 17 00:00:00 2001 From: 
WhiteSource Renovate Date: Fri, 16 Jul 2021 16:24:09 +0200 Subject: [PATCH 0690/1197] chore(deps): update dependency backoff to v1.11.1 (#451) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [backoff](https://togithub.com/litl/backoff) | `==1.11.0` -> `==1.11.1` | [![age](https://badges.renovateapi.com/packages/pypi/backoff/1.11.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/backoff/1.11.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/backoff/1.11.1/compatibility-slim/1.11.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/backoff/1.11.1/confidence-slim/1.11.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
litl/backoff ### [`v1.11.1`](https://togithub.com/litl/backoff/blob/master/CHANGELOG.md#v1111-2021-07-14) [Compare Source](https://togithub.com/litl/backoff/compare/v1.11.0...v1.11.1) ##### Changed - Update **version** in backoff module
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-pubsub). --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index bfaedbea0ac3..fdd38a518ef1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==1.11.0 +backoff==1.11.1 pytest==6.2.4 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 3c2e4615e1b31d54be3be559486ef5be117db446 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sun, 18 Jul 2021 03:24:22 -0700 Subject: [PATCH 0691/1197] chore: template updates (#403) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/1cb8ad84-81d7-41ae-80cc-f275d38cb08c/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) 
--- .../.kokoro/docker/docs/Dockerfile | 35 +-------------- .../.kokoro/docker/docs/fetch_gpg_keys.sh | 45 ------------------- .../.kokoro/presubmit/common.cfg | 8 ++-- .../.kokoro/presubmit/presubmit.cfg | 7 +-- .../.kokoro/samples/python3.9/common.cfg | 40 +++++++++++++++++ .../.kokoro/samples/python3.9/continuous.cfg | 6 +++ .../samples/python3.9/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.9/periodic.cfg | 6 +++ .../.kokoro/samples/python3.9/presubmit.cfg | 6 +++ .../.kokoro/test-samples-impl.sh | 6 +-- .../.pre-commit-config.yaml | 4 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 37 ++++----------- packages/google-cloud-pubsub/docs/conf.py | 13 +++--- .../docs/multiprocessing.rst | 4 +- .../services/publisher/async_client.py | 9 ++++ .../pubsub_v1/services/publisher/client.py | 5 +++ .../google/pubsub_v1/types/__init__.py | 2 + packages/google-cloud-pubsub/noxfile.py | 20 +++------ .../samples/snippets/noxfile.py | 13 ++++-- packages/google-cloud-pubsub/synth.metadata | 30 +++---------- 20 files changed, 137 insertions(+), 170 deletions(-) delete mode 100755 packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index 412b0b56a921..4e1b1fb8b5a5 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -40,6 +40,7 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ + 
python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -59,40 +60,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb - -COPY fetch_gpg_keys.sh /tmp -# Install the desired versions of Python. -RUN set -ex \ - && export GNUPGHOME="$(mktemp -d)" \ - && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ - && /tmp/fetch_gpg_keys.sh \ - && for PYTHON_VERSION in 3.7.8 3.8.5; do \ - wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ - && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ - && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ - && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ - && mkdir -p /usr/src/python-${PYTHON_VERSION} \ - && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ - && rm python-${PYTHON_VERSION}.tar.xz \ - && cd /usr/src/python-${PYTHON_VERSION} \ - && ./configure \ - --enable-shared \ - # This works only on Python 2.7 and throws a warning on every other - # version, but seems otherwise harmless. 
- --enable-unicode=ucs4 \ - --with-system-ffi \ - --without-ensurepip \ - && make -j$(nproc) \ - && make install \ - && ldconfig \ - ; done \ - && rm -rf "${GNUPGHOME}" \ - && rm -rf /usr/src/python* \ - && rm -rf ~/.cache/ - RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.7 /tmp/get-pip.py \ && python3.8 /tmp/get-pip.py \ && rm /tmp/get-pip.py -CMD ["python3.7"] +CMD ["python3.8"] diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh b/packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh deleted file mode 100755 index d653dd868e4b..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/fetch_gpg_keys.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A script to fetch gpg keys with retry. -# Avoid jinja parsing the file. -# - -function retry { - if [[ "${#}" -le 1 ]]; then - echo "Usage: ${0} retry_count commands.." - exit 1 - fi - local retries=${1} - local command="${@:2}" - until [[ "${retries}" -le 0 ]]; do - $command && return 0 - if [[ $? 
-ne 0 ]]; then - echo "command failed, retrying" - ((retries--)) - fi - done - return 1 -} - -# 3.6.9, 3.7.5 (Ned Deily) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D - -# 3.8.0 (Łukasz Langa) -retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ - E3FF2839C048B25C084DEBE9B26995E310250568 - -# diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg index 7d78fd1f769d..a812af888d39 100644 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg @@ -21,9 +21,7 @@ env_vars: { key: "TRAMPOLINE_IMAGE" value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" } - -# Obtain environment variables for running Pub/Sub Lite samples tests env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "python-docs-samples-test-env" -} \ No newline at end of file + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/build.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg index 9b689c788b9b..8f43917d92fe 100644 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg @@ -1,6 +1 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/build.sh" -} \ No newline at end of file +# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg new file mode 100644 index 000000000000..b858ccbd4246 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + 
+# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.9" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py39" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff 
--git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg new file mode 100644 index 000000000000..50fec9649732 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh index cf5de74c17a5..311a8d54b9f1 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -20,9 +20,9 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" +# Exit early if samples don't exist +if ! find samples -name 'requirements.txt' | grep -q .; then + echo "No tests run. 
'./samples/**/requirements.txt' not found" exit 0 fi diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 8912e9b5d7d7..62eb5a77d9a3 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -16,7 +16,7 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: trailing-whitespace - id: end-of-file-fixer @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.9.0 + rev: 3.9.2 hooks: - id: flake8 diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 6dbc9d2d6cbe..e2853513fb7b 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -68,15 +68,12 @@ Using ``nox`` We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: + $ nox -s unit - $ nox -s unit-2.7 - $ nox -s unit-3.8 - $ ... +- To run a single unit test:: -- Args to pytest can be passed through the nox command separated by a `--`. For - example, to run a single test:: + $ nox -s unit-3.9 -- -k - $ nox -s unit-3.8 -- -k .. note:: @@ -143,8 +140,7 @@ Running System Tests - To run system tests, you can execute:: # Run all system tests - $ nox -s system-3.8 - $ nox -s system-2.7 + $ nox -s system # Run a single system test $ nox -s system-3.8 -- -k @@ -152,29 +148,14 @@ Running System Tests .. note:: - System tests are only configured to run under Python 2.7 and - Python 3.8. For expediency, we do not run them in older versions - of Python 3. + System tests are only configured to run under Python 3.8. + For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local auth settings and change some configuration in your project to run all the tests. -- System tests will be run against an actual project and - so you'll need to provide some environment variables to facilitate - authentication to your project: - - - ``GOOGLE_APPLICATION_CREDENTIALS``: The path to a JSON key file; - Such a file can be downloaded directly from the developer's console by clicking - "Generate new JSON key". See private key - `docs `__ - for more details. - -- Once you have downloaded your json keys, set the environment variable - ``GOOGLE_APPLICATION_CREDENTIALS`` to the absolute path of the json file:: - - $ export GOOGLE_APPLICATION_CREDENTIALS="/Users//path/to/app_credentials.json" - +- System tests will be run against an actual project. You should use local credentials from gcloud when possible. See `Best practices for application authentication `__. Some tests require a service account. For those tests see `Authenticating as a service account `__. ************* Test Coverage @@ -232,8 +213,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-pubsub/blob/master/noxfile.py -We also explicitly decided to support Python 3 beginning with version -3.6. Reasons for this include: +We also explicitly decided to support Python 3 beginning with version 3.6. +Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index e2d55cde22fa..b1ed409cf8e5 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -80,9 +80,9 @@ master_doc = "index" # General information about the project. 
-project = u"google-cloud-pubsub" -copyright = u"2019, Google" -author = u"Google APIs" +project = "google-cloud-pubsub" +copyright = "2019, Google" +author = "Google APIs" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -281,7 +281,7 @@ ( master_doc, "google-cloud-pubsub.tex", - u"google-cloud-pubsub Documentation", + "google-cloud-pubsub Documentation", author, "manual", ) @@ -316,7 +316,7 @@ ( master_doc, "google-cloud-pubsub", - u"google-cloud-pubsub Documentation", + "google-cloud-pubsub Documentation", [author], 1, ) @@ -335,7 +335,7 @@ ( master_doc, "google-cloud-pubsub", - u"google-cloud-pubsub Documentation", + "google-cloud-pubsub Documentation", author, "google-cloud-pubsub", "google-cloud-pubsub Library", @@ -363,6 +363,7 @@ "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), } diff --git a/packages/google-cloud-pubsub/docs/multiprocessing.rst b/packages/google-cloud-pubsub/docs/multiprocessing.rst index 1cb29d4ca967..536d17b2ea65 100644 --- a/packages/google-cloud-pubsub/docs/multiprocessing.rst +++ b/packages/google-cloud-pubsub/docs/multiprocessing.rst @@ -1,7 +1,7 @@ .. note:: - Because this client uses :mod:`grpcio` library, it is safe to + Because this client uses :mod:`grpc` library, it is safe to share instances across threads. In multiprocessing scenarios, the best practice is to create client instances *after* the invocation of - :func:`os.fork` by :class:`multiprocessing.Pool` or + :func:`os.fork` by :class:`multiprocessing.pool.Pool` or :class:`multiprocessing.Process`. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 041391c5718c..056ba288d924 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -34,6 +34,7 @@ from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType + from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport from .client import PublisherClient @@ -272,6 +273,7 @@ async def update_topic( Args: request (:class:`google.pubsub_v1.types.UpdateTopicRequest`): The request object. Request for the UpdateTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -345,6 +347,7 @@ async def publish( This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -433,6 +436,7 @@ async def get_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -514,6 +518,7 @@ async def list_topics( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (TimeoutType): @@ -608,6 +613,7 @@ async def list_topic_subscriptions( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -706,6 +712,7 @@ async def list_topic_snapshots( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -803,6 +810,7 @@ async def delete_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -872,6 +880,7 @@ async def detach_subscription( request (:class:`google.pubsub_v1.types.DetachSubscriptionRequest`): The request object. Request for the DetachSubscription method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 3f249b01bb49..3a3db1ebe00e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -796,6 +796,7 @@ def list_topic_subscriptions( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -1207,6 +1208,8 @@ def get_iam_policy( request (:class:`~.iam_policy.GetIamPolicyRequest`): The request object. 
Request message for `GetIamPolicy` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. timeout (TimeoutType): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1317,6 +1320,8 @@ def test_iam_permissions( request (:class:`~.iam_policy.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. timeout (TimeoutType): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index ebc8b53994de..5203abfe7f58 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,4 +1,5 @@ # -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -13,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # + from typing import Union from .pubsub import ( diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 6ccdf5e63358..08c6654f4980 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -62,16 +62,9 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): - """Run black. - - Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. - """ + """Run black. 
Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( "black", *BLACK_PATHS, @@ -131,9 +124,6 @@ def system(session): # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": session.skip("RUN_SYSTEM_TESTS is set to false, skipping") - # Sanity check: Only run tests if the environment variable is set. - if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): - session.skip("Credentials must be set via environment variable") # Install pyopenssl for mTLS testing. if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": session.install("pyopenssl") @@ -191,7 +181,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -213,7 +203,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 956cdf4f9250..6a8ccdae22c9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -28,8 +28,9 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -# Copy `noxfile_config.py` to your directory and modify it instead. +BLACK_VERSION = "black==19.10b0" +# Copy `noxfile_config.py` to your directory and modify it instead. 
# `TEST_CONFIG` dict is a configuration hook that allows users to # modify the test configurations. The values here should be in sync @@ -50,7 +51,10 @@ # to use your own Cloud project. 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. 'envs': {}, @@ -156,7 +160,7 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: - session.install("black") + session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) @@ -170,6 +174,9 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): if os.path.exists("constraints.txt"): diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 02da2833c73c..7938695bf9a7 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -4,7 +4,7 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "b8352f91c63e0cb7d64c4d0e557651248cd301b5" + "sha": "f1ee3463a89a1a994af0cc522d25bfd1c4412824" } }, { @@ -14,27 +14,6 @@ "sha": "6598bb829c9e9a534be674649ffd1b4671a821f9", "internalRef": "364449524" } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - 
"sha": "043cc620d6a6111816d9e09f2a97208565fde958" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "043cc620d6a6111816d9e09f2a97208565fde958" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "043cc620d6a6111816d9e09f2a97208565fde958" - } } ], "destinations": [ @@ -64,7 +43,6 @@ ".kokoro/continuous/common.cfg", ".kokoro/continuous/continuous.cfg", ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", ".kokoro/docs/common.cfg", ".kokoro/docs/docs-presubmit.cfg", ".kokoro/docs/docs.cfg", @@ -94,6 +72,11 @@ ".kokoro/samples/python3.8/periodic-head.cfg", ".kokoro/samples/python3.8/periodic.cfg", ".kokoro/samples/python3.8/presubmit.cfg", + ".kokoro/samples/python3.9/common.cfg", + ".kokoro/samples/python3.9/continuous.cfg", + ".kokoro/samples/python3.9/periodic-head.cfg", + ".kokoro/samples/python3.9/periodic.cfg", + ".kokoro/samples/python3.9/presubmit.cfg", ".kokoro/test-samples-against-head.sh", ".kokoro/test-samples-impl.sh", ".kokoro/test-samples.sh", @@ -105,6 +88,7 @@ "CONTRIBUTING.rst", "LICENSE", "MANIFEST.in", + "SECURITY.md", "docs/_static/custom.css", "docs/_templates/layout.html", "docs/conf.py", From e744567dfd06502d7532ce17e90cedce101be9b7 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Sun, 18 Jul 2021 04:42:22 -0700 Subject: [PATCH 0692/1197] chore: use gapic-generator-python 0.50.3 (#404) This PR was generated using Autosynth. :rainbow: Synth log will be available here: https://source.cloud.google.com/results/invocations/1cb8ad84-81d7-41ae-80cc-f275d38cb08c/targets - [ ] To automatically regenerate this PR, check this box. (May take up to 24 hours.) 
PiperOrigin-RevId: 383459959 Source-Link: https://github.com/googleapis/googleapis/commit/fe3cc5e650b8e52537f02549fe5fb49fc386f3a9 PiperOrigin-RevId: 382318016 Source-Link: https://github.com/googleapis/googleapis/commit/e9b2b616a20a11c97c7ac912536e82d33d006104 PiperOrigin-RevId: 382142900 Source-Link: https://github.com/googleapis/googleapis/commit/513440fda515f3c799c22a30e3906dcda325004e PiperOrigin-RevId: 381898347 Source-Link: https://github.com/googleapis/googleapis/commit/2bf1e2a6711b228debfadbe639973920671a01ef PiperOrigin-RevId: 380641501 Source-Link: https://github.com/googleapis/googleapis/commit/076f7e9f0b258bdb54338895d7251b202e8f0de3 PiperOrigin-RevId: 373649163 Source-Link: https://github.com/googleapis/googleapis/commit/7e1b14e6c7a9ab96d2db7e4a131981f162446d34 PiperOrigin-RevId: 373400747 Source-Link: https://github.com/googleapis/googleapis/commit/162641cfe5573c648df679a6dd30385650a08704 PiperOrigin-RevId: 372197450 Source-Link: https://github.com/googleapis/googleapis/commit/83a7e1c8c2f7421ded45ed323eb1fda99ef5ea46 PiperOrigin-RevId: 371362703 Source-Link: https://github.com/googleapis/googleapis/commit/5a04154e7c7c0e98e0e4085f6e2c67bd5bff6ff8 PiperOrigin-RevId: 370989216 Source-Link: https://github.com/googleapis/googleapis/commit/4e825559e5ab242c4d1aafca19c83c4d1bac743e PiperOrigin-RevId: 370926454 Source-Link: https://github.com/googleapis/googleapis/commit/382ed8de075e1ddc9baa4ebf8dbc5b2c257a77b8 PiperOrigin-RevId: 370525906 Source-Link: https://github.com/googleapis/googleapis/commit/60e129d0672a1be2c70b41bf76aadc7ad1b1ca0f This PR includes the following features/fixes: fix: add async client to %name_%version/init.py chore: add autogenerated snippets chore: remove auth, policy, and options from the reserved names list feat: support self-signed JWT flow for service accounts chore: enable GAPIC metadata generation chore: sort subpackages in %namespace/%name/init.py feat: add always_use_jwt_access fix: disable always_use_jwt_access feat: add 
subscription properties to streaming pull response feat: add method signature for Subscriber.Pull without the deprecated return_immediately field. fix(deps): require google-api-core >= 1.26.0 fix(deps): add packaging requirement --- packages/google-cloud-pubsub/.coveragerc | 1 - .../google/cloud/pubsub_v1/proto/pubsub.proto | 12 +- .../google/pubsub/__init__.py | 48 +- .../google/pubsub_v1/__init__.py | 19 +- .../google/pubsub_v1/gapic_metadata.json | 361 +++++++ .../google/pubsub_v1/services/__init__.py | 1 - .../pubsub_v1/services/publisher/__init__.py | 2 - .../services/publisher/async_client.py | 144 ++- .../pubsub_v1/services/publisher/client.py | 157 ++- .../pubsub_v1/services/publisher/pagers.py | 8 +- .../services/publisher/transports/__init__.py | 2 - .../services/publisher/transports/base.py | 217 +++-- .../services/publisher/transports/grpc.py | 55 +- .../publisher/transports/grpc_asyncio.py | 57 +- .../services/schema_service/__init__.py | 2 - .../services/schema_service/async_client.py | 83 +- .../services/schema_service/client.py | 141 ++- .../services/schema_service/pagers.py | 4 +- .../schema_service/transports/__init__.py | 2 - .../schema_service/transports/base.py | 149 ++- .../schema_service/transports/grpc.py | 55 +- .../schema_service/transports/grpc_asyncio.py | 57 +- .../pubsub_v1/services/subscriber/__init__.py | 2 - .../services/subscriber/async_client.py | 191 ++-- .../pubsub_v1/services/subscriber/client.py | 178 ++-- .../pubsub_v1/services/subscriber/pagers.py | 6 +- .../subscriber/transports/__init__.py | 2 - .../services/subscriber/transports/base.py | 274 +++--- .../services/subscriber/transports/grpc.py | 73 +- .../subscriber/transports/grpc_asyncio.py | 73 +- .../google/pubsub_v1/types/__init__.py | 2 - .../google/pubsub_v1/types/pubsub.py | 293 +++--- .../google/pubsub_v1/types/schema.py | 48 +- .../scripts/fixup_pubsub_v1_keywords.py | 68 +- packages/google-cloud-pubsub/setup.py | 3 +- packages/google-cloud-pubsub/synth.metadata | 
146 +-- packages/google-cloud-pubsub/synth.py | 18 +- .../testing/constraints-3.6.txt | 7 +- .../google-cloud-pubsub/tests/__init__.py | 15 + .../tests/unit/__init__.py | 15 + .../tests/unit/gapic/__init__.py | 15 + .../tests/unit/gapic/pubsub_v1/__init__.py | 1 - .../unit/gapic/pubsub_v1/test_publisher.py | 689 ++++++------- .../gapic/pubsub_v1/test_schema_service.py | 627 +++++++----- .../unit/gapic/pubsub_v1/test_subscriber.py | 910 ++++++++---------- 45 files changed, 2815 insertions(+), 2418 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json create mode 100644 packages/google-cloud-pubsub/tests/__init__.py create mode 100644 packages/google-cloud-pubsub/tests/unit/gapic/__init__.py diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index a48e62f2e583..96190b454e0a 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = google/cloud/__init__.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 0da37dd7dad1..173c4ce71577 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -1,4 +1,4 @@ -// Copyright 2020 Google LLC +// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
@@ -505,6 +505,7 @@ service Subscriber { }; option (google.api.method_signature) = "subscription,return_immediately,max_messages"; + option (google.api.method_signature) = "subscription,max_messages"; } // Establishes a stream with the server, which sends messages down to the @@ -1142,8 +1143,17 @@ message StreamingPullRequest { // Response for the `StreamingPull` method. This response is used to stream // messages from the server to the client. message StreamingPullResponse { + // Subscription properties sent as part of the response. + message SubscriptionProperties { + // True iff message ordering is enabled for this subscription. + bool message_ordering_enabled = 2; + } + // Received Pub/Sub messages. This will not be empty. repeated ReceivedMessage received_messages = 1; + + // Properties associated with this subscription. + SubscriptionProperties subscription_properties = 4; } // Request for the `CreateSnapshot` method. diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index b146fadef214..dfa5c7e0434f 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,14 +14,15 @@ # limitations under the License. 
# -from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient from google.pubsub_v1.services.publisher.client import PublisherClient +from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient +from google.pubsub_v1.services.schema_service.client import SchemaServiceClient from google.pubsub_v1.services.schema_service.async_client import ( SchemaServiceAsyncClient, ) -from google.pubsub_v1.services.schema_service.client import SchemaServiceClient -from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient from google.pubsub_v1.services.subscriber.client import SubscriberClient +from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient + from google.pubsub_v1.types.pubsub import AcknowledgeRequest from google.pubsub_v1.types.pubsub import CreateSnapshotRequest from google.pubsub_v1.types.pubsub import DeadLetterPolicy @@ -41,10 +41,10 @@ from google.pubsub_v1.types.pubsub import ListSubscriptionsResponse from google.pubsub_v1.types.pubsub import ListTopicSnapshotsRequest from google.pubsub_v1.types.pubsub import ListTopicSnapshotsResponse -from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsRequest -from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse from google.pubsub_v1.types.pubsub import ListTopicsRequest from google.pubsub_v1.types.pubsub import ListTopicsResponse +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse from google.pubsub_v1.types.pubsub import MessageStoragePolicy from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest @@ -69,78 +69,78 @@ from google.pubsub_v1.types.pubsub import UpdateTopicRequest from google.pubsub_v1.types.schema import CreateSchemaRequest from google.pubsub_v1.types.schema import DeleteSchemaRequest -from 
google.pubsub_v1.types.schema import Encoding from google.pubsub_v1.types.schema import GetSchemaRequest from google.pubsub_v1.types.schema import ListSchemasRequest from google.pubsub_v1.types.schema import ListSchemasResponse from google.pubsub_v1.types.schema import Schema -from google.pubsub_v1.types.schema import SchemaView from google.pubsub_v1.types.schema import ValidateMessageRequest from google.pubsub_v1.types.schema import ValidateMessageResponse from google.pubsub_v1.types.schema import ValidateSchemaRequest from google.pubsub_v1.types.schema import ValidateSchemaResponse +from google.pubsub_v1.types.schema import Encoding +from google.pubsub_v1.types.schema import SchemaView __all__ = ( + "PublisherClient", + "PublisherAsyncClient", + "SchemaServiceClient", + "SchemaServiceAsyncClient", + "SubscriberClient", + "SubscriberAsyncClient", "AcknowledgeRequest", - "CreateSchemaRequest", "CreateSnapshotRequest", "DeadLetterPolicy", - "DeleteSchemaRequest", "DeleteSnapshotRequest", "DeleteSubscriptionRequest", "DeleteTopicRequest", "DetachSubscriptionRequest", "DetachSubscriptionResponse", - "Encoding", "ExpirationPolicy", - "GetSchemaRequest", "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", - "ListSchemasRequest", - "ListSchemasResponse", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", "ListSubscriptionsResponse", "ListTopicSnapshotsRequest", "ListTopicSnapshotsResponse", - "ListTopicSubscriptionsRequest", - "ListTopicSubscriptionsResponse", "ListTopicsRequest", "ListTopicsResponse", + "ListTopicSubscriptionsRequest", + "ListTopicSubscriptionsResponse", "MessageStoragePolicy", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", "PublishRequest", "PublishResponse", - "PublisherAsyncClient", - "PublisherClient", "PubsubMessage", "PullRequest", "PullResponse", "PushConfig", "ReceivedMessage", "RetryPolicy", - "Schema", - "SchemaServiceAsyncClient", - "SchemaServiceClient", "SchemaSettings", - "SchemaView", 
"SeekRequest", "SeekResponse", "Snapshot", "StreamingPullRequest", "StreamingPullResponse", - "SubscriberAsyncClient", - "SubscriberClient", "Subscription", "Topic", "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", + "CreateSchemaRequest", + "DeleteSchemaRequest", + "GetSchemaRequest", + "ListSchemasRequest", + "ListSchemasResponse", + "Schema", "ValidateMessageRequest", "ValidateMessageResponse", "ValidateSchemaRequest", "ValidateSchemaResponse", + "Encoding", + "SchemaView", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index d5a1de488ff7..bc78db26fcce 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -16,8 +15,12 @@ # from .services.publisher import PublisherClient +from .services.publisher import PublisherAsyncClient from .services.schema_service import SchemaServiceClient +from .services.schema_service import SchemaServiceAsyncClient from .services.subscriber import SubscriberClient +from .services.subscriber import SubscriberAsyncClient + from .types.pubsub import AcknowledgeRequest from .types.pubsub import CreateSnapshotRequest from .types.pubsub import DeadLetterPolicy @@ -36,10 +39,10 @@ from .types.pubsub import ListSubscriptionsResponse from .types.pubsub import ListTopicSnapshotsRequest from .types.pubsub import ListTopicSnapshotsResponse -from .types.pubsub import ListTopicSubscriptionsRequest -from .types.pubsub import ListTopicSubscriptionsResponse from .types.pubsub import ListTopicsRequest from .types.pubsub import ListTopicsResponse +from .types.pubsub import ListTopicSubscriptionsRequest +from .types.pubsub import ListTopicSubscriptionsResponse from .types.pubsub import MessageStoragePolicy from .types.pubsub 
import ModifyAckDeadlineRequest from .types.pubsub import ModifyPushConfigRequest @@ -64,19 +67,21 @@ from .types.pubsub import UpdateTopicRequest from .types.schema import CreateSchemaRequest from .types.schema import DeleteSchemaRequest -from .types.schema import Encoding from .types.schema import GetSchemaRequest from .types.schema import ListSchemasRequest from .types.schema import ListSchemasResponse from .types.schema import Schema -from .types.schema import SchemaView from .types.schema import ValidateMessageRequest from .types.schema import ValidateMessageResponse from .types.schema import ValidateSchemaRequest from .types.schema import ValidateSchemaResponse - +from .types.schema import Encoding +from .types.schema import SchemaView __all__ = ( + "PublisherAsyncClient", + "SchemaServiceAsyncClient", + "SubscriberAsyncClient", "AcknowledgeRequest", "CreateSchemaRequest", "CreateSnapshotRequest", @@ -110,6 +115,7 @@ "ModifyPushConfigRequest", "PublishRequest", "PublishResponse", + "PublisherClient", "PubsubMessage", "PullRequest", "PullResponse", @@ -135,5 +141,4 @@ "ValidateMessageResponse", "ValidateSchemaRequest", "ValidateSchemaResponse", - "PublisherClient", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json new file mode 100644 index 000000000000..4c5b86bd13bc --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json @@ -0,0 +1,361 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.pubsub_v1", + "protoPackage": "google.pubsub.v1", + "schema": "1.0", + "services": { + "Publisher": { + "clients": { + "grpc": { + "libraryClient": "PublisherClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + 
"detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PublisherAsyncClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + } + } + }, + "SchemaService": { + "clients": { + "grpc": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SchemaServiceAsyncClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + 
"validate_schema" + ] + } + } + } + } + }, + "Subscriber": { + "clients": { + "grpc": { + "libraryClient": "SubscriberClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SubscriberAsyncClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" 
+ ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py index 42ffdf2bc43d..4de65971c238 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py index 970a1a3b408b..98e50425da44 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import PublisherClient from .async_client import PublisherAsyncClient diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 056ba288d924..09e4b0e557fa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,19 +20,18 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import timeout as timeouts # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType - from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport from .client import PublisherClient @@ -56,25 +53,20 @@ class 
PublisherAsyncClient: parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) topic_path = staticmethod(PublisherClient.topic_path) parse_topic_path = staticmethod(PublisherClient.parse_topic_path) - common_billing_account_path = staticmethod( PublisherClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( PublisherClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(PublisherClient.common_folder_path) parse_common_folder_path = staticmethod(PublisherClient.parse_common_folder_path) - common_organization_path = staticmethod(PublisherClient.common_organization_path) parse_common_organization_path = staticmethod( PublisherClient.parse_common_organization_path ) - common_project_path = staticmethod(PublisherClient.common_project_path) parse_common_project_path = staticmethod(PublisherClient.parse_common_project_path) - common_location_path = staticmethod(PublisherClient.common_location_path) parse_common_location_path = staticmethod( PublisherClient.parse_common_location_path @@ -82,7 +74,8 @@ class PublisherAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -97,7 +90,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -114,7 +107,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> PublisherTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. 
Returns: PublisherTransport: The transport used by the client instance. @@ -128,12 +121,12 @@ def transport(self) -> PublisherTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, PublisherTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the publisher client. + """Instantiates the publisher client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -165,7 +158,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = PublisherClient( credentials=credentials, transport=transport, @@ -202,7 +194,6 @@ async def create_topic( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -228,7 +219,6 @@ async def create_topic( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -240,7 +230,9 @@ async def create_topic( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -273,7 +265,6 @@ async def update_topic( Args: request (:class:`google.pubsub_v1.types.UpdateTopicRequest`): The request object. Request for the UpdateTopic method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -286,7 +277,6 @@ async def update_topic( A topic resource. """ # Create or coerce a protobuf request object. 
- request = pubsub.UpdateTopicRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -297,7 +287,9 @@ async def update_topic( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -347,7 +339,6 @@ async def publish( This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -373,10 +364,8 @@ async def publish( # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic - if messages: request.messages.extend(messages) @@ -389,13 +378,13 @@ async def publish( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.Cancelled, - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ResourceExhausted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.Cancelled, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -436,7 +425,6 @@ async def get_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -462,7 +450,6 @@ async def get_topic( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if topic is not None: request.topic = topic @@ -475,9 +462,9 @@ async def get_topic( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -518,7 +505,6 @@ async def list_topics( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -548,7 +534,6 @@ async def list_topics( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project is not None: request.project = project @@ -561,9 +546,9 @@ async def list_topics( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -613,7 +598,6 @@ async def list_topic_subscriptions( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -643,7 +627,6 @@ async def list_topic_subscriptions( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if topic is not None: request.topic = topic @@ -656,9 +639,9 @@ async def list_topic_subscriptions( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -712,7 +695,6 @@ async def list_topic_snapshots( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -742,7 +724,6 @@ async def list_topic_snapshots( # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic @@ -755,9 +736,9 @@ async def list_topic_snapshots( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -810,7 +791,6 @@ async def delete_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -832,7 +812,6 @@ async def delete_topic( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if topic is not None: request.topic = topic @@ -844,7 +823,9 @@ async def delete_topic( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -880,7 +861,6 @@ async def detach_subscription( request (:class:`google.pubsub_v1.types.DetachSubscriptionRequest`): The request object. Request for the DetachSubscription method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -895,7 +875,6 @@ async def detach_subscription( """ # Create or coerce a protobuf request object. - request = pubsub.DetachSubscriptionRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -906,7 +885,9 @@ async def detach_subscription( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -929,16 +910,18 @@ async def detach_subscription( async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -948,7 +931,7 @@ async def set_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -1012,7 +995,7 @@ async def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1036,17 +1019,19 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does not have a policy set. + Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1056,7 +1041,7 @@ async def get_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. 
@@ -1120,7 +1105,7 @@ async def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1144,17 +1129,20 @@ async def test_iam_permissions( async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will + policy for a function. + + If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. + Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1164,7 +1152,7 @@ async def test_iam_permissions( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -1172,7 +1160,7 @@ async def test_iam_permissions( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion.
if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 3a3db1ebe00e..cb44506085ed 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from distutils import util import functools @@ -24,24 +22,23 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.api_core import timeout as timeouts # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: 
ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType import grpc - from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PublisherGrpcTransport from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport @@ -60,7 +57,7 @@ class PublisherClientMeta(type): _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[PublisherTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: @@ -86,7 +83,8 @@ class PublisherClient(metaclass=PublisherClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. @@ -131,7 +129,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: @@ -149,7 +148,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: @@ -169,36 +168,37 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> PublisherTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - PublisherTransport: The transport used by the client instance. + PublisherTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def schema_path(project: str, schema: str,) -> str: - """Return a fully-qualified schema string.""" + """Returns a fully-qualified schema string.""" return "projects/{project}/schemas/{schema}".format( project=project, schema=schema, ) @staticmethod def parse_schema_path(path: str) -> Dict[str, str]: - """Parse a schema path into its component segments.""" + """Parses a schema path into its component segments.""" m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def subscription_path(project: str, subscription: str,) -> str: - """Return a fully-qualified subscription string.""" + """Returns a fully-qualified subscription string.""" return "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, ) @staticmethod def parse_subscription_path(path: str) -> Dict[str, str]: - """Parse a subscription path into its component segments.""" + """Parses a subscription path into its component segments.""" m = re.match( r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path ) @@ -206,18 +206,18 @@ def parse_subscription_path(path: str) -> Dict[str, str]: @staticmethod def topic_path(project: str, topic: str,) -> str: - """Return a fully-qualified topic string.""" + """Returns a fully-qualified topic string.""" return "projects/{project}/topics/{topic}".format(project=project, topic=topic,) @staticmethod def parse_topic_path(path: str) -> Dict[str, str]: - """Parse a topic path into its component segments.""" + """Parses a topic path into its component segments.""" m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -230,7 +230,7 @@ 
def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -241,7 +241,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -252,7 +252,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -263,7 +263,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -277,12 +277,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, PublisherTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the publisher client. + """Instantiates the publisher client. 
Args: @@ -338,9 +338,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -352,12 +353,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -372,8 +375,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -427,7 +430,6 @@ def create_topic( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -455,10 +457,8 @@ def create_topic( # there are no flattened fields. if not isinstance(request, pubsub.Topic): request = pubsub.Topic(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -493,7 +493,6 @@ def update_topic( Args: request (google.pubsub_v1.types.UpdateTopicRequest): The request object. 
Request for the UpdateTopic method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -506,7 +505,6 @@ def update_topic( A topic resource. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a pubsub.UpdateTopicRequest. # There's no risk of modifying the input as we've already verified @@ -562,7 +560,6 @@ def publish( This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -590,10 +587,8 @@ def publish( # there are no flattened fields. if not isinstance(request, pubsub.PublishRequest): request = pubsub.PublishRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic if messages is not None: @@ -637,7 +632,6 @@ def get_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -665,10 +659,8 @@ def get_topic( # there are no flattened fields. if not isinstance(request, pubsub.GetTopicRequest): request = pubsub.GetTopicRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic @@ -710,7 +702,6 @@ def list_topics( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -742,10 +733,8 @@ def list_topics( # there are no flattened fields. 
if not isinstance(request, pubsub.ListTopicsRequest): request = pubsub.ListTopicsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project is not None: request.project = project @@ -796,7 +785,6 @@ def list_topic_subscriptions( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -828,10 +816,8 @@ def list_topic_subscriptions( # there are no flattened fields. if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): request = pubsub.ListTopicSubscriptionsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic @@ -886,7 +872,6 @@ def list_topic_snapshots( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -918,10 +903,8 @@ def list_topic_snapshots( # there are no flattened fields. if not isinstance(request, pubsub.ListTopicSnapshotsRequest): request = pubsub.ListTopicSnapshotsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic @@ -975,7 +958,6 @@ def delete_topic( This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -999,10 +981,8 @@ def delete_topic( # there are no flattened fields. 
if not isinstance(request, pubsub.DeleteTopicRequest): request = pubsub.DeleteTopicRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if topic is not None: request.topic = topic @@ -1040,7 +1020,6 @@ def detach_subscription( request (google.pubsub_v1.types.DetachSubscriptionRequest): The request object. Request for the DetachSubscription method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -1055,7 +1034,6 @@ def detach_subscription( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a pubsub.DetachSubscriptionRequest. # There's no risk of modifying the input as we've already verified @@ -1083,17 +1061,19 @@ def detach_subscription( def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1103,7 +1083,7 @@ def set_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. 
@@ -1170,7 +1150,7 @@ def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1194,28 +1174,30 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. + + Returns an empty policy if the function exists and does not have a + policy set. + Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. timeout (TimeoutType): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -1282,7 +1264,7 @@ def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1306,28 +1288,31 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (TimeoutType): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -1335,7 +1320,7 @@ def test_iam_permissions( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. 
if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index e8836d410bc6..1a826de78236 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -373,7 +371,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index 9eec7fc5de2e..34066edc3e4a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index b1111a8411ba..6e9f8cea85f0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( client_library_version=pkg_resources.get_distribution( @@ -40,6 +40,15 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + class PublisherTransport(abc.ABC): """Abstract transport class for Publisher.""" @@ -49,21 +58,25 @@ class PublisherTransport(abc.ABC): "https://www.googleapis.com/auth/pubsub", ) + DEFAULT_HOST: str = "pubsub.googleapis.com" + def __init__( self, *, - host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials 
= None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -72,7 +85,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,35 +93,70 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -118,7 +166,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -130,7 +180,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -143,13 +195,13 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.Cancelled, - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ResourceExhausted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.Cancelled, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -163,9 +215,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -179,9 +231,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -195,9 +247,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, 
predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -211,9 +263,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -226,7 +278,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -238,7 +292,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -249,57 +305,51 @@ def _prep_wrapped_messages(self, client_info): @property def create_topic( self, - ) -> typing.Callable[ - [pubsub.Topic], typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]] - ]: + ) -> Callable[[pubsub.Topic], Union[pubsub.Topic, Awaitable[pubsub.Topic]]]: raise NotImplementedError() @property def update_topic( self, - ) -> typing.Callable[ - [pubsub.UpdateTopicRequest], - typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]], + ) -> Callable[ + [pubsub.UpdateTopicRequest], Union[pubsub.Topic, Awaitable[pubsub.Topic]] ]: raise NotImplementedError() @property def publish( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.PublishRequest], - typing.Union[pubsub.PublishResponse, typing.Awaitable[pubsub.PublishResponse]], + Union[pubsub.PublishResponse, Awaitable[pubsub.PublishResponse]], ]: 
raise NotImplementedError() @property def get_topic( self, - ) -> typing.Callable[ - [pubsub.GetTopicRequest], - typing.Union[pubsub.Topic, typing.Awaitable[pubsub.Topic]], + ) -> Callable[ + [pubsub.GetTopicRequest], Union[pubsub.Topic, Awaitable[pubsub.Topic]] ]: raise NotImplementedError() @property def list_topics( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.ListTopicsRequest], - typing.Union[ - pubsub.ListTopicsResponse, typing.Awaitable[pubsub.ListTopicsResponse] - ], + Union[pubsub.ListTopicsResponse, Awaitable[pubsub.ListTopicsResponse]], ]: raise NotImplementedError() @property def list_topic_subscriptions( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.ListTopicSubscriptionsRequest], - typing.Union[ + Union[ pubsub.ListTopicSubscriptionsResponse, - typing.Awaitable[pubsub.ListTopicSubscriptionsResponse], + Awaitable[pubsub.ListTopicSubscriptionsResponse], ], ]: raise NotImplementedError() @@ -307,11 +357,11 @@ def list_topic_subscriptions( @property def list_topic_snapshots( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.ListTopicSnapshotsRequest], - typing.Union[ + Union[ pubsub.ListTopicSnapshotsResponse, - typing.Awaitable[pubsub.ListTopicSnapshotsResponse], + Awaitable[pubsub.ListTopicSnapshotsResponse], ], ]: raise NotImplementedError() @@ -319,20 +369,19 @@ def list_topic_snapshots( @property def delete_topic( self, - ) -> typing.Callable[ - [pubsub.DeleteTopicRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [pubsub.DeleteTopicRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def detach_subscription( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.DetachSubscriptionRequest], - typing.Union[ + Union[ pubsub.DetachSubscriptionResponse, - typing.Awaitable[pubsub.DetachSubscriptionResponse], + Awaitable[pubsub.DetachSubscriptionResponse], ], ]: raise NotImplementedError() @@ -340,29 +389,29 @@ def detach_subscription( @property 
def set_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ - [iam_policy.TestIamPermissionsRequest], - typing.Union[ - iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 0b9ddc8e2bb6..d6c127d102f2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub - from .base import PublisherTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -64,11 +61,13 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -103,6 +102,8 @@ def __init__( API requests. If ``None``, then default info will be used. 
Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -155,6 +156,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -179,7 +181,7 @@ def __init__( def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +212,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -415,7 +419,7 @@ def list_topic_snapshots( return self._stubs["list_topic_snapshots"] @property - def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty.Empty]: + def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty_pb2.Empty]: r"""Return a callable for the delete topic method over gRPC. Deletes the topic with the given name. 
Returns ``NOT_FOUND`` if @@ -439,7 +443,7 @@ def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty.Empty]: self._stubs["delete_topic"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Publisher/DeleteTopic", request_serializer=pubsub.DeleteTopicRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_topic"] @@ -478,7 +482,7 @@ def detach_subscription( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. @@ -495,15 +499,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. 
Returns an empty policy if the function exists and does @@ -521,8 +525,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -530,7 +534,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -549,8 +554,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 6a3a096f0805..d98b8fc1c735 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # 
Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub - from .base import PublisherTransport, DEFAULT_CLIENT_INFO from .grpc import PublisherGrpcTransport @@ -57,7 +54,7 @@ class PublisherGrpcAsyncIOTransport(PublisherTransport): def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -108,11 +107,13 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -148,6 +149,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -170,7 +173,6 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -200,6 +202,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -426,7 +429,7 @@ def list_topic_snapshots( @property def delete_topic( self, - ) -> Callable[[pubsub.DeleteTopicRequest], Awaitable[empty.Empty]]: + ) -> Callable[[pubsub.DeleteTopicRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete topic method over gRPC. Deletes the topic with the given name. Returns ``NOT_FOUND`` if @@ -450,7 +453,7 @@ def delete_topic( self._stubs["delete_topic"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Publisher/DeleteTopic", request_serializer=pubsub.DeleteTopicRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_topic"] @@ -489,7 +492,7 @@ def detach_subscription( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. 
@@ -506,15 +509,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. Returns an empty policy if the function exists and does @@ -532,8 +535,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -541,8 +544,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. 
Tests the specified permissions against the IAM access control @@ -561,8 +564,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py index d93cff56ff55..523d5b5f5a5c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import SchemaServiceClient from .async_client import SchemaServiceAsyncClient diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 9aeb62990ebe..6ec1fe667a4d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -22,18 +20,17 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema - from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport from .client import SchemaServiceClient @@ -49,31 +46,26 @@ class 
SchemaServiceAsyncClient: schema_path = staticmethod(SchemaServiceClient.schema_path) parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path) - common_billing_account_path = staticmethod( SchemaServiceClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( SchemaServiceClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(SchemaServiceClient.common_folder_path) parse_common_folder_path = staticmethod( SchemaServiceClient.parse_common_folder_path ) - common_organization_path = staticmethod( SchemaServiceClient.common_organization_path ) parse_common_organization_path = staticmethod( SchemaServiceClient.parse_common_organization_path ) - common_project_path = staticmethod(SchemaServiceClient.common_project_path) parse_common_project_path = staticmethod( SchemaServiceClient.parse_common_project_path ) - common_location_path = staticmethod(SchemaServiceClient.common_location_path) parse_common_location_path = staticmethod( SchemaServiceClient.parse_common_location_path @@ -81,7 +73,8 @@ class SchemaServiceAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -96,7 +89,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -113,7 +106,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> SchemaServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. 
Returns: SchemaServiceTransport: The transport used by the client instance. @@ -127,12 +120,12 @@ def transport(self) -> SchemaServiceTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the schema service client. + """Instantiates the schema service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -164,7 +157,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = SchemaServiceClient( credentials=credentials, transport=transport, @@ -216,7 +208,6 @@ async def create_schema( This corresponds to the ``schema_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -241,7 +232,6 @@ async def create_schema( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if schema is not None: @@ -290,7 +280,6 @@ async def get_schema( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -315,7 +304,6 @@ async def get_schema( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -361,7 +349,6 @@ async def list_schemas( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -390,7 +377,6 @@ async def list_schemas( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent @@ -442,7 +428,6 @@ async def delete_schema( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -463,7 +448,6 @@ async def delete_schema( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -516,7 +500,6 @@ async def validate_schema( This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -541,7 +524,6 @@ async def validate_schema( # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if schema is not None: @@ -581,7 +563,6 @@ async def validate_message( request (:class:`google.pubsub_v1.types.ValidateMessageRequest`): The request object. Request for the `ValidateMessage` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -593,7 +574,6 @@ async def validate_message( Response for the ValidateMessage method. """ # Create or coerce a protobuf request object. 
- request = schema.ValidateMessageRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -618,16 +598,18 @@ async def validate_message( async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -636,7 +618,7 @@ async def set_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -700,7 +682,7 @@ async def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -724,17 +706,19 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does not have a policy set. + Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -743,7 +727,7 @@ async def get_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -807,7 +791,7 @@ async def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -831,17 +815,20 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control - policy for a function. 
If the function does not exist, this will + policy for a function. + + If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. + Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -850,7 +837,7 @@ async def test_iam_permissions( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~iam_policy_pb2.PolicyTestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -858,7 +845,7 @@ async def test_iam_permissions( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 5f65a0388233..106afa85069d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import os @@ -23,21 +21,20 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema - from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import SchemaServiceGrpcTransport from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport @@ -56,7 +53,7 @@ class SchemaServiceClientMeta(type): _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[SchemaServiceTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. Args: label: The name of the desired transport. If none is @@ -79,7 +76,8 @@ class SchemaServiceClient(metaclass=SchemaServiceClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. 
+ """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. Args: @@ -113,7 +111,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -130,7 +129,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: filename (str): The path to the service account private key json @@ -149,29 +148,30 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> SchemaServiceTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - SchemaServiceTransport: The transport used by the client instance. + SchemaServiceTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def schema_path(project: str, schema: str,) -> str: - """Return a fully-qualified schema string.""" + """Returns a fully-qualified schema string.""" return "projects/{project}/schemas/{schema}".format( project=project, schema=schema, ) @staticmethod def parse_schema_path(path: str) -> Dict[str, str]: - """Parse a schema path into its component segments.""" + """Parses a schema path into its component segments.""" m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -184,7 +184,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -195,7 +195,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -206,7 +206,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -217,7 +217,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" 
+ """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -231,12 +231,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, SchemaServiceTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the schema service client. + """Instantiates the schema service client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -291,9 +291,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -305,12 +306,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -325,8 +328,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." 
) self._transport = transport else: @@ -385,7 +388,6 @@ def create_schema( This corresponds to the ``schema_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -412,10 +414,8 @@ def create_schema( # there are no flattened fields. if not isinstance(request, gp_schema.CreateSchemaRequest): request = gp_schema.CreateSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if schema is not None: @@ -460,7 +460,6 @@ def get_schema( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -487,10 +486,8 @@ def get_schema( # there are no flattened fields. if not isinstance(request, schema.GetSchemaRequest): request = schema.GetSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -532,7 +529,6 @@ def list_schemas( This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -563,10 +559,8 @@ def list_schemas( # there are no flattened fields. if not isinstance(request, schema.ListSchemasRequest): request = schema.ListSchemasRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if parent is not None: request.parent = parent @@ -614,7 +608,6 @@ def delete_schema( This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -637,10 +630,8 @@ def delete_schema( # there are no flattened fields. if not isinstance(request, schema.DeleteSchemaRequest): request = schema.DeleteSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name @@ -689,7 +680,6 @@ def validate_schema( This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -716,10 +706,8 @@ def validate_schema( # there are no flattened fields. if not isinstance(request, gp_schema.ValidateSchemaRequest): request = gp_schema.ValidateSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if parent is not None: request.parent = parent if schema is not None: @@ -755,7 +743,6 @@ def validate_message( request (google.pubsub_v1.types.ValidateMessageRequest): The request object. Request for the `ValidateMessage` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -767,7 +754,6 @@ def validate_message( Response for the ValidateMessage method. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a schema.ValidateMessageRequest. 
# There's no risk of modifying the input as we've already verified @@ -793,16 +779,18 @@ def validate_message( def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -811,7 +799,7 @@ def set_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -875,7 +863,7 @@ def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -899,26 +887,28 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
- Returns an empty policy if the function exists and does - not have a policy set. + + Returns an empty policy if the function exists and does not have a + policy set. + Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -982,7 +972,7 @@ def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1006,26 +996,29 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. 
+ + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -1033,7 +1026,7 @@ def test_iam_permissions( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index 2712f37c64c7..f0248bf5ed9a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py index 015410d3d0cc..81ebf8d1c566 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index fec2169ae016..1044166cce19 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,25 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( client_library_version=pkg_resources.get_distribution( @@ -41,6 +41,15 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + class SchemaServiceTransport(abc.ABC): """Abstract transport class for SchemaService.""" @@ -50,21 +59,25 @@ class SchemaServiceTransport(abc.ABC): "https://www.googleapis.com/auth/pubsub", ) + DEFAULT_HOST: str = "pubsub.googleapis.com" + def __init__( self, *, - host: 
str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -73,7 +86,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -81,35 +94,70 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. 
- self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. 
+ + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -136,49 +184,45 @@ def _prep_wrapped_messages(self, client_info): @property def create_schema( self, - ) -> typing.Callable[ + ) -> Callable[ [gp_schema.CreateSchemaRequest], - typing.Union[gp_schema.Schema, typing.Awaitable[gp_schema.Schema]], + Union[gp_schema.Schema, Awaitable[gp_schema.Schema]], ]: raise NotImplementedError() @property def get_schema( self, - ) -> typing.Callable[ - [schema.GetSchemaRequest], - typing.Union[schema.Schema, typing.Awaitable[schema.Schema]], + ) -> Callable[ + [schema.GetSchemaRequest], Union[schema.Schema, Awaitable[schema.Schema]] ]: raise NotImplementedError() @property def list_schemas( self, - ) -> typing.Callable[ + ) -> Callable[ [schema.ListSchemasRequest], - typing.Union[ - schema.ListSchemasResponse, typing.Awaitable[schema.ListSchemasResponse] - ], + Union[schema.ListSchemasResponse, Awaitable[schema.ListSchemasResponse]], ]: raise NotImplementedError() @property def delete_schema( self, - ) -> typing.Callable[ - [schema.DeleteSchemaRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [schema.DeleteSchemaRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def validate_schema( self, - ) -> typing.Callable[ + ) -> Callable[ 
[gp_schema.ValidateSchemaRequest], - typing.Union[ + Union[ gp_schema.ValidateSchemaResponse, - typing.Awaitable[gp_schema.ValidateSchemaResponse], + Awaitable[gp_schema.ValidateSchemaResponse], ], ]: raise NotImplementedError() @@ -186,11 +230,10 @@ def validate_schema( @property def validate_message( self, - ) -> typing.Callable[ + ) -> Callable[ [schema.ValidateMessageRequest], - typing.Union[ - schema.ValidateMessageResponse, - typing.Awaitable[schema.ValidateMessageResponse], + Union[ + schema.ValidateMessageResponse, Awaitable[schema.ValidateMessageResponse] ], ]: raise NotImplementedError() @@ -198,29 +241,29 @@ def validate_message( @property def set_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ - [iam_policy.TestIamPermissionsRequest], - typing.Union[ - iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index ed9822e0174e..c99633ca25ef 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema - from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO @@ -54,7 +51,7 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -64,11 +61,13 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, 
+ always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -103,6 +102,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -155,6 +156,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -179,7 +181,7 @@ def __init__( def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -210,13 +212,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -303,7 +307,7 @@ def list_schemas( return self._stubs["list_schemas"] @property - def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty.Empty]: + def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty_pb2.Empty]: r"""Return a callable for the delete schema method over gRPC. Deletes a schema. @@ -322,7 +326,7 @@ def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty.Empty]: self._stubs["delete_schema"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.SchemaService/DeleteSchema", request_serializer=schema.DeleteSchemaRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_schema"] @@ -381,7 +385,7 @@ def validate_message( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. 
@@ -398,15 +402,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. Returns an empty policy if the function exists and does @@ -424,8 +428,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -433,7 +437,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. 
Tests the specified permissions against the IAM access control @@ -452,8 +457,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index c7cb3ac6396c..0f4c93370688 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,25 +13,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema - from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO from .grpc import SchemaServiceGrpcTransport @@ -57,7 +54,7 @@ class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -84,13 +81,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -98,7 +97,7 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -108,11 +107,13 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -148,6 +149,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -170,7 +173,6 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -200,6 +202,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -311,7 +314,7 @@ def list_schemas( @property def delete_schema( self, - ) -> Callable[[schema.DeleteSchemaRequest], Awaitable[empty.Empty]]: + ) -> Callable[[schema.DeleteSchemaRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete schema method over gRPC. Deletes a schema. @@ -330,7 +333,7 @@ def delete_schema( self._stubs["delete_schema"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.SchemaService/DeleteSchema", request_serializer=schema.DeleteSchemaRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_schema"] @@ -393,7 +396,7 @@ def validate_message( @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. 
@@ -410,15 +413,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. Returns an empty policy if the function exists and does @@ -436,8 +439,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -445,8 +448,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. 
Tests the specified permissions against the IAM access control @@ -465,8 +468,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py index 361085a5e1ac..0961d69d1bca 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from .client import SubscriberClient from .async_client import SubscriberAsyncClient diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 7dbc3c5edc98..502f6f158fa4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict import functools import re @@ -32,19 +30,18 @@ import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.types import pubsub - from .transports.base import SubscriberTransport, 
DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport from .client import SubscriberClient @@ -68,25 +65,20 @@ class SubscriberAsyncClient: parse_subscription_path = staticmethod(SubscriberClient.parse_subscription_path) topic_path = staticmethod(SubscriberClient.topic_path) parse_topic_path = staticmethod(SubscriberClient.parse_topic_path) - common_billing_account_path = staticmethod( SubscriberClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( SubscriberClient.parse_common_billing_account_path ) - common_folder_path = staticmethod(SubscriberClient.common_folder_path) parse_common_folder_path = staticmethod(SubscriberClient.parse_common_folder_path) - common_organization_path = staticmethod(SubscriberClient.common_organization_path) parse_common_organization_path = staticmethod( SubscriberClient.parse_common_organization_path ) - common_project_path = staticmethod(SubscriberClient.common_project_path) parse_common_project_path = staticmethod(SubscriberClient.parse_common_project_path) - common_location_path = staticmethod(SubscriberClient.common_location_path) parse_common_location_path = staticmethod( SubscriberClient.parse_common_location_path @@ -94,7 +86,8 @@ class SubscriberAsyncClient: @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: info (dict): The service account private key info. @@ -109,7 +102,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. 
Args: filename (str): The path to the service account private key json @@ -126,7 +119,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> SubscriberTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: SubscriberTransport: The transport used by the client instance. @@ -140,12 +133,12 @@ def transport(self) -> SubscriberTransport: def __init__( self, *, - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, transport: Union[str, SubscriberTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the subscriber client. + """Instantiates the subscriber client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -177,7 +170,6 @@ def __init__( google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ - self._client = SubscriberClient( credentials=credentials, transport=transport, @@ -276,7 +268,6 @@ async def create_subscription( This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -301,7 +292,6 @@ async def create_subscription( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if topic is not None: @@ -320,9 +310,9 @@ async def create_subscription( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -364,7 +354,6 @@ async def get_subscription( This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -389,7 +378,6 @@ async def get_subscription( # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription @@ -402,9 +390,9 @@ async def get_subscription( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -442,7 +430,6 @@ async def update_subscription( request (:class:`google.pubsub_v1.types.UpdateSubscriptionRequest`): The request object. Request for the UpdateSubscription method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -454,7 +441,6 @@ async def update_subscription( A subscription resource. """ # Create or coerce a protobuf request object. 
- request = pubsub.UpdateSubscriptionRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -465,7 +451,9 @@ async def update_subscription( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -508,7 +496,6 @@ async def list_subscriptions( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -537,7 +524,6 @@ async def list_subscriptions( # If we have keyword arguments corresponding to fields on the # request, apply these. - if project is not None: request.project = project @@ -550,9 +536,9 @@ async def list_subscriptions( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -605,7 +591,6 @@ async def delete_subscription( This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -626,7 +611,6 @@ async def delete_subscription( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if subscription is not None: request.subscription = subscription @@ -638,7 +622,9 @@ async def delete_subscription( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -708,7 +694,6 @@ async def modify_ack_deadline( This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -729,12 +714,10 @@ async def modify_ack_deadline( # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription if ack_deadline_seconds is not None: request.ack_deadline_seconds = ack_deadline_seconds - if ack_ids: request.ack_ids.extend(ack_ids) @@ -746,7 +729,9 @@ async def modify_ack_deadline( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -804,7 +789,6 @@ async def acknowledge( This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -825,10 +809,8 @@ async def acknowledge( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if subscription is not None: request.subscription = subscription - if ack_ids: request.ack_ids.extend(ack_ids) @@ -840,7 +822,9 @@ async def acknowledge( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -910,7 +894,6 @@ async def pull( This corresponds to the ``max_messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -935,7 +918,6 @@ async def pull( # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription if return_immediately is not None: @@ -958,9 +940,9 @@ async def pull( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -1028,11 +1010,11 @@ def streaming_pull( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ResourceExhausted, - exceptions.ServiceUnavailable, + core_exceptions.Aborted, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -1087,7 +1069,6 @@ async def modify_push_config( This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -1108,7 +1089,6 @@ async def modify_push_config( # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription if push_config is not None: @@ -1122,7 +1102,9 @@ async def modify_push_config( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1169,7 +1151,6 @@ async def get_snapshot( This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1200,7 +1181,6 @@ async def get_snapshot( # If we have keyword arguments corresponding to fields on the # request, apply these. - if snapshot is not None: request.snapshot = snapshot @@ -1213,9 +1193,9 @@ async def get_snapshot( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -1261,7 +1241,6 @@ async def list_snapshots( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1290,7 +1269,6 @@ async def list_snapshots( # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project is not None: request.project = project @@ -1303,9 +1281,9 @@ async def list_snapshots( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -1392,7 +1370,6 @@ async def create_snapshot( This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1423,7 +1400,6 @@ async def create_snapshot( # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if subscription is not None: @@ -1437,7 +1413,9 @@ async def create_snapshot( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1476,7 +1454,6 @@ async def update_snapshot( request (:class:`google.pubsub_v1.types.UpdateSnapshotRequest`): The request object. Request for the UpdateSnapshot method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1494,7 +1471,6 @@ async def update_snapshot( """ # Create or coerce a protobuf request object. 
- request = pubsub.UpdateSnapshotRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1505,7 +1481,9 @@ async def update_snapshot( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1557,7 +1535,6 @@ async def delete_snapshot( This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1578,7 +1555,6 @@ async def delete_snapshot( # If we have keyword arguments corresponding to fields on the # request, apply these. - if snapshot is not None: request.snapshot = snapshot @@ -1590,7 +1566,9 @@ async def delete_snapshot( initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -1629,7 +1607,6 @@ async def seek( Args: request (:class:`google.pubsub_v1.types.SeekRequest`): The request object. Request for the `Seek` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1641,7 +1618,6 @@ async def seek( Response for the Seek method (this response is empty). """ # Create or coerce a protobuf request object. 
- request = pubsub.SeekRequest(request) # Wrap the RPC method; this adds retry and timeout information, @@ -1653,9 +1629,9 @@ async def seek( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -1679,16 +1655,18 @@ async def seek( async def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1697,7 +1675,7 @@ async def set_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -1761,7 +1739,7 @@ async def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
@@ -1785,17 +1763,19 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does not have a policy set. + Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1804,7 +1784,7 @@ async def get_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -1868,7 +1848,7 @@ async def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1892,17 +1872,20 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: + ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control - policy for a function. 
If the function does not exist, this will + policy for a function. + + If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. + Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1911,7 +1894,7 @@ async def test_iam_permissions( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. @@ -1919,7 +1902,7 @@ async def test_iam_permissions( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index e2fbde7119f3..9f506214a2e4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - from collections import OrderedDict from distutils import util import functools @@ -35,24 +33,23 @@ import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.types import pubsub import grpc - from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO from .transports.grpc import SubscriberGrpcTransport from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport @@ -71,7 +68,7 @@ class SubscriberClientMeta(type): _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport def get_transport_class(cls, label: str = None,) -> Type[SubscriberTransport]: - """Return an appropriate transport class. + """Returns an appropriate transport class. 
Args: @@ -99,7 +96,8 @@ class SubscriberClient(metaclass=SubscriberClientMeta): @staticmethod def _get_default_mtls_endpoint(api_endpoint): - """Convert api endpoint to mTLS endpoint. + """Converts api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. @@ -144,7 +142,8 @@ def _get_default_mtls_endpoint(api_endpoint): @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials info. + """Creates an instance of this client using the provided credentials + info. Args: @@ -162,7 +161,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials - file. + file. Args: @@ -182,36 +181,37 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): @property def transport(self) -> SubscriberTransport: - """Return the transport used by the client instance. + """Returns the transport used by the client instance. Returns: - SubscriberTransport: The transport used by the client instance. + SubscriberTransport: The transport used by the client + instance. 
""" return self._transport @staticmethod def snapshot_path(project: str, snapshot: str,) -> str: - """Return a fully-qualified snapshot string.""" + """Returns a fully-qualified snapshot string.""" return "projects/{project}/snapshots/{snapshot}".format( project=project, snapshot=snapshot, ) @staticmethod def parse_snapshot_path(path: str) -> Dict[str, str]: - """Parse a snapshot path into its component segments.""" + """Parses a snapshot path into its component segments.""" m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def subscription_path(project: str, subscription: str,) -> str: - """Return a fully-qualified subscription string.""" + """Returns a fully-qualified subscription string.""" return "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, ) @staticmethod def parse_subscription_path(path: str) -> Dict[str, str]: - """Parse a subscription path into its component segments.""" + """Parses a subscription path into its component segments.""" m = re.match( r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path ) @@ -219,18 +219,18 @@ def parse_subscription_path(path: str) -> Dict[str, str]: @staticmethod def topic_path(project: str, topic: str,) -> str: - """Return a fully-qualified topic string.""" + """Returns a fully-qualified topic string.""" return "projects/{project}/topics/{topic}".format(project=project, topic=topic,) @staticmethod def parse_topic_path(path: str) -> Dict[str, str]: - """Parse a topic path into its component segments.""" + """Parses a topic path into its component segments.""" m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) return m.groupdict() if m else {} @staticmethod def common_billing_account_path(billing_account: str,) -> str: - """Return a fully-qualified billing_account string.""" + """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( 
billing_account=billing_account, ) @@ -243,7 +243,7 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: @staticmethod def common_folder_path(folder: str,) -> str: - """Return a fully-qualified folder string.""" + """Returns a fully-qualified folder string.""" return "folders/{folder}".format(folder=folder,) @staticmethod @@ -254,7 +254,7 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: @staticmethod def common_organization_path(organization: str,) -> str: - """Return a fully-qualified organization string.""" + """Returns a fully-qualified organization string.""" return "organizations/{organization}".format(organization=organization,) @staticmethod @@ -265,7 +265,7 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: @staticmethod def common_project_path(project: str,) -> str: - """Return a fully-qualified project string.""" + """Returns a fully-qualified project string.""" return "projects/{project}".format(project=project,) @staticmethod @@ -276,7 +276,7 @@ def parse_common_project_path(path: str) -> Dict[str, str]: @staticmethod def common_location_path(project: str, location: str,) -> str: - """Return a fully-qualified location string.""" + """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -290,12 +290,12 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def __init__( self, *, - credentials: Optional[credentials.Credentials] = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, SubscriberTransport, None] = None, client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiate the subscriber client. + """Instantiates the subscriber client. 
Args: @@ -351,9 +351,10 @@ def __init__( client_cert_source_func = client_options.client_cert_source else: is_mtls = mtls.has_default_client_cert_source() - client_cert_source_func = ( - mtls.default_client_cert_source() if is_mtls else None - ) + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -365,12 +366,14 @@ def __init__( elif use_mtls_env == "always": api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT - ) + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" ) # Save or instantiate the transport. @@ -385,8 +388,8 @@ def __init__( ) if client_options.scopes: raise ValueError( - "When providing a transport instance, " - "provide its scopes directly." + "When providing a transport instance, provide its scopes " + "directly." ) self._transport = transport else: @@ -502,7 +505,6 @@ def create_subscription( This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -529,10 +531,8 @@ def create_subscription( # there are no flattened fields. if not isinstance(request, pubsub.Subscription): request = pubsub.Subscription(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if name is not None: request.name = name if topic is not None: @@ -581,7 +581,6 @@ def get_subscription( This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -608,10 +607,8 @@ def get_subscription( # there are no flattened fields. if not isinstance(request, pubsub.GetSubscriptionRequest): request = pubsub.GetSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription @@ -650,7 +647,6 @@ def update_subscription( request (google.pubsub_v1.types.UpdateSubscriptionRequest): The request object. Request for the UpdateSubscription method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -662,7 +658,6 @@ def update_subscription( A subscription resource. """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a pubsub.UpdateSubscriptionRequest. # There's no risk of modifying the input as we've already verified @@ -711,7 +706,6 @@ def list_subscriptions( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -742,10 +736,8 @@ def list_subscriptions( # there are no flattened fields. if not isinstance(request, pubsub.ListSubscriptionsRequest): request = pubsub.ListSubscriptionsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if project is not None: request.project = project @@ -799,7 +791,6 @@ def delete_subscription( This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -822,10 +813,8 @@ def delete_subscription( # there are no flattened fields. if not isinstance(request, pubsub.DeleteSubscriptionRequest): request = pubsub.DeleteSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription @@ -897,7 +886,6 @@ def modify_ack_deadline( This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -920,10 +908,8 @@ def modify_ack_deadline( # there are no flattened fields. if not isinstance(request, pubsub.ModifyAckDeadlineRequest): request = pubsub.ModifyAckDeadlineRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription if ack_ids is not None: @@ -987,7 +973,6 @@ def acknowledge( This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1010,10 +995,8 @@ def acknowledge( # there are no flattened fields. if not isinstance(request, pubsub.AcknowledgeRequest): request = pubsub.AcknowledgeRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if subscription is not None: request.subscription = subscription if ack_ids is not None: @@ -1087,7 +1070,6 @@ def pull( This corresponds to the ``max_messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1114,10 +1096,8 @@ def pull( # there are no flattened fields. if not isinstance(request, pubsub.PullRequest): request = pubsub.PullRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription if return_immediately is not None: @@ -1244,7 +1224,6 @@ def modify_push_config( This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1267,10 +1246,8 @@ def modify_push_config( # there are no flattened fields. if not isinstance(request, pubsub.ModifyPushConfigRequest): request = pubsub.ModifyPushConfigRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if subscription is not None: request.subscription = subscription if push_config is not None: @@ -1321,7 +1298,6 @@ def get_snapshot( This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1354,10 +1330,8 @@ def get_snapshot( # there are no flattened fields. if not isinstance(request, pubsub.GetSnapshotRequest): request = pubsub.GetSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. 
- if snapshot is not None: request.snapshot = snapshot @@ -1404,7 +1378,6 @@ def list_snapshots( This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1435,10 +1408,8 @@ def list_snapshots( # there are no flattened fields. if not isinstance(request, pubsub.ListSnapshotsRequest): request = pubsub.ListSnapshotsRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if project is not None: request.project = project @@ -1526,7 +1497,6 @@ def create_snapshot( This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1559,10 +1529,8 @@ def create_snapshot( # there are no flattened fields. if not isinstance(request, pubsub.CreateSnapshotRequest): request = pubsub.CreateSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if name is not None: request.name = name if subscription is not None: @@ -1605,7 +1573,6 @@ def update_snapshot( request (google.pubsub_v1.types.UpdateSnapshotRequest): The request object. Request for the UpdateSnapshot method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1623,7 +1590,6 @@ def update_snapshot( """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a pubsub.UpdateSnapshotRequest. 
# There's no risk of modifying the input as we've already verified @@ -1681,7 +1647,6 @@ def delete_snapshot( This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1704,10 +1669,8 @@ def delete_snapshot( # there are no flattened fields. if not isinstance(request, pubsub.DeleteSnapshotRequest): request = pubsub.DeleteSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the # request, apply these. - if snapshot is not None: request.snapshot = snapshot @@ -1748,7 +1711,6 @@ def seek( Args: request (google.pubsub_v1.types.SeekRequest): The request object. Request for the `Seek` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1760,7 +1722,6 @@ def seek( Response for the Seek method (this response is empty). """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes # in a pubsub.SeekRequest. # There's no risk of modifying the input as we've already verified @@ -1788,17 +1749,19 @@ def seek( def set_iam_policy( self, - request: iam_policy.SetIamPolicyRequest = None, + request: iam_policy_pb2.SetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: - r"""Sets the IAM access control policy on the specified - function. Replaces any existing policy. + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1807,7 +1770,7 @@ def set_iam_policy( metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. It is used to specify access control policies for Cloud Platform resources. @@ -1874,7 +1837,7 @@ def set_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.SetIamPolicyRequest(**request) + request = iam_policy_pb2.SetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1898,27 +1861,29 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy.GetIamPolicyRequest = None, + request: iam_policy_pb2.GetIamPolicyRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> policy.Policy: + ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. + + Returns an empty policy if the function exists and does not have a + policy set. + Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.policy.Policy: + ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
It is used to specify access control policies for Cloud Platform resources. @@ -1985,7 +1950,7 @@ def get_iam_policy( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.GetIamPolicyRequest(**request) + request = iam_policy_pb2.GetIamPolicyRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -2009,27 +1974,30 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy.TestIamPermissionsRequest = None, + request: iam_policy_pb2.TestIamPermissionsRequest = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. 
""" # Create or coerce a protobuf request object. @@ -2037,7 +2005,7 @@ def test_iam_permissions( # The request isn't a proto-plus wrapped type, # so it must be constructed via keyword expansion. if isinstance(request, dict): - request = iam_policy.TestIamPermissionsRequest(**request) + request = iam_policy_pb2.TestIamPermissionsRequest(**request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index b7ec9f6e3676..49568852261e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from typing import ( Any, AsyncIterable, @@ -117,7 +115,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. Args: method (Callable): The method that was originally called, and @@ -245,7 +243,7 @@ def __init__( *, metadata: Sequence[Tuple[str, str]] = () ): - """Instantiate the pager. + """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index 0246cfeca2e2..023406c8f58c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from collections import OrderedDict from typing import Dict, Type diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index b8a1b97b39bb..7100e5849f52 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import abc -import typing +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version import pkg_resources -from google import auth # type: ignore -from google.api_core import exceptions # type: ignore +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub - try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( client_library_version=pkg_resources.get_distribution( @@ -40,6 +40,15 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + class SubscriberTransport(abc.ABC): """Abstract transport class for Subscriber.""" @@ -49,21 +58,25 @@ class SubscriberTransport(abc.ABC): "https://www.googleapis.com/auth/pubsub", ) + DEFAULT_HOST: str = "pubsub.googleapis.com" + def __init__( self, *, - host: str = "pubsub.googleapis.com", - credentials: 
credentials.Credentials = None, - credentials_file: typing.Optional[str] = None, - scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, - quota_project_id: typing.Optional[str] = None, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, **kwargs, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -72,7 +85,7 @@ def __init__( credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. - scope (Optional[Sequence[str]]): A list of scopes. + scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -80,35 +93,70 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" self._host = host + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: - raise exceptions.DuplicateCredentialArgs( + raise core_exceptions.DuplicateCredentialArgs( "'credentials_file' and 'credentials' are mutually exclusive" ) if credentials_file is not None: - credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) elif credentials is None: - credentials, _ = auth.default( - scopes=self._scopes, quota_project_id=quota_project_id + credentials, _ = google.auth.default( + **scopes_kwargs, quota_project_id=quota_project_id ) + # If the credentials is service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + # Save the credentials. self._credentials = credentials + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs( + cls, host: str, scopes: Optional[Sequence[str]] + ) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -119,9 +167,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -135,9 +183,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -150,7 +198,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -163,9 +213,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -178,7 +228,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -190,7 +242,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -202,7 +256,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, 
maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -215,9 +271,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -231,11 +287,11 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.DeadlineExceeded, - exceptions.InternalServerError, - exceptions.ResourceExhausted, - exceptions.ServiceUnavailable, + core_exceptions.Aborted, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, ), deadline=900.0, ), @@ -248,7 +304,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -261,9 +319,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -277,9 +335,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -292,7 
+350,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -304,7 +364,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -316,7 +378,9 @@ def _prep_wrapped_messages(self, client_info): initial=0.1, maximum=60.0, multiplier=1.3, - predicate=retries.if_exception_type(exceptions.ServiceUnavailable,), + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), deadline=60.0, ), default_timeout=60.0, @@ -329,9 +393,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.Aborted, - exceptions.ServiceUnavailable, - exceptions.Unknown, + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, ), deadline=60.0, ), @@ -343,38 +407,38 @@ def _prep_wrapped_messages(self, client_info): @property def create_subscription( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.Subscription], - typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + Union[pubsub.Subscription, Awaitable[pubsub.Subscription]], ]: raise NotImplementedError() @property def get_subscription( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.GetSubscriptionRequest], - typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + Union[pubsub.Subscription, Awaitable[pubsub.Subscription]], ]: raise NotImplementedError() @property def update_subscription( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.UpdateSubscriptionRequest], - 
typing.Union[pubsub.Subscription, typing.Awaitable[pubsub.Subscription]], + Union[pubsub.Subscription, Awaitable[pubsub.Subscription]], ]: raise NotImplementedError() @property def list_subscriptions( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.ListSubscriptionsRequest], - typing.Union[ + Union[ pubsub.ListSubscriptionsResponse, - typing.Awaitable[pubsub.ListSubscriptionsResponse], + Awaitable[pubsub.ListSubscriptionsResponse], ], ]: raise NotImplementedError() @@ -382,141 +446,133 @@ def list_subscriptions( @property def delete_subscription( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.DeleteSubscriptionRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def modify_ack_deadline( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.ModifyAckDeadlineRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def acknowledge( self, - ) -> typing.Callable[ - [pubsub.AcknowledgeRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ) -> Callable[ + [pubsub.AcknowledgeRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] ]: raise NotImplementedError() @property def pull( self, - ) -> typing.Callable[ - [pubsub.PullRequest], - typing.Union[pubsub.PullResponse, typing.Awaitable[pubsub.PullResponse]], + ) -> Callable[ + [pubsub.PullRequest], Union[pubsub.PullResponse, Awaitable[pubsub.PullResponse]] ]: raise NotImplementedError() @property def streaming_pull( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.StreamingPullRequest], - typing.Union[ - pubsub.StreamingPullResponse, typing.Awaitable[pubsub.StreamingPullResponse] - ], + Union[pubsub.StreamingPullResponse, Awaitable[pubsub.StreamingPullResponse]], ]: raise NotImplementedError() @property def modify_push_config( self, - ) -> typing.Callable[ + ) -> 
Callable[ [pubsub.ModifyPushConfigRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def get_snapshot( self, - ) -> typing.Callable[ - [pubsub.GetSnapshotRequest], - typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + ) -> Callable[ + [pubsub.GetSnapshotRequest], Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]] ]: raise NotImplementedError() @property def list_snapshots( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.ListSnapshotsRequest], - typing.Union[ - pubsub.ListSnapshotsResponse, typing.Awaitable[pubsub.ListSnapshotsResponse] - ], + Union[pubsub.ListSnapshotsResponse, Awaitable[pubsub.ListSnapshotsResponse]], ]: raise NotImplementedError() @property def create_snapshot( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.CreateSnapshotRequest], - typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]], ]: raise NotImplementedError() @property def update_snapshot( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.UpdateSnapshotRequest], - typing.Union[pubsub.Snapshot, typing.Awaitable[pubsub.Snapshot]], + Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]], ]: raise NotImplementedError() @property def delete_snapshot( self, - ) -> typing.Callable[ + ) -> Callable[ [pubsub.DeleteSnapshotRequest], - typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], ]: raise NotImplementedError() @property def seek( self, - ) -> typing.Callable[ - [pubsub.SeekRequest], - typing.Union[pubsub.SeekResponse, typing.Awaitable[pubsub.SeekResponse]], + ) -> Callable[ + [pubsub.SeekRequest], Union[pubsub.SeekResponse, Awaitable[pubsub.SeekResponse]] ]: raise NotImplementedError() @property def set_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.SetIamPolicyRequest], - typing.Union[policy.Policy, 
typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.SetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def get_iam_policy( self, - ) -> typing.Callable[ - [iam_policy.GetIamPolicyRequest], - typing.Union[policy.Policy, typing.Awaitable[policy.Policy]], + ) -> Callable[ + [iam_policy_pb2.GetIamPolicyRequest], + Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], ]: raise NotImplementedError() @property def test_iam_permissions( self, - ) -> typing.Callable[ - [iam_policy.TestIamPermissionsRequest], - typing.Union[ - iam_policy.TestIamPermissionsResponse, - typing.Awaitable[iam_policy.TestIamPermissionsResponse], + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Union[ + iam_policy_pb2.TestIamPermissionsResponse, + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ], ]: raise NotImplementedError() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index b3f26f1f0828..b116b018c85e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,23 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple +from typing import Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub - from .base import SubscriberTransport, DEFAULT_CLIENT_INFO @@ -56,7 +53,7 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, @@ -66,11 +63,13 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -105,6 +104,8 @@ def __init__( API requests. If ``None``, then default info will be used. 
Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -157,6 +158,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -181,7 +183,7 @@ def __init__( def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -212,13 +214,15 @@ def create_channel( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -349,7 +353,7 @@ def list_subscriptions( @property def delete_subscription( self, - ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty.Empty]: + ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty_pb2.Empty]: r"""Return a callable for the delete subscription method over gRPC. Deletes an existing subscription. 
All messages retained in the @@ -373,14 +377,14 @@ def delete_subscription( self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSubscription", request_serializer=pubsub.DeleteSubscriptionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_subscription"] @property def modify_ack_deadline( self, - ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty.Empty]: + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty_pb2.Empty]: r"""Return a callable for the modify ack deadline method over gRPC. Modifies the ack deadline for a specific message. This method is @@ -404,12 +408,12 @@ def modify_ack_deadline( self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyAckDeadline", request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["modify_ack_deadline"] @property - def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty.Empty]: + def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty_pb2.Empty]: r"""Return a callable for the acknowledge method over gRPC. 
Acknowledges the messages associated with the ``ack_ids`` in the @@ -435,7 +439,7 @@ def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty.Empty]: self._stubs["acknowledge"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/Acknowledge", request_serializer=pubsub.AcknowledgeRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["acknowledge"] @@ -501,7 +505,7 @@ def streaming_pull( @property def modify_push_config( self, - ) -> Callable[[pubsub.ModifyPushConfigRequest], empty.Empty]: + ) -> Callable[[pubsub.ModifyPushConfigRequest], empty_pb2.Empty]: r"""Return a callable for the modify push config method over gRPC. Modifies the ``PushConfig`` for a specified subscription. @@ -526,7 +530,7 @@ def modify_push_config( self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyPushConfig", request_serializer=pubsub.ModifyPushConfigRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["modify_push_config"] @@ -667,7 +671,9 @@ def update_snapshot( return self._stubs["update_snapshot"] @property - def delete_snapshot(self) -> Callable[[pubsub.DeleteSnapshotRequest], empty.Empty]: + def delete_snapshot( + self, + ) -> Callable[[pubsub.DeleteSnapshotRequest], empty_pb2.Empty]: r"""Return a callable for the delete snapshot method over gRPC. Removes an existing snapshot. 
Snapshots are used in [Seek] @@ -695,7 +701,7 @@ def delete_snapshot(self) -> Callable[[pubsub.DeleteSnapshotRequest], empty.Empt self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSnapshot", request_serializer=pubsub.DeleteSnapshotRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_snapshot"] @@ -734,7 +740,7 @@ def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. @@ -751,15 +757,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], policy.Policy]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. 
Returns an empty policy if the function exists and does @@ -777,8 +783,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -786,7 +792,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], iam_policy.TestIamPermissionsResponse + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -805,8 +812,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index bc385d317d4a..91d5577903a4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # 
Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,22 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore -from google import auth # type: ignore -from google.auth import credentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore -from google.protobuf import empty_pb2 as empty # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub - from .base import SubscriberTransport, DEFAULT_CLIENT_INFO from .grpc import SubscriberGrpcTransport @@ -59,7 +56,7 @@ class SubscriberGrpcAsyncIOTransport(SubscriberTransport): def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -86,13 +83,15 @@ def create_channel( Returns: aio.Channel: A gRPC AsyncIO channel object. 
""" - scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes, quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, **kwargs, ) @@ -100,7 +99,7 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: credentials.Credentials = None, + credentials: ga_credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, @@ -110,11 +109,13 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. Args: - host (Optional[str]): The hostname to connect to. + host (Optional[str]): + The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -150,6 +151,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -172,7 +175,6 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None - else: if api_mtls_endpoint: host = api_mtls_endpoint @@ -202,6 +204,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: @@ -355,7 +358,7 @@ def list_subscriptions( @property def delete_subscription( self, - ) -> Callable[[pubsub.DeleteSubscriptionRequest], Awaitable[empty.Empty]]: + ) -> Callable[[pubsub.DeleteSubscriptionRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete subscription method over gRPC. Deletes an existing subscription. All messages retained in the @@ -379,14 +382,14 @@ def delete_subscription( self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSubscription", request_serializer=pubsub.DeleteSubscriptionRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_subscription"] @property def modify_ack_deadline( self, - ) -> Callable[[pubsub.ModifyAckDeadlineRequest], Awaitable[empty.Empty]]: + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the modify ack deadline method over gRPC. Modifies the ack deadline for a specific message. This method is @@ -410,14 +413,14 @@ def modify_ack_deadline( self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyAckDeadline", request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["modify_ack_deadline"] @property def acknowledge( self, - ) -> Callable[[pubsub.AcknowledgeRequest], Awaitable[empty.Empty]]: + ) -> Callable[[pubsub.AcknowledgeRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the acknowledge method over gRPC. 
Acknowledges the messages associated with the ``ack_ids`` in the @@ -443,7 +446,7 @@ def acknowledge( self._stubs["acknowledge"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/Acknowledge", request_serializer=pubsub.AcknowledgeRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["acknowledge"] @@ -511,7 +514,7 @@ def streaming_pull( @property def modify_push_config( self, - ) -> Callable[[pubsub.ModifyPushConfigRequest], Awaitable[empty.Empty]]: + ) -> Callable[[pubsub.ModifyPushConfigRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the modify push config method over gRPC. Modifies the ``PushConfig`` for a specified subscription. @@ -536,7 +539,7 @@ def modify_push_config( self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyPushConfig", request_serializer=pubsub.ModifyPushConfigRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["modify_push_config"] @@ -683,7 +686,7 @@ def update_snapshot( @property def delete_snapshot( self, - ) -> Callable[[pubsub.DeleteSnapshotRequest], Awaitable[empty.Empty]]: + ) -> Callable[[pubsub.DeleteSnapshotRequest], Awaitable[empty_pb2.Empty]]: r"""Return a callable for the delete snapshot method over gRPC. Removes an existing snapshot. 
Snapshots are used in [Seek] @@ -711,7 +714,7 @@ def delete_snapshot( self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSnapshot", request_serializer=pubsub.DeleteSnapshotRequest.serialize, - response_deserializer=empty.Empty.FromString, + response_deserializer=empty_pb2.Empty.FromString, ) return self._stubs["delete_snapshot"] @@ -750,7 +753,7 @@ def seek(self) -> Callable[[pubsub.SeekRequest], Awaitable[pubsub.SeekResponse]] @property def set_iam_policy( self, - ) -> Callable[[iam_policy.SetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the set iam policy method over gRPC. Sets the IAM access control policy on the specified function. Replaces any existing policy. @@ -767,15 +770,15 @@ def set_iam_policy( if "set_iam_policy" not in self._stubs: self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["set_iam_policy"] @property def get_iam_policy( self, - ) -> Callable[[iam_policy.GetIamPolicyRequest], Awaitable[policy.Policy]]: + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: r"""Return a callable for the get iam policy method over gRPC. Gets the IAM access control policy for a function. 
Returns an empty policy if the function exists and does @@ -793,8 +796,8 @@ def get_iam_policy( if "get_iam_policy" not in self._stubs: self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy.Policy.FromString, + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, ) return self._stubs["get_iam_policy"] @@ -802,8 +805,8 @@ def get_iam_policy( def test_iam_permissions( self, ) -> Callable[ - [iam_policy.TestIamPermissionsRequest], - Awaitable[iam_policy.TestIamPermissionsResponse], + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], ]: r"""Return a callable for the test iam permissions method over gRPC. Tests the specified permissions against the IAM access control @@ -822,8 +825,8 @@ def test_iam_permissions( if "test_iam_permissions" not in self._stubs: self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy.TestIamPermissionsResponse.FromString, + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 5203abfe7f58..ebc8b53994de 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the 
License for the specific language governing permissions and # limitations under the License. # - from typing import Union from .pubsub import ( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 6fb73e8c98c9..bfbbcaf87e5f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,13 +13,11 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore - -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.types import schema as gp_schema @@ -91,7 +88,7 @@ class MessageStoragePolicy(proto.Message): not a valid configuration. """ - allowed_persistence_regions = proto.RepeatedField(proto.STRING, number=1) + allowed_persistence_regions = proto.RepeatedField(proto.STRING, number=1,) class SchemaSettings(proto.Message): @@ -108,8 +105,7 @@ class SchemaSettings(proto.Message): The encoding of messages validated against ``schema``. """ - schema = proto.Field(proto.STRING, number=1) - + schema = proto.Field(proto.STRING, number=1,) encoding = proto.Field(proto.ENUM, number=2, enum=gp_schema.Encoding,) @@ -149,19 +145,14 @@ class Topic(proto.Message): if it is set in any requests. 
""" - name = proto.Field(proto.STRING, number=1) - - labels = proto.MapField(proto.STRING, proto.STRING, number=2) - + name = proto.Field(proto.STRING, number=1,) + labels = proto.MapField(proto.STRING, proto.STRING, number=2,) message_storage_policy = proto.Field( proto.MESSAGE, number=3, message="MessageStoragePolicy", ) - - kms_key_name = proto.Field(proto.STRING, number=5) - + kms_key_name = proto.Field(proto.STRING, number=5,) schema_settings = proto.Field(proto.MESSAGE, number=6, message="SchemaSettings",) - - satisfies_pzs = proto.Field(proto.BOOL, number=7) + satisfies_pzs = proto.Field(proto.BOOL, number=7,) class PubsubMessage(proto.Message): @@ -207,15 +198,13 @@ class PubsubMessage(proto.Message): same ``ordering_key`` value. """ - data = proto.Field(proto.BYTES, number=1) - - attributes = proto.MapField(proto.STRING, proto.STRING, number=2) - - message_id = proto.Field(proto.STRING, number=3) - - publish_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) - - ordering_key = proto.Field(proto.STRING, number=5) + data = proto.Field(proto.BYTES, number=1,) + attributes = proto.MapField(proto.STRING, proto.STRING, number=2,) + message_id = proto.Field(proto.STRING, number=3,) + publish_time = proto.Field( + proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, + ) + ordering_key = proto.Field(proto.STRING, number=5,) class GetTopicRequest(proto.Message): @@ -227,7 +216,7 @@ class GetTopicRequest(proto.Message): ``projects/{project}/topics/{topic}``. 
""" - topic = proto.Field(proto.STRING, number=1) + topic = proto.Field(proto.STRING, number=1,) class UpdateTopicRequest(proto.Message): @@ -246,8 +235,9 @@ class UpdateTopicRequest(proto.Message): """ topic = proto.Field(proto.MESSAGE, number=1, message="Topic",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class PublishRequest(proto.Message): @@ -261,8 +251,7 @@ class PublishRequest(proto.Message): Required. The messages to publish. """ - topic = proto.Field(proto.STRING, number=1) - + topic = proto.Field(proto.STRING, number=1,) messages = proto.RepeatedField(proto.MESSAGE, number=2, message="PubsubMessage",) @@ -277,7 +266,7 @@ class PublishResponse(proto.Message): within the topic. """ - message_ids = proto.RepeatedField(proto.STRING, number=1) + message_ids = proto.RepeatedField(proto.STRING, number=1,) class ListTopicsRequest(proto.Message): @@ -296,11 +285,9 @@ class ListTopicsRequest(proto.Message): next page of data. """ - project = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + project = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListTopicsResponse(proto.Message): @@ -320,8 +307,7 @@ def raw_page(self): return self topics = proto.RepeatedField(proto.MESSAGE, number=1, message="Topic",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class ListTopicSubscriptionsRequest(proto.Message): @@ -342,11 +328,9 @@ class ListTopicSubscriptionsRequest(proto.Message): that the system should return the next page of data. 
""" - topic = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + topic = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListTopicSubscriptionsResponse(proto.Message): @@ -366,9 +350,8 @@ class ListTopicSubscriptionsResponse(proto.Message): def raw_page(self): return self - subscriptions = proto.RepeatedField(proto.STRING, number=1) - - next_page_token = proto.Field(proto.STRING, number=2) + subscriptions = proto.RepeatedField(proto.STRING, number=1,) + next_page_token = proto.Field(proto.STRING, number=2,) class ListTopicSnapshotsRequest(proto.Message): @@ -387,11 +370,9 @@ class ListTopicSnapshotsRequest(proto.Message): that the system should return the next page of data. """ - topic = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + topic = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListTopicSnapshotsResponse(proto.Message): @@ -411,9 +392,8 @@ class ListTopicSnapshotsResponse(proto.Message): def raw_page(self): return self - snapshots = proto.RepeatedField(proto.STRING, number=1) - - next_page_token = proto.Field(proto.STRING, number=2) + snapshots = proto.RepeatedField(proto.STRING, number=1,) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteTopicRequest(proto.Message): @@ -425,7 +405,7 @@ class DeleteTopicRequest(proto.Message): ``projects/{project}/topics/{topic}``. """ - topic = proto.Field(proto.STRING, number=1) + topic = proto.Field(proto.STRING, number=1,) class DetachSubscriptionRequest(proto.Message): @@ -437,13 +417,13 @@ class DetachSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{subscription}``. 
""" - subscription = proto.Field(proto.STRING, number=1) + subscription = proto.Field(proto.STRING, number=1,) class DetachSubscriptionResponse(proto.Message): r"""Response for the DetachSubscription method. Reserved for future use. - """ + """ class Subscription(proto.Message): @@ -563,37 +543,25 @@ class Subscription(proto.Message): endpoint will not be made. """ - name = proto.Field(proto.STRING, number=1) - - topic = proto.Field(proto.STRING, number=2) - + name = proto.Field(proto.STRING, number=1,) + topic = proto.Field(proto.STRING, number=2,) push_config = proto.Field(proto.MESSAGE, number=4, message="PushConfig",) - - ack_deadline_seconds = proto.Field(proto.INT32, number=5) - - retain_acked_messages = proto.Field(proto.BOOL, number=7) - + ack_deadline_seconds = proto.Field(proto.INT32, number=5,) + retain_acked_messages = proto.Field(proto.BOOL, number=7,) message_retention_duration = proto.Field( - proto.MESSAGE, number=8, message=duration.Duration, + proto.MESSAGE, number=8, message=duration_pb2.Duration, ) - - labels = proto.MapField(proto.STRING, proto.STRING, number=9) - - enable_message_ordering = proto.Field(proto.BOOL, number=10) - + labels = proto.MapField(proto.STRING, proto.STRING, number=9,) + enable_message_ordering = proto.Field(proto.BOOL, number=10,) expiration_policy = proto.Field( proto.MESSAGE, number=11, message="ExpirationPolicy", ) - - filter = proto.Field(proto.STRING, number=12) - + filter = proto.Field(proto.STRING, number=12,) dead_letter_policy = proto.Field( proto.MESSAGE, number=13, message="DeadLetterPolicy", ) - retry_policy = proto.Field(proto.MESSAGE, number=14, message="RetryPolicy",) - - detached = proto.Field(proto.BOOL, number=15) + detached = proto.Field(proto.BOOL, number=15,) class RetryPolicy(proto.Message): @@ -623,9 +591,12 @@ class RetryPolicy(proto.Message): seconds. 
""" - minimum_backoff = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) - - maximum_backoff = proto.Field(proto.MESSAGE, number=2, message=duration.Duration,) + minimum_backoff = proto.Field( + proto.MESSAGE, number=1, message=duration_pb2.Duration, + ) + maximum_backoff = proto.Field( + proto.MESSAGE, number=2, message=duration_pb2.Duration, + ) class DeadLetterPolicy(proto.Message): @@ -667,9 +638,8 @@ class DeadLetterPolicy(proto.Message): If this parameter is 0, a default value of 5 is used. """ - dead_letter_topic = proto.Field(proto.STRING, number=1) - - max_delivery_attempts = proto.Field(proto.INT32, number=2) + dead_letter_topic = proto.Field(proto.STRING, number=1,) + max_delivery_attempts = proto.Field(proto.INT32, number=2,) class ExpirationPolicy(proto.Message): @@ -687,7 +657,7 @@ class ExpirationPolicy(proto.Message): associated resource never expires. """ - ttl = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + ttl = proto.Field(proto.MESSAGE, number=1, message=duration_pb2.Duration,) class PushConfig(proto.Message): @@ -759,14 +729,11 @@ class OidcToken(proto.Message): will be used. """ - service_account_email = proto.Field(proto.STRING, number=1) - - audience = proto.Field(proto.STRING, number=2) - - push_endpoint = proto.Field(proto.STRING, number=1) - - attributes = proto.MapField(proto.STRING, proto.STRING, number=2) + service_account_email = proto.Field(proto.STRING, number=1,) + audience = proto.Field(proto.STRING, number=2,) + push_endpoint = proto.Field(proto.STRING, number=1,) + attributes = proto.MapField(proto.STRING, proto.STRING, number=2,) oidc_token = proto.Field( proto.MESSAGE, number=3, oneof="authentication_method", message=OidcToken, ) @@ -802,11 +769,9 @@ class ReceivedMessage(proto.Message): will be 0. 
""" - ack_id = proto.Field(proto.STRING, number=1) - + ack_id = proto.Field(proto.STRING, number=1,) message = proto.Field(proto.MESSAGE, number=2, message="PubsubMessage",) - - delivery_attempt = proto.Field(proto.INT32, number=3) + delivery_attempt = proto.Field(proto.INT32, number=3,) class GetSubscriptionRequest(proto.Message): @@ -818,7 +783,7 @@ class GetSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{sub}``. """ - subscription = proto.Field(proto.STRING, number=1) + subscription = proto.Field(proto.STRING, number=1,) class UpdateSubscriptionRequest(proto.Message): @@ -834,8 +799,9 @@ class UpdateSubscriptionRequest(proto.Message): """ subscription = proto.Field(proto.MESSAGE, number=1, message="Subscription",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class ListSubscriptionsRequest(proto.Message): @@ -854,11 +820,9 @@ class ListSubscriptionsRequest(proto.Message): the system should return the next page of data. """ - project = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + project = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListSubscriptionsResponse(proto.Message): @@ -880,8 +844,7 @@ def raw_page(self): subscriptions = proto.RepeatedField( proto.MESSAGE, number=1, message="Subscription", ) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteSubscriptionRequest(proto.Message): @@ -893,7 +856,7 @@ class DeleteSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{sub}``. 
""" - subscription = proto.Field(proto.STRING, number=1) + subscription = proto.Field(proto.STRING, number=1,) class ModifyPushConfigRequest(proto.Message): @@ -913,8 +876,7 @@ class ModifyPushConfigRequest(proto.Message): not called. """ - subscription = proto.Field(proto.STRING, number=1) - + subscription = proto.Field(proto.STRING, number=1,) push_config = proto.Field(proto.MESSAGE, number=2, message="PushConfig",) @@ -942,11 +904,9 @@ class PullRequest(proto.Message): than the number specified. """ - subscription = proto.Field(proto.STRING, number=1) - - return_immediately = proto.Field(proto.BOOL, number=2) - - max_messages = proto.Field(proto.INT32, number=3) + subscription = proto.Field(proto.STRING, number=1,) + return_immediately = proto.Field(proto.BOOL, number=2,) + max_messages = proto.Field(proto.INT32, number=3,) class PullResponse(proto.Message): @@ -988,11 +948,9 @@ class ModifyAckDeadlineRequest(proto.Message): seconds (10 minutes). """ - subscription = proto.Field(proto.STRING, number=1) - - ack_ids = proto.RepeatedField(proto.STRING, number=4) - - ack_deadline_seconds = proto.Field(proto.INT32, number=3) + subscription = proto.Field(proto.STRING, number=1,) + ack_ids = proto.RepeatedField(proto.STRING, number=4,) + ack_deadline_seconds = proto.Field(proto.INT32, number=3,) class AcknowledgeRequest(proto.Message): @@ -1009,9 +967,8 @@ class AcknowledgeRequest(proto.Message): ``Pull`` response. Must not be empty. """ - subscription = proto.Field(proto.STRING, number=1) - - ack_ids = proto.RepeatedField(proto.STRING, number=2) + subscription = proto.Field(proto.STRING, number=1,) + ack_ids = proto.RepeatedField(proto.STRING, number=2,) class StreamingPullRequest(proto.Message): @@ -1098,21 +1055,14 @@ class StreamingPullRequest(proto.Message): ``INVALID_ARGUMENT``. 
""" - subscription = proto.Field(proto.STRING, number=1) - - ack_ids = proto.RepeatedField(proto.STRING, number=2) - - modify_deadline_seconds = proto.RepeatedField(proto.INT32, number=3) - - modify_deadline_ack_ids = proto.RepeatedField(proto.STRING, number=4) - - stream_ack_deadline_seconds = proto.Field(proto.INT32, number=5) - - client_id = proto.Field(proto.STRING, number=6) - - max_outstanding_messages = proto.Field(proto.INT64, number=7) - - max_outstanding_bytes = proto.Field(proto.INT64, number=8) + subscription = proto.Field(proto.STRING, number=1,) + ack_ids = proto.RepeatedField(proto.STRING, number=2,) + modify_deadline_seconds = proto.RepeatedField(proto.INT32, number=3,) + modify_deadline_ack_ids = proto.RepeatedField(proto.STRING, number=4,) + stream_ack_deadline_seconds = proto.Field(proto.INT32, number=5,) + client_id = proto.Field(proto.STRING, number=6,) + max_outstanding_messages = proto.Field(proto.INT64, number=7,) + max_outstanding_bytes = proto.Field(proto.INT64, number=8,) class StreamingPullResponse(proto.Message): @@ -1123,11 +1073,27 @@ class StreamingPullResponse(proto.Message): received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. This will not be empty. + subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): + Properties associated with this subscription. """ + class SubscriptionProperties(proto.Message): + r"""Subscription properties sent as part of the response. + + Attributes: + message_ordering_enabled (bool): + True iff message ordering is enabled for this + subscription. 
+ """ + + message_ordering_enabled = proto.Field(proto.BOOL, number=2,) + received_messages = proto.RepeatedField( proto.MESSAGE, number=1, message="ReceivedMessage", ) + subscription_properties = proto.Field( + proto.MESSAGE, number=4, message=SubscriptionProperties, + ) class CreateSnapshotRequest(proto.Message): @@ -1158,11 +1124,9 @@ class CreateSnapshotRequest(proto.Message): Creating and managing labels. """ - name = proto.Field(proto.STRING, number=1) - - subscription = proto.Field(proto.STRING, number=2) - - labels = proto.MapField(proto.STRING, proto.STRING, number=3) + name = proto.Field(proto.STRING, number=1,) + subscription = proto.Field(proto.STRING, number=2,) + labels = proto.MapField(proto.STRING, proto.STRING, number=3,) class UpdateSnapshotRequest(proto.Message): @@ -1178,8 +1142,9 @@ class UpdateSnapshotRequest(proto.Message): """ snapshot = proto.Field(proto.MESSAGE, number=1, message="Snapshot",) - - update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,) + update_mask = proto.Field( + proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + ) class Snapshot(proto.Message): @@ -1214,13 +1179,10 @@ class Snapshot(proto.Message): (https://cloud.google.com/pubsub/docs/labels). """ - name = proto.Field(proto.STRING, number=1) - - topic = proto.Field(proto.STRING, number=2) - - expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,) - - labels = proto.MapField(proto.STRING, proto.STRING, number=4) + name = proto.Field(proto.STRING, number=1,) + topic = proto.Field(proto.STRING, number=2,) + expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) + labels = proto.MapField(proto.STRING, proto.STRING, number=4,) class GetSnapshotRequest(proto.Message): @@ -1232,7 +1194,7 @@ class GetSnapshotRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. 
""" - snapshot = proto.Field(proto.STRING, number=1) + snapshot = proto.Field(proto.STRING, number=1,) class ListSnapshotsRequest(proto.Message): @@ -1251,11 +1213,9 @@ class ListSnapshotsRequest(proto.Message): the next page of data. """ - project = proto.Field(proto.STRING, number=1) - - page_size = proto.Field(proto.INT32, number=2) - - page_token = proto.Field(proto.STRING, number=3) + project = proto.Field(proto.STRING, number=1,) + page_size = proto.Field(proto.INT32, number=2,) + page_token = proto.Field(proto.STRING, number=3,) class ListSnapshotsResponse(proto.Message): @@ -1275,8 +1235,7 @@ def raw_page(self): return self snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message="Snapshot",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteSnapshotRequest(proto.Message): @@ -1288,7 +1247,7 @@ class DeleteSnapshotRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. """ - snapshot = proto.Field(proto.STRING, number=1) + snapshot = proto.Field(proto.STRING, number=1,) class SeekRequest(proto.Message): @@ -1317,17 +1276,15 @@ class SeekRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. """ - subscription = proto.Field(proto.STRING, number=1) - + subscription = proto.Field(proto.STRING, number=1,) time = proto.Field( - proto.MESSAGE, number=2, oneof="target", message=timestamp.Timestamp, + proto.MESSAGE, number=2, oneof="target", message=timestamp_pb2.Timestamp, ) - - snapshot = proto.Field(proto.STRING, number=3, oneof="target") + snapshot = proto.Field(proto.STRING, number=3, oneof="target",) class SeekResponse(proto.Message): - r"""Response for the ``Seek`` method (this response is empty).""" + r"""Response for the ``Seek`` method (this response is empty). 
""" __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index e4f71d1328a6..d0c96217b578 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import proto # type: ignore @@ -74,11 +72,9 @@ class Type(proto.Enum): PROTOCOL_BUFFER = 1 AVRO = 2 - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) type_ = proto.Field(proto.ENUM, number=2, enum=Type,) - - definition = proto.Field(proto.STRING, number=3) + definition = proto.Field(proto.STRING, number=3,) class CreateSchemaRequest(proto.Message): @@ -103,11 +99,9 @@ class CreateSchemaRequest(proto.Message): for resource name constraints. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) schema = proto.Field(proto.MESSAGE, number=2, message="Schema",) - - schema_id = proto.Field(proto.STRING, number=3) + schema_id = proto.Field(proto.STRING, number=3,) class GetSchemaRequest(proto.Message): @@ -123,8 +117,7 @@ class GetSchemaRequest(proto.Message): ``definition``. Set to ``FULL`` to retrieve all fields. """ - name = proto.Field(proto.STRING, number=1) - + name = proto.Field(proto.STRING, number=1,) view = proto.Field(proto.ENUM, number=2, enum="SchemaView",) @@ -148,13 +141,10 @@ class ListSchemasRequest(proto.Message): next page of data. 
""" - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) view = proto.Field(proto.ENUM, number=2, enum="SchemaView",) - - page_size = proto.Field(proto.INT32, number=3) - - page_token = proto.Field(proto.STRING, number=4) + page_size = proto.Field(proto.INT32, number=3,) + page_token = proto.Field(proto.STRING, number=4,) class ListSchemasResponse(proto.Message): @@ -174,8 +164,7 @@ def raw_page(self): return self schemas = proto.RepeatedField(proto.MESSAGE, number=1, message="Schema",) - - next_page_token = proto.Field(proto.STRING, number=2) + next_page_token = proto.Field(proto.STRING, number=2,) class DeleteSchemaRequest(proto.Message): @@ -187,7 +176,7 @@ class DeleteSchemaRequest(proto.Message): ``projects/{project}/schemas/{schema}``. """ - name = proto.Field(proto.STRING, number=1) + name = proto.Field(proto.STRING, number=1,) class ValidateSchemaRequest(proto.Message): @@ -201,13 +190,12 @@ class ValidateSchemaRequest(proto.Message): Required. The schema object to validate. """ - parent = proto.Field(proto.STRING, number=1) - + parent = proto.Field(proto.STRING, number=1,) schema = proto.Field(proto.MESSAGE, number=2, message="Schema",) class ValidateSchemaResponse(proto.Message): - r"""Response for the ``ValidateSchema`` method.""" + r"""Response for the ``ValidateSchema`` method. 
""" class ValidateMessageRequest(proto.Message): @@ -229,21 +217,17 @@ class ValidateMessageRequest(proto.Message): The encoding expected for messages """ - parent = proto.Field(proto.STRING, number=1) - - name = proto.Field(proto.STRING, number=2, oneof="schema_spec") - + parent = proto.Field(proto.STRING, number=1,) + name = proto.Field(proto.STRING, number=2, oneof="schema_spec",) schema = proto.Field( proto.MESSAGE, number=3, oneof="schema_spec", message="Schema", ) - - message = proto.Field(proto.BYTES, number=4) - + message = proto.Field(proto.BYTES, number=4,) encoding = proto.Field(proto.ENUM, number=5, enum="Encoding",) class ValidateMessageResponse(proto.Message): - r"""Response for the ``ValidateMessage`` method.""" + r"""Response for the ``ValidateMessage`` method. """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 071e3665febc..7262e021e344 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -1,6 +1,5 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -15,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import argparse import os import libcst as cst @@ -41,42 +39,40 @@ def partition( class pubsubCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge': ('subscription', 'ack_ids', ), - 'create_schema': ('parent', 'schema', 'schema_id', ), - 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', ), - 'delete_schema': ('name', ), - 'delete_snapshot': ('snapshot', ), - 'delete_subscription': ('subscription', ), - 'delete_topic': ('topic', ), - 'detach_subscription': ('subscription', ), - 'get_schema': ('name', 'view', ), - 'get_snapshot': ('snapshot', ), - 'get_subscription': ('subscription', ), - 'get_topic': ('topic', ), - 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), - 'list_snapshots': ('project', 'page_size', 'page_token', ), - 'list_subscriptions': ('project', 'page_size', 'page_token', ), - 'list_topics': ('project', 'page_size', 'page_token', ), - 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), - 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), - 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), - 'modify_push_config': ('subscription', 'push_config', ), - 'publish': ('topic', 'messages', ), - 'pull': ('subscription', 'max_messages', 'return_immediately', ), - 'seek': ('subscription', 'time', 'snapshot', ), - 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), - 
'update_snapshot': ('snapshot', 'update_mask', ), - 'update_subscription': ('subscription', 'update_mask', ), - 'update_topic': ('topic', 'update_mask', ), - 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), - 'validate_schema': ('parent', 'schema', ), - + 'acknowledge': ('subscription', 'ack_ids', ), + 'create_schema': ('parent', 'schema', 'schema_id', ), + 'create_snapshot': ('name', 'subscription', 'labels', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', ), + 'delete_schema': ('name', ), + 'delete_snapshot': ('snapshot', ), + 'delete_subscription': ('subscription', ), + 'delete_topic': ('topic', ), + 'detach_subscription': ('subscription', ), + 'get_schema': ('name', 'view', ), + 'get_snapshot': ('snapshot', ), + 'get_subscription': ('subscription', ), + 'get_topic': ('topic', ), + 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), + 'list_snapshots': ('project', 'page_size', 'page_token', ), + 'list_subscriptions': ('project', 'page_size', 'page_token', ), + 'list_topics': ('project', 'page_size', 'page_token', ), + 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), + 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), + 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), + 'modify_push_config': ('subscription', 'push_config', ), + 'publish': ('topic', 'messages', ), + 'pull': ('subscription', 'max_messages', 'return_immediately', ), + 'seek': ('subscription', 'time', 'snapshot', ), + 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 
'max_outstanding_messages', 'max_outstanding_bytes', ), + 'update_snapshot': ('snapshot', 'update_mask', ), + 'update_subscription': ('subscription', 'update_mask', ), + 'update_topic': ('topic', 'update_mask', ), + 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), + 'validate_schema': ('parent', 'schema', ), 'get_iam_policy': ('resource', 'options', ), 'set_iam_policy': ('resource', 'policy', ), 'test_iam_permissions': ('resource', 'permissions', ), - } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: @@ -107,7 +103,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: value=cst.Dict([ cst.DictElement( cst.SimpleString("'{}'".format(name)), - cst.Element(value=arg.value) +cst.Element(value=arg.value) ) # Note: the args + kwargs looks silly, but keep in mind that # the control parameters had to be stripped out, and that diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 2e1d57520d2a..5b05b39a3e8b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,10 +30,11 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 - "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", + "google-api-core[grpc] >= 1.26.0, < 2.0.0dev", "libcst >= 0.3.10", "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", + "packaging >= 14.3", ] extras = {} diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata index 7938695bf9a7..ca1d82ffbf55 100644 --- a/packages/google-cloud-pubsub/synth.metadata +++ b/packages/google-cloud-pubsub/synth.metadata @@ -3,16 +3,29 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-pubsub.git", - "sha": "f1ee3463a89a1a994af0cc522d25bfd1c4412824" + "remote": "git@github.com:googleapis/python-pubsub.git", + 
"sha": "618ceeefab8f1f7f968ee42a94453afde4d58f7b" } }, { "git": { - "name": "googleapis", - "remote": "https://github.com/googleapis/googleapis.git", - "sha": "6598bb829c9e9a534be674649ffd1b4671a821f9", - "internalRef": "364449524" + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "4124d245133c0f740ac5e6fa2e7daeba327f1297" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "4124d245133c0f740ac5e6fa2e7daeba327f1297" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "4124d245133c0f740ac5e6fa2e7daeba327f1297" } } ], @@ -26,126 +39,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic-head.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - 
".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic-head.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic-head.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/samples/python3.9/common.cfg", - ".kokoro/samples/python3.9/continuous.cfg", - ".kokoro/samples/python3.9/periodic-head.cfg", - ".kokoro/samples/python3.9/periodic.cfg", - ".kokoro/samples/python3.9/presubmit.cfg", - ".kokoro/test-samples-against-head.sh", - ".kokoro/test-samples-impl.sh", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "SECURITY.md", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/multiprocessing.rst", - "google/cloud/pubsub_v1/proto/pubsub.proto", - "google/cloud/pubsub_v1/proto/schema.proto", - "google/pubsub/__init__.py", - "google/pubsub/py.typed", - "google/pubsub_v1/__init__.py", - "google/pubsub_v1/py.typed", - "google/pubsub_v1/services/__init__.py", - "google/pubsub_v1/services/publisher/__init__.py", - "google/pubsub_v1/services/publisher/async_client.py", - "google/pubsub_v1/services/publisher/client.py", - "google/pubsub_v1/services/publisher/pagers.py", - "google/pubsub_v1/services/publisher/transports/__init__.py", - "google/pubsub_v1/services/publisher/transports/base.py", - "google/pubsub_v1/services/publisher/transports/grpc.py", - "google/pubsub_v1/services/publisher/transports/grpc_asyncio.py", - "google/pubsub_v1/services/schema_service/__init__.py", - "google/pubsub_v1/services/schema_service/async_client.py", - "google/pubsub_v1/services/schema_service/client.py", - 
"google/pubsub_v1/services/schema_service/pagers.py", - "google/pubsub_v1/services/schema_service/transports/__init__.py", - "google/pubsub_v1/services/schema_service/transports/base.py", - "google/pubsub_v1/services/schema_service/transports/grpc.py", - "google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py", - "google/pubsub_v1/services/subscriber/__init__.py", - "google/pubsub_v1/services/subscriber/async_client.py", - "google/pubsub_v1/services/subscriber/client.py", - "google/pubsub_v1/services/subscriber/pagers.py", - "google/pubsub_v1/services/subscriber/transports/__init__.py", - "google/pubsub_v1/services/subscriber/transports/base.py", - "google/pubsub_v1/services/subscriber/transports/grpc.py", - "google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py", - "google/pubsub_v1/types/__init__.py", - "google/pubsub_v1/types/pubsub.py", - "google/pubsub_v1/types/schema.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "samples/snippets/noxfile.py", - "scripts/decrypt-secrets.sh", - "scripts/fixup_pubsub_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/pubsub_v1/__init__.py", - "tests/unit/gapic/pubsub_v1/test_publisher.py", - "tests/unit/gapic/pubsub_v1/test_schema_service.py", - "tests/unit/gapic/pubsub_v1/test_subscriber.py" ] } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py index 41b63e89e7e8..f5631974bfcf 100644 --- a/packages/google-cloud-pubsub/synth.py +++ b/packages/google-cloud-pubsub/synth.py @@ -49,7 +49,7 @@ # DEFAULT SCOPES and SERVICE_ADDRESS are being used. 
so let's force them in. s.replace( "google/pubsub_v1/services/*er/*client.py", - r"DEFAULT_ENDPOINT = 'pubsub\.googleapis\.com'", + """DEFAULT_ENDPOINT = "pubsub\.googleapis\.com\"""", """ # The scopes needed to make gRPC calls to all of the methods defined in # this service @@ -302,6 +302,22 @@ "\ggoogle/cloud/__init__.py\n\g<0>", ) +# Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 +s.replace(f"google/pubsub_v1/types/*.py", + r""". + Attributes:""", + r""".\n + Attributes:""", +) + +# Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 +s.replace("google/pubsub_v1/types/*.py", + r""". + Attributes:""", + r""".\n + Attributes:""", +) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt index 3d58c3f9cfcd..73677dc23c98 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.6.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.6.txt @@ -2,11 +2,12 @@ # are correct in setup.py # List *all* library dependencies and extras in this file. # Pin the version to the lower bound. 
-# # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 grpcio==1.38.1 -google-api-core==1.22.2 +google-api-core==1.26.0 libcst==0.3.10 proto-plus==1.7.1 -grpc-google-iam-v1==0.12.3 \ No newline at end of file +grpc-google-iam-v1==0.12.3 +packaging==14.3 +google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is transitively required through google-api-core diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/tests/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-pubsub/tests/unit/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py index e69de29bb2d1..4de65971c238 100644 --- a/packages/google-cloud-pubsub/tests/unit/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..4de65971c238 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py index 42ffdf2bc43d..4de65971c238 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 5434ae944dda..b171fb3e7983 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,26 +23,42 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account -from google.protobuf import field_mask_pb2 as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.publisher import PublisherAsyncClient from google.pubsub_v1.services.publisher import PublisherClient from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.services.publisher import transports +from google.pubsub_v1.services.publisher.transports.base import _GOOGLE_AUTH_VERSION from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import schema +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) def client_cert_source_callback(): @@ -87,7 +102,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) def test_publisher_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -100,9 +115,37 @@ def test_publisher_client_from_service_account_info(client_class): assert client.transport._host == "pubsub.googleapis.com:443" +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) +def test_publisher_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.PublisherGrpcTransport, "grpc"), + (transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_publisher_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + @pytest.mark.parametrize("client_class", 
[PublisherClient, PublisherAsyncClient,]) def test_publisher_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -151,7 +194,7 @@ def test_publisher_client_get_transport_class(): def test_publisher_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. with mock.patch.object(PublisherClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -433,7 +476,7 @@ def test_publisher_client_client_options_from_dict(): def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -446,23 +489,17 @@ def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): call.return_value = pubsub.Topic( name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, ) - response = client.create_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True @@ -474,7 +511,7 @@ def test_create_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -482,7 +519,6 @@ def test_create_topic_empty_call(): client.create_topic() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() @@ -491,7 +527,7 @@ async def test_create_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.Topic ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -508,22 +544,17 @@ async def test_create_topic_async( satisfies_pzs=True, ) ) - response = await client.create_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True @@ -533,17 +564,17 @@ async def test_create_topic_async_from_dict(): def test_create_topic_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.Topic() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_topic), "__call__") as call: call.return_value = pubsub.Topic() - client.create_topic(request) # Establish that the underlying gRPC stub method was called. @@ -558,17 +589,17 @@ def test_create_topic_field_headers(): @pytest.mark.asyncio async def test_create_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.Topic() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - await client.create_topic(request) # Establish that the underlying gRPC stub method was called. @@ -582,13 +613,12 @@ async def test_create_topic_field_headers_async(): def test_create_topic_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_topic(name="name_value",) @@ -597,12 +627,11 @@ def test_create_topic_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_create_topic_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -614,7 +643,7 @@ def test_create_topic_flattened_error(): @pytest.mark.asyncio async def test_create_topic_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: @@ -630,13 +659,12 @@ async def test_create_topic_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_create_topic_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -648,7 +676,7 @@ async def test_create_topic_flattened_error_async(): def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRequest): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -661,23 +689,17 @@ def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRe call.return_value = pubsub.Topic( name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, ) - response = client.update_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True @@ -689,7 +711,7 @@ def test_update_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -697,7 +719,6 @@ def test_update_topic_empty_call(): client.update_topic() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() @@ -706,7 +727,7 @@ async def test_update_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -723,22 +744,17 @@ async def test_update_topic_async( satisfies_pzs=True, ) ) - response = await client.update_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True @@ -748,17 +764,17 @@ async def test_update_topic_async_from_dict(): def test_update_topic_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.UpdateTopicRequest() + request.topic.name = "topic.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_topic), "__call__") as call: call.return_value = pubsub.Topic() - client.update_topic(request) # Establish that the underlying gRPC stub method was called. 
@@ -773,17 +789,17 @@ def test_update_topic_field_headers(): @pytest.mark.asyncio async def test_update_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.UpdateTopicRequest() + request.topic.name = "topic.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - await client.update_topic(request) # Establish that the underlying gRPC stub method was called. @@ -798,7 +814,7 @@ async def test_update_topic_field_headers_async(): def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -809,19 +825,15 @@ def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PublishResponse(message_ids=["message_ids_value"],) - response = client.publish(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.PublishResponse) - assert response.message_ids == ["message_ids_value"] @@ -833,7 +845,7 @@ def test_publish_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -841,7 +853,6 @@ def test_publish_empty_call(): client.publish() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() @@ -850,7 +861,7 @@ async def test_publish_async( transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -863,18 +874,15 @@ async def test_publish_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.PublishResponse(message_ids=["message_ids_value"],) ) - response = await client.publish(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.PublishResponse) - assert response.message_ids == ["message_ids_value"] @@ -884,17 +892,17 @@ async def test_publish_async_from_dict(): def test_publish_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = pubsub.PublishRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: call.return_value = pubsub.PublishResponse() - client.publish(request) # Establish that the underlying gRPC stub method was called. @@ -909,11 +917,12 @@ def test_publish_field_headers(): @pytest.mark.asyncio async def test_publish_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.PublishRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -921,7 +930,6 @@ async def test_publish_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.PublishResponse() ) - await client.publish(request) # Establish that the underlying gRPC stub method was called. @@ -935,13 +943,12 @@ async def test_publish_field_headers_async(): def test_publish_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PublishResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.publish( @@ -952,14 +959,12 @@ def test_publish_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" - assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] def test_publish_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -973,7 +978,7 @@ def test_publish_flattened_error(): @pytest.mark.asyncio async def test_publish_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: @@ -993,15 +998,13 @@ async def test_publish_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" - assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] @pytest.mark.asyncio async def test_publish_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1015,7 +1018,7 @@ async def test_publish_flattened_error_async(): def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1028,23 +1031,17 @@ def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest) call.return_value = pubsub.Topic( name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, ) - response = client.get_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True @@ -1056,7 +1053,7 @@ def test_get_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1064,7 +1061,6 @@ def test_get_topic_empty_call(): client.get_topic() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() @@ -1073,7 +1069,7 @@ async def test_get_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1090,22 +1086,17 @@ async def test_get_topic_async( satisfies_pzs=True, ) ) - response = await client.get_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True @@ -1115,17 +1106,17 @@ async def test_get_topic_async_from_dict(): def test_get_topic_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.GetTopicRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: call.return_value = pubsub.Topic() - client.get_topic(request) # Establish that the underlying gRPC stub method was called. 
@@ -1140,17 +1131,17 @@ def test_get_topic_field_headers(): @pytest.mark.asyncio async def test_get_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.GetTopicRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - await client.get_topic(request) # Establish that the underlying gRPC stub method was called. @@ -1164,13 +1155,12 @@ async def test_get_topic_field_headers_async(): def test_get_topic_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_topic(topic="topic_value",) @@ -1179,12 +1169,11 @@ def test_get_topic_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" def test_get_topic_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1196,7 +1185,7 @@ def test_get_topic_flattened_error(): @pytest.mark.asyncio async def test_get_topic_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: @@ -1212,13 +1201,12 @@ async def test_get_topic_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" @pytest.mark.asyncio async def test_get_topic_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1230,7 +1218,7 @@ async def test_get_topic_flattened_error_async(): def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequest): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1243,19 +1231,15 @@ def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequ call.return_value = pubsub.ListTopicsResponse( next_page_token="next_page_token_value", ) - response = client.list_topics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicsPager) - assert response.next_page_token == "next_page_token_value" @@ -1267,7 +1251,7 @@ def test_list_topics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1275,7 +1259,6 @@ def test_list_topics_empty_call(): client.list_topics() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() @@ -1284,7 +1267,7 @@ async def test_list_topics_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1297,18 +1280,15 @@ async def test_list_topics_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_topics(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTopicsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1318,17 +1298,17 @@ async def test_list_topics_async_from_dict(): def test_list_topics_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListTopicsRequest() + request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: call.return_value = pubsub.ListTopicsResponse() - client.list_topics(request) # Establish that the underlying gRPC stub method was called. @@ -1343,11 +1323,12 @@ def test_list_topics_field_headers(): @pytest.mark.asyncio async def test_list_topics_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListTopicsRequest() + request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1355,7 +1336,6 @@ async def test_list_topics_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicsResponse() ) - await client.list_topics(request) # Establish that the underlying gRPC stub method was called. @@ -1369,13 +1349,12 @@ async def test_list_topics_field_headers_async(): def test_list_topics_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_topics(project="project_value",) @@ -1384,12 +1363,11 @@ def test_list_topics_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" def test_list_topics_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1401,7 +1379,7 @@ def test_list_topics_flattened_error(): @pytest.mark.asyncio async def test_list_topics_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1419,13 +1397,12 @@ async def test_list_topics_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" @pytest.mark.asyncio async def test_list_topics_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1436,7 +1413,7 @@ async def test_list_topics_flattened_error_async(): def test_list_topics_pager(): - client = PublisherClient(credentials=credentials.AnonymousCredentials,) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1466,7 +1443,7 @@ def test_list_topics_pager(): def test_list_topics_pages(): - client = PublisherClient(credentials=credentials.AnonymousCredentials,) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1488,7 +1465,7 @@ def test_list_topics_pages(): @pytest.mark.asyncio async def test_list_topics_async_pager(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1517,7 +1494,7 @@ async def test_list_topics_async_pager(): @pytest.mark.asyncio async def test_list_topics_async_pages(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1545,7 +1522,7 @@ def test_list_topic_subscriptions( transport: str = "grpc", request_type=pubsub.ListTopicSubscriptionsRequest ): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1561,21 +1538,16 @@ def test_list_topic_subscriptions( subscriptions=["subscriptions_value"], next_page_token="next_page_token_value", ) - response = client.list_topic_subscriptions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTopicSubscriptionsPager) - assert response.subscriptions == ["subscriptions_value"] - assert response.next_page_token == "next_page_token_value" @@ -1587,7 +1559,7 @@ def test_list_topic_subscriptions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1597,7 +1569,6 @@ def test_list_topic_subscriptions_empty_call(): client.list_topic_subscriptions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() @@ -1606,7 +1577,7 @@ async def test_list_topic_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1624,20 +1595,16 @@ async def test_list_topic_subscriptions_async( next_page_token="next_page_token_value", ) ) - response = await client.list_topic_subscriptions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) - assert response.subscriptions == ["subscriptions_value"] - assert response.next_page_token == "next_page_token_value" @@ -1647,11 +1614,12 @@ async def test_list_topic_subscriptions_async_from_dict(): def test_list_topic_subscriptions_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListTopicSubscriptionsRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1659,7 +1627,6 @@ def test_list_topic_subscriptions_field_headers(): type(client.transport.list_topic_subscriptions), "__call__" ) as call: call.return_value = pubsub.ListTopicSubscriptionsResponse() - client.list_topic_subscriptions(request) # Establish that the underlying gRPC stub method was called. @@ -1674,11 +1641,12 @@ def test_list_topic_subscriptions_field_headers(): @pytest.mark.asyncio async def test_list_topic_subscriptions_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListTopicSubscriptionsRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1688,7 +1656,6 @@ async def test_list_topic_subscriptions_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicSubscriptionsResponse() ) - await client.list_topic_subscriptions(request) # Establish that the underlying gRPC stub method was called. @@ -1702,7 +1669,7 @@ async def test_list_topic_subscriptions_field_headers_async(): def test_list_topic_subscriptions_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1710,7 +1677,6 @@ def test_list_topic_subscriptions_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicSubscriptionsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.list_topic_subscriptions(topic="topic_value",) @@ -1719,12 +1685,11 @@ def test_list_topic_subscriptions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" def test_list_topic_subscriptions_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1736,7 +1701,7 @@ def test_list_topic_subscriptions_flattened_error(): @pytest.mark.asyncio async def test_list_topic_subscriptions_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1756,13 +1721,12 @@ async def test_list_topic_subscriptions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" @pytest.mark.asyncio async def test_list_topic_subscriptions_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1773,7 +1737,7 @@ async def test_list_topic_subscriptions_flattened_error_async(): def test_list_topic_subscriptions_pager(): - client = PublisherClient(credentials=credentials.AnonymousCredentials,) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1808,7 +1772,7 @@ def test_list_topic_subscriptions_pager(): def test_list_topic_subscriptions_pages(): - client = PublisherClient(credentials=credentials.AnonymousCredentials,) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1835,7 +1799,7 @@ def test_list_topic_subscriptions_pages(): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pager(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1869,7 +1833,7 @@ async def test_list_topic_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pages(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1902,7 +1866,7 @@ def test_list_topic_snapshots( transport: str = "grpc", request_type=pubsub.ListTopicSnapshotsRequest ): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1917,21 +1881,16 @@ def test_list_topic_snapshots( call.return_value = pubsub.ListTopicSnapshotsResponse( snapshots=["snapshots_value"], next_page_token="next_page_token_value", ) - response = client.list_topic_snapshots(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTopicSnapshotsPager) - assert response.snapshots == ["snapshots_value"] - assert response.next_page_token == "next_page_token_value" @@ -1943,7 +1902,7 @@ def test_list_topic_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1953,7 +1912,6 @@ def test_list_topic_snapshots_empty_call(): client.list_topic_snapshots() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() @@ -1962,7 +1920,7 @@ async def test_list_topic_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1979,20 +1937,16 @@ async def test_list_topic_snapshots_async( snapshots=["snapshots_value"], next_page_token="next_page_token_value", ) ) - response = await client.list_topic_snapshots(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) - assert response.snapshots == ["snapshots_value"] - assert response.next_page_token == "next_page_token_value" @@ -2002,11 +1956,12 @@ async def test_list_topic_snapshots_async_from_dict(): def test_list_topic_snapshots_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListTopicSnapshotsRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2014,7 +1969,6 @@ def test_list_topic_snapshots_field_headers(): type(client.transport.list_topic_snapshots), "__call__" ) as call: call.return_value = pubsub.ListTopicSnapshotsResponse() - client.list_topic_snapshots(request) # Establish that the underlying gRPC stub method was called. @@ -2029,11 +1983,12 @@ def test_list_topic_snapshots_field_headers(): @pytest.mark.asyncio async def test_list_topic_snapshots_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListTopicSnapshotsRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2043,7 +1998,6 @@ async def test_list_topic_snapshots_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicSnapshotsResponse() ) - await client.list_topic_snapshots(request) # Establish that the underlying gRPC stub method was called. 
@@ -2057,7 +2011,7 @@ async def test_list_topic_snapshots_field_headers_async(): def test_list_topic_snapshots_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2065,7 +2019,6 @@ def test_list_topic_snapshots_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicSnapshotsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_topic_snapshots(topic="topic_value",) @@ -2074,12 +2027,11 @@ def test_list_topic_snapshots_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" def test_list_topic_snapshots_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2091,7 +2043,7 @@ def test_list_topic_snapshots_flattened_error(): @pytest.mark.asyncio async def test_list_topic_snapshots_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2111,13 +2063,12 @@ async def test_list_topic_snapshots_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" @pytest.mark.asyncio async def test_list_topic_snapshots_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2128,7 +2079,7 @@ async def test_list_topic_snapshots_flattened_error_async(): def test_list_topic_snapshots_pager(): - client = PublisherClient(credentials=credentials.AnonymousCredentials,) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2161,7 +2112,7 @@ def test_list_topic_snapshots_pager(): def test_list_topic_snapshots_pages(): - client = PublisherClient(credentials=credentials.AnonymousCredentials,) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2186,7 +2137,7 @@ def test_list_topic_snapshots_pages(): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pager(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2218,7 +2169,7 @@ async def test_list_topic_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pages(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials,) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2247,7 +2198,7 @@ async def test_list_topic_snapshots_async_pages(): def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRequest): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2258,13 +2209,11 @@ def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRe with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() # Establish that the response is the type that we expect. @@ -2279,7 +2228,7 @@ def test_delete_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2287,7 +2236,6 @@ def test_delete_topic_empty_call(): client.delete_topic() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() @@ -2296,7 +2244,7 @@ async def test_delete_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2307,13 +2255,11 @@ async def test_delete_topic_async( with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_topic(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() # Establish that the response is the type that we expect. @@ -2326,17 +2272,17 @@ async def test_delete_topic_async_from_dict(): def test_delete_topic_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.DeleteTopicRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: call.return_value = None - client.delete_topic(request) # Establish that the underlying gRPC stub method was called. 
@@ -2351,17 +2297,17 @@ def test_delete_topic_field_headers(): @pytest.mark.asyncio async def test_delete_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.DeleteTopicRequest() + request.topic = "topic/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_topic(request) # Establish that the underlying gRPC stub method was called. @@ -2375,13 +2321,12 @@ async def test_delete_topic_field_headers_async(): def test_delete_topic_flattened(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_topic(topic="topic_value",) @@ -2390,12 +2335,11 @@ def test_delete_topic_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" def test_delete_topic_flattened_error(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2407,7 +2351,7 @@ def test_delete_topic_flattened_error(): @pytest.mark.asyncio async def test_delete_topic_flattened_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: @@ -2423,13 +2367,12 @@ async def test_delete_topic_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" @pytest.mark.asyncio async def test_delete_topic_flattened_error_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2443,7 +2386,7 @@ def test_detach_subscription( transport: str = "grpc", request_type=pubsub.DetachSubscriptionRequest ): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2456,17 +2399,14 @@ def test_detach_subscription( ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.DetachSubscriptionResponse() - response = client.detach_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.DetachSubscriptionResponse) @@ -2478,7 +2418,7 @@ def test_detach_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2488,7 +2428,6 @@ def test_detach_subscription_empty_call(): client.detach_subscription() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() @@ -2497,7 +2436,7 @@ async def test_detach_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest ): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2512,13 +2451,11 @@ async def test_detach_subscription_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.DetachSubscriptionResponse() ) - response = await client.detach_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() # Establish that the response is the type that we expect. @@ -2531,11 +2468,12 @@ async def test_detach_subscription_async_from_dict(): def test_detach_subscription_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = pubsub.DetachSubscriptionRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2543,7 +2481,6 @@ def test_detach_subscription_field_headers(): type(client.transport.detach_subscription), "__call__" ) as call: call.return_value = pubsub.DetachSubscriptionResponse() - client.detach_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -2560,11 +2497,12 @@ def test_detach_subscription_field_headers(): @pytest.mark.asyncio async def test_detach_subscription_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.DetachSubscriptionRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2574,7 +2512,6 @@ async def test_detach_subscription_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.DetachSubscriptionResponse() ) - await client.detach_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -2592,16 +2529,16 @@ async def test_detach_subscription_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.PublisherGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
transport = transports.PublisherGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PublisherClient( @@ -2611,7 +2548,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. transport = transports.PublisherGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = PublisherClient( @@ -2622,7 +2559,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.PublisherGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = PublisherClient(transport=transport) assert client.transport is transport @@ -2631,13 +2568,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.PublisherGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.PublisherGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -2649,23 +2586,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.PublisherGrpcTransport,) def test_publisher_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.PublisherTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -2677,7 +2614,7 @@ def test_publisher_base_transport(): ) as Transport: Transport.return_value = None transport = transports.PublisherTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -2701,15 +2638,40 @@ def test_publisher_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_publisher_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_publisher_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PublisherTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -2725,19 +2687,36 @@ def test_publisher_base_transport_with_credentials_file(): def test_publisher_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PublisherTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_publisher_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PublisherClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_publisher_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) PublisherClient() adc.assert_called_once_with( scopes=( @@ -2748,14 +2727,38 @@ def test_publisher_auth_adc(): ) -def test_publisher_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], +) +@requires_google_auth_gte_1_25_0 +def test_publisher_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.PublisherGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], +) +@requires_google_auth_lt_1_25_0 +def test_publisher_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -2765,12 +2768,51 @@ def test_publisher_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PublisherGrpcTransport, grpc_helpers), + (transports.PublisherGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_publisher_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + @pytest.mark.parametrize( "transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], ) def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -2784,10 +2826,7 @@ def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -2813,7 +2852,7 @@ def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): def test_publisher_host_no_port(): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com" ), @@ -2823,7 +2862,7 @@ def test_publisher_host_no_port(): def test_publisher_host_with_port(): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com:8000" ), @@ -2874,9 +2913,9 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2892,10 +2931,7 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ 
-2940,10 +2976,7 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2958,7 +2991,6 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): def test_schema_path(): project = "squid" schema = "clam" - expected = "projects/{project}/schemas/{schema}".format( project=project, schema=schema, ) @@ -2981,7 +3013,6 @@ def test_parse_schema_path(): def test_subscription_path(): project = "oyster" subscription = "nudibranch" - expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, ) @@ -3004,7 +3035,6 @@ def test_parse_subscription_path(): def test_topic_path(): project = "winkle" topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) actual = PublisherClient.topic_path(project, topic) assert expected == actual @@ -3024,7 +3054,6 @@ def test_parse_topic_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -3045,7 +3074,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = PublisherClient.common_folder_path(folder) assert expected == actual @@ -3064,7 +3092,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = PublisherClient.common_organization_path(organization) assert expected == actual @@ -3083,7 +3110,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - 
expected = "projects/{project}".format(project=project,) actual = PublisherClient.common_project_path(project) assert expected == actual @@ -3103,7 +3129,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -3130,7 +3155,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.PublisherTransport, "_prep_wrapped_messages" ) as prep: client = PublisherClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3139,24 +3164,24 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = PublisherClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) def test_set_iam_policy(transport: str = "grpc"): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -3167,7 +3192,7 @@ def test_set_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -3177,18 +3202,18 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -3200,7 +3225,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -3208,16 +3233,16 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) @@ -3233,16 +3258,16 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) @@ -3257,16 +3282,16 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -3274,16 +3299,16 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -3291,17 +3316,17 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -3312,7 +3337,7 @@ def test_get_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -3322,18 +3347,18 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -3345,7 +3370,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -3353,16 +3378,16 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) @@ -3378,16 +3403,16 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) @@ -3402,16 +3427,16 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -3419,16 +3444,16 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -3436,19 +3461,19 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = PublisherClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) @@ -3461,7 +3486,7 @@ def test_test_iam_permissions(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -3469,12 +3494,12 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3482,7 +3507,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) response = await client.test_iam_permissions(request) @@ -3494,24 +3521,24 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] def test_test_iam_permissions_field_headers(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) @@ -3527,11 +3554,11 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3539,7 +3566,7 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) await client.test_iam_permissions(request) @@ -3555,13 +3582,13 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): - client = PublisherClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ @@ -3574,14 +3601,14 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): - client = PublisherAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) response = await client.test_iam_permissions( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 62a3aa7300a1..fcc8b68e198d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,9 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock +import packaging.version import grpc from grpc.experimental import aio @@ -24,24 +23,42 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account from google.pubsub_v1.services.schema_service import SchemaServiceAsyncClient from 
google.pubsub_v1.services.schema_service import SchemaServiceClient from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.services.schema_service import transports +from google.pubsub_v1.services.schema_service.transports.base import ( + _GOOGLE_AUTH_VERSION, +) from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) def client_cert_source_callback(): @@ -92,7 +109,7 @@ def test__get_default_mtls_endpoint(): "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] ) def test_schema_service_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -105,11 +122,41 @@ def test_schema_service_client_from_service_account_info(client_class): assert client.transport._host == "pubsub.googleapis.com:443" +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] +) +def test_schema_service_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) 
+ use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SchemaServiceGrpcTransport, "grpc"), + (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_schema_service_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + @pytest.mark.parametrize( "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] ) def test_schema_service_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -162,7 +209,7 @@ def test_schema_service_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(SchemaServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -450,7 +497,7 @@ def test_create_schema( transport: str = "grpc", request_type=gp_schema.CreateSchemaRequest ): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -465,23 +512,17 @@ def test_create_schema( type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, definition="definition_value", ) - response = client.create_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() # Establish that the response is the type that we expect. - assert isinstance(response, gp_schema.Schema) - assert response.name == "name_value" - assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" @@ -493,7 +534,7 @@ def test_create_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -501,7 +542,6 @@ def test_create_schema_empty_call(): client.create_schema() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() @@ -510,7 +550,7 @@ async def test_create_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -527,22 +567,17 @@ async def test_create_schema_async( definition="definition_value", ) ) - response = await client.create_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.Schema) - assert response.name == "name_value" - assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" @@ -552,17 +587,17 @@ async def test_create_schema_async_from_dict(): def test_create_schema_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = gp_schema.CreateSchemaRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: call.return_value = gp_schema.Schema() - client.create_schema(request) # Establish that the underlying gRPC stub method was called. 
@@ -577,17 +612,19 @@ def test_create_schema_field_headers(): @pytest.mark.asyncio async def test_create_schema_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = gp_schema.CreateSchemaRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) - await client.create_schema(request) # Establish that the underlying gRPC stub method was called. @@ -601,13 +638,12 @@ async def test_create_schema_field_headers_async(): def test_create_schema_flattened(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = gp_schema.Schema() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_schema( @@ -620,16 +656,13 @@ def test_create_schema_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") - assert args[0].schema_id == "schema_id_value" def test_create_schema_flattened_error(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -644,7 +677,9 @@ def test_create_schema_flattened_error(): @pytest.mark.asyncio async def test_create_schema_flattened_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: @@ -664,17 +699,16 @@ async def test_create_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") - assert args[0].schema_id == "schema_id_value" @pytest.mark.asyncio async def test_create_schema_flattened_error_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -689,7 +723,7 @@ async def test_create_schema_flattened_error_async(): def test_get_schema(transport: str = "grpc", request_type=schema.GetSchemaRequest): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -704,23 +738,17 @@ def test_get_schema(transport: str = "grpc", request_type=schema.GetSchemaReques type_=schema.Schema.Type.PROTOCOL_BUFFER, definition="definition_value", ) - response = client.get_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() # Establish that the response is the type that we expect. - assert isinstance(response, schema.Schema) - assert response.name == "name_value" - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" @@ -732,7 +760,7 @@ def test_get_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -740,7 +768,6 @@ def test_get_schema_empty_call(): client.get_schema() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() @@ -749,7 +776,7 @@ async def test_get_schema_async( transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -766,22 +793,17 @@ async def test_get_schema_async( definition="definition_value", ) ) - response = await client.get_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() # Establish that the response is the type that we expect. assert isinstance(response, schema.Schema) - assert response.name == "name_value" - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" @@ -791,17 +813,17 @@ async def test_get_schema_async_from_dict(): def test_get_schema_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.GetSchemaRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: call.return_value = schema.Schema() - client.get_schema(request) # Establish that the underlying gRPC stub method was called. 
@@ -816,17 +838,19 @@ def test_get_schema_field_headers(): @pytest.mark.asyncio async def test_get_schema_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.GetSchemaRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) - await client.get_schema(request) # Establish that the underlying gRPC stub method was called. @@ -840,13 +864,12 @@ async def test_get_schema_field_headers_async(): def test_get_schema_flattened(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = schema.Schema() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_schema(name="name_value",) @@ -855,12 +878,11 @@ def test_get_schema_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_get_schema_flattened_error(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -872,7 +894,9 @@ def test_get_schema_flattened_error(): @pytest.mark.asyncio async def test_get_schema_flattened_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: @@ -888,13 +912,14 @@ async def test_get_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_get_schema_flattened_error_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -906,7 +931,7 @@ async def test_get_schema_flattened_error_async(): def test_list_schemas(transport: str = "grpc", request_type=schema.ListSchemasRequest): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -919,19 +944,15 @@ def test_list_schemas(transport: str = "grpc", request_type=schema.ListSchemasRe call.return_value = schema.ListSchemasResponse( next_page_token="next_page_token_value", ) - response = client.list_schemas(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSchemasPager) - assert response.next_page_token == "next_page_token_value" @@ -943,7 +964,7 @@ def test_list_schemas_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -951,7 +972,6 @@ def test_list_schemas_empty_call(): client.list_schemas() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() @@ -960,7 +980,7 @@ async def test_list_schemas_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest ): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -973,18 +993,15 @@ async def test_list_schemas_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( schema.ListSchemasResponse(next_page_token="next_page_token_value",) ) - response = await client.list_schemas(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSchemasAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -994,17 +1011,17 @@ async def test_list_schemas_async_from_dict(): def test_list_schemas_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.ListSchemasRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: call.return_value = schema.ListSchemasResponse() - client.list_schemas(request) # Establish that the underlying gRPC stub method was called. @@ -1019,11 +1036,14 @@ def test_list_schemas_field_headers(): @pytest.mark.asyncio async def test_list_schemas_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.ListSchemasRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1031,7 +1051,6 @@ async def test_list_schemas_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( schema.ListSchemasResponse() ) - await client.list_schemas(request) # Establish that the underlying gRPC stub method was called. 
@@ -1045,13 +1064,12 @@ async def test_list_schemas_field_headers_async(): def test_list_schemas_flattened(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = schema.ListSchemasResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_schemas(parent="parent_value",) @@ -1060,12 +1078,11 @@ def test_list_schemas_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" def test_list_schemas_flattened_error(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1077,7 +1094,9 @@ def test_list_schemas_flattened_error(): @pytest.mark.asyncio async def test_list_schemas_flattened_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1095,13 +1114,14 @@ async def test_list_schemas_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" @pytest.mark.asyncio async def test_list_schemas_flattened_error_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1112,7 +1132,7 @@ async def test_list_schemas_flattened_error_async(): def test_list_schemas_pager(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials,) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1144,7 +1164,7 @@ def test_list_schemas_pager(): def test_list_schemas_pages(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials,) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1168,7 +1188,7 @@ def test_list_schemas_pages(): @pytest.mark.asyncio async def test_list_schemas_async_pager(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = SchemaServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1199,7 +1219,7 @@ async def test_list_schemas_async_pager(): @pytest.mark.asyncio async def test_list_schemas_async_pages(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + client = SchemaServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1229,7 +1249,7 @@ def test_delete_schema( transport: str = "grpc", request_type=schema.DeleteSchemaRequest ): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1240,13 +1260,11 @@ def test_delete_schema( with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() # Establish that the response is the type that we expect. @@ -1261,7 +1279,7 @@ def test_delete_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1269,7 +1287,6 @@ def test_delete_schema_empty_call(): client.delete_schema() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() @@ -1278,7 +1295,7 @@ async def test_delete_schema_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1289,13 +1306,11 @@ async def test_delete_schema_async( with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() # Establish that the response is the type that we expect. @@ -1308,17 +1323,17 @@ async def test_delete_schema_async_from_dict(): def test_delete_schema_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.DeleteSchemaRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: call.return_value = None - client.delete_schema(request) # Establish that the underlying gRPC stub method was called. 
@@ -1333,17 +1348,19 @@ def test_delete_schema_field_headers(): @pytest.mark.asyncio async def test_delete_schema_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.DeleteSchemaRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_schema(request) # Establish that the underlying gRPC stub method was called. @@ -1357,13 +1374,12 @@ async def test_delete_schema_field_headers_async(): def test_delete_schema_flattened(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_schema(name="name_value",) @@ -1372,12 +1388,11 @@ def test_delete_schema_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" def test_delete_schema_flattened_error(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1389,7 +1404,9 @@ def test_delete_schema_flattened_error(): @pytest.mark.asyncio async def test_delete_schema_flattened_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: @@ -1405,13 +1422,14 @@ async def test_delete_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" @pytest.mark.asyncio async def test_delete_schema_flattened_error_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1425,7 +1443,7 @@ def test_validate_schema( transport: str = "grpc", request_type=gp_schema.ValidateSchemaRequest ): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1436,17 +1454,14 @@ def test_validate_schema( with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = gp_schema.ValidateSchemaResponse() - response = client.validate_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, gp_schema.ValidateSchemaResponse) @@ -1458,7 +1473,7 @@ def test_validate_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1466,7 +1481,6 @@ def test_validate_schema_empty_call(): client.validate_schema() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() @@ -1475,7 +1489,7 @@ async def test_validate_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1488,13 +1502,11 @@ async def test_validate_schema_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gp_schema.ValidateSchemaResponse() ) - response = await client.validate_schema(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() # Establish that the response is the type that we expect. @@ -1507,17 +1519,17 @@ async def test_validate_schema_async_from_dict(): def test_validate_schema_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = gp_schema.ValidateSchemaRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: call.return_value = gp_schema.ValidateSchemaResponse() - client.validate_schema(request) # Establish that the underlying gRPC stub method was called. @@ -1532,11 +1544,14 @@ def test_validate_schema_field_headers(): @pytest.mark.asyncio async def test_validate_schema_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = gp_schema.ValidateSchemaRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1544,7 +1559,6 @@ async def test_validate_schema_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( gp_schema.ValidateSchemaResponse() ) - await client.validate_schema(request) # Establish that the underlying gRPC stub method was called. @@ -1558,13 +1572,12 @@ async def test_validate_schema_field_headers_async(): def test_validate_schema_flattened(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = gp_schema.ValidateSchemaResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.validate_schema( @@ -1575,14 +1588,12 @@ def test_validate_schema_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") def test_validate_schema_flattened_error(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1596,7 +1607,9 @@ def test_validate_schema_flattened_error(): @pytest.mark.asyncio async def test_validate_schema_flattened_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: @@ -1616,15 +1629,15 @@ async def test_validate_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") @pytest.mark.asyncio async def test_validate_schema_flattened_error_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1640,7 +1653,7 @@ def test_validate_message( transport: str = "grpc", request_type=schema.ValidateMessageRequest ): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1651,17 +1664,14 @@ def test_validate_message( with mock.patch.object(type(client.transport.validate_message), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = schema.ValidateMessageResponse() - response = client.validate_message(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() # Establish that the response is the type that we expect. - assert isinstance(response, schema.ValidateMessageResponse) @@ -1673,7 +1683,7 @@ def test_validate_message_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1681,7 +1691,6 @@ def test_validate_message_empty_call(): client.validate_message() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() @@ -1690,7 +1699,7 @@ async def test_validate_message_async( transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest ): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1703,13 +1712,11 @@ async def test_validate_message_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( schema.ValidateMessageResponse() ) - response = await client.validate_message(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() # Establish that the response is the type that we expect. @@ -1722,17 +1729,17 @@ async def test_validate_message_async_from_dict(): def test_validate_message_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.ValidateMessageRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_message), "__call__") as call: call.return_value = schema.ValidateMessageResponse() - client.validate_message(request) # Establish that the underlying gRPC stub method was called. 
@@ -1747,11 +1754,14 @@ def test_validate_message_field_headers(): @pytest.mark.asyncio async def test_validate_message_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = schema.ValidateMessageRequest() + request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1759,7 +1769,6 @@ async def test_validate_message_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( schema.ValidateMessageResponse() ) - await client.validate_message(request) # Establish that the underlying gRPC stub method was called. @@ -1775,16 +1784,16 @@ async def test_validate_message_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.SchemaServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.SchemaServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = SchemaServiceClient( @@ -1794,7 +1803,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.SchemaServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = SchemaServiceClient( @@ -1805,7 +1814,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.SchemaServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = SchemaServiceClient(transport=transport) assert client.transport is transport @@ -1814,13 +1823,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.SchemaServiceGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.SchemaServiceGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -1835,23 +1844,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.SchemaServiceGrpcTransport,) def test_schema_service_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.SchemaServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -1863,7 +1872,7 @@ def test_schema_service_base_transport(): ) as Transport: Transport.return_value = None transport = transports.SchemaServiceTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -1884,15 +1893,40 @@ def test_schema_service_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_schema_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + 
+@requires_google_auth_lt_1_25_0 +def test_schema_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SchemaServiceTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -1908,19 +1942,36 @@ def test_schema_service_base_transport_with_credentials_file(): def test_schema_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SchemaServiceTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_schema_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SchemaServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_schema_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) SchemaServiceClient() adc.assert_called_once_with( scopes=( @@ -1931,14 +1982,44 @@ def test_schema_service_auth_adc(): ) -def test_schema_service_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_schema_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.SchemaServiceGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_schema_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -1948,6 +2029,45 @@ def test_schema_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SchemaServiceGrpcTransport, grpc_helpers), + (transports.SchemaServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_schema_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + @pytest.mark.parametrize( "transport_class", [ @@ -1956,7 +2076,7 @@ def test_schema_service_transport_auth_adc(): ], ) def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -1970,10 +2090,7 @@ def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_cla "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -1999,7 +2116,7 @@ def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_cla def test_schema_service_host_no_port(): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com" ), @@ -2009,7 +2126,7 @@ def test_schema_service_host_no_port(): def test_schema_service_host_with_port(): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com:8000" ), @@ -2063,9 +2180,9 @@ def test_schema_service_transport_channel_mtls_with_client_cert_source(transport mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -2081,10 +2198,7 @@ def test_schema_service_transport_channel_mtls_with_client_cert_source(transport "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_cred, 
quota_project_id=None, options=[ @@ -2132,10 +2246,7 @@ def test_schema_service_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -2150,7 +2261,6 @@ def test_schema_service_transport_channel_mtls_with_adc(transport_class): def test_schema_path(): project = "squid" schema = "clam" - expected = "projects/{project}/schemas/{schema}".format( project=project, schema=schema, ) @@ -2172,7 +2282,6 @@ def test_parse_schema_path(): def test_common_billing_account_path(): billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -2193,7 +2302,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) actual = SchemaServiceClient.common_folder_path(folder) assert expected == actual @@ -2212,7 +2320,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) actual = SchemaServiceClient.common_organization_path(organization) assert expected == actual @@ -2231,7 +2338,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) actual = SchemaServiceClient.common_project_path(project) assert expected == actual @@ -2251,7 +2357,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "squid" location = "clam" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -2278,7 +2383,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.SchemaServiceTransport, 
"_prep_wrapped_messages" ) as prep: client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2287,24 +2392,24 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = SchemaServiceClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) def test_set_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -2315,7 +2420,7 @@ def test_set_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2325,18 +2430,18 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -2348,7 +2453,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2356,16 +2461,16 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) @@ -2381,16 +2486,18 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) @@ -2405,16 +2512,16 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -2422,16 +2529,18 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -2439,17 +2548,17 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -2460,7 +2569,7 @@ def test_get_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2470,18 +2579,18 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -2493,7 +2602,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -2501,16 +2610,16 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) @@ -2526,16 +2635,18 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) @@ -2550,16 +2661,16 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -2567,16 +2678,18 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -2584,19 +2697,19 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = SchemaServiceClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) @@ -2609,7 +2722,7 @@ def test_test_iam_permissions(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -2617,12 +2730,12 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2630,7 +2743,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) response = await client.test_iam_permissions(request) @@ -2642,24 +2757,24 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] def test_test_iam_permissions_field_headers(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) @@ -2675,11 +2790,13 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2687,7 +2804,7 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) await client.test_iam_permissions(request) @@ -2703,13 +2820,13 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): - client = SchemaServiceClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ @@ -2722,14 +2839,16 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): - client = SchemaServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) response = await client.test_iam_permissions( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 96f03eb1510a..67e6b26cca3e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1,5 +1,4 @@ # -*- coding: utf-8 -*- - # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,10 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # - import os import mock import warnings +import packaging.version import grpc from grpc.experimental import aio @@ -25,26 +24,42 @@ import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule -from google import auth + from google.api_core import client_options -from google.api_core import exceptions +from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.auth import credentials +from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore -from google.iam.v1 import options_pb2 as options # type: ignore -from google.iam.v1 import policy_pb2 as policy # type: ignore +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account -from google.protobuf import duration_pb2 as duration # type: ignore -from google.protobuf import field_mask_pb2 
as field_mask # type: ignore -from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.subscriber import SubscriberAsyncClient from google.pubsub_v1.services.subscriber import SubscriberClient from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.services.subscriber import transports +from google.pubsub_v1.services.subscriber.transports.base import _GOOGLE_AUTH_VERSION from google.pubsub_v1.types import pubsub +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) def client_cert_source_callback(): @@ -90,7 +105,7 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) def test_subscriber_client_from_service_account_info(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: @@ -103,9 +118,37 @@ def test_subscriber_client_from_service_account_info(client_class): assert client.transport._host == "pubsub.googleapis.com:443" +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) +def 
test_subscriber_client_service_account_always_use_jwt(client_class): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.SubscriberGrpcTransport, "grpc"), + (transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_subscriber_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + @pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) def test_subscriber_client_from_service_account_file(client_class): - creds = credentials.AnonymousCredentials() + creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: @@ -156,7 +199,7 @@ def test_subscriber_client_client_options( ): # Check that if channel is provided we won't create a new one. 
with mock.patch.object(SubscriberClient, "get_transport_class") as gtc: - transport = transport_class(credentials=credentials.AnonymousCredentials()) + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) client = client_class(transport=transport) gtc.assert_not_called() @@ -438,7 +481,7 @@ def test_subscriber_client_client_options_from_dict(): def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscription): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -459,31 +502,21 @@ def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscr filter="filter_value", detached=True, ) - response = client.create_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True @@ -495,7 +528,7 @@ def test_create_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -505,7 +538,6 @@ def test_create_subscription_empty_call(): client.create_subscription() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() @@ -514,7 +546,7 @@ async def test_create_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.Subscription ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -537,30 +569,21 @@ async def test_create_subscription_async( detached=True, ) ) - response = await client.create_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True @@ -570,11 +593,12 @@ async def test_create_subscription_async_from_dict(): def test_create_subscription_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.Subscription() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -582,7 +606,6 @@ def test_create_subscription_field_headers(): type(client.transport.create_subscription), "__call__" ) as call: call.return_value = pubsub.Subscription() - client.create_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -597,11 +620,12 @@ def test_create_subscription_field_headers(): @pytest.mark.asyncio async def test_create_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.Subscription() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -609,7 +633,6 @@ async def test_create_subscription_field_headers_async(): type(client.transport.create_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - await client.create_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -623,7 +646,7 @@ async def test_create_subscription_field_headers_async(): def test_create_subscription_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -631,7 +654,6 @@ def test_create_subscription_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_subscription( @@ -645,20 +667,16 @@ def test_create_subscription_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].topic == "topic_value" - assert args[0].push_config == pubsub.PushConfig( push_endpoint="push_endpoint_value" ) - assert args[0].ack_deadline_seconds == 2066 def test_create_subscription_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -674,7 +692,7 @@ def test_create_subscription_flattened_error(): @pytest.mark.asyncio async def test_create_subscription_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -697,21 +715,17 @@ async def test_create_subscription_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].topic == "topic_value" - assert args[0].push_config == pubsub.PushConfig( push_endpoint="push_endpoint_value" ) - assert args[0].ack_deadline_seconds == 2066 @pytest.mark.asyncio async def test_create_subscription_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -729,7 +743,7 @@ def test_get_subscription( transport: str = "grpc", request_type=pubsub.GetSubscriptionRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -748,31 +762,21 @@ def test_get_subscription( filter="filter_value", detached=True, ) - response = client.get_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True @@ -784,7 +788,7 @@ def test_get_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -792,7 +796,6 @@ def test_get_subscription_empty_call(): client.get_subscription() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() @@ -801,7 +804,7 @@ async def test_get_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -822,30 +825,21 @@ async def test_get_subscription_async( detached=True, ) ) - response = await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True @@ -855,17 +849,17 @@ async def test_get_subscription_async_from_dict(): def test_get_subscription_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.GetSubscriptionRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: call.return_value = pubsub.Subscription() - client.get_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -882,17 +876,17 @@ def test_get_subscription_field_headers(): @pytest.mark.asyncio async def test_get_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.GetSubscriptionRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -908,13 +902,12 @@ async def test_get_subscription_field_headers_async(): def test_get_subscription_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Subscription() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_subscription(subscription="subscription_value",) @@ -923,12 +916,11 @@ def test_get_subscription_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" def test_get_subscription_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -940,7 +932,7 @@ def test_get_subscription_flattened_error(): @pytest.mark.asyncio async def test_get_subscription_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: @@ -956,13 +948,12 @@ async def test_get_subscription_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" @pytest.mark.asyncio async def test_get_subscription_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -976,7 +967,7 @@ def test_update_subscription( transport: str = "grpc", request_type=pubsub.UpdateSubscriptionRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -997,31 +988,21 @@ def test_update_subscription( filter="filter_value", detached=True, ) - response = client.update_subscription(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True @@ -1033,7 +1014,7 @@ def test_update_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1043,7 +1024,6 @@ def test_update_subscription_empty_call(): client.update_subscription() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() @@ -1052,7 +1032,7 @@ async def test_update_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1075,30 +1055,21 @@ async def test_update_subscription_async( detached=True, ) ) - response = await client.update_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True @@ -1108,11 +1079,12 @@ async def test_update_subscription_async_from_dict(): def test_update_subscription_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.UpdateSubscriptionRequest() + request.subscription.name = "subscription.name/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1120,7 +1092,6 @@ def test_update_subscription_field_headers(): type(client.transport.update_subscription), "__call__" ) as call: call.return_value = pubsub.Subscription() - client.update_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -1138,11 +1109,12 @@ def test_update_subscription_field_headers(): @pytest.mark.asyncio async def test_update_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.UpdateSubscriptionRequest() + request.subscription.name = "subscription.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1150,7 +1122,6 @@ async def test_update_subscription_field_headers_async(): type(client.transport.update_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - await client.update_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -1170,7 +1141,7 @@ def test_list_subscriptions( transport: str = "grpc", request_type=pubsub.ListSubscriptionsRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1185,19 +1156,15 @@ def test_list_subscriptions( call.return_value = pubsub.ListSubscriptionsResponse( next_page_token="next_page_token_value", ) - response = client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubscriptionsPager) - assert response.next_page_token == "next_page_token_value" @@ -1209,7 +1176,7 @@ def test_list_subscriptions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1219,7 +1186,6 @@ def test_list_subscriptions_empty_call(): client.list_subscriptions() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() @@ -1228,7 +1194,7 @@ async def test_list_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1243,18 +1209,15 @@ async def test_list_subscriptions_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSubscriptionsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSubscriptionsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -1264,11 +1227,12 @@ async def test_list_subscriptions_async_from_dict(): def test_list_subscriptions_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListSubscriptionsRequest() + request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1276,7 +1240,6 @@ def test_list_subscriptions_field_headers(): type(client.transport.list_subscriptions), "__call__" ) as call: call.return_value = pubsub.ListSubscriptionsResponse() - client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. @@ -1291,11 +1254,12 @@ def test_list_subscriptions_field_headers(): @pytest.mark.asyncio async def test_list_subscriptions_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListSubscriptionsRequest() + request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1305,7 +1269,6 @@ async def test_list_subscriptions_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSubscriptionsResponse() ) - await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. @@ -1319,7 +1282,7 @@ async def test_list_subscriptions_field_headers_async(): def test_list_subscriptions_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1327,7 +1290,6 @@ def test_list_subscriptions_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSubscriptionsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_subscriptions(project="project_value",) @@ -1336,12 +1298,11 @@ def test_list_subscriptions_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" def test_list_subscriptions_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1353,7 +1314,7 @@ def test_list_subscriptions_flattened_error(): @pytest.mark.asyncio async def test_list_subscriptions_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1373,13 +1334,12 @@ async def test_list_subscriptions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" @pytest.mark.asyncio async def test_list_subscriptions_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1390,7 +1350,7 @@ async def test_list_subscriptions_flattened_error_async(): def test_list_subscriptions_pager(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1430,7 +1390,7 @@ def test_list_subscriptions_pager(): def test_list_subscriptions_pages(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1462,7 +1422,7 @@ def test_list_subscriptions_pages(): @pytest.mark.asyncio async def test_list_subscriptions_async_pager(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1501,7 +1461,7 @@ async def test_list_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_subscriptions_async_pages(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1539,7 +1499,7 @@ def test_delete_subscription( transport: str = "grpc", request_type=pubsub.DeleteSubscriptionRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1552,13 +1512,11 @@ def test_delete_subscription( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() # Establish that the response is the type that we expect. 
@@ -1573,7 +1531,7 @@ def test_delete_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1583,7 +1541,6 @@ def test_delete_subscription_empty_call(): client.delete_subscription() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() @@ -1592,7 +1549,7 @@ async def test_delete_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1605,13 +1562,11 @@ async def test_delete_subscription_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() # Establish that the response is the type that we expect. @@ -1624,11 +1579,12 @@ async def test_delete_subscription_async_from_dict(): def test_delete_subscription_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = pubsub.DeleteSubscriptionRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1636,7 +1592,6 @@ def test_delete_subscription_field_headers(): type(client.transport.delete_subscription), "__call__" ) as call: call.return_value = None - client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -1653,11 +1608,12 @@ def test_delete_subscription_field_headers(): @pytest.mark.asyncio async def test_delete_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.DeleteSubscriptionRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1665,7 +1621,6 @@ async def test_delete_subscription_field_headers_async(): type(client.transport.delete_subscription), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. @@ -1681,7 +1636,7 @@ async def test_delete_subscription_field_headers_async(): def test_delete_subscription_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1689,7 +1644,6 @@ def test_delete_subscription_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.delete_subscription(subscription="subscription_value",) @@ -1698,12 +1652,11 @@ def test_delete_subscription_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" def test_delete_subscription_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1715,7 +1668,7 @@ def test_delete_subscription_flattened_error(): @pytest.mark.asyncio async def test_delete_subscription_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1733,13 +1686,12 @@ async def test_delete_subscription_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" @pytest.mark.asyncio async def test_delete_subscription_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1753,7 +1705,7 @@ def test_modify_ack_deadline( transport: str = "grpc", request_type=pubsub.ModifyAckDeadlineRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1766,13 +1718,11 @@ def test_modify_ack_deadline( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.modify_ack_deadline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() # Establish that the response is the type that we expect. @@ -1787,7 +1737,7 @@ def test_modify_ack_deadline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1797,7 +1747,6 @@ def test_modify_ack_deadline_empty_call(): client.modify_ack_deadline() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() @@ -1806,7 +1755,7 @@ async def test_modify_ack_deadline_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1819,13 +1768,11 @@ async def test_modify_ack_deadline_async( ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.modify_ack_deadline(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() # Establish that the response is the type that we expect. @@ -1838,11 +1785,12 @@ async def test_modify_ack_deadline_async_from_dict(): def test_modify_ack_deadline_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ModifyAckDeadlineRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -1850,7 +1798,6 @@ def test_modify_ack_deadline_field_headers(): type(client.transport.modify_ack_deadline), "__call__" ) as call: call.return_value = None - client.modify_ack_deadline(request) # Establish that the underlying gRPC stub method was called. @@ -1867,11 +1814,12 @@ def test_modify_ack_deadline_field_headers(): @pytest.mark.asyncio async def test_modify_ack_deadline_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ModifyAckDeadlineRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1879,7 +1827,6 @@ async def test_modify_ack_deadline_field_headers_async(): type(client.transport.modify_ack_deadline), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.modify_ack_deadline(request) # Establish that the underlying gRPC stub method was called. @@ -1895,7 +1842,7 @@ async def test_modify_ack_deadline_field_headers_async(): def test_modify_ack_deadline_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1903,7 +1850,6 @@ def test_modify_ack_deadline_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.modify_ack_deadline( @@ -1916,16 +1862,13 @@ def test_modify_ack_deadline_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] - assert args[0].ack_deadline_seconds == 2066 def test_modify_ack_deadline_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1940,7 +1883,7 @@ def test_modify_ack_deadline_flattened_error(): @pytest.mark.asyncio async def test_modify_ack_deadline_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1962,17 +1905,14 @@ async def test_modify_ack_deadline_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] - assert args[0].ack_deadline_seconds == 2066 @pytest.mark.asyncio async def test_modify_ack_deadline_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1987,7 +1927,7 @@ async def test_modify_ack_deadline_flattened_error_async(): def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeRequest): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1998,13 +1938,11 @@ def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeReq with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.acknowledge(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() # Establish that the response is the type that we expect. @@ -2019,7 +1957,7 @@ def test_acknowledge_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2027,7 +1965,6 @@ def test_acknowledge_empty_call(): client.acknowledge() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() @@ -2036,7 +1973,7 @@ async def test_acknowledge_async( transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2047,13 +1984,11 @@ async def test_acknowledge_async( with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.acknowledge(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() # Establish that the response is the type that we expect. @@ -2066,17 +2001,17 @@ async def test_acknowledge_async_from_dict(): def test_acknowledge_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.AcknowledgeRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: call.return_value = None - client.acknowledge(request) # Establish that the underlying gRPC stub method was called. @@ -2093,17 +2028,17 @@ def test_acknowledge_field_headers(): @pytest.mark.asyncio async def test_acknowledge_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.AcknowledgeRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.acknowledge(request) # Establish that the underlying gRPC stub method was called. @@ -2119,13 +2054,12 @@ async def test_acknowledge_field_headers_async(): def test_acknowledge_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.acknowledge( @@ -2136,14 +2070,12 @@ def test_acknowledge_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] def test_acknowledge_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2157,7 +2089,7 @@ def test_acknowledge_flattened_error(): @pytest.mark.asyncio async def test_acknowledge_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: @@ -2175,15 +2107,13 @@ async def test_acknowledge_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] @pytest.mark.asyncio async def test_acknowledge_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2197,7 +2127,7 @@ async def test_acknowledge_flattened_error_async(): def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2208,17 +2138,14 @@ def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PullResponse() - response = client.pull(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PullResponse) @@ -2230,7 +2157,7 @@ def test_pull_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2238,7 +2165,6 @@ def test_pull_empty_call(): client.pull() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() @@ -2247,7 +2173,7 @@ async def test_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.PullRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2258,13 +2184,11 @@ async def test_pull_async( with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) - response = await client.pull(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() # Establish that the response is the type that we expect. @@ -2277,17 +2201,17 @@ async def test_pull_async_from_dict(): def test_pull_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.PullRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: call.return_value = pubsub.PullResponse() - client.pull(request) # Establish that the underlying gRPC stub method was called. 
@@ -2304,17 +2228,17 @@ def test_pull_field_headers(): @pytest.mark.asyncio async def test_pull_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.PullRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) - await client.pull(request) # Establish that the underlying gRPC stub method was called. @@ -2330,13 +2254,12 @@ async def test_pull_field_headers_async(): def test_pull_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.PullResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. with warnings.catch_warnings(): @@ -2351,16 +2274,13 @@ def test_pull_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].return_immediately == True - assert args[0].max_messages == 1277 def test_pull_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2375,7 +2295,7 @@ def test_pull_flattened_error(): @pytest.mark.asyncio async def test_pull_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: @@ -2397,17 +2317,14 @@ async def test_pull_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].return_immediately == True - assert args[0].max_messages == 1277 @pytest.mark.asyncio async def test_pull_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2424,26 +2341,23 @@ def test_streaming_pull( transport: str = "grpc", request_type=pubsub.StreamingPullRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.streaming_pull), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = iter([pubsub.StreamingPullResponse()]) - response = client.streaming_pull(iter(requests)) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -2460,13 +2374,12 @@ async def test_streaming_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. request = request_type() - requests = [request] # Mock the actual call within the gRPC stub, and fake the request. @@ -2476,13 +2389,11 @@ async def test_streaming_pull_async( call.return_value.read = mock.AsyncMock( side_effect=[pubsub.StreamingPullResponse()] ) - response = await client.streaming_pull(iter(requests)) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert next(args[0]) == request # Establish that the response is the type that we expect. @@ -2499,7 +2410,7 @@ def test_modify_push_config( transport: str = "grpc", request_type=pubsub.ModifyPushConfigRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2512,13 +2423,11 @@ def test_modify_push_config( ) as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.modify_push_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() # Establish that the response is the type that we expect. 
@@ -2533,7 +2442,7 @@ def test_modify_push_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2543,7 +2452,6 @@ def test_modify_push_config_empty_call(): client.modify_push_config() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() @@ -2552,7 +2460,7 @@ async def test_modify_push_config_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2565,13 +2473,11 @@ async def test_modify_push_config_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.modify_push_config(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() # Establish that the response is the type that we expect. @@ -2584,11 +2490,12 @@ async def test_modify_push_config_async_from_dict(): def test_modify_push_config_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = pubsub.ModifyPushConfigRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2596,7 +2503,6 @@ def test_modify_push_config_field_headers(): type(client.transport.modify_push_config), "__call__" ) as call: call.return_value = None - client.modify_push_config(request) # Establish that the underlying gRPC stub method was called. @@ -2613,11 +2519,12 @@ def test_modify_push_config_field_headers(): @pytest.mark.asyncio async def test_modify_push_config_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ModifyPushConfigRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -2625,7 +2532,6 @@ async def test_modify_push_config_field_headers_async(): type(client.transport.modify_push_config), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.modify_push_config(request) # Establish that the underlying gRPC stub method was called. @@ -2641,7 +2547,7 @@ async def test_modify_push_config_field_headers_async(): def test_modify_push_config_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2649,7 +2555,6 @@ def test_modify_push_config_flattened(): ) as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.modify_push_config( @@ -2661,16 +2566,14 @@ def test_modify_push_config_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].push_config == pubsub.PushConfig( push_endpoint="push_endpoint_value" ) def test_modify_push_config_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2684,7 +2587,7 @@ def test_modify_push_config_flattened_error(): @pytest.mark.asyncio async def test_modify_push_config_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2705,9 +2608,7 @@ async def test_modify_push_config_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].push_config == pubsub.PushConfig( push_endpoint="push_endpoint_value" ) @@ -2715,7 +2616,7 @@ async def test_modify_push_config_flattened_async(): @pytest.mark.asyncio async def test_modify_push_config_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2729,7 +2630,7 @@ async def test_modify_push_config_flattened_error_async(): def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRequest): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2740,21 +2641,16 @@ def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRe with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) - response = client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" @@ -2766,7 +2662,7 @@ def test_get_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2774,7 +2670,6 @@ def test_get_snapshot_empty_call(): client.get_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() @@ -2783,7 +2678,7 @@ async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2796,20 +2691,16 @@ async def test_get_snapshot_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Snapshot(name="name_value", topic="topic_value",) ) - response = await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" @@ -2819,17 +2710,17 @@ async def test_get_snapshot_async_from_dict(): def test_get_snapshot_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.GetSnapshotRequest() + request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: call.return_value = pubsub.Snapshot() - client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. 
@@ -2844,17 +2735,17 @@ def test_get_snapshot_field_headers(): @pytest.mark.asyncio async def test_get_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.GetSnapshotRequest() + request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -2868,13 +2759,12 @@ async def test_get_snapshot_field_headers_async(): def test_get_snapshot_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.get_snapshot(snapshot="snapshot_value",) @@ -2883,12 +2773,11 @@ def test_get_snapshot_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" def test_get_snapshot_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2900,7 +2789,7 @@ def test_get_snapshot_flattened_error(): @pytest.mark.asyncio async def test_get_snapshot_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: @@ -2916,13 +2805,12 @@ async def test_get_snapshot_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" @pytest.mark.asyncio async def test_get_snapshot_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2936,7 +2824,7 @@ def test_list_snapshots( transport: str = "grpc", request_type=pubsub.ListSnapshotsRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2949,19 +2837,15 @@ def test_list_snapshots( call.return_value = pubsub.ListSnapshotsResponse( next_page_token="next_page_token_value", ) - response = client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSnapshotsPager) - assert response.next_page_token == "next_page_token_value" @@ -2973,7 +2857,7 @@ def test_list_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2981,7 +2865,6 @@ def test_list_snapshots_empty_call(): client.list_snapshots() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() @@ -2990,7 +2873,7 @@ async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3003,18 +2886,15 @@ async def test_list_snapshots_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSnapshotsResponse(next_page_token="next_page_token_value",) ) - response = await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSnapshotsAsyncPager) - assert response.next_page_token == "next_page_token_value" @@ -3024,17 +2904,17 @@ async def test_list_snapshots_async_from_dict(): def test_list_snapshots_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListSnapshotsRequest() + request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: call.return_value = pubsub.ListSnapshotsResponse() - client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. @@ -3049,11 +2929,12 @@ def test_list_snapshots_field_headers(): @pytest.mark.asyncio async def test_list_snapshots_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.ListSnapshotsRequest() + request.project = "project/value" # Mock the actual call within the gRPC stub, and fake the request. @@ -3061,7 +2942,6 @@ async def test_list_snapshots_field_headers_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListSnapshotsResponse() ) - await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. 
@@ -3075,13 +2955,12 @@ async def test_list_snapshots_field_headers_async(): def test_list_snapshots_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListSnapshotsResponse() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.list_snapshots(project="project_value",) @@ -3090,12 +2969,11 @@ def test_list_snapshots_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" def test_list_snapshots_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3107,7 +2985,7 @@ def test_list_snapshots_flattened_error(): @pytest.mark.asyncio async def test_list_snapshots_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3125,13 +3003,12 @@ async def test_list_snapshots_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" @pytest.mark.asyncio async def test_list_snapshots_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3142,7 +3019,7 @@ async def test_list_snapshots_flattened_error_async(): def test_list_snapshots_pager(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3176,7 +3053,7 @@ def test_list_snapshots_pager(): def test_list_snapshots_pages(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3202,7 +3079,7 @@ def test_list_snapshots_pages(): @pytest.mark.asyncio async def test_list_snapshots_async_pager(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3235,7 +3112,7 @@ async def test_list_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_snapshots_async_pages(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials,) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3267,7 +3144,7 @@ def test_create_snapshot( transport: str = "grpc", request_type=pubsub.CreateSnapshotRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3278,21 +3155,16 @@ def test_create_snapshot( with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) - response = client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" @@ -3304,7 +3176,7 @@ def test_create_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3312,7 +3184,6 @@ def test_create_snapshot_empty_call(): client.create_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() @@ -3321,7 +3192,7 @@ async def test_create_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3334,20 +3205,16 @@ async def test_create_snapshot_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Snapshot(name="name_value", topic="topic_value",) ) - response = await client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" @@ -3357,17 +3224,17 @@ async def test_create_snapshot_async_from_dict(): def test_create_snapshot_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.CreateSnapshotRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: call.return_value = pubsub.Snapshot() - client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. 
@@ -3382,17 +3249,17 @@ def test_create_snapshot_field_headers(): @pytest.mark.asyncio async def test_create_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.CreateSnapshotRequest() + request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - await client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -3406,13 +3273,12 @@ async def test_create_snapshot_field_headers_async(): def test_create_snapshot_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot() - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_snapshot( @@ -3423,14 +3289,12 @@ def test_create_snapshot_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].subscription == "subscription_value" def test_create_snapshot_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3444,7 +3308,7 @@ def test_create_snapshot_flattened_error(): @pytest.mark.asyncio async def test_create_snapshot_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: @@ -3462,15 +3326,13 @@ async def test_create_snapshot_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].subscription == "subscription_value" @pytest.mark.asyncio async def test_create_snapshot_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3486,7 +3348,7 @@ def test_update_snapshot( transport: str = "grpc", request_type=pubsub.UpdateSnapshotRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3497,21 +3359,16 @@ def test_update_snapshot( with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) - response = client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" @@ -3523,7 +3380,7 @@ def test_update_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3531,7 +3388,6 @@ def test_update_snapshot_empty_call(): client.update_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() @@ -3540,7 +3396,7 @@ async def test_update_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3553,20 +3409,16 @@ async def test_update_snapshot_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.Snapshot(name="name_value", topic="topic_value",) ) - response = await client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" @@ -3576,17 +3428,17 @@ async def test_update_snapshot_async_from_dict(): def test_update_snapshot_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.UpdateSnapshotRequest() + request.snapshot.name = "snapshot.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: call.return_value = pubsub.Snapshot() - client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. 
@@ -3603,17 +3455,17 @@ def test_update_snapshot_field_headers(): @pytest.mark.asyncio async def test_update_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.UpdateSnapshotRequest() + request.snapshot.name = "snapshot.name/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - await client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -3632,7 +3484,7 @@ def test_delete_snapshot( transport: str = "grpc", request_type=pubsub.DeleteSnapshotRequest ): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3643,13 +3495,11 @@ def test_delete_snapshot( with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - response = client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() # Establish that the response is the type that we expect. @@ -3664,7 +3514,7 @@ def test_delete_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3672,7 +3522,6 @@ def test_delete_snapshot_empty_call(): client.delete_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() @@ -3681,7 +3530,7 @@ async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3692,13 +3541,11 @@ async def test_delete_snapshot_async( with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() # Establish that the response is the type that we expect. @@ -3711,17 +3558,17 @@ async def test_delete_snapshot_async_from_dict(): def test_delete_snapshot_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.DeleteSnapshotRequest() + request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: call.return_value = None - client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -3736,17 +3583,17 @@ def test_delete_snapshot_field_headers(): @pytest.mark.asyncio async def test_delete_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.DeleteSnapshotRequest() + request.snapshot = "snapshot/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -3760,13 +3607,12 @@ async def test_delete_snapshot_field_headers_async(): def test_delete_snapshot_flattened(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None - # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.delete_snapshot(snapshot="snapshot_value",) @@ -3775,12 +3621,11 @@ def test_delete_snapshot_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" def test_delete_snapshot_flattened_error(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3792,7 +3637,7 @@ def test_delete_snapshot_flattened_error(): @pytest.mark.asyncio async def test_delete_snapshot_flattened_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: @@ -3808,13 +3653,12 @@ async def test_delete_snapshot_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" @pytest.mark.asyncio async def test_delete_snapshot_flattened_error_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3826,7 +3670,7 @@ async def test_delete_snapshot_flattened_error_async(): def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3837,17 +3681,14 @@ def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): with mock.patch.object(type(client.transport.seek), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.SeekResponse() - response = client.seek(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.SeekResponse) @@ -3859,7 +3700,7 @@ def test_seek_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3867,7 +3708,6 @@ def test_seek_empty_call(): client.seek() call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() @@ -3876,7 +3716,7 @@ async def test_seek_async( transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest ): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3887,13 +3727,11 @@ async def test_seek_async( with mock.patch.object(type(client.transport.seek), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) - response = await client.seek(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() # Establish that the response is the type that we expect. @@ -3906,17 +3744,17 @@ async def test_seek_async_from_dict(): def test_seek_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.SeekRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.seek), "__call__") as call: call.return_value = pubsub.SeekResponse() - client.seek(request) # Establish that the underlying gRPC stub method was called. 
@@ -3933,17 +3771,17 @@ def test_seek_field_headers(): @pytest.mark.asyncio async def test_seek_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = pubsub.SeekRequest() + request.subscription = "subscription/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.seek), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) - await client.seek(request) # Establish that the underlying gRPC stub method was called. @@ -3961,16 +3799,16 @@ async def test_seek_field_headers_async(): def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.SubscriberGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. transport = transports.SubscriberGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = SubscriberClient( @@ -3980,7 +3818,7 @@ def test_credentials_transport_error(): # It is an error to provide scopes and a transport instance. 
transport = transports.SubscriberGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): client = SubscriberClient( @@ -3991,7 +3829,7 @@ def test_credentials_transport_error(): def test_transport_instance(): # A client may be instantiated with a custom transport instance. transport = transports.SubscriberGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) client = SubscriberClient(transport=transport) assert client.transport is transport @@ -4000,13 +3838,13 @@ def test_transport_instance(): def test_transport_get_channel(): # A client may be instantiated with a custom transport instance. transport = transports.SubscriberGrpcTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel transport = transports.SubscriberGrpcAsyncIOTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) channel = transport.grpc_channel assert channel @@ -4018,23 +3856,23 @@ def test_transport_get_channel(): ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. - with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport_class() adc.assert_called_once() def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) assert isinstance(client.transport, transports.SubscriberGrpcTransport,) def test_subscriber_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(exceptions.DuplicateCredentialArgs): + with pytest.raises(core_exceptions.DuplicateCredentialArgs): transport = transports.SubscriberTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), credentials_file="credentials.json", ) @@ -4046,7 +3884,7 @@ def test_subscriber_base_transport(): ) as Transport: Transport.return_value = None transport = transports.SubscriberTransport( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), ) # Every method on the transport should just blindly @@ -4077,15 +3915,40 @@ def test_subscriber_base_transport(): getattr(transport, method)(request=object()) +@requires_google_auth_gte_1_25_0 def test_subscriber_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( - auth, "load_credentials_from_file" + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def 
test_subscriber_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True ) as load_creds, mock.patch( "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - load_creds.return_value = (credentials.AnonymousCredentials(), None) + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SubscriberTransport( credentials_file="credentials.json", quota_project_id="octopus", ) @@ -4101,19 +3964,36 @@ def test_subscriber_base_transport_with_credentials_file(): def test_subscriber_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(auth, "default") as adc, mock.patch( + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" ) as Transport: Transport.return_value = None - adc.return_value = (credentials.AnonymousCredentials(), None) + adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SubscriberTransport() adc.assert_called_once() +@requires_google_auth_gte_1_25_0 def test_subscriber_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SubscriberClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_subscriber_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) SubscriberClient() adc.assert_called_once_with( scopes=( @@ -4124,14 +4004,38 @@ def test_subscriber_auth_adc(): ) -def test_subscriber_transport_auth_adc(): +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], +) +@requires_google_auth_gte_1_25_0 +def test_subscriber_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. 
- with mock.patch.object(auth, "default") as adc: - adc.return_value = (credentials.AnonymousCredentials(), None) - transports.SubscriberGrpcTransport( - host="squid.clam.whelk", quota_project_id="octopus" + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", ) + + +@pytest.mark.parametrize( + "transport_class", + [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], +) +@requires_google_auth_lt_1_25_0 +def test_subscriber_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") adc.assert_called_once_with( scopes=( "https://www.googleapis.com/auth/cloud-platform", @@ -4141,12 +4045,51 @@ def test_subscriber_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SubscriberGrpcTransport, grpc_helpers), + (transports.SubscriberGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_subscriber_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + @pytest.mark.parametrize( "transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], ) def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() # Check ssl_channel_credentials is used if provided. 
with mock.patch.object(transport_class, "create_channel") as mock_create_channel: @@ -4160,10 +4103,7 @@ def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -4189,7 +4129,7 @@ def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): def test_subscriber_host_no_port(): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com" ), @@ -4199,7 +4139,7 @@ def test_subscriber_host_no_port(): def test_subscriber_host_with_port(): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com:8000" ), @@ -4250,9 +4190,9 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel - cred = credentials.AnonymousCredentials() + cred = ga_credentials.AnonymousCredentials() with pytest.warns(DeprecationWarning): - with mock.patch.object(auth, "default") as adc: + with mock.patch.object(google.auth, "default") as adc: adc.return_value = (cred, None) transport = transport_class( host="squid.clam.whelk", @@ -4268,10 +4208,7 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ 
-4316,10 +4253,7 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -4334,7 +4268,6 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): def test_snapshot_path(): project = "squid" snapshot = "clam" - expected = "projects/{project}/snapshots/{snapshot}".format( project=project, snapshot=snapshot, ) @@ -4357,7 +4290,6 @@ def test_parse_snapshot_path(): def test_subscription_path(): project = "oyster" subscription = "nudibranch" - expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, ) @@ -4380,7 +4312,6 @@ def test_parse_subscription_path(): def test_topic_path(): project = "winkle" topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) actual = SubscriberClient.topic_path(project, topic) assert expected == actual @@ -4400,7 +4331,6 @@ def test_parse_topic_path(): def test_common_billing_account_path(): billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -4421,7 +4351,6 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) actual = SubscriberClient.common_folder_path(folder) assert expected == actual @@ -4440,7 +4369,6 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) actual = SubscriberClient.common_organization_path(organization) assert expected == actual @@ -4459,7 +4387,6 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = 
"cuttlefish" - expected = "projects/{project}".format(project=project,) actual = SubscriberClient.common_project_path(project) assert expected == actual @@ -4479,7 +4406,6 @@ def test_parse_common_project_path(): def test_common_location_path(): project = "winkle" location = "nautilus" - expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) @@ -4506,7 +4432,7 @@ def test_client_withDEFAULT_CLIENT_INFO(): transports.SubscriberTransport, "_prep_wrapped_messages" ) as prep: client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -4515,24 +4441,24 @@ def test_client_withDEFAULT_CLIENT_INFO(): ) as prep: transport_class = SubscriberClient.get_transport_class() transport = transport_class( - credentials=credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) def test_set_iam_policy(transport: str = "grpc"): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.set_iam_policy(request) @@ -4543,7 +4469,7 @@ def test_set_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -4553,18 +4479,18 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.set_iam_policy(request) @@ -4576,7 +4502,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -4584,16 +4510,16 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.set_iam_policy(request) @@ -4609,16 +4535,16 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.SetIamPolicyRequest() + request = iam_policy_pb2.SetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.set_iam_policy(request) @@ -4633,16 +4559,16 @@ async def test_set_iam_policy_field_headers_async(): def test_set_iam_policy_from_dict(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -4650,16 +4576,16 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.set_iam_policy( request={ "resource": "resource_value", - "policy": policy.Policy(version=774), + "policy": policy_pb2.Policy(version=774), } ) call.assert_called() @@ -4667,17 +4593,17 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) response = client.get_iam_policy(request) @@ -4688,7 +4614,7 @@ def test_get_iam_policy(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -4698,18 +4624,18 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy(version=774, etag=b"etag_blob",) ) response = await client.get_iam_policy(request) @@ -4721,7 +4647,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, policy.Policy) + assert isinstance(response, policy_pb2.Policy) assert response.version == 774 @@ -4729,16 +4655,16 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() client.get_iam_policy(request) @@ -4754,16 +4680,16 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.GetIamPolicyRequest() + request = iam_policy_pb2.GetIamPolicyRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) await client.get_iam_policy(request) @@ -4778,16 +4704,16 @@ async def test_get_iam_policy_field_headers_async(): def test_get_iam_policy_from_dict(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy.Policy() + call.return_value = policy_pb2.Policy() response = client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -4795,16 +4721,16 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) response = await client.get_iam_policy( request={ "resource": "resource_value", - "options": options.GetPolicyOptions(requested_policy_version=2598), + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), } ) call.assert_called() @@ -4812,19 +4738,19 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = SubscriberClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = iam_policy.TestIamPermissionsResponse( + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( permissions=["permissions_value"], ) @@ -4837,7 +4763,7 @@ def test_test_iam_permissions(transport: str = "grpc"): assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -4845,12 +4771,12 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -4858,7 +4784,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse(permissions=["permissions_value"],) + iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) ) response = await client.test_iam_permissions(request) @@ -4870,24 +4798,24 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy.TestIamPermissionsResponse) + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] def test_test_iam_permissions_field_headers(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() client.test_iam_permissions(request) @@ -4903,11 +4831,11 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = iam_policy.TestIamPermissionsRequest() + request = iam_policy_pb2.TestIamPermissionsRequest() request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4915,7 +4843,7 @@ async def test_test_iam_permissions_field_headers_async(): type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) await client.test_iam_permissions(request) @@ -4931,13 +4859,13 @@ async def test_test_iam_permissions_field_headers_async(): def test_test_iam_permissions_from_dict(): - client = SubscriberClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = iam_policy.TestIamPermissionsResponse() + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() response = client.test_iam_permissions( request={ @@ -4950,14 +4878,14 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): - client = SubscriberAsyncClient(credentials=credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy.TestIamPermissionsResponse() + iam_policy_pb2.TestIamPermissionsResponse() ) response = await client.test_iam_permissions( From 6bbd20939d7ab6a267ea3c0229cc088d4488c695 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 19 Jul 2021 16:37:01 -0400 Subject: [PATCH 0693/1197] chore: migrate to owl bot (#411) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate to owl bot * chore: copy files from googleapis-gen 40278112d2922ec917140dcb5cc6d5ef2923aeb2 * chore: run the post processor * pull in synth.py changes from #299 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md * .github/use gcr.io/cloud-devrel-public-resources/owlbot-python post processor image * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md * fix owlbot.py replacement * Copy generated code from googleapis-gen * Work around gapic generator docstring bug * fix: require google-api-core >= 1.26.0 * Work around gapic generator docstring bug * fix(deps): add packaging requirement * revert .coveragerc Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 3 + .../google-cloud-pubsub/.github/.OwlBot.yaml | 26 ++ packages/google-cloud-pubsub/owlbot.py | 340 ++++++++++++++++++ packages/google-cloud-pubsub/synth.metadata | 43 --- packages/google-cloud-pubsub/synth.py | 337 ----------------- 5 files changed, 369 insertions(+), 380 deletions(-) create mode 100644 packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml create mode 100644 packages/google-cloud-pubsub/.github/.OwlBot.yaml create mode 100644 packages/google-cloud-pubsub/owlbot.py delete mode 100644 packages/google-cloud-pubsub/synth.metadata delete mode 100644 packages/google-cloud-pubsub/synth.py diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000000..6bfcf3ecc6e5 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -0,0 +1,3 @@ +docker: + digest: sha256:808628fb9d5c649a28355b062ee184a4689e98e8607c241461dae26e5a6df0c2 + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.yaml new file mode 100644 index 000000000000..0bfe82f743cb --- /dev/null +++ b/packages/google-cloud-pubsub/.github/.OwlBot.yaml @@ -0,0 +1,26 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/pubsub/(v.*)/.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: 40278112d2922ec917140dcb5cc6d5ef2923aeb2 + diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py new file mode 100644 index 000000000000..6cceccf8a397 --- /dev/null +++ b/packages/google-cloud-pubsub/owlbot.py @@ -0,0 +1,340 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""This script is used to synthesize generated parts of this library.""" + +import re +import textwrap + +import synthtool as s +from synthtool import gcp +from synthtool.languages import python + +common = gcp.CommonTemplates() + +default_version = "v1" + +for library in s.get_staging_dirs(default_version): + # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 + s.replace(library / f"google/pubsub_{library.name}/types/*.py", + r""". + Attributes:""", + r""".\n + Attributes:""", + ) + + # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 + s.replace(library / f"google/pubsub_{library.name}/types/*.py", + r""". + Attributes:""", + r""".\n + Attributes:""", + ) + + # DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. + s.replace( + library / f"google/pubsub_{library.name}/services/*er/*client.py", + r"""DEFAULT_ENDPOINT = \"pubsub\.googleapis\.com\"""", + """ + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', + ) + + SERVICE_ADDRESS = "pubsub.googleapis.com:443" + \"""The default address of the service.\""" + + \g<0>""", + ) + + # Modify GRPC options in transports. 
+ count = s.replace( + [ + library / f"google/pubsub_{library.name}/services/*/transports/grpc*", + library / f"tests/unit/gapic/pubsub_{library.name}/*" + ], + "options=\[.*?\]", + """options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ]""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 15: + raise Exception("Expected replacements for gRPC channel options not made.") + + # If the emulator is used, force an insecure gRPC channel to avoid SSL errors. + clients_to_patch = [ + library / f"google/pubsub_{library.name}/services/publisher/client.py", + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + ] + err_msg = "Expected replacements for gRPC channel to use with the emulator not made." + + count = s.replace( + clients_to_patch, + r"import os", + "import functools\n\g<0>" + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + count = s.replace( + clients_to_patch, + f"from google\.pubsub_{library.name}\.types import pubsub", + "\g<0>\n\nimport grpc" + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + count = s.replace( + clients_to_patch, + r"Transport = type\(self\)\.get_transport_class\(transport\)", + """\g<0> + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(Transport, type(self)._transport_registry["grpc"]): + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + Transport = functools.partial(Transport, channel=channel) + + """, + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream + # results. 
+ s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + ( + r"# Wrap the RPC method.*\n" + r"\s+# and friendly error.*\n" + r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]" + ), + """ + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False + + \g<0>""", + ) + + # Emit deprecation warning if return_immediately flag is set with synchronous pull. + s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/*client.py", + r"import pkg_resources", + "import warnings\n\g<0>", + ) + + count = s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/*client.py", + r""" + ([^\n\S]+(?:async\ )?def\ pull\(.*?->\ pubsub\.PullResponse:.*?) + ((?P[^\n\S]+)\#\ Wrap\ the\ RPC\ method) + """, + textwrap.dedent( + """ + \g<1> + \gif request.return_immediately: + \g warnings.warn( + \g "The return_immediately flag is deprecated and should be set to False.", + \g category=DeprecationWarning, + \g ) + + \g<2>"""), + flags=re.MULTILINE | re.DOTALL | re.VERBOSE, + ) + + if count != 2: + raise Exception("Too many or too few replacements in pull() methods.") + + # Silence deprecation warnings in pull() method flattened parameter tests. + s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + "import mock", + "\g<0>\nimport warnings", + ) + + count = s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) 
+ \s+(client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags = re.MULTILINE | re.DOTALL, + ) + + if count < 1: + raise Exception("Catch warnings replacement failed.") + + count = s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) + \s+response = (await client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags = re.MULTILINE | re.DOTALL, + ) + + if count < 1: + raise Exception("Catch warnings replacement failed.") + + # Make sure that client library version is present in user agent header. + s.replace( + [ + library / f"google/pubsub_{library.name}/services/publisher/async_client.py", + library / f"google/pubsub_{library.name}/services/publisher/client.py", + library / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + library / f"google/pubsub_{library.name}/services/schema_service/async_client.py", + library / f"google/pubsub_{library.name}/services/schema_service/client.py", + library / f"google/pubsub_{library.name}/services/schema_service/transports/base.py", + library / f"google/pubsub_{library.name}/services/subscriber/async_client.py", + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + library / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", + ], + r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-pubsub['"]""", + "client_library_version=\g<1>'google-cloud-pubsub'", + ) + + # Docstrings of *_iam_policy() methods are formatted poorly and must be fixed + # in order to avoid docstring format warnings in docs. 
+ s.replace(library / f"google/pubsub_{library.name}/services/*er/client.py", r"(\s+)Args:", "\n\g<1>Args:") + s.replace( + library / f"google/pubsub_{library.name}/services/*er/client.py", + r"(\s+)\*\*JSON Example\*\*\s+::", + "\n\g<1>**JSON Example**::\n", + ) + + s.replace( + library / f"google/pubsub_{library.name}/services/*er/client.py", + r"(\s+)\*\*YAML Example\*\*\s+::", + "\n\g<1>**YAML Example**::\n", + ) + + s.replace( + library / f"google/pubsub_{library.name}/services/*er/client.py", + r"(\s+)For a description of IAM and its features, see", + "\n\g<0>", + ) + + # Allow timeout to be an instance of google.api_core.timeout.* + s.replace( + library / f"google/pubsub_{library.name}/types/__init__.py", + r"from \.pubsub import \(", + "from typing import Union\n\n\g<0>" + ) + + s.replace( + library / f"google/pubsub_{library.name}/types/__init__.py", + r"__all__ = \(\n", + textwrap.dedent('''\ + TimeoutType = Union[ + int, + float, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", + ] + """The type of the timeout parameter of publisher client methods.""" + + \g<0> "TimeoutType",''') + ) + + s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"from google.api_core import retry as retries.*\n", + "\g<0>from google.api_core import timeout as timeouts # type: ignore\n" + ) + + s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + f"from google\.pubsub_{library.name}\.types import pubsub", + f"\g<0>\nfrom google.pubsub_{library.name}.types import TimeoutType", + ) + + s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"(\s+)timeout: float = None.*\n", + f"\g<1>timeout: TimeoutType = gapic_{library.name}.method.DEFAULT,", + ) + + s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"([^\S\r\n]+)timeout \(float\): (.*)\n", + ( + "\g<1>timeout (TimeoutType):\n" + "\g<1> \g<2>\n" 
+ ), + ) + + # The namespace package declaration in google/cloud/__init__.py should be excluded + # from coverage. + s.replace( + ".coveragerc", + r"((?P[^\n\S]+)google/pubsub/__init__\.py)", + "\ggoogle/cloud/__init__.py\n\g<0>", + ) + + s.move( + library, + excludes=[ + "docs/**/*", + "nox.py", + "README.rst", + "setup.py", + f"google/cloud/pubsub_{library.name}/__init__.py", + f"google/cloud/pubsub_{library.name}/types.py", + ], + ) + +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- +templated_files = gcp.CommonTemplates().py_library( + microgenerator=True, + samples=True, + cov_level=100, + system_test_external_dependencies=["psutil"], +) +s.move(templated_files, excludes=[".coveragerc"]) + +# ---------------------------------------------------------------------------- +# Samples templates +# ---------------------------------------------------------------------------- +python.py_samples() + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-pubsub/synth.metadata b/packages/google-cloud-pubsub/synth.metadata deleted file mode 100644 index ca1d82ffbf55..000000000000 --- a/packages/google-cloud-pubsub/synth.metadata +++ /dev/null @@ -1,43 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "git@github.com:googleapis/python-pubsub.git", - "sha": "618ceeefab8f1f7f968ee42a94453afde4d58f7b" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4124d245133c0f740ac5e6fa2e7daeba327f1297" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4124d245133c0f740ac5e6fa2e7daeba327f1297" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": 
"4124d245133c0f740ac5e6fa2e7daeba327f1297" - } - } - ], - "destinations": [ - { - "client": { - "source": "googleapis", - "apiName": "pubsub", - "apiVersion": "v1", - "language": "python", - "generator": "bazel" - } - } - ] -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/synth.py b/packages/google-cloud-pubsub/synth.py deleted file mode 100644 index f5631974bfcf..000000000000 --- a/packages/google-cloud-pubsub/synth.py +++ /dev/null @@ -1,337 +0,0 @@ -# Copyright 2018 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import re -import textwrap - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -gapic = gcp.GAPICBazel() -common = gcp.CommonTemplates() -version = "v1" - -# ---------------------------------------------------------------------------- -# Generate pubsub GAPIC layer -# ---------------------------------------------------------------------------- -library = gapic.py_library( - service="pubsub", - version=version, - bazel_target="//google/pubsub/v1:pubsub-v1-py", - include_protos=True, -) -s.move( - library, - excludes=[ - "docs/**/*", - "nox.py", - "README.rst", - "setup.py", - "google/cloud/pubsub_v1/__init__.py", - "google/cloud/pubsub_v1/types.py", - ], -) - -# DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. 
-s.replace( - "google/pubsub_v1/services/*er/*client.py", - """DEFAULT_ENDPOINT = "pubsub\.googleapis\.com\"""", - """ - # The scopes needed to make gRPC calls to all of the methods defined in - # this service - _DEFAULT_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', - ) - - SERVICE_ADDRESS = "pubsub.googleapis.com:443" - \"""The default address of the service.\""" - - \g<0>""", -) - -# Modify GRPC options in transports. -count = s.replace( - ["google/pubsub_v1/services/*/transports/grpc*", "tests/unit/gapic/pubsub_v1/*"], - "options=\[.*?\]", - """options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ("grpc.keepalive_time_ms", 30000), - ]""", - flags=re.MULTILINE | re.DOTALL, -) - -if count < 15: - raise Exception("Expected replacements for gRPC channel options not made.") - -# If the emulator is used, force an insecure gRPC channel to avoid SSL errors. -clients_to_patch = [ - "google/pubsub_v1/services/publisher/client.py", - "google/pubsub_v1/services/subscriber/client.py", -] -err_msg = "Expected replacements for gRPC channel to use with the emulator not made." 
- -count = s.replace( - clients_to_patch, - r"import os", - "import functools\n\g<0>" -) - -if count < len(clients_to_patch): - raise Exception(err_msg) - -count = s.replace( - clients_to_patch, - r"from google\.pubsub_v1\.types import pubsub", - "\g<0>\n\nimport grpc" -) - -if count < len(clients_to_patch): - raise Exception(err_msg) - -count = s.replace( - clients_to_patch, - r"Transport = type\(self\)\.get_transport_class\(transport\)", - """\g<0> - - emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") - if emulator_host: - if issubclass(Transport, type(self)._transport_registry["grpc"]): - channel = grpc.insecure_channel(target=emulator_host) - else: - channel = grpc.aio.insecure_channel(target=emulator_host) - Transport = functools.partial(Transport, channel=channel) - - """, -) - -if count < len(clients_to_patch): - raise Exception(err_msg) - -# Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream -# results. -s.replace( - "google/pubsub_v1/services/subscriber/client.py", - ( - r"# Wrap the RPC method.*\n" - r"\s+# and friendly error.*\n" - r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]" - ), - """ - # Wrappers in api-core should not automatically pre-fetch the first - # stream result, as this breaks the stream when re-opening it. - # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self._transport.streaming_pull._prefetch_first_result_ = False - - \g<0>""", -) - -# Emit deprecation warning if return_immediately flag is set with synchronous pull. -s.replace( - "google/pubsub_v1/services/subscriber/*client.py", - r"import pkg_resources", - "import warnings\n\g<0>", -) -count = s.replace( - "google/pubsub_v1/services/subscriber/*client.py", - r""" - ([^\n\S]+(?:async\ )?def\ pull\(.*?->\ pubsub\.PullResponse:.*?) 
- ((?P[^\n\S]+)\#\ Wrap\ the\ RPC\ method) - """, - textwrap.dedent( - """ - \g<1> - \gif request.return_immediately: - \g warnings.warn( - \g "The return_immediately flag is deprecated and should be set to False.", - \g category=DeprecationWarning, - \g ) - - \g<2>""" - ), - flags=re.MULTILINE | re.DOTALL | re.VERBOSE, -) - -if count != 2: - raise Exception("Too many or too few replacements in pull() methods.") - -# Silence deprecation warnings in pull() method flattened parameter tests. -s.replace( - "tests/unit/gapic/pubsub_v1/test_subscriber.py", - "import mock", - "\g<0>\nimport warnings", -) -count = s.replace( - "tests/unit/gapic/pubsub_v1/test_subscriber.py", - textwrap.dedent( - r""" - ([^\n\S]+# Call the method with a truthy value for each flattened field, - [^\n\S]+# using the keyword arguments to the method\.) - \s+(client\.pull\(.*?\))""" - ), - """\n\g<1> - with warnings.catch_warnings(): - warnings.simplefilter("ignore", category=DeprecationWarning) - \g<2>""", - flags = re.MULTILINE | re.DOTALL, -) - -if count < 1: - raise Exception("Catch warnings replacement failed.") - -count = s.replace( - "tests/unit/gapic/pubsub_v1/test_subscriber.py", - textwrap.dedent( - r""" - ([^\n\S]+# Call the method with a truthy value for each flattened field, - [^\n\S]+# using the keyword arguments to the method\.) - \s+response = (await client\.pull\(.*?\))""" - ), - """\n\g<1> - with warnings.catch_warnings(): - warnings.simplefilter("ignore", category=DeprecationWarning) - \g<2>""", - flags = re.MULTILINE | re.DOTALL, -) - -if count < 1: - raise Exception("Catch warnings replacement failed.") - -# Make sure that client library version is present in user agent header. 
-s.replace( - [ - "google/pubsub_v1/services/publisher/async_client.py", - "google/pubsub_v1/services/publisher/client.py", - "google/pubsub_v1/services/publisher/transports/base.py", - "google/pubsub_v1/services/schema_service/async_client.py", - "google/pubsub_v1/services/schema_service/client.py", - "google/pubsub_v1/services/schema_service/transports/base.py", - "google/pubsub_v1/services/subscriber/async_client.py", - "google/pubsub_v1/services/subscriber/client.py", - "google/pubsub_v1/services/subscriber/transports/base.py", - ], - r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-pubsub['"]""", - "client_library_version=\g<1>'google-cloud-pubsub'", -) - -# Docstrings of *_iam_policy() methods are formatted poorly and must be fixed -# in order to avoid docstring format warnings in docs. -s.replace("google/pubsub_v1/services/*er/client.py", r"(\s+)Args:", "\n\g<1>Args:") -s.replace( - "google/pubsub_v1/services/*er/client.py", - r"(\s+)\*\*JSON Example\*\*\s+::", - "\n\g<1>**JSON Example**::\n", -) -s.replace( - "google/pubsub_v1/services/*er/client.py", - r"(\s+)\*\*YAML Example\*\*\s+::", - "\n\g<1>**YAML Example**::\n", -) -s.replace( - "google/pubsub_v1/services/*er/client.py", - r"(\s+)For a description of IAM and its features, see", - "\n\g<0>", -) - -# Allow timeout to be an instance of google.api_core.timeout.* -s.replace( - "google/pubsub_v1/types/__init__.py", - r"from \.pubsub import \(", - "from typing import Union\n\n\g<0>" -) -s.replace( - "google/pubsub_v1/types/__init__.py", - r"__all__ = \(\n", - textwrap.dedent('''\ - TimeoutType = Union[ - int, - float, - "google.api_core.timeout.ConstantTimeout", - "google.api_core.timeout.ExponentialTimeout", - ] - """The type of the timeout parameter of publisher client methods.""" - - \g<0> "TimeoutType",''') -) - -s.replace( - "google/pubsub_v1/services/publisher/*client.py", - r"from google.api_core import retry as retries.*\n", - "\g<0>from google.api_core import timeout as timeouts 
# type: ignore\n" -) -s.replace( - "google/pubsub_v1/services/publisher/*client.py", - r"from google\.pubsub_v1\.types import pubsub", - "\g<0>\nfrom google.pubsub_v1.types import TimeoutType", -) - -s.replace( - "google/pubsub_v1/services/publisher/*client.py", - r"(\s+)timeout: float = None.*\n", - "\g<1>timeout: TimeoutType = gapic_v1.method.DEFAULT,", -) -s.replace( - "google/pubsub_v1/services/publisher/*client.py", - r"([^\S\r\n]+)timeout \(float\): (.*)\n", - ( - "\g<1>timeout (TimeoutType):\n" - "\g<1> \g<2>\n" - ), -) - -# The namespace package declaration in google/cloud/__init__.py should be excluded -# from coverage. -s.replace( - ".coveragerc", - r"((?P[^\n\S]+)google/pubsub/__init__\.py)", - "\ggoogle/cloud/__init__.py\n\g<0>", -) - -# Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 -s.replace(f"google/pubsub_v1/types/*.py", - r""". - Attributes:""", - r""".\n - Attributes:""", -) - -# Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 -s.replace("google/pubsub_v1/types/*.py", - r""". 
- Attributes:""", - r""".\n - Attributes:""", -) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -templated_files = gcp.CommonTemplates().py_library( - microgenerator=True, - samples=True, - cov_level=100, - system_test_external_dependencies=["psutil"], -) -s.move(templated_files, excludes=[".coveragerc"]) - -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- -python.py_samples() - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 539bc3ccd45ac90371976a8bfcda471270a06bbe Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 20 Jul 2021 02:10:24 -0600 Subject: [PATCH 0694/1197] chore: add note explaining google-api-core < 3.0.0dev pin (#454) Expand pins on library dependencies in preparation for these dependencies taking a new major version. See https://github.com/googleapis/google-cloud-python/issues/10566. 
--- packages/google-cloud-pubsub/setup.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 5b05b39a3e8b..ebffa39207cd 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,7 +30,10 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 - "google-api-core[grpc] >= 1.26.0, < 2.0.0dev", + # NOTE: Maintainers, please do not require google-api-core>=2.x.x + # Until this issue is closed + # https://github.com/googleapis/google-cloud-python/issues/10566 + "google-api-core[grpc] >= 1.26.0, <3.0.0dev", "libcst >= 0.3.10", "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", From 05e1c7213a0c98927bc2e930100ccc3c6aacbc01 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 22 Jul 2021 13:23:52 -0400 Subject: [PATCH 0695/1197] feat: Add Pub/Sub topic retention fields (#456) --- .../google/cloud/pubsub_v1/proto/pubsub.proto | 19 +++++++++++++ .../services/publisher/async_client.py | 1 + .../pubsub_v1/services/publisher/client.py | 1 + .../google/pubsub_v1/types/pubsub.py | 27 +++++++++++++++++++ .../scripts/fixup_pubsub_v1_keywords.py | 4 +-- .../unit/gapic/pubsub_v1/test_publisher.py | 1 + 6 files changed, 51 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index 173c4ce71577..c5cb855d67fd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -196,6 +196,16 @@ message Topic { // Reserved for future use. This field is set only in responses from the // server; it is ignored if it is set in any requests. 
bool satisfies_pzs = 7; + + // Indicates the minimum duration to retain a message after it is published to + // the topic. If this field is set, messages published to the topic in the + // last `message_retention_duration` are always available to subscribers. For + // instance, it allows any attached subscription to [seek to a + // timestamp](https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) + // that is up to `message_retention_duration` in the past. If this field is + // not set, message retention is controlled by settings on individual + // subscriptions. Cannot be more than 7 days or less than 10 minutes. + google.protobuf.Duration message_retention_duration = 8; } // A message that is published by publishers and consumed by subscribers. The @@ -740,6 +750,15 @@ message Subscription { // FAILED_PRECONDITION. If the subscription is a push subscription, pushes to // the endpoint will not be made. bool detached = 15; + + // Output only. Indicates the minimum duration for which a message is retained + // after it is published to the subscription's topic. If this field is set, + // messages published to the subscription's topic in the last + // `topic_message_retention_duration` are always available to subscribers. See + // the `message_retention_duration` field in `Topic`. This field is set only + // in responses from the server; it is ignored if it is set in any requests. + google.protobuf.Duration topic_message_retention_duration = 17 + [(google.api.field_behavior) = OUTPUT_ONLY]; } // A policy that specifies how Cloud Pub/Sub retries message delivery. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 09e4b0e557fa..df436e72116d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -29,6 +29,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index cb44506085ed..e68443254719 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -34,6 +34,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index bfbbcaf87e5f..dadf62c0d5bd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -143,6 +143,18 @@ class Topic(proto.Message): Reserved for future use. This field is set only in responses from the server; it is ignored if it is set in any requests. 
+ message_retention_duration (google.protobuf.duration_pb2.Duration): + Indicates the minimum duration to retain a message after it + is published to the topic. If this field is set, messages + published to the topic in the last + ``message_retention_duration`` are always available to + subscribers. For instance, it allows any attached + subscription to `seek to a + timestamp `__ + that is up to ``message_retention_duration`` in the past. If + this field is not set, message retention is controlled by + settings on individual subscriptions. Cannot be more than 7 + days or less than 10 minutes. """ name = proto.Field(proto.STRING, number=1,) @@ -153,6 +165,9 @@ class Topic(proto.Message): kms_key_name = proto.Field(proto.STRING, number=5,) schema_settings = proto.Field(proto.MESSAGE, number=6, message="SchemaSettings",) satisfies_pzs = proto.Field(proto.BOOL, number=7,) + message_retention_duration = proto.Field( + proto.MESSAGE, number=8, message=duration_pb2.Duration, + ) class PubsubMessage(proto.Message): @@ -541,6 +556,15 @@ class Subscription(proto.Message): ``StreamingPull`` requests will return FAILED_PRECONDITION. If the subscription is a push subscription, pushes to the endpoint will not be made. + topic_message_retention_duration (google.protobuf.duration_pb2.Duration): + Output only. Indicates the minimum duration for which a + message is retained after it is published to the + subscription's topic. If this field is set, messages + published to the subscription's topic in the last + ``topic_message_retention_duration`` are always available to + subscribers. See the ``message_retention_duration`` field in + ``Topic``. This field is set only in responses from the + server; it is ignored if it is set in any requests. 
""" name = proto.Field(proto.STRING, number=1,) @@ -562,6 +586,9 @@ class Subscription(proto.Message): ) retry_policy = proto.Field(proto.MESSAGE, number=14, message="RetryPolicy",) detached = proto.Field(proto.BOOL, number=15,) + topic_message_retention_duration = proto.Field( + proto.MESSAGE, number=17, message=duration_pb2.Duration, + ) class RetryPolicy(proto.Message): diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 7262e021e344..da668f42f91e 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,8 +42,8 @@ class pubsubCallTransformer(cst.CSTTransformer): 'acknowledge': ('subscription', 'ack_ids', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'topic_message_retention_duration', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), 'delete_schema': ('name', ), 'delete_snapshot': ('snapshot', ), 'delete_subscription': ('subscription', ), diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py 
b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index b171fb3e7983..ae5654d87011 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -35,6 +35,7 @@ from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.publisher import PublisherAsyncClient From 183810a24074575da80f6214dc9d61be7005f44b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 24 Jul 2021 22:04:23 +0000 Subject: [PATCH 0696/1197] fix: enable self signed jwt for grpc (#458) PiperOrigin-RevId: 386504689 Source-Link: https://github.com/googleapis/googleapis/commit/762094a99ac6e03a17516b13dfbef37927267a70 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6bfc480e1a161d5de121c2bcc3745885d33b265a --- .../services/publisher/async_client.py | 1 - .../pubsub_v1/services/publisher/client.py | 5 +- .../services/schema_service/client.py | 4 + .../pubsub_v1/services/subscriber/client.py | 4 + packages/google-cloud-pubsub/owlbot.py | 105 +++++++++++++++++- .../scripts/fixup_pubsub_v1_keywords.py | 4 +- .../unit/gapic/pubsub_v1/test_publisher.py | 30 +++-- .../gapic/pubsub_v1/test_schema_service.py | 31 +++--- .../unit/gapic/pubsub_v1/test_subscriber.py | 29 +++-- 9 files changed, 169 insertions(+), 44 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index df436e72116d..09e4b0e557fa 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -29,7 +29,6 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index e68443254719..cb60f281080b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -34,7 +34,6 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType @@ -399,6 +398,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def create_topic( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 106afa85069d..5701a9bb0682 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -342,6 +342,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, 
quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def create_schema( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 9f506214a2e4..7f3c3a9dc280 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -411,6 +411,10 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), ) def create_subscription( diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 6cceccf8a397..d6a43e71aa02 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -299,14 +299,111 @@ ), ) + # Add development feature `message_retention_duration` from pubsub_dev branch of googleapis + # See PR https://github.com/googleapis/python-pubsub/pull/456 + count = s.replace( + library / f"google/pubsub_{library.name}/types/pubsub.py", + """satisfies_pzs \(bool\): + Reserved for future use. This field is set + only in responses from the server; it is ignored + if it is set in any requests.""", + """satisfies_pzs (bool): + Reserved for future use. This field is set + only in responses from the server; it is ignored + if it is set in any requests. + message_retention_duration (google.protobuf.duration_pb2.Duration): + Indicates the minimum duration to retain a message after it + is published to the topic. 
If this field is set, messages + published to the topic in the last + ``message_retention_duration`` are always available to + subscribers. For instance, it allows any attached + subscription to `seek to a + timestamp `__ + that is up to ``message_retention_duration`` in the past. If + this field is not set, message retention is controlled by + settings on individual subscriptions. Cannot be more than 7 + days or less than 10 minutes.""" + ) + + # Add development feature `message_retention_duration` from pubsub_dev branch of googleapis + # See PR https://github.com/googleapis/python-pubsub/pull/456 + count += s.replace( + library / f"google/pubsub_{library.name}/types/pubsub.py", + """satisfies_pzs = proto.Field\( + proto.BOOL, + number=7, + \)""", + """satisfies_pzs = proto.Field( + proto.BOOL, + number=7, + ) + message_retention_duration = proto.Field( + proto.MESSAGE, number=8, message=duration_pb2.Duration, + )""" + ) + + # Add development feature `topic_message_retention_duration` from pubsub_dev branch of googleapis + # See PR https://github.com/googleapis/python-pubsub/pull/456 + count += s.replace( + library / f"google/pubsub_{library.name}/types/pubsub.py", + """detached \(bool\): + Indicates whether the subscription is detached from its + topic. Detached subscriptions don't receive messages from + their topic and don't retain any backlog. ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. + If the subscription is a push subscription, pushes to the + endpoint will not be made.""", + """detached (bool): + Indicates whether the subscription is detached from its + topic. Detached subscriptions don't receive messages from + their topic and don't retain any backlog. ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. + If the subscription is a push subscription, pushes to the + endpoint will not be made. + topic_message_retention_duration (google.protobuf.duration_pb2.Duration): + Output only. 
Indicates the minimum duration for which a + message is retained after it is published to the + subscription's topic. If this field is set, messages + published to the subscription's topic in the last + ``topic_message_retention_duration`` are always available to + subscribers. See the ``message_retention_duration`` field in + ``Topic``. This field is set only in responses from the + server; it is ignored if it is set in any requests.""" + ) + + # Add development feature `topic_message_retention_duration` from pubsub_dev branch of googleapis + # See PR https://github.com/googleapis/python-pubsub/pull/456 + count += s.replace( + library / f"google/pubsub_{library.name}/types/pubsub.py", + """detached = proto.Field\( + proto.BOOL, + number=15, + \)""", + """detached = proto.Field( + proto.BOOL, + number=15, + ) + topic_message_retention_duration = proto.Field( + proto.MESSAGE, number=17, message=duration_pb2.Duration, + ) + """ + ) + + if count != 4: + raise Exception("Pub/Sub topic retention feature not added") + # The namespace package declaration in google/cloud/__init__.py should be excluded # from coverage. 
- s.replace( - ".coveragerc", - r"((?P[^\n\S]+)google/pubsub/__init__\.py)", - "\ggoogle/cloud/__init__.py\n\g<0>", + count = s.replace( + library / ".coveragerc", + "google/pubsub/__init__.py", + """google/cloud/__init__.py + google/pubsub/__init__.py""", ) + if count < 1: + raise Exception(".coveragerc replacement failed.") + s.move( library, excludes=[ diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index da668f42f91e..7262e021e344 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,8 +42,8 @@ class pubsubCallTransformer(cst.CSTTransformer): 'acknowledge': ('subscription', 'ack_ids', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'topic_message_retention_duration', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', ), 'delete_schema': ('name', ), 'delete_snapshot': ('snapshot', ), 'delete_subscription': ('subscription', ), diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py 
index ae5654d87011..9f122309b711 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -35,7 +35,6 @@ from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.publisher import PublisherAsyncClient @@ -116,16 +115,6 @@ def test_publisher_client_from_service_account_info(client_class): assert client.transport._host == "pubsub.googleapis.com:443" -@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) -def test_publisher_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -133,7 +122,7 @@ def test_publisher_client_service_account_always_use_jwt(client_class): (transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def test_publisher_client_service_account_always_use_jwt_true( +def test_publisher_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -143,6 +132,13 @@ def test_publisher_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, 
always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) def test_publisher_client_from_service_account_file(client_class): @@ -217,6 +213,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -233,6 +230,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -249,6 +247,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -277,6 +276,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -341,6 +341,7 @@ def test_publisher_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. 
Whether client cert is used depends on @@ -374,6 +375,7 @@ def test_publisher_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -395,6 +397,7 @@ def test_publisher_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -425,6 +428,7 @@ def test_publisher_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -455,6 +459,7 @@ def test_publisher_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -472,6 +477,7 @@ def test_publisher_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index fcc8b68e198d..91b58aff4ea3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -122,18 +122,6 @@ def test_schema_service_client_from_service_account_info(client_class): assert client.transport._host == "pubsub.googleapis.com:443" -@pytest.mark.parametrize( - "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] -) -def test_schema_service_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - 
creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -141,7 +129,7 @@ def test_schema_service_client_service_account_always_use_jwt(client_class): (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def test_schema_service_client_service_account_always_use_jwt_true( +def test_schema_service_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -151,6 +139,13 @@ def test_schema_service_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize( "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] @@ -231,6 +226,7 @@ def test_schema_service_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -247,6 +243,7 @@ def test_schema_service_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -263,6 +260,7 @@ def test_schema_service_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -291,6 
+289,7 @@ def test_schema_service_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -357,6 +356,7 @@ def test_schema_service_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -390,6 +390,7 @@ def test_schema_service_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -411,6 +412,7 @@ def test_schema_service_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,6 +443,7 @@ def test_schema_service_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -471,6 +474,7 @@ def test_schema_service_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -490,6 +494,7 @@ def test_schema_service_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 67e6b26cca3e..8242d636eada 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ 
b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -118,16 +118,6 @@ def test_subscriber_client_from_service_account_info(client_class): assert client.transport._host == "pubsub.googleapis.com:443" -@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) -def test_subscriber_client_service_account_always_use_jwt(client_class): - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - @pytest.mark.parametrize( "transport_class,transport_name", [ @@ -135,7 +125,7 @@ def test_subscriber_client_service_account_always_use_jwt(client_class): (transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), ], ) -def test_subscriber_client_service_account_always_use_jwt_true( +def test_subscriber_client_service_account_always_use_jwt( transport_class, transport_name ): with mock.patch.object( @@ -145,6 +135,13 @@ def test_subscriber_client_service_account_always_use_jwt_true( transport = transport_class(credentials=creds, always_use_jwt_access=True) use_jwt.assert_called_once_with(True) + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + @pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) def test_subscriber_client_from_service_account_file(client_class): @@ -221,6 +218,7 @@ def test_subscriber_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -237,6 +235,7 @@ def 
test_subscriber_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -253,6 +252,7 @@ def test_subscriber_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -281,6 +281,7 @@ def test_subscriber_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -345,6 +346,7 @@ def test_subscriber_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -378,6 +380,7 @@ def test_subscriber_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -399,6 +402,7 @@ def test_subscriber_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -429,6 +433,7 @@ def test_subscriber_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -459,6 +464,7 @@ def test_subscriber_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -476,6 +482,7 @@ def test_subscriber_client_client_options_from_dict(): client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) From 13151d3a827f2439c9d2e8955b34afbfa6841fed Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 Jul 2021 16:38:56 +0200 Subject: [PATCH 0697/1197] chore: release 2.7.0 (#452) * chore: release 2.7.0 * Groom release notes Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Peter Lamut --- packages/google-cloud-pubsub/CHANGELOG.md | 23 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 22e60fd4f1ba..8b7dabab165f 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,29 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.7.0](https://www.github.com/googleapis/python-pubsub/compare/v2.6.1...v2.7.0) (2021-07-24) + + +### Features + +* Add `always_use_jwt_access`. 
([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Add method signature for `Subscriber.Pull` without the deprecated `return_immediately` field. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Add Pub/Sub topic retention fields. ([#456](https://www.github.com/googleapis/python-pubsub/issues/456)) ([911829d](https://www.github.com/googleapis/python-pubsub/commit/911829d85c6ec36a87b873cbfe34497b1a493dde)) +* Add subscription properties to streaming pull response. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Support self-signed JWT flow for service accounts. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) + + +### Bug Fixes + +* Add async client to `%name_%version/init.py`. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Disable `always_use_jwt_access`. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Enable self signed JWT for gRPC. ([#458](https://www.github.com/googleapis/python-pubsub/issues/458)) ([c6e0ff6](https://www.github.com/googleapis/python-pubsub/commit/c6e0ff69faeda614aa6088af59d3420e16720d27)) + +### Dependencies + +* Add `packaging` requirement. ([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) +* Require `google-api-core >= 1.26.0`. 
([1f30ef7](https://www.github.com/googleapis/python-pubsub/commit/1f30ef7f26ae1156751bc42305b1eb156115b5e5)) + ## 2.6.1 07-05-2021 10:33 PDT diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ebffa39207cd..ce5ba0e533c3 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.6.1" +version = "2.7.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 9e9b20df8c8a9837e06ed49cc1fe250c73c2240b Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 2 Aug 2021 19:24:37 -0600 Subject: [PATCH 0698/1197] chore: require CODEOWNER review and up to date branches (#463) These two lines bring the rules on this repo in line with the defaults: https://github.com/googleapis/repo-automation-bots/blob/63c858e539e1f4d9bb8ea66e12f9c0a0de5fef55/packages/sync-repo-settings/src/required-checks.json#L40-L50 --- packages/google-cloud-pubsub/.github/sync-repo-settings.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index 109a0b4ef249..c001b4152b83 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -4,6 +4,8 @@ branchProtectionRules: # Identifies the protection rule pattern. Name of the branch to be protected. 
# Defaults to `master` - pattern: master + requiresCodeOwnerReviews: true + requiresStrictStatusChecks: true requiredStatusCheckContexts: - 'Kokoro' - 'Kokoro - Against Pub/Sub Lite samples' From d7fff5149a20dfb91a99cf13931937f7e7ce8af6 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 4 Aug 2021 17:24:05 +0200 Subject: [PATCH 0699/1197] chore(deps): update dependency google-cloud-pubsub to v2.7.0 (#460) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 00c912dcedbf..a85b224e5fca 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.6.1 +google-cloud-pubsub==2.7.0 avro==1.10.2 From eaf0b8b93123b30e59abdcb32d79f3ddcefcafd4 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 4 Aug 2021 10:10:45 -0600 Subject: [PATCH 0700/1197] chore: add api-pubsub to samples CODEOWNERS list (#464) Co-authored-by: Peter Lamut --- packages/google-cloud-pubsub/.github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS index a4def22dbd66..09ce82c259a3 100644 --- a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -9,4 +9,4 @@ * @googleapis/api-pubsub # The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners +/samples/ @googleapis/api-pubsub @googleapis/python-samples-owners From af103f6d0944118aa87e73ef69a8022fb5c9c8cb Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 12 Aug 2021 19:36:14 +0200 Subject: [PATCH 0701/1197] process: add 
yoshi-python group to CODEOWNERS (#466) --- packages/google-cloud-pubsub/.github/CODEOWNERS | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS index 09ce82c259a3..dfe671a9382e 100644 --- a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# The @googleapis/api-pubsub is the default owner for changes in this repo -* @googleapis/api-pubsub +# The @googleapis/api-pubsub and yoshi-python are the default owners for changes in this repo +* @googleapis/api-pubsub @googleapis/yoshi-python -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/api-pubsub @googleapis/python-samples-owners +# Additionally, the python-samples-reviewers team is also among the default owners for samples changes +/samples/ @googleapis/api-pubsub @googleapis/python-samples-owners @googleapis/yoshi-python From bbdaa72a247870fb3985a5278780de761858f92b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 13 Aug 2021 06:36:33 +0000 Subject: [PATCH 0702/1197] chore: remove string replacement in owlbot.py (#469) PiperOrigin-RevId: 389879008 Source-Link: https://github.com/googleapis/googleapis/commit/7b6a2ceed6cacf676b3918b1703b3ddd6322444c Source-Link: https://github.com/googleapis/googleapis-gen/commit/3443eecd0187f36db1324e5ab40ba9eef7b78b54 --- .../services/publisher/async_client.py | 1 + .../pubsub_v1/services/publisher/client.py | 1 + .../services/schema_service/async_client.py | 4 + .../services/schema_service/client.py | 4 + .../google/pubsub_v1/types/pubsub.py | 5 +- .../google/pubsub_v1/types/schema.py | 4 +- packages/google-cloud-pubsub/owlbot.py | 93 ------------------- 
.../scripts/fixup_pubsub_v1_keywords.py | 4 +- .../unit/gapic/pubsub_v1/test_publisher.py | 1 + 9 files changed, 18 insertions(+), 99 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 09e4b0e557fa..df436e72116d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -29,6 +29,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index cb60f281080b..bbfe870db793 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -34,6 +34,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 6ec1fe667a4d..d1f56f8d6172 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -509,6 +509,8 @@ async def 
validate_schema( Returns: google.pubsub_v1.types.ValidateSchemaResponse: Response for the ValidateSchema method. + Empty for now. + """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -572,6 +574,8 @@ async def validate_message( Returns: google.pubsub_v1.types.ValidateMessageResponse: Response for the ValidateMessage method. + Empty for now. + """ # Create or coerce a protobuf request object. request = schema.ValidateMessageRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 5701a9bb0682..985676012720 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -693,6 +693,8 @@ def validate_schema( Returns: google.pubsub_v1.types.ValidateSchemaResponse: Response for the ValidateSchema method. + Empty for now. + """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -756,6 +758,8 @@ def validate_message( Returns: google.pubsub_v1.types.ValidateMessageResponse: Response for the ValidateMessage method. + Empty for now. + """ # Create or coerce a protobuf request object. # Minor optimization to avoid making a copy if the user passes diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index dadf62c0d5bd..a62b0217b352 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -494,8 +494,9 @@ class Subscription(proto.Message): then messages are not expunged from the subscription's backlog, even if they are acknowledged, until they fall out of the ``message_retention_duration`` window. 
This must be - true if you would like to [Seek to a timestamp] - (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time). + true if you would like to [``Seek`` to a timestamp] + (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) + in the past to replay previously-acknowledged messages. message_retention_duration (google.protobuf.duration_pb2.Duration): How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index d0c96217b578..fec9f5250768 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -195,7 +195,7 @@ class ValidateSchemaRequest(proto.Message): class ValidateSchemaResponse(proto.Message): - r"""Response for the ``ValidateSchema`` method. """ + r"""Response for the ``ValidateSchema`` method. Empty for now. """ class ValidateMessageRequest(proto.Message): @@ -227,7 +227,7 @@ class ValidateMessageRequest(proto.Message): class ValidateMessageResponse(proto.Message): - r"""Response for the ``ValidateMessage`` method. """ + r"""Response for the ``ValidateMessage`` method. Empty for now. """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index d6a43e71aa02..81bcb664824a 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -299,99 +299,6 @@ ), ) - # Add development feature `message_retention_duration` from pubsub_dev branch of googleapis - # See PR https://github.com/googleapis/python-pubsub/pull/456 - count = s.replace( - library / f"google/pubsub_{library.name}/types/pubsub.py", - """satisfies_pzs \(bool\): - Reserved for future use. 
This field is set - only in responses from the server; it is ignored - if it is set in any requests.""", - """satisfies_pzs (bool): - Reserved for future use. This field is set - only in responses from the server; it is ignored - if it is set in any requests. - message_retention_duration (google.protobuf.duration_pb2.Duration): - Indicates the minimum duration to retain a message after it - is published to the topic. If this field is set, messages - published to the topic in the last - ``message_retention_duration`` are always available to - subscribers. For instance, it allows any attached - subscription to `seek to a - timestamp `__ - that is up to ``message_retention_duration`` in the past. If - this field is not set, message retention is controlled by - settings on individual subscriptions. Cannot be more than 7 - days or less than 10 minutes.""" - ) - - # Add development feature `message_retention_duration` from pubsub_dev branch of googleapis - # See PR https://github.com/googleapis/python-pubsub/pull/456 - count += s.replace( - library / f"google/pubsub_{library.name}/types/pubsub.py", - """satisfies_pzs = proto.Field\( - proto.BOOL, - number=7, - \)""", - """satisfies_pzs = proto.Field( - proto.BOOL, - number=7, - ) - message_retention_duration = proto.Field( - proto.MESSAGE, number=8, message=duration_pb2.Duration, - )""" - ) - - # Add development feature `topic_message_retention_duration` from pubsub_dev branch of googleapis - # See PR https://github.com/googleapis/python-pubsub/pull/456 - count += s.replace( - library / f"google/pubsub_{library.name}/types/pubsub.py", - """detached \(bool\): - Indicates whether the subscription is detached from its - topic. Detached subscriptions don't receive messages from - their topic and don't retain any backlog. ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. 
- If the subscription is a push subscription, pushes to the - endpoint will not be made.""", - """detached (bool): - Indicates whether the subscription is detached from its - topic. Detached subscriptions don't receive messages from - their topic and don't retain any backlog. ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. - If the subscription is a push subscription, pushes to the - endpoint will not be made. - topic_message_retention_duration (google.protobuf.duration_pb2.Duration): - Output only. Indicates the minimum duration for which a - message is retained after it is published to the - subscription's topic. If this field is set, messages - published to the subscription's topic in the last - ``topic_message_retention_duration`` are always available to - subscribers. See the ``message_retention_duration`` field in - ``Topic``. This field is set only in responses from the - server; it is ignored if it is set in any requests.""" - ) - - # Add development feature `topic_message_retention_duration` from pubsub_dev branch of googleapis - # See PR https://github.com/googleapis/python-pubsub/pull/456 - count += s.replace( - library / f"google/pubsub_{library.name}/types/pubsub.py", - """detached = proto.Field\( - proto.BOOL, - number=15, - \)""", - """detached = proto.Field( - proto.BOOL, - number=15, - ) - topic_message_retention_duration = proto.Field( - proto.MESSAGE, number=17, message=duration_pb2.Duration, - ) - """ - ) - - if count != 4: - raise Exception("Pub/Sub topic retention feature not added") - # The namespace package declaration in google/cloud/__init__.py should be excluded # from coverage. 
count = s.replace( diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 7262e021e344..da668f42f91e 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,8 +42,8 @@ class pubsubCallTransformer(cst.CSTTransformer): 'acknowledge': ('subscription', 'ack_ids', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'topic_message_retention_duration', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), 'delete_schema': ('name', ), 'delete_snapshot': ('snapshot', ), 'delete_subscription': ('subscription', ), diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 9f122309b711..db1ae85a3348 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -35,6 +35,7 @@ from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 
import service_account +from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.publisher import PublisherAsyncClient From 3418e6b9fdeb7a4b3938274a83a3a7c2258b8ef9 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Fri, 13 Aug 2021 00:58:24 -0600 Subject: [PATCH 0703/1197] fix: remove dependency on pytz (#472) For https://github.com/googleapis/python-api-core/issues/250 Fixes #468. --- .../google/cloud/pubsub_v1/subscriber/message.py | 3 +-- .../tests/unit/pubsub_v1/subscriber/test_message.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index c08e0a60563e..5f6e179015c4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -17,7 +17,6 @@ import datetime as dt import json import math -import pytz import time from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -110,7 +109,7 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): self._data = message.data self._publish_time = dt.datetime.fromtimestamp( message.publish_time.seconds + message.publish_time.nanos / 1e9, - tz=pytz.UTC, + tz=dt.timezone.utc, ) self._ordering_key = message.ordering_key self._size = message.ByteSize() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index e0c03849102c..e3c14c93ca41 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -17,7 +17,6 @@ 
import time import mock -import pytz from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber import message @@ -26,7 +25,7 @@ from google.pubsub_v1 import types as gapic_types -RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=pytz.utc) +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 PUBLISHED_MICROS = 123456 PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) From 58215a331dbb46ec416450d7bb743169888fcffc Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 Aug 2021 17:40:12 +0000 Subject: [PATCH 0704/1197] chore: release 2.7.1 (#473) :robot: I have created a release \*beep\* \*boop\* --- ### [2.7.1](https://www.github.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1) (2021-08-13) ### Bug Fixes * remove dependency on pytz ([#472](https://www.github.com/googleapis/python-pubsub/issues/472)) ([972cc16](https://www.github.com/googleapis/python-pubsub/commit/972cc163f5a1477b37a5ab7e329faf1468637fa2)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 8b7dabab165f..d89bc0039838 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### [2.7.1](https://www.github.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1) (2021-08-13) + + +### Bug Fixes + +* remove dependency on pytz ([#472](https://www.github.com/googleapis/python-pubsub/issues/472)) ([972cc16](https://www.github.com/googleapis/python-pubsub/commit/972cc163f5a1477b37a5ab7e329faf1468637fa2)) + ## [2.7.0](https://www.github.com/googleapis/python-pubsub/compare/v2.6.1...v2.7.0) (2021-07-24) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index ce5ba0e533c3..b07cad90fa97 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.7.0" +version = "2.7.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 2c352b08f3f92586c78e5b38b1ae7812c1961959 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 23 Aug 2021 11:34:23 +0200 Subject: [PATCH 0705/1197] chore(deps): update dependency google-cloud-pubsub to v2.7.1 (#475) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit [![WhiteSource Renovate](https://app.renovatebot.com/images/banner.svg)](https://renovatebot.com) This PR contains the following updates: | Package | Change | Age | Adoption | Passing | Confidence | |---|---|---|---|---|---| | [google-cloud-pubsub](https://togithub.com/googleapis/python-pubsub) | `==2.7.0` -> `==2.7.1` | 
[![age](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/age-slim)](https://docs.renovatebot.com/merge-confidence/) | [![adoption](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/adoption-slim)](https://docs.renovatebot.com/merge-confidence/) | [![passing](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/compatibility-slim/2.7.0)](https://docs.renovatebot.com/merge-confidence/) | [![confidence](https://badges.renovateapi.com/packages/pypi/google-cloud-pubsub/2.7.1/confidence-slim/2.7.0)](https://docs.renovatebot.com/merge-confidence/) | --- ### Release Notes
googleapis/python-pubsub ### [`v2.7.1`](https://togithub.com/googleapis/python-pubsub/blob/master/CHANGELOG.md#​271-httpswwwgithubcomgoogleapispython-pubsubcomparev270v271-2021-08-13) [Compare Source](https://togithub.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1)
--- ### Configuration 📅 **Schedule**: At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box. --- This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/python-pubsub). --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index a85b224e5fca..74fcb890de39 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.7.0 +google-cloud-pubsub==2.7.1 avro==1.10.2 From fb2cd19c8c5e23068c38ffcfc30f8bf23b9e914b Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 25 Aug 2021 11:06:09 +0200 Subject: [PATCH 0706/1197] samples: add type hints to all samples (#476) Closes #264. And here they are, annotated samples. We should follow up with a `pytype` CI check, I'll open a separate issue for that. **PR checklist:** - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- .../samples/snippets/iam.py | 12 +- .../samples/snippets/iam_test.py | 34 +++- .../samples/snippets/noxfile_config.py | 2 +- .../samples/snippets/publisher.py | 47 +++-- .../samples/snippets/publisher_test.py | 52 +++-- .../samples/snippets/quickstart/pub.py | 2 +- .../snippets/quickstart/quickstart_test.py | 14 +- .../samples/snippets/quickstart/sub.py | 4 +- .../samples/snippets/schema.py | 32 ++-- .../samples/snippets/schema_test.py | 67 +++++-- .../samples/snippets/subscriber.py | 82 +++++--- .../samples/snippets/subscriber_test.py | 179 +++++++++++++----- 12 files changed, 351 insertions(+), 176 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index 2cc134ca5da5..e94d1a9a176a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -24,7 +24,7 @@ import argparse -def get_topic_policy(project_id, topic_id): +def get_topic_policy(project_id: str, topic_id: str) -> None: """Prints the IAM policy for the given topic.""" # [START pubsub_get_topic_policy] from google.cloud import pubsub_v1 @@ -44,7 +44,7 @@ def get_topic_policy(project_id, topic_id): # [END pubsub_get_topic_policy] -def get_subscription_policy(project_id, subscription_id): +def get_subscription_policy(project_id: str, subscription_id: str) -> None: """Prints the IAM policy for the given subscription.""" # [START pubsub_get_subscription_policy] from google.cloud import pubsub_v1 @@ -66,7 +66,7 @@ def get_subscription_policy(project_id, subscription_id): # [END pubsub_get_subscription_policy] -def set_topic_policy(project_id, topic_id): +def set_topic_policy(project_id: str, topic_id: str) -> None: 
"""Sets the IAM policy for a topic.""" # [START pubsub_set_topic_policy] from google.cloud import pubsub_v1 @@ -95,7 +95,7 @@ def set_topic_policy(project_id, topic_id): # [END pubsub_set_topic_policy] -def set_subscription_policy(project_id, subscription_id): +def set_subscription_policy(project_id: str, subscription_id: str) -> None: """Sets the IAM policy for a topic.""" # [START pubsub_set_subscription_policy] from google.cloud import pubsub_v1 @@ -126,7 +126,7 @@ def set_subscription_policy(project_id, subscription_id): # [END pubsub_set_subscription_policy] -def check_topic_permissions(project_id, topic_id): +def check_topic_permissions(project_id: str, topic_id: str) -> None: """Checks to which permissions are available on the given topic.""" # [START pubsub_test_topic_permissions] from google.cloud import pubsub_v1 @@ -150,7 +150,7 @@ def check_topic_permissions(project_id, topic_id): # [END pubsub_test_topic_permissions] -def check_subscription_permissions(project_id, subscription_id): +def check_subscription_permissions(project_id: str, subscription_id: str) -> None: """Checks to which permissions are available on the given subscription.""" # [START pubsub_test_subscription_permissions] from google.cloud import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index a2deb208901e..fcd57781d305 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -13,8 +13,10 @@ # limitations under the License. 
import os +from typing import Generator import uuid +from _pytest.capture import CaptureFixture from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 import pytest @@ -28,12 +30,14 @@ @pytest.fixture(scope="module") -def publisher_client(): +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def topic_path(publisher_client): +def topic_path( + publisher_client: pubsub_v1.PublisherClient, +) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: @@ -50,14 +54,16 @@ def topic_path(publisher_client): @pytest.fixture(scope="module") -def subscriber_client(): +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() @pytest.fixture(scope="module") -def subscription_path(subscriber_client, topic_path): +def subscription_path( + subscriber_client: pubsub_v1.SubscriberClient, topic_path: str, +) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic_path} @@ -72,40 +78,48 @@ def subscription_path(subscriber_client, topic_path): pass -def test_get_topic_policy(topic_path, capsys): +def test_get_topic_policy(topic_path: str, capsys: CaptureFixture) -> None: iam.get_topic_policy(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert topic_path in out -def test_get_subscription_policy(subscription_path, capsys): +def test_get_subscription_policy( + subscription_path: str, capsys: CaptureFixture +) -> None: iam.get_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() assert subscription_path in out -def test_set_topic_policy(publisher_client, topic_path): +def test_set_topic_policy( + publisher_client: 
pubsub_v1.PublisherClient, topic_path: str, +) -> CaptureFixture: iam.set_topic_policy(PROJECT_ID, TOPIC_ID) policy = publisher_client.get_iam_policy(request={"resource": topic_path}) assert "roles/pubsub.publisher" in str(policy) assert "allUsers" in str(policy) -def test_set_subscription_policy(subscriber_client, subscription_path): +def test_set_subscription_policy( + subscriber_client: pubsub_v1.SubscriberClient, subscription_path: str, +) -> None: iam.set_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) policy = subscriber_client.get_iam_policy(request={"resource": subscription_path}) assert "roles/pubsub.viewer" in str(policy) assert "allUsers" in str(policy) -def test_check_topic_permissions(topic_path, capsys): +def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture) -> None: iam.check_topic_permissions(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert topic_path in out assert "pubsub.topics.publish" in out -def test_check_subscription_permissions(subscription_path, capsys): +def test_check_subscription_permissions( + subscription_path: str, capsys: CaptureFixture, +) -> None: iam.check_subscription_permissions(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() assert subscription_path in out diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py index 98c66af09ca2..32f8b4351c77 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py @@ -25,7 +25,7 @@ "ignored_versions": ["2.7"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - "enforce_type_hints": False, + "enforce_type_hints": True, # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. 
You can also use your own string diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d50c9b9db1fa..821efcbb5d5d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -22,9 +22,10 @@ """ import argparse +from typing import Callable -def list_topics(project_id): +def list_topics(project_id: str) -> None: """Lists all Pub/Sub topics in the given project.""" # [START pubsub_list_topics] from google.cloud import pubsub_v1 @@ -40,7 +41,7 @@ def list_topics(project_id): # [END pubsub_list_topics] -def create_topic(project_id, topic_id): +def create_topic(project_id: str, topic_id: str) -> None: """Create a new Pub/Sub topic.""" # [START pubsub_quickstart_create_topic] # [START pubsub_create_topic] @@ -60,7 +61,7 @@ def create_topic(project_id, topic_id): # [END pubsub_create_topic] -def delete_topic(project_id, topic_id): +def delete_topic(project_id: str, topic_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_topic] from google.cloud import pubsub_v1 @@ -78,7 +79,7 @@ def delete_topic(project_id, topic_id): # [END pubsub_delete_topic] -def publish_messages(project_id, topic_id): +def publish_messages(project_id: str, topic_id: str) -> None: """Publishes multiple messages to a Pub/Sub topic.""" # [START pubsub_quickstart_publisher] # [START pubsub_publish] @@ -106,7 +107,7 @@ def publish_messages(project_id, topic_id): # [END pubsub_publish] -def publish_messages_with_custom_attributes(project_id, topic_id): +def publish_messages_with_custom_attributes(project_id: str, topic_id: str) -> None: """Publishes multiple messages with custom attributes to a Pub/Sub topic.""" # [START pubsub_publish_custom_attributes] @@ -133,7 +134,7 @@ def publish_messages_with_custom_attributes(project_id, topic_id): # [END pubsub_publish_custom_attributes] -def 
publish_messages_with_error_handler(project_id, topic_id): +def publish_messages_with_error_handler(project_id: str, topic_id: str) -> None: # [START pubsub_publish_with_error_handler] """Publishes multiple messages to a Pub/Sub topic with an error handler.""" from concurrent import futures @@ -147,8 +148,10 @@ def publish_messages_with_error_handler(project_id, topic_id): topic_path = publisher.topic_path(project_id, topic_id) publish_futures = [] - def get_callback(publish_future, data): - def callback(publish_future): + def get_callback( + publish_future: pubsub_v1.publisher.futures.Future, data: str + ) -> Callable[[pubsub_v1.publisher.futures.Future], None]: + def callback(publish_future: pubsub_v1.publisher.futures.Future) -> None: try: # Wait 60 seconds for the publish call to succeed. print(publish_future.result(timeout=60)) @@ -172,7 +175,7 @@ def callback(publish_future): # [END pubsub_publish_with_error_handler] -def publish_messages_with_batch_settings(project_id, topic_id): +def publish_messages_with_batch_settings(project_id: str, topic_id: str) -> None: """Publishes multiple messages to a Pub/Sub topic with batch settings.""" # [START pubsub_publisher_batch_settings] from concurrent import futures @@ -194,7 +197,7 @@ def publish_messages_with_batch_settings(project_id, topic_id): publish_futures = [] # Resolve the publish future in a separate thread. 
- def callback(future): + def callback(future: pubsub_v1.publisher.futures.Future) -> None: message_id = future.result() print(message_id) @@ -213,7 +216,7 @@ def callback(future): # [END pubsub_publisher_batch_settings] -def publish_messages_with_flow_control_settings(project_id, topic_id): +def publish_messages_with_flow_control_settings(project_id: str, topic_id: str) -> None: """Publishes messages to a Pub/Sub topic with flow control settings.""" # [START pubsub_publisher_flow_control] from concurrent import futures @@ -242,7 +245,7 @@ def publish_messages_with_flow_control_settings(project_id, topic_id): publish_futures = [] # Resolve the publish future in a separate thread. - def callback(publish_future): + def callback(publish_future: pubsub_v1.publisher.futures.Future) -> None: message_id = publish_future.result() print(message_id) @@ -263,7 +266,7 @@ def callback(publish_future): # [END pubsub_publisher_flow_control] -def publish_messages_with_retry_settings(project_id, topic_id): +def publish_messages_with_retry_settings(project_id: str, topic_id: str) -> None: """Publishes messages with custom retry settings.""" # [START pubsub_publisher_retry_settings] from google import api_core @@ -305,7 +308,7 @@ def publish_messages_with_retry_settings(project_id, topic_id): # [END pubsub_publisher_retry_settings] -def publish_with_ordering_keys(project_id, topic_id): +def publish_with_ordering_keys(project_id: str, topic_id: str) -> None: """Publishes messages with ordering keys.""" # [START pubsub_publish_with_ordering_keys] from google.cloud import pubsub_v1 @@ -342,7 +345,7 @@ def publish_with_ordering_keys(project_id, topic_id): # [END pubsub_publish_with_ordering_keys] -def resume_publish_with_ordering_keys(project_id, topic_id): +def resume_publish_with_ordering_keys(project_id: str, topic_id: str) -> None: """Resume publishing messages with ordering keys when unrecoverable errors occur.""" # [START pubsub_resume_publish_with_ordering_keys] from google.cloud 
import pubsub_v1 @@ -383,7 +386,7 @@ def resume_publish_with_ordering_keys(project_id, topic_id): # [END pubsub_resume_publish_with_ordering_keys] -def detach_subscription(project_id, subscription_id): +def detach_subscription(project_id: str, subscription_id: str) -> None: """Detaches a subscription from a topic and drops all messages retained in it.""" # [START pubsub_detach_subscription] from google.api_core.exceptions import GoogleAPICallError, RetryError @@ -416,8 +419,7 @@ def detach_subscription(project_id, subscription_id): if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") @@ -440,8 +442,7 @@ def detach_subscription(project_id, subscription_id): publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", - help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_id") @@ -464,8 +465,7 @@ def detach_subscription(project_id, subscription_id): publish_with_retry_settings_parser.add_argument("topic_id") publish_with_ordering_keys_parser = subparsers.add_parser( - "publish-with-ordering-keys", - help=publish_with_ordering_keys.__doc__, + "publish-with-ordering-keys", help=publish_with_ordering_keys.__doc__, ) publish_with_ordering_keys_parser.add_argument("topic_id") @@ -476,8 +476,7 @@ def detach_subscription(project_id, subscription_id): resume_publish_with_ordering_keys_parser.add_argument("topic_id") detach_subscription_parser = subparsers.add_parser( - "detach-subscription", - help=detach_subscription.__doc__, + "detach-subscription", help=detach_subscription.__doc__, ) 
detach_subscription_parser.add_argument("subscription_id") diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 51cb20a47604..0e06d8f2afa2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -14,8 +14,10 @@ import os import time +from typing import Generator import uuid +from _pytest.capture import CaptureFixture import backoff from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 @@ -33,12 +35,12 @@ @pytest.fixture(scope="module") -def publisher_client(): +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def subscriber_client(): +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client # Close the subscriber client properly during teardown. 
@@ -46,7 +48,9 @@ def subscriber_client(): @pytest.fixture(scope="module") -def topic_path(publisher_client): +def topic_path( + publisher_client: pubsub_v1.PublisherClient, +) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: @@ -63,7 +67,9 @@ def topic_path(publisher_client): @pytest.fixture(scope="module") -def subscription_path(subscriber_client, topic_path): +def subscription_path( + subscriber_client: pubsub_v1.SubscriberClient, topic_path: str +) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic_path} @@ -78,10 +84,10 @@ def subscription_path(subscriber_client, topic_path): pass -def _make_sleep_patch(): +def _make_sleep_patch() -> None: real_sleep = time.sleep - def new_sleep(period): + def new_sleep(period: float) -> None: if period == 60: real_sleep(5) raise RuntimeError("sigil") @@ -91,7 +97,9 @@ def new_sleep(period): return mock.patch("time.sleep", new=new_sleep) -def test_create(publisher_client, capsys): +def test_create( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture +) -> None: # The scope of `topic_path` is limited to this function. topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) @@ -106,81 +114,87 @@ def test_create(publisher_client, capsys): assert f"Created topic: {topic_path}" in out -def test_list(topic_path, capsys): +def test_list(topic_path: str, capsys: CaptureFixture) -> None: publisher.list_topics(PROJECT_ID) out, _ = capsys.readouterr() assert topic_path in out -def test_publish(topic_path, capsys): +def test_publish(topic_path: str, capsys: CaptureFixture) -> None: publisher.publish_messages(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages to {topic_path}." 
in out -def test_publish_with_custom_attributes(topic_path, capsys): +def test_publish_with_custom_attributes( + topic_path: str, capsys: CaptureFixture +) -> None: publisher.publish_messages_with_custom_attributes(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with custom attributes to {topic_path}." in out -def test_publish_with_batch_settings(topic_path, capsys): +def test_publish_with_batch_settings(topic_path: str, capsys: CaptureFixture) -> None: publisher.publish_messages_with_batch_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with batch settings to {topic_path}." in out -def test_publish_with_flow_control_settings(topic_path, capsys): +def test_publish_with_flow_control_settings( + topic_path: str, capsys: CaptureFixture +) -> None: publisher.publish_messages_with_flow_control_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with flow control settings to {topic_path}." in out -def test_publish_with_retry_settings(topic_path, capsys): +def test_publish_with_retry_settings(topic_path: str, capsys: CaptureFixture) -> None: publisher.publish_messages_with_retry_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with retry settings to {topic_path}." in out -def test_publish_with_error_handler(topic_path, capsys): +def test_publish_with_error_handler(topic_path: str, capsys: CaptureFixture) -> None: publisher.publish_messages_with_error_handler(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with error handler to {topic_path}." in out -def test_publish_with_ordering_keys(topic_path, capsys): +def test_publish_with_ordering_keys(topic_path: str, capsys: CaptureFixture) -> None: publisher.publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with ordering keys to {topic_path}." 
in out -def test_resume_publish_with_error_handler(topic_path, capsys): +def test_resume_publish_with_error_handler( + topic_path: str, capsys: CaptureFixture +) -> None: publisher.resume_publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Resumed publishing messages with ordering keys to {topic_path}." in out -def test_detach_subscription(subscription_path, capsys): +def test_detach_subscription(subscription_path: str, capsys: CaptureFixture) -> None: publisher.detach_subscription(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() assert f"{subscription_path} is detached." in out -def test_delete(publisher_client): +def test_delete(publisher_client: pubsub_v1.PublisherClient) -> None: publisher.delete_topic(PROJECT_ID, TOPIC_ID) @backoff.on_exception(backoff.expo, AssertionError, max_time=MAX_TIME) - def eventually_consistent_test(): + def eventually_consistent_test() -> None: with pytest.raises(Exception): publisher_client.get_topic( request={"topic": publisher_client.topic_path(PROJECT_ID, TOPIC_ID)} diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index 1e4868cb3bd2..80bf157a36c2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -19,7 +19,7 @@ from google.cloud import pubsub_v1 -def pub(project_id, topic_id): +def pub(project_id: str, topic_id: str) -> None: """Publishes a message to a Pub/Sub topic.""" # Initialize a Publisher client. 
client = pubsub_v1.PublisherClient() diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py index bdb24a145fc1..e2a5e9844f8a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py @@ -15,8 +15,10 @@ # limitations under the License. import os +from typing import Generator import uuid +from _pytest.capture import CaptureFixture from flaky import flaky from google.api_core.exceptions import AlreadyExists from google.cloud import pubsub_v1 @@ -30,19 +32,19 @@ @pytest.fixture(scope="module") -def publisher_client(): +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def subscriber_client(): +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() @pytest.fixture(scope="module") -def topic_path(publisher_client): +def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: @@ -55,7 +57,7 @@ def topic_path(publisher_client): @pytest.fixture(scope="module") -def subscription_path(subscriber_client, topic_path): +def subscription_path(subscriber_client: pubsub_v1.SubscriberClient, topic_path: str) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) try: @@ -70,7 +72,7 @@ def subscription_path(subscriber_client, topic_path): subscriber_client.close() -def test_pub(topic_path, capsys): +def test_pub(topic_path: str, capsys: CaptureFixture) -> None: import pub pub.pub(PROJECT_ID, TOPIC_ID) @@ -81,7 +83,7 @@ def test_pub(topic_path, capsys): @flaky(max_runs=3, min_passes=1) -def 
test_sub(publisher_client, topic_path, subscription_path, capsys): +def test_sub(publisher_client: pubsub_v1.PublisherClient, topic_path: str, subscription_path: str, capsys: CaptureFixture) -> None: publisher_client.publish(topic_path, b"Hello World!") import sub diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 7a5732d20464..d3326f9802f3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -19,7 +19,7 @@ from google.cloud import pubsub_v1 -def sub(project_id, subscription_id, timeout=None): +def sub(project_id: str, subscription_id: str, timeout: float = None) -> None: """Receives messages from a Pub/Sub subscription.""" # Initialize a Subscriber client subscriber_client = pubsub_v1.SubscriberClient() @@ -27,7 +27,7 @@ def sub(project_id, subscription_id, timeout=None): # `projects/{project_id}/subscriptions/{subscription_id}` subscription_path = subscriber_client.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message}.") # Acknowledge the message. Unack'ed messages will be redelivered. 
message.ack() diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 92c56d9acc5b..bf7ae3fbe500 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -23,8 +23,10 @@ import argparse +from google.cloud import pubsub_v1 -def create_avro_schema(project_id, schema_id, avsc_file): + +def create_avro_schema(project_id: str, schema_id: str, avsc_file: str) -> None: """Create a schema resource from a JSON-formatted Avro schema file.""" # [START pubsub_create_avro_schema] from google.api_core.exceptions import AlreadyExists @@ -56,7 +58,7 @@ def create_avro_schema(project_id, schema_id, avsc_file): # [END pubsub_create_avro_schema] -def create_proto_schema(project_id, schema_id, proto_file): +def create_proto_schema(project_id: str, schema_id: str, proto_file: str) -> None: """Create a schema resource from a protobuf schema file.""" # [START pubsub_create_proto_schema] from google.api_core.exceptions import AlreadyExists @@ -90,7 +92,7 @@ def create_proto_schema(project_id, schema_id, proto_file): # [END pubsub_create_proto_schema] -def get_schema(project_id, schema_id): +def get_schema(project_id: str, schema_id: str) -> None: """Get a schema resource.""" # [START pubsub_get_schema] from google.api_core.exceptions import NotFound @@ -111,7 +113,7 @@ def get_schema(project_id, schema_id): # [END pubsub_get_schema] -def list_schemas(project_id): +def list_schemas(project_id: str) -> None: """List schema resources.""" # [START pubsub_list_schemas] from google.cloud.pubsub import SchemaServiceClient @@ -129,7 +131,7 @@ def list_schemas(project_id): # [END pubsub_list_schemas] -def delete_schema(project_id, schema_id): +def delete_schema(project_id: str, schema_id: str) -> None: """Delete a schema resource.""" # [START pubsub_delete_schema] from google.api_core.exceptions import NotFound @@ -150,7 +152,9 @@ def 
delete_schema(project_id, schema_id): # [END pubsub_delete_schema] -def create_topic_with_schema(project_id, topic_id, schema_id, message_encoding): +def create_topic_with_schema( + project_id: str, topic_id: str, schema_id: str, message_encoding: str +) -> None: """Create a topic resource with a schema.""" # [START pubsub_create_topic_with_schema] from google.api_core.exceptions import AlreadyExists, InvalidArgument @@ -193,7 +197,7 @@ def create_topic_with_schema(project_id, topic_id, schema_id, message_encoding): # [END pubsub_create_topic_with_schema] -def publish_avro_records(project_id, topic_id, avsc_file): +def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> None: """Pulbish a BINARY or JSON encoded message to a topic configured with an Avro schema.""" # [START pubsub_publish_avro_records] from avro.io import BinaryEncoder, DatumWriter @@ -246,7 +250,7 @@ def publish_avro_records(project_id, topic_id, avsc_file): # [END pubsub_publish_avro_records] -def publish_proto_messages(project_id, topic_id): +def publish_proto_messages(project_id: str, topic_id: str) -> None: """Publish a BINARY or JSON encoded message to a topic configured with a protobuf schema.""" # [START pubsub_publish_proto_messages] from google.api_core.exceptions import NotFound @@ -293,7 +297,9 @@ def publish_proto_messages(project_id, topic_id): # [END pubsub_publish_proto_messages] -def subscribe_with_avro_schema(project_id, subscription_id, avsc_file, timeout=None): +def subscribe_with_avro_schema( + project_id: str, subscription_id: str, avsc_file: str, timeout: float = None +) -> None: """Receive and decode messages sent to a topic with an Avro schema.""" # [START pubsub_subscribe_avro_records] import avro @@ -315,7 +321,7 @@ def subscribe_with_avro_schema(project_id, subscription_id, avsc_file, timeout=N avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: # 
Get the message serialization type. encoding = message.attributes.get("googclient_schemaencoding") # Deserialize the message data accordingly. @@ -348,7 +354,9 @@ def callback(message): # [END pubsub_subscribe_avro_records] -def subscribe_with_proto_schema(project_id, subscription_id, timeout): +def subscribe_with_proto_schema( + project_id: str, subscription_id: str, timeout: float +) -> None: """Receive and decode messages sent to a topic with a protobuf schema.""" # [[START pubsub_subscribe_proto_messages] from concurrent.futures import TimeoutError @@ -369,7 +377,7 @@ def subscribe_with_proto_schema(project_id, subscription_id, timeout): # Instantiate a protoc-generated class defined in `us-states.proto`. state = us_states_pb2.StateProto() - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: # Get the message serialization type. encoding = message.attributes.get("googclient_schemaencoding") # Deserialize the message data accordingly. diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index 1e0dc8f1a906..c58958672399 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -15,10 +15,13 @@ # limitations under the License. 
import os +from typing import Generator import uuid +from _pytest.capture import CaptureFixture from flaky import flaky from google.api_core.exceptions import NotFound +from google.cloud import pubsub_v1 from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient from google.pubsub_v1.types import Encoding import pytest @@ -41,13 +44,15 @@ @pytest.fixture(scope="module") -def schema_client(): +def schema_client() -> Generator[pubsub_v1.SchemaServiceClient, None, None]: schema_client = SchemaServiceClient() yield schema_client @pytest.fixture(scope="module") -def avro_schema(schema_client): +def avro_schema( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: avro_schema_path = schema_client.schema_path(PROJECT_ID, AVRO_SCHEMA_ID) yield avro_schema_path @@ -59,7 +64,9 @@ def avro_schema(schema_client): @pytest.fixture(scope="module") -def proto_schema(schema_client): +def proto_schema( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: proto_schema_path = schema_client.schema_path(PROJECT_ID, PROTO_SCHEMA_ID) yield proto_schema_path @@ -71,12 +78,14 @@ def proto_schema(schema_client): @pytest.fixture(scope="module") -def publisher_client(): +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield PublisherClient() @pytest.fixture(scope="module") -def avro_topic(publisher_client, avro_schema): +def avro_topic( + publisher_client: pubsub_v1.PublisherClient, avro_schema: str +) -> Generator[str, None, None]: from google.pubsub_v1.types import Encoding avro_topic_path = publisher_client.topic_path(PROJECT_ID, AVRO_TOPIC_ID) @@ -100,7 +109,9 @@ def avro_topic(publisher_client, avro_schema): @pytest.fixture(scope="module") -def proto_topic(publisher_client, proto_schema): +def proto_topic( + publisher_client: pubsub_v1.PublisherClient, proto_schema: str +) -> Generator[str, None, None]: proto_topic_path = publisher_client.topic_path(PROJECT_ID, PROTO_TOPIC_ID) 
try: @@ -122,14 +133,16 @@ def proto_topic(publisher_client, proto_schema): @pytest.fixture(scope="module") -def subscriber_client(): +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = SubscriberClient() yield subscriber_client subscriber_client.close() @pytest.fixture(scope="module") -def avro_subscription(subscriber_client, avro_topic): +def avro_subscription( + subscriber_client: pubsub_v1.SubscriberClient, avro_topic: str +) -> Generator[str, None, None]: avro_subscription_path = subscriber_client.subscription_path( PROJECT_ID, AVRO_SUBSCRIPTION_ID ) @@ -151,7 +164,9 @@ def avro_subscription(subscriber_client, avro_topic): @pytest.fixture(scope="module") -def proto_subscription(subscriber_client, proto_topic): +def proto_subscription( + subscriber_client: pubsub_v1.SubscriberClient, proto_topic: str +) -> Generator[str, None, None]: proto_subscription_path = subscriber_client.subscription_path( PROJECT_ID, PROTO_SUBSCRIPTION_ID ) @@ -172,7 +187,11 @@ def proto_subscription(subscriber_client, proto_topic): ) -def test_create_avro_schema(schema_client, avro_schema, capsys): +def test_create_avro_schema( + schema_client: pubsub_v1.SchemaServiceClient, + avro_schema: str, + capsys: CaptureFixture, +) -> None: try: schema_client.delete_schema(request={"name": avro_schema}) except NotFound: @@ -185,7 +204,11 @@ def test_create_avro_schema(schema_client, avro_schema, capsys): assert f"{avro_schema}" in out -def test_create_proto_schema(schema_client, proto_schema, capsys): +def test_create_proto_schema( + schema_client: pubsub_v1.SchemaServiceClient, + proto_schema: str, + capsys: CaptureFixture, +) -> None: try: schema_client.delete_schema(request={"name": proto_schema}) except NotFound: @@ -198,20 +221,20 @@ def test_create_proto_schema(schema_client, proto_schema, capsys): assert f"{proto_schema}" in out -def test_get_schema(avro_schema, capsys): +def test_get_schema(avro_schema: str, capsys: CaptureFixture) -> None: 
schema.get_schema(PROJECT_ID, AVRO_SCHEMA_ID) out, _ = capsys.readouterr() assert "Got a schema" in out assert f"{avro_schema}" in out -def test_list_schemas(capsys): +def test_list_schemas(capsys: CaptureFixture) -> None: schema.list_schemas(PROJECT_ID) out, _ = capsys.readouterr() assert "Listed schemas." in out -def test_create_topic_with_schema(avro_schema, capsys): +def test_create_topic_with_schema(avro_schema: str, capsys: CaptureFixture) -> None: schema.create_topic_with_schema(PROJECT_ID, AVRO_TOPIC_ID, AVRO_SCHEMA_ID, "BINARY") out, _ = capsys.readouterr() assert "Created a topic" in out @@ -220,14 +243,18 @@ def test_create_topic_with_schema(avro_schema, capsys): assert "BINARY" in out or "2" in out -def test_publish_avro_records(avro_schema, avro_topic, capsys): +def test_publish_avro_records( + avro_schema: str, avro_topic: str, capsys: CaptureFixture +) -> None: schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) out, _ = capsys.readouterr() assert "Preparing a binary-encoded message" in out assert "Published message ID" in out -def test_subscribe_with_avro_schema(avro_schema, avro_topic, avro_subscription, capsys): +def test_subscribe_with_avro_schema( + avro_schema: str, avro_topic: str, avro_subscription: str, capsys: CaptureFixture +) -> None: schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) schema.subscribe_with_avro_schema(PROJECT_ID, AVRO_SUBSCRIPTION_ID, AVSC_FILE, 9) @@ -235,7 +262,7 @@ def test_subscribe_with_avro_schema(avro_schema, avro_topic, avro_subscription, assert "Received a binary-encoded message:" in out -def test_publish_proto_records(proto_topic, capsys): +def test_publish_proto_records(proto_topic: str, capsys: CaptureFixture) -> None: schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) out, _ = capsys.readouterr() assert "Preparing a binary-encoded message" in out @@ -243,8 +270,8 @@ def test_publish_proto_records(proto_topic, capsys): def test_subscribe_with_proto_schema( - proto_schema, 
proto_topic, proto_subscription, capsys -): + proto_schema: str, proto_topic: str, proto_subscription: str, capsys: CaptureFixture +) -> None: schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) schema.subscribe_with_proto_schema(PROJECT_ID, PROTO_SUBSCRIPTION_ID, 9) @@ -253,7 +280,7 @@ def test_subscribe_with_proto_schema( @flaky(max_runs=3, min_passes=1) -def test_delete_schema(proto_schema, capsys): +def test_delete_schema(proto_schema: str, capsys: CaptureFixture) -> None: schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID) out, _ = capsys.readouterr() assert "Deleted a schema" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 0114142969be..0c67c95fb492 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -24,7 +24,7 @@ import argparse -def list_subscriptions_in_topic(project_id, topic_id): +def list_subscriptions_in_topic(project_id: str, topic_id: str) -> None: """Lists all subscriptions for a given topic.""" # [START pubsub_list_topic_subscriptions] from google.cloud import pubsub_v1 @@ -42,7 +42,7 @@ def list_subscriptions_in_topic(project_id, topic_id): # [END pubsub_list_topic_subscriptions] -def list_subscriptions_in_project(project_id): +def list_subscriptions_in_project(project_id: str) -> None: """Lists all subscriptions in the current project.""" # [START pubsub_list_subscriptions] from google.cloud import pubsub_v1 @@ -63,7 +63,7 @@ def list_subscriptions_in_project(project_id): # [END pubsub_list_subscriptions] -def create_subscription(project_id, topic_id, subscription_id): +def create_subscription(project_id: str, topic_id: str, subscription_id: str) -> None: """Create a new pull subscription on the given topic.""" # [START pubsub_create_pull_subscription] from google.cloud import pubsub_v1 @@ -90,8 +90,12 @@ def create_subscription(project_id, topic_id, 
subscription_id): def create_subscription_with_dead_letter_topic( - project_id, topic_id, subscription_id, dead_letter_topic_id, max_delivery_attempts=5 -): + project_id: str, + topic_id: str, + subscription_id: str, + dead_letter_topic_id: str, + max_delivery_attempts: int = 5, +) -> None: """Create a subscription with dead letter policy.""" # [START pubsub_dead_letter_create_subscription] from google.cloud import pubsub_v1 @@ -142,7 +146,9 @@ def create_subscription_with_dead_letter_topic( # [END pubsub_dead_letter_create_subscription] -def create_push_subscription(project_id, topic_id, subscription_id, endpoint): +def create_push_subscription( + project_id: str, topic_id: str, subscription_id: str, endpoint: str +) -> None: """Create a new push subscription on the given topic.""" # [START pubsub_create_push_subscription] from google.cloud import pubsub_v1 @@ -176,7 +182,9 @@ def create_push_subscription(project_id, topic_id, subscription_id, endpoint): # [END pubsub_create_push_subscription] -def create_subscription_with_ordering(project_id, topic_id, subscription_id): +def create_subscription_with_ordering( + project_id: str, topic_id: str, subscription_id: str +) -> None: """Create a subscription with dead letter policy.""" # [START pubsub_enable_subscription_ordering] from google.cloud import pubsub_v1 @@ -203,7 +211,7 @@ def create_subscription_with_ordering(project_id, topic_id, subscription_id): # [END pubsub_enable_subscription_ordering] -def delete_subscription(project_id, subscription_id): +def delete_subscription(project_id: str, subscription_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] from google.cloud import pubsub_v1 @@ -224,7 +232,9 @@ def delete_subscription(project_id, subscription_id): # [END pubsub_delete_subscription] -def update_push_subscription(project_id, topic_id, subscription_id, endpoint): +def update_push_subscription( + project_id: str, topic_id: str, subscription_id: str, endpoint: 
str +) -> None: """ Updates an existing Pub/Sub subscription's push endpoint URL. Note that certain properties of a subscription, such as @@ -263,8 +273,12 @@ def update_push_subscription(project_id, topic_id, subscription_id, endpoint): def update_subscription_with_dead_letter_policy( - project_id, topic_id, subscription_id, dead_letter_topic_id, max_delivery_attempts=5 -): + project_id: str, + topic_id: str, + subscription_id: str, + dead_letter_topic_id: str, + max_delivery_attempts: int = 5, +) -> None: """Update a subscription's dead letter policy.""" # [START pubsub_dead_letter_update_subscription] from google.cloud import pubsub_v1 @@ -322,7 +336,9 @@ def update_subscription_with_dead_letter_policy( return subscription_after_update -def remove_dead_letter_policy(project_id, topic_id, subscription_id): +def remove_dead_letter_policy( + project_id: str, topic_id: str, subscription_id: str +) -> None: """Remove dead letter policy from a subscription.""" # [START pubsub_dead_letter_remove] from google.cloud import pubsub_v1 @@ -365,7 +381,9 @@ def remove_dead_letter_policy(project_id, topic_id, subscription_id): return subscription_after_update -def receive_messages(project_id, subscription_id, timeout=None): +def receive_messages( + project_id: str, subscription_id: str, timeout: float = None +) -> None: """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] # [START pubsub_quickstart_subscriber] @@ -383,7 +401,7 @@ def receive_messages(project_id, subscription_id, timeout=None): # in the form `projects/{project_id}/subscriptions/{subscription_id}` subscription_path = subscriber.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message}.") message.ack() @@ -403,7 +421,9 @@ def callback(message): # [END pubsub_quickstart_subscriber] -def receive_messages_with_custom_attributes(project_id, subscription_id, timeout=None): 
+def receive_messages_with_custom_attributes( + project_id: str, subscription_id: str, timeout: float = None +) -> None: """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull_custom_attributes] from concurrent.futures import TimeoutError @@ -418,7 +438,7 @@ def receive_messages_with_custom_attributes(project_id, subscription_id, timeout subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message.data}.") if message.attributes: print("Attributes:") @@ -442,7 +462,9 @@ def callback(message): # [END pubsub_subscriber_async_pull_custom_attributes] -def receive_messages_with_flow_control(project_id, subscription_id, timeout=None): +def receive_messages_with_flow_control( + project_id: str, subscription_id: str, timeout: float = None +) -> None: """Receives messages from a pull subscription with flow control.""" # [START pubsub_subscriber_flow_settings] from concurrent.futures import TimeoutError @@ -457,7 +479,7 @@ def receive_messages_with_flow_control(project_id, subscription_id, timeout=None subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message.data}.") message.ack() @@ -481,7 +503,9 @@ def callback(message): # [END pubsub_subscriber_flow_settings] -def receive_messages_with_blocking_shutdown(project_id, subscription_id, timeout=5.0): +def receive_messages_with_blocking_shutdown( + project_id: str, subscription_id: str, timeout: float = 5.0 +) -> None: """Shuts down a pull subscription by awaiting message callbacks to complete.""" # [START pubsub_subscriber_blocking_shutdown] import time @@ -497,7 +521,7 @@ def receive_messages_with_blocking_shutdown(project_id, 
subscription_id, timeout subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message.data}.") time.sleep(timeout + 3.0) # Pocess longer than streaming pull future timeout. message.ack() @@ -525,7 +549,7 @@ def callback(message): # [END pubsub_subscriber_blocking_shutdown] -def synchronous_pull(project_id, subscription_id): +def synchronous_pull(project_id: str, subscription_id: str) -> None: """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] from google.api_core import retry @@ -566,7 +590,9 @@ def synchronous_pull(project_id, subscription_id): # [END pubsub_subscriber_sync_pull] -def synchronous_pull_with_lease_management(project_id, subscription_id): +def synchronous_pull_with_lease_management( + project_id: str, subscription_id: str +) -> None: """Pulling messages synchronously with lease management""" # [START pubsub_subscriber_sync_pull_with_lease] import logging @@ -638,7 +664,9 @@ def synchronous_pull_with_lease_management(project_id, subscription_id): # [END pubsub_subscriber_sync_pull_with_lease] -def listen_for_errors(project_id, subscription_id, timeout=None): +def listen_for_errors( + project_id: str, subscription_id: str, timeout: float = None +) -> None: """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] from google.cloud import pubsub_v1 @@ -652,7 +680,7 @@ def listen_for_errors(project_id, subscription_id, timeout=None): subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message}.") message.ack() @@ -674,7 +702,9 @@ def callback(message): # [END pubsub_subscriber_error_listener] -def 
receive_messages_with_delivery_attempts(project_id, subscription_id, timeout=None): +def receive_messages_with_delivery_attempts( + project_id: str, subscription_id: str, timeout: float = None +) -> None: # [START pubsub_dead_letter_delivery_attempt] from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 @@ -686,7 +716,7 @@ def receive_messages_with_delivery_attempts(project_id, subscription_id, timeout subscriber = pubsub_v1.SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - def callback(message): + def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Received {message}.") print(f"With delivery attempts: {message.delivery_attempt}.") message.ack() diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index bf26a79b905b..efe89f82555f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -16,8 +16,10 @@ import re import sys import time +from typing import Generator import uuid +from _pytest.capture import CaptureFixture import backoff from flaky import flaky from google.api_core.exceptions import InternalServerError @@ -44,12 +46,12 @@ @pytest.fixture(scope="module") -def publisher_client(): +def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def topic(publisher_client): +def topic(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC) try: @@ -63,7 +65,9 @@ def topic(publisher_client): @pytest.fixture(scope="module") -def dead_letter_topic(publisher_client): +def dead_letter_topic( + publisher_client: pubsub_v1.PublisherClient, +) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, 
DEAD_LETTER_TOPIC) try: @@ -77,14 +81,16 @@ def dead_letter_topic(publisher_client): @pytest.fixture(scope="module") -def subscriber_client(): +def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() @pytest.fixture(scope="module") -def subscription_admin(subscriber_client, topic): +def subscription_admin( + subscriber_client: pubsub_v1.SubscriberClient, topic: str +) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN ) @@ -102,7 +108,9 @@ def subscription_admin(subscriber_client, topic): @pytest.fixture(scope="module") -def subscription_sync(subscriber_client, topic): +def subscription_sync( + subscriber_client: pubsub_v1.SubscriberClient, topic: str +) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_SYNC ) @@ -119,18 +127,25 @@ def subscription_sync(subscriber_client, topic): yield subscription.name @backoff.on_exception(backoff.expo, Unknown, max_time=300) - def delete_subscription(): + def delete_subscription() -> None: try: - subscriber_client.delete_subscription(request={"subscription": subscription.name}) + subscriber_client.delete_subscription( + request={"subscription": subscription.name} + ) except NotFound: - print("When Unknown error happens, the server might have" - " successfully deleted the subscription under the cover, so" - " we ignore NotFound") + print( + "When Unknown error happens, the server might have" + " successfully deleted the subscription under the cover, so" + " we ignore NotFound" + ) + delete_subscription() @pytest.fixture(scope="module") -def subscription_async(subscriber_client, topic): +def subscription_async( + subscriber_client: pubsub_v1.SubscriberClient, topic: str +) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path( PROJECT_ID, 
SUBSCRIPTION_ASYNC ) @@ -150,7 +165,9 @@ def subscription_async(subscriber_client, topic): @pytest.fixture(scope="module") -def subscription_dlq(subscriber_client, topic, dead_letter_topic): +def subscription_dlq( + subscriber_client: pubsub_v1.SubscriberClient, topic: str, dead_letter_topic: str +) -> Generator[str, None, None]: from google.cloud.pubsub_v1.types import DeadLetterPolicy subscription_path = subscriber_client.subscription_path( @@ -176,16 +193,21 @@ def subscription_dlq(subscriber_client, topic, dead_letter_topic): subscriber_client.delete_subscription(request={"subscription": subscription.name}) -def _publish_messages(publisher_client, topic, message_num=5, **attrs): +def _publish_messages( + publisher_client: pubsub_v1.PublisherClient, + topic: str, + message_num: int = 5, + **attrs: dict, +) -> None: for n in range(message_num): data = f"message {n}".encode("utf-8") publish_future = publisher_client.publish(topic, data, **attrs) publish_future.result() -def test_list_in_topic(subscription_admin, capsys): +def test_list_in_topic(subscription_admin: str, capsys: CaptureFixture) -> None: @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): + def eventually_consistent_test() -> None: subscriber.list_subscriptions_in_topic(PROJECT_ID, TOPIC) out, _ = capsys.readouterr() assert subscription_admin in out @@ -193,9 +215,9 @@ def eventually_consistent_test(): eventually_consistent_test() -def test_list_in_project(subscription_admin, capsys): +def test_list_in_project(subscription_admin: str, capsys: CaptureFixture) -> None: @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): + def eventually_consistent_test() -> None: subscriber.list_subscriptions_in_project(PROJECT_ID) out, _ = capsys.readouterr() assert subscription_admin in out @@ -203,7 +225,11 @@ def eventually_consistent_test(): eventually_consistent_test() -def test_create(subscriber_client, 
subscription_admin, capsys): +def test_create( + subscriber_client: pubsub_v1.SubscriberClient, + subscription_admin: str, + capsys: CaptureFixture, +) -> None: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN ) @@ -222,8 +248,11 @@ def test_create(subscriber_client, subscription_admin, capsys): def test_create_subscription_with_dead_letter_policy( - subscriber_client, subscription_dlq, dead_letter_topic, capsys -): + subscriber_client: pubsub_v1.SubscriberClient, + subscription_dlq: str, + dead_letter_topic: str, + capsys: CaptureFixture, +) -> None: try: subscriber_client.delete_subscription( request={"subscription": subscription_dlq} @@ -243,16 +272,22 @@ def test_create_subscription_with_dead_letter_policy( @flaky(max_runs=3, min_passes=1) def test_receive_with_delivery_attempts( - publisher_client, topic, dead_letter_topic, subscription_dlq, capsys -): + publisher_client: pubsub_v1.PublisherClient, + topic: str, + dead_letter_topic: str, + subscription_dlq: str, + capsys: CaptureFixture, +) -> None: # The dlq subscription raises 404 before it's ready. # We keep retrying up to 10 minutes for mitigating the flakiness. @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=120) - def run_sample(): + def run_sample() -> None: _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_delivery_attempts(PROJECT_ID, SUBSCRIPTION_DLQ, 90) + subscriber.receive_messages_with_delivery_attempts( + PROJECT_ID, SUBSCRIPTION_DLQ, 90 + ) run_sample() @@ -262,12 +297,14 @@ def run_sample(): @flaky(max_runs=3, min_passes=1) -def test_update_dead_letter_policy(subscription_dlq, dead_letter_topic, capsys): +def test_update_dead_letter_policy( + subscription_dlq: str, dead_letter_topic: str, capsys: CaptureFixture +) -> None: # We saw internal server error that suggests to retry. 
@backoff.on_exception(backoff.expo, (Unknown, InternalServerError), max_time=60) - def run_sample(): + def run_sample() -> None: subscriber.update_subscription_with_dead_letter_policy( PROJECT_ID, TOPIC, @@ -285,7 +322,9 @@ def run_sample(): @flaky(max_runs=3, min_passes=1) -def test_remove_dead_letter_policy(subscription_dlq, capsys): +def test_remove_dead_letter_policy( + subscription_dlq: str, capsys: CaptureFixture +) -> None: subscription_after_update = subscriber.remove_dead_letter_policy( PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ ) @@ -296,8 +335,10 @@ def test_remove_dead_letter_policy(subscription_dlq, capsys): def test_create_subscription_with_ordering( - subscriber_client, subscription_admin, capsys -): + subscriber_client: pubsub_v1.SubscriberClient, + subscription_admin: str, + capsys: CaptureFixture, +) -> None: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN ) @@ -316,7 +357,11 @@ def test_create_subscription_with_ordering( assert "enable_message_ordering: true" in out -def test_create_push(subscriber_client, subscription_admin, capsys): +def test_create_push( + subscriber_client: pubsub_v1.SubscriberClient, + subscription_admin: str, + capsys: CaptureFixture, +) -> None: # The scope of `subscription_path` is limited to this function. 
subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN @@ -334,7 +379,7 @@ def test_create_push(subscriber_client, subscription_admin, capsys): assert f"{subscription_admin}" in out -def test_update(subscription_admin, capsys): +def test_update(subscription_admin: str, capsys: CaptureFixture) -> None: subscriber.update_push_subscription( PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT ) @@ -344,11 +389,13 @@ def test_update(subscription_admin, capsys): assert f"{subscription_admin}" in out -def test_delete(subscriber_client, subscription_admin): +def test_delete( + subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str +) -> None: subscriber.delete_subscription(PROJECT_ID, SUBSCRIPTION_ADMIN) @backoff.on_exception(backoff.expo, AssertionError, max_time=60) - def eventually_consistent_test(): + def eventually_consistent_test() -> None: with pytest.raises(Exception): subscriber_client.get_subscription( request={"subscription": subscription_admin} @@ -357,7 +404,12 @@ def eventually_consistent_test(): eventually_consistent_test() -def test_receive(publisher_client, topic, subscription_async, capsys): +def test_receive( + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_async: str, + capsys: CaptureFixture, +) -> None: _publish_messages(publisher_client, topic) subscriber.receive_messages(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) @@ -369,8 +421,11 @@ def test_receive(publisher_client, topic, subscription_async, capsys): def test_receive_with_custom_attributes( - publisher_client, topic, subscription_async, capsys -): + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_async: str, + capsys: CaptureFixture, +) -> None: _publish_messages(publisher_client, topic, origin="python-sample") @@ -385,7 +440,12 @@ def test_receive_with_custom_attributes( assert "python-sample" in out -def test_receive_with_flow_control(publisher_client, topic, subscription_async, capsys): +def 
test_receive_with_flow_control( + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_async: str, + capsys: CaptureFixture, +) -> None: _publish_messages(publisher_client, topic) @@ -398,8 +458,11 @@ def test_receive_with_flow_control(publisher_client, topic, subscription_async, def test_receive_with_blocking_shutdown( - publisher_client, topic, subscription_async, capsys -): + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_async: str, + capsys: CaptureFixture, +) -> None: _publish_messages(publisher_client, topic, message_num=3) subscriber.receive_messages_with_blocking_shutdown( @@ -410,19 +473,23 @@ def test_receive_with_blocking_shutdown( out_lines = out.splitlines() msg_received_lines = [ - i for i, line in enumerate(out_lines) + i + for i, line in enumerate(out_lines) if re.search(r".*received.*message.*", line, flags=re.IGNORECASE) ] msg_done_lines = [ - i for i, line in enumerate(out_lines) + i + for i, line in enumerate(out_lines) if re.search(r".*done processing.*message.*", line, flags=re.IGNORECASE) ] stream_canceled_lines = [ - i for i, line in enumerate(out_lines) + i + for i, line in enumerate(out_lines) if re.search(r".*streaming pull future canceled.*", line, flags=re.IGNORECASE) ] shutdown_done_waiting_lines = [ - i for i, line in enumerate(out_lines) + i + for i, line in enumerate(out_lines) if re.search(r".*done waiting.*stream shutdown.*", line, flags=re.IGNORECASE) ] @@ -444,11 +511,17 @@ def test_receive_with_blocking_shutdown( assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] except AssertionError: # pragma: NO COVER from pprint import pprint + pprint(out_lines) # To make possible flakiness debugging easier. 
raise -def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): +def test_listen_for_errors( + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_async: str, + capsys: CaptureFixture, +) -> None: _publish_messages(publisher_client, topic) @@ -459,7 +532,12 @@ def test_listen_for_errors(publisher_client, topic, subscription_async, capsys): assert "threw an exception" in out -def test_receive_synchronously(publisher_client, topic, subscription_sync, capsys): +def test_receive_synchronously( + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_sync: str, + capsys: CaptureFixture, +) -> None: _publish_messages(publisher_client, topic) subscriber.synchronous_pull(PROJECT_ID, SUBSCRIPTION_SYNC) @@ -472,10 +550,13 @@ def test_receive_synchronously(publisher_client, topic, subscription_sync, capsy @flaky(max_runs=3, min_passes=1) def test_receive_synchronously_with_lease( - publisher_client, topic, subscription_sync, capsys -): + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_sync: str, + capsys: CaptureFixture, +) -> None: @backoff.on_exception(backoff.expo, Unknown, max_time=300) - def run_sample(): + def run_sample() -> None: _publish_messages(publisher_client, topic, message_num=10) # Pausing 10s to allow the subscriber to establish the connection # because sync pull often returns fewer messages than requested. 
From 952401cd537d65e9a1a393ba7847f95dde013354 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 26 Aug 2021 19:48:23 +0200 Subject: [PATCH 0707/1197] chore: migrate default branch to main (#479) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: migrate default branch from master to main * Rename additional references from master to main * Rename blacklist to denylist * Add owlbot replacement rules to persist changes * Move loose s.move() back under the loop in owlbot.py * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../.github/sync-repo-settings.yaml | 8 +- packages/google-cloud-pubsub/.kokoro/build.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 12 +- packages/google-cloud-pubsub/UPGRADING.md | 2 +- packages/google-cloud-pubsub/docs/conf.py | 16 +-- .../google/cloud/pubsub_v1/_gapic.py | 8 +- .../cloud/pubsub_v1/publisher/client.py | 4 +- .../cloud/pubsub_v1/subscriber/client.py | 4 +- packages/google-cloud-pubsub/owlbot.py | 127 +++++++++++++----- .../tests/unit/pubsub_v1/test__gapic.py | 10 +- 11 files changed, 123 insertions(+), 72 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index c001b4152b83..2f68b5a4de07 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -1,9 +1,9 @@ -# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings -# Rules for master branch protection +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection branchProtectionRules: # Identifies the protection rule pattern. Name of the branch to be protected. 
-# Defaults to `master` -- pattern: master +# Defaults to `main` +- pattern: main requiresCodeOwnerReviews: true requiresStrictStatusChecks: true requiredStatusCheckContexts: diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index 8286412b63b9..9a48d205afed 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox python3 -m nox --version # If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then cleanup() { chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh index 311a8d54b9f1..8a324c9c7bc6 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index e2853513fb7b..d470810ea93a 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-pubsub # repository into your local repository. $ git remote add upstream git@github.com:googleapis/python-pubsub.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -110,12 +110,12 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date version of ``python-pubsub``. The the suggested remote name ``upstream`` should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + the branch should be the main branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -185,7 +185,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. 
``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-pubsub/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-pubsub/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud-pubsub @@ -210,7 +210,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-pubsub/blob/master/noxfile.py +.. _config: https://github.com/googleapis/python-pubsub/blob/main/noxfile.py We also explicitly decided to support Python 3 beginning with version 3.6. diff --git a/packages/google-cloud-pubsub/UPGRADING.md b/packages/google-cloud-pubsub/UPGRADING.md index 9ffdb5507cd9..3033e3fd4ad1 100644 --- a/packages/google-cloud-pubsub/UPGRADING.md +++ b/packages/google-cloud-pubsub/UPGRADING.md @@ -88,7 +88,7 @@ required or optional. > of these methods have in large part been preserved. Some methods have additional keyword only parameters. The available parameters depend -on the [`google.api.method_signature` annotation](https://github.com/googleapis/python-pubsub/blob/master/google/cloud/pubsub_v1/proto/pubsub.proto#L88) +on the [`google.api.method_signature` annotation](https://github.com/googleapis/python-pubsub/blob/main/google/cloud/pubsub_v1/proto/pubsub.proto#L88) specified by the API producer. diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index b1ed409cf8e5..52a464cb038b 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -76,8 +76,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = "google-cloud-pubsub" @@ -279,7 +279,7 @@ # author, documentclass [howto, manual, or own class]). 
latex_documents = [ ( - master_doc, + root_doc, "google-cloud-pubsub.tex", "google-cloud-pubsub Documentation", author, @@ -313,13 +313,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - ( - master_doc, - "google-cloud-pubsub", - "google-cloud-pubsub Documentation", - [author], - 1, - ) + (root_doc, "google-cloud-pubsub", "google-cloud-pubsub Documentation", [author], 1,) ] # If true, show URL addresses after external links. @@ -333,7 +327,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-pubsub", "google-cloud-pubsub Documentation", author, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index da755dfbca09..2a50994feae8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -17,10 +17,10 @@ import functools -def add_methods(source_class, blacklist=()): +def add_methods(source_class, denylist=()): """Add wrapped versions of the `api` member's methods to the class. - Any methods passed in `blacklist` are not added. + Any methods passed in `denylist` are not added. Additionally, any methods explicitly defined on the wrapped class are not added. """ @@ -48,8 +48,8 @@ def actual_decorator(cls): if name.startswith("_"): continue - # Ignore anything on our blacklist. - if name in blacklist: + # Ignore anything on our denylist. + if name in denylist: continue # Retrieve the attribute, and ignore it if it is not callable. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index e358326530e2..1c5381d08722 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -45,7 +45,7 @@ _LOGGER = logging.getLogger(__name__) -_BLACKLISTED_METHODS = ( +_DENYLISTED_METHODS = ( "publish", "from_service_account_file", "from_service_account_json", @@ -54,7 +54,7 @@ _raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() -@_gapic.add_methods(publisher_client.PublisherClient, blacklist=_BLACKLISTED_METHODS) +@_gapic.add_methods(publisher_client.PublisherClient, denylist=_DENYLISTED_METHODS) class Client(object): """A publisher client for Google Cloud Pub/Sub. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 567840859404..85b88006d2a4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -34,14 +34,14 @@ # a PIP package. __version__ = "0.0" -_BLACKLISTED_METHODS = ( +_DENYLISTED_METHODS = ( "publish", "from_service_account_file", "from_service_account_json", ) -@_gapic.add_methods(subscriber_client.SubscriberClient, blacklist=_BLACKLISTED_METHODS) +@_gapic.add_methods(subscriber_client.SubscriberClient, denylist=_DENYLISTED_METHODS) class Client(object): """A subscriber client for Google Cloud Pub/Sub. 
diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 81bcb664824a..a6aa450e5536 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -27,18 +27,20 @@ for library in s.get_staging_dirs(default_version): # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace(library / f"google/pubsub_{library.name}/types/*.py", - r""". + s.replace( + library / f"google/pubsub_{library.name}/types/*.py", + r""". Attributes:""", - r""".\n + r""".\n Attributes:""", ) # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace(library / f"google/pubsub_{library.name}/types/*.py", - r""". + s.replace( + library / f"google/pubsub_{library.name}/types/*.py", + r""". Attributes:""", - r""".\n + r""".\n Attributes:""", ) @@ -63,8 +65,8 @@ # Modify GRPC options in transports. count = s.replace( [ - library / f"google/pubsub_{library.name}/services/*/transports/grpc*", - library / f"tests/unit/gapic/pubsub_{library.name}/*" + library / f"google/pubsub_{library.name}/services/*/transports/grpc*", + library / f"tests/unit/gapic/pubsub_{library.name}/*", ], "options=\[.*?\]", """options=[ @@ -83,21 +85,19 @@ library / f"google/pubsub_{library.name}/services/publisher/client.py", library / f"google/pubsub_{library.name}/services/subscriber/client.py", ] - err_msg = "Expected replacements for gRPC channel to use with the emulator not made." - - count = s.replace( - clients_to_patch, - r"import os", - "import functools\n\g<0>" + err_msg = ( + "Expected replacements for gRPC channel to use with the emulator not made." 
) + count = s.replace(clients_to_patch, r"import os", "import functools\n\g<0>") + if count < len(clients_to_patch): raise Exception(err_msg) count = s.replace( clients_to_patch, f"from google\.pubsub_{library.name}\.types import pubsub", - "\g<0>\n\nimport grpc" + "\g<0>\n\nimport grpc", ) if count < len(clients_to_patch): @@ -154,7 +154,7 @@ ((?P[^\n\S]+)\#\ Wrap\ the\ RPC\ method) """, textwrap.dedent( - """ + """ \g<1> \gif request.return_immediately: \g warnings.warn( @@ -162,7 +162,8 @@ \g category=DeprecationWarning, \g ) - \g<2>"""), + \g<2>""" + ), flags=re.MULTILINE | re.DOTALL | re.VERBOSE, ) @@ -179,7 +180,7 @@ count = s.replace( library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", textwrap.dedent( - r""" + r""" ([^\n\S]+# Call the method with a truthy value for each flattened field, [^\n\S]+# using the keyword arguments to the method\.) \s+(client\.pull\(.*?\))""" @@ -188,7 +189,7 @@ with warnings.catch_warnings(): warnings.simplefilter("ignore", category=DeprecationWarning) \g<2>""", - flags = re.MULTILINE | re.DOTALL, + flags=re.MULTILINE | re.DOTALL, ) if count < 1: @@ -206,7 +207,7 @@ with warnings.catch_warnings(): warnings.simplefilter("ignore", category=DeprecationWarning) \g<2>""", - flags = re.MULTILINE | re.DOTALL, + flags=re.MULTILINE | re.DOTALL, ) if count < 1: @@ -215,15 +216,21 @@ # Make sure that client library version is present in user agent header. 
s.replace( [ - library / f"google/pubsub_{library.name}/services/publisher/async_client.py", + library + / f"google/pubsub_{library.name}/services/publisher/async_client.py", library / f"google/pubsub_{library.name}/services/publisher/client.py", - library / f"google/pubsub_{library.name}/services/publisher/transports/base.py", - library / f"google/pubsub_{library.name}/services/schema_service/async_client.py", + library + / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + library + / f"google/pubsub_{library.name}/services/schema_service/async_client.py", library / f"google/pubsub_{library.name}/services/schema_service/client.py", - library / f"google/pubsub_{library.name}/services/schema_service/transports/base.py", - library / f"google/pubsub_{library.name}/services/subscriber/async_client.py", + library + / f"google/pubsub_{library.name}/services/schema_service/transports/base.py", + library + / f"google/pubsub_{library.name}/services/subscriber/async_client.py", library / f"google/pubsub_{library.name}/services/subscriber/client.py", - library / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", + library + / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", ], r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-pubsub['"]""", "client_library_version=\g<1>'google-cloud-pubsub'", @@ -231,7 +238,11 @@ # Docstrings of *_iam_policy() methods are formatted poorly and must be fixed # in order to avoid docstring format warnings in docs. 
- s.replace(library / f"google/pubsub_{library.name}/services/*er/client.py", r"(\s+)Args:", "\n\g<1>Args:") + s.replace( + library / f"google/pubsub_{library.name}/services/*er/client.py", + r"(\s+)Args:", + "\n\g<1>Args:", + ) s.replace( library / f"google/pubsub_{library.name}/services/*er/client.py", r"(\s+)\*\*JSON Example\*\*\s+::", @@ -254,13 +265,14 @@ s.replace( library / f"google/pubsub_{library.name}/types/__init__.py", r"from \.pubsub import \(", - "from typing import Union\n\n\g<0>" + "from typing import Union\n\n\g<0>", ) s.replace( library / f"google/pubsub_{library.name}/types/__init__.py", r"__all__ = \(\n", - textwrap.dedent('''\ + textwrap.dedent( + '''\ TimeoutType = Union[ int, float, @@ -269,13 +281,14 @@ ] """The type of the timeout parameter of publisher client methods.""" - \g<0> "TimeoutType",''') + \g<0> "TimeoutType",''' + ), ) s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", r"from google.api_core import retry as retries.*\n", - "\g<0>from google.api_core import timeout as timeouts # type: ignore\n" + "\g<0>from google.api_core import timeout as timeouts # type: ignore\n", ) s.replace( @@ -293,10 +306,7 @@ s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", r"([^\S\r\n]+)timeout \(float\): (.*)\n", - ( - "\g<1>timeout (TimeoutType):\n" - "\g<1> \g<2>\n" - ), + ("\g<1>timeout (TimeoutType):\n" "\g<1> \g<2>\n"), ) # The namespace package declaration in google/cloud/__init__.py should be excluded @@ -341,4 +351,51 @@ # ---------------------------------------------------------------------------- python.py_samples() +# Remove the replacements below once +# https://github.com/googleapis/synthtool/pull/1188 is merged + +# Update googleapis/repo-automation-bots repo to main in .kokoro/*.sh files +s.replace( + ".kokoro/*.sh", + "repo-automation-bots/tree/master", + "repo-automation-bots/tree/main", +) + +# Customize CONTRIBUTING.rst to replace master with main +s.replace( + 
"CONTRIBUTING.rst", + "fetch and merge changes from upstream into master", + "fetch and merge changes from upstream into main", +) + +s.replace( + "CONTRIBUTING.rst", "git merge upstream/master", "git merge upstream/main", +) + +s.replace( + "CONTRIBUTING.rst", + """export GOOGLE_CLOUD_TESTING_BRANCH=\"master\"""", + """export GOOGLE_CLOUD_TESTING_BRANCH=\"main\"""", +) + +s.replace( + "CONTRIBUTING.rst", "remote \(``master``\)", "remote (``main``)", +) + +s.replace( + "CONTRIBUTING.rst", "blob/master/CONTRIBUTING.rst", "blob/main/CONTRIBUTING.rst", +) + +s.replace( + "CONTRIBUTING.rst", "blob/master/noxfile.py", "blob/main/noxfile.py", +) + +s.replace( + "docs/conf.py", "master_doc", "root_doc", +) + +s.replace( + "docs/conf.py", "# The master toctree document.", "# The root toctree document.", +) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py index cb63850a7af7..adff4bbfcb44 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py @@ -32,12 +32,12 @@ def class_method(cls): return "source class class method" @classmethod - def blacklisted_method(cls): # pragma: NO COVER - return "source class blacklisted method" + def denylisted_method(cls): # pragma: NO COVER + return "source class denylisted method" def test_add_method(): - @_gapic.add_methods(SourceClass, ("blacklisted_method",)) + @_gapic.add_methods(SourceClass, ("denylisted_method",)) class Foo(object): def __init__(self): self.api = SourceClass() @@ -47,9 +47,9 @@ def method(self): # pragma: NO COVER foo = Foo() - # Any method that's callable and not blacklisted is "inherited". + # Any method that's callable and not denylisted is "inherited". 
assert set(["method", "static_method", "class_method"]) <= set(dir(foo)) - assert "blacklisted_method" not in dir(foo) + assert "denylisted_method" not in dir(foo) # Source Class's static and class methods become static methods. assert type(Foo.__dict__["static_method"]) == staticmethod From b82b5c48f63d7ff9ed7c38431a521e58038d4eba Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 31 Aug 2021 17:38:44 +0200 Subject: [PATCH 0708/1197] chore(deps): update dependency pytest to v6.2.5 (#482) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index fdd38a518ef1..b1b513af71fe 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.11.1 -pytest==6.2.4 +pytest==6.2.5 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 95d2857b220e8c81bb889c1a9ddfcba744042cf3 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 2 Sep 2021 15:04:14 +0200 Subject: [PATCH 0709/1197] tests: fix flakiness in connection cleanup test (#487) --- packages/google-cloud-pubsub/tests/system.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 8ef3dca9fd23..1cf67ed96fad 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -446,7 +446,11 @@ def test_subscriber_not_leaking_open_sockets( assert len(response.received_messages) == 3 conn_count_end = len(current_process.connections()) - assert conn_count_end == conn_count_start + + # To avoid flakiness, use <= in the assertion, since on rare occasions additional + # sockets are closed, causing the == assertion to fail. 
+ # https://github.com/googleapis/python-pubsub/issues/483#issuecomment-910122086 + assert conn_count_end <= conn_count_start def test_synchronous_pull_no_deadline_error_if_no_messages( From 3c4de34bcf9d156ebb29a7eaea6a5a92b8a28984 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 2 Sep 2021 16:15:25 +0200 Subject: [PATCH 0710/1197] docs: clarify the types of Message parameters (#486) --- .../google/cloud/pubsub_v1/subscriber/message.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 5f6e179015c4..02ffd99843cd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -79,10 +79,12 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): responsibility of :class:`BasePolicy` subclasses to do so. Args: - message (~.pubsub_v1.types.PubsubMessage): The message received - from Pub/Sub. For performance reasons it should be the the raw - protobuf message wrapped by the ``PubsubMessage`` class obtained - through the message's ``.pb()`` method. + message (`pubsub_v1.types.PubsubMessage._meta._pb`): + The message received from Pub/Sub. For performance reasons it should be + the raw protobuf message normally wrapped by + :class:`~pubsub_v1.types.PubsubMessage`. A raw message can be obtained + from a :class:`~pubsub_v1.types.PubsubMessage` instance through the + latter's ``._pb`` attribute. ack_id (str): The ack_id received from Pub/Sub. 
delivery_attempt (int): The delivery attempt counter received from Pub/Sub if a DeadLetterPolicy is set on the subscription, From b569fd264019e517d4a37600b4bfa22f6673133a Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 2 Sep 2021 16:40:08 +0200 Subject: [PATCH 0711/1197] feat: closed subscriber as context manager raises (#488) Closes #484. **PR checklist:** - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- .../cloud/pubsub_v1/subscriber/client.py | 12 ++++++++++++ .../subscriber/test_subscriber_client.py | 18 ++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 85b88006d2a4..c4b229a17334 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -85,6 +85,7 @@ def __init__(self, **kwargs): # Instantiate the underlying GAPIC client. self._api = subscriber_client.SubscriberClient(**kwargs) self._target = self._api._transport._host + self._closed = False @classmethod def from_service_account_file(cls, filename, **kwargs): @@ -120,6 +121,14 @@ def api(self): """The underlying gapic API client.""" return self._api + @property + def closed(self) -> bool: + """Return whether the client has been closed and cannot be used anymore. + + .. versionadded:: 2.8.0 + """ + return self._closed + def subscribe( self, subscription, @@ -252,8 +261,11 @@ def close(self): This method is idempotent. 
""" self.api._transport.grpc_channel.close() + self._closed = True def __enter__(self): + if self._closed: + raise RuntimeError("Closed subscriber cannot be used as context manager.") return self def __exit__(self, exc_type, exc_val, exc_tb): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 7624c9212cda..601b40bcc95b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -50,6 +50,11 @@ def test_init_default_client_info(creds): assert expected_client_info in user_agent +def test_init_default_closed_state(creds): + client = subscriber.Client(credentials=creds) + assert not client.closed + + def test_init_w_custom_transport(creds): transport = SubscriberGrpcTransport(credentials=creds) client = subscriber.Client(transport=transport) @@ -185,6 +190,7 @@ def test_close(creds): client.close() patched_close.assert_called() + assert client.closed def test_closes_channel_as_context_manager(creds): @@ -198,6 +204,18 @@ def test_closes_channel_as_context_manager(creds): patched_close.assert_called() +def test_context_manager_raises_if_closed(creds): + client = subscriber.Client(credentials=creds) + + with mock.patch.object(client.api._transport.grpc_channel, "close"): + client.close() + + expetect_msg = r"(?i).*closed.*cannot.*context manager.*" + with pytest.raises(RuntimeError, match=expetect_msg): + with client: + pass + + def test_streaming_pull_gapic_monkeypatch(creds): client = subscriber.Client(credentials=creds) From 14ca47e1a79e384d7c45c9d70aa8f7e26b1a9d89 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 2 Sep 2021 11:36:10 -0400 Subject: [PATCH 0712/1197] chore: remove obsolete replacements in owlbot.py (#492) This PR also switches the post processor image from 
`gcr.io/cloud-devrel-public-resources/owlbot-python:latest` to `gcr.io/repo-automation-bots/owlbot-python:latest` as there is a bug in owlbot where it only opens PRs when the post processor image is in the project `repo-automation-bots`. I've bumped the priority of the bug to p1 here: https://github.com/googleapis/repo-automation-bots/issues/1790 --- .../.github/.OwlBot.lock.yaml | 4 +- .../google-cloud-pubsub/.github/.OwlBot.yaml | 2 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 30 ++++++++++-- packages/google-cloud-pubsub/docs/conf.py | 1 + packages/google-cloud-pubsub/noxfile.py | 12 +++-- packages/google-cloud-pubsub/owlbot.py | 47 ------------------- packages/google-cloud-pubsub/renovate.json | 5 +- .../samples/snippets/noxfile.py | 8 ++-- .../templates/install_deps.tmpl.rst | 2 +- 9 files changed, 49 insertions(+), 62 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 6bfcf3ecc6e5..08726e7819fb 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - digest: sha256:808628fb9d5c649a28355b062ee184a4689e98e8607c241461dae26e5a6df0c2 - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d + image: gcr.io/repo-automation-bots/owlbot-python:latest diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.yaml index 0bfe82f743cb..43e208e725ef 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.yaml @@ -13,7 +13,7 @@ # limitations under the License. 
docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + image: gcr.io/repo-automation-bots/owlbot-python:latest deep-remove-regex: - /owl-bot-staging diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index d470810ea93a..6970c2d59848 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -113,9 +113,9 @@ Coding Style export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date - version of ``python-pubsub``. The the suggested remote name ``upstream`` - should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``main``). + version of ``python-pubsub``. The + remote name ``upstream`` should point to the official ``googleapis`` + checkout and the branch should be the default branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -177,6 +177,30 @@ Build the docs via: $ nox -s docs +************************* +Samples and code snippets +************************* + +Code samples and snippets live in the `samples/` catalogue. Feel free to +provide more examples, but make sure to write tests for those examples. +Each folder containing example code requires its own `noxfile.py` script +which automates testing. If you decide to create a new folder, you can +base it on the `samples/snippets` folder (providing `noxfile.py` and +the requirements files). + +The tests will run against a real Google Cloud Project, so you should +configure them just like the System Tests. 
+ +- To run sample tests, you can execute:: + + # Run all tests in a folder + $ cd samples/snippets + $ nox -s py-3.8 + + # Run a single sample test + $ cd samples/snippets + $ nox -s py-3.8 -- -k + ******************************************** Note About ``README`` as it pertains to PyPI ******************************************** diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 52a464cb038b..34fa14a847ae 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -110,6 +110,7 @@ # directories to ignore when looking for source files. exclude_patterns = [ "_build", + "**/.nox/**/*", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", "samples/snippets/README.rst", diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 08c6654f4980..23c89a0ea33a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -84,9 +84,15 @@ def default(session): constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - - session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + session.install( + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", + "-c", + constraints_path, + ) session.install("-e", ".", "-c", constraints_path) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index a6aa450e5536..244643883161 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -351,51 +351,4 @@ # ---------------------------------------------------------------------------- python.py_samples() -# Remove the replacements below once -# https://github.com/googleapis/synthtool/pull/1188 is merged - -# Update googleapis/repo-automation-bots repo to main in .kokoro/*.sh files 
-s.replace( - ".kokoro/*.sh", - "repo-automation-bots/tree/master", - "repo-automation-bots/tree/main", -) - -# Customize CONTRIBUTING.rst to replace master with main -s.replace( - "CONTRIBUTING.rst", - "fetch and merge changes from upstream into master", - "fetch and merge changes from upstream into main", -) - -s.replace( - "CONTRIBUTING.rst", "git merge upstream/master", "git merge upstream/main", -) - -s.replace( - "CONTRIBUTING.rst", - """export GOOGLE_CLOUD_TESTING_BRANCH=\"master\"""", - """export GOOGLE_CLOUD_TESTING_BRANCH=\"main\"""", -) - -s.replace( - "CONTRIBUTING.rst", "remote \(``master``\)", "remote (``main``)", -) - -s.replace( - "CONTRIBUTING.rst", "blob/master/CONTRIBUTING.rst", "blob/main/CONTRIBUTING.rst", -) - -s.replace( - "CONTRIBUTING.rst", "blob/master/noxfile.py", "blob/main/noxfile.py", -) - -s.replace( - "docs/conf.py", "master_doc", "root_doc", -) - -s.replace( - "docs/conf.py", "# The master toctree document.", "# The root toctree document.", -) - s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json index c04895563e69..c21036d385e5 100644 --- a/packages/google-cloud-pubsub/renovate.json +++ b/packages/google-cloud-pubsub/renovate.json @@ -1,6 +1,9 @@ { "extends": [ - "config:base", ":preserveSemverRanges" + "config:base", + "group:all", + ":preserveSemverRanges", + ":disableDependencyDashboard" ], "ignorePaths": [".pre-commit-config.yaml"], "pip_requirements": { diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 6a8ccdae22c9..e73436a15626 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -39,7 +39,7 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. 
- 'ignored_versions': ["2.7"], + 'ignored_versions': [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them @@ -88,15 +88,15 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. -# All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = bool(os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False)) +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") # # Style Checks # diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba8c84..275d649890d7 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. 
code-block:: bash From 5a86f8edd0001d78299490694b0b691a09bf164b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Sep 2021 11:24:26 +0000 Subject: [PATCH 0713/1197] chore: release 2.8.0 (#491) :robot: I have created a release \*beep\* \*boop\* --- ## [2.8.0](https://www.github.com/googleapis/python-pubsub/compare/v2.7.1...v2.8.0) (2021-09-02) ### Features * closed subscriber as context manager raises ([#488](https://www.github.com/googleapis/python-pubsub/issues/488)) ([a05a3f2](https://www.github.com/googleapis/python-pubsub/commit/a05a3f250cf8567ffe0d2eb3ecc45856a2bcd07c)) ### Documentation * clarify the types of Message parameters ([#486](https://www.github.com/googleapis/python-pubsub/issues/486)) ([633e91b](https://www.github.com/googleapis/python-pubsub/commit/633e91bbfc0a8f4f484089acff6812b754f40c75)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index d89bc0039838..43248a98ab4b 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.8.0](https://www.github.com/googleapis/python-pubsub/compare/v2.7.1...v2.8.0) (2021-09-02) + + +### Features + +* closed subscriber as context manager raises ([#488](https://www.github.com/googleapis/python-pubsub/issues/488)) ([a05a3f2](https://www.github.com/googleapis/python-pubsub/commit/a05a3f250cf8567ffe0d2eb3ecc45856a2bcd07c)) + + +### Documentation + +* clarify the types of Message parameters ([#486](https://www.github.com/googleapis/python-pubsub/issues/486)) ([633e91b](https://www.github.com/googleapis/python-pubsub/commit/633e91bbfc0a8f4f484089acff6812b754f40c75)) + ### [2.7.1](https://www.github.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1) (2021-08-13) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b07cad90fa97..e15f06cf57a8 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.7.1" +version = "2.8.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From ed9a97d917dd9cde480662e8109ffcf3066c86fd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 6 Sep 2021 17:59:35 +0200 Subject: [PATCH 0714/1197] chore(deps): update dependency google-cloud-pubsub to v2.8.0 (#496) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 74fcb890de39..ae20e777529a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.7.1 +google-cloud-pubsub==2.8.0 avro==1.10.2 From 53ec1ff7b4a8a811ff4dafe2ae7e1538f3b1b9fc Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 7 Sep 2021 10:06:09 -0600 Subject: [PATCH 0715/1197] chore: reference main branch of google-cloud-python (#497) Adjust google-cloud-python links to reference main branch. --- packages/google-cloud-pubsub/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 945c0beb10e1..7d671c42b73c 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -20,7 +20,7 @@ independently written applications. - `Client Library Documentation`_ .. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-pubsub.svg :target: https://pypi.org/project/google-cloud-pubsub/ .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-pubsub.svg @@ -214,7 +214,7 @@ Contributions to this library are always welcome and highly encouraged. See the `CONTRIBUTING doc`_ for more information on how to get started. -.. _CONTRIBUTING doc: https://github.com/googleapis/google-cloud-python/blob/master/CONTRIBUTING.rst +.. 
_CONTRIBUTING doc: https://github.com/googleapis/google-cloud-python/blob/main/CONTRIBUTING.rst Community --------- @@ -230,4 +230,4 @@ License Apache 2.0 - See `the LICENSE`_ for more information. -.. _the LICENSE: https://github.com/googleapis/google-cloud-python/blob/master/LICENSE +.. _the LICENSE: https://github.com/googleapis/google-cloud-python/blob/main/LICENSE From d69fe5d10725e5857ac33715ee8463fe2c08e79f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 17 Sep 2021 20:20:26 +0000 Subject: [PATCH 0716/1197] chore: blacken samples noxfile template (#498) --- .../.github/.OwlBot.lock.yaml | 2 +- .../samples/snippets/noxfile.py | 44 +++++++++++-------- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 08726e7819fb..e2c23777477e 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d image: gcr.io/repo-automation-bots/owlbot-python:latest + digest: sha256:0ccd9f4d714d36e311f60f407199dd460e43a99a125b5ca64b1d75f6e5f8581b diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index e73436a15626..b008613f03ff 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -39,17 +39,15 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': [], - + "ignored_versions": [], # Old samples are opted out of enforcing Python type hints # All new samples should feature them - 'enforce_type_hints': False, - + "enforce_type_hints": False, # An envvar key for determining the project id to use. 
Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', # If you need to use a specific version of pip, # change pip_version_override to the string representation @@ -57,13 +55,13 @@ "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -78,12 +76,12 @@ def get_pytest_env_vars() -> Dict[str, str]: ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -92,11 +90,14 @@ def get_pytest_env_vars() -> Dict[str, str]: ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) -INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ("True", "true") +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) # # Style Checks # @@ -141,7 +142,7 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: - if not TEST_CONFIG['enforce_type_hints']: + if not TEST_CONFIG["enforce_type_hints"]: session.install("flake8", "flake8-import-order") else: session.install("flake8", "flake8-import-order", "flake8-annotations") @@ -150,9 +151,11 @@ def lint(session: nox.sessions.Session) -> None: args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) + + # # Black # @@ -165,6 +168,7 @@ def blacken(session: nox.sessions.Session) -> None: session.run("black", *python_files) + # # Sample Tests # @@ -173,7 +177,9 @@ def blacken(session: nox.sessions.Session) -> None: PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: if TEST_CONFIG["pip_version_override"]: pip_version = TEST_CONFIG["pip_version_override"] session.install(f"pip=={pip_version}") @@ -203,7 +209,7 @@ def _session_tests(session: nox.sessions.Session, post_install: Callable = None) # on travis where slow and flaky tests are excluded. 
# See http://doc.pytest.org/en/latest/_modules/_pytest/main.html success_codes=[0, 5], - env=get_pytest_env_vars() + env=get_pytest_env_vars(), ) @@ -213,9 +219,9 @@ def py(session: nox.sessions.Session) -> None: if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # From 7fa68e874505f083e9ec9b6eda3ab804b16f16d2 Mon Sep 17 00:00:00 2001 From: Jeffrey Rennie Date: Tue, 21 Sep 2021 12:36:26 -0700 Subject: [PATCH 0717/1197] chore: relocate owl bot post processor (#503) chore: relocate owl bot post processor --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.github/.OwlBot.yaml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index e2c23777477e..2567653c000d 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:0ccd9f4d714d36e311f60f407199dd460e43a99a125b5ca64b1d75f6e5f8581b + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.yaml index 43e208e725ef..0bfe82f743cb 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.yaml @@ -13,7 +13,7 @@ # limitations under the License. 
docker: - image: gcr.io/repo-automation-bots/owlbot-python:latest + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest deep-remove-regex: - /owl-bot-staging From 0afe235cdf827752ab17cc97fc8566fe49a819c7 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 24 Sep 2021 08:50:44 +0200 Subject: [PATCH 0718/1197] process: set publisher methods' max retry to 600s (#495) --- .../services/publisher/transports/base.py | 18 +++++++++--------- packages/google-cloud-pubsub/owlbot.py | 11 +++++++++++ 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 6e9f8cea85f0..af526eea712b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -169,7 +169,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -183,7 +183,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -203,7 +203,7 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -219,7 +219,7 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -235,7 +235,7 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), - deadline=60.0, + 
deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -251,7 +251,7 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -267,7 +267,7 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -281,7 +281,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, @@ -295,7 +295,7 @@ def _prep_wrapped_messages(self, client_info): predicate=retries.if_exception_type( core_exceptions.ServiceUnavailable, ), - deadline=60.0, + deadline=600.0, ), default_timeout=60.0, client_info=client_info, diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 244643883161..60bb319416d9 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -309,6 +309,17 @@ ("\g<1>timeout (TimeoutType):\n" "\g<1> \g<2>\n"), ) + # Override the default max retry deadline for publisher methods. + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + r"deadline=60\.0", + "deadline=600.0", + ) + if count < 9: + raise Exception( + "Default retry deadline not overriden for all publisher methods." + ) + # The namespace package declaration in google/cloud/__init__.py should be excluded # from coverage. count = s.replace( From 451ef97dce394baa0365d8b84d33c07d4bc3f717 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 24 Sep 2021 07:18:28 +0000 Subject: [PATCH 0719/1197] chore: use gapic-generator-python 0.51.2 (#505) - [ ] Regenerate this pull request now. 
fix: add 'dict' annotation type to 'request' Committer: @busunkim96 PiperOrigin-RevId: 398509016 Source-Link: https://github.com/googleapis/googleapis/commit/b224dfa52642a733ea64849d4e06d15c274bc08f Source-Link: https://github.com/googleapis/googleapis-gen/commit/63a1db7a38d74b9639592f521ed1daaf7299ad9a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjNhMWRiN2EzOGQ3NGI5NjM5NTkyZjUyMWVkMWRhYWY3Mjk5YWQ5YSJ9 --- .../pubsub_v1/services/publisher/client.py | 38 +++++----- .../services/publisher/transports/base.py | 2 +- .../services/publisher/transports/grpc.py | 6 +- .../publisher/transports/grpc_asyncio.py | 6 +- .../services/schema_service/client.py | 26 +++---- .../schema_service/transports/base.py | 2 +- .../schema_service/transports/grpc.py | 6 +- .../schema_service/transports/grpc_asyncio.py | 6 +- .../pubsub_v1/services/subscriber/client.py | 72 ++++++++----------- .../services/subscriber/transports/base.py | 2 +- .../services/subscriber/transports/grpc.py | 6 +- .../subscriber/transports/grpc_asyncio.py | 6 +- .../scripts/fixup_pubsub_v1_keywords.py | 64 ++++++++--------- 13 files changed, 116 insertions(+), 126 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index bbfe870db793..af0f1d8fecb6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -18,7 +18,7 @@ import functools import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -407,7 +407,7 @@ def __init__( def create_topic( self, - request: pubsub.Topic = None, + request: Union[pubsub.Topic, dict] = None, *, name: str = None, retry: retries.Retry = 
gapic_v1.method.DEFAULT, @@ -420,7 +420,7 @@ def create_topic( Args: - request (google.pubsub_v1.types.Topic): + request (Union[google.pubsub_v1.types.Topic, dict]): The request object. A topic resource. name (str): Required. The name of the topic. It must have the format @@ -485,7 +485,7 @@ def create_topic( def update_topic( self, - request: pubsub.UpdateTopicRequest = None, + request: Union[pubsub.UpdateTopicRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, @@ -496,7 +496,7 @@ def update_topic( Args: - request (google.pubsub_v1.types.UpdateTopicRequest): + request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): The request object. Request for the UpdateTopic method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -537,7 +537,7 @@ def update_topic( def publish( self, - request: pubsub.PublishRequest = None, + request: Union[pubsub.PublishRequest, dict] = None, *, topic: str = None, messages: Sequence[pubsub.PubsubMessage] = None, @@ -550,7 +550,7 @@ def publish( Args: - request (google.pubsub_v1.types.PublishRequest): + request (Union[google.pubsub_v1.types.PublishRequest, dict]): The request object. Request for the Publish method. topic (str): Required. The messages in the request will be published @@ -617,7 +617,7 @@ def publish( def get_topic( self, - request: pubsub.GetTopicRequest = None, + request: Union[pubsub.GetTopicRequest, dict] = None, *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -628,7 +628,7 @@ def get_topic( Args: - request (google.pubsub_v1.types.GetTopicRequest): + request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): The request object. Request for the GetTopic method. topic (str): Required. The name of the topic to get. 
Format is @@ -687,7 +687,7 @@ def get_topic( def list_topics( self, - request: pubsub.ListTopicsRequest = None, + request: Union[pubsub.ListTopicsRequest, dict] = None, *, project: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -698,7 +698,7 @@ def list_topics( Args: - request (google.pubsub_v1.types.ListTopicsRequest): + request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): The request object. Request for the `ListTopics` method. project (str): Required. The name of the project in which to list @@ -767,7 +767,7 @@ def list_topics( def list_topic_subscriptions( self, - request: pubsub.ListTopicSubscriptionsRequest = None, + request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -779,7 +779,7 @@ def list_topic_subscriptions( Args: - request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): + request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): The request object. Request for the `ListTopicSubscriptions` method. topic (str): @@ -850,7 +850,7 @@ def list_topic_subscriptions( def list_topic_snapshots( self, - request: pubsub.ListTopicSnapshotsRequest = None, + request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -866,7 +866,7 @@ def list_topic_snapshots( Args: - request (google.pubsub_v1.types.ListTopicSnapshotsRequest): + request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): The request object. Request for the `ListTopicSnapshots` method. 
topic (str): @@ -937,7 +937,7 @@ def list_topic_snapshots( def delete_topic( self, - request: pubsub.DeleteTopicRequest = None, + request: Union[pubsub.DeleteTopicRequest, dict] = None, *, topic: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -953,7 +953,7 @@ def delete_topic( Args: - request (google.pubsub_v1.types.DeleteTopicRequest): + request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): The request object. Request for the `DeleteTopic` method. topic (str): @@ -1008,7 +1008,7 @@ def delete_topic( def detach_subscription( self, - request: pubsub.DetachSubscriptionRequest = None, + request: Union[pubsub.DetachSubscriptionRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, @@ -1022,7 +1022,7 @@ def detach_subscription( Args: - request (google.pubsub_v1.types.DetachSubscriptionRequest): + request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): The request object. Request for the DetachSubscription method. retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index af526eea712b..a19d43568577 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -123,7 +123,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index d6c127d102f2..1dca72721a5c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -84,16 +84,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index d98b8fc1c735..38b81b4e2a97 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -131,16 +131,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 985676012720..7bd0bd30a4b0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -17,7 +17,7 @@ from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib # type: ignore @@ -350,7 +350,7 @@ def __init__( def create_schema( self, - request: gp_schema.CreateSchemaRequest = None, + request: Union[gp_schema.CreateSchemaRequest, dict] = None, *, parent: str = None, schema: gp_schema.Schema = None, @@ -362,7 +362,7 @@ def create_schema( r"""Creates a schema. Args: - request (google.pubsub_v1.types.CreateSchemaRequest): + request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): The request object. Request for the CreateSchema method. parent (str): Required. The name of the project in which to create the @@ -445,7 +445,7 @@ def create_schema( def get_schema( self, - request: schema.GetSchemaRequest = None, + request: Union[schema.GetSchemaRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -455,7 +455,7 @@ def get_schema( r"""Gets a schema. Args: - request (google.pubsub_v1.types.GetSchemaRequest): + request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): The request object. Request for the GetSchema method. name (str): Required. The name of the schema to get. 
Format is @@ -513,7 +513,7 @@ def get_schema( def list_schemas( self, - request: schema.ListSchemasRequest = None, + request: Union[schema.ListSchemasRequest, dict] = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -523,7 +523,7 @@ def list_schemas( r"""Lists schemas in a project. Args: - request (google.pubsub_v1.types.ListSchemasRequest): + request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): The request object. Request for the `ListSchemas` method. parent (str): @@ -592,7 +592,7 @@ def list_schemas( def delete_schema( self, - request: schema.DeleteSchemaRequest = None, + request: Union[schema.DeleteSchemaRequest, dict] = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -602,7 +602,7 @@ def delete_schema( r"""Deletes a schema. Args: - request (google.pubsub_v1.types.DeleteSchemaRequest): + request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): The request object. Request for the `DeleteSchema` method. name (str): @@ -656,7 +656,7 @@ def delete_schema( def validate_schema( self, - request: gp_schema.ValidateSchemaRequest = None, + request: Union[gp_schema.ValidateSchemaRequest, dict] = None, *, parent: str = None, schema: gp_schema.Schema = None, @@ -667,7 +667,7 @@ def validate_schema( r"""Validates a schema. Args: - request (google.pubsub_v1.types.ValidateSchemaRequest): + request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): The request object. Request for the `ValidateSchema` method. parent (str): @@ -737,7 +737,7 @@ def validate_schema( def validate_message( self, - request: schema.ValidateMessageRequest = None, + request: Union[schema.ValidateMessageRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -746,7 +746,7 @@ def validate_message( r"""Validates a message against a schema. 
Args: - request (google.pubsub_v1.types.ValidateMessageRequest): + request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): The request object. Request for the `ValidateMessage` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 1044166cce19..e37b928814ab 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -124,7 +124,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index c99633ca25ef..a58a4df328e0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -84,16 +84,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. 
A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 0f4c93370688..41baacfa7894 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -131,16 +131,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. 
It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 7f3c3a9dc280..540b4b8aea48 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -18,17 +18,7 @@ import functools import os import re -from typing import ( - Callable, - Dict, - Optional, - Iterable, - Iterator, - Sequence, - Tuple, - Type, - Union, -) +from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union import warnings import pkg_resources @@ -419,7 +409,7 @@ def __init__( def create_subscription( self, - request: pubsub.Subscription = None, + request: Union[pubsub.Subscription, dict] = None, *, name: str = None, topic: str = None, @@ -445,7 +435,7 @@ def create_subscription( Args: - request (google.pubsub_v1.types.Subscription): + request (Union[google.pubsub_v1.types.Subscription, dict]): The request object. A subscription resource. name (str): Required. The name of the subscription. 
It must have the @@ -564,7 +554,7 @@ def create_subscription( def get_subscription( self, - request: pubsub.GetSubscriptionRequest = None, + request: Union[pubsub.GetSubscriptionRequest, dict] = None, *, subscription: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -575,7 +565,7 @@ def get_subscription( Args: - request (google.pubsub_v1.types.GetSubscriptionRequest): + request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): The request object. Request for the GetSubscription method. subscription (str): @@ -636,7 +626,7 @@ def get_subscription( def update_subscription( self, - request: pubsub.UpdateSubscriptionRequest = None, + request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -648,7 +638,7 @@ def update_subscription( Args: - request (google.pubsub_v1.types.UpdateSubscriptionRequest): + request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): The request object. Request for the UpdateSubscription method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -689,7 +679,7 @@ def update_subscription( def list_subscriptions( self, - request: pubsub.ListSubscriptionsRequest = None, + request: Union[pubsub.ListSubscriptionsRequest, dict] = None, *, project: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -700,7 +690,7 @@ def list_subscriptions( Args: - request (google.pubsub_v1.types.ListSubscriptionsRequest): + request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): The request object. Request for the `ListSubscriptions` method. 
project (str): @@ -769,7 +759,7 @@ def list_subscriptions( def delete_subscription( self, - request: pubsub.DeleteSubscriptionRequest = None, + request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, *, subscription: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -785,7 +775,7 @@ def delete_subscription( Args: - request (google.pubsub_v1.types.DeleteSubscriptionRequest): + request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): The request object. Request for the DeleteSubscription method. subscription (str): @@ -841,7 +831,7 @@ def delete_subscription( def modify_ack_deadline( self, - request: pubsub.ModifyAckDeadlineRequest = None, + request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, *, subscription: str = None, ack_ids: Sequence[str] = None, @@ -859,7 +849,7 @@ def modify_ack_deadline( Args: - request (google.pubsub_v1.types.ModifyAckDeadlineRequest): + request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): The request object. Request for the ModifyAckDeadline method. subscription (str): @@ -940,7 +930,7 @@ def modify_ack_deadline( def acknowledge( self, - request: pubsub.AcknowledgeRequest = None, + request: Union[pubsub.AcknowledgeRequest, dict] = None, *, subscription: str = None, ack_ids: Sequence[str] = None, @@ -959,7 +949,7 @@ def acknowledge( Args: - request (google.pubsub_v1.types.AcknowledgeRequest): + request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): The request object. Request for the Acknowledge method. subscription (str): Required. The subscription whose message is being @@ -1025,7 +1015,7 @@ def acknowledge( def pull( self, - request: pubsub.PullRequest = None, + request: Union[pubsub.PullRequest, dict] = None, *, subscription: str = None, return_immediately: bool = None, @@ -1040,7 +1030,7 @@ def pull( Args: - request (google.pubsub_v1.types.PullRequest): + request (Union[google.pubsub_v1.types.PullRequest, dict]): The request object. 
Request for the `Pull` method. subscription (str): Required. The subscription from which messages should be @@ -1188,7 +1178,7 @@ def streaming_pull( def modify_push_config( self, - request: pubsub.ModifyPushConfigRequest = None, + request: Union[pubsub.ModifyPushConfigRequest, dict] = None, *, subscription: str = None, push_config: pubsub.PushConfig = None, @@ -1206,7 +1196,7 @@ def modify_push_config( Args: - request (google.pubsub_v1.types.ModifyPushConfigRequest): + request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): The request object. Request for the ModifyPushConfig method. subscription (str): @@ -1276,7 +1266,7 @@ def modify_push_config( def get_snapshot( self, - request: pubsub.GetSnapshotRequest = None, + request: Union[pubsub.GetSnapshotRequest, dict] = None, *, snapshot: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1293,7 +1283,7 @@ def get_snapshot( Args: - request (google.pubsub_v1.types.GetSnapshotRequest): + request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): The request object. Request for the GetSnapshot method. snapshot (str): Required. The name of the snapshot to get. Format is @@ -1357,7 +1347,7 @@ def get_snapshot( def list_snapshots( self, - request: pubsub.ListSnapshotsRequest = None, + request: Union[pubsub.ListSnapshotsRequest, dict] = None, *, project: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1372,7 +1362,7 @@ def list_snapshots( Args: - request (google.pubsub_v1.types.ListSnapshotsRequest): + request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): The request object. Request for the `ListSnapshots` method. 
project (str): @@ -1441,7 +1431,7 @@ def list_snapshots( def create_snapshot( self, - request: pubsub.CreateSnapshotRequest = None, + request: Union[pubsub.CreateSnapshotRequest, dict] = None, *, name: str = None, subscription: str = None, @@ -1471,7 +1461,7 @@ def create_snapshot( Args: - request (google.pubsub_v1.types.CreateSnapshotRequest): + request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): The request object. Request for the `CreateSnapshot` method. name (str): @@ -1558,7 +1548,7 @@ def create_snapshot( def update_snapshot( self, - request: pubsub.UpdateSnapshotRequest = None, + request: Union[pubsub.UpdateSnapshotRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1574,7 +1564,7 @@ def update_snapshot( Args: - request (google.pubsub_v1.types.UpdateSnapshotRequest): + request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): The request object. Request for the UpdateSnapshot method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1621,7 +1611,7 @@ def update_snapshot( def delete_snapshot( self, - request: pubsub.DeleteSnapshotRequest = None, + request: Union[pubsub.DeleteSnapshotRequest, dict] = None, *, snapshot: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, @@ -1641,7 +1631,7 @@ def delete_snapshot( Args: - request (google.pubsub_v1.types.DeleteSnapshotRequest): + request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): The request object. Request for the `DeleteSnapshot` method. snapshot (str): @@ -1695,7 +1685,7 @@ def delete_snapshot( def seek( self, - request: pubsub.SeekRequest = None, + request: Union[pubsub.SeekRequest, dict] = None, *, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, @@ -1713,7 +1703,7 @@ def seek( Args: - request (google.pubsub_v1.types.SeekRequest): + request (Union[google.pubsub_v1.types.SeekRequest, dict]): The request object. Request for the `Seek` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 7100e5849f52..96322b3a4519 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -123,7 +123,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index b116b018c85e..604277eab8a0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -86,16 +86,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. 
It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 91d5577903a4..1b1704e1772c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -133,16 +133,16 @@ def __init__( api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. + ``client_cert_source`` or application default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if ``channel`` is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is + both in PEM format. It is used to configure a mutual TLS channel. 
It is ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index da668f42f91e..ab7623917b6f 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -39,37 +39,37 @@ def partition( class pubsubCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge': ('subscription', 'ack_ids', ), - 'create_schema': ('parent', 'schema', 'schema_id', ), - 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'topic_message_retention_duration', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), - 'delete_schema': ('name', ), - 'delete_snapshot': ('snapshot', ), - 'delete_subscription': ('subscription', ), - 'delete_topic': ('topic', ), - 'detach_subscription': ('subscription', ), - 'get_schema': ('name', 'view', ), - 'get_snapshot': ('snapshot', ), - 'get_subscription': ('subscription', ), - 'get_topic': ('topic', ), - 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), - 'list_snapshots': ('project', 'page_size', 'page_token', ), - 'list_subscriptions': ('project', 'page_size', 'page_token', ), - 'list_topics': ('project', 'page_size', 'page_token', ), - 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), - 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), - 
'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), - 'modify_push_config': ('subscription', 'push_config', ), - 'publish': ('topic', 'messages', ), - 'pull': ('subscription', 'max_messages', 'return_immediately', ), - 'seek': ('subscription', 'time', 'snapshot', ), - 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), - 'update_snapshot': ('snapshot', 'update_mask', ), - 'update_subscription': ('subscription', 'update_mask', ), - 'update_topic': ('topic', 'update_mask', ), - 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), - 'validate_schema': ('parent', 'schema', ), + 'acknowledge': ('subscription', 'ack_ids', ), + 'create_schema': ('parent', 'schema', 'schema_id', ), + 'create_snapshot': ('name', 'subscription', 'labels', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'topic_message_retention_duration', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), + 'delete_schema': ('name', ), + 'delete_snapshot': ('snapshot', ), + 'delete_subscription': ('subscription', ), + 'delete_topic': ('topic', ), + 'detach_subscription': ('subscription', ), + 'get_schema': ('name', 'view', ), + 'get_snapshot': ('snapshot', ), + 'get_subscription': ('subscription', ), + 'get_topic': ('topic', ), + 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), + 'list_snapshots': ('project', 'page_size', 'page_token', ), + 'list_subscriptions': ('project', 'page_size', 'page_token', ), + 'list_topics': ('project', 'page_size', 'page_token', ), + 'list_topic_snapshots': ('topic', 
'page_size', 'page_token', ), + 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), + 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), + 'modify_push_config': ('subscription', 'push_config', ), + 'publish': ('topic', 'messages', ), + 'pull': ('subscription', 'max_messages', 'return_immediately', ), + 'seek': ('subscription', 'time', 'snapshot', ), + 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), + 'update_snapshot': ('snapshot', 'update_mask', ), + 'update_subscription': ('subscription', 'update_mask', ), + 'update_topic': ('topic', 'update_mask', ), + 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), + 'validate_schema': ('parent', 'schema', ), 'get_iam_policy': ('resource', 'options', ), 'set_iam_policy': ('resource', 'policy', ), 'test_iam_permissions': ('resource', 'permissions', ), @@ -91,7 +91,7 @@ def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: return updated kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, + lambda a: a.keyword.value not in self.CTRL_PARAMS, kwargs ) From 6e1dbb84998c91f017d6ece68aab2e4fd7e0a6cb Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 27 Sep 2021 16:44:17 +0200 Subject: [PATCH 0720/1197] process: add type annotations check to CI runs (#499) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add pytype nox session * Disable false pytype positives in types.py * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.gitignore | 1 + .../google/cloud/pubsub_v1/types.py | 8 ++- packages/google-cloud-pubsub/noxfile.py | 12 ++++ packages/google-cloud-pubsub/owlbot.py | 56 +++++++++++++++++++ 
packages/google-cloud-pubsub/setup.cfg | 14 +++++ 5 files changed, 88 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore index b4243ced74e4..99c3a1444ed2 100644 --- a/packages/google-cloud-pubsub/.gitignore +++ b/packages/google-cloud-pubsub/.gitignore @@ -29,6 +29,7 @@ pip-log.txt .nox .cache .pytest_cache +.pytype # Mac diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 5fc7dd581ce6..f8aa532a0e81 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -45,7 +45,7 @@ BatchSettings = collections.namedtuple( "BatchSettings", ["max_bytes", "max_latency", "max_messages"] ) -BatchSettings.__new__.__defaults__ = ( +BatchSettings.__new__.__defaults__ = ( # pytype: disable=attribute-error 1 * 1000 * 1000, # max_bytes: 1 MB 0.01, # max_latency: 10 ms 100, # max_messages: 100 @@ -78,11 +78,13 @@ class LimitExceededBehavior(str, enum.Enum): PublishFlowControl = collections.namedtuple( "PublishFlowControl", ["message_limit", "byte_limit", "limit_exceeded_behavior"] ) +# pytype: disable=attribute-error PublishFlowControl.__new__.__defaults__ = ( 10 * BatchSettings.__new__.__defaults__[2], # message limit 10 * BatchSettings.__new__.__defaults__[0], # byte limit LimitExceededBehavior.IGNORE, # desired behavior ) +# pytype: enable=attribute-error PublishFlowControl.__doc__ = "The client flow control settings for message publishing." PublishFlowControl.message_limit.__doc__ = ( "The maximum number of messages awaiting to be published." 
@@ -101,7 +103,7 @@ class LimitExceededBehavior(str, enum.Enum): PublisherOptions = collections.namedtuple( "PublisherOptions", ["enable_message_ordering", "flow_control", "retry", "timeout"] ) -PublisherOptions.__new__.__defaults__ = ( +PublisherOptions.__new__.__defaults__ = ( # pytype: disable=attribute-error False, # enable_message_ordering: False PublishFlowControl(), # default flow control settings gapic_v1.method.DEFAULT, # use default api_core value for retry @@ -138,7 +140,7 @@ class LimitExceededBehavior(str, enum.Enum): "max_duration_per_lease_extension", ], ) -FlowControl.__new__.__defaults__ = ( +FlowControl.__new__.__defaults__ = ( # pytype: disable=attribute-error 100 * 1024 * 1024, # max_bytes: 100mb 1000, # max_messages: 1000 1 * 60 * 60, # max_lease_duration: 1 hour. diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 23c89a0ea33a..d36e59abf318 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -27,6 +27,9 @@ BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +PYTYPE_VERSION = "pytype==2021.4.9" + + DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @@ -41,6 +44,7 @@ "lint", "lint_setup_py", "blacken", + "pytype", "docs", ] @@ -48,6 +52,14 @@ nox.options.error_on_missing_interpreters = True +@nox.session(python=DEFAULT_PYTHON_VERSION) +def pytype(session): + """Run type checks.""" + session.install("-e", ".[all]") + session.install(PYTYPE_VERSION) + session.run("pytype") + + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. 
diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 60bb319416d9..ac57d3394292 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -362,4 +362,60 @@ # ---------------------------------------------------------------------------- python.py_samples() +# ---------------------------------------------------------------------------- +# pytype-related changes +# ---------------------------------------------------------------------------- + +# Add .pytype to .gitignore +s.replace(".gitignore", r"\.pytest_cache", "\g<0>\n.pytype") + +# Add pytype config to setup.cfg +s.replace( + "setup.cfg", + r"universal = 1", + textwrap.dedent( + """ \g<0> + + [pytype] + python_version = 3.8 + inputs = + google/cloud/ + exclude = + tests/ + google/pubsub_v1/ # generated code + output = .pytype/ + disable = + # There's some issue with finding some pyi files, thus disabling. + # The issue https://github.com/google/pytype/issues/150 is closed, but the + # error still occurs for some reason. + pyi-error""" + ), +) + +# Add pytype session to noxfile.py +s.replace( + "noxfile.py", + r"BLACK_PATHS = \[.*?\]", + '\g<0>\n\nPYTYPE_VERSION = "pytype==2021.4.9"\n', +) +s.replace( + "noxfile.py", r'"blacken",', '\g<0>\n "pytype",', +) +s.replace( + "noxfile.py", + r"nox\.options\.error_on_missing_interpreters = True", + textwrap.dedent( + ''' \g<0> + + + @nox.session(python=DEFAULT_PYTHON_VERSION) + def pytype(session): + """Run type checks.""" + session.install("-e", ".[all]") + session.install(PYTYPE_VERSION) + session.run("pytype")''' + ), +) + + s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg index c3a2b39f6528..a79cb6a60387 100644 --- a/packages/google-cloud-pubsub/setup.cfg +++ b/packages/google-cloud-pubsub/setup.cfg @@ -17,3 +17,17 @@ # Generated by synthtool. DO NOT EDIT! 
[bdist_wheel] universal = 1 + +[pytype] +python_version = 3.8 +inputs = + google/cloud/ +exclude = + tests/ + google/pubsub_v1/ # generated code +output = .pytype/ +disable = + # There's some issue with finding some pyi files, thus disabling. + # The issue https://github.com/google/pytype/issues/150 is closed, but the + # error still occurs for some reason. + pyi-error From dac5ce1484318ee2de74fd41b4e8be57e0d27e28 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 27 Sep 2021 17:20:28 -0400 Subject: [PATCH 0721/1197] chore: exclude 'CODEOWNERS' from templated files (#508) See: https://github.com/googleapis/synthtool/pull/1201 --- packages/google-cloud-pubsub/owlbot.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index ac57d3394292..0d870e0d7a55 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -355,7 +355,7 @@ cov_level=100, system_test_external_dependencies=["psutil"], ) -s.move(templated_files, excludes=[".coveragerc"]) +s.move(templated_files, excludes=[".coveragerc", ".github/CODEOWNERS"]) # ---------------------------------------------------------------------------- # Samples templates From a56d00416e7ce4f005108ae9403b7883d5539381 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 10:30:10 +0000 Subject: [PATCH 0722/1197] chore: use gapic-generator-python 0.52.0 (#510) - [ ] Regenerate this pull request now. 
fix: improper types in pagers generation PiperOrigin-RevId: 399773015 Source-Link: https://github.com/googleapis/googleapis/commit/410c184536a22fadaf00aec3cab04102e34d2322 Source-Link: https://github.com/googleapis/googleapis-gen/commit/290e883545e3ac9ff2bd00cd0dacb28f1b8ca945 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjkwZTg4MzU0NWUzYWM5ZmYyYmQwMGNkMGRhY2IyOGYxYjhjYTk0NSJ9 --- .../pubsub_v1/services/publisher/pagers.py | 28 +++++++++---------- .../services/schema_service/pagers.py | 12 ++++---- .../pubsub_v1/services/subscriber/pagers.py | 20 ++++++------- 3 files changed, 30 insertions(+), 30 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index 1a826de78236..50096517d3ab 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.pubsub_v1.types import pubsub @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[pubsub.ListTopicsResponse]: + def pages(self) -> Iterator[pubsub.ListTopicsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[pubsub.Topic]: + def __iter__(self) -> Iterator[pubsub.Topic]: for page in self.pages: yield from page.topics @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[pubsub.ListTopicsResponse]: + async def pages(self) -> 
AsyncIterator[pubsub.ListTopicsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[pubsub.Topic]: + def __aiter__(self) -> AsyncIterator[pubsub.Topic]: async def async_generator(): async for page in self.pages: for response in page.topics: @@ -202,14 +202,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[pubsub.ListTopicSubscriptionsResponse]: + def pages(self) -> Iterator[pubsub.ListTopicSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[str]: + def __iter__(self) -> Iterator[str]: for page in self.pages: yield from page.subscriptions @@ -264,14 +264,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[pubsub.ListTopicSubscriptionsResponse]: + async def pages(self) -> AsyncIterator[pubsub.ListTopicSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[str]: + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: for response in page.subscriptions: @@ -330,14 +330,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[pubsub.ListTopicSnapshotsResponse]: + def pages(self) -> Iterator[pubsub.ListTopicSnapshotsResponse]: yield self._response while 
self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[str]: + def __iter__(self) -> Iterator[str]: for page in self.pages: yield from page.snapshots @@ -392,14 +392,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[pubsub.ListTopicSnapshotsResponse]: + async def pages(self) -> AsyncIterator[pubsub.ListTopicSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[str]: + def __aiter__(self) -> AsyncIterator[str]: async def async_generator(): async for page in self.pages: for response in page.snapshots: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index f0248bf5ed9a..43d520a2686d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.pubsub_v1.types import schema @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[schema.ListSchemasResponse]: + def pages(self) -> Iterator[schema.ListSchemasResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def 
__iter__(self) -> Iterable[schema.Schema]: + def __iter__(self) -> Iterator[schema.Schema]: for page in self.pages: yield from page.schemas @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[schema.ListSchemasResponse]: + async def pages(self) -> AsyncIterator[schema.ListSchemasResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[schema.Schema]: + def __aiter__(self) -> AsyncIterator[schema.Schema]: async def async_generator(): async for page in self.pages: for response in page.schemas: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index 49568852261e..ffd17c840cac 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.pubsub_v1.types import pubsub @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[pubsub.ListSubscriptionsResponse]: + def pages(self) -> Iterator[pubsub.ListSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[pubsub.Subscription]: + def __iter__(self) -> Iterator[pubsub.Subscription]: for page in self.pages: yield from page.subscriptions @@ -136,14 
+136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[pubsub.ListSubscriptionsResponse]: + async def pages(self) -> AsyncIterator[pubsub.ListSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[pubsub.Subscription]: + def __aiter__(self) -> AsyncIterator[pubsub.Subscription]: async def async_generator(): async for page in self.pages: for response in page.subscriptions: @@ -202,14 +202,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[pubsub.ListSnapshotsResponse]: + def pages(self) -> Iterator[pubsub.ListSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[pubsub.Snapshot]: + def __iter__(self) -> Iterator[pubsub.Snapshot]: for page in self.pages: yield from page.snapshots @@ -264,14 +264,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - async def pages(self) -> AsyncIterable[pubsub.ListSnapshotsResponse]: + async def pages(self) -> AsyncIterator[pubsub.ListSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response - def __aiter__(self) -> AsyncIterable[pubsub.Snapshot]: + def __aiter__(self) -> AsyncIterator[pubsub.Snapshot]: async def async_generator(): async for page in self.pages: for response in page.snapshots: From e8b5887b62419fb3159cd40e7695435c0b5e2dfb Mon 
Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Sep 2021 18:34:12 +0000 Subject: [PATCH 0723/1197] chore: fail samples nox session if python version is missing (#511) --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/samples/snippets/noxfile.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 2567653c000d..ae6c57fad807 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:87eee22d276554e4e52863ec9b1cb6a7245815dfae20439712bf644348215a5a + digest: sha256:82b12321da4446a73cb11bcb6812fbec8c105abda3946d46e6394e5fbfb64c0f diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index b008613f03ff..1fd8956fbf01 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -98,6 +98,10 @@ def get_pytest_env_vars() -> Dict[str, str]: "True", "true", ) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + # # Style Checks # From 9cae2e6e3614d26a0131aa5586946a4f7842abd4 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Fri, 1 Oct 2021 21:22:50 +0200 Subject: [PATCH 0724/1197] docs: add type annotations to codebase (#509) Closes #500. **PR checklist:** - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) --- .../google/cloud/pubsub_v1/_gapic.py | 9 +- .../google/cloud/pubsub_v1/futures.py | 14 +- .../cloud/pubsub_v1/publisher/_batch/base.py | 50 +++-- .../pubsub_v1/publisher/_batch/thread.py | 114 +++++----- .../pubsub_v1/publisher/_sequencer/base.py | 25 ++- .../publisher/_sequencer/ordered_sequencer.py | 60 +++--- .../_sequencer/unordered_sequencer.py | 47 ++-- .../cloud/pubsub_v1/publisher/client.py | 114 ++++++---- .../cloud/pubsub_v1/publisher/exceptions.py | 2 +- .../pubsub_v1/publisher/flow_controller.py | 44 ++-- .../cloud/pubsub_v1/publisher/futures.py | 12 +- .../subscriber/_protocol/dispatcher.py | 60 ++++-- .../subscriber/_protocol/heartbeater.py | 14 +- .../subscriber/_protocol/helper_threads.py | 53 +++-- .../subscriber/_protocol/histogram.py | 49 +++-- .../pubsub_v1/subscriber/_protocol/leaser.py | 41 ++-- .../subscriber/_protocol/messages_on_hold.py | 49 +++-- .../subscriber/_protocol/requests.py | 42 ++-- .../_protocol/streaming_pull_manager.py | 184 ++++++++-------- .../cloud/pubsub_v1/subscriber/client.py | 63 +++--- .../cloud/pubsub_v1/subscriber/futures.py | 19 +- .../cloud/pubsub_v1/subscriber/message.py | 109 ++++++---- .../cloud/pubsub_v1/subscriber/scheduler.py | 64 +++--- .../google/cloud/pubsub_v1/types.py | 203 +++++++++--------- 24 files changed, 805 insertions(+), 636 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py index 2a50994feae8..e25c1dc6c1cb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py @@ -15,9 +15,12 @@ from __future__ import absolute_import import functools +from typing import Callable, 
Container, Type -def add_methods(source_class, denylist=()): +def add_methods( + source_class: Type, denylist: Container[str] = () +) -> Callable[[Type], Type]: """Add wrapped versions of the `api` member's methods to the class. Any methods passed in `denylist` are not added. @@ -25,7 +28,7 @@ def add_methods(source_class, denylist=()): not added. """ - def wrap(wrapped_fx, lookup_fx): + def wrap(wrapped_fx: Callable, lookup_fx: Callable): """Wrap a GAPIC method; preserve its name and docstring.""" # If this is a static or class method, then we do *not* # send self as the first argument. @@ -40,7 +43,7 @@ def wrap(wrapped_fx, lookup_fx): fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa return functools.wraps(wrapped_fx)(fx) - def actual_decorator(cls): + def actual_decorator(cls: Type) -> Type: # Reflectively iterate over most of the methods on the source class # (the GAPIC) and make wrapped versions available on this client. for name in dir(source_class): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index 4dc72fdaac3d..d8acc8ea5c40 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -15,6 +15,7 @@ from __future__ import absolute_import import concurrent.futures +from typing import Any, NoReturn import google.api_core.future @@ -29,19 +30,16 @@ class Future(concurrent.futures.Future, google.api_core.future.Future): methods in this library. """ - def running(self): - """Return ``True`` if the associated Pub/Sub action has not yet completed. 
- - Returns: bool: - """ + def running(self) -> bool: + """Return ``True`` if the associated Pub/Sub action has not yet completed.""" return not self.done() - def set_running_or_notify_cancel(self): + def set_running_or_notify_cancel(self) -> NoReturn: raise NotImplementedError( "Only used by executors from `concurrent.futures` package." ) - def set_result(self, result): + def set_result(self, result: Any): """Set the return value of work associated with the future. Do not use this method, it should only be used internally by the library and its @@ -49,7 +47,7 @@ def set_result(self, result): """ return super().set_result(result=result) - def set_exception(self, exception): + def set_exception(self, exception: Exception): """Set the result of the future as being the given exception. Do not use this method, it should only be used internally by the library and its diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 812e0e0e216d..f3202836084e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -16,6 +16,14 @@ import abc import enum +import typing +from typing import Optional, Sequence + + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1 import types + from google.pubsub_v1 import types as gapic_types class Batch(metaclass=abc.ABCMeta): @@ -50,7 +58,7 @@ def __len__(self): @staticmethod @abc.abstractmethod - def make_lock(): # pragma: NO COVER + def make_lock() -> None: # pragma: NO COVER """Return a lock in the chosen concurrency model. 
Returns: @@ -60,17 +68,17 @@ def make_lock(): # pragma: NO COVER @property @abc.abstractmethod - def messages(self): # pragma: NO COVER + def messages(self) -> Sequence["gapic_types.PubsubMessage"]: # pragma: NO COVER """Return the messages currently in the batch. Returns: - Sequence: The messages currently in the batch. + The messages currently in the batch. """ raise NotImplementedError @property @abc.abstractmethod - def size(self): # pragma: NO COVER + def size(self) -> int: # pragma: NO COVER """Return the total size of all of the messages currently in the batch. The size includes any overhead of the actual ``PublishRequest`` that is @@ -84,42 +92,45 @@ def size(self): # pragma: NO COVER @property @abc.abstractmethod - def settings(self): # pragma: NO COVER + def settings(self) -> "types.BatchSettings": # pragma: NO COVER """Return the batch settings. Returns: - ~.pubsub_v1.types.BatchSettings: The batch settings. These are - considered immutable once the batch has been opened. + The batch settings. These are considered immutable once the batch has + been opened. """ raise NotImplementedError @property @abc.abstractmethod - def status(self): # pragma: NO COVER + def status(self) -> "BatchStatus": # pragma: NO COVER """Return the status of this batch. Returns: - str: The status of this batch. All statuses are human-readable, - all-lowercase strings. The ones represented in the - :class:`BaseBatch.Status` enum are special, but other statuses - are permitted. + The status of this batch. All statuses are human-readable, all-lowercase + strings. The ones represented in the :class:`BaseBatch.Status` enum are + special, but other statuses are permitted. """ raise NotImplementedError - def cancel(self, cancellation_reason): # pragma: NO COVER + def cancel( + self, cancellation_reason: "BatchCancellationReason" + ) -> None: # pragma: NO COVER """Complete pending futures with an exception. 
This method must be called before publishing starts (ie: while the batch is still accepting messages.) Args: - cancellation_reason (BatchCancellationReason): The reason why this - batch has been cancelled. + cancellation_reason: + The reason why this batch has been cancelled. """ raise NotImplementedError @abc.abstractmethod - def publish(self, message): # pragma: NO COVER + def publish( + self, message: "gapic_types.PubsubMessage" + ) -> Optional["pubsub_v1.publisher.futures.Future"]: # pragma: NO COVER """Publish a single message. Add the given message to this object; this will cause it to be @@ -129,11 +140,12 @@ def publish(self, message): # pragma: NO COVER This method is called by :meth:`~.PublisherClient.publish`. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message: The Pub/Sub message. Returns: - ~google.api_core.future.Future: An object conforming to the - :class:`concurrent.futures.Future` interface. + An object conforming to the :class:`concurrent.futures.Future` interface. + If :data:`None` is returned, that signals that the batch cannot + accept a message. 
""" raise NotImplementedError diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index e59dff00e4fd..d68d00a0eb12 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -17,6 +17,8 @@ import logging import threading import time +import typing +from typing import Any, Callable, Optional, Sequence import google.api_core.exceptions from google.api_core import gapic_v1 @@ -25,6 +27,12 @@ from google.cloud.pubsub_v1.publisher._batch import base from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from google import api_core + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1 import types + from google.cloud.pubsub_v1 import PublisherClient + _LOGGER = logging.getLogger(__name__) _CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) @@ -56,36 +64,36 @@ class Batch(base.Batch): implementation details. Args: - client (~.pubsub_v1.PublisherClient): The publisher client used to - create this batch. - topic (str): The topic. The format for this is - ``projects/{project}/topics/{topic}``. - settings (~.pubsub_v1.types.BatchSettings): The settings for batch - publishing. These should be considered immutable once the batch - has been opened. - batch_done_callback (Callable[[bool], Any]): Callback called when the - response for a batch publish has been received. Called with one - boolean argument: successfully published or a permanent error - occurred. Temporary errors are not surfaced because they are retried + client: + The publisher client used to create this batch. + topic: + The topic. The format for this is ``projects/{project}/topics/{topic}``. + settings: + The settings for batch publishing. 
These should be considered immutable + once the batch has been opened. + batch_done_callback: + Callback called when the response for a batch publish has been received. + Called with one boolean argument: successfully published or a permanent + error occurred. Temporary errors are not surfaced because they are retried at a lower level. - commit_when_full (bool): Whether to commit the batch when the batch - is full. - commit_retry (Optional[google.api_core.retry.Retry]): Designation of what - errors, if any, should be retried when commiting the batch. If not - provided, a default retry is used. - commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): - The timeout to apply when commiting the batch. If not provided, a - default timeout is used. + commit_when_full: + Whether to commit the batch when the batch is full. + commit_retry: + Designation of what errors, if any, should be retried when commiting + the batch. If not provided, a default retry is used. + commit_timeout: + The timeout to apply when commiting the batch. If not provided, a default + timeout is used. """ def __init__( self, - client, - topic, - settings, - batch_done_callback=None, - commit_when_full=True, - commit_retry=gapic_v1.method.DEFAULT, + client: "PublisherClient", + topic: str, + settings: "types.BatchSettings", + batch_done_callback: Callable[[bool], Any] = None, + commit_when_full: bool = True, + commit_retry: "api_core.retry.Retry" = gapic_v1.method.DEFAULT, commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, ): self._client = client @@ -113,66 +121,65 @@ def __init__( self._commit_timeout = commit_timeout @staticmethod - def make_lock(): + def make_lock() -> threading.Lock: """Return a threading lock. Returns: - _thread.Lock: A newly created lock. + A newly created lock. 
""" return threading.Lock() @property - def client(self): - """~.pubsub_v1.client.PublisherClient: A publisher client.""" + def client(self) -> "PublisherClient": + """A publisher client.""" return self._client @property - def messages(self): - """Sequence: The messages currently in the batch.""" + def messages(self) -> Sequence[gapic_types.PubsubMessage]: + """The messages currently in the batch.""" return self._messages @property - def settings(self): + def settings(self) -> "types.BatchSettings": """Return the batch settings. Returns: - ~.pubsub_v1.types.BatchSettings: The batch settings. These are - considered immutable once the batch has been opened. + The batch settings. These are considered immutable once the batch has + been opened. """ return self._settings @property - def size(self): + def size(self) -> int: """Return the total size of all of the messages currently in the batch. The size includes any overhead of the actual ``PublishRequest`` that is sent to the backend. Returns: - int: The total size of all of the messages currently - in the batch (including the request overhead), in bytes. + The total size of all of the messages currently in the batch (including + the request overhead), in bytes. """ return self._size @property - def status(self): + def status(self) -> base.BatchStatus: """Return the status of this batch. Returns: - str: The status of this batch. All statuses are human-readable, - all-lowercase strings. + The status of this batch. All statuses are human-readable, all-lowercase + strings. """ return self._status - def cancel(self, cancellation_reason): + def cancel(self, cancellation_reason: base.BatchCancellationReason) -> None: """Complete pending futures with an exception. This method must be called before publishing starts (ie: while the batch is still accepting messages.) Args: - cancellation_reason (BatchCancellationReason): The reason why this - batch has been cancelled. + The reason why this batch has been cancelled. 
""" with self._state_lock: @@ -185,7 +192,7 @@ def cancel(self, cancellation_reason): future.set_exception(exc) self._status = base.BatchStatus.ERROR - def commit(self): + def commit(self) -> None: """Actually publish all of the messages on the active batch. .. note:: @@ -210,7 +217,7 @@ def commit(self): self._start_commit_thread() - def _start_commit_thread(self): + def _start_commit_thread(self) -> None: """Start a new thread to actually handle the commit.""" # NOTE: If the thread is *not* a daemon, a memory leak exists due to a CPython issue. # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 @@ -220,7 +227,7 @@ def _start_commit_thread(self): ) commit_thread.start() - def _commit(self): + def _commit(self) -> None: """Actually publish all of the messages on the active batch. This moves the batch out from being the active batch to an in progress @@ -320,7 +327,9 @@ def _commit(self): if self._batch_done_callback is not None: self._batch_done_callback(batch_transport_succeeded) - def publish(self, message): + def publish( + self, message: gapic_types.PubsubMessage + ) -> Optional["pubsub_v1.publisher.futures.Future"]: """Publish a single message. Add the given message to this object; this will cause it to be @@ -331,13 +340,12 @@ def publish(self, message): This method is called by :meth:`~.PublisherClient.publish`. Args: - message (~.pubsub_v1.types.PubsubMessage): The Pub/Sub message. + message: The Pub/Sub message. Returns: - Optional[~google.api_core.future.Future]: An object conforming to - the :class:`~concurrent.futures.Future` interface or :data:`None`. - If :data:`None` is returned, that signals that the batch cannot - accept a message. + An object conforming to the :class:`~concurrent.futures.Future` interface + or :data:`None`. If :data:`None` is returned, that signals that the batch + cannot accept a message. 
Raises: pubsub_v1.publisher.exceptions.MessageTooLargeError: If publishing @@ -398,5 +406,5 @@ def publish(self, message): return future - def _set_status(self, status): + def _set_status(self, status: base.BatchStatus): self._status = status diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index 60a7d269ce4f..49bdcb7409a6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -15,10 +15,15 @@ from __future__ import absolute_import import abc +import typing from google.api_core import gapic_v1 from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from concurrent import futures + from google.api_core import retry + class Sequencer(metaclass=abc.ABCMeta): """The base class for sequencers for Pub/Sub publishing. A sequencer @@ -27,7 +32,7 @@ class Sequencer(metaclass=abc.ABCMeta): @staticmethod @abc.abstractmethod - def is_finished(self): # pragma: NO COVER + def is_finished(self) -> bool: # pragma: NO COVER """ Whether the sequencer is finished and should be cleaned up. Returns: @@ -37,7 +42,7 @@ def is_finished(self): # pragma: NO COVER @staticmethod @abc.abstractmethod - def unpause(self, message): # pragma: NO COVER + def unpause(self) -> None: # pragma: NO COVER """ Unpauses this sequencer. Raises: @@ -50,24 +55,24 @@ def unpause(self, message): # pragma: NO COVER @abc.abstractmethod def publish( self, - message, - retry=None, + message: gapic_types.PubsubMessage, + retry: "retry.Retry" = None, timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - ): # pragma: NO COVER + ) -> "futures.Future": # pragma: NO COVER """ Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): + message: The Pub/Sub message. 
- retry (Optional[google.api_core.retry.Retry]): + retry: The retry settings to apply when publishing the message. - timeout (:class:`~.pubsub_v1.types.TimeoutType`): + timeout: The timeout to apply when publishing the message. Returns: A class instance that conforms to Python Standard library's - :class:`~concurrent.futures.Future` interface (but not an - instance of that class). The future might return immediately with a + :class:`~concurrent.futures.Future` interface. The future might return + immediately with a `pubsub_v1.publisher.exceptions.PublishToPausedOrderingKeyException` if the ordering key is paused. Otherwise, the future tracks the lifetime of the message publish. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index 83dd0c921268..106c4da99197 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -16,6 +16,8 @@ import collections import concurrent.futures as futures import threading +import typing +from typing import Iterable, Sequence from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher import exceptions @@ -23,6 +25,11 @@ from google.cloud.pubsub_v1.publisher._batch import base as batch_base from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.api_core import retry + from google.cloud.pubsub_v1 import PublisherClient + from google.cloud.pubsub_v1.publisher import _batch + class _OrderedSequencerStatus(str, enum.Enum): """An enum-like class representing valid statuses for an OrderedSequencer. @@ -77,14 +84,15 @@ class OrderedSequencer(sequencer_base.Sequencer): Public methods are thread-safe. 
Args: - client (~.pubsub_v1.PublisherClient): The publisher client used to - create this sequencer. - topic (str): The topic. The format for this is - ``projects/{project}/topics/{topic}``. - ordering_key (str): The ordering key for this sequencer. + client: + The publisher client used to create this sequencer. + topic: + The topic. The format for this is ``projects/{project}/topics/{topic}``. + ordering_key: + The ordering key for this sequencer. """ - def __init__(self, client, topic, ordering_key): + def __init__(self, client: "PublisherClient", topic: str, ordering_key: str): self._client = client self._topic = topic self._ordering_key = ordering_key @@ -97,16 +105,16 @@ def __init__(self, client, topic, ordering_key): # See _OrderedSequencerStatus for valid state transitions. self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES - def is_finished(self): + def is_finished(self) -> bool: """ Whether the sequencer is finished and should be cleaned up. Returns: - bool: Whether the sequencer is finished and should be cleaned up. + Whether the sequencer is finished and should be cleaned up. """ with self._state_lock: return self._state == _OrderedSequencerStatus.FINISHED - def stop(self): + def stop(self) -> None: """ Permanently stop this sequencer. This differs from pausing, which may be resumed. Immediately commits @@ -133,7 +141,7 @@ def stop(self): batch = self._ordered_batches.pop() batch.cancel(batch_base.BatchCancellationReason.CLIENT_STOPPED) - def commit(self): + def commit(self) -> None: """ Commit the first batch, if unpaused. If paused or no batches exist, this method does nothing. @@ -151,7 +159,7 @@ def commit(self): # operation is idempotent. self._ordered_batches[0].commit() - def _batch_done_callback(self, success): + def _batch_done_callback(self, success: bool) -> None: """ Deal with completion of a batch. 
Called when a batch has finished publishing, with either a success @@ -199,7 +207,7 @@ def _batch_done_callback(self, success): if ensure_cleanup_and_commit_timer_runs: self._client.ensure_cleanup_and_commit_timer_runs() - def _pause(self): + def _pause(self) -> None: """ Pause this sequencer: set state to paused, cancel all batches, and clear the list of ordered batches. @@ -215,7 +223,7 @@ def _pause(self): ) self._ordered_batches.clear() - def unpause(self): + def unpause(self) -> None: """ Unpause this sequencer. Raises: @@ -229,16 +237,16 @@ def unpause(self): def _create_batch( self, - commit_retry=gapic_v1.method.DEFAULT, + commit_retry: "retry.Retry" = gapic_v1.method.DEFAULT, commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - ): + ) -> "_batch.thread.Batch": """ Create a new batch using the client's batch class and other stored settings. Args: - commit_retry (Optional[google.api_core.retry.Retry]): + commit_retry: The retry settings to apply when publishing the batch. - commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): + commit_timeout: The timeout to apply when publishing the batch. """ return self._client._batch_class( @@ -253,18 +261,18 @@ def _create_batch( def publish( self, - message, - retry=gapic_v1.method.DEFAULT, + message: gapic_types.PubsubMessage, + retry: "retry.Retry" = gapic_v1.method.DEFAULT, timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - ): + ) -> futures.Future: """ Publish message for this ordering key. Args: - message (~.pubsub_v1.types.PubsubMessage): + message: The Pub/Sub message. - retry (Optional[google.api_core.retry.Retry]): + retry: The retry settings to apply when publishing the message. - timeout (:class:`~.pubsub_v1.types.TimeoutType`): + timeout: The timeout to apply when publishing the message. Returns: @@ -317,13 +325,13 @@ def publish( return future # Used only for testing. 
- def _set_batch(self, batch): + def _set_batch(self, batch: "_batch.thread.Batch") -> None: self._ordered_batches = collections.deque([batch]) # Used only for testing. - def _set_batches(self, batches): + def _set_batches(self, batches: Iterable["_batch.thread.Batch"]) -> None: self._ordered_batches = collections.deque(batches) # Used only for testing. - def _get_batches(self): + def _get_batches(self) -> Sequence["_batch.thread.Batch"]: return self._ordered_batches diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index 76dd1cad72b9..91d47b948241 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -12,11 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. +import typing + from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher._sequencer import base from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from concurrent import futures + from google.api_core import retry + from google.cloud.pubsub_v1 import PublisherClient + from google.cloud.pubsub_v1.publisher import _batch + class UnorderedSequencer(base.Sequencer): """ Sequences messages into batches for one topic without any ordering. @@ -24,17 +32,17 @@ class UnorderedSequencer(base.Sequencer): Public methods are NOT thread-safe. """ - def __init__(self, client, topic): + def __init__(self, client: "PublisherClient", topic: str): self._client = client self._topic = topic self._current_batch = None self._stopped = False - def is_finished(self): + def is_finished(self) -> bool: """ Whether the sequencer is finished and should be cleaned up. 
Returns: - bool: Whether the sequencer is finished and should be cleaned up. + Whether the sequencer is finished and should be cleaned up. """ # TODO: Implement. Not implementing yet because of possible performance # impact due to extra locking required. This does mean that @@ -42,7 +50,7 @@ def is_finished(self): # previously existing behavior. return False - def stop(self): + def stop(self) -> None: """ Stop the sequencer. Subsequent publishes will fail. @@ -56,7 +64,7 @@ def stop(self): self.commit() self._stopped = True - def commit(self): + def commit(self) -> None: """ Commit the batch. Raises: @@ -74,22 +82,22 @@ def commit(self): # batch. self._current_batch = None - def unpause(self): + def unpause(self) -> typing.NoReturn: """ Not relevant for this class. """ raise NotImplementedError def _create_batch( self, - commit_retry=gapic_v1.method.DEFAULT, + commit_retry: "retry.Retry" = gapic_v1.method.DEFAULT, commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - ): + ) -> "_batch.thread.Batch": """ Create a new batch using the client's batch class and other stored settings. Args: - commit_retry (Optional[google.api_core.retry.Retry]): + commit_retry: The retry settings to apply when publishing the batch. - commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): + commit_timeout: The timeout to apply when publishing the batch. """ return self._client._batch_class( @@ -104,24 +112,23 @@ def _create_batch( def publish( self, - message, - retry=gapic_v1.method.DEFAULT, + message: gapic_types.PubsubMessage, + retry: "retry.Retry" = gapic_v1.method.DEFAULT, timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - ): + ) -> "futures.Future": """ Batch message into existing or new batch. Args: - message (~.pubsub_v1.types.PubsubMessage): + message: The Pub/Sub message. - retry (Optional[google.api_core.retry.Retry]): + retry: The retry settings to apply when publishing the message. 
- timeout (:class:`~.pubsub_v1.types.TimeoutType`): + timeout: The timeout to apply when publishing the message. Returns: - ~google.api_core.future.Future: An object conforming to - the :class:`~concurrent.futures.Future` interface. The future tracks - the publishing status of the message. + An object conforming to the :class:`~concurrent.futures.Future` interface. + The future tracks the publishing status of the message. Raises: RuntimeError: @@ -151,5 +158,5 @@ def publish( return future # Used only for testing. - def _set_batch(self, batch): + def _set_batch(self, batch: "_batch.thread.Batch") -> None: self._current_batch = batch diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 1c5381d08722..7e7c01c19772 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -20,6 +20,8 @@ import pkg_resources import threading import time +import typing +from typing import Any, Sequence, Type, Union from google.api_core import gapic_v1 from google.auth.credentials import AnonymousCredentials @@ -43,6 +45,13 @@ # PIP package. __version__ = "0.0" +if typing.TYPE_CHECKING: # pragma: NO COVER + from google import api_core + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.publisher._sequencer.base import Sequencer + from google.cloud.pubsub_v1.publisher import _batch + + _LOGGER = logging.getLogger(__name__) _DENYLISTED_METHODS = ( @@ -63,13 +72,14 @@ class Client(object): get sensible defaults. Args: - batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The - settings for batch publishing. - publisher_options (~google.cloud.pubsub_v1.types.PublisherOptions): The - options for the publisher client. Note that enabling message ordering will - override the publish retry timeout to be infinite. 
- kwargs (dict): Any additional arguments provided are sent as keyword - arguments to the underlying + batch_settings: + The settings for batch publishing. + publisher_options: + The options for the publisher client. Note that enabling message ordering + will override the publish retry timeout to be infinite. + kwargs: + Any additional arguments provided are sent as keyword arguments to the + underlying :class:`~google.cloud.pubsub_v1.gapic.publisher_client.PublisherClient`. Generally you should not need to set additional keyword arguments. Regional endpoints can be set via ``client_options`` that @@ -104,14 +114,19 @@ class Client(object): ) """ - def __init__(self, batch_settings=(), publisher_options=(), **kwargs): + def __init__( + self, + batch_settings: Union[types.BatchSettings, Sequence] = (), + publisher_options: Union[types.PublisherOptions, Sequence] = (), + **kwargs: Any, + ): assert ( type(batch_settings) is types.BatchSettings or len(batch_settings) == 0 - ), "batch_settings must be of type BatchSettings or an empty tuple." + ), "batch_settings must be of type BatchSettings or an empty sequence." assert ( type(publisher_options) is types.PublisherOptions or len(publisher_options) == 0 - ), "publisher_options must be of type PublisherOptions or an empty tuple." + ), "publisher_options must be of type PublisherOptions or an empty sequence." # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host @@ -146,20 +161,25 @@ def __init__(self, batch_settings=(), publisher_options=(), **kwargs): self._flow_controller = FlowController(self.publisher_options.flow_control) @classmethod - def from_service_account_file(cls, filename, batch_settings=(), **kwargs): + def from_service_account_file( + cls, + filename: str, + batch_settings: Union[types.BatchSettings, Sequence] = (), + **kwargs: Any, + ) -> "Client": """Creates an instance of this client using the provided credentials file. 
Args: - filename (str): The path to the service account private key json - file. - batch_settings (~google.cloud.pubsub_v1.types.BatchSettings): The - settings for batch publishing. - kwargs: Additional arguments to pass to the constructor. + filename: + The path to the service account private key JSON file. + batch_settings: + The settings for batch publishing. + kwargs: + Additional arguments to pass to the constructor. Returns: - A Publisher :class:`~google.cloud.pubsub_v1.publisher.client.Client` - instance that is the constructed client. + A Publisher instance that is the constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -168,15 +188,15 @@ def from_service_account_file(cls, filename, batch_settings=(), **kwargs): from_service_account_json = from_service_account_file @property - def target(self): + def target(self) -> str: """Return the target (where the API is). Returns: - str: The location of the API. + The location of the API. """ return self._target - def _get_or_create_sequencer(self, topic, ordering_key): + def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> "Sequencer": """ Get an existing sequencer or create a new one given the (topic, ordering_key) pair. """ @@ -193,11 +213,11 @@ def _get_or_create_sequencer(self, topic, ordering_key): return sequencer - def resume_publish(self, topic, ordering_key): + def resume_publish(self, topic: str, ordering_key: str) -> None: """ Resume publish on an ordering key that has had unrecoverable errors. Args: - topic (str): The topic to publish messages to. + topic: The topic to publish messages to. ordering_key: A string that identifies related messages for which publish order should be respected. 
@@ -231,13 +251,13 @@ def resume_publish(self, topic, ordering_key): def publish( self, - topic, - data, - ordering_key="", - retry=gapic_v1.method.DEFAULT, + topic: str, + data: bytes, + ordering_key: str = "", + retry: "api_core.retry.Retry" = gapic_v1.method.DEFAULT, timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - **attrs - ): + **attrs: Union[bytes, str], + ) -> "pubsub_v1.publisher.futures.Future": """Publish a single message. .. note:: @@ -266,22 +286,22 @@ def publish( >>> response = client.publish(topic, data, username='guido') Args: - topic (str): The topic to publish messages to. - data (bytes): A bytestring representing the message body. This + topic: The topic to publish messages to. + data: A bytestring representing the message body. This must be a bytestring. ordering_key: A string that identifies related messages for which publish order should be respected. Message ordering must be enabled for this client to use this feature. - retry (Optional[google.api_core.retry.Retry]): Designation of what - errors, if any, should be retried. If `ordering_key` is specified, - the total retry deadline will be changed to "infinity". + retry: + Designation of what errors, if any, should be retried. If `ordering_key` + is specified, the total retry deadline will be changed to "infinity". If given, it overides any retry passed into the client through the ``publisher_options`` argument. - timeout (:class:`~.pubsub_v1.types.TimeoutType`): + timeout: The timeout for the RPC request. Can be used to override any timeout passed in through ``publisher_options`` when instantiating the client. - attrs (Mapping[str, str]): A dictionary of attributes to be + attrs: A dictionary of attributes to be sent as metadata. (These may be text strings or byte strings.) 
Returns: @@ -374,7 +394,7 @@ def on_publish_done(future): return future - def ensure_cleanup_and_commit_timer_runs(self): + def ensure_cleanup_and_commit_timer_runs(self) -> None: """ Ensure a cleanup/commit timer thread is running. If a cleanup/commit timer thread is already running, this does nothing. @@ -382,7 +402,7 @@ def ensure_cleanup_and_commit_timer_runs(self): with self._batch_lock: self._ensure_commit_timer_runs_no_lock() - def _ensure_commit_timer_runs_no_lock(self): + def _ensure_commit_timer_runs_no_lock(self) -> None: """ Ensure a commit timer thread is running, without taking _batch_lock. @@ -391,7 +411,7 @@ def _ensure_commit_timer_runs_no_lock(self): if not self._commit_thread and self.batch_settings.max_latency < float("inf"): self._start_commit_thread() - def _start_commit_thread(self): + def _start_commit_thread(self) -> None: """Start a new thread to actually wait and commit the sequencers.""" # NOTE: If the thread is *not* a daemon, a memory leak exists due to a CPython issue. # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 @@ -403,7 +423,7 @@ def _start_commit_thread(self): ) self._commit_thread.start() - def _wait_and_commit_sequencers(self): + def _wait_and_commit_sequencers(self) -> None: """ Wait up to the batching timeout, and commit all sequencers. """ # Sleep for however long we should be waiting. @@ -416,7 +436,7 @@ def _wait_and_commit_sequencers(self): self._commit_sequencers() self._commit_thread = None - def _commit_sequencers(self): + def _commit_sequencers(self) -> None: """ Clean up finished sequencers and commit the rest. """ finished_sequencer_keys = [ key @@ -429,7 +449,7 @@ def _commit_sequencers(self): for sequencer in self._sequencers.values(): sequencer.commit() - def stop(self): + def stop(self) -> None: """Immediately publish all outstanding messages. Asynchronously sends all outstanding messages and @@ -458,15 +478,19 @@ def stop(self): sequencer.stop() # Used only for testing. 
- def _set_batch(self, topic, batch, ordering_key=""): + def _set_batch( + self, topic: str, batch: "_batch.thread.Batch", ordering_key: str = "" + ) -> None: sequencer = self._get_or_create_sequencer(topic, ordering_key) sequencer._set_batch(batch) # Used only for testing. - def _set_batch_class(self, batch_class): + def _set_batch_class(self, batch_class: Type) -> None: self._batch_class = batch_class # Used only for testing. - def _set_sequencer(self, topic, sequencer, ordering_key=""): + def _set_sequencer( + self, topic: str, sequencer: "Sequencer", ordering_key: str = "" + ) -> None: sequencer_key = (topic, ordering_key) self._sequencers[sequencer_key] = sequencer diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index 89b3790a0a18..ff0f0713d14b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -33,7 +33,7 @@ class PublishToPausedOrderingKeyException(Exception): occurred during publish of a batch for that key. 
""" - def __init__(self, ordering_key): + def __init__(self, ordering_key: str): self.ordering_key = ordering_key super(PublishToPausedOrderingKeyException, self).__init__() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py index f899f4d0859e..fa3d58d33e3e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -15,6 +15,7 @@ from collections import OrderedDict import logging import threading +from typing import Optional import warnings from google.cloud.pubsub_v1 import types @@ -45,11 +46,10 @@ class FlowController(object): """A class used to control the flow of messages passing through it. Args: - settings (~google.cloud.pubsub_v1.types.PublishFlowControl): - Desired flow control configuration. + settings: Desired flow control configuration. """ - def __init__(self, settings): + def __init__(self, settings: types.PublishFlowControl): self._settings = settings # Load statistics. They represent the number of messages added, but not @@ -72,14 +72,14 @@ def __init__(self, settings): # The condition for blocking the flow if capacity is exceeded. self._has_capacity = threading.Condition(lock=self._operational_lock) - def add(self, message): + def add(self, message: types.PubsubMessage) -> None: # pytype: disable=module-attr """Add a message to flow control. Adding a message updates the internal load statistics, and an action is taken if these limits are exceeded (depending on the flow control settings). Args: - message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): + message: The message entering the flow control. 
Raises: @@ -166,11 +166,13 @@ def add(self, message): self._reserved_slots -= 1 del self._waiting[current_thread] - def release(self, message): + def release( + self, message: types.PubsubMessage # pytype: disable=module-attr + ) -> None: """Release a mesage from flow control. Args: - message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): + message: The message entering the flow control. """ if self._settings.limit_exceeded_behavior == types.LimitExceededBehavior.IGNORE: @@ -197,7 +199,7 @@ def release(self, message): _LOGGER.debug("Notifying threads waiting to add messages to flow.") self._has_capacity.notify_all() - def _distribute_available_capacity(self): + def _distribute_available_capacity(self) -> None: """Distribute available capacity among the waiting threads in FIFO order. The method assumes that the caller has obtained ``_operational_lock``. @@ -237,13 +239,10 @@ def _distribute_available_capacity(self): self._reserved_bytes += can_give available_bytes -= can_give - def _ready_to_unblock(self): + def _ready_to_unblock(self) -> bool: """Determine if any of the threads waiting to add a message can proceed. The method assumes that the caller has obtained ``_operational_lock``. - - Returns: - bool """ if self._waiting: # It's enough to only check the head of the queue, because FIFO @@ -256,17 +255,15 @@ def _ready_to_unblock(self): return False - def _would_overflow(self, message): + def _would_overflow( + self, message: types.PubsubMessage # pytype: disable=module-attr + ) -> bool: """Determine if accepting a message would exceed flow control limits. The method assumes that the caller has obtained ``_operational_lock``. Args: - message (:class:`~google.cloud.pubsub_v1.types.PubsubMessage`): - The message entering the flow control. - - Returns: - bool + message: The message entering the flow control. 
""" reservation = self._waiting.get(threading.current_thread()) @@ -287,7 +284,9 @@ def _would_overflow(self, message): return size_overflow or msg_count_overflow - def _load_info(self, message_count=None, total_bytes=None): + def _load_info( + self, message_count: Optional[int] = None, total_bytes: Optional[int] = None + ) -> str: """Return the current flow control load information. The caller can optionally adjust some of the values to fit its reporting @@ -296,13 +295,10 @@ def _load_info(self, message_count=None, total_bytes=None): The method assumes that the caller has obtained ``_operational_lock``. Args: - message_count (Optional[int]): + message_count: The value to override the current message count with. - total_bytes (Optional[int]): + total_bytes: The value to override the current total bytes with. - - Returns: - str """ if message_count is None: message_count = self._message_count diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index 04748e8542eb..09bb2417c45c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -14,6 +14,8 @@ from __future__ import absolute_import +from typing import Union + from google.cloud.pubsub_v1 import futures @@ -25,32 +27,32 @@ class Future(futures.Future): ID, unless an error occurs. """ - def cancel(self): + def cancel(self) -> bool: """Actions in Pub/Sub generally may not be canceled. This method always returns ``False``. """ return False - def cancelled(self): + def cancelled(self) -> bool: """Actions in Pub/Sub generally may not be canceled. This method always returns ``False``. """ return False - def result(self, timeout=None): + def result(self, timeout: Union[int, float] = None) -> str: """Return the message ID or raise an exception. 
This blocks until the message has been published successfully and returns the message ID unless an exception is raised. Args: - timeout (Union[int, float]): The number of seconds before this call + timeout: The number of seconds before this call times out and raises TimeoutError. Returns: - str: The message ID. + The message ID. Raises: concurrent.futures.TimeoutError: If the request times out. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 382c5c38a2c2..badcd78f386e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -20,11 +20,28 @@ import logging import math import threading +import typing +from typing import Sequence, Union from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + import queue + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) + + +RequestItem = Union[ + requests.AckRequest, + requests.DropRequest, + requests.LeaseRequest, + requests.ModAckRequest, + requests.NackRequest, +] + _LOGGER = logging.getLogger(__name__) _CALLBACK_WORKER_NAME = "Thread-CallbackRequestDispatcher" @@ -51,14 +68,15 @@ class Dispatcher(object): - def __init__(self, manager, queue): + def __init__(self, manager: "StreamingPullManager", queue: "queue.Queue"): self._manager = manager self._queue = queue self._thread = None self._operational_lock = threading.Lock() - def start(self): + def start(self) -> None: """Start a thread to dispatch requests queued up by callbacks. + Spawns a thread to run :meth:`dispatch_callback`. 
""" with self._operational_lock: @@ -78,7 +96,7 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread - def stop(self): + def stop(self) -> None: with self._operational_lock: if self._thread is not None: # Signal the worker to stop by queueing a "poison pill" @@ -87,17 +105,12 @@ def stop(self): self._thread = None - def dispatch_callback(self, items): + def dispatch_callback(self, items: Sequence[RequestItem]) -> None: """Map the callback request to the appropriate gRPC request. Args: - action (str): The method to be invoked. - kwargs (Dict[str, Any]): The keyword arguments for the method - specified by ``action``. - - Raises: - ValueError: If ``action`` isn't one of the expected actions - "ack", "drop", "lease", "modify_ack_deadline" or "nack". + items: + Queued requests to dispatch. """ batched_commands = collections.defaultdict(list) @@ -119,11 +132,11 @@ def dispatch_callback(self, items): if batched_commands[requests.DropRequest]: self.drop(batched_commands.pop(requests.DropRequest)) - def ack(self, items): + def ack(self, items: Sequence[requests.AckRequest]) -> None: """Acknowledge the given messages. Args: - items(Sequence[AckRequest]): The items to acknowledge. + items: The items to acknowledge. """ # If we got timing information, add it to the histogram. for item in items: @@ -145,31 +158,36 @@ def ack(self, items): # Remove the message from lease management. self.drop(items) - def drop(self, items): + def drop( + self, + items: Sequence[ + Union[requests.AckRequest, requests.DropRequest, requests.NackRequest] + ], + ) -> None: """Remove the given messages from lease management. Args: - items(Sequence[DropRequest]): The items to drop. + items: The items to drop. 
""" self._manager.leaser.remove(items) ordering_keys = (k.ordering_key for k in items if k.ordering_key) self._manager.activate_ordering_keys(ordering_keys) self._manager.maybe_resume_consumer() - def lease(self, items): + def lease(self, items: Sequence[requests.LeaseRequest]) -> None: """Add the given messages to lease management. Args: - items(Sequence[LeaseRequest]): The items to lease. + items: The items to lease. """ self._manager.leaser.add(items) self._manager.maybe_pause_consumer() - def modify_ack_deadline(self, items): + def modify_ack_deadline(self, items: Sequence[requests.ModAckRequest]) -> None: """Modify the ack deadline for the given messages. Args: - items(Sequence[ModAckRequest]): The items to modify. + items: The items to modify. """ # We must potentially split the request into multiple smaller requests # to avoid the server-side max request size limit. @@ -184,11 +202,11 @@ def modify_ack_deadline(self, items): ) self._manager.send(request) - def nack(self, items): + def nack(self, items: Sequence[requests.NackRequest]) -> None: """Explicitly deny receipt of messages. Args: - items(Sequence[NackRequest]): The items to deny. + items: The items to deny. 
""" self.modify_ack_deadline( [requests.ModAckRequest(ack_id=item.ack_id, seconds=0) for item in items] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index fef158965c57..842e4adc55d2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -16,6 +16,12 @@ import logging import threading +import typing + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) _LOGGER = logging.getLogger(__name__) @@ -27,14 +33,14 @@ class Heartbeater(object): - def __init__(self, manager, period=_DEFAULT_PERIOD): + def __init__(self, manager: "StreamingPullManager", period: int = _DEFAULT_PERIOD): self._thread = None self._operational_lock = threading.Lock() self._manager = manager self._stop_event = threading.Event() self._period = period - def heartbeat(self): + def heartbeat(self) -> None: """Periodically send streaming pull heartbeats. 
""" while not self._stop_event.is_set(): @@ -44,7 +50,7 @@ def heartbeat(self): _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) - def start(self): + def start(self) -> None: with self._operational_lock: if self._thread is not None: raise ValueError("Heartbeater is already running.") @@ -59,7 +65,7 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread - def stop(self): + def stop(self) -> None: with self._operational_lock: self._stop_event.set() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index 661df7927014..fbcab781df84 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -15,6 +15,7 @@ import logging import queue import time +from typing import Any, Callable, List, Sequence import uuid @@ -30,22 +31,25 @@ STOP = uuid.uuid4() -def _get_many(queue_, max_items=None, max_latency=0): +def _get_many( + queue_: queue.Queue, max_items: int = None, max_latency: float = 0 +) -> List[Any]: """Get multiple items from a Queue. Gets at least one (blocking) and at most ``max_items`` items (non-blocking) from a given Queue. Does not mark the items as done. Args: - queue_ (~queue.Queue`): The Queue to get items from. - max_items (int): The maximum number of items to get. If ``None``, then - all available items in the queue are returned. - max_latency (float): The maximum number of seconds to wait for more - than one item from a queue. This number includes the time required - to retrieve the first item. + queue_: The Queue to get items from. + max_items: + The maximum number of items to get. If ``None``, then all available items + in the queue are returned. 
+ max_latency: + The maximum number of seconds to wait for more than one item from a queue. + This number includes the time required to retrieve the first item. Returns: - Sequence[Any]: A sequence of items retrieved from the queue. + A sequence of items retrieved from the queue. """ start = time.time() # Always return at least one item. @@ -67,26 +71,33 @@ class QueueCallbackWorker(object): :attr:`STOP`. Args: - queue (~queue.Queue): A Queue instance, appropriate for crossing the - concurrency boundary implemented by ``executor``. Items will - be popped off (with a blocking ``get()``) until :attr:`STOP` - is encountered. - callback (Callable[Sequence[Any], Any]): A callback that can process - items pulled off of the queue. Multiple items will be passed to - the callback in batches. - max_items (int): The maximum amount of items that will be passed to the - callback at a time. - max_latency (float): The maximum amount of time in seconds to wait for - additional items before executing the callback. + queue: + A Queue instance, appropriate for crossing the concurrency boundary + implemented by ``executor``. Items will be popped off (with a blocking + ``get()``) until :attr:`STOP` is encountered. + callback: + A callback that can process items pulled off of the queue. Multiple items + will be passed to the callback in batches. + max_items: + The maximum amount of items that will be passed to the callback at a time. + max_latency: + The maximum amount of time in seconds to wait for additional items before + executing the callback. 
""" - def __init__(self, queue, callback, max_items=100, max_latency=0): + def __init__( + self, + queue: queue.Queue, + callback: Callable[[Sequence[Any]], Any], + max_items: int = 100, + max_latency: float = 0, + ): self.queue = queue self._callback = callback self.max_items = max_items self.max_latency = max_latency - def __call__(self): + def __call__(self) -> None: continue_ = True while continue_: items = _get_many( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py index 0a4a81746bb8..7ffa4b3a0b42 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import absolute_import, division +from typing import Dict, Optional, Union MIN_ACK_DEADLINE = 10 @@ -36,15 +36,15 @@ class Histogram(object): leases in the actual API. """ - def __init__(self, data=None): + def __init__(self, data: Optional[Dict[int, int]] = None): """Instantiate the histogram. Args: - data (Mapping[str, int]): The data strucure to be used to store - the underlying data. The default is an empty dictionary. - This can be set to a dictionary-like object if required - (for example, if a special object is needed for - concurrency reasons). + data: + The data strucure to be used to store the underlying data. The default + is an empty dictionary. This can be set to a dictionary-like object if + required (for example, if a special object is needed for concurrency + reasons). 
""" # The data is stored as a dictionary, with the keys being the # value being added and the values being the number of times that @@ -60,22 +60,19 @@ def __init__(self, data=None): self._data = data self._len = 0 - def __len__(self): + def __len__(self) -> int: """Return the total number of data points in this histogram. This is cached on a separate counter (rather than computing it using ``sum([v for v in self._data.values()])``) to optimize lookup. Returns: - int: The total number of data points in this histogram. + The total number of data points in this histogram. """ return self._len - def __contains__(self, needle): - """Return True if needle is present in the histogram, False otherwise. - - Returns: - bool: True or False + def __contains__(self, needle: int) -> bool: + """Return ``True`` if needle is present in the histogram, ``False`` otherwise. """ return needle in self._data @@ -85,37 +82,38 @@ def __repr__(self): ) @property - def max(self): + def max(self) -> int: """Return the maximum value in this histogram. If there are no values in the histogram at all, return ``MAX_ACK_DEADLINE``. Returns: - int: The maximum value in the histogram. + The maximum value in the histogram. """ if len(self._data) == 0: return MAX_ACK_DEADLINE return next(iter(reversed(sorted(self._data.keys())))) @property - def min(self): + def min(self) -> int: """Return the minimum value in this histogram. If there are no values in the histogram at all, return ``MIN_ACK_DEADLINE``. Returns: - int: The minimum value in the histogram. + The minimum value in the histogram. """ if len(self._data) == 0: return MIN_ACK_DEADLINE return next(iter(sorted(self._data.keys()))) - def add(self, value): + def add(self, value: Union[int, float]) -> None: """Add the value to this histogram. Args: - value (int): The value. Values outside of - ``MIN_ACK_DEADLINE <= x <= MAX_ACK_DEADLINE`` + value: + The value. 
Values outside of + ``MIN_ACK_DEADLINE <= x <= MAX_ACK_DEADLINE`` will be raised to ``MIN_ACK_DEADLINE`` or reduced to ``MAX_ACK_DEADLINE``. """ @@ -131,15 +129,16 @@ def add(self, value): self._data[value] += 1 self._len += 1 - def percentile(self, percent): + def percentile(self, percent: Union[int, float]) -> int: """Return the value that is the Nth precentile in the histogram. Args: - percent (Union[int, float]): The precentile being sought. The - default consumer implementations consistently use ``99``. + percent: + The precentile being sought. The default consumer implementations + consistently use ``99``. Returns: - int: The value corresponding to the requested percentile. + The value corresponding to the requested percentile. """ # Sanity check: Any value over 100 should become 100. if percent >= 100: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 8fd067aaf6d4..c8d7e93659fb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -20,9 +20,16 @@ import random import threading import time +import typing +from typing import Iterable, Sequence, Union from google.cloud.pubsub_v1.subscriber._protocol import requests +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) + _LOGGER = logging.getLogger(__name__) _LEASE_WORKER_NAME = "Thread-LeaseMaintainer" @@ -34,7 +41,7 @@ class Leaser(object): - def __init__(self, manager): + def __init__(self, manager: "StreamingPullManager"): self._thread = None self._manager = manager @@ -55,21 +62,21 @@ def __init__(self, manager): self._stop_event = threading.Event() @property - def message_count(self): - """int: The number of leased messages.""" + def 
message_count(self) -> int: + """The number of leased messages.""" return len(self._leased_messages) @property - def ack_ids(self): - """Sequence[str]: The ack IDs of all leased messages.""" + def ack_ids(self) -> Sequence[str]: + """The ack IDs of all leased messages.""" return self._leased_messages.keys() @property - def bytes(self): - """int: The total size, in bytes, of all leased messages.""" + def bytes(self) -> int: + """The total size, in bytes, of all leased messages.""" return self._bytes - def add(self, items): + def add(self, items: Iterable[requests.LeaseRequest]) -> None: """Add messages to be managed by the leaser.""" with self._add_remove_lock: for item in items: @@ -85,12 +92,11 @@ def add(self, items): else: _LOGGER.debug("Message %s is already lease managed", item.ack_id) - def start_lease_expiry_timer(self, ack_ids): + def start_lease_expiry_timer(self, ack_ids: Iterable[str]) -> None: """Start the lease expiry timer for `items`. Args: - items (Sequence[str]): Sequence of ack-ids for which to start - lease expiry timers. + items: Sequence of ack-ids for which to start lease expiry timers. """ with self._add_remove_lock: for ack_id in ack_ids: @@ -102,7 +108,12 @@ def start_lease_expiry_timer(self, ack_ids): sent_time=time.time() ) - def remove(self, items): + def remove( + self, + items: Iterable[ + Union[requests.AckRequest, requests.DropRequest, requests.NackRequest] + ], + ) -> None: """Remove messages from lease management.""" with self._add_remove_lock: # Remove the ack ID from lease management, and decrement the @@ -117,7 +128,7 @@ def remove(self, items): _LOGGER.debug("Bytes was unexpectedly negative: %d", self._bytes) self._bytes = 0 - def maintain_leases(self): + def maintain_leases(self) -> None: """Maintain all of the leases being managed. 
This method modifies the ack deadline for all of the managed @@ -188,7 +199,7 @@ def maintain_leases(self): _LOGGER.info("%s exiting.", _LEASE_WORKER_NAME) - def start(self): + def start(self) -> None: with self._operational_lock: if self._thread is not None: raise ValueError("Leaser is already running.") @@ -203,7 +214,7 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) self._thread = thread - def stop(self): + def stop(self) -> None: with self._operational_lock: self._stop_event.set() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py index bab15f2182b7..82d5ca376dec 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py @@ -13,6 +13,11 @@ # limitations under the License. import collections +import typing +from typing import Any, Callable, Iterable, Optional + +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import subscriber class MessagesOnHold(object): @@ -41,27 +46,25 @@ def __init__(self): self._pending_ordered_messages = {} @property - def size(self): - """Return the number of messages on hold across ordered and unordered - messages. + def size(self) -> int: + """Return the number of messages on hold across ordered and unordered messages. Note that this object may still store information about ordered messages in flight even if size is zero. Returns: - int: The size value. + The size value. """ return self._size - def get(self): + def get(self) -> Optional["subscriber.message.Message"]: """ Gets a message from the on-hold queue. A message with an ordering key wont be returned if there's another message with the same key in flight. 
Returns: - Optional[google.cloud.pubsub_v1.subscriber.message.Message]: A message - that hasn't been sent to the user yet or None if there are no - messages available. + A message that hasn't been sent to the user yet or ``None`` if there are no + messages available. """ while self._messages_on_hold: msg = self._messages_on_hold.popleft() @@ -88,17 +91,20 @@ def get(self): return None - def put(self, message): + def put(self, message: "subscriber.message.Message") -> None: """Put a message on hold. Args: - message (google.cloud.pubsub_v1.subscriber.message.Message): The - message to put on hold. + message: The message to put on hold. """ self._messages_on_hold.append(message) self._size = self._size + 1 - def activate_ordering_keys(self, ordering_keys, schedule_message_callback): + def activate_ordering_keys( + self, + ordering_keys: Iterable[str], + schedule_message_callback: Callable[["subscriber.message.Message"], Any], + ) -> None: """Send the next message in the queue for each of the passed-in ordering keys, if they exist. Clean up state for keys that no longer have any queued messages. @@ -107,9 +113,9 @@ def activate_ordering_keys(self, ordering_keys, schedule_message_callback): detail about the impact of this method on load. Args: - ordering_keys(Sequence[str]): A sequence of ordering keys to - activate. May be empty. - schedule_message_callback(Callable[google.cloud.pubsub_v1.subscriber.message.Message]): + ordering_keys: + The ordering keys to activate. May be empty. + schedule_message_callback: The callback to call to schedule a message to be sent to the user. """ for key in ordering_keys: @@ -126,18 +132,19 @@ def activate_ordering_keys(self, ordering_keys, schedule_message_callback): # No more messages for this ordering key, so do clean-up. 
self._clean_up_ordering_key(key) - def _get_next_for_ordering_key(self, ordering_key): + def _get_next_for_ordering_key( + self, ordering_key: str + ) -> Optional["subscriber.message.Message"]: """Get next message for ordering key. The client should call clean_up_ordering_key() if this method returns None. Args: - ordering_key (str): Ordering key for which to get the next message. + ordering_key: Ordering key for which to get the next message. Returns: - google.cloud.pubsub_v1.subscriber.message.Message|None: The - next message for this ordering key or None if there aren't any. + The next message for this ordering key or None if there aren't any. """ queue_for_key = self._pending_ordered_messages.get(ordering_key) if queue_for_key: @@ -145,11 +152,11 @@ def _get_next_for_ordering_key(self, ordering_key): return queue_for_key.popleft() return None - def _clean_up_ordering_key(self, ordering_key): + def _clean_up_ordering_key(self, ordering_key: str) -> None: """Clean up state for an ordering key with no pending messages. Args: - ordering_key (str): The ordering key to clean up. + ordering_key: The ordering key to clean up. """ message_queue = self._pending_ordered_messages.get(ordering_key) assert ( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index 58d53a61da9c..7481d95a9f36 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -12,28 +12,36 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Base class for concurrency policy.""" +from typing import NamedTuple, Optional -from __future__ import absolute_import, division - -import collections # Namedtuples for management requests. 
Used by the Message class to communicate # items of work back to the policy. -AckRequest = collections.namedtuple( - "AckRequest", ["ack_id", "byte_size", "time_to_ack", "ordering_key"] -) +class AckRequest(NamedTuple): + ack_id: str + byte_size: int + time_to_ack: float + ordering_key: Optional[str] + + +class DropRequest(NamedTuple): + ack_id: str + byte_size: int + ordering_key: Optional[str] + + +class LeaseRequest(NamedTuple): + ack_id: str + byte_size: int + ordering_key: Optional[str] -DropRequest = collections.namedtuple( - "DropRequest", ["ack_id", "byte_size", "ordering_key"] -) -LeaseRequest = collections.namedtuple( - "LeaseRequest", ["ack_id", "byte_size", "ordering_key"] -) +class ModAckRequest(NamedTuple): + ack_id: str + seconds: float -ModAckRequest = collections.namedtuple("ModAckRequest", ["ack_id", "seconds"]) -NackRequest = collections.namedtuple( - "NackRequest", ["ack_id", "byte_size", "ordering_key"] -) +class NackRequest(NamedTuple): + ack_id: str + byte_size: int + ordering_key: Optional[str] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index da027fcbed1e..018917b901da 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -19,6 +19,8 @@ import itertools import logging import threading +import typing +from typing import Any, Callable, Iterable import uuid import grpc @@ -36,6 +38,11 @@ import google.cloud.pubsub_v1.subscriber.scheduler from google.pubsub_v1 import types as gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import subscriber + from google.cloud.pubsub_v1.subscriber.scheduler import Scheduler + + _LOGGER = logging.getLogger(__name__) _REGULAR_SHUTDOWN_THREAD_NAME 
= "Thread-RegularStreamShutdown" _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" @@ -55,11 +62,11 @@ """The load threshold below which to resume the incoming message stream.""" -def _wrap_as_exception(maybe_exception): +def _wrap_as_exception(maybe_exception: Any) -> BaseException: """Wrap an object as a Python exception, if needed. Args: - maybe_exception (Any): The object to wrap, usually a gRPC exception class. + maybe_exception: The object to wrap, usually a gRPC exception class. Returns: The argument itself if an instance of ``BaseException``, otherwise @@ -73,13 +80,17 @@ def _wrap_as_exception(maybe_exception): return Exception(maybe_exception) -def _wrap_callback_errors(callback, on_callback_error, message): +def _wrap_callback_errors( + callback: Callable[["google.cloud.pubsub_v1.subscriber.message.Message"], Any], + on_callback_error: Callable[[Exception], Any], + message: "google.cloud.pubsub_v1.subscriber.message.Message", +): """Wraps a user callback so that if an exception occurs the message is nacked. Args: - callback (Callable[None, Message]): The user callback. - message (~Message): The Pub/Sub message. + callback: The user callback. + message: The Pub/Sub message. """ try: callback(message) @@ -99,21 +110,21 @@ class StreamingPullManager(object): leasing them, and scheduling them to be processed. Args: - client (~.pubsub_v1.subscriber.client): The subscriber client used - to create this instance. - subscription (str): The name of the subscription. The canonical - format for this is + client: + The subscriber client used to create this instance. + subscription: + The name of the subscription. The canonical format for this is ``projects/{project}/subscriptions/{subscription}``. - flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow - control settings. - use_legacy_flow_control (bool): + flow_control: + The flow control settings. + scheduler: + The scheduler to use to process messages. 
If not provided, a thread + pool-based scheduler will be used. + use_legacy_flow_control: If set to ``True``, flow control at the Cloud Pub/Sub server is disabled, though client-side flow control is still enabled. If set to ``False`` (default), both server-side and client-side flow control are enabled. - scheduler (~google.cloud.pubsub_v1.scheduler.Scheduler): The scheduler - to use to process messages. If not provided, a thread pool-based - scheduler will be used. - await_callbacks_on_shutdown (bool): + await_callbacks_on_shutdown: If ``True``, the shutdown thread will wait until all scheduler threads terminate and only then proceed with shutting down the remaining running helper threads. @@ -129,12 +140,12 @@ class StreamingPullManager(object): def __init__( self, - client, - subscription, - flow_control=types.FlowControl(), - scheduler=None, - use_legacy_flow_control=False, - await_callbacks_on_shutdown=False, + client: "subscriber.Client", + subscription: str, + flow_control: types.FlowControl = types.FlowControl(), + scheduler: "Scheduler" = None, + use_legacy_flow_control: bool = False, + await_callbacks_on_shutdown: bool = False, ): self._client = client self._subscription = subscription @@ -191,8 +202,8 @@ def __init__( self._heartbeater = None @property - def is_active(self): - """bool: True if this manager is actively streaming. + def is_active(self) -> bool: + """``True`` if this manager is actively streaming. Note that ``False`` does not indicate this is complete shut down, just that it stopped getting new messages. 
@@ -200,42 +211,35 @@ def is_active(self): return self._consumer is not None and self._consumer.is_active @property - def flow_control(self): - """google.cloud.pubsub_v1.types.FlowControl: The active flow control - settings.""" + def flow_control(self) -> types.FlowControl: + """The active flow control settings.""" return self._flow_control @property - def dispatcher(self): - """google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher: - The dispatcher helper. - """ + def dispatcher(self) -> dispatcher.Dispatcher: + """The dispatcher helper.""" return self._dispatcher @property - def leaser(self): - """google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser: - The leaser helper. - """ + def leaser(self) -> leaser.Leaser: + """The leaser helper.""" return self._leaser @property - def ack_histogram(self): - """google.cloud.pubsub_v1.subscriber._protocol.histogram.Histogram: - The histogram tracking time-to-acknowledge. - """ + def ack_histogram(self) -> histogram.Histogram: + """The histogram tracking time-to-acknowledge.""" return self._ack_histogram @property - def ack_deadline(self): + def ack_deadline(self) -> float: """Return the current ACK deadline based on historical data without updating it. Returns: - int: The ack deadline. + The ack deadline. """ return self._obtain_ack_deadline(maybe_update=False) - def _obtain_ack_deadline(self, maybe_update: bool) -> int: + def _obtain_ack_deadline(self, maybe_update: bool) -> float: """The actual `ack_deadline` implementation. This method is "sticky". It will only perform the computations to check on the @@ -243,12 +247,12 @@ def _obtain_ack_deadline(self, maybe_update: bool) -> int: time-to-ack data has gained a significant amount of new information. Args: - maybe_update (bool): + maybe_update: If ``True``, also update the current ACK deadline before returning it if enough new ACK data has been gathered. Returns: - int: The current ACK deadline in seconds to use. 
+ The current ACK deadline in seconds to use. """ with self._ack_deadline_lock: if not maybe_update: @@ -273,7 +277,7 @@ def _obtain_ack_deadline(self, maybe_update: bool) -> int: return self._ack_deadline @property - def load(self): + def load(self) -> float: """Return the current load. The load is represented as a float, where 1.0 represents having @@ -287,7 +291,7 @@ def load(self): running room on setting A if setting B is over.) Returns: - float: The load value. + The load value. """ if self._leaser is None: return 0.0 @@ -307,15 +311,17 @@ def load(self): ] ) - def add_close_callback(self, callback): + def add_close_callback( + self, callback: Callable[["StreamingPullManager", Any], Any], + ) -> None: """Schedules a callable when the manager closes. Args: - callback (Callable): The method to call. + callback: The method to call. """ self._close_callbacks.append(callback) - def activate_ordering_keys(self, ordering_keys): + def activate_ordering_keys(self, ordering_keys: Iterable[str]) -> None: """Send the next message in the queue for each of the passed-in ordering keys, if they exist. Clean up state for keys that no longer have any queued messages. @@ -326,8 +332,8 @@ def activate_ordering_keys(self, ordering_keys): This decision is by design because it simplifies MessagesOnHold. Args: - ordering_keys(Sequence[str]): A sequence of ordering keys to - activate. May be empty. + ordering_keys: + A sequence of ordering keys to activate. May be empty. 
""" with self._pause_resume_lock: if self._scheduler is None: @@ -337,7 +343,7 @@ def activate_ordering_keys(self, ordering_keys): ordering_keys, self._schedule_message_on_hold ) - def maybe_pause_consumer(self): + def maybe_pause_consumer(self) -> None: """Check the current load and pause the consumer if needed.""" with self._pause_resume_lock: if self.load >= _MAX_LOAD: @@ -347,7 +353,7 @@ def maybe_pause_consumer(self): ) self._consumer.pause() - def maybe_resume_consumer(self): + def maybe_resume_consumer(self) -> None: """Check the load and held messages and resume the consumer if needed. If there are messages held internally, release those messages before @@ -375,7 +381,7 @@ def maybe_resume_consumer(self): else: _LOGGER.debug("Did not resume, current load is %.2f.", self.load) - def _maybe_release_messages(self): + def _maybe_release_messages(self) -> None: """Release (some of) the held messages if the current load allows for it. The method tries to release as many messages as the current leaser load @@ -397,15 +403,15 @@ def _maybe_release_messages(self): released_ack_ids.append(msg.ack_id) self._leaser.start_lease_expiry_timer(released_ack_ids) - def _schedule_message_on_hold(self, msg): - """Schedule a message on hold to be sent to the user and change - on-hold-bytes. + def _schedule_message_on_hold( + self, msg: "google.cloud.pubsub_v1.subscriber.message.Message" + ): + """Schedule a message on hold to be sent to the user and change on-hold-bytes. The method assumes the caller has acquired the ``_pause_resume_lock``. Args: - msg (google.cloud.pubsub_v1.message.Message): The message to - schedule to be sent to the user. + msg: The message to schedule to be sent to the user. """ assert msg, "Message must not be None." @@ -426,13 +432,11 @@ def _schedule_message_on_hold(self, msg): ) self._scheduler.schedule(self._callback, msg) - def _send_unary_request(self, request): - """Send a request using a separate unary request instead of over the - stream. 
+ def _send_unary_request(self, request: gapic_types.StreamingPullRequest) -> None: + """Send a request using a separate unary request instead of over the stream. Args: - request (gapic_types.StreamingPullRequest): The stream request to be - mapped into unary requests. + request: The stream request to be mapped into unary requests. """ if request.ack_ids: self._client.acknowledge( @@ -456,7 +460,7 @@ def _send_unary_request(self, request): _LOGGER.debug("Sent request(s) over unary RPC.") - def send(self, request): + def send(self, request: gapic_types.StreamingPullRequest) -> None: """Queue a request to be sent to the RPC. If a RetryError occurs, the manager shutdown is triggered, and the @@ -481,11 +485,11 @@ def send(self, request): self._on_rpc_done(exc) raise - def heartbeat(self): + def heartbeat(self) -> bool: """Sends an empty request over the streaming pull RPC. Returns: - bool: If a heartbeat request has actually been sent. + If a heartbeat request has actually been sent. """ if self._rpc is not None and self._rpc.is_active: self._rpc.send(gapic_types.StreamingPullRequest()) @@ -493,14 +497,18 @@ def heartbeat(self): return False - def open(self, callback, on_callback_error): + def open( + self, + callback: Callable[["google.cloud.pubsub_v1.subscriber.message.Message"], Any], + on_callback_error: Callable[[Exception], Any], + ) -> None: """Begin consuming messages. Args: - callback (Callable[None, google.cloud.pubsub_v1.message.Message]): + callback: A callback that will be called for each message received on the stream. - on_callback_error (Callable[Exception]): + on_callback_error: A callable that will be called if an exception is raised in the provided `callback`. 
""" @@ -536,10 +544,13 @@ def open(self, callback, on_callback_error): ) # Create references to threads + # pytype: disable=wrong-arg-types + # (pytype incorrectly complains about "self" not being the right argument type) self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) self._leaser = leaser.Leaser(self) self._heartbeater = heartbeater.Heartbeater(self) + # pytype: enable=wrong-arg-types # Start the thread to pass the requests. self._dispatcher.start() @@ -553,7 +564,7 @@ def open(self, callback, on_callback_error): # Start the stream heartbeater thread. self._heartbeater.start() - def close(self, reason=None): + def close(self, reason: Any = None) -> None: """Stop consuming messages and shutdown all helper threads. This method is idempotent. Additional calls will have no effect. @@ -562,7 +573,8 @@ def close(self, reason=None): thread. Args: - reason (Any): The reason to close this. If ``None``, this is considered + reason: + The reason to close this. If ``None``, this is considered an "intentional" shutdown. This is passed to the callbacks specified via :meth:`add_close_callback`. """ @@ -574,12 +586,13 @@ def close(self, reason=None): ) self._regular_shutdown_thread.start() - def _shutdown(self, reason=None): + def _shutdown(self, reason: Any = None) -> None: """Run the actual shutdown sequence (stop the stream and all helper threads). Args: - reason (Any): The reason to close the stream. If ``None``, this is - considered an "intentional" shutdown. + reason: + The reason to close the stream. If ``None``, this is considered + an "intentional" shutdown. 
""" with self._closing: if self._closed: @@ -637,19 +650,20 @@ def _shutdown(self, reason=None): for callback in self._close_callbacks: callback(self, reason) - def _get_initial_request(self, stream_ack_deadline_seconds): + def _get_initial_request( + self, stream_ack_deadline_seconds: int + ) -> gapic_types.StreamingPullRequest: """Return the initial request for the RPC. This defines the initial request that must always be sent to Pub/Sub immediately upon opening the subscription. Args: - stream_ack_deadline_seconds (int): + stream_ack_deadline_seconds: The default message acknowledge deadline for the stream. Returns: - google.pubsub_v1.types.StreamingPullRequest: A request - suitable for being the first request on the stream (and not + A request suitable for being the first request on the stream (and not suitable for any other purpose). """ # Any ack IDs that are under lease management need to have their @@ -679,7 +693,7 @@ def _get_initial_request(self, stream_ack_deadline_seconds): # Return the initial request. return request - def _on_response(self, response): + def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: """Process all received Pub/Sub messages. For each message, send a modified acknowledgment request to the @@ -739,7 +753,7 @@ def _on_response(self, response): self.maybe_pause_consumer() - def _should_recover(self, exception): + def _should_recover(self, exception: Exception) -> bool: """Determine if an error on the RPC stream should be recovered. If the exception is one of the retryable exceptions, this will signal @@ -748,7 +762,7 @@ def _should_recover(self, exception): This will cause the stream to exit when it returns :data:`False`. Returns: - bool: Indicates if the caller should recover or shut down. + Indicates if the caller should recover or shut down. Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of retryable / idempotent exceptions. 
""" @@ -761,7 +775,7 @@ def _should_recover(self, exception): _LOGGER.info("Observed non-recoverable stream error %s", exception) return False - def _should_terminate(self, exception): + def _should_terminate(self, exception: Exception) -> bool: """Determine if an error on the RPC stream should be terminated. If the exception is one of the terminating exceptions, this will signal @@ -770,7 +784,7 @@ def _should_terminate(self, exception): This will cause the stream to exit when it returns :data:`True`. Returns: - bool: Indicates if the caller should terminate or attempt recovery. + Indicates if the caller should terminate or attempt recovery. Will be :data:`True` if the ``exception`` is "acceptable", i.e. in a list of terminating exceptions. """ @@ -781,7 +795,7 @@ def _should_terminate(self, exception): _LOGGER.info("Observed non-terminating stream error %s", exception) return False - def _on_rpc_done(self, future): + def _on_rpc_done(self, future: Any) -> None: """Triggered whenever the underlying RPC terminates without recovery. 
This is typically triggered from one of two threads: the background diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index c4b229a17334..099a6531820d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -16,6 +16,8 @@ import os import pkg_resources +import typing +from typing import Any, Callable, Optional, Sequence, Union from google.auth.credentials import AnonymousCredentials from google.oauth2 import service_account @@ -26,6 +28,9 @@ from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager from google.pubsub_v1.services.subscriber import client as subscriber_client +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1 import subscriber + try: __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version @@ -50,7 +55,7 @@ class Client(object): get sensible defaults. Args: - kwargs (dict): Any additional arguments provided are sent as keyword + kwargs: Any additional arguments provided are sent as keyword arguments to the underlying :class:`~google.cloud.pubsub_v1.gapic.subscriber_client.SubscriberClient`. Generally you should not need to set additional keyword @@ -72,7 +77,7 @@ class Client(object): ) """ - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any): # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. @@ -88,13 +93,12 @@ def __init__(self, **kwargs): self._closed = False @classmethod - def from_service_account_file(cls, filename, **kwargs): + def from_service_account_file(cls, filename: str, **kwargs: Any) -> "Client": """Creates an instance of this client using the provided credentials file. Args: - filename (str): The path to the service account private key json - file. 
+ filename: The path to the service account private key json file. kwargs: Additional arguments to pass to the constructor. Returns: @@ -108,16 +112,16 @@ def from_service_account_file(cls, filename, **kwargs): from_service_account_json = from_service_account_file @property - def target(self): + def target(self) -> str: """Return the target (where the API is). Returns: - str: The location of the API. + The location of the API. """ return self._target @property - def api(self): + def api(self) -> subscriber_client.SubscriberClient: """The underlying gapic API client.""" return self._api @@ -131,13 +135,13 @@ def closed(self) -> bool: def subscribe( self, - subscription, - callback, - flow_control=(), - scheduler=None, - use_legacy_flow_control=False, - await_callbacks_on_shutdown=False, - ): + subscription: str, + callback: Callable[["subscriber.message.Message"], Any], + flow_control: Union[types.FlowControl, Sequence] = (), + scheduler: Optional["subscriber.scheduler.Scheduler"] = None, + use_legacy_flow_control: bool = False, + await_callbacks_on_shutdown: bool = False, + ) -> futures.StreamingPullFuture: """Asynchronously start receiving messages on a given subscription. This method starts a background thread to begin pulling messages from @@ -201,25 +205,25 @@ def callback(message): future.result() # Block until the shutdown is complete. Args: - subscription (str): The name of the subscription. The - subscription should have already been created (for example, - by using :meth:`create_subscription`). - callback (Callable[~google.cloud.pubsub_v1.subscriber.message.Message]): + subscription: + The name of the subscription. The subscription should have already been + created (for example, by using :meth:`create_subscription`). + callback: The callback function. This function receives the message as its only argument and will be called from a different thread/ process depending on the scheduling strategy. 
- flow_control (~google.cloud.pubsub_v1.types.FlowControl): The flow control - settings. Use this to prevent situations where you are + flow_control: + The flow control settings. Use this to prevent situations where you are inundated with too many messages at once. - scheduler (~google.cloud.pubsub_v1.subscriber.scheduler.Scheduler): An optional - *scheduler* to use when executing the callback. This controls - how callbacks are executed concurrently. This object must not be shared - across multiple SubscriberClients. + scheduler: + An optional *scheduler* to use when executing the callback. This + controls how callbacks are executed concurrently. This object must not + be shared across multiple ``SubscriberClient`` instances. use_legacy_flow_control (bool): If set to ``True``, flow control at the Cloud Pub/Sub server is disabled, though client-side flow control is still enabled. If set to ``False`` (default), both server-side and client-side flow control are enabled. - await_callbacks_on_shutdown (bool): + await_callbacks_on_shutdown: If ``True``, after canceling the returned future, the latter's ``result()`` method will block until the background stream and its helper threads have been terminated, and all currently executing message @@ -232,8 +236,7 @@ def callback(message): running at that point. Returns: - A :class:`~google.cloud.pubsub_v1.subscriber.futures.StreamingPullFuture` - instance that can be used to manage the background stream. + A future instance that can be used to manage the background stream. """ flow_control = types.FlowControl(*flow_control) @@ -252,7 +255,7 @@ def callback(message): return future - def close(self): + def close(self) -> None: """Close the underlying channel to release socket resources. 
After a channel has been closed, the client instance cannot be used @@ -263,7 +266,7 @@ def close(self): self.api._transport.grpc_channel.close() self._closed = True - def __enter__(self): + def __enter__(self) -> "Client": if self._closed: raise RuntimeError("Closed subscriber cannot be used as context manager.") return self diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index 18298b956313..eec9590ed59d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -14,9 +14,18 @@ from __future__ import absolute_import +import typing +from typing import Any + from google.cloud.pubsub_v1 import futures +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( + StreamingPullManager, + ) + + class StreamingPullFuture(futures.Future): """Represents a process that asynchronously performs streaming pull and schedules messages to be processed. @@ -26,13 +35,13 @@ class StreamingPullFuture(futures.Future): the calling thread to block indefinitely. """ - def __init__(self, manager): + def __init__(self, manager: "StreamingPullManager"): super(StreamingPullFuture, self).__init__() self.__manager = manager self.__manager.add_close_callback(self._on_close_callback) self.__cancelled = False - def _on_close_callback(self, manager, result): + def _on_close_callback(self, manager: "StreamingPullManager", result: Any): if self.done(): # The future has already been resolved in a different thread, # nothing to do on the streaming pull manager shutdown. @@ -57,9 +66,9 @@ def cancel(self): self.__cancelled = True return self.__manager.close() - def cancelled(self): + def cancelled(self) -> bool: """ - returns: - bool: ``True`` if the subscription has been cancelled. 
+ Returns: + ``True`` if the subscription has been cancelled. """ return self.__cancelled diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 02ffd99843cd..4a08257a61ba 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -18,9 +18,17 @@ import json import math import time +import typing +from typing import Optional from google.cloud.pubsub_v1.subscriber._protocol import requests +if typing.TYPE_CHECKING: # pragma: NO COVER + import datetime + import queue + from google.cloud.pubsub_v1 import types + from google.protobuf.internal import containers + _MESSAGE_REPR = """\ Message {{ @@ -30,18 +38,19 @@ }}""" -def _indent(lines, prefix=" "): +def _indent(lines: str, prefix: str = " ") -> str: """Indent some text. Note that this is present as ``textwrap.indent``, but not in Python 2. Args: - lines (str): The newline delimited string to be indented. - prefix (Optional[str]): The prefix to indent each line with. Default - to two spaces. + lines: + The newline delimited string to be indented. + prefix: + The prefix to indent each line with. Defaults to two spaces. Returns: - str: The newly indented content. + The newly indented content. """ indented = [] for line in lines.split("\n"): @@ -60,17 +69,25 @@ class Message(object): :class:`~.pubsub_v1.subscriber._consumer.Consumer`.) Attributes: - message_id (str): The message ID. In general, you should not need - to use this directly. - data (bytes): The data in the message. Note that this will be a - :class:`bytes`, not a text string. - attributes (.ScalarMapContainer): The attributes sent along with the - message. See :attr:`attributes` for more information on this type. - publish_time (datetime): The time that this message was originally - published. + message_id: + The message ID. 
In general, you should not need to use this directly. + data: + The data in the message. Note that this will be a :class:`bytes`, + not a text string. + attributes: + The attributes sent along with the message. See :attr:`attributes` for more + information on this type. + publish_time: + The time that this message was originally published. """ - def __init__(self, message, ack_id, delivery_attempt, request_queue): + def __init__( # pytype: disable=module-attr + self, + message: "types.PubsubMessage._meta._pb", + ack_id: str, + delivery_attempt: int, + request_queue: "queue.Queue", + ): """Construct the Message. .. note:: @@ -79,19 +96,20 @@ def __init__(self, message, ack_id, delivery_attempt, request_queue): responsibility of :class:`BasePolicy` subclasses to do so. Args: - message (`pubsub_v1.types.PubsubMessage._meta._pb`): + message: The message received from Pub/Sub. For performance reasons it should be the raw protobuf message normally wrapped by :class:`~pubsub_v1.types.PubsubMessage`. A raw message can be obtained from a :class:`~pubsub_v1.types.PubsubMessage` instance through the latter's ``._pb`` attribute. - ack_id (str): The ack_id received from Pub/Sub. - delivery_attempt (int): The delivery attempt counter received - from Pub/Sub if a DeadLetterPolicy is set on the subscription, - and zero otherwise. - request_queue (queue.Queue): A queue provided by the policy that - can accept requests; the policy is responsible for handling - those requests. + ack_id: + The ack_id received from Pub/Sub. + delivery_attempt: + The delivery attempt counter received from Pub/Sub if a DeadLetterPolicy + is set on the subscription, and zero otherwise. + request_queue: + A queue provided by the policy that can accept requests; the policy is + responsible for handling those requests. 
""" self._message = message self._ack_id = ack_id @@ -131,12 +149,12 @@ def __repr__(self): return _MESSAGE_REPR.format(abbv_data, str(self.ordering_key), pretty_attrs) @property - def attributes(self): + def attributes(self) -> "containers.ScalarMap": """Return the attributes of the underlying Pub/Sub Message. .. warning:: - A ``ScalarMapContainer`` behaves slightly differently than a + A ``ScalarMap`` behaves slightly differently than a ``dict``. For a Pub / Sub message this is a ``string->string`` map. When trying to access a value via ``map['key']``, if the key is not in the map, then the default value for the string type will @@ -144,47 +162,47 @@ def attributes(self): to just cast the map to a ``dict`` or to one use ``map.get``. Returns: - .ScalarMapContainer: The message's attributes. This is a - ``dict``-like object provided by ``google.protobuf``. + The message's attributes. This is a ``dict``-like object provided by + ``google.protobuf``. """ return self._attributes @property - def data(self): + def data(self) -> bytes: """Return the data for the underlying Pub/Sub Message. Returns: - bytes: The message data. This is always a bytestring; if you - want a text string, call :meth:`bytes.decode`. + The message data. This is always a bytestring; if you want a text string, + call :meth:`bytes.decode`. """ return self._data @property - def publish_time(self): + def publish_time(self) -> "datetime.datetime": """Return the time that the message was originally published. Returns: - datetime: The date and time that the message was published. + The date and time that the message was published. 
""" return self._publish_time @property - def ordering_key(self): - """str: the ordering key used to publish the message.""" + def ordering_key(self) -> str: + """The ordering key used to publish the message.""" return self._ordering_key @property - def size(self): + def size(self) -> int: """Return the size of the underlying message, in bytes.""" return self._size @property - def ack_id(self): - """str: the ID used to ack the message.""" + def ack_id(self) -> str: + """the ID used to ack the message.""" return self._ack_id @property - def delivery_attempt(self): + def delivery_attempt(self) -> Optional[int]: """The delivery attempt counter is 1 + (the sum of number of NACKs and number of ack_deadline exceeds) for this message. It is set to None if a DeadLetterPolicy is not set on the subscription. @@ -199,11 +217,11 @@ def delivery_attempt(self): is calculated at best effort and is approximate. Returns: - Optional[int]: The delivery attempt counter or None. + The delivery attempt counter or ``None``. """ return self._delivery_attempt - def ack(self): + def ack(self) -> None: """Acknowledge the given message. Acknowledging a message in Pub/Sub means that you are done @@ -227,7 +245,7 @@ def ack(self): ) ) - def drop(self): + def drop(self) -> None: """Release the message from lease management. This informs the policy to no longer hold on to the lease for this @@ -246,7 +264,7 @@ def drop(self): ) ) - def modify_ack_deadline(self, seconds): + def modify_ack_deadline(self, seconds: int) -> None: """Resets the deadline for acknowledgement. New deadline will be the given value of seconds from now. @@ -257,15 +275,16 @@ def modify_ack_deadline(self, seconds): :class:`~.pubsub_v1.subcriber._consumer.Consumer`. Args: - seconds (int): The number of seconds to set the lease deadline - to. This should be between 0 and 600. Due to network latency, - values below 10 are advised against. + seconds: + The number of seconds to set the lease deadline to. 
This should be + between 0 and 600. Due to network latency, values below 10 are advised + against. """ self._request_queue.put( requests.ModAckRequest(ack_id=self._ack_id, seconds=seconds) ) - def nack(self): + def nack(self) -> None: """Decline to acknowldge the given message. This will cause the message to be re-delivered to the subscription. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index b8f2b592cc73..3db7ed73e0ea 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -21,8 +21,13 @@ import abc import concurrent.futures import queue +import typing +from typing import Callable, List, Optional import warnings +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + class Scheduler(metaclass=abc.ABCMeta): """Abstract base class for schedulers. @@ -32,7 +37,7 @@ class Scheduler(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def queue(self): # pragma: NO COVER + def queue(self) -> queue.Queue: # pragma: NO COVER """Queue: A concurrency-safe queue specific to the underlying concurrency implementation. @@ -41,13 +46,13 @@ def queue(self): # pragma: NO COVER raise NotImplementedError @abc.abstractmethod - def schedule(self, callback, *args, **kwargs): # pragma: NO COVER + def schedule(self, callback: Callable, *args, **kwargs) -> None: # pragma: NO COVER """Schedule the callback to be called asynchronously. Args: - callback (Callable): The function to call. - args: Positional arguments passed to the function. - kwargs: Key-word arguments passed to the function. + callback: The function to call. + args: Positional arguments passed to the callback. + kwargs: Key-word arguments passed to the callback. 
Returns: None @@ -55,26 +60,27 @@ def schedule(self, callback, *args, **kwargs): # pragma: NO COVER raise NotImplementedError @abc.abstractmethod - def shutdown(self, await_msg_callbacks=False): # pragma: NO COVER + def shutdown( + self, await_msg_callbacks: bool = False + ) -> List["pubsub_v1.subscriber.message.Message"]: # pragma: NO COVER """Shuts down the scheduler and immediately end all pending callbacks. Args: - await_msg_callbacks (bool): + await_msg_callbacks: If ``True``, the method will block until all currently executing callbacks are done processing. If ``False`` (default), the method will not wait for the currently running callbacks to complete. Returns: - List[pubsub_v1.subscriber.message.Message]: - The messages submitted to the scheduler that were not yet dispatched - to their callbacks. - It is assumed that each message was submitted to the scheduler as the - first positional argument to the provided callback. + The messages submitted to the scheduler that were not yet dispatched + to their callbacks. + It is assumed that each message was submitted to the scheduler as the + first positional argument to the provided callback. """ raise NotImplementedError -def _make_default_thread_pool_executor(): +def _make_default_thread_pool_executor() -> concurrent.futures.ThreadPoolExecutor: return concurrent.futures.ThreadPoolExecutor( max_workers=10, thread_name_prefix="ThreadPoolExecutor-ThreadScheduler" ) @@ -87,11 +93,14 @@ class ThreadScheduler(Scheduler): This scheduler is useful in typical I/O-bound message processing. Args: - executor(concurrent.futures.ThreadPoolExecutor): An optional executor - to use. If not specified, a default one will be created. + executor: + An optional executor to use. If not specified, a default one + will be created. 
""" - def __init__(self, executor=None): + def __init__( + self, executor: Optional[concurrent.futures.ThreadPoolExecutor] = None + ): self._queue = queue.Queue() if executor is None: self._executor = _make_default_thread_pool_executor() @@ -104,13 +113,13 @@ def queue(self): and the scheduling thread.""" return self._queue - def schedule(self, callback, *args, **kwargs): + def schedule(self, callback: Callable, *args, **kwargs) -> None: """Schedule the callback to be called asynchronously in a thread pool. Args: - callback (Callable): The function to call. - args: Positional arguments passed to the function. - kwargs: Key-word arguments passed to the function. + callback: The function to call. + args: Positional arguments passed to the callback. + kwargs: Key-word arguments passed to the callback. Returns: None @@ -124,21 +133,22 @@ def schedule(self, callback, *args, **kwargs): stacklevel=2, ) - def shutdown(self, await_msg_callbacks=False): + def shutdown( + self, await_msg_callbacks: bool = False + ) -> List["pubsub_v1.subscriber.message.Message"]: """Shut down the scheduler and immediately end all pending callbacks. Args: - await_msg_callbacks (bool): + await_msg_callbacks: If ``True``, the method will block until all currently executing executor threads are done processing. If ``False`` (default), the method will not wait for the currently running threads to complete. Returns: - List[pubsub_v1.subscriber.message.Message]: - The messages submitted to the scheduler that were not yet dispatched - to their callbacks. - It is assumed that each message was submitted to the scheduler as the - first positional argument to the provided callback. + The messages submitted to the scheduler that were not yet dispatched + to their callbacks. + It is assumed that each message was submitted to the scheduler as the + first positional argument to the provided callback. 
""" dropped_messages = [] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index f8aa532a0e81..0558c2f1e542 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -18,6 +18,8 @@ import enum import inspect import sys +import typing +from typing import Dict, NamedTuple import proto @@ -37,34 +39,39 @@ from google.pubsub_v1.types import pubsub as pubsub_gapic_types +if typing.TYPE_CHECKING: # pragma: NO COVER + from types import ModuleType + from google.api_core import retry as retries + from google.pubsub_v1 import types as gapic_types + + # Define the default values for batching. # # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. -BatchSettings = collections.namedtuple( - "BatchSettings", ["max_bytes", "max_latency", "max_messages"] -) -BatchSettings.__new__.__defaults__ = ( # pytype: disable=attribute-error - 1 * 1000 * 1000, # max_bytes: 1 MB - 0.01, # max_latency: 10 ms - 100, # max_messages: 100 -) -BatchSettings.__doc__ = "The settings for batch publishing the messages." -BatchSettings.max_bytes.__doc__ = ( - "The maximum total size of the messages to collect before automatically " - "publishing the batch, including any byte size overhead of the publish " - "request itself. The maximum value is bound by the server-side limit of " - "10_000_000 bytes." -) -BatchSettings.max_latency.__doc__ = ( - "The maximum number of seconds to wait for additional messages before " - "automatically publishing the batch." -) -BatchSettings.max_messages.__doc__ = ( - "The maximum number of messages to collect before automatically " - "publishing the batch." 
-) +class BatchSettings(NamedTuple): + """The settings for batch publishing the messages.""" + + max_bytes: int = 1 * 1000 * 1000 # 1 MB + ( + "The maximum total size of the messages to collect before automatically " + "publishing the batch, including any byte size overhead of the publish " + "request itself. The maximum value is bound by the server-side limit of " + "10_000_000 bytes." + ) + + max_latency: float = 0.01 # 10 ms + ( + "The maximum number of seconds to wait for additional messages before " + "automatically publishing the batch." + ) + + max_messages: int = 100 + ( + "The maximum number of messages to collect before automatically " + "publishing the batch." + ) class LimitExceededBehavior(str, enum.Enum): @@ -75,105 +82,89 @@ class LimitExceededBehavior(str, enum.Enum): ERROR = "error" -PublishFlowControl = collections.namedtuple( - "PublishFlowControl", ["message_limit", "byte_limit", "limit_exceeded_behavior"] -) -# pytype: disable=attribute-error -PublishFlowControl.__new__.__defaults__ = ( - 10 * BatchSettings.__new__.__defaults__[2], # message limit - 10 * BatchSettings.__new__.__defaults__[0], # byte limit - LimitExceededBehavior.IGNORE, # desired behavior -) -# pytype: enable=attribute-error -PublishFlowControl.__doc__ = "The client flow control settings for message publishing." -PublishFlowControl.message_limit.__doc__ = ( - "The maximum number of messages awaiting to be published." -) -PublishFlowControl.byte_limit.__doc__ = ( - "The maximum total size of messages awaiting to be published." -) -PublishFlowControl.limit_exceeded_behavior.__doc__ = ( - "The action to take when publish flow control limits are exceeded." 
-) +class PublishFlowControl(NamedTuple): + """The client flow control settings for message publishing.""" + + message_limit: int = 10 * BatchSettings.__new__.__defaults__[2] + """The maximum number of messages awaiting to be published.""" + + byte_limit: int = 10 * BatchSettings.__new__.__defaults__[0] + """The maximum total size of messages awaiting to be published.""" + + limit_exceeded_behavior: LimitExceededBehavior = LimitExceededBehavior.IGNORE + """The action to take when publish flow control limits are exceeded.""" + # Define the default publisher options. # # This class is used when creating a publisher client to pass in options # to enable/disable features. -PublisherOptions = collections.namedtuple( - "PublisherOptions", ["enable_message_ordering", "flow_control", "retry", "timeout"] -) -PublisherOptions.__new__.__defaults__ = ( # pytype: disable=attribute-error - False, # enable_message_ordering: False - PublishFlowControl(), # default flow control settings - gapic_v1.method.DEFAULT, # use default api_core value for retry - gapic_v1.method.DEFAULT, # use default api_core value for timeout -) -PublisherOptions.__doc__ = "The options for the publisher client." -PublisherOptions.enable_message_ordering.__doc__ = ( - "Whether to order messages in a batch by a supplied ordering key." -) -PublisherOptions.flow_control.__doc__ = ( - "Flow control settings for message publishing by the client. By default " - "the publisher client does not do any throttling." -) -PublisherOptions.retry.__doc__ = ( - "Retry settings for message publishing by the client. This should be " - "an instance of :class:`google.api_core.retry.Retry`." -) -PublisherOptions.timeout.__doc__ = ( - "Timeout settings for message publishing by the client. It should be compatible " - "with :class:`~.pubsub_v1.types.TimeoutType`." 
-) +class PublisherOptions(NamedTuple): + """The options for the publisher client.""" + + enable_message_ordering: bool = False + """Whether to order messages in a batch by a supplied ordering key.""" + + flow_control: PublishFlowControl = PublishFlowControl() + ( + "Flow control settings for message publishing by the client. By default " + "the publisher client does not do any throttling." + ) + + retry: "retries.Retry" = gapic_v1.method.DEFAULT # use api_core default + ( + "Retry settings for message publishing by the client. This should be " + "an instance of :class:`google.api_core.retry.Retry`." + ) + + timeout: "gapic_types.TimeoutType" = gapic_v1.method.DEFAULT # use api_core default + ( + "Timeout settings for message publishing by the client. It should be " + "compatible with :class:`~.pubsub_v1.types.TimeoutType`." + ) + # Define the type class and default values for flow control settings. # # This class is used when creating a publisher or subscriber client, and # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. -FlowControl = collections.namedtuple( - "FlowControl", - [ - "max_bytes", - "max_messages", - "max_lease_duration", - "max_duration_per_lease_extension", - ], -) -FlowControl.__new__.__defaults__ = ( # pytype: disable=attribute-error - 100 * 1024 * 1024, # max_bytes: 100mb - 1000, # max_messages: 1000 - 1 * 60 * 60, # max_lease_duration: 1 hour. - 0, # max_duration_per_lease_extension: disabled -) -FlowControl.__doc__ = ( - "The settings for controlling the rate at which messages are pulled " - "with an asynchronous subscription." -) -FlowControl.max_bytes.__doc__ = ( - "The maximum total size of received - but not yet processed - messages " - "before pausing the message stream." -) -FlowControl.max_messages.__doc__ = ( - "The maximum number of received - but not yet processed - messages before " - "pausing the message stream." 
-) -FlowControl.max_lease_duration.__doc__ = ( - "The maximum amount of time in seconds to hold a lease on a message " - "before dropping it from the lease management." -) -FlowControl.max_duration_per_lease_extension.__doc__ = ( - "The max amount of time in seconds for a single lease extension attempt. " - "Bounds the delay before a message redelivery if the subscriber " - "fails to extend the deadline. Must be between 10 and 600 (inclusive). Ignored " - "if set to 0." -) +class FlowControl(NamedTuple): + """The settings for controlling the rate at which messages are pulled + with an asynchronous subscription. + """ + + max_bytes: int = 100 * 1024 * 1024 # 100 MiB + ( + "The maximum total size of received - but not yet processed - messages " + "before pausing the message stream." + ) + + max_messages: int = 1000 + ( + "The maximum number of received - but not yet processed - messages before " + "pausing the message stream." + ) + + max_lease_duration: float = 1 * 60 * 60 # 1 hour + ( + "The maximum amount of time in seconds to hold a lease on a message " + "before dropping it from the lease management." + ) + + max_duration_per_lease_extension: float = 0 # disabled by default + ( + "The max amount of time in seconds for a single lease extension attempt. " + "Bounds the delay before a message redelivery if the subscriber " + "fails to extend the deadline. Must be between 10 and 600 (inclusive). Ignored " + "if set to 0." + ) # The current api core helper does not find new proto messages of type proto.Message, # thus we need our own helper. Adjusted from # https://github.com/googleapis/python-api-core/blob/8595f620e7d8295b6a379d6fd7979af3bef717e2/google/api_core/protobuf_helpers.py#L101-L118 -def _get_protobuf_messages(module): +def _get_protobuf_messages(module: "ModuleType") -> Dict[str, proto.Message]: """Discover all protobuf Message classes in a given import module. 
Args: From 26000b75d31833eac59fba6bc8d0452c229d049f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 5 Oct 2021 11:22:43 -0600 Subject: [PATCH 0725/1197] build: use trampoline_v2 for python samples and allow custom dockerfile (#515) Source-Link: https://github.com/googleapis/synthtool/commit/a7ed11ec0863c422ba2e73aafa75eab22c32b33d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/lint/common.cfg | 2 +- .../.kokoro/samples/python3.6/common.cfg | 2 +- .../.kokoro/samples/python3.6/periodic.cfg | 2 +- .../.kokoro/samples/python3.7/common.cfg | 2 +- .../.kokoro/samples/python3.7/periodic.cfg | 2 +- .../.kokoro/samples/python3.8/common.cfg | 2 +- .../.kokoro/samples/python3.8/periodic.cfg | 2 +- .../.kokoro/samples/python3.9/common.cfg | 2 +- .../.kokoro/samples/python3.9/periodic.cfg | 2 +- .../.kokoro/test-samples-against-head.sh | 2 -- .../google-cloud-pubsub/.kokoro/test-samples.sh | 2 -- packages/google-cloud-pubsub/.trampolinerc | 17 ++++++++++++++--- 13 files changed, 24 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index ae6c57fad807..ee94722ab57b 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:82b12321da4446a73cb11bcb6812fbec8c105abda3946d46e6394e5fbfb64c0f + digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg index 7dcd141937cd..a0106ace3cb0 100644 --- 
a/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg @@ -31,4 +31,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg index 6b989042284b..068788e9e910 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg index e2cb0f168769..9156c5975a64 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg index cb7a71d5c8b5..5922bef077e7 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg index b858ccbd4246..a69739cce9a8 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg @@ -37,4 +37,4 @@ gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline.sh" \ No newline at end of file +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg index 50fec9649732..71cd1e597e38 100644 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg @@ -3,4 +3,4 @@ env_vars: { key: "INSTALL_LIBRARY_FROM_SOURCE" value: "False" -} \ No newline at end of file +} diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh index 18e4ef2133d1..ba3a707b040c 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh @@ -23,6 +23,4 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-pubsub - exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh index 9753da38773c..11c042d342d7 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -24,8 +24,6 @@ set -eo pipefail # Enables `**` to include files nested inside sub-folders shopt -s globstar -cd github/python-pubsub - # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then # preserving the test runner implementation. diff --git a/packages/google-cloud-pubsub/.trampolinerc b/packages/google-cloud-pubsub/.trampolinerc index 383b6ec89fbc..0eee72ab62aa 100644 --- a/packages/google-cloud-pubsub/.trampolinerc +++ b/packages/google-cloud-pubsub/.trampolinerc @@ -16,15 +16,26 @@ # Add required env vars here. 
required_envvars+=( - "STAGING_BUCKET" - "V2_STAGING_BUCKET" ) # Add env vars which are passed down into the container here. pass_down_envvars+=( + "NOX_SESSION" + ############### + # Docs builds + ############### "STAGING_BUCKET" "V2_STAGING_BUCKET" - "NOX_SESSION" + ################## + # Samples builds + ################## + "INSTALL_LIBRARY_FROM_SOURCE" + "RUN_TESTS_SESSION" + "BUILD_SPECIFIC_GCLOUD_PROJECT" + # Target directories. + "RUN_TESTS_DIRS" + # The nox session to run. + "RUN_TESTS_SESSION" ) # Prevent unintentional override on the default image. From fa8c8b355c11b04ba1144dd72e6a6688a7d603de Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Oct 2021 19:28:17 +0000 Subject: [PATCH 0726/1197] chore(python): Add kokoro configs for python 3.10 samples testing (#521) --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/samples/python3.10/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.10/continuous.cfg | 6 +++ .../samples/python3.10/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.10/periodic.cfg | 6 +++ .../.kokoro/samples/python3.10/presubmit.cfg | 6 +++ packages/google-cloud-pubsub/CONTRIBUTING.rst | 6 ++- packages/google-cloud-pubsub/noxfile.py | 2 +- .../samples/snippets/noxfile.py | 2 +- 9 files changed, 76 insertions(+), 5 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index ee94722ab57b..7d98291cc35f 100644 --- 
a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6e7328583be8edd3ba8f35311c76a1ecbc823010279ccb6ab46b7a76e25eafcc + digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg new file mode 100644 index 000000000000..363d8b0f758e --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.10" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-310" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" 
+} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 6970c2d59848..b749f1fb47d9 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. + 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.9 -- -k + $ nox -s unit-3.10 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ +- `Python 3.10`_ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ +.. _Python 3.10: https://docs.python.org/3.10/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index d36e59abf318..0be25350247a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -32,7 +32,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 1fd8956fbf01..93a9122cc457 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -87,7 +87,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From 48894226e05761b0f152d5ccca3e34863c0baa42 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 8 Oct 2021 17:06:34 -0400 Subject: [PATCH 0727/1197] ci: harden schema sample teardown against 500 InternalServiceError (#519) Closes #388. 
--- .../google-cloud-pubsub/samples/snippets/schema_test.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index c58958672399..1f74c5eb335a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -20,9 +20,12 @@ from _pytest.capture import CaptureFixture from flaky import flaky +from google.api_core.exceptions import InternalServerError from google.api_core.exceptions import NotFound from google.cloud import pubsub_v1 -from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient +from google.cloud.pubsub import PublisherClient +from google.cloud.pubsub import SchemaServiceClient +from google.cloud.pubsub import SubscriberClient from google.pubsub_v1.types import Encoding import pytest @@ -59,7 +62,7 @@ def avro_schema( try: schema_client.delete_schema(request={"name": avro_schema_path}) - except NotFound: + except (NotFound, InternalServerError): pass @@ -73,7 +76,7 @@ def proto_schema( try: schema_client.delete_schema(request={"name": proto_schema_path}) - except NotFound: + except (NotFound, InternalServerError): pass From b8d184d9a9edcde57683debe2bf3d30c41194b0a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 9 Oct 2021 15:06:11 +0000 Subject: [PATCH 0728/1197] feat: add context manager support in client (#516) - [ ] Regenerate this pull request now. 
chore: fix docstring for first attribute of protos committer: @busunkim96 PiperOrigin-RevId: 401271153 Source-Link: https://github.com/googleapis/googleapis/commit/787f8c9a731f44e74a90b9847d48659ca9462d10 Source-Link: https://github.com/googleapis/googleapis-gen/commit/81decffe9fc72396a8153e756d1d67a6eecfd620 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODFkZWNmZmU5ZmM3MjM5NmE4MTUzZTc1NmQxZDY3YTZlZWNmZDYyMCJ9 --- .../services/publisher/async_client.py | 6 +++ .../pubsub_v1/services/publisher/client.py | 18 +++++-- .../services/publisher/transports/base.py | 9 ++++ .../services/publisher/transports/grpc.py | 3 ++ .../publisher/transports/grpc_asyncio.py | 3 ++ .../services/schema_service/async_client.py | 6 +++ .../services/schema_service/client.py | 18 +++++-- .../schema_service/transports/base.py | 9 ++++ .../schema_service/transports/grpc.py | 3 ++ .../schema_service/transports/grpc_asyncio.py | 3 ++ .../services/subscriber/async_client.py | 6 +++ .../pubsub_v1/services/subscriber/client.py | 18 +++++-- .../services/subscriber/transports/base.py | 9 ++++ .../services/subscriber/transports/grpc.py | 3 ++ .../subscriber/transports/grpc_asyncio.py | 3 ++ .../google/pubsub_v1/types/pubsub.py | 6 ++- .../google/pubsub_v1/types/schema.py | 6 ++- .../unit/gapic/pubsub_v1/test_publisher.py | 50 +++++++++++++++++++ .../gapic/pubsub_v1/test_schema_service.py | 50 +++++++++++++++++++ .../unit/gapic/pubsub_v1/test_subscriber.py | 50 +++++++++++++++++++ 20 files changed, 263 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index df436e72116d..25753d0cf2b3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1183,6 +1183,12 @@ async def test_iam_permissions( # Done; return the 
response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index af0f1d8fecb6..7075e8fcd7ca 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -399,10 +399,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def create_topic( @@ -1064,6 +1061,19 @@ def detach_subscription( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index a19d43568577..b794d5965214 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -302,6 +302,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def create_topic( self, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 1dca72721a5c..fa4d2c4376b9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -559,5 +559,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def close(self): + self.grpc_channel.close() + __all__ = ("PublisherGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 38b81b4e2a97..7c896a252d22 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -569,5 +569,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def close(self): + 
return self.grpc_channel.close() + __all__ = ("PublisherGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index d1f56f8d6172..df10c2a8d7ab 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -871,6 +871,12 @@ async def test_iam_permissions( # Done; return the response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 7bd0bd30a4b0..66c63a803f29 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -342,10 +342,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def create_schema( @@ -785,6 +782,19 @@ def validate_message( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index e37b928814ab..bcc50f01186d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -181,6 +181,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def create_schema( self, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index a58a4df328e0..a3ca6e14b981 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -462,5 +462,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def close(self): + self.grpc_channel.close() + __all__ = ("SchemaServiceGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 41baacfa7894..8b9205f84abe 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -473,5 +473,8 @@ def test_iam_permissions( ) 
return self._stubs["test_iam_permissions"] + def close(self): + return self.grpc_channel.close() + __all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 502f6f158fa4..4e918daed032 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1924,6 +1924,12 @@ async def test_iam_permissions( # Done; return the response. return response + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 540b4b8aea48..e8bb6df6601a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -401,10 +401,7 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), + always_use_jwt_access=True, ) def create_subscription( @@ -1741,6 +1738,19 @@ def seek( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 96322b3a4519..c39d8fcc86d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -404,6 +404,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def create_subscription( self, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 604277eab8a0..944171cf7f23 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -817,5 +817,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def close(self): + self.grpc_channel.close() + __all__ = ("SubscriberGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 1b1704e1772c..ff822071f4b5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -830,5 +830,8 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def 
close(self): + return self.grpc_channel.close() + __all__ = ("SubscriberGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index a62b0217b352..913e845be75d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -438,7 +438,8 @@ class DetachSubscriptionRequest(proto.Message): class DetachSubscriptionResponse(proto.Message): r"""Response for the DetachSubscription method. Reserved for future use. - """ + + """ class Subscription(proto.Message): @@ -1312,7 +1313,8 @@ class SeekRequest(proto.Message): class SeekResponse(proto.Message): - r"""Response for the ``Seek`` method (this response is empty). """ + r"""Response for the ``Seek`` method (this response is empty). + """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index fec9f5250768..4f7777fbcdbe 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -195,7 +195,8 @@ class ValidateSchemaRequest(proto.Message): class ValidateSchemaResponse(proto.Message): - r"""Response for the ``ValidateSchema`` method. Empty for now. """ + r"""Response for the ``ValidateSchema`` method. Empty for now. + """ class ValidateMessageRequest(proto.Message): @@ -227,7 +228,8 @@ class ValidateMessageRequest(proto.Message): class ValidateMessageResponse(proto.Message): - r"""Response for the ``ValidateMessage`` method. Empty for now. """ + r"""Response for the ``ValidateMessage`` method. Empty for now. 
+ """ __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index db1ae85a3348..457c00640f37 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -2645,6 +2646,9 @@ def test_publisher_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_publisher_base_transport_with_credentials_file(): @@ -3626,3 +3630,49 @@ async def test_test_iam_permissions_from_dict_async(): } ) call.assert_called() + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] 
+ for transport in transports: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 91b58aff4ea3..a3002ca6cfca 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1897,6 +1898,9 @@ def test_schema_service_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_schema_service_base_transport_with_credentials_file(): @@ -2863,3 +2867,49 @@ async def test_test_iam_permissions_from_dict_async(): } ) call.assert_called() + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = 
SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 8242d636eada..9d760c68eb2b 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -30,6 +30,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -3921,6 +3922,9 @@ def test_subscriber_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + @requires_google_auth_gte_1_25_0 def test_subscriber_base_transport_with_credentials_file(): @@ -4902,3 +4906,49 @@ async def test_test_iam_permissions_from_dict_async(): } ) call.assert_called() + + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + ) + with mock.patch.object( + 
type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "grpc", + ] + for transport in transports: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() From e6db2e2c9732fa8a232a2cfa335d1367dab927e5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 9 Oct 2021 17:37:24 -0400 Subject: [PATCH 0729/1197] chore: add default_version and codeowner_team to .repo-metadata.json (#514) * chore: add default_version and codeowner_team to .repo-metadata.json * update default_version and codeowner_team --- .../google-cloud-pubsub/.repo-metadata.json | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index b21c198b1485..fa07857c21e6 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -1,13 +1,15 @@ { - "name": "pubsub", - "name_pretty": "Google Cloud Pub/Sub", - "product_documentation": "https://cloud.google.com/pubsub/docs/", - "client_documentation": "https://googleapis.dev/python/pubsub/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", - "release_level": "ga", 
- "language": "python", - "repo": "googleapis/python-pubsub", - "distribution_name": "google-cloud-pubsub", - "api_id": "pubsub.googleapis.com", - "requires_billing": true + "name": "pubsub", + "name_pretty": "Google Cloud Pub/Sub", + "product_documentation": "https://cloud.google.com/pubsub/docs/", + "client_documentation": "https://googleapis.dev/python/pubsub/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", + "release_level": "ga", + "language": "python", + "repo": "googleapis/python-pubsub", + "distribution_name": "google-cloud-pubsub", + "api_id": "pubsub.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/api-pubsub" } From 520917155a98e00b99ce715ba6fe11657afea902 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 12 Oct 2021 12:39:27 -0400 Subject: [PATCH 0730/1197] ci: harden subscriber samples against EC (#520) Closes #461. Closes #462. Closes #501. --- .../samples/snippets/subscriber_test.py | 220 ++++++++++-------- 1 file changed, 129 insertions(+), 91 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index efe89f82555f..574529e80c32 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -225,7 +225,7 @@ def eventually_consistent_test() -> None: eventually_consistent_test() -def test_create( +def test_create_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, capsys: CaptureFixture, @@ -357,39 +357,50 @@ def test_create_subscription_with_ordering( assert "enable_message_ordering: true" in out -def test_create_push( +def test_create_push_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, capsys: CaptureFixture, ) -> None: # The scope of `subscription_path` is limited to this function. 
- subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN - ) - try: - subscriber_client.delete_subscription( - request={"subscription": subscription_path} + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test() -> None: + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN ) - except NotFound: - pass + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass - subscriber.create_push_subscription(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) + subscriber.create_push_subscription(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) - out, _ = capsys.readouterr() - assert f"{subscription_admin}" in out + out, _ = capsys.readouterr() + assert f"{subscription_admin}" in out + eventually_consistent_test() -def test_update(subscription_admin: str, capsys: CaptureFixture) -> None: - subscriber.update_push_subscription( - PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT - ) - out, _ = capsys.readouterr() - assert "Subscription updated" in out - assert f"{subscription_admin}" in out +def test_update_push_suscription( + subscription_admin: str, + capsys: CaptureFixture, +) -> None: + @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + def eventually_consistent_test() -> None: + subscriber.update_push_subscription( + PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT + ) + + out, _ = capsys.readouterr() + assert "Subscription updated" in out + assert f"{subscription_admin}" in out + + eventually_consistent_test() -def test_delete( +def test_delete_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str ) -> None: subscriber.delete_subscription(PROJECT_ID, SUBSCRIPTION_ADMIN) @@ -410,14 +421,19 @@ def test_receive( subscription_async: str, capsys: CaptureFixture, ) -> None: - _publish_messages(publisher_client, topic) - 
subscriber.receive_messages(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + @backoff.on_exception(backoff.expo, Unknown, max_time=60) + def eventually_consistent_test() -> None: + _publish_messages(publisher_client, topic) - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out - assert "message" in out + subscriber.receive_messages(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "message" in out + + eventually_consistent_test() def test_receive_with_custom_attributes( @@ -427,17 +443,21 @@ def test_receive_with_custom_attributes( capsys: CaptureFixture, ) -> None: - _publish_messages(publisher_client, topic, origin="python-sample") + @backoff.on_exception(backoff.expo, Unknown, max_time=60) + def eventually_consistent_test() -> None: + _publish_messages(publisher_client, topic, origin="python-sample") - subscriber.receive_messages_with_custom_attributes( - PROJECT_ID, SUBSCRIPTION_ASYNC, 5 - ) + subscriber.receive_messages_with_custom_attributes( + PROJECT_ID, SUBSCRIPTION_ASYNC, 5 + ) - out, _ = capsys.readouterr() - assert subscription_async in out - assert "message" in out - assert "origin" in out - assert "python-sample" in out + out, _ = capsys.readouterr() + assert subscription_async in out + assert "message" in out + assert "origin" in out + assert "python-sample" in out + + eventually_consistent_test() def test_receive_with_flow_control( @@ -447,14 +467,18 @@ def test_receive_with_flow_control( capsys: CaptureFixture, ) -> None: - _publish_messages(publisher_client, topic) + @backoff.on_exception(backoff.expo, Unknown, max_time=300) + def eventually_consistent_test() -> None: + _publish_messages(publisher_client, topic) - subscriber.receive_messages_with_flow_control(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + subscriber.receive_messages_with_flow_control(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) - out, _ = capsys.readouterr() - assert "Listening" in 
out - assert subscription_async in out - assert "message" in out + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "message" in out + + eventually_consistent_test() def test_receive_with_blocking_shutdown( @@ -463,57 +487,67 @@ def test_receive_with_blocking_shutdown( subscription_async: str, capsys: CaptureFixture, ) -> None: - _publish_messages(publisher_client, topic, message_num=3) - subscriber.receive_messages_with_blocking_shutdown( - PROJECT_ID, SUBSCRIPTION_ASYNC, timeout=5.0 - ) + _received = re.compile(r".*received.*message.*", flags=re.IGNORECASE) + _done = re.compile(r".*done processing.*message.*", flags=re.IGNORECASE) + _canceled = re.compile(r".*streaming pull future canceled.*", flags=re.IGNORECASE) + _shut_down = re.compile(r".*done waiting.*stream shutdown.*", flags=re.IGNORECASE) - out, _ = capsys.readouterr() - out_lines = out.splitlines() - - msg_received_lines = [ - i - for i, line in enumerate(out_lines) - if re.search(r".*received.*message.*", line, flags=re.IGNORECASE) - ] - msg_done_lines = [ - i - for i, line in enumerate(out_lines) - if re.search(r".*done processing.*message.*", line, flags=re.IGNORECASE) - ] - stream_canceled_lines = [ - i - for i, line in enumerate(out_lines) - if re.search(r".*streaming pull future canceled.*", line, flags=re.IGNORECASE) - ] - shutdown_done_waiting_lines = [ - i - for i, line in enumerate(out_lines) - if re.search(r".*done waiting.*stream shutdown.*", line, flags=re.IGNORECASE) - ] + @backoff.on_exception(backoff.expo, Unknown, max_time=300) + def eventually_consistent_test() -> None: + _publish_messages(publisher_client, topic, message_num=3) - try: - assert "Listening" in out - assert subscription_async in out + subscriber.receive_messages_with_blocking_shutdown( + PROJECT_ID, SUBSCRIPTION_ASYNC, timeout=5.0 + ) + + out, _ = capsys.readouterr() + out_lines = out.splitlines() + + msg_received_lines = [ + i + for i, line in enumerate(out_lines) + if 
_received.search(line) + ] + msg_done_lines = [ + i + for i, line in enumerate(out_lines) + if _done.search(line) + ] + stream_canceled_lines = [ + i + for i, line in enumerate(out_lines) + if _canceled.search(line) + ] + shutdown_done_waiting_lines = [ + i + for i, line in enumerate(out_lines) + if _shut_down.search(line) + ] + + try: + assert "Listening" in out + assert subscription_async in out - assert len(stream_canceled_lines) == 1 - assert len(shutdown_done_waiting_lines) == 1 - assert len(msg_received_lines) == 3 - assert len(msg_done_lines) == 3 + assert len(stream_canceled_lines) == 1 + assert len(shutdown_done_waiting_lines) == 1 + assert len(msg_received_lines) == 3 + assert len(msg_done_lines) == 3 - # The stream should have been canceled *after* receiving messages, but before - # message processing was done. - assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] + # The stream should have been canceled *after* receiving messages, but before + # message processing was done. + assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] - # Yet, waiting on the stream shutdown should have completed *after* - # the processing of received messages has ended. - assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] - except AssertionError: # pragma: NO COVER - from pprint import pprint + # Yet, waiting on the stream shutdown should have completed *after* + # the processing of received messages has ended. + assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] + except AssertionError: # pragma: NO COVER + from pprint import pprint - pprint(out_lines) # To make possible flakiness debugging easier. - raise + pprint(out_lines) # To make possible flakiness debugging easier. 
+ raise + + eventually_consistent_test() def test_listen_for_errors( @@ -523,13 +557,17 @@ def test_listen_for_errors( capsys: CaptureFixture, ) -> None: - _publish_messages(publisher_client, topic) + @backoff.on_exception(backoff.expo, Unknown, max_time=60) + def eventually_consistent_test() -> None: + _publish_messages(publisher_client, topic) - subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) - out, _ = capsys.readouterr() - assert subscription_async in out - assert "threw an exception" in out + out, _ = capsys.readouterr() + assert subscription_async in out + assert "threw an exception" in out + + eventually_consistent_test() def test_receive_synchronously( From 9d6792f4bdc37dcf5b071e9bb8df49c54f6bd72c Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 14 Oct 2021 06:38:09 -0400 Subject: [PATCH 0731/1197] feat: add support for Python 3.10 (#518) --- packages/google-cloud-pubsub/owlbot.py | 1 + packages/google-cloud-pubsub/setup.py | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 0d870e0d7a55..3f8f5bd664d8 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -353,6 +353,7 @@ microgenerator=True, samples=True, cov_level=100, + unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], system_test_external_dependencies=["psutil"], ) s.move(templated_files, excludes=[".coveragerc", ".github/CODEOWNERS"]) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e15f06cf57a8..9749aea2e204 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -82,6 +82,8 @@ "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", 
"Operating System :: OS Independent", "Topic :: Internet", ], From a26ae077ac9aac68e0e13abfe8a48b60f5777600 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 28 Oct 2021 02:53:41 -0400 Subject: [PATCH 0732/1197] ci: fix IAM samples to avoid block on 'allUsers' (#529) Closes #528. --- packages/google-cloud-pubsub/samples/snippets/iam.py | 4 ++-- packages/google-cloud-pubsub/samples/snippets/iam_test.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index e94d1a9a176a..b638a53449d7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -81,7 +81,7 @@ def set_topic_policy(project_id: str, topic_id: str) -> None: policy = client.get_iam_policy(request={"resource": topic_path}) # Add all users as viewers. - policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) + policy.bindings.add(role="roles/pubsub.viewer", members=["domain:google.com"]) # Add a group as a publisher. policy.bindings.add( @@ -110,7 +110,7 @@ def set_subscription_policy(project_id: str, subscription_id: str) -> None: policy = client.get_iam_policy(request={"resource": subscription_path}) # Add all users as viewers. - policy.bindings.add(role="roles/pubsub.viewer", members=["allUsers"]) + policy.bindings.add(role="roles/pubsub.viewer", members=["domain:google.com"]) # Add a group as an editor. 
policy.bindings.add(role="roles/editor", members=["group:cloud-logs@google.com"]) diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index fcd57781d305..f5d0ef192c71 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -98,7 +98,7 @@ def test_set_topic_policy( iam.set_topic_policy(PROJECT_ID, TOPIC_ID) policy = publisher_client.get_iam_policy(request={"resource": topic_path}) assert "roles/pubsub.publisher" in str(policy) - assert "allUsers" in str(policy) + assert "domain:google.com" in str(policy) def test_set_subscription_policy( @@ -107,7 +107,7 @@ def test_set_subscription_policy( iam.set_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) policy = subscriber_client.get_iam_policy(request={"resource": subscription_path}) assert "roles/pubsub.viewer" in str(policy) - assert "allUsers" in str(policy) + assert "domain:google.com" in str(policy) def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture) -> None: From 22163d5947746f67d24bfc3fedafebc66a0b7773 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 28 Oct 2021 09:52:43 +0200 Subject: [PATCH 0733/1197] chore(deps): update dependency avro to v1.11.0 (#527) Co-authored-by: Peter Lamut --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index ae20e777529a..40343160b35c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.8.0 -avro==1.10.2 +avro==1.11.0 From 6bd10373549bf1e91e0ad2efa49a309a62a8359c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" 
<78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 1 Nov 2021 11:28:12 +0000 Subject: [PATCH 0734/1197] chore: use gapic-generator-python 0.53.4 (#530) - [ ] Regenerate this pull request now. docs: list oneofs in docstring fix(deps): require google-api-core >= 1.28.0 fix(deps): drop packaging dependency committer: busunkim96@ PiperOrigin-RevId: 406468269 Source-Link: https://github.com/googleapis/googleapis/commit/83d81b0c8fc22291a13398d6d77f02dc97a5b6f4 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2ff001fbacb9e77e71d734de5f955c05fdae8526 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmZmMDAxZmJhY2I5ZTc3ZTcxZDczNGRlNWY5NTVjMDVmZGFlODUyNiJ9 --- .../services/publisher/async_client.py | 64 +++++------ .../pubsub_v1/services/publisher/client.py | 26 ++--- .../services/publisher/transports/base.py | 35 +----- .../publisher/transports/grpc_asyncio.py | 1 - .../services/schema_service/async_client.py | 46 ++++---- .../services/schema_service/client.py | 20 ++-- .../schema_service/transports/base.py | 35 +----- .../schema_service/transports/grpc_asyncio.py | 1 - .../services/subscriber/async_client.py | 102 +++++++++--------- .../pubsub_v1/services/subscriber/client.py | 40 +++---- .../services/subscriber/transports/base.py | 35 +----- .../subscriber/transports/grpc_asyncio.py | 1 - .../google/pubsub_v1/types/pubsub.py | 12 +++ .../google/pubsub_v1/types/schema.py | 9 ++ packages/google-cloud-pubsub/setup.py | 3 +- .../testing/constraints-3.6.txt | 4 +- .../unit/gapic/pubsub_v1/test_publisher.py | 95 ++-------------- .../gapic/pubsub_v1/test_schema_service.py | 100 ++--------------- .../unit/gapic/pubsub_v1/test_subscriber.py | 95 ++-------------- 19 files changed, 208 insertions(+), 516 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 25753d0cf2b3..e835ae7b7716 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -19,7 +19,7 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -27,6 +27,8 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -168,10 +170,10 @@ def __init__( async def create_topic( self, - request: pubsub.Topic = None, + request: Union[pubsub.Topic, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: @@ -180,7 +182,7 @@ async def create_topic( (https://cloud.google.com/pubsub/docs/admin#resource_names). Args: - request (:class:`google.pubsub_v1.types.Topic`): + request (Union[google.pubsub_v1.types.Topic, dict]): The request object. A topic resource. name (:class:`str`): Required. The name of the topic. 
It must have the format @@ -254,9 +256,9 @@ async def create_topic( async def update_topic( self, - request: pubsub.UpdateTopicRequest = None, + request: Union[pubsub.UpdateTopicRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: @@ -264,7 +266,7 @@ async def update_topic( properties of a topic are not modifiable. Args: - request (:class:`google.pubsub_v1.types.UpdateTopicRequest`): + request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): The request object. Request for the UpdateTopic method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -313,11 +315,11 @@ async def update_topic( async def publish( self, - request: pubsub.PublishRequest = None, + request: Union[pubsub.PublishRequest, dict] = None, *, topic: str = None, messages: Sequence[pubsub.PubsubMessage] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: @@ -325,7 +327,7 @@ async def publish( the topic does not exist. Args: - request (:class:`google.pubsub_v1.types.PublishRequest`): + request (Union[google.pubsub_v1.types.PublishRequest, dict]): The request object. Request for the Publish method. topic (:class:`str`): Required. 
The messages in the request will be published @@ -407,17 +409,17 @@ async def publish( async def get_topic( self, - request: pubsub.GetTopicRequest = None, + request: Union[pubsub.GetTopicRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. Args: - request (:class:`google.pubsub_v1.types.GetTopicRequest`): + request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): The request object. Request for the GetTopic method. topic (:class:`str`): Required. The name of the topic to get. Format is @@ -487,17 +489,17 @@ async def get_topic( async def list_topics( self, - request: pubsub.ListTopicsRequest = None, + request: Union[pubsub.ListTopicsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. Args: - request (:class:`google.pubsub_v1.types.ListTopicsRequest`): + request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): The request object. Request for the `ListTopics` method. project (:class:`str`): Required. The name of the project in which to list @@ -577,10 +579,10 @@ async def list_topics( async def list_topic_subscriptions( self, - request: pubsub.ListTopicSubscriptionsRequest = None, + request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsAsyncPager: @@ -588,7 +590,7 @@ async def list_topic_subscriptions( topic. 
Args: - request (:class:`google.pubsub_v1.types.ListTopicSubscriptionsRequest`): + request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): The request object. Request for the `ListTopicSubscriptions` method. topic (:class:`str`): @@ -670,10 +672,10 @@ async def list_topic_subscriptions( async def list_topic_snapshots( self, - request: pubsub.ListTopicSnapshotsRequest = None, + request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsAsyncPager: @@ -685,7 +687,7 @@ async def list_topic_snapshots( in an existing subscription to the state captured by a snapshot. Args: - request (:class:`google.pubsub_v1.types.ListTopicSnapshotsRequest`): + request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): The request object. Request for the `ListTopicSnapshots` method. topic (:class:`str`): @@ -767,10 +769,10 @@ async def list_topic_snapshots( async def delete_topic( self, - request: pubsub.DeleteTopicRequest = None, + request: Union[pubsub.DeleteTopicRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -782,7 +784,7 @@ async def delete_topic( field is set to ``_deleted-topic_``. Args: - request (:class:`google.pubsub_v1.types.DeleteTopicRequest`): + request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): The request object. Request for the `DeleteTopic` method. 
topic (:class:`str`): @@ -846,9 +848,9 @@ async def delete_topic( async def detach_subscription( self, - request: pubsub.DetachSubscriptionRequest = None, + request: Union[pubsub.DetachSubscriptionRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: @@ -859,7 +861,7 @@ async def detach_subscription( will stop. Args: - request (:class:`google.pubsub_v1.types.DetachSubscriptionRequest`): + request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): The request object. Request for the DetachSubscription method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -913,7 +915,7 @@ async def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1022,7 +1024,7 @@ async def get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1132,7 +1134,7 @@ async def test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 
7075e8fcd7ca..604d58411b38 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -32,6 +32,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -407,7 +409,7 @@ def create_topic( request: Union[pubsub.Topic, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: @@ -484,7 +486,7 @@ def update_topic( self, request: Union[pubsub.UpdateTopicRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: @@ -538,7 +540,7 @@ def publish( *, topic: str = None, messages: Sequence[pubsub.PubsubMessage] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: @@ -617,7 +619,7 @@ def get_topic( request: Union[pubsub.GetTopicRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: @@ -687,7 +689,7 @@ def list_topics( request: Union[pubsub.ListTopicsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsPager: @@ -767,7 +769,7 @@ def list_topic_subscriptions( request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsPager: @@ -850,7 +852,7 @@ def list_topic_snapshots( request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsPager: @@ -937,7 +939,7 @@ def delete_topic( request: Union[pubsub.DeleteTopicRequest, dict] = None, *, topic: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1007,7 +1009,7 @@ def detach_subscription( self, request: Union[pubsub.DetachSubscriptionRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: @@ -1078,7 +1080,7 @@ def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1191,7 +1193,7 @@ def get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = 
gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1305,7 +1307,7 @@ def test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index b794d5965214..02740aa4e154 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -40,15 +39,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class PublisherTransport(abc.ABC): """Abstract transport class for Publisher.""" @@ -101,7 +91,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -134,29 +124,6 @@ def __init__( # Save the credentials. 
self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 7c896a252d22..67c59c79a468 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index df10c2a8d7ab..b77639f340d6 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -19,13 +19,15 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.pubsub_v1.services.schema_service import pagers @@ -166,19 +168,19 @@ def __init__( async def create_schema( self, - request: gp_schema.CreateSchemaRequest = None, + request: Union[gp_schema.CreateSchemaRequest, dict] = None, *, parent: str = None, schema: gp_schema.Schema = None, schema_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.Schema: r"""Creates a schema. Args: - request (:class:`google.pubsub_v1.types.CreateSchemaRequest`): + request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): The request object. Request for the CreateSchema method. parent (:class:`str`): Required. 
The name of the project in which to create the @@ -261,17 +263,17 @@ async def create_schema( async def get_schema( self, - request: schema.GetSchemaRequest = None, + request: Union[schema.GetSchemaRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.Schema: r"""Gets a schema. Args: - request (:class:`google.pubsub_v1.types.GetSchemaRequest`): + request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): The request object. Request for the GetSchema method. name (:class:`str`): Required. The name of the schema to get. Format is @@ -329,17 +331,17 @@ async def get_schema( async def list_schemas( self, - request: schema.ListSchemasRequest = None, + request: Union[schema.ListSchemasRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSchemasAsyncPager: r"""Lists schemas in a project. Args: - request (:class:`google.pubsub_v1.types.ListSchemasRequest`): + request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): The request object. Request for the `ListSchemas` method. parent (:class:`str`): @@ -408,17 +410,17 @@ async def list_schemas( async def delete_schema( self, - request: schema.DeleteSchemaRequest = None, + request: Union[schema.DeleteSchemaRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a schema. Args: - request (:class:`google.pubsub_v1.types.DeleteSchemaRequest`): + request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): The request object. Request for the `DeleteSchema` method. 
name (:class:`str`): @@ -472,18 +474,18 @@ async def delete_schema( async def validate_schema( self, - request: gp_schema.ValidateSchemaRequest = None, + request: Union[gp_schema.ValidateSchemaRequest, dict] = None, *, parent: str = None, schema: gp_schema.Schema = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. Args: - request (:class:`google.pubsub_v1.types.ValidateSchemaRequest`): + request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): The request object. Request for the `ValidateSchema` method. parent (:class:`str`): @@ -553,16 +555,16 @@ async def validate_schema( async def validate_message( self, - request: schema.ValidateMessageRequest = None, + request: Union[schema.ValidateMessageRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. Args: - request (:class:`google.pubsub_v1.types.ValidateMessageRequest`): + request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): The request object. Request for the `ValidateMessage` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -604,7 +606,7 @@ async def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -712,7 +714,7 @@ async def get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -821,7 +823,7 @@ async def test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 66c63a803f29..22efb1512abb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -30,6 +30,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.pubsub_v1.services.schema_service import pagers @@ -352,7 +354,7 @@ def create_schema( parent: str = None, schema: gp_schema.Schema = None, schema_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: 
Sequence[Tuple[str, str]] = (), ) -> gp_schema.Schema: @@ -445,7 +447,7 @@ def get_schema( request: Union[schema.GetSchemaRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.Schema: @@ -513,7 +515,7 @@ def list_schemas( request: Union[schema.ListSchemasRequest, dict] = None, *, parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSchemasPager: @@ -592,7 +594,7 @@ def delete_schema( request: Union[schema.DeleteSchemaRequest, dict] = None, *, name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -657,7 +659,7 @@ def validate_schema( *, parent: str = None, schema: gp_schema.Schema = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.ValidateSchemaResponse: @@ -736,7 +738,7 @@ def validate_message( self, request: Union[schema.ValidateMessageRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.ValidateMessageResponse: @@ -799,7 +801,7 @@ def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -907,7 +909,7 @@ def get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = 
gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1016,7 +1018,7 @@ def test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index bcc50f01186d..96e9f44c8981 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources import google.auth # type: ignore @@ -41,15 +40,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SchemaServiceTransport(abc.ABC): """Abstract transport class for SchemaService.""" @@ -102,7 +92,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -135,29 +125,6 @@ def __init__( # Save the credentials. 
self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 8b9205f84abe..120214bf0dd3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 4e918daed032..0a55f4bd5bbe 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -29,13 +29,15 @@ import warnings import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core.client_options import ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -179,13 +181,13 @@ def __init__( async def create_subscription( self, - request: pubsub.Subscription = None, + request: Union[pubsub.Subscription, dict] = None, *, name: str = None, topic: str = None, push_config: pubsub.PushConfig = None, ack_deadline_seconds: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: @@ -204,7 +206,7 @@ async def create_subscription( request. Args: - request (:class:`google.pubsub_v1.types.Subscription`): + request (Union[google.pubsub_v1.types.Subscription, dict]): The request object. A subscription resource. name (:class:`str`): Required. The name of the subscription. 
It must have the @@ -334,17 +336,17 @@ async def create_subscription( async def get_subscription( self, - request: pubsub.GetSubscriptionRequest = None, + request: Union[pubsub.GetSubscriptionRequest, dict] = None, *, subscription: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. Args: - request (:class:`google.pubsub_v1.types.GetSubscriptionRequest`): + request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): The request object. Request for the GetSubscription method. subscription (:class:`str`): @@ -416,9 +418,9 @@ async def get_subscription( async def update_subscription( self, - request: pubsub.UpdateSubscriptionRequest = None, + request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: @@ -427,7 +429,7 @@ async def update_subscription( modifiable. Args: - request (:class:`google.pubsub_v1.types.UpdateSubscriptionRequest`): + request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): The request object. Request for the UpdateSubscription method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -476,17 +478,17 @@ async def update_subscription( async def list_subscriptions( self, - request: pubsub.ListSubscriptionsRequest = None, + request: Union[pubsub.ListSubscriptionsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSubscriptionsAsyncPager: r"""Lists matching subscriptions. 
Args: - request (:class:`google.pubsub_v1.types.ListSubscriptionsRequest`): + request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): The request object. Request for the `ListSubscriptions` method. project (:class:`str`): @@ -566,10 +568,10 @@ async def list_subscriptions( async def delete_subscription( self, - request: pubsub.DeleteSubscriptionRequest = None, + request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, *, subscription: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -581,7 +583,7 @@ async def delete_subscription( topic unless the same topic is specified. Args: - request (:class:`google.pubsub_v1.types.DeleteSubscriptionRequest`): + request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): The request object. Request for the DeleteSubscription method. subscription (:class:`str`): @@ -646,12 +648,12 @@ async def delete_subscription( async def modify_ack_deadline( self, - request: pubsub.ModifyAckDeadlineRequest = None, + request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, *, subscription: str = None, ack_ids: Sequence[str] = None, ack_deadline_seconds: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -663,7 +665,7 @@ async def modify_ack_deadline( used for subsequent messages. Args: - request (:class:`google.pubsub_v1.types.ModifyAckDeadlineRequest`): + request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): The request object. Request for the ModifyAckDeadline method. 
subscription (:class:`str`): @@ -753,11 +755,11 @@ async def modify_ack_deadline( async def acknowledge( self, - request: pubsub.AcknowledgeRequest = None, + request: Union[pubsub.AcknowledgeRequest, dict] = None, *, subscription: str = None, ack_ids: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -771,7 +773,7 @@ async def acknowledge( error. Args: - request (:class:`google.pubsub_v1.types.AcknowledgeRequest`): + request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): The request object. Request for the Acknowledge method. subscription (:class:`str`): Required. The subscription whose message is being @@ -846,12 +848,12 @@ async def acknowledge( async def pull( self, - request: pubsub.PullRequest = None, + request: Union[pubsub.PullRequest, dict] = None, *, subscription: str = None, return_immediately: bool = None, max_messages: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: @@ -860,7 +862,7 @@ async def pull( pending for the given subscription. Args: - request (:class:`google.pubsub_v1.types.PullRequest`): + request (Union[google.pubsub_v1.types.PullRequest, dict]): The request object. Request for the `Pull` method. subscription (:class:`str`): Required. 
The subscription from which messages should be @@ -968,7 +970,7 @@ def streaming_pull( self, requests: AsyncIterator[pubsub.StreamingPullRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: @@ -1030,11 +1032,11 @@ def streaming_pull( async def modify_push_config( self, - request: pubsub.ModifyPushConfigRequest = None, + request: Union[pubsub.ModifyPushConfigRequest, dict] = None, *, subscription: str = None, push_config: pubsub.PushConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1047,7 +1049,7 @@ async def modify_push_config( call regardless of changes to the ``PushConfig``. Args: - request (:class:`google.pubsub_v1.types.ModifyPushConfigRequest`): + request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): The request object. Request for the ModifyPushConfig method. subscription (:class:`str`): @@ -1126,10 +1128,10 @@ async def modify_push_config( async def get_snapshot( self, - request: pubsub.GetSnapshotRequest = None, + request: Union[pubsub.GetSnapshotRequest, dict] = None, *, snapshot: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: @@ -1142,7 +1144,7 @@ async def get_snapshot( subscription to the state captured by a snapshot. Args: - request (:class:`google.pubsub_v1.types.GetSnapshotRequest`): + request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): The request object. Request for the GetSnapshot method. snapshot (:class:`str`): Required. The name of the snapshot to get. 
Format is @@ -1217,10 +1219,10 @@ async def get_snapshot( async def list_snapshots( self, - request: pubsub.ListSnapshotsRequest = None, + request: Union[pubsub.ListSnapshotsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSnapshotsAsyncPager: @@ -1231,7 +1233,7 @@ async def list_snapshots( in an existing subscription to the state captured by a snapshot. Args: - request (:class:`google.pubsub_v1.types.ListSnapshotsRequest`): + request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): The request object. Request for the `ListSnapshots` method. project (:class:`str`): @@ -1311,11 +1313,11 @@ async def list_snapshots( async def create_snapshot( self, - request: pubsub.CreateSnapshotRequest = None, + request: Union[pubsub.CreateSnapshotRequest, dict] = None, *, name: str = None, subscription: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: @@ -1340,7 +1342,7 @@ async def create_snapshot( request. Args: - request (:class:`google.pubsub_v1.types.CreateSnapshotRequest`): + request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): The request object. Request for the `CreateSnapshot` method. name (:class:`str`): @@ -1436,9 +1438,9 @@ async def create_snapshot( async def update_snapshot( self, - request: pubsub.UpdateSnapshotRequest = None, + request: Union[pubsub.UpdateSnapshotRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: @@ -1451,7 +1453,7 @@ async def update_snapshot( snapshot. 
Args: - request (:class:`google.pubsub_v1.types.UpdateSnapshotRequest`): + request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): The request object. Request for the UpdateSnapshot method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1506,10 +1508,10 @@ async def update_snapshot( async def delete_snapshot( self, - request: pubsub.DeleteSnapshotRequest = None, + request: Union[pubsub.DeleteSnapshotRequest, dict] = None, *, snapshot: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1525,7 +1527,7 @@ async def delete_snapshot( the same subscription is specified. Args: - request (:class:`google.pubsub_v1.types.DeleteSnapshotRequest`): + request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): The request object. Request for the `DeleteSnapshot` method. snapshot (:class:`str`): @@ -1588,9 +1590,9 @@ async def delete_snapshot( async def seek( self, - request: pubsub.SeekRequest = None, + request: Union[pubsub.SeekRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.SeekResponse: @@ -1605,7 +1607,7 @@ async def seek( same topic. Args: - request (:class:`google.pubsub_v1.types.SeekRequest`): + request (Union[google.pubsub_v1.types.SeekRequest, dict]): The request object. Request for the `Seek` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -1657,7 +1659,7 @@ async def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1765,7 +1767,7 @@ async def get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1874,7 +1876,7 @@ async def test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index e8bb6df6601a..c6f9b2d9f802 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -32,6 +32,8 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +OptionalRetry = Union[retries.Retry, object] + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -412,7 +414,7 @@ def create_subscription( topic: str = None, push_config: pubsub.PushConfig = None, ack_deadline_seconds: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: @@ -554,7 
+556,7 @@ def get_subscription( request: Union[pubsub.GetSubscriptionRequest, dict] = None, *, subscription: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: @@ -625,7 +627,7 @@ def update_subscription( self, request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: @@ -679,7 +681,7 @@ def list_subscriptions( request: Union[pubsub.ListSubscriptionsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSubscriptionsPager: @@ -759,7 +761,7 @@ def delete_subscription( request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, *, subscription: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -833,7 +835,7 @@ def modify_ack_deadline( subscription: str = None, ack_ids: Sequence[str] = None, ack_deadline_seconds: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -931,7 +933,7 @@ def acknowledge( *, subscription: str = None, ack_ids: Sequence[str] = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1017,7 +1019,7 @@ def pull( subscription: str = None, return_immediately: bool = None, max_messages: int = None, - retry: retries.Retry = 
gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: @@ -1124,7 +1126,7 @@ def streaming_pull( self, requests: Iterator[pubsub.StreamingPullRequest] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[pubsub.StreamingPullResponse]: @@ -1179,7 +1181,7 @@ def modify_push_config( *, subscription: str = None, push_config: pubsub.PushConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1266,7 +1268,7 @@ def get_snapshot( request: Union[pubsub.GetSnapshotRequest, dict] = None, *, snapshot: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: @@ -1347,7 +1349,7 @@ def list_snapshots( request: Union[pubsub.ListSnapshotsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSnapshotsPager: @@ -1432,7 +1434,7 @@ def create_snapshot( *, name: str = None, subscription: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: @@ -1547,7 +1549,7 @@ def update_snapshot( self, request: Union[pubsub.UpdateSnapshotRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: @@ -1611,7 +1613,7 @@ 
def delete_snapshot( request: Union[pubsub.DeleteSnapshotRequest, dict] = None, *, snapshot: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: @@ -1684,7 +1686,7 @@ def seek( self, request: Union[pubsub.SeekRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.SeekResponse: @@ -1755,7 +1757,7 @@ def set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1867,7 +1869,7 @@ def get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: @@ -1980,7 +1982,7 @@ def test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index c39d8fcc86d6..57b671400699 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -15,7 +15,6 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import 
packaging.version import pkg_resources import google.auth # type: ignore @@ -40,15 +39,6 @@ except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SubscriberTransport(abc.ABC): """Abstract transport class for Subscriber.""" @@ -101,7 +91,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -134,29 +124,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index ff822071f4b5..5d5149b8b4b8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -20,7 +20,6 @@ from google.api_core import grpc_helpers_async # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version import grpc # type: ignore from grpc.experimental import aio # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 913e845be75d..1a8e8b00fffb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -692,6 +692,8 @@ class ExpirationPolicy(proto.Message): class PushConfig(proto.Message): r"""Configuration for a push delivery endpoint. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: push_endpoint (str): A URL locating the endpoint to which messages should be @@ -731,6 +733,7 @@ class PushConfig(proto.Message): If specified, Pub/Sub will generate and attach an OIDC JWT token as an ``Authorization`` header in the HTTP request for every pushed message. + This field is a member of `oneof`_ ``authentication_method``. """ class OidcToken(proto.Message): @@ -1282,6 +1285,13 @@ class DeleteSnapshotRequest(proto.Message): class SeekRequest(proto.Message): r"""Request for the ``Seek`` method. + This message has `oneof`_ fields (mutually exclusive fields). 
+ For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: subscription (str): Required. The subscription to affect. @@ -1299,10 +1309,12 @@ class SeekRequest(proto.Message): subscription creation time), only retained messages will be marked as unacknowledged, and already-expunged messages will not be restored. + This field is a member of `oneof`_ ``target``. snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as that of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. + This field is a member of `oneof`_ ``target``. """ subscription = proto.Field(proto.STRING, number=1,) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 4f7777fbcdbe..6d1821ecdba9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -202,6 +202,13 @@ class ValidateSchemaResponse(proto.Message): class ValidateMessageRequest(proto.Message): r"""Request for the ``ValidateMessage`` method. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: parent (str): Required. The name of the project in which to validate @@ -210,8 +217,10 @@ class ValidateMessageRequest(proto.Message): Name of the schema against which to validate. Format is ``projects/{project}/schemas/{schema}``. + This field is a member of `oneof`_ ``schema_spec``. 
schema (google.pubsub_v1.types.Schema): Ad-hoc schema against which to validate + This field is a member of `oneof`_ ``schema_spec``. message (bytes): Message to validate against the provided ``schema_spec``. encoding (google.pubsub_v1.types.Encoding): diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9749aea2e204..4fedbb7f894c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -33,11 +33,10 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.26.0, <3.0.0dev", + "google-api-core[grpc] >= 1.28.0, <3.0.0dev", "libcst >= 0.3.10", "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", - "packaging >= 14.3", ] extras = {} diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt index 73677dc23c98..b89267633dee 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.6.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.6.txt @@ -5,9 +5,7 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 grpcio==1.38.1 -google-api-core==1.26.0 +google-api-core==1.28.0 libcst==0.3.10 proto-plus==1.7.1 grpc-google-iam-v1==0.12.3 -packaging==14.3 -google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 is transitively required through google-api-core diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 457c00640f37..58291d280f46 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental 
import aio @@ -43,26 +42,11 @@ from google.pubsub_v1.services.publisher import PublisherClient from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.services.publisher import transports -from google.pubsub_v1.services.publisher.transports.base import _GOOGLE_AUTH_VERSION from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import schema import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -206,7 +190,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,7 +207,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -240,7 +224,7 @@ 
def test_publisher_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -269,7 +253,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -326,7 +310,7 @@ def test_publisher_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -368,7 +352,7 @@ def test_publisher_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -390,7 +374,7 @@ def test_publisher_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,7 +405,7 @@ def test_publisher_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = 
client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -452,7 +436,7 @@ def test_publisher_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -2650,7 +2634,6 @@ def test_publisher_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def test_publisher_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2674,29 +2657,6 @@ def test_publisher_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_publisher_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PublisherTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) - - def test_publisher_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2708,7 +2668,6 @@ def test_publisher_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_publisher_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2724,26 +2683,10 @@ def test_publisher_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_publisher_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PublisherClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], ) -@requires_google_auth_gte_1_25_0 def test_publisher_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2760,26 +2703,6 @@ def test_publisher_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], -) -@requires_google_auth_lt_1_25_0 -def test_publisher_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index a3002ca6cfca..0091244406fe 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -40,28 +39,11 @@ from google.pubsub_v1.services.schema_service import SchemaServiceClient from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.services.schema_service import transports -from google.pubsub_v1.services.schema_service.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -218,7 +200,7 @@ def test_schema_service_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,7 +217,7 @@ def test_schema_service_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -252,7 +234,7 @@ def test_schema_service_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -281,7 +263,7 @@ def test_schema_service_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -340,7 +322,7 @@ def test_schema_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -382,7 +364,7 @@ def test_schema_service_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -404,7 +386,7 @@ def test_schema_service_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -435,7 +417,7 @@ def test_schema_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -466,7 +448,7 @@ def test_schema_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -1902,7 +1884,6 @@ def test_schema_service_base_transport(): transport.close() 
-@requires_google_auth_gte_1_25_0 def test_schema_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1926,29 +1907,6 @@ def test_schema_service_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_schema_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SchemaServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) - - def test_schema_service_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1960,7 +1918,6 @@ def test_schema_service_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_schema_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1976,21 +1933,6 @@ def test_schema_service_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_schema_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SchemaServiceClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [ @@ -1998,7 +1940,6 @@ def test_schema_service_auth_adc_old_google_auth(): transports.SchemaServiceGrpcAsyncIOTransport, ], ) -@requires_google_auth_gte_1_25_0 def test_schema_service_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -2015,29 +1956,6 @@ def test_schema_service_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.SchemaServiceGrpcTransport, - transports.SchemaServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_schema_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 9d760c68eb2b..78ed2e926207 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -16,7 +16,6 @@ import os import mock import warnings -import packaging.version import grpc from grpc.experimental import aio @@ -44,25 +43,10 @@ from google.pubsub_v1.services.subscriber import SubscriberClient from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.services.subscriber import transports -from google.pubsub_v1.services.subscriber.transports.base import _GOOGLE_AUTH_VERSION from google.pubsub_v1.types import pubsub import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_subscriber_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -227,7 +211,7 @@ def test_subscriber_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -244,7 +228,7 @@ def test_subscriber_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,7 +257,7 @@ def test_subscriber_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) 
patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -330,7 +314,7 @@ def test_subscriber_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -372,7 +356,7 @@ def test_subscriber_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -394,7 +378,7 @@ def test_subscriber_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -425,7 +409,7 @@ def test_subscriber_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -456,7 +440,7 @@ def test_subscriber_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -3926,7 +3910,6 @@ def test_subscriber_base_transport(): transport.close() -@requires_google_auth_gte_1_25_0 def 
test_subscriber_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -3950,29 +3933,6 @@ def test_subscriber_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_subscriber_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SubscriberTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) - - def test_subscriber_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -3984,7 +3944,6 @@ def test_subscriber_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_subscriber_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -4000,26 +3959,10 @@ def test_subscriber_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_subscriber_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SubscriberClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id=None, - ) - - @pytest.mark.parametrize( "transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], ) -@requires_google_auth_gte_1_25_0 def test_subscriber_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use # ADC credentials. @@ -4036,26 +3979,6 @@ def test_subscriber_transport_auth_adc(transport_class): ) -@pytest.mark.parametrize( - "transport_class", - [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], -) -@requires_google_auth_lt_1_25_0 -def test_subscriber_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) - - @pytest.mark.parametrize( "transport_class,grpc_helpers", [ From 8e8b2efab29b84a98aa90a0f407cefba9823de37 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 4 Nov 2021 08:44:47 -0400 Subject: [PATCH 0735/1197] chore: skip reporting coverage for `google/__init__.py` (#526) * chore(python): omit google/__init__.py in coverage Source-Link: https://github.com/googleapis/synthtool/commit/694118b039b09551fb5d445fceb361a7dbb06400 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 * omit google/cloud/__init__.py * add replacement in owlbot.py to revert changes to noxfile.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.kokoro/docs/common.cfg | 1 + packages/google-cloud-pubsub/owlbot.py | 4 +++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 7d98291cc35f..cb89b2e326b7 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:58f73ba196b5414782605236dd0712a73541b44ff2ff4d3a36ec41092dd6fa5b + digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 diff --git a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg 
b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg index b81c7b901136..63ce88a8292c 100644 --- a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg @@ -30,6 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" + # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` value: "docs-staging-v2" } diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 3f8f5bd664d8..725d833338ff 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -417,6 +417,8 @@ def pytype(session): session.run("pytype")''' ), ) - +s.replace( + "noxfile.py", "--cov=google", "--cov=google/cloud", +) s.shell.run(["nox", "-s", "blacken"], hide_output=False) From cb88cbf33403ea7f129a6fd0937ec878d25c5203 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 17:55:37 -0500 Subject: [PATCH 0736/1197] chore: use gapic-generator-python 0.56.2 (#532) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 13 +- .../pubsub_v1/services/publisher/client.py | 25 ++- .../services/publisher/transports/base.py | 8 +- .../services/publisher/transports/grpc.py | 4 +- .../publisher/transports/grpc_asyncio.py | 4 +- .../services/schema_service/async_client.py | 13 +- 
.../services/schema_service/client.py | 25 ++- .../schema_service/transports/base.py | 8 +- .../schema_service/transports/grpc.py | 4 +- .../schema_service/transports/grpc_asyncio.py | 4 +- .../services/subscriber/async_client.py | 13 +- .../pubsub_v1/services/subscriber/client.py | 25 ++- .../services/subscriber/transports/base.py | 8 +- .../services/subscriber/transports/grpc.py | 4 +- .../subscriber/transports/grpc_asyncio.py | 4 +- .../google/pubsub_v1/types/pubsub.py | 3 + .../google/pubsub_v1/types/schema.py | 2 + .../unit/gapic/pubsub_v1/test_publisher.py | 64 ++++-- .../gapic/pubsub_v1/test_schema_service.py | 64 ++++-- .../unit/gapic/pubsub_v1/test_subscriber.py | 184 +++++++++++++----- 20 files changed, 332 insertions(+), 147 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index e835ae7b7716..7165061d2df6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -19,15 +19,18 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.api_core import timeout as timeouts # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, 
gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 604d58411b38..51cc2d1add7d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -14,17 +14,16 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import functools import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.api_core import timeout as timeouts # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore @@ -32,7 +31,10 @@ from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: 
ignore @@ -329,8 +331,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 02740aa4e154..bdb55ab7b2b8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index fa4d2c4376b9..2507d97423d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -16,8 +16,8 
@@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 67c59c79a468..26677ad12845 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index b77639f340d6..64f65ae07941 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from 
google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 22efb1512abb..ccb958dd89f9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -281,8 +283,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 96e9f44c8981..16d98a8c45d1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index a3ca6e14b981..7313ec2dd43d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from 
google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 120214bf0dd3..a542e066d8e9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 0a55f4bd5bbe..3754f151a443 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -29,14 +29,17 @@ import warnings import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from 
google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index c6f9b2d9f802..308da6f74cd3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import functools import os import re @@ -22,17 +21,20 @@ import warnings import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -331,8 +333,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 57b671400699..6d90cef94478 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 944171cf7f23..f0472bdd0f93 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: 
ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 5d5149b8b4b8..9a4b4522402b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 1a8e8b00fffb..de29dcd1016a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -733,6 +733,7 @@ class PushConfig(proto.Message): If specified, Pub/Sub will generate and attach an OIDC JWT token as an ``Authorization`` header in the HTTP request for every pushed message. + This field is a member of `oneof`_ ``authentication_method``. """ @@ -1309,11 +1310,13 @@ class SeekRequest(proto.Message): subscription creation time), only retained messages will be marked as unacknowledged, and already-expunged messages will not be restored. 
+ This field is a member of `oneof`_ ``target``. snapshot (str): The snapshot to seek to. The snapshot's topic must be the same as that of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. + This field is a member of `oneof`_ ``target``. """ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 6d1821ecdba9..3e389af46e38 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -217,9 +217,11 @@ class ValidateMessageRequest(proto.Message): Name of the schema against which to validate. Format is ``projects/{project}/schemas/{schema}``. + This field is a member of `oneof`_ ``schema_spec``. schema (google.pubsub_v1.types.Schema): Ad-hoc schema against which to validate + This field is a member of `oneof`_ ``schema_spec``. message (bytes): Message to validate against the provided ``schema_spec``. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 58291d280f46..5e2afb7e2272 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -620,7 +620,9 @@ def test_create_topic_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_create_topic_flattened_error(): @@ -652,7 +654,9 @@ async def test_create_topic_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -952,8 +956,12 @@ def test_publish_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" - assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val + arg = args[0].messages + mock_val = [pubsub.PubsubMessage(data=b"data_blob")] + assert arg == mock_val def test_publish_flattened_error(): @@ -991,8 +999,12 @@ async def test_publish_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" - assert args[0].messages == [pubsub.PubsubMessage(data=b"data_blob")] + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val + arg = args[0].messages + mock_val = [pubsub.PubsubMessage(data=b"data_blob")] + assert arg == mock_val @pytest.mark.asyncio @@ -1162,7 +1174,9 @@ def test_get_topic_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val def test_get_topic_flattened_error(): @@ -1194,7 +1208,9 @@ async def test_get_topic_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1356,7 +1372,9 @@ def test_list_topics_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val def test_list_topics_flattened_error(): @@ -1390,7 +1408,9 @@ async def test_list_topics_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1678,7 +1698,9 @@ def test_list_topic_subscriptions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val def test_list_topic_subscriptions_flattened_error(): @@ -1714,7 +1736,9 @@ async def test_list_topic_subscriptions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2020,7 +2044,9 @@ def test_list_topic_snapshots_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val def test_list_topic_snapshots_flattened_error(): @@ -2056,7 +2082,9 @@ async def test_list_topic_snapshots_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2328,7 +2356,9 @@ def test_delete_topic_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val def test_delete_topic_flattened_error(): @@ -2360,7 +2390,9 @@ async def test_delete_topic_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].topic == "topic_value" + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 0091244406fe..226c5f818161 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -644,9 +644,15 @@ def test_create_schema_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") - assert args[0].schema_id == "schema_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name="name_value") + assert arg == mock_val + arg = args[0].schema_id + mock_val = "schema_id_value" + assert arg == mock_val def test_create_schema_flattened_error(): @@ -687,9 +693,15 @@ async def test_create_schema_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") - assert args[0].schema_id == "schema_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name="name_value") + assert arg == mock_val + arg = args[0].schema_id + mock_val = "schema_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -866,7 +878,9 @@ def test_get_schema_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_schema_flattened_error(): @@ -900,7 +914,9 @@ async def test_get_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1066,7 +1082,9 @@ def test_list_schemas_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_schemas_flattened_error(): @@ -1102,7 +1120,9 @@ async def test_list_schemas_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1376,7 +1396,9 @@ def test_delete_schema_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_schema_flattened_error(): @@ -1410,7 +1432,9 @@ async def test_delete_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1576,8 +1600,12 @@ def test_validate_schema_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name="name_value") + assert arg == mock_val def test_validate_schema_flattened_error(): @@ -1617,8 +1645,12 @@ async def test_validate_schema_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].schema == gp_schema.Schema(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name="name_value") + assert arg == mock_val @pytest.mark.asyncio diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 78ed2e926207..c5ed946a67ac 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -659,12 +659,18 @@ def test_create_subscription_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].topic == "topic_value" - assert args[0].push_config == pubsub.PushConfig( - push_endpoint="push_endpoint_value" - ) - assert args[0].ack_deadline_seconds == 2066 + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value") + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val def test_create_subscription_flattened_error(): @@ -707,12 +713,18 @@ async def test_create_subscription_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].topic == "topic_value" - assert args[0].push_config == pubsub.PushConfig( - push_endpoint="push_endpoint_value" - ) - assert args[0].ack_deadline_seconds == 2066 + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].topic + mock_val = "topic_value" + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value") + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val @pytest.mark.asyncio @@ -908,7 +920,9 @@ def test_get_subscription_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val def test_get_subscription_flattened_error(): @@ -940,7 +954,9 @@ async def test_get_subscription_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1290,7 +1306,9 @@ def test_list_subscriptions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val def test_list_subscriptions_flattened_error(): @@ -1326,7 +1344,9 @@ async def test_list_subscriptions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1644,7 +1664,9 @@ def test_delete_subscription_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val def test_delete_subscription_flattened_error(): @@ -1678,7 +1700,9 @@ async def test_delete_subscription_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1854,9 +1878,15 @@ def test_modify_ack_deadline_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] - assert args[0].ack_deadline_seconds == 2066 + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ["ack_ids_value"] + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val def test_modify_ack_deadline_flattened_error(): @@ -1897,9 +1927,15 @@ async def test_modify_ack_deadline_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] - assert args[0].ack_deadline_seconds == 2066 + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ["ack_ids_value"] + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val @pytest.mark.asyncio @@ -2062,8 +2098,12 @@ def test_acknowledge_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ["ack_ids_value"] + assert arg == mock_val def test_acknowledge_flattened_error(): @@ -2099,8 +2139,12 @@ async def test_acknowledge_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].ack_ids == ["ack_ids_value"] + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ["ack_ids_value"] + assert arg == mock_val @pytest.mark.asyncio @@ -2266,9 +2310,15 @@ def test_pull_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].return_immediately == True - assert args[0].max_messages == 1277 + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].return_immediately + mock_val = True + assert arg == mock_val + arg = args[0].max_messages + mock_val = 1277 + assert arg == mock_val def test_pull_flattened_error(): @@ -2309,9 +2359,15 @@ async def test_pull_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].return_immediately == True - assert args[0].max_messages == 1277 + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].return_immediately + mock_val = True + assert arg == mock_val + arg = args[0].max_messages + mock_val = 1277 + assert arg == mock_val @pytest.mark.asyncio @@ -2558,10 +2614,12 @@ def test_modify_push_config_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].push_config == pubsub.PushConfig( - push_endpoint="push_endpoint_value" - ) + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value") + assert arg == mock_val def test_modify_push_config_flattened_error(): @@ -2600,10 +2658,12 @@ async def test_modify_push_config_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].subscription == "subscription_value" - assert args[0].push_config == pubsub.PushConfig( - push_endpoint="push_endpoint_value" - ) + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint="push_endpoint_value") + assert arg == mock_val @pytest.mark.asyncio @@ -2765,7 +2825,9 @@ def test_get_snapshot_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val def test_get_snapshot_flattened_error(): @@ -2797,7 +2859,9 @@ async def test_get_snapshot_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2961,7 +3025,9 @@ def test_list_snapshots_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val def test_list_snapshots_flattened_error(): @@ -2995,7 +3061,9 @@ async def test_list_snapshots_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project == "project_value" + arg = args[0].project + mock_val = "project_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3281,8 +3349,12 @@ def test_create_snapshot_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].subscription == "subscription_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val def test_create_snapshot_flattened_error(): @@ -3318,8 +3390,12 @@ async def test_create_snapshot_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].subscription == "subscription_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].subscription + mock_val = "subscription_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3613,7 +3689,9 @@ def test_delete_snapshot_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val def test_delete_snapshot_flattened_error(): @@ -3645,7 +3723,9 @@ async def test_delete_snapshot_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].snapshot == "snapshot_value" + arg = args[0].snapshot + mock_val = "snapshot_value" + assert arg == mock_val @pytest.mark.asyncio From 505d6492ff255a1f24d246ca836ad7fd5324fc16 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 10 Nov 2021 17:05:09 +0100 Subject: [PATCH 0737/1197] chore: run system tests with Python 3.10 (#534) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: run system tests with Python 3.10 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/CONTRIBUTING.rst | 4 ++-- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 1 + 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index b749f1fb47d9..26c1d580b5e2 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.8 -- -k + $ nox -s system-3.10 -- -k .. note:: - System tests are only configured to run under Python 3.8. + System tests are only configured to run under Python 3.10. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 0be25350247a..931c23ad2d5b 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -31,7 +31,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 725d833338ff..06031ff371ec 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -354,6 +354,7 @@ samples=True, cov_level=100, unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], + system_test_python_versions=["3.10"], system_test_external_dependencies=["psutil"], ) s.move(templated_files, excludes=[".coveragerc", ".github/CODEOWNERS"]) From 5ebed692c909e9608817961bfbe5b7ef7e5c6376 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 10 Nov 2021 16:24:21 +0000 Subject: [PATCH 0738/1197] chore: release 2.9.0 (#522) :robot: I have created a release \*beep\* \*boop\* --- ## [2.9.0](https://www.github.com/googleapis/python-pubsub/compare/v2.8.0...v2.9.0) (2021-11-10) ### Features * add context manager support in client ([#516](https://www.github.com/googleapis/python-pubsub/issues/516)) ([51eae67](https://www.github.com/googleapis/python-pubsub/commit/51eae67c47e2ce7d2f7620209e98df4a129801b5)) * add support for Python 3.10 ([#518](https://www.github.com/googleapis/python-pubsub/issues/518)) ([bb25d75](https://www.github.com/googleapis/python-pubsub/commit/bb25d755d70ba19e69d8a281be65f13eb994967d)) ### Bug Fixes * add 'dict' annotation type to 'request' 
([b72522a](https://www.github.com/googleapis/python-pubsub/commit/b72522a4617c4b2773fb6a5a631038791aa08300)) * **deps:** drop packaging dependency ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) * **deps:** require google-api-core >= 1.28.0 ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) * improper types in pagers generation ([2ad639d](https://www.github.com/googleapis/python-pubsub/commit/2ad639d6370c7a085498595d7bd0d7eaadfff3c1)) ### Documentation * add type annotations to codebase ([#509](https://www.github.com/googleapis/python-pubsub/issues/509)) ([093cabf](https://www.github.com/googleapis/python-pubsub/commit/093cabff9f0464b1dfaa8f373b6fffbc439518de)) * list oneofs in docstring ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) --- This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please). 
--- packages/google-cloud-pubsub/CHANGELOG.md | 22 ++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 43248a98ab4b..00a8aae50a8f 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,28 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.9.0](https://www.github.com/googleapis/python-pubsub/compare/v2.8.0...v2.9.0) (2021-11-10) + + +### Features + +* add context manager support in client ([#516](https://www.github.com/googleapis/python-pubsub/issues/516)) ([51eae67](https://www.github.com/googleapis/python-pubsub/commit/51eae67c47e2ce7d2f7620209e98df4a129801b5)) +* add support for Python 3.10 ([#518](https://www.github.com/googleapis/python-pubsub/issues/518)) ([bb25d75](https://www.github.com/googleapis/python-pubsub/commit/bb25d755d70ba19e69d8a281be65f13eb994967d)) + + +### Bug Fixes + +* add 'dict' annotation type to 'request' ([b72522a](https://www.github.com/googleapis/python-pubsub/commit/b72522a4617c4b2773fb6a5a631038791aa08300)) +* **deps:** drop packaging dependency ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) +* **deps:** require google-api-core >= 1.28.0 ([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) +* improper types in pagers generation ([2ad639d](https://www.github.com/googleapis/python-pubsub/commit/2ad639d6370c7a085498595d7bd0d7eaadfff3c1)) + + +### Documentation + +* add type annotations to codebase ([#509](https://www.github.com/googleapis/python-pubsub/issues/509)) ([093cabf](https://www.github.com/googleapis/python-pubsub/commit/093cabff9f0464b1dfaa8f373b6fffbc439518de)) +* list oneofs in docstring 
([290b9c5](https://www.github.com/googleapis/python-pubsub/commit/290b9c5615eaa03674b773a27b756483abd76195)) + ## [2.8.0](https://www.github.com/googleapis/python-pubsub/compare/v2.7.1...v2.8.0) (2021-09-02) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 4fedbb7f894c..e35af7289954 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.8.0" +version = "2.9.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 109499c85ef5d8ded0596778eb527a6234f1baf2 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 10 Nov 2021 20:55:27 +0100 Subject: [PATCH 0739/1197] chore(deps): update dependency google-cloud-pubsub to v2.9.0 (#535) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 40343160b35c..40078e73f2b8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.8.0 +google-cloud-pubsub==2.9.0 avro==1.11.0 From 56d797c037693643f57beeb4e0cc1e00d424d84a Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 16 Nov 2021 03:33:55 -0500 Subject: [PATCH 0740/1197] chore: update doc links from googleapis.dev to cloud.google.com (#537) --- packages/google-cloud-pubsub/.repo-metadata.json | 2 +- packages/google-cloud-pubsub/README.rst | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index fa07857c21e6..928c9c759870 100644 --- 
a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "pubsub", "name_pretty": "Google Cloud Pub/Sub", "product_documentation": "https://cloud.google.com/pubsub/docs/", - "client_documentation": "https://googleapis.dev/python/pubsub/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/pubsub/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", "release_level": "ga", "language": "python", diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 7d671c42b73c..6432525b1cb8 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -27,7 +27,7 @@ independently written applications. :target: https://pypi.org/project/google-cloud-pubsub/ .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs -.. _Client Library Documentation: https://googleapis.dev/python/pubsub/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest Quick Start ----------- @@ -116,7 +116,7 @@ messages to it To learn more, consult the `publishing documentation`_. -.. _publishing documentation: https://googleapis.dev/python/pubsub/latest +.. _publishing documentation: https://cloud.google.com/python/docs/reference/pubsub/latest Subscribing @@ -162,7 +162,7 @@ block the current thread until a given condition obtains: It is also possible to pull messages in a synchronous (blocking) fashion. To learn more about subscribing, consult the `subscriber documentation`_. -.. _subscriber documentation: https://googleapis.dev/python/pubsub/latest +.. 
_subscriber documentation: https://cloud.google.com/python/docs/reference/pubsub/latest Authentication From 4e53e009a8801a1c23025eaadfaee8f39cc28c23 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 30 Nov 2021 15:59:15 +0100 Subject: [PATCH 0741/1197] process: make sure type annotations pass with mypy (#542) * Add mypy to nox sessions * Mark google/cloud package as type-checked * Ignore types for dependencies lacking type info * Fix type annotations in publish flow controller * Fix type hints in thread-based Batch * Fix type annotations in sequencers * Fix type hints in dispatcher * Fix type hints in publisher client * Fix misc type errors in various modules * Fix type hints in leaser * Fix type annotations in streaming pull manager * Fix gapic timeout hint in older api-core versions google-api-core versions prior to v2.2.2 lack the definition of _MethodDefault, thus a workaround is needed for that. * Remove py.typed marker file The autogenerated code does not pass mypy type checks yet, thus we should not advertise the package as type-checked.
* Replace typing.cast with is not None assertions * Replace batched_commands dict with separate lists * Rename variable to avoid false type warnings * Get rid of type cast by using a new variable * Just ignore the line where type checkers disagree * Remove unused imports * Replace type casts with is not None assertions * Cover missing dispatcher case after refactoring --- .../google/cloud/__init__.py | 4 +- .../google/cloud/pubsub_v1/futures.py | 4 +- .../cloud/pubsub_v1/publisher/_batch/base.py | 2 +- .../pubsub_v1/publisher/_batch/thread.py | 17 +++-- .../pubsub_v1/publisher/_sequencer/base.py | 7 +- .../publisher/_sequencer/ordered_sequencer.py | 25 ++++---- .../_sequencer/unordered_sequencer.py | 19 +++--- .../cloud/pubsub_v1/publisher/client.py | 31 +++++---- .../pubsub_v1/publisher/flow_controller.py | 17 +++-- .../subscriber/_protocol/dispatcher.py | 54 +++++++++++----- .../subscriber/_protocol/heartbeater.py | 3 +- .../pubsub_v1/subscriber/_protocol/leaser.py | 33 ++++++---- .../_protocol/streaming_pull_manager.py | 64 +++++++++++-------- .../cloud/pubsub_v1/subscriber/client.py | 14 ++-- .../cloud/pubsub_v1/subscriber/message.py | 2 +- .../cloud/pubsub_v1/subscriber/scheduler.py | 2 +- .../google/cloud/pubsub_v1/types.py | 32 +++++++--- packages/google-cloud-pubsub/noxfile.py | 18 +++++- .../pubsub_v1/subscriber/test_dispatcher.py | 21 ++++++ .../subscriber/test_streaming_pull_manager.py | 1 + 20 files changed, 240 insertions(+), 130 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py index 9a1b64a6d586..e1f8a4d20fd1 100644 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ b/packages/google-cloud-pubsub/google/cloud/__init__.py @@ -14,6 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import List + try: import pkg_resources @@ -21,4 +23,4 @@ except ImportError: import pkgutil - __path__ = pkgutil.extend_path(__path__, __name__) + __path__: List[str] = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py index d8acc8ea5c40..5527d21d0683 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/futures.py @@ -15,7 +15,7 @@ from __future__ import absolute_import import concurrent.futures -from typing import Any, NoReturn +from typing import Any, NoReturn, Optional import google.api_core.future @@ -47,7 +47,7 @@ def set_result(self, result: Any): """ return super().set_result(result=result) - def set_exception(self, exception: Exception): + def set_exception(self, exception: Optional[BaseException]): """Set the result of the future as being the given exception. Do not use this method, it should only be used internally by the library and its diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index f3202836084e..52505996be0b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -58,7 +58,7 @@ def __len__(self): @staticmethod @abc.abstractmethod - def make_lock() -> None: # pragma: NO COVER + def make_lock(): # pragma: NO COVER """Return a lock in the chosen concurrency model. 
Returns: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index d68d00a0eb12..ade135f4598f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -18,7 +18,7 @@ import threading import time import typing -from typing import Any, Callable, Optional, Sequence +from typing import Any, Callable, List, Optional, Sequence import google.api_core.exceptions from google.api_core import gapic_v1 @@ -28,11 +28,10 @@ from google.pubsub_v1 import types as gapic_types if typing.TYPE_CHECKING: # pragma: NO COVER - from google import api_core from google.cloud import pubsub_v1 from google.cloud.pubsub_v1 import types - from google.cloud.pubsub_v1 import PublisherClient - + from google.cloud.pubsub_v1.publisher import Client as PublisherClient + from google.pubsub_v1.services.publisher.client import OptionalRetry _LOGGER = logging.getLogger(__name__) _CAN_COMMIT = (base.BatchStatus.ACCEPTING_MESSAGES, base.BatchStatus.STARTING) @@ -93,8 +92,8 @@ def __init__( settings: "types.BatchSettings", batch_done_callback: Callable[[bool], Any] = None, commit_when_full: bool = True, - commit_retry: "api_core.retry.Retry" = gapic_v1.method.DEFAULT, - commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ): self._client = client self._topic = topic @@ -108,8 +107,8 @@ def __init__( # _futures list should remain unchanged after batch # status changed from ACCEPTING_MESSAGES to any other # in order to avoid race conditions - self._futures = [] - self._messages = [] + self._futures: List[futures.Future] = [] + self._messages: List[gapic_types.PubsubMessage] = [] self._status = base.BatchStatus.ACCEPTING_MESSAGES # 
The initial size is not zero, we need to account for the size overhead @@ -368,7 +367,7 @@ def publish( ), "Publish after stop() or publish error." if self.status != base.BatchStatus.ACCEPTING_MESSAGES: - return + return None size_increase = gapic_types.PublishRequest( messages=[message] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index 49bdcb7409a6..7a0c28e45571 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -22,7 +22,7 @@ if typing.TYPE_CHECKING: # pragma: NO COVER from concurrent import futures - from google.api_core import retry + from google.pubsub_v1.services.publisher.client import OptionalRetry class Sequencer(metaclass=abc.ABCMeta): @@ -30,7 +30,6 @@ class Sequencer(metaclass=abc.ABCMeta): sequences messages to be published. """ - @staticmethod @abc.abstractmethod def is_finished(self) -> bool: # pragma: NO COVER """ Whether the sequencer is finished and should be cleaned up. @@ -40,7 +39,6 @@ def is_finished(self) -> bool: # pragma: NO COVER """ raise NotImplementedError - @staticmethod @abc.abstractmethod def unpause(self) -> None: # pragma: NO COVER """ Unpauses this sequencer. @@ -51,12 +49,11 @@ def unpause(self) -> None: # pragma: NO COVER """ raise NotImplementedError - @staticmethod @abc.abstractmethod def publish( self, message: gapic_types.PubsubMessage, - retry: "retry.Retry" = None, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, ) -> "futures.Future": # pragma: NO COVER """ Publish message for this ordering key. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index 106c4da99197..4d44b1a4f19f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -14,21 +14,22 @@ import enum import collections -import concurrent.futures as futures import threading import typing -from typing import Iterable, Sequence +from typing import Deque, Iterable, Sequence from google.api_core import gapic_v1 +from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base from google.cloud.pubsub_v1.publisher._batch import base as batch_base from google.pubsub_v1 import types as gapic_types if typing.TYPE_CHECKING: # pragma: NO COVER - from google.api_core import retry - from google.cloud.pubsub_v1 import PublisherClient + from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import _batch + from google.cloud.pubsub_v1.publisher.client import Client as PublisherClient + from google.pubsub_v1.services.publisher.client import OptionalRetry class _OrderedSequencerStatus(str, enum.Enum): @@ -101,7 +102,7 @@ def __init__(self, client: "PublisherClient", topic: str, ordering_key: str): # Batches ordered from first (head/left) to last (right/tail). # Invariant: always has at least one batch after the first publish, # unless paused or stopped. - self._ordered_batches = collections.deque() + self._ordered_batches: Deque["_batch.thread.Batch"] = collections.deque() # See _OrderedSequencerStatus for valid state transitions. 
self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES @@ -237,8 +238,8 @@ def unpause(self) -> None: def _create_batch( self, - commit_retry: "retry.Retry" = gapic_v1.method.DEFAULT, - commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "_batch.thread.Batch": """ Create a new batch using the client's batch class and other stored settings. @@ -262,8 +263,8 @@ def _create_batch( def publish( self, message: gapic_types.PubsubMessage, - retry: "retry.Retry" = gapic_v1.method.DEFAULT, - timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> futures.Future: """ Publish message for this ordering key. @@ -289,12 +290,12 @@ def publish( """ with self._state_lock: if self._state == _OrderedSequencerStatus.PAUSED: - future = futures.Future() + errored_future = futures.Future() exception = exceptions.PublishToPausedOrderingKeyException( self._ordering_key ) - future.set_exception(exception) - return future + errored_future.set_exception(exception) + return errored_future # If waiting to be cleaned-up, convert to accepting messages to # prevent this sequencer from being cleaned-up only to have another diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index 91d47b948241..7f2f136105b8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -13,6 +13,7 @@ # limitations under the License. 
import typing +from typing import Optional from google.api_core import gapic_v1 @@ -20,10 +21,12 @@ from google.pubsub_v1 import types as gapic_types if typing.TYPE_CHECKING: # pragma: NO COVER - from concurrent import futures - from google.api_core import retry - from google.cloud.pubsub_v1 import PublisherClient from google.cloud.pubsub_v1.publisher import _batch + from google.cloud.pubsub_v1.publisher import futures + from google.cloud.pubsub_v1.publisher.client import Client as PublisherClient + from google.pubsub_v1.services.publisher.client import OptionalRetry + + from google.cloud.pubsub_v1 import types class UnorderedSequencer(base.Sequencer): @@ -35,7 +38,7 @@ class UnorderedSequencer(base.Sequencer): def __init__(self, client: "PublisherClient", topic: str): self._client = client self._topic = topic - self._current_batch = None + self._current_batch: Optional["_batch.thread.Batch"] = None self._stopped = False def is_finished(self) -> bool: @@ -88,8 +91,8 @@ def unpause(self) -> typing.NoReturn: def _create_batch( self, - commit_retry: "retry.Retry" = gapic_v1.method.DEFAULT, - commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "_batch.thread.Batch": """ Create a new batch using the client's batch class and other stored settings. @@ -113,8 +116,8 @@ def _create_batch( def publish( self, message: gapic_types.PubsubMessage, - retry: "retry.Retry" = gapic_v1.method.DEFAULT, - timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "futures.Future": """ Batch message into existing or new batch. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 7e7c01c19772..58baf43b6a37 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -21,11 +21,11 @@ import threading import time import typing -from typing import Any, Sequence, Type, Union +from typing import Any, Dict, Optional, Sequence, Tuple, Type, Union from google.api_core import gapic_v1 -from google.auth.credentials import AnonymousCredentials -from google.oauth2 import service_account +from google.auth.credentials import AnonymousCredentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types @@ -46,10 +46,9 @@ __version__ = "0.0" if typing.TYPE_CHECKING: # pragma: NO COVER - from google import api_core from google.cloud import pubsub_v1 - from google.cloud.pubsub_v1.publisher._sequencer.base import Sequencer from google.cloud.pubsub_v1.publisher import _batch + from google.pubsub_v1.services.publisher.client import OptionalRetry _LOGGER = logging.getLogger(__name__) @@ -62,6 +61,10 @@ _raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() +SequencerType = Union[ + ordered_sequencer.OrderedSequencer, unordered_sequencer.UnorderedSequencer +] + @_gapic.add_methods(publisher_client.PublisherClient, denylist=_DENYLISTED_METHODS) class Client(object): @@ -152,10 +155,10 @@ def __init__( # messages. One batch exists for each topic. self._batch_lock = self._batch_class.make_lock() # (topic, ordering_key) => sequencers object - self._sequencers = {} + self._sequencers: Dict[Tuple[str, str], SequencerType] = {} self._is_stopped = False # Thread created to commit all sequencers after a timeout. 
- self._commit_thread = None + self._commit_thread: Optional[threading.Thread] = None # The object controlling the message publishing flow self._flow_controller = FlowController(self.publisher_options.flow_control) @@ -196,7 +199,7 @@ def target(self) -> str: """ return self._target - def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> "Sequencer": + def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> SequencerType: """ Get an existing sequencer or create a new one given the (topic, ordering_key) pair. """ @@ -254,8 +257,8 @@ def publish( topic: str, data: bytes, ordering_key: str = "", - retry: "api_core.retry.Retry" = gapic_v1.method.DEFAULT, - timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, + timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, **attrs: Union[bytes, str], ) -> "pubsub_v1.publisher.futures.Future": """Publish a single message. @@ -380,8 +383,10 @@ def on_publish_done(future): if retry is gapic_v1.method.DEFAULT: # use the default retry for the publish GRPC method as a base transport = self.api._transport - retry = transport._wrapped_methods[transport.publish]._retry - retry = retry.with_deadline(2.0 ** 32) + base_retry = transport._wrapped_methods[transport.publish]._retry + retry = base_retry.with_deadline(2.0 ** 32) + else: + retry = retry.with_deadline(2.0 ** 32) # Delegate the publishing to the sequencer. sequencer = self._get_or_create_sequencer(topic, ordering_key) @@ -490,7 +495,7 @@ def _set_batch_class(self, batch_class: Type) -> None: # Used only for testing. 
def _set_sequencer( - self, topic: str, sequencer: "Sequencer", ordering_key: str = "" + self, topic: str, sequencer: SequencerType, ordering_key: str = "" ) -> None: sequencer_key = (topic, ordering_key) self._sequencers[sequencer_key] = sequencer diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py index fa3d58d33e3e..3c0558fe51cd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -15,7 +15,7 @@ from collections import OrderedDict import logging import threading -from typing import Optional +from typing import Dict, Optional, Type import warnings from google.cloud.pubsub_v1 import types @@ -25,6 +25,9 @@ _LOGGER = logging.getLogger(__name__) +MessageType = Type[types.PubsubMessage] # type: ignore # pytype: disable=module-attr + + class _QuantityReservation: """A (partial) reservation of quantifiable resources.""" @@ -60,7 +63,7 @@ def __init__(self, settings: types.PublishFlowControl): # A FIFO queue of threads blocked on adding a message that also tracks their # reservations of available flow control bytes and message slots. # Only relevant if the configured limit exceeded behavior is BLOCK. - self._waiting = OrderedDict() + self._waiting: Dict[threading.Thread, _QuantityReservation] = OrderedDict() self._reserved_bytes = 0 self._reserved_slots = 0 @@ -72,7 +75,7 @@ def __init__(self, settings: types.PublishFlowControl): # The condition for blocking the flow if capacity is exceeded. self._has_capacity = threading.Condition(lock=self._operational_lock) - def add(self, message: types.PubsubMessage) -> None: # pytype: disable=module-attr + def add(self, message: MessageType) -> None: """Add a message to flow control. 
Adding a message updates the internal load statistics, and an action is @@ -166,9 +169,7 @@ def add(self, message: types.PubsubMessage) -> None: # pytype: disable=module-a self._reserved_slots -= 1 del self._waiting[current_thread] - def release( - self, message: types.PubsubMessage # pytype: disable=module-attr - ) -> None: + def release(self, message: MessageType) -> None: """Release a mesage from flow control. Args: @@ -255,9 +256,7 @@ def _ready_to_unblock(self) -> bool: return False - def _would_overflow( - self, message: types.PubsubMessage # pytype: disable=module-attr - ) -> bool: + def _would_overflow(self, message: MessageType) -> bool: """Determine if accepting a message would exceed flow control limits. The method assumes that the caller has obtained ``_operational_lock``. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index badcd78f386e..885210fc6a19 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -15,13 +15,13 @@ from __future__ import absolute_import from __future__ import division -import collections import itertools import logging import math import threading import typing -from typing import Sequence, Union +from typing import List, Optional, Sequence, Union +import warnings from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -71,7 +71,7 @@ class Dispatcher(object): def __init__(self, manager: "StreamingPullManager", queue: "queue.Queue"): self._manager = manager self._queue = queue - self._thread = None + self._thread: Optional[threading.Thread] = None self._operational_lock = threading.Lock() def start(self) -> None: @@ -112,25 +112,47 @@ def dispatch_callback(self, items: 
Sequence[RequestItem]) -> None: items: Queued requests to dispatch. """ - batched_commands = collections.defaultdict(list) + lease_requests: List[requests.LeaseRequest] = [] + modack_requests: List[requests.ModAckRequest] = [] + ack_requests: List[requests.AckRequest] = [] + nack_requests: List[requests.NackRequest] = [] + drop_requests: List[requests.DropRequest] = [] for item in items: - batched_commands[item.__class__].append(item) + if isinstance(item, requests.LeaseRequest): + lease_requests.append(item) + elif isinstance(item, requests.ModAckRequest): + modack_requests.append(item) + elif isinstance(item, requests.AckRequest): + ack_requests.append(item) + elif isinstance(item, requests.NackRequest): + nack_requests.append(item) + elif isinstance(item, requests.DropRequest): + drop_requests.append(item) + else: + warnings.warn( + f'Skipping unknown request item of type "{type(item)}"', + category=RuntimeWarning, + ) _LOGGER.debug("Handling %d batched requests", len(items)) - if batched_commands[requests.LeaseRequest]: - self.lease(batched_commands.pop(requests.LeaseRequest)) - if batched_commands[requests.ModAckRequest]: - self.modify_ack_deadline(batched_commands.pop(requests.ModAckRequest)) + if lease_requests: + self.lease(lease_requests) + + if modack_requests: + self.modify_ack_deadline(modack_requests) + # Note: Drop and ack *must* be after lease. It's possible to get both # the lease and the ack/drop request in the same batch. 
- if batched_commands[requests.AckRequest]: - self.ack(batched_commands.pop(requests.AckRequest)) - if batched_commands[requests.NackRequest]: - self.nack(batched_commands.pop(requests.NackRequest)) - if batched_commands[requests.DropRequest]: - self.drop(batched_commands.pop(requests.DropRequest)) + if ack_requests: + self.ack(ack_requests) + + if nack_requests: + self.nack(nack_requests) + + if drop_requests: + self.drop(drop_requests) def ack(self, items: Sequence[requests.AckRequest]) -> None: """Acknowledge the given messages. @@ -169,6 +191,7 @@ def drop( Args: items: The items to drop. """ + assert self._manager.leaser is not None self._manager.leaser.remove(items) ordering_keys = (k.ordering_key for k in items if k.ordering_key) self._manager.activate_ordering_keys(ordering_keys) @@ -180,6 +203,7 @@ def lease(self, items: Sequence[requests.LeaseRequest]) -> None: Args: items: The items to lease. """ + assert self._manager.leaser is not None self._manager.leaser.add(items) self._manager.maybe_pause_consumer() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 842e4adc55d2..0ab03ddf968e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -17,6 +17,7 @@ import logging import threading import typing +from typing import Optional if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( @@ -34,7 +35,7 @@ class Heartbeater(object): def __init__(self, manager: "StreamingPullManager", period: int = _DEFAULT_PERIOD): - self._thread = None + self._thread: Optional[threading.Thread] = None self._operational_lock = threading.Lock() self._manager = manager self._stop_event = threading.Event() diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index c8d7e93659fb..7cd9317e6bb1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -14,14 +14,21 @@ from __future__ import absolute_import -import collections import copy import logging import random import threading import time import typing -from typing import Iterable, Sequence, Union +from typing import Dict, Iterable, Optional, Union + +try: + from collections.abc import KeysView + + KeysView[None] # KeysView is only subscriptable in Python 3.9+ +except TypeError: + # Deprecated since Python 3.9, thus only use as a fallback in older Python versions + from typing import KeysView from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -35,14 +42,17 @@ _LEASE_WORKER_NAME = "Thread-LeaseMaintainer" -_LeasedMessage = collections.namedtuple( - "_LeasedMessage", ["sent_time", "size", "ordering_key"] -) +class _LeasedMessage(typing.NamedTuple): + sent_time: float + """The local time when ACK ID was initially leased in seconds since the epoch.""" + + size: int + ordering_key: Optional[str] class Leaser(object): def __init__(self, manager: "StreamingPullManager"): - self._thread = None + self._thread: Optional[threading.Thread] = None self._manager = manager # a lock used for start/stop operations, protecting the _thread attribute @@ -53,11 +63,10 @@ def __init__(self, manager: "StreamingPullManager"): self._add_remove_lock = threading.Lock() # Dict of ack_id -> _LeasedMessage - self._leased_messages = {} - """dict[str, float]: A mapping of ack IDs to the local time when the - ack ID was initially leased in seconds since the epoch.""" + self._leased_messages: Dict[str, _LeasedMessage] = {} + self._bytes = 0 - """int: The total number of bytes consumed by 
leased messages.""" + """The total number of bytes consumed by leased messages.""" self._stop_event = threading.Event() @@ -67,7 +76,7 @@ def message_count(self) -> int: return len(self._leased_messages) @property - def ack_ids(self) -> Sequence[str]: + def ack_ids(self) -> KeysView[str]: # pytype: disable=invalid-annotation """The ack IDs of all leased messages.""" return self._leased_messages.keys() @@ -163,6 +172,7 @@ def maintain_leases(self) -> None: _LOGGER.warning( "Dropping %s items because they were leased too long.", len(to_drop) ) + assert self._manager.dispatcher is not None self._manager.dispatcher.drop(to_drop) # Remove dropped items from our copy of the leased messages (they @@ -183,6 +193,7 @@ def maintain_leases(self) -> None: # without any sort of race condition would require a # way for ``send_request`` to fail when the consumer # is inactive. + assert self._manager.dispatcher is not None self._manager.dispatcher.modify_ack_deadline( [requests.ModAckRequest(ack_id, deadline) for ack_id in ack_ids] ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 018917b901da..3a2bc6bc1136 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -20,10 +20,10 @@ import logging import threading import typing -from typing import Any, Callable, Iterable +from typing import Any, Callable, Iterable, List, Optional, Union import uuid -import grpc +import grpc # type: ignore from google.api_core import bidi from google.api_core import exceptions @@ -35,12 +35,11 @@ from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests import 
google.cloud.pubsub_v1.subscriber.message -import google.cloud.pubsub_v1.subscriber.scheduler +from google.cloud.pubsub_v1.subscriber.scheduler import ThreadScheduler from google.pubsub_v1 import types as gapic_types if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import subscriber - from google.cloud.pubsub_v1.subscriber.scheduler import Scheduler _LOGGER = logging.getLogger(__name__) @@ -143,7 +142,7 @@ def __init__( client: "subscriber.Client", subscription: str, flow_control: types.FlowControl = types.FlowControl(), - scheduler: "Scheduler" = None, + scheduler: ThreadScheduler = None, use_legacy_flow_control: bool = False, await_callbacks_on_shutdown: bool = False, ): @@ -154,13 +153,15 @@ def __init__( self._await_callbacks_on_shutdown = await_callbacks_on_shutdown self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 - self._ack_deadline = histogram.MIN_ACK_DEADLINE - self._rpc = None - self._callback = None + self._ack_deadline: Union[int, float] = histogram.MIN_ACK_DEADLINE + self._rpc: Optional[bidi.ResumableBidiRpc] = None + self._callback: Optional[functools.partial] = None self._closing = threading.Lock() self._closed = False - self._close_callbacks = [] - self._regular_shutdown_thread = None # Created on intentional shutdown. + self._close_callbacks: List[Callable[["StreamingPullManager", Any], Any]] = [] + + # A shutdown thread is created on intentional shutdown. + self._regular_shutdown_thread: Optional[threading.Thread] = None # Generate a random client id tied to this object. 
All streaming pull # connections (initial and re-connects) will then use the same client @@ -169,9 +170,7 @@ def __init__( self._client_id = str(uuid.uuid4()) if scheduler is None: - self._scheduler = ( - google.cloud.pubsub_v1.subscriber.scheduler.ThreadScheduler() - ) + self._scheduler: Optional[ThreadScheduler] = ThreadScheduler() else: self._scheduler = scheduler @@ -196,10 +195,10 @@ def __init__( self._ack_deadline_lock = threading.Lock() # The threads created in ``.open()``. - self._dispatcher = None - self._leaser = None - self._consumer = None - self._heartbeater = None + self._dispatcher: Optional[dispatcher.Dispatcher] = None + self._leaser: Optional[leaser.Leaser] = None + self._consumer: Optional[bidi.BackgroundConsumer] = None + self._heartbeater: Optional[heartbeater.Heartbeater] = None @property def is_active(self) -> bool: @@ -216,12 +215,12 @@ def flow_control(self) -> types.FlowControl: return self._flow_control @property - def dispatcher(self) -> dispatcher.Dispatcher: + def dispatcher(self) -> Optional[dispatcher.Dispatcher]: """The dispatcher helper.""" return self._dispatcher @property - def leaser(self) -> leaser.Leaser: + def leaser(self) -> Optional[leaser.Leaser]: """The leaser helper.""" return self._leaser @@ -401,6 +400,8 @@ def _maybe_release_messages(self) -> None: self._schedule_message_on_hold(msg) released_ack_ids.append(msg.ack_id) + + assert self._leaser is not None self._leaser.start_lease_expiry_timer(released_ack_ids) def _schedule_message_on_hold( @@ -430,6 +431,8 @@ def _schedule_message_on_hold( self._messages_on_hold.size, self._on_hold_bytes, ) + assert self._scheduler is not None + assert self._callback is not None self._scheduler.schedule(self._callback, msg) def _send_unary_request(self, request: gapic_types.StreamingPullRequest) -> None: @@ -439,7 +442,7 @@ def _send_unary_request(self, request: gapic_types.StreamingPullRequest) -> None request: The stream request to be mapped into unary requests. 
""" if request.ack_ids: - self._client.acknowledge( + self._client.acknowledge( # type: ignore subscription=self._subscription, ack_ids=list(request.ack_ids) ) @@ -452,7 +455,7 @@ def _send_unary_request(self, request: gapic_types.StreamingPullRequest) -> None deadline_to_ack_ids[deadline].append(ack_id) for deadline, ack_ids in deadline_to_ack_ids.items(): - self._client.modify_ack_deadline( + self._client.modify_ack_deadline( # type: ignore subscription=self._subscription, ack_ids=ack_ids, ack_deadline_seconds=deadline, @@ -544,9 +547,11 @@ def open( ) # Create references to threads + assert self._scheduler is not None # pytype: disable=wrong-arg-types # (pytype incorrectly complains about "self" not being the right argument type) - self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) + scheduler_queue = self._scheduler.queue + self._dispatcher = dispatcher.Dispatcher(self, scheduler_queue) self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) self._leaser = leaser.Leaser(self) self._heartbeater = heartbeater.Heartbeater(self) @@ -601,11 +606,13 @@ def _shutdown(self, reason: Any = None) -> None: # Stop consuming messages. if self.is_active: _LOGGER.debug("Stopping consumer.") + assert self._consumer is not None self._consumer.stop() self._consumer = None # Shutdown all helper threads _LOGGER.debug("Stopping scheduler.") + assert self._scheduler is not None dropped_messages = self._scheduler.shutdown( await_msg_callbacks=self._await_callbacks_on_shutdown ) @@ -621,6 +628,7 @@ def _shutdown(self, reason: Any = None) -> None: # for the manager's maybe_resume_consumer() / maybe_pause_consumer(), # because the consumer gets shut down first. 
_LOGGER.debug("Stopping leaser.") + assert self._leaser is not None self._leaser.stop() total = len(dropped_messages) + len( @@ -634,12 +642,14 @@ def _shutdown(self, reason: Any = None) -> None: msg.nack() _LOGGER.debug("Stopping dispatcher.") + assert self._dispatcher is not None self._dispatcher.stop() self._dispatcher = None # dispatcher terminated, OK to dispose the leaser reference now self._leaser = None _LOGGER.debug("Stopping heartbeater.") + assert self._heartbeater is not None self._heartbeater.stop() self._heartbeater = None @@ -730,9 +740,13 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: requests.ModAckRequest(message.ack_id, self.ack_deadline) for message in received_messages ] + assert self._dispatcher is not None self._dispatcher.modify_ack_deadline(items) with self._pause_resume_lock: + assert self._scheduler is not None + assert self._leaser is not None + for received_message in received_messages: message = google.cloud.pubsub_v1.subscriber.message.Message( received_message.message, @@ -747,13 +761,13 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: byte_size=message.size, ordering_key=message.ordering_key, ) - self.leaser.add([req]) + self._leaser.add([req]) self._maybe_release_messages() self.maybe_pause_consumer() - def _should_recover(self, exception: Exception) -> bool: + def _should_recover(self, exception: BaseException) -> bool: """Determine if an error on the RPC stream should be recovered. If the exception is one of the retryable exceptions, this will signal @@ -775,7 +789,7 @@ def _should_recover(self, exception: Exception) -> bool: _LOGGER.info("Observed non-recoverable stream error %s", exception) return False - def _should_terminate(self, exception: Exception) -> bool: + def _should_terminate(self, exception: BaseException) -> bool: """Determine if an error on the RPC stream should be terminated. 
If the exception is one of the terminating exceptions, this will signal diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 099a6531820d..8eb1e2e25ebd 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -17,10 +17,10 @@ import os import pkg_resources import typing -from typing import Any, Callable, Optional, Sequence, Union +from typing import cast, Any, Callable, Optional, Sequence, Union -from google.auth.credentials import AnonymousCredentials -from google.oauth2 import service_account +from google.auth.credentials import AnonymousCredentials # type: ignore +from google.oauth2 import service_account # type: ignore from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types @@ -30,6 +30,9 @@ if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import subscriber + from google.pubsub_v1.services.subscriber.transports.grpc import ( + SubscriberGrpcTransport, + ) try: @@ -138,7 +141,7 @@ def subscribe( subscription: str, callback: Callable[["subscriber.message.Message"], Any], flow_control: Union[types.FlowControl, Sequence] = (), - scheduler: Optional["subscriber.scheduler.Scheduler"] = None, + scheduler: Optional["subscriber.scheduler.ThreadScheduler"] = None, use_legacy_flow_control: bool = False, await_callbacks_on_shutdown: bool = False, ) -> futures.StreamingPullFuture: @@ -263,7 +266,8 @@ def close(self) -> None: This method is idempotent. 
""" - self.api._transport.grpc_channel.close() + transport = cast("SubscriberGrpcTransport", self.api._transport) + transport.grpc_channel.close() self._closed = True def __enter__(self) -> "Client": diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 4a08257a61ba..5bd84e9ad228 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -83,7 +83,7 @@ class Message(object): def __init__( # pytype: disable=module-attr self, - message: "types.PubsubMessage._meta._pb", + message: "types.PubsubMessage._meta._pb", # type: ignore ack_id: str, delivery_attempt: int, request_queue: "queue.Queue", diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index 3db7ed73e0ea..ca270a077dfb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -101,7 +101,7 @@ class ThreadScheduler(Scheduler): def __init__( self, executor: Optional[concurrent.futures.ThreadPoolExecutor] = None ): - self._queue = queue.Queue() + self._queue: queue.Queue = queue.Queue() if executor is None: self._executor = _make_default_thread_pool_executor() else: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 0558c2f1e542..62dffcfc3021 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -19,15 +19,15 @@ import inspect import sys import typing -from typing import Dict, NamedTuple +from typing import Dict, NamedTuple, Union -import proto +import proto # type: ignore -from 
google.api import http_pb2 +from google.api import http_pb2 # type: ignore from google.api_core import gapic_v1 -from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 -from google.iam.v1.logging import audit_data_pb2 +from google.iam.v1.logging import audit_data_pb2 # type: ignore from google.protobuf import descriptor_pb2 from google.protobuf import duration_pb2 from google.protobuf import empty_pb2 @@ -41,8 +41,18 @@ if typing.TYPE_CHECKING: # pragma: NO COVER from types import ModuleType - from google.api_core import retry as retries from google.pubsub_v1 import types as gapic_types + from google.pubsub_v1.services.publisher.client import OptionalRetry + + # TODO: Eventually implement OptionalTimeout in the GAPIC code generator and import + # it from the generated code. It's the same solution that is used for OptionalRetry. + # https://github.com/googleapis/gapic-generator-python/pull/1032/files + # https://github.com/googleapis/gapic-generator-python/pull/1065/files + if hasattr(gapic_v1.method, "_MethodDefault"): + # _MethodDefault was only added in google-api-core==2.2.2 + OptionalTimeout = Union[gapic_types.TimeoutType, gapic_v1.method._MethodDefault] + else: + OptionalTimeout = Union[gapic_types.TimeoutType, object] # type: ignore # Define the default values for batching. 
@@ -85,10 +95,10 @@ class LimitExceededBehavior(str, enum.Enum): class PublishFlowControl(NamedTuple): """The client flow control settings for message publishing.""" - message_limit: int = 10 * BatchSettings.__new__.__defaults__[2] + message_limit: int = 10 * BatchSettings.__new__.__defaults__[2] # type: ignore """The maximum number of messages awaiting to be published.""" - byte_limit: int = 10 * BatchSettings.__new__.__defaults__[0] + byte_limit: int = 10 * BatchSettings.__new__.__defaults__[0] # type: ignore """The maximum total size of messages awaiting to be published.""" limit_exceeded_behavior: LimitExceededBehavior = LimitExceededBehavior.IGNORE @@ -111,17 +121,19 @@ class PublisherOptions(NamedTuple): "the publisher client does not do any throttling." ) - retry: "retries.Retry" = gapic_v1.method.DEFAULT # use api_core default + retry: "OptionalRetry" = gapic_v1.method.DEFAULT # use api_core default ( "Retry settings for message publishing by the client. This should be " "an instance of :class:`google.api_core.retry.Retry`." ) - timeout: "gapic_types.TimeoutType" = gapic_v1.method.DEFAULT # use api_core default + # pytype: disable=invalid-annotation + timeout: "OptionalTimeout" = gapic_v1.method.DEFAULT # use api_core default ( "Timeout settings for message publishing by the client. It should be " "compatible with :class:`~.pubsub_v1.types.TimeoutType`." ) + # pytype: enable=invalid-annotation # Define the type class and default values for flow control settings. 
diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 931c23ad2d5b..4341577a016d 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -27,6 +27,7 @@ BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +MYPY_VERSION = "mypy==0.910" PYTYPE_VERSION = "pytype==2021.4.9" @@ -44,6 +45,7 @@ "lint", "lint_setup_py", "blacken", + "mypy", "pytype", "docs", ] @@ -52,10 +54,24 @@ nox.options.error_on_missing_interpreters = True +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Run type checks with mypy.""" + session.install("-e", ".") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might require + # an additional pass. + session.install("types-protobuf", "types-setuptools") + + # Check the hand-written layer. + session.run("mypy", "google/cloud") + + @nox.session(python=DEFAULT_PYTHON_VERSION) def pytype(session): """Run type checks.""" - session.install("-e", ".[all]") + session.install("-e", ".") session.install(PYTYPE_VERSION) session.run("pytype") diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 84e04df1b99c..539ae40c7c2c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -15,6 +15,7 @@ import collections import queue import threading +import warnings from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import helper_threads @@ -75,6 +76,26 @@ def test_dispatch_callback_inactive_manager(item, method_name): method.assert_called_once_with([item]) +def test_dispatch_callback_inactive_manager_unknown_request(): + manager = 
mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + manager.is_active = False + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + FooType = type("FooType", (), {}) + items = [FooType()] + + with warnings.catch_warnings(record=True) as warned: + dispatcher_.dispatch_callback(items) + + assert len(warned) == 1 + assert issubclass(warned[0].category, RuntimeWarning) + warning_msg = str(warned[0].message) + assert "unknown request item" in warning_msg + assert "FooType" in warning_msg + + def test_ack(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 8e4f6daf0852..609026598856 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -401,6 +401,7 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) + manager._callback = lambda msg: msg # pragma: NO COVER msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=17) manager._messages_on_hold.put(msg) From ed4b59dedb0860b53a5c184195cfe819aec62d30 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Thu, 2 Dec 2021 11:36:52 +0100 Subject: [PATCH 0742/1197] process: persist mypy session in noxfile (#546) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * process: persist mypy session in noxfile * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Get rid of two redundant empty lines in noxfile.py * 🦉 Updates from OwlBot See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Add newline for better goruping of constants Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/noxfile.py | 12 ++++---- packages/google-cloud-pubsub/owlbot.py | 40 ++++++++++++++++++++++++- 2 files changed, 46 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 4341577a016d..33813328121f 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -57,21 +57,23 @@ @nox.session(python=DEFAULT_PYTHON_VERSION) def mypy(session): """Run type checks with mypy.""" - session.install("-e", ".") + session.install("-e", ".[all]") session.install(MYPY_VERSION) - # Just install the type info directly, since "mypy --install-types" might require - # an additional pass. + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. session.install("types-protobuf", "types-setuptools") - # Check the hand-written layer. + # TODO: Only check the hand-written layer, the generated code does not pass + # mypy checks yet. 
+ # https://github.com/googleapis/gapic-generator-python/issues/1092 session.run("mypy", "google/cloud") @nox.session(python=DEFAULT_PYTHON_VERSION) def pytype(session): """Run type checks.""" - session.install("-e", ".") + session.install("-e", ".[all]") session.install(PYTYPE_VERSION) session.run("pytype") diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 06031ff371ec..8ad33cf141a3 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -398,7 +398,7 @@ s.replace( "noxfile.py", r"BLACK_PATHS = \[.*?\]", - '\g<0>\n\nPYTYPE_VERSION = "pytype==2021.4.9"\n', + '\g<0>\nPYTYPE_VERSION = "pytype==2021.4.9"\n', ) s.replace( "noxfile.py", r'"blacken",', '\g<0>\n "pytype",', @@ -418,8 +418,46 @@ def pytype(session): session.run("pytype")''' ), ) + +# ---------------------------------------------------------------------------- +# Add mypy nox session. +# ---------------------------------------------------------------------------- +s.replace( + "noxfile.py", + r"BLACK_PATHS = \[.*?\]", + '\g<0>\n\nMYPY_VERSION = "mypy==0.910"', +) +s.replace( + "noxfile.py", r'"blacken",', '\g<0>\n "mypy",', +) +s.replace( + "noxfile.py", + r"nox\.options\.error_on_missing_interpreters = True", + textwrap.dedent( + ''' \g<0> + + + @nox.session(python=DEFAULT_PYTHON_VERSION) + def mypy(session): + """Run type checks with mypy.""" + session.install("-e", ".[all]") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + session.install("types-protobuf", "types-setuptools") + + # TODO: Only check the hand-written layer, the generated code does not pass + # mypy checks yet. + # https://github.com/googleapis/gapic-generator-python/issues/1092 + session.run("mypy", "google/cloud")''' + ), +) + +# Only consider the hand-written layer when assessing the test coverage. 
s.replace( "noxfile.py", "--cov=google", "--cov=google/cloud", ) +# Final code style adjustments. s.shell.run(["nox", "-s", "blacken"], hide_output=False) From 70aa409151364e85cd8dcba68fe61f975aadd673 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 12 Dec 2021 12:04:01 -0500 Subject: [PATCH 0743/1197] chore: update python-docs-samples link to main branch (#547) Source-Link: https://github.com/googleapis/synthtool/commit/0941ef32b18aff0be34a40404f3971d9f51996e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md | 2 +- packages/google-cloud-pubsub/samples/CONTRIBUTING.md | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index cb89b2e326b7..0b3c8cd98f89 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 + digest: sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec diff --git a/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md index 55c97b32f4c1..8249522ffc2d 100644 --- a/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md +++ b/packages/google-cloud-pubsub/samples/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ 
No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/CONTRIBUTING.md b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md index 34c882b6f1a3..f5fe2e6baf13 100644 --- a/packages/google-cloud-pubsub/samples/CONTRIBUTING.md +++ b/packages/google-cloud-pubsub/samples/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file From 2e4873ad6ebc40a4228a467f45d71664503d5db8 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 24 Dec 2021 12:17:13 -0500 Subject: [PATCH 0744/1197] chore: update .repo-metadata.json (#554) --- packages/google-cloud-pubsub/.repo-metadata.json | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index 928c9c759870..55824dc13c9b 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -4,12 +4,14 @@ "product_documentation": "https://cloud.google.com/pubsub/docs/", "client_documentation": "https://cloud.google.com/python/docs/reference/pubsub/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", - "release_level": "ga", + "release_level": "stable", "language": "python", "repo": "googleapis/python-pubsub", "distribution_name": "google-cloud-pubsub", "api_id": "pubsub.googleapis.com", "requires_billing": true, "default_version": "v1", - "codeowner_team": "@googleapis/api-pubsub" + "codeowner_team": "@googleapis/api-pubsub", + "api_shortname": "pubsub", + "library_type": "" } From e674b88610f5ead329aa50878c41c7a461cb0d92 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 7 Jan 2022 19:43:11 -0500 Subject: [PATCH 0745/1197] chore: use 
gapic-generator-python 0.58.4 (#555) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.58.4 fix: provide appropriate mock values for message body fields committer: dovs PiperOrigin-RevId: 419025932 Source-Link: https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/publisher/transports/base.py | 1 - .../schema_service/transports/base.py | 1 - .../services/subscriber/transports/base.py | 1 - .../unit/gapic/pubsub_v1/test_publisher.py | 119 +++++------- .../gapic/pubsub_v1/test_schema_service.py | 76 +++----- .../unit/gapic/pubsub_v1/test_subscriber.py | 172 ++++++------------ 6 files changed, 131 insertions(+), 239 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index bdb55ab7b2b8..818912f1f95e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -107,7 +107,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py 
index 16d98a8c45d1..5bf62b1bcf4e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -108,7 +108,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 6d90cef94478..1bd7b288e313 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -107,7 +107,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 5e2afb7e2272..2c8a5062b33c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -240,20 +240,20 @@ def test_publisher_client_client_options(client_class, transport_class, transpor # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -310,7 +310,7 @@ def test_publisher_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -405,7 +405,7 @@ def test_publisher_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -436,7 +436,7 @@ def test_publisher_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -467,7 +467,8 @@ def test_publisher_client_client_options_from_dict(): ) 
-def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): +@pytest.mark.parametrize("request_type", [pubsub.Topic, dict,]) +def test_create_topic(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -496,10 +497,6 @@ def test_create_topic(transport: str = "grpc", request_type=pubsub.Topic): assert response.satisfies_pzs is True -def test_create_topic_from_dict(): - test_create_topic(request_type=dict) - - def test_create_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -671,7 +668,8 @@ async def test_create_topic_flattened_error_async(): ) -def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRequest): +@pytest.mark.parametrize("request_type", [pubsub.UpdateTopicRequest, dict,]) +def test_update_topic(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -700,10 +698,6 @@ def test_update_topic(transport: str = "grpc", request_type=pubsub.UpdateTopicRe assert response.satisfies_pzs is True -def test_update_topic_from_dict(): - test_update_topic(request_type=dict) - - def test_update_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -809,7 +803,8 @@ async def test_update_topic_field_headers_async(): assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] -def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): +@pytest.mark.parametrize("request_type", [pubsub.PublishRequest, dict,]) +def test_publish(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -834,10 +829,6 @@ def test_publish(transport: str = "grpc", request_type=pubsub.PublishRequest): assert response.message_ids == ["message_ids_value"] -def test_publish_from_dict(): - test_publish(request_type=dict) - - def test_publish_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1021,7 +1012,8 @@ async def test_publish_flattened_error_async(): ) -def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest): +@pytest.mark.parametrize("request_type", [pubsub.GetTopicRequest, dict,]) +def test_get_topic(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1050,10 +1042,6 @@ def test_get_topic(transport: str = "grpc", request_type=pubsub.GetTopicRequest) assert response.satisfies_pzs is True -def test_get_topic_from_dict(): - test_get_topic(request_type=dict) - - def test_get_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1225,7 +1213,8 @@ async def test_get_topic_flattened_error_async(): ) -def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequest): +@pytest.mark.parametrize("request_type", [pubsub.ListTopicsRequest, dict,]) +def test_list_topics(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1252,10 +1241,6 @@ def test_list_topics(transport: str = "grpc", request_type=pubsub.ListTopicsRequ assert response.next_page_token == "next_page_token_value" -def test_list_topics_from_dict(): - test_list_topics(request_type=dict) - - def test_list_topics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1425,8 +1410,10 @@ async def test_list_topics_flattened_error_async(): ) -def test_list_topics_pager(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_topics_pager(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1455,8 +1442,10 @@ def test_list_topics_pager(): assert all(isinstance(i, pubsub.Topic) for i in results) -def test_list_topics_pages(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_topics_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1531,9 +1520,8 @@ async def test_list_topics_async_pages(): assert page_.raw_page.next_page_token == token -def test_list_topic_subscriptions( - transport: str = "grpc", request_type=pubsub.ListTopicSubscriptionsRequest -): +@pytest.mark.parametrize("request_type", [pubsub.ListTopicSubscriptionsRequest, dict,]) +def test_list_topic_subscriptions(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1564,10 +1552,6 @@ def test_list_topic_subscriptions( assert response.next_page_token == "next_page_token_value" -def test_list_topic_subscriptions_from_dict(): - test_list_topic_subscriptions(request_type=dict) - - def test_list_topic_subscriptions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1753,8 +1737,10 @@ async def test_list_topic_subscriptions_flattened_error_async(): ) -def test_list_topic_subscriptions_pager(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1788,8 +1774,10 @@ def test_list_topic_subscriptions_pager(): assert all(isinstance(i, str) for i in results) -def test_list_topic_subscriptions_pages(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1879,9 +1867,8 @@ async def test_list_topic_subscriptions_async_pages(): assert page_.raw_page.next_page_token == token -def test_list_topic_snapshots( - transport: str = "grpc", request_type=pubsub.ListTopicSnapshotsRequest -): +@pytest.mark.parametrize("request_type", [pubsub.ListTopicSnapshotsRequest, dict,]) +def test_list_topic_snapshots(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1911,10 +1898,6 @@ def test_list_topic_snapshots( assert response.next_page_token == "next_page_token_value" -def test_list_topic_snapshots_from_dict(): - test_list_topic_snapshots(request_type=dict) - - def test_list_topic_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2099,8 +2082,10 @@ async def test_list_topic_snapshots_flattened_error_async(): ) -def test_list_topic_snapshots_pager(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_topic_snapshots_pager(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2132,8 +2117,10 @@ def test_list_topic_snapshots_pager(): assert all(isinstance(i, str) for i in results) -def test_list_topic_snapshots_pages(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_topic_snapshots_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2217,7 +2204,8 @@ async def test_list_topic_snapshots_async_pages(): assert page_.raw_page.next_page_token == token -def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRequest): +@pytest.mark.parametrize("request_type", [pubsub.DeleteTopicRequest, dict,]) +def test_delete_topic(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2241,10 +2229,6 @@ def test_delete_topic(transport: str = "grpc", request_type=pubsub.DeleteTopicRe assert response is None -def test_delete_topic_from_dict(): - test_delete_topic(request_type=dict) - - def test_delete_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2407,9 +2391,8 @@ async def test_delete_topic_flattened_error_async(): ) -def test_detach_subscription( - transport: str = "grpc", request_type=pubsub.DetachSubscriptionRequest -): +@pytest.mark.parametrize("request_type", [pubsub.DetachSubscriptionRequest, dict,]) +def test_detach_subscription(request_type, transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2435,10 +2418,6 @@ def test_detach_subscription( assert isinstance(response, pubsub.DetachSubscriptionResponse) -def test_detach_subscription_from_dict(): - test_detach_subscription(request_type=dict) - - def test_detach_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3115,7 +3094,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 226c5f818161..a110c01f16a0 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -250,20 +250,20 @@ def test_schema_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -322,7 +322,7 @@ def test_schema_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None 
@@ -417,7 +417,7 @@ def test_schema_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -448,7 +448,7 @@ def test_schema_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -481,9 +481,8 @@ def test_schema_service_client_client_options_from_dict(): ) -def test_create_schema( - transport: str = "grpc", request_type=gp_schema.CreateSchemaRequest -): +@pytest.mark.parametrize("request_type", [gp_schema.CreateSchemaRequest, dict,]) +def test_create_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -514,10 +513,6 @@ def test_create_schema( assert response.definition == "definition_value" -def test_create_schema_from_dict(): - test_create_schema(request_type=dict) - - def test_create_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -721,7 +716,8 @@ async def test_create_schema_flattened_error_async(): ) -def test_get_schema(transport: str = "grpc", request_type=schema.GetSchemaRequest): +@pytest.mark.parametrize("request_type", [schema.GetSchemaRequest, dict,]) +def test_get_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -752,10 +748,6 @@ def test_get_schema(transport: str = "grpc", request_type=schema.GetSchemaReques assert response.definition == "definition_value" -def test_get_schema_from_dict(): - test_get_schema(request_type=dict) - - def test_get_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -933,7 +925,8 @@ async def test_get_schema_flattened_error_async(): ) -def test_list_schemas(transport: str = "grpc", request_type=schema.ListSchemasRequest): +@pytest.mark.parametrize("request_type", [schema.ListSchemasRequest, dict,]) +def test_list_schemas(request_type, transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -960,10 +953,6 @@ def test_list_schemas(transport: str = "grpc", request_type=schema.ListSchemasRe assert response.next_page_token == "next_page_token_value" -def test_list_schemas_from_dict(): - test_list_schemas(request_type=dict) - - def test_list_schemas_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1139,8 +1128,10 @@ async def test_list_schemas_flattened_error_async(): ) -def test_list_schemas_pager(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_schemas_pager(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1171,8 +1162,10 @@ def test_list_schemas_pager(): assert all(isinstance(i, schema.Schema) for i in results) -def test_list_schemas_pages(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_schemas_pages(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1253,9 +1246,8 @@ async def test_list_schemas_async_pages(): assert page_.raw_page.next_page_token == token -def test_delete_schema( - transport: str = "grpc", request_type=schema.DeleteSchemaRequest -): +@pytest.mark.parametrize("request_type", [schema.DeleteSchemaRequest, dict,]) +def test_delete_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1279,10 +1271,6 @@ def test_delete_schema( assert response is None -def test_delete_schema_from_dict(): - test_delete_schema(request_type=dict) - - def test_delete_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1451,9 +1439,8 @@ async def test_delete_schema_flattened_error_async(): ) -def test_validate_schema( - transport: str = "grpc", request_type=gp_schema.ValidateSchemaRequest -): +@pytest.mark.parametrize("request_type", [gp_schema.ValidateSchemaRequest, dict,]) +def test_validate_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1477,10 +1464,6 @@ def test_validate_schema( assert isinstance(response, gp_schema.ValidateSchemaResponse) -def test_validate_schema_from_dict(): - test_validate_schema(request_type=dict) - - def test_validate_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1669,9 +1652,8 @@ async def test_validate_schema_flattened_error_async(): ) -def test_validate_message( - transport: str = "grpc", request_type=schema.ValidateMessageRequest -): +@pytest.mark.parametrize("request_type", [schema.ValidateMessageRequest, dict,]) +def test_validate_message(request_type, transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1695,10 +1677,6 @@ def test_validate_message( assert isinstance(response, schema.ValidateMessageResponse) -def test_validate_message_from_dict(): - test_validate_message(request_type=dict) - - def test_validate_message_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2335,7 +2313,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index c5ed946a67ac..f35a82ef343e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -244,20 +244,20 @@ def test_subscriber_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -314,7 +314,7 @@ def test_subscriber_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -409,7 +409,7 @@ def 
test_subscriber_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -440,7 +440,7 @@ def test_subscriber_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -471,7 +471,8 @@ def test_subscriber_client_client_options_from_dict(): ) -def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscription): +@pytest.mark.parametrize("request_type", [pubsub.Subscription, dict,]) +def test_create_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -512,10 +513,6 @@ def test_create_subscription(transport: str = "grpc", request_type=pubsub.Subscr assert response.detached is True -def test_create_subscription_from_dict(): - test_create_subscription(request_type=dict) - - def test_create_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -743,9 +740,8 @@ async def test_create_subscription_flattened_error_async(): ) -def test_get_subscription( - transport: str = "grpc", request_type=pubsub.GetSubscriptionRequest -): +@pytest.mark.parametrize("request_type", [pubsub.GetSubscriptionRequest, dict,]) +def test_get_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -784,10 +780,6 @@ def test_get_subscription( assert response.detached is True -def test_get_subscription_from_dict(): - test_get_subscription(request_type=dict) - - def test_get_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -971,9 +963,8 @@ async def test_get_subscription_flattened_error_async(): ) -def test_update_subscription( - transport: str = "grpc", request_type=pubsub.UpdateSubscriptionRequest -): +@pytest.mark.parametrize("request_type", [pubsub.UpdateSubscriptionRequest, dict,]) +def test_update_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1014,10 +1005,6 @@ def test_update_subscription( assert response.detached is True -def test_update_subscription_from_dict(): - test_update_subscription(request_type=dict) - - def test_update_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1145,9 +1132,8 @@ async def test_update_subscription_field_headers_async(): ) in kw["metadata"] -def test_list_subscriptions( - transport: str = "grpc", request_type=pubsub.ListSubscriptionsRequest -): +@pytest.mark.parametrize("request_type", [pubsub.ListSubscriptionsRequest, dict,]) +def test_list_subscriptions(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1176,10 +1162,6 @@ def test_list_subscriptions( assert response.next_page_token == "next_page_token_value" -def test_list_subscriptions_from_dict(): - test_list_subscriptions(request_type=dict) - - def test_list_subscriptions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1361,8 +1343,10 @@ async def test_list_subscriptions_flattened_error_async(): ) -def test_list_subscriptions_pager(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_subscriptions_pager(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1401,8 +1385,10 @@ def test_list_subscriptions_pager(): assert all(isinstance(i, pubsub.Subscription) for i in results) -def test_list_subscriptions_pages(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_subscriptions_pages(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1507,9 +1493,8 @@ async def test_list_subscriptions_async_pages(): assert page_.raw_page.next_page_token == token -def test_delete_subscription( - transport: str = "grpc", request_type=pubsub.DeleteSubscriptionRequest -): +@pytest.mark.parametrize("request_type", [pubsub.DeleteSubscriptionRequest, dict,]) +def test_delete_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1535,10 +1520,6 @@ def test_delete_subscription( assert response is None -def test_delete_subscription_from_dict(): - test_delete_subscription(request_type=dict) - - def test_delete_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1717,9 +1698,8 @@ async def test_delete_subscription_flattened_error_async(): ) -def test_modify_ack_deadline( - transport: str = "grpc", request_type=pubsub.ModifyAckDeadlineRequest -): +@pytest.mark.parametrize("request_type", [pubsub.ModifyAckDeadlineRequest, dict,]) +def test_modify_ack_deadline(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1745,10 +1725,6 @@ def test_modify_ack_deadline( assert response is None -def test_modify_ack_deadline_from_dict(): - test_modify_ack_deadline(request_type=dict) - - def test_modify_ack_deadline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1953,7 +1929,8 @@ async def test_modify_ack_deadline_flattened_error_async(): ) -def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeRequest): +@pytest.mark.parametrize("request_type", [pubsub.AcknowledgeRequest, dict,]) +def test_acknowledge(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1977,10 +1954,6 @@ def test_acknowledge(transport: str = "grpc", request_type=pubsub.AcknowledgeReq assert response is None -def test_acknowledge_from_dict(): - test_acknowledge(request_type=dict) - - def test_acknowledge_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2161,7 +2134,8 @@ async def test_acknowledge_flattened_error_async(): ) -def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): +@pytest.mark.parametrize("request_type", [pubsub.PullRequest, dict,]) +def test_pull(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2185,10 +2159,6 @@ def test_pull(transport: str = "grpc", request_type=pubsub.PullRequest): assert isinstance(response, pubsub.PullResponse) -def test_pull_from_dict(): - test_pull(request_type=dict) - - def test_pull_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2385,9 +2355,8 @@ async def test_pull_flattened_error_async(): ) -def test_streaming_pull( - transport: str = "grpc", request_type=pubsub.StreamingPullRequest -): +@pytest.mark.parametrize("request_type", [pubsub.StreamingPullRequest, dict,]) +def test_streaming_pull(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2413,10 +2382,6 @@ def test_streaming_pull( assert isinstance(message, pubsub.StreamingPullResponse) -def test_streaming_pull_from_dict(): - test_streaming_pull(request_type=dict) - - @pytest.mark.asyncio async def test_streaming_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest @@ -2454,9 +2419,8 @@ async def test_streaming_pull_async_from_dict(): await test_streaming_pull_async(request_type=dict) -def test_modify_push_config( - transport: str = "grpc", request_type=pubsub.ModifyPushConfigRequest -): +@pytest.mark.parametrize("request_type", [pubsub.ModifyPushConfigRequest, dict,]) +def test_modify_push_config(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2482,10 +2446,6 @@ def test_modify_push_config( assert response is None -def test_modify_push_config_from_dict(): - test_modify_push_config(request_type=dict) - - def test_modify_push_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2680,7 +2640,8 @@ async def test_modify_push_config_flattened_error_async(): ) -def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRequest): +@pytest.mark.parametrize("request_type", [pubsub.GetSnapshotRequest, dict,]) +def test_get_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2706,10 +2667,6 @@ def test_get_snapshot(transport: str = "grpc", request_type=pubsub.GetSnapshotRe assert response.topic == "topic_value" -def test_get_snapshot_from_dict(): - test_get_snapshot(request_type=dict) - - def test_get_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2876,9 +2833,8 @@ async def test_get_snapshot_flattened_error_async(): ) -def test_list_snapshots( - transport: str = "grpc", request_type=pubsub.ListSnapshotsRequest -): +@pytest.mark.parametrize("request_type", [pubsub.ListSnapshotsRequest, dict,]) +def test_list_snapshots(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2905,10 +2861,6 @@ def test_list_snapshots( assert response.next_page_token == "next_page_token_value" -def test_list_snapshots_from_dict(): - test_list_snapshots(request_type=dict) - - def test_list_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3078,8 +3030,10 @@ async def test_list_snapshots_flattened_error_async(): ) -def test_list_snapshots_pager(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_snapshots_pager(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3112,8 +3066,10 @@ def test_list_snapshots_pager(): assert all(isinstance(i, pubsub.Snapshot) for i in results) -def test_list_snapshots_pages(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_snapshots_pages(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3200,9 +3156,8 @@ async def test_list_snapshots_async_pages(): assert page_.raw_page.next_page_token == token -def test_create_snapshot( - transport: str = "grpc", request_type=pubsub.CreateSnapshotRequest -): +@pytest.mark.parametrize("request_type", [pubsub.CreateSnapshotRequest, dict,]) +def test_create_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3228,10 +3183,6 @@ def test_create_snapshot( assert response.topic == "topic_value" -def test_create_snapshot_from_dict(): - test_create_snapshot(request_type=dict) - - def test_create_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3412,9 +3363,8 @@ async def test_create_snapshot_flattened_error_async(): ) -def test_update_snapshot( - transport: str = "grpc", request_type=pubsub.UpdateSnapshotRequest -): +@pytest.mark.parametrize("request_type", [pubsub.UpdateSnapshotRequest, dict,]) +def test_update_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3440,10 +3390,6 @@ def test_update_snapshot( assert response.topic == "topic_value" -def test_update_snapshot_from_dict(): - test_update_snapshot(request_type=dict) - - def test_update_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3548,9 +3494,8 @@ async def test_update_snapshot_field_headers_async(): ] -def test_delete_snapshot( - transport: str = "grpc", request_type=pubsub.DeleteSnapshotRequest -): +@pytest.mark.parametrize("request_type", [pubsub.DeleteSnapshotRequest, dict,]) +def test_delete_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3574,10 +3519,6 @@ def test_delete_snapshot( assert response is None -def test_delete_snapshot_from_dict(): - test_delete_snapshot(request_type=dict) - - def test_delete_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3740,7 +3681,8 @@ async def test_delete_snapshot_flattened_error_async(): ) -def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): +@pytest.mark.parametrize("request_type", [pubsub.SeekRequest, dict,]) +def test_seek(request_type, transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3764,10 +3706,6 @@ def test_seek(transport: str = "grpc", request_type=pubsub.SeekRequest): assert isinstance(response, pubsub.SeekResponse) -def test_seek_from_dict(): - test_seek(request_type=dict) - - def test_seek_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4439,7 +4377,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From c62fd6753cf581817e268f0668d348b03e682577 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 10:35:02 -0500 Subject: [PATCH 0746/1197] chore(samples): Add check for tests in directory (#557) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../samples/snippets/noxfile.py | 70 +++++++++++-------- 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 0b3c8cd98f89..6b8a73b31465 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f90537dd7df70f6b663cd654b1fa5dee483cf6a4edcfd46072b2775be8a23ec + digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 93a9122cc457..3bbef5d54f44 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 4ea134fa81f1afd70975c17cbf1fd82e22c7b08f Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Thu, 13 Jan 2022 07:08:54 -0500 Subject: [PATCH 0747/1197] chore: update library type in .repo-metadata.json (#559) --- packages/google-cloud-pubsub/.repo-metadata.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index 55824dc13c9b..21bdab122f03 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -13,5 +13,5 @@ "default_version": "v1", "codeowner_team": "@googleapis/api-pubsub", "api_shortname": "pubsub", - "library_type": "" + "library_type": "GAPIC_COMBO" } From c8dbbf6c35feafd72a79187a0929ba2073f8ac81 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 14 Jan 2022 15:58:33 -0500 Subject: [PATCH 0748/1197] build: upgrade pip in samples build script (#561) * test: lite version update * try 1.2 * upgrade pip --- .../.kokoro/presubmit-against-pubsublite-samples.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh index a93980bc36d0..ff143a3941c3 100755 --- a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh @@ -79,6 +79,7 @@ for file in python-pubsublite/samples/**/requirements.txt; do python3.6 -m venv py-3.6 source py-3.6/bin/activate # Install python-pubsublite samples tests requirements. + python -m pip install --upgrade pip python -m pip install -r requirements.txt -q python -m pip install -r requirements-test.txt -q # Install python-pubsub from source. 
From 268d464bd1e8b77aa9270c3e5b8c987c87698482 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 16:25:38 -0500 Subject: [PATCH 0749/1197] build: switch to release-please for tagging (#560) Source-Link: https://github.com/googleapis/synthtool/commit/f8077d237e0df2cb0066dfc6e09fc41e1c59646a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.github/release-please.yml | 1 + packages/google-cloud-pubsub/.github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-pubsub/.github/release-trigger.yml diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 6b8a73b31465..ff5126c188d0 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/packages/google-cloud-pubsub/.github/release-please.yml b/packages/google-cloud-pubsub/.github/release-please.yml index 4507ad0598a5..466597e5b196 100644 --- a/packages/google-cloud-pubsub/.github/release-please.yml +++ b/packages/google-cloud-pubsub/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/packages/google-cloud-pubsub/.github/release-trigger.yml b/packages/google-cloud-pubsub/.github/release-trigger.yml new file mode 100644 index 000000000000..d4ca94189e16 --- /dev/null +++ 
b/packages/google-cloud-pubsub/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From c5f437619c422ea1cbff41081c9ae537e5d412aa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 18:58:11 -0500 Subject: [PATCH 0750/1197] chore(python): update release.sh to use keystore (#562) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.kokoro/release.sh | 2 +- .../google-cloud-pubsub/.kokoro/release/common.cfg | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index ff5126c188d0..eecb84c21b27 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index b3a2d20a8ef6..5c00cba3e852 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-pubsub python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg index 1648dd9ad75b..c67fccae42fa 100644 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-pubsub/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 54c3d9b3dd3d46cbf40aac513a81e69d819461cb Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Mon, 17 Jan 2022 14:24:37 +0100 Subject: [PATCH 0751/1197] process: add mypy session for samples (#551) * Add mypy session for samples to main noxfile * Make samples type hints good with mypy * Make samples type hints good with mypy (part 2) * Temporarily disable mypy_samples session * Move type-checked depepndency pins to noxfile The version pins that require recent enough depndencies for the sake of static type analysis should be placed in the corresponding nox session definition. They should not interfere with dependency versions used for testing the code at runtime, i.e. when running the tests. 
* Update google/cloud/pubsub_v1/publisher/futures.py Co-authored-by: Tim Swast Co-authored-by: Tianzi Cai Co-authored-by: Anthonios Partheniou --- .../cloud/pubsub_v1/publisher/futures.py | 23 ++- .../cloud/pubsub_v1/subscriber/futures.py | 12 +- packages/google-cloud-pubsub/noxfile.py | 29 ++++ packages/google-cloud-pubsub/owlbot.py | 47 +++++ .../samples/snippets/iam_test.py | 12 +- .../samples/snippets/mypy.ini | 8 + .../samples/snippets/publisher.py | 20 +-- .../samples/snippets/publisher_test.py | 62 ++++--- .../samples/snippets/quickstart/pub.py | 2 +- .../snippets/quickstart/quickstart_test.py | 27 ++- .../samples/snippets/quickstart/sub.py | 3 +- .../samples/snippets/schema.py | 15 +- .../samples/snippets/schema_test.py | 36 ++-- .../samples/snippets/subscriber.py | 41 +++-- .../samples/snippets/subscriber_test.py | 163 ++++++++++++------ 15 files changed, 366 insertions(+), 134 deletions(-) create mode 100644 packages/google-cloud-pubsub/samples/snippets/mypy.ini diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index 09bb2417c45c..c7cc66f18d2c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -14,10 +14,14 @@ from __future__ import absolute_import -from typing import Union +import typing +from typing import Any, Callable, Union from google.cloud.pubsub_v1 import futures +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud import pubsub_v1 + class Future(futures.Future): """This future object is returned from asychronous Pub/Sub publishing @@ -60,3 +64,20 @@ def result(self, timeout: Union[int, float] = None) -> str: call execution. """ return super().result(timeout=timeout) + + # This exists to make the type checkers happy. 
+ def add_done_callback( + self, callback: Callable[["pubsub_v1.publisher.futures.Future"], Any] + ) -> None: + """Attach a callable that will be called when the future finishes. + + Args: + callback: + A callable that will be called with this future as its only + argument when the future completes or is cancelled. The callable + will always be called by a thread in the same process in which + it was added. If the future has already completed or been + cancelled then the callable will be called immediately. These + callables are called in the order that they were added. + """ + return super().add_done_callback(callback) # type: ignore diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index eec9590ed59d..a024ba698093 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -52,19 +52,27 @@ def _on_close_callback(self, manager: "StreamingPullManager", result: Any): else: self.set_exception(result) - def cancel(self): + def cancel(self) -> bool: """Stops pulling messages and shutdowns the background thread consuming messages. + The method always returns ``True``, as the shutdown is always initiated. + However, if the background stream is already being shut down or the shutdown + has completed, this method is a no-op. + .. versionchanged:: 2.4.1 The method does not block anymore, it just triggers the shutdown and returns immediately. To block until the background stream is terminated, call :meth:`result()` after cancelling the future. + + .. versionchanged:: 2.10.0 + The method always returns ``True`` instead of ``None``. """ # NOTE: We circumvent the base future's self._state to track the cancellation # state, as this state has different meaning with streaming pull futures. 
self.__cancelled = True - return self.__manager.close() + self.__manager.close() + return True def cancelled(self) -> bool: """ diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 33813328121f..f5d3cd26c686 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -47,6 +47,7 @@ "blacken", "mypy", "pytype", + # "mypy_samples", # TODO: uncomment when the checks pass "docs", ] @@ -64,6 +65,12 @@ def mypy(session): # require an additional pass. session.install("types-protobuf", "types-setuptools") + # Version 2.1.1 of google-api-core version is the first type-checked release. + # Version 2.2.0 of google-cloud-core version is the first type-checked release. + session.install( + "google-api-core[grpc]>=2.1.1", "google-cloud-core>=2.2.0", + ) + # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. # https://github.com/googleapis/gapic-generator-python/issues/1092 @@ -78,6 +85,28 @@ def pytype(session): session.run("pytype") +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy_samples(session): + """Run type checks with mypy.""" + + session.install("-e", ".[all]") + + session.install("pytest") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + session.install("types-mock", "types-protobuf", "types-setuptools") + + session.run( + "mypy", + "--config-file", + str(CURRENT_DIRECTORY / "samples" / "snippets" / "mypy.ini"), + "--no-incremental", # Required by warn-unused-configs from mypy.ini to work + "samples/", + ) + + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. 
diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 8ad33cf141a3..583e82aa9d73 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -447,6 +447,13 @@ def mypy(session): # require an additional pass. session.install("types-protobuf", "types-setuptools") + # Version 2.1.1 of google-api-core version is the first type-checked release. + # Version 2.2.0 of google-cloud-core version is the first type-checked release. + session.install( + "google-api-core[grpc]>=2.1.1", + "google-cloud-core>=2.2.0", + ) + # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. # https://github.com/googleapis/gapic-generator-python/issues/1092 @@ -454,6 +461,46 @@ def mypy(session): ), ) + +# ---------------------------------------------------------------------------- +# Add mypy_samples nox session. +# ---------------------------------------------------------------------------- +s.replace( + "noxfile.py", + r'"pytype",', + '\g<0>\n # "mypy_samples", # TODO: uncomment when the checks pass', +) +s.replace( + "noxfile.py", + r'session\.run\("pytype"\)', + textwrap.dedent( + ''' \g<0> + + + @nox.session(python=DEFAULT_PYTHON_VERSION) + def mypy_samples(session): + """Run type checks with mypy.""" + + session.install("-e", ".[all]") + + session.install("pytest") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + session.install("types-mock", "types-protobuf", "types-setuptools") + + session.run( + "mypy", + "--config-file", + str(CURRENT_DIRECTORY / "samples" / "snippets" / "mypy.ini"), + "--no-incremental", # Required by warn-unused-configs from mypy.ini to work + "samples/", + )''' + ), +) + + # Only consider the hand-written layer when assessing the test coverage. 
s.replace( "noxfile.py", "--cov=google", "--cov=google/cloud", diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index f5d0ef192c71..655e43e3689f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -78,14 +78,14 @@ def subscription_path( pass -def test_get_topic_policy(topic_path: str, capsys: CaptureFixture) -> None: +def test_get_topic_policy(topic_path: str, capsys: CaptureFixture[str]) -> None: iam.get_topic_policy(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert topic_path in out def test_get_subscription_policy( - subscription_path: str, capsys: CaptureFixture + subscription_path: str, capsys: CaptureFixture[str] ) -> None: iam.get_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() @@ -93,8 +93,8 @@ def test_get_subscription_policy( def test_set_topic_policy( - publisher_client: pubsub_v1.PublisherClient, topic_path: str, -) -> CaptureFixture: + publisher_client: pubsub_v1.PublisherClient, topic_path: str +) -> None: iam.set_topic_policy(PROJECT_ID, TOPIC_ID) policy = publisher_client.get_iam_policy(request={"resource": topic_path}) assert "roles/pubsub.publisher" in str(policy) @@ -110,7 +110,7 @@ def test_set_subscription_policy( assert "domain:google.com" in str(policy) -def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture) -> None: +def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture[str]) -> None: iam.check_topic_permissions(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert topic_path in out @@ -118,7 +118,7 @@ def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture) -> Non def test_check_subscription_permissions( - subscription_path: str, capsys: CaptureFixture, + subscription_path: str, capsys: CaptureFixture[str], ) -> None: iam.check_subscription_permissions(PROJECT_ID, SUBSCRIPTION_ID) 
out, _ = capsys.readouterr() diff --git a/packages/google-cloud-pubsub/samples/snippets/mypy.ini b/packages/google-cloud-pubsub/samples/snippets/mypy.ini new file mode 100644 index 000000000000..8f2bae69aebc --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/mypy.ini @@ -0,0 +1,8 @@ +[mypy] +; We require type annotations in all samples. +strict = True +exclude = noxfile\.py +warn_unused_configs = True + +[mypy-avro.*,backoff,flaky] +ignore_missing_imports = True diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 821efcbb5d5d..11fa1eb85c08 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -95,9 +95,9 @@ def publish_messages(project_id: str, topic_id: str) -> None: topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = f"Message number {n}" + data_str = f"Message number {n}" # Data must be a bytestring - data = data.encode("utf-8") + data = data_str.encode("utf-8") # When you publish a message, the client returns a future. 
future = publisher.publish(topic_path, data) print(future.result()) @@ -121,9 +121,9 @@ def publish_messages_with_custom_attributes(project_id: str, topic_id: str) -> N topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = f"Message number {n}" + data_str = f"Message number {n}" # Data must be a bytestring - data = data.encode("utf-8") + data = data_str.encode("utf-8") # Add two attributes, origin and username, to the message future = publisher.publish( topic_path, data, origin="python-sample", username="gcp" @@ -202,9 +202,9 @@ def callback(future: pubsub_v1.publisher.futures.Future) -> None: print(message_id) for n in range(1, 10): - data = f"Message number {n}" + data_str = f"Message number {n}" # Data must be a bytestring - data = data.encode("utf-8") + data = data_str.encode("utf-8") publish_future = publisher.publish(topic_path, data) # Non-blocking. Allow the publisher client to batch multiple messages. publish_future.add_done_callback(callback) @@ -252,9 +252,9 @@ def callback(publish_future: pubsub_v1.publisher.futures.Future) -> None: # Publish 1000 messages in quick succession may be constrained by # publisher flow control. for n in range(1, 1000): - data = f"Message number {n}" + data_str = f"Message number {n}" # Data must be a bytestring - data = data.encode("utf-8") + data = data_str.encode("utf-8") publish_future = publisher.publish(topic_path, data) # Non-blocking. Allow the publisher client to batch messages. 
publish_future.add_done_callback(callback) @@ -298,9 +298,9 @@ def publish_messages_with_retry_settings(project_id: str, topic_id: str) -> None topic_path = publisher.topic_path(project_id, topic_id) for n in range(1, 10): - data = f"Message number {n}" + data_str = f"Message number {n}" # Data must be a bytestring - data = data.encode("utf-8") + data = data_str.encode("utf-8") future = publisher.publish(topic=topic_path, data=data, retry=custom_retry) print(future.result()) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 0e06d8f2afa2..cf00da98e285 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -14,7 +14,8 @@ import os import time -from typing import Generator +import typing +from typing import Any, Callable, cast, Iterator, TypeVar, Union import uuid from _pytest.capture import CaptureFixture @@ -26,6 +27,7 @@ import publisher + UUID = uuid.uuid4().hex PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC_ID = "publisher-test-topic-" + UUID @@ -33,14 +35,19 @@ # Allow 60s for tests to finish. MAX_TIME = 60 +if typing.TYPE_CHECKING: + from unittest.mock import AsyncMock, MagicMock + + MockType = Union[MagicMock, AsyncMock] + @pytest.fixture(scope="module") -def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: +def publisher_client() -> Iterator[pubsub_v1.PublisherClient]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: +def subscriber_client() -> Iterator[pubsub_v1.SubscriberClient]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client # Close the subscriber client properly during teardown. 
@@ -48,9 +55,7 @@ def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: @pytest.fixture(scope="module") -def topic_path( - publisher_client: pubsub_v1.PublisherClient, -) -> Generator[str, None, None]: +def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Iterator[str]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: @@ -69,7 +74,7 @@ def topic_path( @pytest.fixture(scope="module") def subscription_path( subscriber_client: pubsub_v1.SubscriberClient, topic_path: str -) -> Generator[str, None, None]: +) -> Iterator[str]: subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) subscription = subscriber_client.create_subscription( request={"name": subscription_path, "topic": topic_path} @@ -84,7 +89,7 @@ def subscription_path( pass -def _make_sleep_patch() -> None: +def _make_sleep_patch() -> 'mock.mock._patch["MockType"]': real_sleep = time.sleep def new_sleep(period: float) -> None: @@ -98,7 +103,7 @@ def new_sleep(period: float) -> None: def test_create( - publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: # The scope of `topic_path` is limited to this function. 
topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) @@ -114,14 +119,14 @@ def test_create( assert f"Created topic: {topic_path}" in out -def test_list(topic_path: str, capsys: CaptureFixture) -> None: +def test_list(topic_path: str, capsys: CaptureFixture[str]) -> None: publisher.list_topics(PROJECT_ID) out, _ = capsys.readouterr() assert topic_path in out -def test_publish(topic_path: str, capsys: CaptureFixture) -> None: +def test_publish(topic_path: str, capsys: CaptureFixture[str]) -> None: publisher.publish_messages(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() @@ -129,7 +134,7 @@ def test_publish(topic_path: str, capsys: CaptureFixture) -> None: def test_publish_with_custom_attributes( - topic_path: str, capsys: CaptureFixture + topic_path: str, capsys: CaptureFixture[str] ) -> None: publisher.publish_messages_with_custom_attributes(PROJECT_ID, TOPIC_ID) @@ -137,7 +142,9 @@ def test_publish_with_custom_attributes( assert f"Published messages with custom attributes to {topic_path}." in out -def test_publish_with_batch_settings(topic_path: str, capsys: CaptureFixture) -> None: +def test_publish_with_batch_settings( + topic_path: str, capsys: CaptureFixture[str] +) -> None: publisher.publish_messages_with_batch_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() @@ -145,7 +152,7 @@ def test_publish_with_batch_settings(topic_path: str, capsys: CaptureFixture) -> def test_publish_with_flow_control_settings( - topic_path: str, capsys: CaptureFixture + topic_path: str, capsys: CaptureFixture[str] ) -> None: publisher.publish_messages_with_flow_control_settings(PROJECT_ID, TOPIC_ID) @@ -153,21 +160,27 @@ def test_publish_with_flow_control_settings( assert f"Published messages with flow control settings to {topic_path}." 
in out -def test_publish_with_retry_settings(topic_path: str, capsys: CaptureFixture) -> None: +def test_publish_with_retry_settings( + topic_path: str, capsys: CaptureFixture[str] +) -> None: publisher.publish_messages_with_retry_settings(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with retry settings to {topic_path}." in out -def test_publish_with_error_handler(topic_path: str, capsys: CaptureFixture) -> None: +def test_publish_with_error_handler( + topic_path: str, capsys: CaptureFixture[str] +) -> None: publisher.publish_messages_with_error_handler(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() assert f"Published messages with error handler to {topic_path}." in out -def test_publish_with_ordering_keys(topic_path: str, capsys: CaptureFixture) -> None: +def test_publish_with_ordering_keys( + topic_path: str, capsys: CaptureFixture[str] +) -> None: publisher.publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) out, _ = capsys.readouterr() @@ -175,7 +188,7 @@ def test_publish_with_ordering_keys(topic_path: str, capsys: CaptureFixture) -> def test_resume_publish_with_error_handler( - topic_path: str, capsys: CaptureFixture + topic_path: str, capsys: CaptureFixture[str] ) -> None: publisher.resume_publish_with_ordering_keys(PROJECT_ID, TOPIC_ID) @@ -183,7 +196,9 @@ def test_resume_publish_with_error_handler( assert f"Resumed publishing messages with ordering keys to {topic_path}." 
in out -def test_detach_subscription(subscription_path: str, capsys: CaptureFixture) -> None: +def test_detach_subscription( + subscription_path: str, capsys: CaptureFixture[str] +) -> None: publisher.detach_subscription(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() @@ -193,7 +208,14 @@ def test_detach_subscription(subscription_path: str, capsys: CaptureFixture) -> def test_delete(publisher_client: pubsub_v1.PublisherClient) -> None: publisher.delete_topic(PROJECT_ID, TOPIC_ID) - @backoff.on_exception(backoff.expo, AssertionError, max_time=MAX_TIME) + C = TypeVar("C", bound=Callable[..., Any]) + + typed_backoff = cast( + Callable[[C], C], + backoff.on_exception(backoff.expo, AssertionError, max_time=MAX_TIME), + ) + + @typed_backoff def eventually_consistent_test() -> None: with pytest.raises(Exception): publisher_client.get_topic( diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py index 80bf157a36c2..7215abd864ff 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/pub.py @@ -33,7 +33,7 @@ def pub(project_id: str, topic_id: str) -> None: api_future = client.publish(topic_path, data) message_id = api_future.result() - print(f"Published {data} to {topic_path}: {message_id}") + print(f"Published {data.decode()} to {topic_path}: {message_id}") if __name__ == "__main__": diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py index e2a5e9844f8a..3ed07cf8184c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/quickstart_test.py @@ -15,7 +15,7 @@ # limitations under the License. 
import os -from typing import Generator +from typing import Any, Callable, cast, Iterator, TypeVar import uuid from _pytest.capture import CaptureFixture @@ -32,19 +32,19 @@ @pytest.fixture(scope="module") -def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: +def publisher_client() -> Iterator[pubsub_v1.PublisherClient]: yield pubsub_v1.PublisherClient() @pytest.fixture(scope="module") -def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: +def subscriber_client() -> Iterator[pubsub_v1.SubscriberClient]: subscriber_client = pubsub_v1.SubscriberClient() yield subscriber_client subscriber_client.close() @pytest.fixture(scope="module") -def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]: +def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Iterator[str]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) try: @@ -57,7 +57,9 @@ def topic_path(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, No @pytest.fixture(scope="module") -def subscription_path(subscriber_client: pubsub_v1.SubscriberClient, topic_path: str) -> Generator[str, None, None]: +def subscription_path( + subscriber_client: pubsub_v1.SubscriberClient, topic_path: str +) -> Iterator[str]: subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) try: @@ -72,7 +74,7 @@ def subscription_path(subscriber_client: pubsub_v1.SubscriberClient, topic_path: subscriber_client.close() -def test_pub(topic_path: str, capsys: CaptureFixture) -> None: +def test_pub(topic_path: str, capsys: CaptureFixture[str]) -> None: import pub pub.pub(PROJECT_ID, TOPIC_ID) @@ -82,8 +84,17 @@ def test_pub(topic_path: str, capsys: CaptureFixture) -> None: assert "Hello, World!" 
in out -@flaky(max_runs=3, min_passes=1) -def test_sub(publisher_client: pubsub_v1.PublisherClient, topic_path: str, subscription_path: str, capsys: CaptureFixture) -> None: +C = TypeVar("C", bound=Callable[..., Any]) +_typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) + + +@_typed_flaky +def test_sub( + publisher_client: pubsub_v1.PublisherClient, + topic_path: str, + subscription_path: str, + capsys: CaptureFixture[str], +) -> None: publisher_client.publish(topic_path, b"Hello World!") import sub diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index d3326f9802f3..0900f652d4e9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -15,11 +15,12 @@ # limitations under the License. import argparse +from typing import Optional from google.cloud import pubsub_v1 -def sub(project_id: str, subscription_id: str, timeout: float = None) -> None: +def sub(project_id: str, subscription_id: str, timeout: Optional[float] = None) -> None: """Receives messages from a Pub/Sub subscription.""" # Initialize a Subscriber client subscriber_client = pubsub_v1.SubscriberClient() diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index bf7ae3fbe500..977e4c0c432d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -22,6 +22,7 @@ """ import argparse +from typing import Optional from google.cloud import pubsub_v1 @@ -234,10 +235,11 @@ def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> None encoder = BinaryEncoder(bout) writer.write(record, encoder) data = bout.getvalue() - print(f"Preparing a binary-encoded message:\n{data}") + print(f"Preparing a binary-encoded message:\n{data.decode()}") elif encoding 
== Encoding.JSON: - data = json.dumps(record).encode("utf-8") - print(f"Preparing a JSON-encoded message:\n{data}") + data_str = json.dumps(record) + print(f"Preparing a JSON-encoded message:\n{data_str}") + data = data_str.encode("utf-8") else: print(f"No encoding specified in {topic_path}. Abort.") exit(0) @@ -258,7 +260,7 @@ def publish_proto_messages(project_id: str, topic_id: str) -> None: from google.protobuf.json_format import MessageToJson from google.pubsub_v1.types import Encoding - from utilities import us_states_pb2 + from utilities import us_states_pb2 # type: ignore # TODO(developer): Replace these variables before running the sample. # project_id = "your-project-id" @@ -298,7 +300,10 @@ def publish_proto_messages(project_id: str, topic_id: str) -> None: def subscribe_with_avro_schema( - project_id: str, subscription_id: str, avsc_file: str, timeout: float = None + project_id: str, + subscription_id: str, + avsc_file: str, + timeout: Optional[float] = None, ) -> None: """Receive and decode messages sent to a topic with an Avro schema.""" # [START pubsub_subscribe_avro_records] diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index 1f74c5eb335a..2cdf4bfb6b46 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -15,7 +15,7 @@ # limitations under the License. 
import os -from typing import Generator +from typing import Any, Callable, cast, Generator, TypeVar import uuid from _pytest.capture import CaptureFixture @@ -193,7 +193,7 @@ def proto_subscription( def test_create_avro_schema( schema_client: pubsub_v1.SchemaServiceClient, avro_schema: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: try: schema_client.delete_schema(request={"name": avro_schema}) @@ -210,7 +210,7 @@ def test_create_avro_schema( def test_create_proto_schema( schema_client: pubsub_v1.SchemaServiceClient, proto_schema: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: try: schema_client.delete_schema(request={"name": proto_schema}) @@ -224,20 +224,22 @@ def test_create_proto_schema( assert f"{proto_schema}" in out -def test_get_schema(avro_schema: str, capsys: CaptureFixture) -> None: +def test_get_schema(avro_schema: str, capsys: CaptureFixture[str]) -> None: schema.get_schema(PROJECT_ID, AVRO_SCHEMA_ID) out, _ = capsys.readouterr() assert "Got a schema" in out assert f"{avro_schema}" in out -def test_list_schemas(capsys: CaptureFixture) -> None: +def test_list_schemas(capsys: CaptureFixture[str]) -> None: schema.list_schemas(PROJECT_ID) out, _ = capsys.readouterr() assert "Listed schemas." 
in out -def test_create_topic_with_schema(avro_schema: str, capsys: CaptureFixture) -> None: +def test_create_topic_with_schema( + avro_schema: str, capsys: CaptureFixture[str] +) -> None: schema.create_topic_with_schema(PROJECT_ID, AVRO_TOPIC_ID, AVRO_SCHEMA_ID, "BINARY") out, _ = capsys.readouterr() assert "Created a topic" in out @@ -247,7 +249,7 @@ def test_create_topic_with_schema(avro_schema: str, capsys: CaptureFixture) -> N def test_publish_avro_records( - avro_schema: str, avro_topic: str, capsys: CaptureFixture + avro_schema: str, avro_topic: str, capsys: CaptureFixture[str] ) -> None: schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) out, _ = capsys.readouterr() @@ -256,7 +258,10 @@ def test_publish_avro_records( def test_subscribe_with_avro_schema( - avro_schema: str, avro_topic: str, avro_subscription: str, capsys: CaptureFixture + avro_schema: str, + avro_topic: str, + avro_subscription: str, + capsys: CaptureFixture[str], ) -> None: schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) @@ -265,7 +270,7 @@ def test_subscribe_with_avro_schema( assert "Received a binary-encoded message:" in out -def test_publish_proto_records(proto_topic: str, capsys: CaptureFixture) -> None: +def test_publish_proto_records(proto_topic: str, capsys: CaptureFixture[str]) -> None: schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) out, _ = capsys.readouterr() assert "Preparing a binary-encoded message" in out @@ -273,7 +278,10 @@ def test_publish_proto_records(proto_topic: str, capsys: CaptureFixture) -> None def test_subscribe_with_proto_schema( - proto_schema: str, proto_topic: str, proto_subscription: str, capsys: CaptureFixture + proto_schema: str, + proto_topic: str, + proto_subscription: str, + capsys: CaptureFixture[str], ) -> None: schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) @@ -282,8 +290,12 @@ def test_subscribe_with_proto_schema( assert "Received a binary-encoded message" in out -@flaky(max_runs=3, min_passes=1) 
-def test_delete_schema(proto_schema: str, capsys: CaptureFixture) -> None: +C = TypeVar("C", bound=Callable[..., Any]) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) + + +@typed_flaky +def test_delete_schema(proto_schema: str, capsys: CaptureFixture[str]) -> None: schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID) out, _ = capsys.readouterr() assert "Deleted a schema" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 0c67c95fb492..955dd278fe04 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -22,6 +22,11 @@ """ import argparse +import typing +from typing import Optional + +if typing.TYPE_CHECKING: + from google.pubsub_v1 import types as gapic_types def list_subscriptions_in_topic(project_id: str, topic_id: str) -> None: @@ -278,7 +283,7 @@ def update_subscription_with_dead_letter_policy( subscription_id: str, dead_letter_topic_id: str, max_delivery_attempts: int = 5, -) -> None: +) -> "gapic_types.Subscription": """Update a subscription's dead letter policy.""" # [START pubsub_dead_letter_update_subscription] from google.cloud import pubsub_v1 @@ -327,8 +332,11 @@ def update_subscription_with_dead_letter_policy( ) with subscriber: - subscription_after_update = subscriber.update_subscription( - request={"subscription": subscription, "update_mask": update_mask} + subscription_after_update = typing.cast( + "gapic_types.Subscription", + subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} + ), ) print(f"After the update: {subscription_after_update}.") @@ -338,7 +346,7 @@ def update_subscription_with_dead_letter_policy( def remove_dead_letter_policy( project_id: str, topic_id: str, subscription_id: str -) -> None: +) -> "gapic_types.Subscription": """Remove dead letter policy from a subscription.""" # [START 
pubsub_dead_letter_remove] from google.cloud import pubsub_v1 @@ -372,8 +380,11 @@ def remove_dead_letter_policy( ) with subscriber: - subscription_after_update = subscriber.update_subscription( - request={"subscription": subscription, "update_mask": update_mask} + subscription_after_update = typing.cast( + "gapic_types.Subscription", + subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} + ), ) print(f"After removing the policy: {subscription_after_update}.") @@ -382,7 +393,7 @@ def remove_dead_letter_policy( def receive_messages( - project_id: str, subscription_id: str, timeout: float = None + project_id: str, subscription_id: str, timeout: Optional[float] = None ) -> None: """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull] @@ -422,7 +433,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: def receive_messages_with_custom_attributes( - project_id: str, subscription_id: str, timeout: float = None + project_id: str, subscription_id: str, timeout: Optional[float] = None ) -> None: """Receives messages from a pull subscription.""" # [START pubsub_subscriber_async_pull_custom_attributes] @@ -439,7 +450,7 @@ def receive_messages_with_custom_attributes( subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message: pubsub_v1.subscriber.message.Message) -> None: - print(f"Received {message.data}.") + print(f"Received {message.data!r}.") if message.attributes: print("Attributes:") for key in message.attributes: @@ -463,7 +474,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: def receive_messages_with_flow_control( - project_id: str, subscription_id: str, timeout: float = None + project_id: str, subscription_id: str, timeout: Optional[float] = None ) -> None: """Receives messages from a pull subscription with flow control.""" # [START pubsub_subscriber_flow_settings] @@ -480,7 +491,7 @@ def 
receive_messages_with_flow_control( subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message: pubsub_v1.subscriber.message.Message) -> None: - print(f"Received {message.data}.") + print(f"Received {message.data!r}.") message.ack() # Limit the subscriber to only have ten outstanding messages at a time. @@ -522,10 +533,10 @@ def receive_messages_with_blocking_shutdown( subscription_path = subscriber.subscription_path(project_id, subscription_id) def callback(message: pubsub_v1.subscriber.message.Message) -> None: - print(f"Received {message.data}.") + print(f"Received {message.data!r}.") time.sleep(timeout + 3.0) # Pocess longer than streaming pull future timeout. message.ack() - print(f"Done processing the message {message.data}.") + print(f"Done processing the message {message.data!r}.") streaming_pull_future = subscriber.subscribe( subscription_path, callback=callback, await_callbacks_on_shutdown=True, @@ -665,7 +676,7 @@ def synchronous_pull_with_lease_management( def listen_for_errors( - project_id: str, subscription_id: str, timeout: float = None + project_id: str, subscription_id: str, timeout: Optional[float] = None ) -> None: """Receives messages and catches errors from a pull subscription.""" # [START pubsub_subscriber_error_listener] @@ -703,7 +714,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: def receive_messages_with_delivery_attempts( - project_id: str, subscription_id: str, timeout: float = None + project_id: str, subscription_id: str, timeout: Optional[float] = None ) -> None: # [START pubsub_dead_letter_delivery_attempt] from concurrent.futures import TimeoutError diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 574529e80c32..6ad3da4faa88 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -16,7 +16,7 @@ import re import sys import time -from typing import Generator +from typing import Any, Callable, cast, Generator, TypeVar import uuid from _pytest.capture import CaptureFixture @@ -44,6 +44,10 @@ DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 UPDATED_MAX_DELIVERY_ATTEMPTS = 20 +C = TypeVar("C", bound=Callable[..., Any]) + +typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) + @pytest.fixture(scope="module") def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: @@ -126,7 +130,11 @@ def subscription_sync( yield subscription.name - @backoff.on_exception(backoff.expo, Unknown, max_time=300) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + ) + + @typed_backoff def delete_subscription() -> None: try: subscriber_client.delete_subscription( @@ -197,7 +205,7 @@ def _publish_messages( publisher_client: pubsub_v1.PublisherClient, topic: str, message_num: int = 5, - **attrs: dict, + **attrs: Any, ) -> None: for n in range(message_num): data = f"message {n}".encode("utf-8") @@ -205,8 +213,13 @@ def _publish_messages( publish_future.result() -def test_list_in_topic(subscription_admin: str, capsys: CaptureFixture) -> None: - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) +def test_list_in_topic(subscription_admin: str, capsys: CaptureFixture[str]) -> None: + typed_backoff = cast( + Callable[[C], C], + backoff.on_exception(backoff.expo, AssertionError, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: subscriber.list_subscriptions_in_topic(PROJECT_ID, TOPIC) out, _ = capsys.readouterr() @@ -215,8 +228,13 @@ def eventually_consistent_test() -> None: eventually_consistent_test() -def test_list_in_project(subscription_admin: str, capsys: CaptureFixture) -> None: - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) +def 
test_list_in_project(subscription_admin: str, capsys: CaptureFixture[str]) -> None: + typed_backoff = cast( + Callable[[C], C], + backoff.on_exception(backoff.expo, AssertionError, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: subscriber.list_subscriptions_in_project(PROJECT_ID) out, _ = capsys.readouterr() @@ -228,7 +246,7 @@ def eventually_consistent_test() -> None: def test_create_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN @@ -251,7 +269,7 @@ def test_create_subscription_with_dead_letter_policy( subscriber_client: pubsub_v1.SubscriberClient, subscription_dlq: str, dead_letter_topic: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: try: subscriber_client.delete_subscription( @@ -270,18 +288,23 @@ def test_create_subscription_with_dead_letter_policy( assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." in out -@flaky(max_runs=3, min_passes=1) +@typed_flaky def test_receive_with_delivery_attempts( publisher_client: pubsub_v1.PublisherClient, topic: str, dead_letter_topic: str, subscription_dlq: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: + typed_backoff = cast( + Callable[[C], C], + backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=120), + ) + # The dlq subscription raises 404 before it's ready. # We keep retrying up to 10 minutes for mitigating the flakiness. 
- @backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=120) + @typed_backoff def run_sample() -> None: _publish_messages(publisher_client, topic) @@ -296,14 +319,19 @@ def run_sample() -> None: assert "With delivery attempts: " in out -@flaky(max_runs=3, min_passes=1) +@typed_flaky def test_update_dead_letter_policy( - subscription_dlq: str, dead_letter_topic: str, capsys: CaptureFixture + subscription_dlq: str, dead_letter_topic: str, capsys: CaptureFixture[str] ) -> None: + typed_backoff = cast( + Callable[[C], C], + backoff.on_exception(backoff.expo, (Unknown, InternalServerError), max_time=60), + ) + # We saw internal server error that suggests to retry. - @backoff.on_exception(backoff.expo, (Unknown, InternalServerError), max_time=60) + @typed_backoff def run_sample() -> None: subscriber.update_subscription_with_dead_letter_policy( PROJECT_ID, @@ -321,9 +349,9 @@ def run_sample() -> None: assert f"max_delivery_attempts: {UPDATED_MAX_DELIVERY_ATTEMPTS}" in out -@flaky(max_runs=3, min_passes=1) +@typed_flaky def test_remove_dead_letter_policy( - subscription_dlq: str, capsys: CaptureFixture + subscription_dlq: str, capsys: CaptureFixture[str] ) -> None: subscription_after_update = subscriber.remove_dead_letter_policy( PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ @@ -337,7 +365,7 @@ def test_remove_dead_letter_policy( def test_create_subscription_with_ordering( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN @@ -360,10 +388,14 @@ def test_create_subscription_with_ordering( def test_create_push_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + # The scope of 
`subscription_path` is limited to this function. - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + @typed_backoff def eventually_consistent_test() -> None: subscription_path = subscriber_client.subscription_path( PROJECT_ID, SUBSCRIPTION_ADMIN @@ -375,7 +407,9 @@ def eventually_consistent_test() -> None: except NotFound: pass - subscriber.create_push_subscription(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT) + subscriber.create_push_subscription( + PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT + ) out, _ = capsys.readouterr() assert f"{subscription_admin}" in out @@ -384,10 +418,14 @@ def eventually_consistent_test() -> None: def test_update_push_suscription( - subscription_admin: str, - capsys: CaptureFixture, + subscription_admin: str, capsys: CaptureFixture[str], ) -> None: - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: subscriber.update_push_subscription( PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT @@ -405,7 +443,11 @@ def test_delete_subscription( ) -> None: subscriber.delete_subscription(PROJECT_ID, SUBSCRIPTION_ADMIN) - @backoff.on_exception(backoff.expo, AssertionError, max_time=60) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: with pytest.raises(Exception): subscriber_client.get_subscription( @@ -419,10 +461,14 @@ def test_receive( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_async: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: - @backoff.on_exception(backoff.expo, Unknown, max_time=60) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: 
_publish_messages(publisher_client, topic) @@ -440,10 +486,14 @@ def test_receive_with_custom_attributes( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_async: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: - @backoff.on_exception(backoff.expo, Unknown, max_time=60) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: _publish_messages(publisher_client, topic, origin="python-sample") @@ -464,10 +514,14 @@ def test_receive_with_flow_control( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_async: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: - @backoff.on_exception(backoff.expo, Unknown, max_time=300) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + ) + + @typed_backoff def eventually_consistent_test() -> None: _publish_messages(publisher_client, topic) @@ -485,7 +539,7 @@ def test_receive_with_blocking_shutdown( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_async: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: _received = re.compile(r".*received.*message.*", flags=re.IGNORECASE) @@ -493,7 +547,11 @@ def test_receive_with_blocking_shutdown( _canceled = re.compile(r".*streaming pull future canceled.*", flags=re.IGNORECASE) _shut_down = re.compile(r".*done waiting.*stream shutdown.*", flags=re.IGNORECASE) - @backoff.on_exception(backoff.expo, Unknown, max_time=300) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + ) + + @typed_backoff def eventually_consistent_test() -> None: _publish_messages(publisher_client, topic, message_num=3) @@ -505,24 +563,14 @@ def eventually_consistent_test() -> None: out_lines = out.splitlines() msg_received_lines = [ - i - for i, line in enumerate(out_lines) - if 
_received.search(line) - ] - msg_done_lines = [ - i - for i, line in enumerate(out_lines) - if _done.search(line) + i for i, line in enumerate(out_lines) if _received.search(line) ] + msg_done_lines = [i for i, line in enumerate(out_lines) if _done.search(line)] stream_canceled_lines = [ - i - for i, line in enumerate(out_lines) - if _canceled.search(line) + i for i, line in enumerate(out_lines) if _canceled.search(line) ] shutdown_done_waiting_lines = [ - i - for i, line in enumerate(out_lines) - if _shut_down.search(line) + i for i, line in enumerate(out_lines) if _shut_down.search(line) ] try: @@ -554,10 +602,14 @@ def test_listen_for_errors( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_async: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: - @backoff.on_exception(backoff.expo, Unknown, max_time=60) + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + + @typed_backoff def eventually_consistent_test() -> None: _publish_messages(publisher_client, topic) @@ -574,7 +626,7 @@ def test_receive_synchronously( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_sync: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: _publish_messages(publisher_client, topic) @@ -586,14 +638,19 @@ def test_receive_synchronously( assert f"{subscription_sync}" in out -@flaky(max_runs=3, min_passes=1) +@typed_flaky def test_receive_synchronously_with_lease( publisher_client: pubsub_v1.PublisherClient, topic: str, subscription_sync: str, - capsys: CaptureFixture, + capsys: CaptureFixture[str], ) -> None: - @backoff.on_exception(backoff.expo, Unknown, max_time=300) + + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + ) + + @typed_backoff def run_sample() -> None: _publish_messages(publisher_client, topic, message_num=10) # Pausing 10s to allow the subscriber to establish the connection From 
779fc16f1d7c09f8525ac073c7df54cbf3277334 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 20:29:28 -0500 Subject: [PATCH 0752/1197] chore(python): Noxfile recognizes that tests can live in a folder (#566) Source-Link: https://github.com/googleapis/synthtool/commit/4760d8dce1351d93658cb11d02a1b7ceb23ae5d7 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/samples/snippets/noxfile.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index eecb84c21b27..52d79c11f3ad 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 3bbef5d54f44..20cdfc620138 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: From bc600bb6f40cbdadd9e2cd313464f9ade7186b5d Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Wed, 19 Jan 2022 17:35:13 +0100 Subject: [PATCH 0753/1197] fix: refactor client 
classes for safer type checking (#552) * Directly subclass generated PublisherClient * Directly subclass generated SubscriberClient * Remove unneded GAPIC helper * Remove pytype There are no real advantages over mypy, but at the same time several downsides such as being slow, producing more false positives, etc. * Re-enable mypy_samples nox session * Convert a comment to docstring in publisher client * Add api property back, but deprecated * Assure that mypy_samples is not commented out * Remove redundant type hint casts * Disable mypy_samples session until blockers resolved --- packages/google-cloud-pubsub/.gitignore | 1 - .../google/cloud/pubsub_v1/_gapic.py | 74 ------------------- .../pubsub_v1/publisher/_batch/thread.py | 2 +- .../cloud/pubsub_v1/publisher/client.py | 47 ++++++++---- .../pubsub_v1/publisher/flow_controller.py | 2 +- .../pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../_protocol/streaming_pull_manager.py | 5 +- .../cloud/pubsub_v1/subscriber/client.py | 48 +++++++----- .../cloud/pubsub_v1/subscriber/message.py | 2 +- .../google/cloud/pubsub_v1/types.py | 2 - packages/google-cloud-pubsub/noxfile.py | 14 +--- packages/google-cloud-pubsub/owlbot.py | 62 +--------------- .../samples/snippets/subscriber.py | 14 +--- packages/google-cloud-pubsub/setup.cfg | 14 ---- .../pubsub_v1/publisher/batch/test_thread.py | 24 +++--- .../publisher/test_publisher_client.py | 61 +++++++++++---- .../subscriber/test_streaming_pull_manager.py | 4 +- .../subscriber/test_subscriber_client.py | 64 +++++++++++----- .../tests/unit/pubsub_v1/test__gapic.py | 63 ---------------- 19 files changed, 180 insertions(+), 325 deletions(-) delete mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py delete mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore index 99c3a1444ed2..b4243ced74e4 100644 --- a/packages/google-cloud-pubsub/.gitignore 
+++ b/packages/google-cloud-pubsub/.gitignore @@ -29,7 +29,6 @@ pip-log.txt .nox .cache .pytest_cache -.pytype # Mac diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py deleted file mode 100644 index e25c1dc6c1cb..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/_gapic.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2019, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from __future__ import absolute_import - -import functools -from typing import Callable, Container, Type - - -def add_methods( - source_class: Type, denylist: Container[str] = () -) -> Callable[[Type], Type]: - """Add wrapped versions of the `api` member's methods to the class. - - Any methods passed in `denylist` are not added. - Additionally, any methods explicitly defined on the wrapped class are - not added. - """ - - def wrap(wrapped_fx: Callable, lookup_fx: Callable): - """Wrap a GAPIC method; preserve its name and docstring.""" - # If this is a static or class method, then we do *not* - # send self as the first argument. - # - # For instance methods, we need to send self.api rather - # than self, since that is where the actual methods were declared. 
- - if isinstance(lookup_fx, (classmethod, staticmethod)): - fx = lambda *a, **kw: wrapped_fx(*a, **kw) # noqa - return staticmethod(functools.wraps(wrapped_fx)(fx)) - else: - fx = lambda self, *a, **kw: wrapped_fx(self.api, *a, **kw) # noqa - return functools.wraps(wrapped_fx)(fx) - - def actual_decorator(cls: Type) -> Type: - # Reflectively iterate over most of the methods on the source class - # (the GAPIC) and make wrapped versions available on this client. - for name in dir(source_class): - # Ignore all private and magic methods. - if name.startswith("_"): - continue - - # Ignore anything on our denylist. - if name in denylist: - continue - - # Retrieve the attribute, and ignore it if it is not callable. - attr = getattr(source_class, name) - if not callable(attr): - continue - - # Add a wrapper method to this object. - lookup_fx = source_class.__dict__[name] - fx = wrap(attr, lookup_fx) - - setattr(cls, name, fx) - - # Return the augmented class. - return cls - - # Simply return the actual decorator; this is returned from this method - # and actually used to decorate the class. - return actual_decorator diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index ade135f4598f..8b868eaee88f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -271,7 +271,7 @@ def _commit(self) -> None: batch_transport_succeeded = True try: # Performs retries for errors defined by the retry configuration. 
- response = self._client.api.publish( + response = self._client._gapic_publish( topic=self._topic, messages=self._messages, retry=self._commit_retry, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 58baf43b6a37..43305afcc181 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -22,12 +22,12 @@ import time import typing from typing import Any, Dict, Optional, Sequence, Tuple, Type, Union +import warnings from google.api_core import gapic_v1 from google.auth.credentials import AnonymousCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures @@ -49,15 +49,11 @@ from google.cloud import pubsub_v1 from google.cloud.pubsub_v1.publisher import _batch from google.pubsub_v1.services.publisher.client import OptionalRetry + from google.pubsub_v1.types import pubsub as pubsub_types _LOGGER = logging.getLogger(__name__) -_DENYLISTED_METHODS = ( - "publish", - "from_service_account_file", - "from_service_account_json", -) _raw_proto_pubbsub_message = gapic_types.PubsubMessage.pb() @@ -66,8 +62,7 @@ ] -@_gapic.add_methods(publisher_client.PublisherClient, denylist=_DENYLISTED_METHODS) -class Client(object): +class Client(publisher_client.PublisherClient): """A publisher client for Google Cloud Pub/Sub. This creates an object that is capable of publishing messages. @@ -146,8 +141,8 @@ def __init__( # Add the metrics headers, and instantiate the underlying GAPIC # client. 
- self.api = publisher_client.PublisherClient(**kwargs) - self._target = self.api._transport._host + super().__init__(**kwargs) + self._target = self._transport._host self._batch_class = thread.Batch self.batch_settings = types.BatchSettings(*batch_settings) @@ -164,7 +159,7 @@ def __init__( self._flow_controller = FlowController(self.publisher_options.flow_control) @classmethod - def from_service_account_file( + def from_service_account_file( # type: ignore[override] cls, filename: str, batch_settings: Union[types.BatchSettings, Sequence] = (), @@ -188,7 +183,7 @@ def from_service_account_file( kwargs["credentials"] = credentials return cls(batch_settings, **kwargs) - from_service_account_json = from_service_account_file + from_service_account_json = from_service_account_file # type: ignore[assignment] @property def target(self) -> str: @@ -199,6 +194,26 @@ def target(self) -> str: """ return self._target + @property + def api(self): + """The underlying gapic API client. + + .. versionchanged:: 2.10.0 + Instead of a GAPIC ``PublisherClient`` client instance, this property is a + proxy object to it with the same interface. + + .. deprecated:: 2.10.0 + Use the GAPIC methods and properties on the client instance directly + instead of through the :attr:`api` attribute. + """ + msg = ( + 'The "api" property only exists for backward compatibility, access its ' + 'attributes directly thorugh the client instance (e.g. "client.foo" ' + 'instead of "client.api.foo").' + ) + warnings.warn(msg, category=DeprecationWarning) + return super() + def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> SequencerType: """ Get an existing sequencer or create a new one given the (topic, ordering_key) pair. 
@@ -252,7 +267,11 @@ def resume_publish(self, topic: str, ordering_key: str) -> None: else: sequencer.unpause() - def publish( + def _gapic_publish(self, *args, **kwargs) -> "pubsub_types.PublishResponse": + """Call the GAPIC public API directly.""" + return super().publish(*args, **kwargs) + + def publish( # type: ignore[override] self, topic: str, data: bytes, @@ -382,7 +401,7 @@ def on_publish_done(future): if self._enable_message_ordering: if retry is gapic_v1.method.DEFAULT: # use the default retry for the publish GRPC method as a base - transport = self.api._transport + transport = self._transport base_retry = transport._wrapped_methods[transport.publish]._retry retry = base_retry.with_deadline(2.0 ** 32) else: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py index 3c0558fe51cd..baf6ba8ff838 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/flow_controller.py @@ -25,7 +25,7 @@ _LOGGER = logging.getLogger(__name__) -MessageType = Type[types.PubsubMessage] # type: ignore # pytype: disable=module-attr +MessageType = Type[types.PubsubMessage] # type: ignore class _QuantityReservation: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 7cd9317e6bb1..bfa1b5a492a0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -76,7 +76,7 @@ def message_count(self) -> int: return len(self._leased_messages) @property - def ack_ids(self) -> KeysView[str]: # pytype: disable=invalid-annotation + def ack_ids(self) -> KeysView[str]: """The ack IDs of all leased messages.""" return 
self._leased_messages.keys() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 3a2bc6bc1136..d207718fc001 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -532,7 +532,7 @@ def open( self._get_initial_request, stream_ack_deadline_seconds ) self._rpc = bidi.ResumableBidiRpc( - start_rpc=self._client.api.streaming_pull, + start_rpc=self._client.streaming_pull, initial_request=get_initial_request, should_recover=self._should_recover, should_terminate=self._should_terminate, @@ -548,14 +548,11 @@ def open( # Create references to threads assert self._scheduler is not None - # pytype: disable=wrong-arg-types - # (pytype incorrectly complains about "self" not being the right argument type) scheduler_queue = self._scheduler.queue self._dispatcher = dispatcher.Dispatcher(self, scheduler_queue) self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) self._leaser = leaser.Leaser(self) self._heartbeater = heartbeater.Heartbeater(self) - # pytype: enable=wrong-arg-types # Start the thread to pass the requests. 
self._dispatcher.start() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 8eb1e2e25ebd..9c12a0bfba3f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -18,11 +18,11 @@ import pkg_resources import typing from typing import cast, Any, Callable, Optional, Sequence, Union +import warnings from google.auth.credentials import AnonymousCredentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.cloud.pubsub_v1 import _gapic from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager @@ -42,15 +42,8 @@ # a PIP package. __version__ = "0.0" -_DENYLISTED_METHODS = ( - "publish", - "from_service_account_file", - "from_service_account_json", -) - -@_gapic.add_methods(subscriber_client.SubscriberClient, denylist=_DENYLISTED_METHODS) -class Client(object): +class Client(subscriber_client.SubscriberClient): """A subscriber client for Google Cloud Pub/Sub. This creates an object that is capable of subscribing to messages. @@ -91,12 +84,14 @@ def __init__(self, **kwargs: Any): kwargs["credentials"] = AnonymousCredentials() # Instantiate the underlying GAPIC client. - self._api = subscriber_client.SubscriberClient(**kwargs) - self._target = self._api._transport._host + super().__init__(**kwargs) + self._target = self._transport._host self._closed = False @classmethod - def from_service_account_file(cls, filename: str, **kwargs: Any) -> "Client": + def from_service_account_file( # type: ignore[override] + cls, filename: str, **kwargs: Any + ) -> "Client": """Creates an instance of this client using the provided credentials file. 
@@ -112,7 +107,7 @@ def from_service_account_file(cls, filename: str, **kwargs: Any) -> "Client": kwargs["credentials"] = credentials return cls(**kwargs) - from_service_account_json = from_service_account_file + from_service_account_json = from_service_account_file # type: ignore[assignment] @property def target(self) -> str: @@ -123,11 +118,6 @@ def target(self) -> str: """ return self._target - @property - def api(self) -> subscriber_client.SubscriberClient: - """The underlying gapic API client.""" - return self._api - @property def closed(self) -> bool: """Return whether the client has been closed and cannot be used anymore. @@ -136,6 +126,26 @@ def closed(self) -> bool: """ return self._closed + @property + def api(self): + """The underlying gapic API client. + + .. versionchanged:: 2.10.0 + Instead of a GAPIC ``SubscriberClient`` client instance, this property is a + proxy object to it with the same interface. + + .. deprecated:: 2.10.0 + Use the GAPIC methods and properties on the client instance directly + instead of through the :attr:`api` attribute. + """ + msg = ( + 'The "api" property only exists for backward compatibility, access its ' + 'attributes directly thorugh the client instance (e.g. "client.foo" ' + 'instead of "client.api.foo").' + ) + warnings.warn(msg, category=DeprecationWarning) + return super() + def subscribe( self, subscription: str, @@ -266,7 +276,7 @@ def close(self) -> None: This method is idempotent. 
""" - transport = cast("SubscriberGrpcTransport", self.api._transport) + transport = cast("SubscriberGrpcTransport", self._transport) transport.grpc_channel.close() self._closed = True diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 5bd84e9ad228..2d72bba57f3a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -81,7 +81,7 @@ class Message(object): The time that this message was originally published. """ - def __init__( # pytype: disable=module-attr + def __init__( self, message: "types.PubsubMessage._meta._pb", # type: ignore ack_id: str, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 62dffcfc3021..e843a6da91a3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -127,13 +127,11 @@ class PublisherOptions(NamedTuple): "an instance of :class:`google.api_core.retry.Retry`." ) - # pytype: disable=invalid-annotation timeout: "OptionalTimeout" = gapic_v1.method.DEFAULT # use api_core default ( "Timeout settings for message publishing by the client. It should be " "compatible with :class:`~.pubsub_v1.types.TimeoutType`." ) - # pytype: enable=invalid-annotation # Define the type class and default values for flow control settings. 
diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index f5d3cd26c686..e9fea8af8493 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -28,8 +28,6 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" -PYTYPE_VERSION = "pytype==2021.4.9" - DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] @@ -46,8 +44,8 @@ "lint_setup_py", "blacken", "mypy", - "pytype", - # "mypy_samples", # TODO: uncomment when the checks pass + # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936 + # "mypy_samples", # TODO: uncomment when the check passes "docs", ] @@ -77,14 +75,6 @@ def mypy(session): session.run("mypy", "google/cloud") -@nox.session(python=DEFAULT_PYTHON_VERSION) -def pytype(session): - """Run type checks.""" - session.install("-e", ".[all]") - session.install(PYTYPE_VERSION) - session.run("pytype") - - @nox.session(python=DEFAULT_PYTHON_VERSION) def mypy_samples(session): """Run type checks with mypy.""" diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 583e82aa9d73..a58470e65dc3 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -364,61 +364,6 @@ # ---------------------------------------------------------------------------- python.py_samples() -# ---------------------------------------------------------------------------- -# pytype-related changes -# ---------------------------------------------------------------------------- - -# Add .pytype to .gitignore -s.replace(".gitignore", r"\.pytest_cache", "\g<0>\n.pytype") - -# Add pytype config to setup.cfg -s.replace( - "setup.cfg", - r"universal = 1", - textwrap.dedent( - """ \g<0> - - [pytype] - python_version = 3.8 - inputs = - google/cloud/ - exclude = - tests/ - google/pubsub_v1/ # generated code - output = .pytype/ - disable = - # There's some 
issue with finding some pyi files, thus disabling. - # The issue https://github.com/google/pytype/issues/150 is closed, but the - # error still occurs for some reason. - pyi-error""" - ), -) - -# Add pytype session to noxfile.py -s.replace( - "noxfile.py", - r"BLACK_PATHS = \[.*?\]", - '\g<0>\nPYTYPE_VERSION = "pytype==2021.4.9"\n', -) -s.replace( - "noxfile.py", r'"blacken",', '\g<0>\n "pytype",', -) -s.replace( - "noxfile.py", - r"nox\.options\.error_on_missing_interpreters = True", - textwrap.dedent( - ''' \g<0> - - - @nox.session(python=DEFAULT_PYTHON_VERSION) - def pytype(session): - """Run type checks.""" - session.install("-e", ".[all]") - session.install(PYTYPE_VERSION) - session.run("pytype")''' - ), -) - # ---------------------------------------------------------------------------- # Add mypy nox session. # ---------------------------------------------------------------------------- @@ -467,12 +412,13 @@ def mypy(session): # ---------------------------------------------------------------------------- s.replace( "noxfile.py", - r'"pytype",', - '\g<0>\n # "mypy_samples", # TODO: uncomment when the checks pass', + r' "mypy",', + '\g<0>\n # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936' + '\n # "mypy_samples", # TODO: uncomment when the check passes', ) s.replace( "noxfile.py", - r'session\.run\("pytype"\)', + r'session\.run\("mypy", "google/cloud"\)', textwrap.dedent( ''' \g<0> diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 955dd278fe04..f44f82c4a376 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -332,11 +332,8 @@ def update_subscription_with_dead_letter_policy( ) with subscriber: - subscription_after_update = typing.cast( - "gapic_types.Subscription", - subscriber.update_subscription( - request={"subscription": subscription, "update_mask": 
update_mask} - ), + subscription_after_update = subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} ) print(f"After the update: {subscription_after_update}.") @@ -380,11 +377,8 @@ def remove_dead_letter_policy( ) with subscriber: - subscription_after_update = typing.cast( - "gapic_types.Subscription", - subscriber.update_subscription( - request={"subscription": subscription, "update_mask": update_mask} - ), + subscription_after_update = subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} ) print(f"After removing the policy: {subscription_after_update}.") diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg index a79cb6a60387..c3a2b39f6528 100644 --- a/packages/google-cloud-pubsub/setup.cfg +++ b/packages/google-cloud-pubsub/setup.cfg @@ -17,17 +17,3 @@ # Generated by synthtool. DO NOT EDIT! [bdist_wheel] universal = 1 - -[pytype] -python_version = 3.8 -inputs = - google/cloud/ -exclude = - tests/ - google/pubsub_v1/ # generated code -output = .pytype/ -disable = - # There's some issue with finding some pyi files, thus disabling. - # The issue https://github.com/google/pytype/issues/150 is closed, but the - # error still occurs for some reason. - pyi-error diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index abf5ec76f8bf..b15128489f5d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -128,7 +128,7 @@ def test_blocking__commit(): # Set up the underlying API publish method to return a PublishResponse. 
publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) patch = mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ) with patch as publish: batch._commit() @@ -160,7 +160,7 @@ def test_blocking__commit_custom_retry(): # Set up the underlying API publish method to return a PublishResponse. publish_response = gapic_types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ) with patch as publish: batch._commit() @@ -182,7 +182,7 @@ def test_blocking__commit_custom_timeout(): # Set up the underlying API publish method to return a PublishResponse. publish_response = gapic_types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ) with patch as publish: batch._commit() @@ -208,7 +208,7 @@ def api_publish_delay(topic="", messages=(), retry=None, timeout=None): return gapic_types.PublishResponse(message_ids=message_ids) api_publish_patch = mock.patch.object( - type(batch.client.api), "publish", side_effect=api_publish_delay + type(batch.client), "_gapic_publish", side_effect=api_publish_delay ) with api_publish_patch: @@ -252,7 +252,7 @@ def test_blocking__commit_already_started(_LOGGER): def test_blocking__commit_no_messages(): batch = create_batch() - with mock.patch.object(type(batch.client.api), "publish") as publish: + with mock.patch.object(type(batch.client), "_gapic_publish") as publish: batch._commit() assert publish.call_count == 0 @@ -268,7 +268,7 @@ def test_blocking__commit_wrong_messageid_length(): # Set up a PublishResponse that only returns one message ID. 
publish_response = gapic_types.PublishResponse(message_ids=["a"]) patch = mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ) with patch: @@ -288,7 +288,7 @@ def test_block__commmit_api_error(): # Make the API throw an error when publishing. error = google.api_core.exceptions.InternalServerError("uh oh") - patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error) + patch = mock.patch.object(type(batch.client), "_gapic_publish", side_effect=error) with patch: batch._commit() @@ -307,7 +307,7 @@ def test_block__commmit_retry_error(): # Make the API throw an error when publishing. error = google.api_core.exceptions.RetryError("uh oh", None) - patch = mock.patch.object(type(batch.client.api), "publish", side_effect=error) + patch = mock.patch.object(type(batch.client), "_gapic_publish", side_effect=error) with patch: batch._commit() @@ -536,7 +536,7 @@ def test_batch_done_callback_called_on_success(): publish_response = gapic_types.PublishResponse(message_ids=["a"]) with mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ): batch._commit() @@ -559,8 +559,8 @@ def test_batch_done_callback_called_on_publish_failure(): error = google.api_core.exceptions.InternalServerError("uh oh") with mock.patch.object( - type(batch.client.api), - "publish", + type(batch.client), + "_gapic_publish", return_value=publish_response, side_effect=error, ): @@ -582,7 +582,7 @@ def test_batch_done_callback_called_on_publish_response_invalid(): publish_response = gapic_types.PublishResponse(message_ids=[]) with mock.patch.object( - type(batch.client.api), "publish", return_value=publish_response + type(batch.client), "_gapic_publish", return_value=publish_response ): batch._commit() diff --git 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 161f9e33bb66..20d5b328c4a3 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -22,6 +22,7 @@ import mock import pytest import time +import warnings from google.api_core import gapic_v1 from google.api_core import retry as retries @@ -50,12 +51,42 @@ def _assert_retries_equal(retry, retry2): assert inspect.getclosurevars(pred) == inspect.getclosurevars(pred2) +def test_api_property_deprecated(creds): + client = publisher.Client(credentials=creds) + + with warnings.catch_warnings(record=True) as warned: + client.api + + assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "client.api" in warning_msg + + +def test_api_property_proxy_to_generated_client(creds): + client = publisher.Client(credentials=creds) + + with warnings.catch_warnings(record=True): + api_object = client.api + + # Not a perfect check, but we are satisficed if the returned API object indeed + # contains all methods of the generated class. + superclass_attrs = (attr for attr in dir(type(client).__mro__[1])) + assert all( + hasattr(api_object, attr) + for attr in superclass_attrs + if callable(getattr(client, attr)) + ) + + # The resume_publish() method only exists on the hand-written wrapper class. + assert hasattr(client, "resume_publish") + assert not hasattr(api_object, "resume_publish") + + def test_init(creds): client = publisher.Client(credentials=creds) - # A plain client should have an `api` (the underlying GAPIC) and a - # batch settings object, which should have the defaults. 
- assert isinstance(client.api, publisher_client.PublisherClient) + # A plain client should have a batch settings object containing the defaults. assert client.batch_settings.max_bytes == 1 * 1000 * 1000 assert client.batch_settings.max_latency == 0.01 assert client.batch_settings.max_messages == 100 @@ -67,7 +98,7 @@ def test_init_default_client_info(creds): installed_version = publisher.client.__version__ expected_client_info = f"gccl/{installed_version}" - for wrapped_method in client.api.transport._wrapped_methods.values(): + for wrapped_method in client.transport._wrapped_methods.values(): user_agent = next( ( header_value @@ -84,10 +115,10 @@ def test_init_w_custom_transport(creds): transport = PublisherGrpcTransport(credentials=creds) client = publisher.Client(transport=transport) - # A plain client should have an `api` (the underlying GAPIC) and a - # batch settings object, which should have the defaults. - assert isinstance(client.api, publisher_client.PublisherClient) - assert client.api._transport is transport + # A plain client should have a transport and a batch settings object, which should + # contain the defaults. 
+ assert isinstance(client, publisher_client.PublisherClient) + assert client._transport is transport assert client.batch_settings.max_bytes == 1 * 1000 * 1000 assert client.batch_settings.max_latency == 0.01 assert client.batch_settings.max_messages == 100 @@ -97,8 +128,7 @@ def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} client = publisher.Client(client_options=client_options, credentials=creds) - assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api._transport.grpc_channel._channel.target()).decode( + assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com:443" @@ -106,8 +136,7 @@ def test_init_w_api_endpoint(creds): def test_init_w_empty_client_options(creds): client = publisher.Client(client_options={}, credentials=creds) - assert isinstance(client.api, publisher_client.PublisherClient) - assert (client.api._transport.grpc_channel._channel.target()).decode( + assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" ) == publisher_client.PublisherClient.SERVICE_ADDRESS @@ -129,12 +158,12 @@ def init(self, *args, **kwargs): "credentials_file": "file.json", } ) - client_options = client.api.kwargs["client_options"] + client_options = client.kwargs["client_options"] assert client_options.get("quota_project_id") == "42" assert client_options.get("scopes") == [] assert client_options.get("credentials_file") == "file.json" assert client.target == "testendpoint.google.com" - assert client.api.transport._ssl_channel_credentials == mock_ssl_creds + assert client.transport._ssl_channel_credentials == mock_ssl_creds def test_init_emulator(monkeypatch): @@ -147,7 +176,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. 
- channel = client.api._transport.publish._channel + channel = client._transport.publish._channel assert channel.target().decode("utf8") == "/foo/bar:123" @@ -418,7 +447,7 @@ def test_gapic_instance_method(creds): transport_mock._wrapped_methods = { transport_mock.create_topic: fake_create_topic_rpc } - patcher = mock.patch.object(client.api, "_transport", new=transport_mock) + patcher = mock.patch.object(client, "_transport", new=transport_mock) topic = gapic_types.Topic(name="projects/foo/topics/bar") diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 609026598856..42c14c47d7ff 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -553,7 +553,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi assert manager._consumer == background_consumer.return_value resumable_bidi_rpc.assert_called_once_with( - start_rpc=manager._client.api.streaming_pull, + start_rpc=manager._client.streaming_pull, initial_request=mock.ANY, should_recover=manager._should_recover, should_terminate=manager._should_terminate, @@ -562,7 +562,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] assert initial_request_arg.func == manager._get_initial_request assert initial_request_arg.args[0] == 18 - assert not manager._client.api.get_subscription.called + assert not manager._client.get_subscription.called resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( manager._on_rpc_done diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 601b40bcc95b..1f60b536d554 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -26,18 +26,13 @@ from google.pubsub_v1.services.subscriber.transports.grpc import SubscriberGrpcTransport -def test_init(creds): - client = subscriber.Client(credentials=creds) - assert isinstance(client.api, subscriber_client.SubscriberClient) - - def test_init_default_client_info(creds): client = subscriber.Client(credentials=creds) installed_version = subscriber.client.__version__ expected_client_info = f"gccl/{installed_version}" - for wrapped_method in client.api.transport._wrapped_methods.values(): + for wrapped_method in client.transport._wrapped_methods.values(): user_agent = next( ( header_value @@ -58,16 +53,14 @@ def test_init_default_closed_state(creds): def test_init_w_custom_transport(creds): transport = SubscriberGrpcTransport(credentials=creds) client = subscriber.Client(transport=transport) - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert client.api._transport is transport + assert client._transport is transport def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} client = subscriber.Client(client_options=client_options, credentials=creds) - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert (client.api._transport.grpc_channel._channel.target()).decode( + assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" ) == "testendpoint.google.com:443" @@ -75,8 +68,7 @@ def test_init_w_api_endpoint(creds): def test_init_w_empty_client_options(creds): client = subscriber.Client(client_options={}, credentials=creds) - assert isinstance(client.api, subscriber_client.SubscriberClient) - assert 
(client.api._transport.grpc_channel._channel.target()).decode( + assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS @@ -98,12 +90,12 @@ def init(self, *args, **kwargs): "credentials_file": "file.json", } ) - client_options = client._api.kwargs["client_options"] + client_options = client.kwargs["client_options"] assert client_options.get("quota_project_id") == "42" assert client_options.get("scopes") == [] assert client_options.get("credentials_file") == "file.json" assert client.target == "testendpoint.google.com" - assert client.api.transport._ssl_channel_credentials == mock_ssl_creds + assert client.transport._ssl_channel_credentials == mock_ssl_creds def test_init_emulator(monkeypatch): @@ -116,7 +108,7 @@ def test_init_emulator(monkeypatch): # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client.api._transport.pull._channel + channel = client._transport.pull._channel assert channel.target().decode("utf8") == "/baz/bacon:123" @@ -184,7 +176,7 @@ def test_subscribe_options(manager_open, creds): def test_close(creds): client = subscriber.Client(credentials=creds) - patcher = mock.patch.object(client.api._transport.grpc_channel, "close") + patcher = mock.patch.object(client._transport.grpc_channel, "close") with patcher as patched_close: client.close() @@ -195,7 +187,7 @@ def test_close(creds): def test_closes_channel_as_context_manager(creds): client = subscriber.Client(credentials=creds) - patcher = mock.patch.object(client.api._transport.grpc_channel, "close") + patcher = mock.patch.object(client._transport.grpc_channel, "close") with patcher as patched_close: with client: @@ -207,7 +199,7 @@ def test_closes_channel_as_context_manager(creds): def test_context_manager_raises_if_closed(creds): client = subscriber.Client(credentials=creds) - with mock.patch.object(client.api._transport.grpc_channel, "close"): + with 
mock.patch.object(client._transport.grpc_channel, "close"): client.close() expetect_msg = r"(?i).*closed.*cannot.*context manager.*" @@ -216,13 +208,45 @@ def test_context_manager_raises_if_closed(creds): pass +def test_api_property_deprecated(creds): + client = subscriber.Client(credentials=creds) + + with warnings.catch_warnings(record=True) as warned: + client.api + + assert len(warned) == 1 + assert issubclass(warned[0].category, DeprecationWarning) + warning_msg = str(warned[0].message) + assert "client.api" in warning_msg + + +def test_api_property_proxy_to_generated_client(creds): + client = subscriber.Client(credentials=creds) + + with warnings.catch_warnings(record=True): + api_object = client.api + + # Not a perfect check, but we are satisficed if the returned API object indeed + # contains all methods of the generated class. + superclass_attrs = (attr for attr in dir(type(client).__mro__[1])) + assert all( + hasattr(api_object, attr) + for attr in superclass_attrs + if callable(getattr(client, attr)) + ) + + # The close() method only exists on the hand-written wrapper class. 
+ assert hasattr(client, "close") + assert not hasattr(api_object, "close") + + def test_streaming_pull_gapic_monkeypatch(creds): client = subscriber.Client(credentials=creds) with mock.patch("google.api_core.gapic_v1.method.wrap_method"): client.streaming_pull(requests=iter([])) - transport = client.api._transport + transport = client._transport assert hasattr(transport.streaming_pull, "_prefetch_first_result_") assert not transport.streaming_pull._prefetch_first_result_ @@ -232,7 +256,7 @@ def test_sync_pull_warning_if_return_immediately(creds): subscription_path = "projects/foo/subscriptions/bar" with mock.patch.object( - client.api._transport, "_wrapped_methods" + client._transport, "_wrapped_methods" ), warnings.catch_warnings(record=True) as warned: client.pull(subscription=subscription_path, return_immediately=True) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py deleted file mode 100644 index adff4bbfcb44..000000000000 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test__gapic.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from google.cloud.pubsub_v1 import _gapic - - -class SourceClass(object): - def __init__(self): - self.x = "x" - - def method(self): - return "source class instance method" - - @staticmethod - def static_method(): - return "source class static method" - - @classmethod - def class_method(cls): - return "source class class method" - - @classmethod - def denylisted_method(cls): # pragma: NO COVER - return "source class denylisted method" - - -def test_add_method(): - @_gapic.add_methods(SourceClass, ("denylisted_method",)) - class Foo(object): - def __init__(self): - self.api = SourceClass() - - def method(self): # pragma: NO COVER - return "foo class instance method" - - foo = Foo() - - # Any method that's callable and not denylisted is "inherited". - assert set(["method", "static_method", "class_method"]) <= set(dir(foo)) - assert "denylisted_method" not in dir(foo) - - # Source Class's static and class methods become static methods. - assert type(Foo.__dict__["static_method"]) == staticmethod - assert foo.static_method() == "source class static method" - assert type(Foo.__dict__["class_method"]) == staticmethod - assert foo.class_method() == "source class class method" - - # The decorator changes the behavior of instance methods of the wrapped class. - # method() is called on an instance of the Source Class (stored as an - # attribute on the wrapped class). 
- assert foo.method() == "source class instance method" From d3e3de5f507f0f128deb8b36760102aae2a1cba9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 20 Jan 2022 13:14:19 -0500 Subject: [PATCH 0754/1197] ci(python): run lint / unit tests / docs as GH actions (#568) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * add commit to trigger gh actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 16 +++++- .../.github/workflows/docs.yml | 38 +++++++++++++ .../.github/workflows/lint.yml | 25 ++++++++ .../.github/workflows/unittest.yml | 57 +++++++++++++++++++ 4 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 packages/google-cloud-pubsub/.github/workflows/docs.yml create mode 100644 packages/google-cloud-pubsub/.github/workflows/lint.yml create mode 100644 packages/google-cloud-pubsub/.github/workflows/unittest.yml diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 52d79c11f3ad..b668c04d5d65 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml new file mode 100644 index 000000000000..f7b8344c4500 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml new file mode 100644 index 000000000000..1e8b05c3d7ff --- /dev/null +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: 
| + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml new file mode 100644 index 000000000000..074ee2504ca5 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 From a7e9cb695b006f782510dac71f8931731ac6975a Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 24 Jan 2022 13:08:12 -0700 Subject: [PATCH 0755/1197] chore: make samples 3.6 check optional (#570) --- packages/google-cloud-pubsub/.github/sync-repo-settings.yaml | 1 - 1 file changed, 1 
deletion(-) diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index 2f68b5a4de07..48639e7e7f0f 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -11,6 +11,5 @@ branchProtectionRules: - 'Kokoro - Against Pub/Sub Lite samples' - 'cla/google' - 'Samples - Lint' - - 'Samples - Python 3.6' - 'Samples - Python 3.7' - 'Samples - Python 3.8' From 2776e20c974860a4ef9b1ff9042fbccdf4a16469 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Jan 2022 12:38:06 -0500 Subject: [PATCH 0756/1197] feat: add api key support (#571) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: upgrade gapic-generator-java, gax-java and gapic-generator-python PiperOrigin-RevId: 423842556 Source-Link: https://github.com/googleapis/googleapis/commit/a616ca08f4b1416abbac7bc5dd6d61c791756a81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/29b938c58c1e51d019f2ee539d55dc0a3c86a905 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjliOTM4YzU4YzFlNTFkMDE5ZjJlZTUzOWQ1NWRjMGEzYzg2YTkwNSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 38 +++++- .../pubsub_v1/services/publisher/client.py | 128 ++++++++++++------ .../services/schema_service/async_client.py | 38 +++++- .../services/schema_service/client.py | 127 +++++++++++------ .../services/subscriber/async_client.py | 37 +++++ .../pubsub_v1/services/subscriber/client.py | 128 ++++++++++++------ .../unit/gapic/pubsub_v1/test_publisher.py | 124 +++++++++++++++++ .../gapic/pubsub_v1/test_schema_service.py | 128 ++++++++++++++++++ .../unit/gapic/pubsub_v1/test_subscriber.py | 124 +++++++++++++++++ 9 files changed, 
741 insertions(+), 131 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 7165061d2df6..9301490710a3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -111,6 +111,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. 
+ + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PublisherClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> PublisherTransport: """Returns the transport used by the client instance. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 51cc2d1add7d..869200cf6c00 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -279,6 +279,74 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -330,57 +398,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, PublisherTransport): # transport is a PublisherTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -392,6 +425,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 64f65ae07941..a3cb75371153 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -109,6 +109,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def 
get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> SchemaServiceTransport: """Returns the transport used by the client instance. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index ccb958dd89f9..832d7d8d188b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -232,6 +232,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -282,57 +349,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, SchemaServiceTransport): # transport is a SchemaServiceTransport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -344,6 +376,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 3754f151a443..9e7d48f06e0d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Optional, AsyncIterable, Awaitable, AsyncIterator, @@ -122,6 +123,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SubscriberClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> SubscriberTransport: """Returns the transport used by the client instance. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 308da6f74cd3..d0320c7c4ce1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -281,6 +281,74 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -332,57 +400,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, SubscriberTransport): # transport is a SubscriberTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." 
@@ -394,6 +427,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 2c8a5062b33c..c0253371ad68 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -387,6 +387,83 @@ def test_publisher_client_mtls_env_auto( ) +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +@mock.patch.object( + PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) +) +@mock.patch.object( + PublisherAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(PublisherAsyncClient), +) +def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -2550,6 +2627,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.PublisherGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3610,3 +3704,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (PublisherClient, transports.PublisherGrpcTransport), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index a110c01f16a0..6088af71edca 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -399,6 +399,87 @@ def test_schema_service_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +@mock.patch.object( + SchemaServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SchemaServiceAsyncClient), +) +def 
test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -1799,6 +1880,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.SchemaServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2841,3 +2939,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index f35a82ef343e..9f6ebd6c36c0 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -391,6 +391,83 @@ def test_subscriber_client_mtls_env_auto( ) +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) +@mock.patch.object( + SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) +) +@mock.patch.object( + SubscriberAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(SubscriberAsyncClient), +) +def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): + 
mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -3826,6 +3903,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.SubscriberGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -4893,3 +4987,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (SubscriberClient, transports.SubscriberGrpcTransport), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From e0be0b102f615bb6e381af1954cd445bc42929d4 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 25 Jan 2022 13:21:55 -0500 Subject: [PATCH 0757/1197] docs: Docs have inconsistent default values for max_latency and max_bytes (#572) * fix max_latency and max_byte defaults and add to docs * docs: fix max_latency and max_byte defaults --- .../docs/publisher/index.rst | 20 +++++++++++++++---- .../samples/snippets/publisher.py | 2 +- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/publisher/index.rst index 0e7a9b50bccf..2a0ad320eb3d 100644 --- a/packages/google-cloud-pubsub/docs/publisher/index.rst +++ b/packages/google-cloud-pubsub/docs/publisher/index.rst @@ -72,7 +72,7 @@ The way that this 
works is that on the first message that you send, a new batch is created automatically. For every subsequent message, if there is already a valid batch that is still accepting messages, then that batch is used. When the batch is created, it begins a countdown that publishes the batch once -sufficient time has elapsed (by default, this is 0.05 seconds). +sufficient time has elapsed (by default, this is 0.01 seconds). If you need different batching settings, simply provide a :class:`~.pubsub_v1.types.BatchSettings` object when you instantiate the @@ -84,11 +84,23 @@ If you need different batching settings, simply provide a from google.cloud.pubsub import types client = pubsub.PublisherClient( - batch_settings=types.BatchSettings(max_messages=500), + batch_settings=types.BatchSettings( + max_messages=500, # default 100 + max_bytes=1024, # default 1 MB + max_latency=1 # default 0.01 seconds + ), ) -Pub/Sub accepts a maximum of 1,000 messages in a batch, and the size of a -batch can not exceed 10 megabytes. +The `max_bytes` argument is the maximum total size (in bytes) of the messages to collect +before automatically publishing the batch, including any byte-size +overhead of the publish request itself. The maximum value is bound by the +server-side limit of 10_000_000 bytes. The default value is 1 MB. + +The `max_messages` argument is the maximum number of messages to collect +before automatically publishing the batch; the default value is 100 messages. + +The `max_latency` argument is the maximum number of seconds to wait for additional +messages before automatically publishing the batch; the default is 0.01 seconds.
Futures diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 11fa1eb85c08..d6e52077213d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -189,7 +189,7 @@ def publish_messages_with_batch_settings(project_id: str, topic_id: str) -> None # or 1 KiB of data, or 1 second has passed. batch_settings = pubsub_v1.types.BatchSettings( max_messages=10, # default 100 - max_bytes=1024, # default 1 MiB + max_bytes=1024, # default 1 MB max_latency=1, # default 10 ms ) publisher = pubsub_v1.PublisherClient(batch_settings) From 6b3a5c26ecd2bc0b5d66177be7f4e997d7ac80cd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Feb 2022 01:10:49 +0000 Subject: [PATCH 0758/1197] chore: use gapic-generator-python 0.62.1 (#574) - [ ] Regenerate this pull request now. fix: resolve DuplicateCredentialArgs error when using credentials_file committer: parthea PiperOrigin-RevId: 425964861 Source-Link: https://github.com/googleapis/googleapis/commit/84b1a5a4f6fb2d04905be58e586b8a7a4310a8cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/4fb761bbd8506ac156f49bac5f18306aa8eb3aa8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9 --- .../services/publisher/async_client.py | 14 ++-- .../pubsub_v1/services/publisher/client.py | 14 ++-- .../services/publisher/transports/grpc.py | 5 +- .../publisher/transports/grpc_asyncio.py | 5 +- .../services/schema_service/async_client.py | 10 +-- .../services/schema_service/client.py | 10 +-- .../schema_service/transports/grpc.py | 5 +- .../schema_service/transports/grpc_asyncio.py | 5 +- .../services/subscriber/async_client.py | 37 +++++---- .../pubsub_v1/services/subscriber/client.py | 37 +++++---- .../services/subscriber/transports/grpc.py | 18 ++-- 
.../subscriber/transports/grpc_asyncio.py | 18 ++-- .../unit/gapic/pubsub_v1/test_publisher.py | 73 +++++++++++++++- .../gapic/pubsub_v1/test_schema_service.py | 83 ++++++++++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 73 +++++++++++++++- 15 files changed, 320 insertions(+), 87 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 9301490710a3..308e0f62950f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -248,7 +248,7 @@ async def create_topic( A topic resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -393,7 +393,7 @@ async def publish( Response for the Publish method. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic, messages]) if request is not None and has_flattened_params: @@ -479,7 +479,7 @@ async def get_topic( A topic resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: @@ -563,7 +563,7 @@ async def list_topics( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: @@ -656,7 +656,7 @@ async def list_topic_subscriptions( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: @@ -753,7 +753,7 @@ async def list_topic_snapshots( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: @@ -841,7 +841,7 @@ async def delete_topic( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 869200cf6c00..d445ddd141b1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -497,7 +497,7 @@ def create_topic( A topic resource. """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -627,7 +627,7 @@ def publish( Response for the Publish method. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic, messages]) if request is not None and has_flattened_params: @@ -699,7 +699,7 @@ def get_topic( A topic resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: @@ -773,7 +773,7 @@ def list_topics( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: @@ -856,7 +856,7 @@ def list_topic_subscriptions( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: @@ -943,7 +943,7 @@ def list_topic_snapshots( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: @@ -1021,7 +1021,7 @@ def delete_topic( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 2507d97423d6..ca63b4445643 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -162,8 +162,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 26677ad12845..14bc0a15bbac 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -207,8 +207,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index a3cb75371153..3ab7daf7cad5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -260,7 +260,7 @@ async def create_schema( A schema resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema, schema_id]) if request is not None and has_flattened_params: @@ -332,7 +332,7 @@ async def get_schema( A schema resource. """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -405,7 +405,7 @@ async def list_schemas( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -476,7 +476,7 @@ async def delete_schema( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -554,7 +554,7 @@ async def validate_schema( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 832d7d8d188b..0a3575f6a4d9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -452,7 +452,7 @@ def create_schema( A schema resource. """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema, schema_id]) if request is not None and has_flattened_params: @@ -524,7 +524,7 @@ def get_schema( A schema resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -597,7 +597,7 @@ def list_schemas( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -668,7 +668,7 @@ def delete_schema( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -746,7 +746,7 @@ def validate_schema( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent, schema]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 7313ec2dd43d..4f8863da9a9c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -162,8 +162,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index a542e066d8e9..56450ac8552e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -207,8 +207,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 9e7d48f06e0d..319915887b4b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -321,7 +321,7 @@ async def create_subscription( A subscription resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) if request is not None and has_flattened_params: @@ -407,7 +407,7 @@ async def get_subscription( A subscription resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: @@ -553,7 +553,7 @@ async def list_subscriptions( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: @@ -640,7 +640,7 @@ async def delete_subscription( sent along with the request as metadata. """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: @@ -743,7 +743,7 @@ async def modify_ack_deadline( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) if request is not None and has_flattened_params: @@ -838,7 +838,7 @@ async def acknowledge( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids]) if request is not None and has_flattened_params: @@ -947,7 +947,7 @@ async def pull( Response for the Pull method. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, return_immediately, max_messages]) if request is not None and has_flattened_params: @@ -1118,7 +1118,7 @@ async def modify_push_config( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([subscription, push_config]) if request is not None and has_flattened_params: @@ -1177,10 +1177,10 @@ async def get_snapshot( ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. Snapshots are used in Seek operations, which allow you to manage - message acknowledgments in bulk. That is, you can set - the acknowledgment state of messages in an existing + href="https://cloud.google.com/pubsub/docs/replay-overview">Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state captured by a snapshot. Args: @@ -1210,7 +1210,7 @@ async def get_snapshot( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: @@ -1298,7 +1298,7 @@ async def list_snapshots( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: @@ -1429,7 +1429,7 @@ async def create_snapshot( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, subscription]) if request is not None and has_flattened_params: @@ -1485,8 +1485,9 @@ async def update_snapshot( metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. 
Snapshots are used in - Seek operations, which allow + Seek + operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a @@ -1584,7 +1585,7 @@ async def delete_snapshot( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index d0320c7c4ce1..2d2839b2ada7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -560,7 +560,7 @@ def create_subscription( A subscription resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) if request is not None and has_flattened_params: @@ -636,7 +636,7 @@ def get_subscription( A subscription resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: @@ -765,7 +765,7 @@ def list_subscriptions( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: @@ -842,7 +842,7 @@ def delete_subscription( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: @@ -937,7 +937,7 @@ def modify_ack_deadline( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) if request is not None and has_flattened_params: @@ -1024,7 +1024,7 @@ def acknowledge( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids]) if request is not None and has_flattened_params: @@ -1125,7 +1125,7 @@ def pull( Response for the Pull method. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([subscription, return_immediately, max_messages]) if request is not None and has_flattened_params: @@ -1275,7 +1275,7 @@ def modify_push_config( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, push_config]) if request is not None and has_flattened_params: @@ -1325,10 +1325,10 @@ def get_snapshot( ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. Snapshots are used in Seek operations, which allow you to manage - message acknowledgments in bulk. That is, you can set - the acknowledgment state of messages in an existing + href="https://cloud.google.com/pubsub/docs/replay-overview">Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state captured by a snapshot. @@ -1359,7 +1359,7 @@ def get_snapshot( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: @@ -1437,7 +1437,7 @@ def list_snapshots( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: @@ -1558,7 +1558,7 @@ def create_snapshot( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name, subscription]) if request is not None and has_flattened_params: @@ -1605,8 +1605,9 @@ def update_snapshot( metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. Snapshots are used in - Seek operations, which allow + Seek + operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a @@ -1698,7 +1699,7 @@ def delete_snapshot( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index f0472bdd0f93..b6ef6c6c4617 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -164,8 +164,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -540,10 +543,10 @@ def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot] Gets the configuration details of a snapshot. Snapshots are used in Seek operations, which allow you to manage - message acknowledgments in bulk. That is, you can set - the acknowledgment state of messages in an existing + href="https://cloud.google.com/pubsub/docs/replay-overview">Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state captured by a snapshot. Returns: @@ -645,8 +648,9 @@ def update_snapshot( r"""Return a callable for the update snapshot method over gRPC. Updates an existing snapshot. Snapshots are used in - Seek operations, which allow + Seek + operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 9a4b4522402b..9e3e712125f5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -209,8 +209,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -550,10 +553,10 @@ def get_snapshot( Gets the configuration details of a snapshot. Snapshots are used in Seek operations, which allow you to manage - message acknowledgments in bulk. That is, you can set - the acknowledgment state of messages in an existing + href="https://cloud.google.com/pubsub/docs/replay-overview">Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing subscription to the state captured by a snapshot. Returns: @@ -657,8 +660,9 @@ def update_snapshot( r"""Return a callable for the update snapshot method over gRPC. Updates an existing snapshot. Snapshots are used in - Seek operations, which allow + Seek + operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index c0253371ad68..ab9947d7a443 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -496,21 +496,23 @@ def test_publisher_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), ( PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_publisher_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, 
transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -544,6 +546,71 @@ def test_publisher_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), + ( + PublisherAsyncClient, + transports.PublisherGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_publisher_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + @pytest.mark.parametrize("request_type", [pubsub.Topic, dict,]) def test_create_topic(request_type, transport: str = "grpc"): client = PublisherClient( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 6088af71edca..fc2090743709 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -512,21 +512,28 @@ def test_schema_service_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), ( SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, 
"grpc_asyncio", + grpc_helpers_async, ), ], ) def test_schema_service_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -562,6 +569,76 @@ def test_schema_service_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + SchemaServiceClient, + transports.SchemaServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + SchemaServiceAsyncClient, + transports.SchemaServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_schema_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + @pytest.mark.parametrize("request_type", [gp_schema.CreateSchemaRequest, dict,]) def test_create_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 9f6ebd6c36c0..37a208c34b86 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -500,21 +500,23 @@ def test_subscriber_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), ( SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, 
), ], ) def test_subscriber_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -548,6 +550,71 @@ def test_subscriber_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), + ( + SubscriberAsyncClient, + transports.SubscriberGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_subscriber_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.keepalive_time_ms", 30000), + ], + ) + + @pytest.mark.parametrize("request_type", [pubsub.Subscription, dict,]) def test_create_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( From 9e7517f9dfd5b49df186c0fc4fcd7eb90698472f Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 5 Feb 2022 10:55:05 +0100 Subject: [PATCH 0759/1197] chore(deps): update dependency pytest to v7 (#576) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index b1b513af71fe..b54ed4d838da 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.11.1 -pytest==6.2.5 +pytest==7.0.0 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From b9694001dd892474936255f1ae37d4525242d6d6 Mon 
Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Feb 2022 11:42:53 +0000 Subject: [PATCH 0760/1197] chore: use gapic-generator-python 0.63.1 (#575) - [x] Regenerate this pull request now. docs: add autogenerated code snippets PiperOrigin-RevId: 426256923 Source-Link: https://github.com/googleapis/googleapis/commit/9ebabfa115341b8016b6ed64b22c04260360a8ff Source-Link: https://github.com/googleapis/googleapis-gen/commit/a88175263e60a1d45d3a447848652b0f670b2cb8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTg4MTc1MjYzZTYwYTFkNDVkM2E0NDc4NDg2NTJiMGY2NzBiMmNiOCJ9 --- .../services/publisher/async_client.py | 175 ++ .../pubsub_v1/services/publisher/client.py | 184 ++ .../services/schema_service/async_client.py | 119 + .../services/schema_service/client.py | 125 + .../services/subscriber/async_client.py | 319 ++ .../pubsub_v1/services/subscriber/client.py | 335 ++ packages/google-cloud-pubsub/owlbot.py | 8 + ..._pubsub_v1_publisher_create_topic_async.py | 45 + ...d_pubsub_v1_publisher_create_topic_sync.py | 45 + ..._pubsub_v1_publisher_delete_topic_async.py | 43 + ...d_pubsub_v1_publisher_delete_topic_sync.py | 43 + ..._v1_publisher_detach_subscription_async.py | 45 + ...b_v1_publisher_detach_subscription_sync.py | 45 + ...ted_pubsub_v1_publisher_get_topic_async.py | 45 + ...ated_pubsub_v1_publisher_get_topic_sync.py | 45 + ...v1_publisher_list_topic_snapshots_async.py | 44 + ..._v1_publisher_list_topic_snapshots_sync.py | 44 + ...ublisher_list_topic_subscriptions_async.py | 44 + ...publisher_list_topic_subscriptions_sync.py | 44 + ...d_pubsub_v1_publisher_list_topics_async.py | 44 + ...ed_pubsub_v1_publisher_list_topics_sync.py | 44 + ...rated_pubsub_v1_publisher_publish_async.py | 45 + ...erated_pubsub_v1_publisher_publish_sync.py | 45 + ..._pubsub_v1_publisher_update_topic_async.py | 48 + ...d_pubsub_v1_publisher_update_topic_sync.py | 48 + ...b_v1_schema_service_create_schema_async.py | 49 + 
...ub_v1_schema_service_create_schema_sync.py | 49 + ...b_v1_schema_service_delete_schema_async.py | 43 + ...ub_v1_schema_service_delete_schema_sync.py | 43 + ...bsub_v1_schema_service_get_schema_async.py | 45 + ...ubsub_v1_schema_service_get_schema_sync.py | 45 + ...ub_v1_schema_service_list_schemas_async.py | 44 + ...sub_v1_schema_service_list_schemas_sync.py | 44 + ...1_schema_service_validate_message_async.py | 46 + ...v1_schema_service_validate_message_sync.py | 46 + ...v1_schema_service_validate_schema_async.py | 49 + ..._v1_schema_service_validate_schema_sync.py | 49 + ..._pubsub_v1_subscriber_acknowledge_async.py | 44 + ...d_pubsub_v1_subscriber_acknowledge_sync.py | 44 + ...sub_v1_subscriber_create_snapshot_async.py | 46 + ...bsub_v1_subscriber_create_snapshot_sync.py | 46 + ...v1_subscriber_create_subscription_async.py | 46 + ..._v1_subscriber_create_subscription_sync.py | 46 + ...sub_v1_subscriber_delete_snapshot_async.py | 43 + ...bsub_v1_subscriber_delete_snapshot_sync.py | 43 + ...v1_subscriber_delete_subscription_async.py | 43 + ..._v1_subscriber_delete_subscription_sync.py | 43 + ...pubsub_v1_subscriber_get_snapshot_async.py | 45 + ..._pubsub_v1_subscriber_get_snapshot_sync.py | 45 + ...ub_v1_subscriber_get_subscription_async.py | 45 + ...sub_v1_subscriber_get_subscription_sync.py | 45 + ...bsub_v1_subscriber_list_snapshots_async.py | 44 + ...ubsub_v1_subscriber_list_snapshots_sync.py | 44 + ..._v1_subscriber_list_subscriptions_async.py | 44 + ...b_v1_subscriber_list_subscriptions_sync.py | 44 + ...v1_subscriber_modify_ack_deadline_async.py | 45 + ..._v1_subscriber_modify_ack_deadline_sync.py | 45 + ..._v1_subscriber_modify_push_config_async.py | 43 + ...b_v1_subscriber_modify_push_config_sync.py | 43 + ...nerated_pubsub_v1_subscriber_pull_async.py | 46 + ...enerated_pubsub_v1_subscriber_pull_sync.py | 46 + ...nerated_pubsub_v1_subscriber_seek_async.py | 45 + ...enerated_pubsub_v1_subscriber_seek_sync.py | 45 + 
...bsub_v1_subscriber_streaming_pull_async.py | 54 + ...ubsub_v1_subscriber_streaming_pull_sync.py | 54 + ...sub_v1_subscriber_update_snapshot_async.py | 44 + ...bsub_v1_subscriber_update_snapshot_sync.py | 44 + ...v1_subscriber_update_subscription_async.py | 49 + ..._v1_subscriber_update_subscription_sync.py | 49 + .../snippet_metadata_pubsub_v1.json | 2707 +++++++++++++++++ 70 files changed, 6782 insertions(+) create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py create mode 100644 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py create mode 100644 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py create mode 100644 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py create mode 100644 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 308e0f62950f..2a1dd9213c5e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -220,6 +220,26 @@ async def create_topic( name rules] (https://cloud.google.com/pubsub/docs/admin#resource_names). + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.Topic, dict]): The request object. A topic resource. @@ -304,6 +324,29 @@ async def update_topic( r"""Updates an existing topic. Note that certain properties of a topic are not modifiable. + + .. code-block:: + + from google import pubsub_v1 + + def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): The request object. Request for the UpdateTopic method. @@ -365,6 +408,26 @@ async def publish( r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. + + .. code-block:: + + from google import pubsub_v1 + + def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.PublishRequest, dict]): The request object. Request for the Publish method. @@ -457,6 +520,25 @@ async def get_topic( ) -> pubsub.Topic: r"""Gets the configuration of a topic. + .. 
code-block:: + + from google import pubsub_v1 + + def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): The request object. Request for the GetTopic method. @@ -537,6 +619,24 @@ async def list_topics( ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. + .. code-block:: + + from google import pubsub_v1 + + def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): The request object. Request for the `ListTopics` method. @@ -628,6 +728,25 @@ async def list_topic_subscriptions( r"""Lists the names of the attached subscriptions on this topic. + + .. code-block:: + + from google import pubsub_v1 + + def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): The request object. Request for the @@ -725,6 +844,25 @@ async def list_topic_snapshots( bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): The request object. Request for the `ListTopicSnapshots` @@ -822,6 +960,23 @@ async def delete_topic( subscriptions to this topic are not deleted, but their ``topic`` field is set to ``_deleted-topic_``. + + .. code-block:: + + from google import pubsub_v1 + + def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.delete_topic(request=request) + Args: request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): The request object. Request for the `DeleteTopic` @@ -899,6 +1054,26 @@ async def detach_subscription( the subscription is a push subscription, pushes to the endpoint will stop. + + .. code-block:: + + from google import pubsub_v1 + + def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.detach_subscription(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): The request object. 
Request for the DetachSubscription diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index d445ddd141b1..01612ead6cde 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -469,6 +469,27 @@ def create_topic( (https://cloud.google.com/pubsub/docs/admin#resource_names). + + .. code-block:: + + from google import pubsub_v1 + + def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.Topic, dict]): The request object. A topic resource. @@ -545,6 +566,30 @@ def update_topic( properties of a topic are not modifiable. + + .. code-block:: + + from google import pubsub_v1 + + def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): The request object. Request for the UpdateTopic method. @@ -599,6 +644,27 @@ def publish( the topic does not exist. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.PublishRequest, dict]): The request object. Request for the Publish method. @@ -677,6 +743,26 @@ def get_topic( r"""Gets the configuration of a topic. + .. code-block:: + + from google import pubsub_v1 + + def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): The request object. Request for the GetTopic method. @@ -747,6 +833,25 @@ def list_topics( r"""Lists matching topics. + .. code-block:: + + from google import pubsub_v1 + + def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + for response in page_result: + print(response) + + Args: request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): The request object. Request for the `ListTopics` method. @@ -828,6 +933,26 @@ def list_topic_subscriptions( topic. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + for response in page_result: + print(response) + + Args: request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): The request object. Request for the @@ -915,6 +1040,26 @@ def list_topic_snapshots( in an existing subscription to the state captured by a snapshot. + + .. code-block:: + + from google import pubsub_v1 + + def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + for response in page_result: + print(response) + + Args: request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): The request object. Request for the `ListTopicSnapshots` @@ -1002,6 +1147,24 @@ def delete_topic( field is set to ``_deleted-topic_``. + + .. code-block:: + + from google import pubsub_v1 + + def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.delete_topic(request=request) + + Args: request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): The request object. Request for the `DeleteTopic` @@ -1071,6 +1234,27 @@ def detach_subscription( will stop. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.detach_subscription(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): The request object. Request for the DetachSubscription diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 3ab7daf7cad5..80826bb114d8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -218,6 +218,29 @@ async def create_schema( ) -> gp_schema.Schema: r"""Creates a schema. + .. code-block:: + + from google import pubsub_v1 + + def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): The request object. Request for the CreateSchema method. @@ -311,6 +334,25 @@ async def get_schema( ) -> schema.Schema: r"""Gets a schema. + .. 
code-block:: + + from google import pubsub_v1 + + def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): The request object. Request for the GetSchema method. @@ -379,6 +421,24 @@ async def list_schemas( ) -> pagers.ListSchemasAsyncPager: r"""Lists schemas in a project. + .. code-block:: + + from google import pubsub_v1 + + def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): The request object. Request for the `ListSchemas` @@ -458,6 +518,22 @@ async def delete_schema( ) -> None: r"""Deletes a schema. + .. code-block:: + + from google import pubsub_v1 + + def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.delete_schema(request=request) + Args: request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): The request object. Request for the `DeleteSchema` @@ -523,6 +599,29 @@ async def validate_schema( ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. + .. 
code-block:: + + from google import pubsub_v1 + + def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): The request object. Request for the `ValidateSchema` @@ -602,6 +701,26 @@ async def validate_message( ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. + .. code-block:: + + from google import pubsub_v1 + + def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): The request object. Request for the `ValidateMessage` diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 0a3575f6a4d9..41914efd817a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -410,6 +410,30 @@ def create_schema( ) -> gp_schema.Schema: r"""Creates a schema. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): The request object. Request for the CreateSchema method. @@ -503,6 +527,26 @@ def get_schema( ) -> schema.Schema: r"""Gets a schema. + + .. code-block:: + + from google import pubsub_v1 + + def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): The request object. Request for the GetSchema method. @@ -571,6 +615,25 @@ def list_schemas( ) -> pagers.ListSchemasPager: r"""Lists schemas in a project. + + .. code-block:: + + from google import pubsub_v1 + + def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): The request object. Request for the `ListSchemas` @@ -650,6 +713,23 @@ def delete_schema( ) -> None: r"""Deletes a schema. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.delete_schema(request=request) + Args: request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): The request object. Request for the `DeleteSchema` @@ -715,6 +795,30 @@ def validate_schema( ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. + + .. code-block:: + + from google import pubsub_v1 + + def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): The request object. Request for the `ValidateSchema` @@ -794,6 +898,27 @@ def validate_message( ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. + + .. code-block:: + + from google import pubsub_v1 + + def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): The request object. 
Request for the `ValidateMessage` diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 319915887b4b..2137f5cc9a79 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -245,6 +245,27 @@ async def create_subscription( Note that for REST API requests, you must specify a name in the request. + + .. code-block:: + + from google import pubsub_v1 + + def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.Subscription, dict]): The request object. A subscription resource. @@ -385,6 +406,25 @@ async def get_subscription( ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. + .. code-block:: + + from google import pubsub_v1 + + def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): The request object. Request for the GetSubscription @@ -468,6 +508,30 @@ async def update_subscription( properties of a subscription, such as its topic, are not modifiable. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): The request object. Request for the UpdateSubscription @@ -527,6 +591,24 @@ async def list_subscriptions( ) -> pagers.ListSubscriptionsAsyncPager: r"""Lists matching subscriptions. + .. code-block:: + + from google import pubsub_v1 + + def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): The request object. Request for the `ListSubscriptions` @@ -622,6 +704,23 @@ async def delete_subscription( new one has no association with the old subscription or its topic unless the same topic is specified. + + .. code-block:: + + from google import pubsub_v1 + + def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.delete_subscription(request=request) + Args: request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): The request object. 
Request for the DeleteSubscription @@ -704,6 +803,25 @@ async def modify_ack_deadline( does not modify the subscription-level ``ackDeadlineSeconds`` used for subsequent messages. + + .. code-block:: + + from google import pubsub_v1 + + def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + response = client.modify_ack_deadline(request=request) + Args: request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): The request object. Request for the ModifyAckDeadline @@ -812,6 +930,24 @@ async def acknowledge( Acknowledging a message more than once will not result in an error. + + .. code-block:: + + from google import pubsub_v1 + + def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + response = client.acknowledge(request=request) + Args: request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): The request object. Request for the Acknowledge method. @@ -901,6 +1037,27 @@ async def pull( ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given subscription. + + .. code-block:: + + from google import pubsub_v1 + + def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = client.pull(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.PullRequest, dict]): The request object. 
Request for the `Pull` method. @@ -1023,6 +1180,35 @@ def streaming_pull( re-establish the stream. Flow control can be achieved by configuring the underlying RPC channel. + + .. code-block:: + + from google import pubsub_v1 + + def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + for response in stream: + print(response) + Args: requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): The request object AsyncIterator. Request for the `StreamingPull` @@ -1088,6 +1274,23 @@ async def modify_push_config( Messages will accumulate for delivery continuously through the call regardless of changes to the ``PushConfig``. + + .. code-block:: + + from google import pubsub_v1 + + def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.modify_push_config(request=request) + Args: request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): The request object. Request for the ModifyPushConfig @@ -1183,6 +1386,26 @@ async def get_snapshot( acknowledgment state of messages in an existing subscription to the state captured by a snapshot. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): The request object. Request for the GetSnapshot method. @@ -1272,6 +1495,25 @@ async def list_snapshots( bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. + + .. code-block:: + + from google import pubsub_v1 + + def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + for response in page_result: + print(response) + Args: request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): The request object. Request for the `ListSnapshots` @@ -1381,6 +1623,27 @@ async def create_snapshot( Note that for REST API requests, you must specify a name in the request. + + .. code-block:: + + from google import pubsub_v1 + + def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): The request object. Request for the `CreateSnapshot` @@ -1493,6 +1756,25 @@ async def update_snapshot( existing subscription to the state captured by a snapshot. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = client.update_snapshot(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): The request object. Request for the UpdateSnapshot @@ -1567,6 +1849,23 @@ async def delete_snapshot( no association with the old snapshot or its subscription, unless the same subscription is specified. + + .. code-block:: + + from google import pubsub_v1 + + def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete_snapshot(request=request) + Args: request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): The request object. Request for the `DeleteSnapshot` @@ -1647,6 +1946,26 @@ async def seek( Note that both the subscription and the snapshot must be on the same topic. + + .. code-block:: + + from google import pubsub_v1 + + def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle response + print(response) + Args: request (Union[google.pubsub_v1.types.SeekRequest, dict]): The request object. Request for the `Seek` method. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 2d2839b2ada7..ff5cda6652eb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -484,6 +484,28 @@ def create_subscription( request. + + .. code-block:: + + from google import pubsub_v1 + + def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.Subscription, dict]): The request object. A subscription resource. @@ -614,6 +636,26 @@ def get_subscription( r"""Gets the configuration details of a subscription. + .. code-block:: + + from google import pubsub_v1 + + def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): The request object. Request for the GetSubscription @@ -687,6 +729,31 @@ def update_subscription( modifiable. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): The request object. Request for the UpdateSubscription @@ -739,6 +806,25 @@ def list_subscriptions( r"""Lists matching subscriptions. + .. code-block:: + + from google import pubsub_v1 + + def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + for response in page_result: + print(response) + + Args: request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): The request object. Request for the `ListSubscriptions` @@ -824,6 +910,24 @@ def delete_subscription( topic unless the same topic is specified. + + .. code-block:: + + from google import pubsub_v1 + + def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.delete_subscription(request=request) + + Args: request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): The request object. Request for the DeleteSubscription @@ -898,6 +1002,26 @@ def modify_ack_deadline( used for subsequent messages. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + response = client.modify_ack_deadline(request=request) + + Args: request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): The request object. Request for the ModifyAckDeadline @@ -998,6 +1122,25 @@ def acknowledge( error. + + .. code-block:: + + from google import pubsub_v1 + + def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + response = client.acknowledge(request=request) + + Args: request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): The request object. Request for the Acknowledge method. @@ -1079,6 +1222,28 @@ def pull( pending for the given subscription. + + .. code-block:: + + from google import pubsub_v1 + + def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = client.pull(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.PullRequest, dict]): The request object. Request for the `Pull` method. @@ -1191,6 +1356,36 @@ def streaming_pull( configuring the underlying RPC channel. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + for response in stream: + print(response) + + Args: requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): The request object iterator. Request for the `StreamingPull` @@ -1245,6 +1440,24 @@ def modify_push_config( call regardless of changes to the ``PushConfig``. + + .. code-block:: + + from google import pubsub_v1 + + def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.modify_push_config(request=request) + + Args: request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): The request object. Request for the ModifyPushConfig @@ -1332,6 +1545,27 @@ def get_snapshot( subscription to the state captured by a snapshot. + + .. code-block:: + + from google import pubsub_v1 + + def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): The request object. 
Request for the GetSnapshot method. @@ -1411,6 +1645,26 @@ def list_snapshots( in an existing subscription to the state captured by a snapshot. + + .. code-block:: + + from google import pubsub_v1 + + def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + for response in page_result: + print(response) + + Args: request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): The request object. Request for the `ListSnapshots` @@ -1510,6 +1764,28 @@ def create_snapshot( request. + + .. code-block:: + + from google import pubsub_v1 + + def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): The request object. Request for the `CreateSnapshot` @@ -1614,6 +1890,26 @@ def update_snapshot( snapshot. + + .. code-block:: + + from google import pubsub_v1 + + def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = client.update_snapshot(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): The request object. Request for the UpdateSnapshot @@ -1681,6 +1977,24 @@ def delete_snapshot( the same subscription is specified. + + .. 
code-block:: + + from google import pubsub_v1 + + def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete_snapshot(request=request) + + Args: request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): The request object. Request for the `DeleteSnapshot` @@ -1753,6 +2067,27 @@ def seek( same topic. + + .. code-block:: + + from google import pubsub_v1 + + def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle response + print(response) + + Args: request (Union[google.pubsub_v1.types.SeekRequest, dict]): The request object. Request for the `Seek` method. diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index a58470e65dc3..0ab7b944a556 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -332,6 +332,14 @@ if count < 1: raise Exception(".coveragerc replacement failed.") + # fix the package name in samples/generated_samples to reflect + # the package on pypi. 
https://pypi.org/project/google-cloud-pubsub/ + s.replace( + library / "samples/generated_samples/**/*.py", + "pip install google-pubsub", + "pip install google-cloud-pubsub", + ) + s.move( library, excludes=[ diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py new file mode 100644 index 000000000000..9b03af89b6ef --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_CreateTopic_async] +from google import pubsub_v1 + + +async def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = await client.create_topic(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_CreateTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py new file mode 100644 index 000000000000..99af672fc633 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync] +from google import pubsub_v1 + + +def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py new file mode 100644 index 000000000000..e375b02c8a5e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async] +from google import pubsub_v1 + + +async def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + response = await client.delete_topic(request=request) + + +# [END pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py new file mode 100644 index 000000000000..7b1932026874 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync] +from google import pubsub_v1 + + +def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.delete_topic(request=request) + + +# [END pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py new file mode 100644 index 000000000000..b0b349d669da --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async] +from google import pubsub_v1 + + +async def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.detach_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py new file mode 100644 index 000000000000..b697598e98c9 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DetachSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync] +from google import pubsub_v1 + + +def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.detach_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py new file mode 100644 index 000000000000..485a61c99f79 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_GetTopic_async] +from google import pubsub_v1 + + +async def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = await client.get_topic(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_GetTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py new file mode 100644 index 000000000000..d6f28516dfbb --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_GetTopic_sync] +from google import pubsub_v1 + + +def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_GetTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py new file mode 100644 index 000000000000..71054e6e20cc --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async] +from google import pubsub_v1 + + +async def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + async for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py new file mode 100644 index 000000000000..f49fb6615c52 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync] +from google import pubsub_v1 + + +def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py new file mode 100644 index 000000000000..4edb308c9e0c --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async] +from google import pubsub_v1 + + +async def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + async for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py new file mode 100644 index 000000000000..c90b1bd689eb --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync] +from google import pubsub_v1 + + +def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py new file mode 100644 index 000000000000..3be9178a07e3 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_ListTopics_async] +from google import pubsub_v1 + + +async def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + async for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_ListTopics_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py new file mode 100644 index 000000000000..d0fe084af5fb --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_ListTopics_sync] +from google import pubsub_v1 + + +def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_ListTopics_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py new file mode 100644 index 000000000000..d9d84eaa79f0 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Publish +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_Publish_async] +from google import pubsub_v1 + + +async def sample_publish(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = await client.publish(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_Publish_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py new file mode 100644 index 000000000000..7a265a832c31 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Publish +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_Publish_sync] +from google import pubsub_v1 + + +def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_Publish_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py new file mode 100644 index 000000000000..8b83713b7c76 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async] +from google import pubsub_v1 + + +async def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py new file mode 100644 index 000000000000..3863bf4e8fa8 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync] +from google import pubsub_v1 + + +def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py new file mode 100644 index 000000000000..c78b58497726 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async] +from google import pubsub_v1 + + +async def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.create_schema(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py new file mode 100644 index 000000000000..d3d2cbbc7e7a --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync] +from google import pubsub_v1 + + +def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py new file mode 100644 index 000000000000..80f68c21b006 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async] +from google import pubsub_v1 + + +async def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.delete_schema(request=request) + + +# [END pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py new file mode 100644 index 000000000000..f9711fb6e745 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync] +from google import pubsub_v1 + + +def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.delete_schema(request=request) + + +# [END pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py new file mode 100644 index 000000000000..ae9fd6d68412 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_GetSchema_async] +from google import pubsub_v1 + + +async def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py new file mode 100644 index 000000000000..41e2fde074a9 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync] +from google import pubsub_v1 + + +def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py new file mode 100644 index 000000000000..a92cb700e8fe --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async] +from google import pubsub_v1 + + +async def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + async for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py new file mode 100644 index 000000000000..58beed28c44e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync] +from google import pubsub_v1 + + +def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py new file mode 100644 index 000000000000..f32a665fa4ca --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async] +from google import pubsub_v1 + + +async def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = await client.validate_message(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py new file mode 100644 index 000000000000..c31c0c4ddfb8 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync] +from google import pubsub_v1 + + +def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py new file mode 100644 index 000000000000..4b73371386bc --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async] +from google import pubsub_v1 + + +async def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.validate_schema(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py new file mode 100644 index 000000000000..17455ab2f212 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync] +from google import pubsub_v1 + + +def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py new file mode 100644 index 000000000000..120b9e1f5d0c --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Acknowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async] +from google import pubsub_v1 + + +async def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + response = await client.acknowledge(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py new file mode 100644 index 000000000000..9da8a5fd5755 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Acknowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync] +from google import pubsub_v1 + + +def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + response = client.acknowledge(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py new file mode 100644 index 000000000000..ca45144e8f09 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async] +from google import pubsub_v1 + + +async def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = await client.create_snapshot(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py new file mode 100644 index 000000000000..f60d35d4dbb0 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync] +from google import pubsub_v1 + + +def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py new file mode 100644 index 000000000000..095a7ff0eb99 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async] +from google import pubsub_v1 + + +async def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = await client.create_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py new file mode 100644 index 000000000000..7495a50c5a6d --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync] +from google import pubsub_v1 + + +def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py new file mode 100644 index 000000000000..2fd2f7df3b5d --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async] +from google import pubsub_v1 + + +async def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = await client.delete_snapshot(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py new file mode 100644 index 000000000000..8315700fc64e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync] +from google import pubsub_v1 + + +def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete_snapshot(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py new file mode 100644 index 000000000000..4394089f5127 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async] +from google import pubsub_v1 + + +async def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.delete_subscription(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py new file mode 100644 index 000000000000..031880b9c522 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync] +from google import pubsub_v1 + + +def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.delete_subscription(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py new file mode 100644 index 000000000000..8f1bf92f7e0e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async] +from google import pubsub_v1 + + +async def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = await client.get_snapshot(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py new file mode 100644 index 000000000000..4abcf326e755 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync] +from google import pubsub_v1 + + +def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py new file mode 100644 index 000000000000..3908cb934124 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async] +from google import pubsub_v1 + + +async def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py new file mode 100644 index 000000000000..85065ae24559 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync] +from google import pubsub_v1 + + +def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py new file mode 100644 index 000000000000..262303b38e69 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async] +from google import pubsub_v1 + + +async def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + async for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py new file mode 100644 index 000000000000..0acdc7f43b39 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync] +from google import pubsub_v1 + + +def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py new file mode 100644 index 000000000000..ae2f4c12fc74 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async] +from google import pubsub_v1 + + +async def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + async for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py new file mode 100644 index 000000000000..a173fa081f46 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync] +from google import pubsub_v1 + + +def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + for response in page_result: + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py new file mode 100644 index 000000000000..acc6c2924d37 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyAckDeadline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async] +from google import pubsub_v1 + + +async def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + response = await client.modify_ack_deadline(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py new file mode 100644 index 000000000000..359b10f080f5 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyAckDeadline +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync] +from google import pubsub_v1 + + +def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + response = client.modify_ack_deadline(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py new file mode 100644 index 000000000000..6ea1fb283cf7 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyPushConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async] +from google import pubsub_v1 + + +async def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.modify_push_config(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py new file mode 100644 index 000000000000..2c127a9fd35c --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyPushConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync] +from google import pubsub_v1 + + +def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.modify_push_config(request=request) + + +# [END pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py new file mode 100644 index 000000000000..fb31d074533d --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Pull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_Pull_async] +from google import pubsub_v1 + + +async def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = await client.pull(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_Pull_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py new file mode 100644 index 000000000000..4a1d380fb1d3 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Pull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_Pull_sync] +from google import pubsub_v1 + + +def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = client.pull(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_Pull_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py new file mode 100644 index 000000000000..cf2c53aee75e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Seek +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_Seek_async] +from google import pubsub_v1 + + +async def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.seek(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_Seek_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py new file mode 100644 index 000000000000..38d9f22b9baf --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Seek +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_Seek_sync] +from google import pubsub_v1 + + +def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_Seek_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py new file mode 100644 index 000000000000..d3e1a5166852 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingPull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async] +from google import pubsub_v1 + + +async def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_pull(requests=request_generator()) + async for response in stream: + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py new file mode 100644 index 000000000000..8765f70427a0 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingPull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync] +from google import pubsub_v1 + + +def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + for response in stream: + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py new file mode 100644 index 000000000000..3dc78eb4260a --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async] +from google import pubsub_v1 + + +async def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = await client.update_snapshot(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py new file mode 100644 index 000000000000..adfd50ef85e4 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync] +from google import pubsub_v1 + + +def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = client.update_snapshot(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py new file mode 100644 index 000000000000..59d32c59b1e7 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async] +from google import pubsub_v1 + + +async def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = await client.update_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py new file mode 100644 index 000000000000..f29e54be5414 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync] +from google import pubsub_v1 + + +def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle response + print(response) + +# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json new file mode 100644 index 000000000000..c35ef2c6740f --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json @@ -0,0 +1,2707 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + } 
+ }, + "file": "pubsub_generated_pubsub_v1_publisher_create_topic_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_CreateTopic_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_create_topic_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_delete_topic_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + 
"shortName": "DeleteTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + 
"async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "GetTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_get_topic_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_GetTopic_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "GetTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_get_topic_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_GetTopic_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": 
"RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + 
"type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "ListTopics" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_list_topics_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopics_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "ListTopics" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_list_topics_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopics_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "Publish" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_publish_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_Publish_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "Publish" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_publish_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_Publish_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_update_topic_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + } + }, + "file": "pubsub_generated_pubsub_v1_publisher_update_topic_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_create_schema_async.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_get_schema_async.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_GetSchema_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 
33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_validate_message_async.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + 
"end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py", + "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + } + }, + "file": "pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py", + "regionTag": 
"pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + } + }, + "file": 
"pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + 
"service": { + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, 
+ { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + 
"start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 40, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 41, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 40, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 41, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "Pull" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_pull_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Pull_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "Pull" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_pull_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Pull_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "Seek" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_seek_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Seek_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "Seek" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_seek_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Seek_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + 
{ + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "start": 41, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "start": 41, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + } + }, + "file": "pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py", + "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync", + "segments": [ + { 
+ "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} From 59960e548d53194e452728d87b2f1e8a79287342 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Feb 2022 12:30:43 +0000 Subject: [PATCH 0761/1197] feat: add exactly once delivery flag (#577) - [x] Regenerate this pull request now. PiperOrigin-RevId: 426401315 Source-Link: https://github.com/googleapis/googleapis/commit/f02f4392673ec85157120cf451ce32fcb613ad5a Source-Link: https://github.com/googleapis/googleapis-gen/commit/a6d5846eeb74502057ba19968328365bfbeedfb9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTZkNTg0NmVlYjc0NTAyMDU3YmExOTk2ODMyODM2NWJmYmVlZGZiOSJ9 --- .../google/pubsub_v1/types/pubsub.py | 17 +++++++++++++++++ .../scripts/fixup_pubsub_v1_keywords.py | 2 +- .../unit/gapic/pubsub_v1/test_subscriber.py | 12 ++++++++++++ 3 files changed, 30 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index de29dcd1016a..93342c8b69d3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -558,6 +558,22 @@ class Subscription(proto.Message): ``StreamingPull`` requests will return FAILED_PRECONDITION. If the subscription is a push subscription, pushes to the endpoint will not be made. 
+ enable_exactly_once_delivery (bool): + If true, Pub/Sub provides the following guarantees for the + delivery of a message with a given value of ``message_id`` + on this subscription: + + - The message sent to a subscriber is guaranteed not to be + resent before the message's acknowledgement deadline + expires. + - An acknowledged message will not be resent to a + subscriber. + + Note that subscribers may still receive multiple copies of a + message when ``enable_exactly_once_delivery`` is true if the + message was published multiple times by a publisher client. + These copies are considered distinct by Pub/Sub and have + distinct ``message_id`` values. topic_message_retention_duration (google.protobuf.duration_pb2.Duration): Output only. Indicates the minimum duration for which a message is retained after it is published to the @@ -588,6 +604,7 @@ class Subscription(proto.Message): ) retry_policy = proto.Field(proto.MESSAGE, number=14, message="RetryPolicy",) detached = proto.Field(proto.BOOL, number=15,) + enable_exactly_once_delivery = proto.Field(proto.BOOL, number=16,) topic_message_retention_duration = proto.Field( proto.MESSAGE, number=17, message=duration_pb2.Duration, ) diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index ab7623917b6f..039fa1e8f64b 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,7 +42,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'acknowledge': ('subscription', 'ack_ids', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 
'detached', 'topic_message_retention_duration', ), + 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', ), 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), 'delete_schema': ('name', ), 'delete_snapshot': ('snapshot', ), diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 37a208c34b86..7ba54cc6de39 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -638,6 +638,7 @@ def test_create_subscription(request_type, transport: str = "grpc"): enable_message_ordering=True, filter="filter_value", detached=True, + enable_exactly_once_delivery=True, ) response = client.create_subscription(request) @@ -655,6 +656,7 @@ def test_create_subscription(request_type, transport: str = "grpc"): assert response.enable_message_ordering is True assert response.filter == "filter_value" assert response.detached is True + assert response.enable_exactly_once_delivery is True def test_create_subscription_empty_call(): @@ -700,6 +702,7 @@ async def test_create_subscription_async( enable_message_ordering=True, filter="filter_value", detached=True, + enable_exactly_once_delivery=True, ) ) response = await client.create_subscription(request) @@ -718,6 +721,7 @@ async def test_create_subscription_async( assert response.enable_message_ordering is True assert response.filter == "filter_value" assert response.detached is True + assert response.enable_exactly_once_delivery is True @pytest.mark.asyncio @@ -905,6 
+909,7 @@ def test_get_subscription(request_type, transport: str = "grpc"): enable_message_ordering=True, filter="filter_value", detached=True, + enable_exactly_once_delivery=True, ) response = client.get_subscription(request) @@ -922,6 +927,7 @@ def test_get_subscription(request_type, transport: str = "grpc"): assert response.enable_message_ordering is True assert response.filter == "filter_value" assert response.detached is True + assert response.enable_exactly_once_delivery is True def test_get_subscription_empty_call(): @@ -963,6 +969,7 @@ async def test_get_subscription_async( enable_message_ordering=True, filter="filter_value", detached=True, + enable_exactly_once_delivery=True, ) ) response = await client.get_subscription(request) @@ -981,6 +988,7 @@ async def test_get_subscription_async( assert response.enable_message_ordering is True assert response.filter == "filter_value" assert response.detached is True + assert response.enable_exactly_once_delivery is True @pytest.mark.asyncio @@ -1130,6 +1138,7 @@ def test_update_subscription(request_type, transport: str = "grpc"): enable_message_ordering=True, filter="filter_value", detached=True, + enable_exactly_once_delivery=True, ) response = client.update_subscription(request) @@ -1147,6 +1156,7 @@ def test_update_subscription(request_type, transport: str = "grpc"): assert response.enable_message_ordering is True assert response.filter == "filter_value" assert response.detached is True + assert response.enable_exactly_once_delivery is True def test_update_subscription_empty_call(): @@ -1192,6 +1202,7 @@ async def test_update_subscription_async( enable_message_ordering=True, filter="filter_value", detached=True, + enable_exactly_once_delivery=True, ) ) response = await client.update_subscription(request) @@ -1210,6 +1221,7 @@ async def test_update_subscription_async( assert response.enable_message_ordering is True assert response.filter == "filter_value" assert response.detached is True + assert 
response.enable_exactly_once_delivery is True @pytest.mark.asyncio From 63266f2edcf36ae53fd107a28f06ca925ea1faf1 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Feb 2022 08:35:14 -0500 Subject: [PATCH 0762/1197] feat: add support for exactly once delivery (#578) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add exactly once delivery flag PiperOrigin-RevId: 426415626 Source-Link: https://github.com/googleapis/googleapis/commit/1f707ab5f82f83644f2812f5f3d94462c72f9870 Source-Link: https://github.com/googleapis/googleapis-gen/commit/2baebc579ed42c4d17883a14eca64411f69dcc87 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMmJhZWJjNTc5ZWQ0MmM0ZDE3ODgzYTE0ZWNhNjQ0MTFmNjlkY2M4NyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/pubsub_v1/types/pubsub.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 93342c8b69d3..5750f495c2fc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1123,24 +1123,76 @@ class StreamingPullResponse(proto.Message): received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. This will not be empty. + acknowlege_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): + This field will only be set if + ``enable_exactly_once_delivery`` is set to ``true``. + modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): + This field will only be set if + ``enable_exactly_once_delivery`` is set to ``true``. 
subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): Properties associated with this subscription. """ + class AcknowledgeConfirmation(proto.Message): + r"""Acknowledgement IDs sent in one or more previous requests to + acknowledge a previously received message. + + Attributes: + ack_ids (Sequence[str]): + Successfully processed acknowledgement IDs. + invalid_ack_ids (Sequence[str]): + List of acknowledgement IDs that were + malformed or whose acknowledgement deadline has + expired. + unordered_ack_ids (Sequence[str]): + List of acknowledgement IDs that were out of + order. + """ + + ack_ids = proto.RepeatedField(proto.STRING, number=1,) + invalid_ack_ids = proto.RepeatedField(proto.STRING, number=2,) + unordered_ack_ids = proto.RepeatedField(proto.STRING, number=3,) + + class ModifyAckDeadlineConfirmation(proto.Message): + r"""Acknowledgement IDs sent in one or more previous requests to + modify the deadline for a specific message. + + Attributes: + ack_ids (Sequence[str]): + Successfully processed acknowledgement IDs. + invalid_ack_ids (Sequence[str]): + List of acknowledgement IDs that were + malformed or whose acknowledgement deadline has + expired. + """ + + ack_ids = proto.RepeatedField(proto.STRING, number=1,) + invalid_ack_ids = proto.RepeatedField(proto.STRING, number=2,) + class SubscriptionProperties(proto.Message): r"""Subscription properties sent as part of the response. Attributes: + exactly_once_delivery_enabled (bool): + True iff exactly once delivery is enabled for + this subscription. message_ordering_enabled (bool): True iff message ordering is enabled for this subscription. 
""" + exactly_once_delivery_enabled = proto.Field(proto.BOOL, number=1,) message_ordering_enabled = proto.Field(proto.BOOL, number=2,) received_messages = proto.RepeatedField( proto.MESSAGE, number=1, message="ReceivedMessage", ) + acknowlege_confirmation = proto.Field( + proto.MESSAGE, number=2, message=AcknowledgeConfirmation, + ) + modify_ack_deadline_confirmation = proto.Field( + proto.MESSAGE, number=3, message=ModifyAckDeadlineConfirmation, + ) subscription_properties = proto.Field( proto.MESSAGE, number=4, message=SubscriptionProperties, ) From 47d088844afff73fea8cc09e7557ba2d0ef3251c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Feb 2022 11:46:25 -0500 Subject: [PATCH 0763/1197] chore: Update pubsub BUILD.bazel package name to google-cloud-pubsub (#579) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): Update pubsub BUILD.bazel package name to google-cloud-pubsub PiperOrigin-RevId: 426930301 Source-Link: https://github.com/googleapis/googleapis/commit/397dc80e76fc6f2509a8c21e9926b3745e98a000 Source-Link: https://github.com/googleapis/googleapis-gen/commit/727cdf14ee16a5875dd7c20898c674eada443fae Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNzI3Y2RmMTRlZTE2YTU4NzVkZDdjMjA4OThjNjc0ZWFkYTQ0M2ZhZSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix replacement in owlbot.py * run generator locally Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google/pubsub/py.typed | 2 +- .../google/pubsub_v1/py.typed | 2 +- .../services/publisher/async_client.py | 18 ++- .../pubsub_v1/services/publisher/client.py | 18 ++- .../services/schema_service/async_client.py | 12 +- .../services/schema_service/client.py | 12 +- .../services/subscriber/async_client.py | 33 +++-- .../pubsub_v1/services/subscriber/client.py | 33 +++-- 
packages/google-cloud-pubsub/owlbot.py | 2 +- ..._pubsub_v1_publisher_create_topic_async.py | 2 +- ...d_pubsub_v1_publisher_create_topic_sync.py | 2 +- ..._pubsub_v1_publisher_delete_topic_async.py | 2 +- ...d_pubsub_v1_publisher_delete_topic_sync.py | 2 +- ..._v1_publisher_detach_subscription_async.py | 2 +- ...b_v1_publisher_detach_subscription_sync.py | 2 +- ...ted_pubsub_v1_publisher_get_topic_async.py | 2 +- ...ated_pubsub_v1_publisher_get_topic_sync.py | 2 +- ...v1_publisher_list_topic_snapshots_async.py | 2 + ..._v1_publisher_list_topic_snapshots_sync.py | 2 + ...ublisher_list_topic_subscriptions_async.py | 2 + ...publisher_list_topic_subscriptions_sync.py | 2 + ...d_pubsub_v1_publisher_list_topics_async.py | 2 + ...ed_pubsub_v1_publisher_list_topics_sync.py | 2 + ...rated_pubsub_v1_publisher_publish_async.py | 2 +- ...erated_pubsub_v1_publisher_publish_sync.py | 2 +- ..._pubsub_v1_publisher_update_topic_async.py | 2 +- ...d_pubsub_v1_publisher_update_topic_sync.py | 2 +- ...b_v1_schema_service_create_schema_async.py | 2 +- ...ub_v1_schema_service_create_schema_sync.py | 2 +- ...b_v1_schema_service_delete_schema_async.py | 2 +- ...ub_v1_schema_service_delete_schema_sync.py | 2 +- ...bsub_v1_schema_service_get_schema_async.py | 2 +- ...ubsub_v1_schema_service_get_schema_sync.py | 2 +- ...ub_v1_schema_service_list_schemas_async.py | 2 + ...sub_v1_schema_service_list_schemas_sync.py | 2 + ...1_schema_service_validate_message_async.py | 2 +- ...v1_schema_service_validate_message_sync.py | 2 +- ...v1_schema_service_validate_schema_async.py | 2 +- ..._v1_schema_service_validate_schema_sync.py | 2 +- ..._pubsub_v1_subscriber_acknowledge_async.py | 2 +- ...d_pubsub_v1_subscriber_acknowledge_sync.py | 2 +- ...sub_v1_subscriber_create_snapshot_async.py | 2 +- ...bsub_v1_subscriber_create_snapshot_sync.py | 2 +- ...v1_subscriber_create_subscription_async.py | 2 +- ..._v1_subscriber_create_subscription_sync.py | 2 +- ...sub_v1_subscriber_delete_snapshot_async.py | 2 +- 
...bsub_v1_subscriber_delete_snapshot_sync.py | 2 +- ...v1_subscriber_delete_subscription_async.py | 2 +- ..._v1_subscriber_delete_subscription_sync.py | 2 +- ...pubsub_v1_subscriber_get_snapshot_async.py | 2 +- ..._pubsub_v1_subscriber_get_snapshot_sync.py | 2 +- ...ub_v1_subscriber_get_subscription_async.py | 2 +- ...sub_v1_subscriber_get_subscription_sync.py | 2 +- ...bsub_v1_subscriber_list_snapshots_async.py | 2 + ...ubsub_v1_subscriber_list_snapshots_sync.py | 2 + ..._v1_subscriber_list_subscriptions_async.py | 2 + ...b_v1_subscriber_list_subscriptions_sync.py | 2 + ...v1_subscriber_modify_ack_deadline_async.py | 2 +- ..._v1_subscriber_modify_ack_deadline_sync.py | 2 +- ..._v1_subscriber_modify_push_config_async.py | 2 +- ...b_v1_subscriber_modify_push_config_sync.py | 2 +- ...nerated_pubsub_v1_subscriber_pull_async.py | 2 +- ...enerated_pubsub_v1_subscriber_pull_sync.py | 2 +- ...nerated_pubsub_v1_subscriber_seek_async.py | 2 +- ...enerated_pubsub_v1_subscriber_seek_sync.py | 2 +- ...bsub_v1_subscriber_streaming_pull_async.py | 3 + ...ubsub_v1_subscriber_streaming_pull_sync.py | 3 + ...sub_v1_subscriber_update_snapshot_async.py | 2 +- ...bsub_v1_subscriber_update_snapshot_sync.py | 2 +- ...v1_subscriber_update_subscription_async.py | 2 +- ..._v1_subscriber_update_subscription_sync.py | 2 +- .../snippet_metadata_pubsub_v1.json | 120 +++++++++++------- 72 files changed, 233 insertions(+), 145 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/py.typed b/packages/google-cloud-pubsub/google/pubsub/py.typed index 9b0e3743353f..1cec9a5ba1ab 100644 --- a/packages/google-cloud-pubsub/google/pubsub/py.typed +++ b/packages/google-cloud-pubsub/google/pubsub/py.typed @@ -1,2 +1,2 @@ # Marker file for PEP 561. -# The google-pubsub package uses inline types. +# The google-cloud-pubsub package uses inline types. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/py.typed b/packages/google-cloud-pubsub/google/pubsub_v1/py.typed index 9b0e3743353f..1cec9a5ba1ab 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/py.typed +++ b/packages/google-cloud-pubsub/google/pubsub_v1/py.typed @@ -1,2 +1,2 @@ # Marker file for PEP 561. -# The google-pubsub package uses inline types. +# The google-cloud-pubsub package uses inline types. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 2a1dd9213c5e..66b9f71c807f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -237,7 +237,7 @@ def sample_create_topic(): # Make the request response = client.create_topic(request=request) - # Handle response + # Handle the response print(response) Args: @@ -344,7 +344,7 @@ def sample_update_topic(): # Make the request response = client.update_topic(request=request) - # Handle response + # Handle the response print(response) Args: @@ -425,7 +425,7 @@ def sample_publish(): # Make the request response = client.publish(request=request) - # Handle response + # Handle the response print(response) Args: @@ -536,7 +536,7 @@ def sample_get_topic(): # Make the request response = client.get_topic(request=request) - # Handle response + # Handle the response print(response) Args: @@ -634,6 +634,8 @@ def sample_list_topics(): # Make the request page_result = client.list_topics(request=request) + + # Handle the response for response in page_result: print(response) @@ -744,6 +746,8 @@ def sample_list_topic_subscriptions(): # Make the request page_result = client.list_topic_subscriptions(request=request) + + # Handle the response for response in page_result: print(response) @@ -860,6 +864,8 @@ def sample_list_topic_snapshots(): # Make the request 
page_result = client.list_topic_snapshots(request=request) + + # Handle the response for response in page_result: print(response) @@ -975,7 +981,7 @@ def sample_delete_topic(): ) # Make the request - response = client.delete_topic(request=request) + client.delete_topic(request=request) Args: request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): @@ -1071,7 +1077,7 @@ def sample_detach_subscription(): # Make the request response = client.detach_subscription(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 01612ead6cde..2492a0854045 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -486,7 +486,7 @@ def sample_create_topic(): # Make the request response = client.create_topic(request=request) - # Handle response + # Handle the response print(response) @@ -586,7 +586,7 @@ def sample_update_topic(): # Make the request response = client.update_topic(request=request) - # Handle response + # Handle the response print(response) @@ -661,7 +661,7 @@ def sample_publish(): # Make the request response = client.publish(request=request) - # Handle response + # Handle the response print(response) @@ -759,7 +759,7 @@ def sample_get_topic(): # Make the request response = client.get_topic(request=request) - # Handle response + # Handle the response print(response) @@ -848,6 +848,8 @@ def sample_list_topics(): # Make the request page_result = client.list_topics(request=request) + + # Handle the response for response in page_result: print(response) @@ -949,6 +951,8 @@ def sample_list_topic_subscriptions(): # Make the request page_result = client.list_topic_subscriptions(request=request) + + # Handle the response for response in page_result: print(response) 
@@ -1056,6 +1060,8 @@ def sample_list_topic_snapshots(): # Make the request page_result = client.list_topic_snapshots(request=request) + + # Handle the response for response in page_result: print(response) @@ -1162,7 +1168,7 @@ def sample_delete_topic(): ) # Make the request - response = client.delete_topic(request=request) + client.delete_topic(request=request) Args: @@ -1251,7 +1257,7 @@ def sample_detach_subscription(): # Make the request response = client.detach_subscription(request=request) - # Handle response + # Handle the response print(response) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 80826bb114d8..68997e326c0a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -238,7 +238,7 @@ def sample_create_schema(): # Make the request response = client.create_schema(request=request) - # Handle response + # Handle the response print(response) Args: @@ -350,7 +350,7 @@ def sample_get_schema(): # Make the request response = client.get_schema(request=request) - # Handle response + # Handle the response print(response) Args: @@ -436,6 +436,8 @@ def sample_list_schemas(): # Make the request page_result = client.list_schemas(request=request) + + # Handle the response for response in page_result: print(response) @@ -532,7 +534,7 @@ def sample_delete_schema(): ) # Make the request - response = client.delete_schema(request=request) + client.delete_schema(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): @@ -619,7 +621,7 @@ def sample_validate_schema(): # Make the request response = client.validate_schema(request=request) - # Handle response + # Handle the response print(response) Args: @@ -718,7 +720,7 @@ def sample_validate_message(): # Make the 
request response = client.validate_message(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 41914efd817a..c296c121294f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -431,7 +431,7 @@ def sample_create_schema(): # Make the request response = client.create_schema(request=request) - # Handle response + # Handle the response print(response) Args: @@ -544,7 +544,7 @@ def sample_get_schema(): # Make the request response = client.get_schema(request=request) - # Handle response + # Handle the response print(response) Args: @@ -631,6 +631,8 @@ def sample_list_schemas(): # Make the request page_result = client.list_schemas(request=request) + + # Handle the response for response in page_result: print(response) @@ -728,7 +730,7 @@ def sample_delete_schema(): ) # Make the request - response = client.delete_schema(request=request) + client.delete_schema(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): @@ -816,7 +818,7 @@ def sample_validate_schema(): # Make the request response = client.validate_schema(request=request) - # Handle response + # Handle the response print(response) Args: @@ -916,7 +918,7 @@ def sample_validate_message(): # Make the request response = client.validate_message(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 2137f5cc9a79..9f76a8c183a5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -263,7 +263,7 @@ def sample_create_subscription(): # Make the request response = client.create_subscription(request=request) - # Handle response + # Handle the response print(response) Args: @@ -422,7 +422,7 @@ def sample_get_subscription(): # Make the request response = client.get_subscription(request=request) - # Handle response + # Handle the response print(response) Args: @@ -529,7 +529,7 @@ def sample_update_subscription(): # Make the request response = client.update_subscription(request=request) - # Handle response + # Handle the response print(response) Args: @@ -606,6 +606,8 @@ def sample_list_subscriptions(): # Make the request page_result = client.list_subscriptions(request=request) + + # Handle the response for response in page_result: print(response) @@ -719,7 +721,7 @@ def sample_delete_subscription(): ) # Make the request - response = client.delete_subscription(request=request) + client.delete_subscription(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): @@ -820,7 +822,7 @@ def sample_modify_ack_deadline(): ) # Make the request - response = client.modify_ack_deadline(request=request) + client.modify_ack_deadline(request=request) Args: request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): @@ -946,7 +948,7 @@ def sample_acknowledge(): ) # Make the request - response = client.acknowledge(request=request) + client.acknowledge(request=request) Args: request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): @@ -1055,7 +1057,7 @@ def sample_pull(): # Make the request response = client.pull(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1200,12 +1202,15 @@ def sample_streaming_pull(): # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] + def request_generator(): for request in requests: yield request # Make the request stream = client.streaming_pull(requests=request_generator()) + + # Handle the response for response in stream: print(response) @@ -1289,7 +1294,7 @@ def sample_modify_push_config(): ) # Make the request - response = client.modify_push_config(request=request) + client.modify_push_config(request=request) Args: request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): @@ -1403,7 +1408,7 @@ def sample_get_snapshot(): # Make the request response = client.get_snapshot(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1511,6 +1516,8 @@ def sample_list_snapshots(): # Make the request page_result = client.list_snapshots(request=request) + + # Handle the response for response in page_result: print(response) @@ -1641,7 +1648,7 @@ def sample_create_snapshot(): # Make the request response = client.create_snapshot(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1772,7 +1779,7 @@ def sample_update_snapshot(): # Make the request response = client.update_snapshot(request=request) - # Handle response + # Handle the response print(response) Args: @@ -1864,7 +1871,7 @@ def sample_delete_snapshot(): ) # Make the request - response = client.delete_snapshot(request=request) + client.delete_snapshot(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): @@ -1963,7 +1970,7 @@ def sample_seek(): # Make the request response = client.seek(request=request) - # Handle response + # Handle the response print(response) Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index ff5cda6652eb..0e6a17f0e961 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -502,7 +502,7 @@ def sample_create_subscription(): # Make the request response = client.create_subscription(request=request) - # Handle response + # Handle the response print(response) @@ -652,7 +652,7 @@ def sample_get_subscription(): # Make the request response = client.get_subscription(request=request) - # Handle response + # Handle the response print(response) @@ -750,7 +750,7 @@ def sample_update_subscription(): # Make the request response = client.update_subscription(request=request) - # Handle response + # Handle the response print(response) @@ -821,6 +821,8 @@ def sample_list_subscriptions(): # Make the request page_result = client.list_subscriptions(request=request) + + # Handle the response for response in page_result: print(response) @@ -925,7 +927,7 @@ def sample_delete_subscription(): ) # Make the request - response = client.delete_subscription(request=request) + client.delete_subscription(request=request) Args: @@ -1019,7 +1021,7 @@ def sample_modify_ack_deadline(): ) # Make the request - response = client.modify_ack_deadline(request=request) + client.modify_ack_deadline(request=request) Args: @@ -1138,7 +1140,7 @@ def sample_acknowledge(): ) # Make the request - response = client.acknowledge(request=request) + client.acknowledge(request=request) Args: @@ -1240,7 +1242,7 @@ def sample_pull(): # Make the request response = client.pull(request=request) - # Handle response + # Handle the response print(response) @@ -1376,12 +1378,15 @@ def sample_streaming_pull(): # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] + def request_generator(): for request in requests: yield request # Make the request stream = client.streaming_pull(requests=request_generator()) + + # Handle the response for response in stream: print(response) @@ -1455,7 +1460,7 @@ def sample_modify_push_config(): ) # Make the request - response = client.modify_push_config(request=request) + client.modify_push_config(request=request) Args: @@ -1562,7 +1567,7 @@ def sample_get_snapshot(): # Make the request response = client.get_snapshot(request=request) - # Handle response + # Handle the response print(response) @@ -1661,6 +1666,8 @@ def sample_list_snapshots(): # Make the request page_result = client.list_snapshots(request=request) + + # Handle the response for response in page_result: print(response) @@ -1782,7 +1789,7 @@ def sample_create_snapshot(): # Make the request response = client.create_snapshot(request=request) - # Handle response + # Handle the response print(response) @@ -1906,7 +1913,7 @@ def sample_update_snapshot(): # Make the request response = client.update_snapshot(request=request) - # Handle response + # Handle the response print(response) @@ -1992,7 +1999,7 @@ def sample_delete_snapshot(): ) # Make the request - response = client.delete_snapshot(request=request) + client.delete_snapshot(request=request) Args: @@ -2084,7 +2091,7 @@ def sample_seek(): # Make the request response = client.seek(request=request) - # Handle response + # Handle the response print(response) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 0ab7b944a556..570d61ca4260 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -232,7 +232,7 @@ library / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", ], - r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-pubsub['"]""", + r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-cloud-pubsub['"]""", 
"client_library_version=\g<1>'google-cloud-pubsub'", ) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py index 9b03af89b6ef..0414c9b9eb28 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py @@ -39,7 +39,7 @@ async def sample_create_topic(): # Make the request response = await client.create_topic(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_CreateTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py index 99af672fc633..b8fca80b91dc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py @@ -39,7 +39,7 @@ def sample_create_topic(): # Make the request response = client.create_topic(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py index e375b02c8a5e..f247e5d38775 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py @@ -37,7 +37,7 @@ async def sample_delete_topic(): ) # Make the request - response = await client.delete_topic(request=request) + await client.delete_topic(request=request) # [END pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py index 7b1932026874..d5bc0975b67a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py @@ -37,7 +37,7 @@ def sample_delete_topic(): ) # Make the request - response = client.delete_topic(request=request) + client.delete_topic(request=request) # [END pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py index b0b349d669da..4ca04e5330c6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py @@ -39,7 +39,7 @@ async def sample_detach_subscription(): # Make the request response = await client.detach_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py index b697598e98c9..e4deb54e5ac9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py @@ -39,7 +39,7 @@ def sample_detach_subscription(): # Make the request response = client.detach_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py index 485a61c99f79..e12616cd547f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py @@ -39,7 +39,7 @@ async def sample_get_topic(): # Make the request response = await client.get_topic(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_GetTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py index d6f28516dfbb..a774c4c65944 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py @@ -39,7 +39,7 @@ def sample_get_topic(): # Make the request response = client.get_topic(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_GetTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py index 71054e6e20cc..b6276384721d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py @@ -38,6 +38,8 @@ async def sample_list_topic_snapshots(): # Make the request page_result = client.list_topic_snapshots(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py index f49fb6615c52..fb89ff3b6fb9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py @@ -38,6 +38,8 @@ def sample_list_topic_snapshots(): # Make the request page_result = client.list_topic_snapshots(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py index 4edb308c9e0c..c61c6bc1e626 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py @@ -38,6 +38,8 @@ async def sample_list_topic_subscriptions(): # Make the request page_result = client.list_topic_subscriptions(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py index c90b1bd689eb..d559b186627f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py @@ -38,6 +38,8 @@ def sample_list_topic_subscriptions(): # Make the request page_result = client.list_topic_subscriptions(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py index 3be9178a07e3..2fd742d0252e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py @@ -38,6 +38,8 @@ async def sample_list_topics(): 
# Make the request page_result = client.list_topics(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py index d0fe084af5fb..1ea3aacb7f4f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py @@ -38,6 +38,8 @@ def sample_list_topics(): # Make the request page_result = client.list_topics(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py index d9d84eaa79f0..b783aa247bc1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py @@ -39,7 +39,7 @@ async def sample_publish(): # Make the request response = await client.publish(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_Publish_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py index 7a265a832c31..d477b936a864 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py @@ -39,7 +39,7 @@ def sample_publish(): # Make the request response = client.publish(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_Publish_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py index 8b83713b7c76..e11be0a1dd38 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py @@ -42,7 +42,7 @@ async def sample_update_topic(): # Make the request response = await client.update_topic(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py index 3863bf4e8fa8..7671debc2d34 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py @@ -42,7 +42,7 @@ def sample_update_topic(): # Make the request response = client.update_topic(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py index c78b58497726..629152cca865 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py @@ -43,7 +43,7 @@ async def sample_create_schema(): # Make the request response = await client.create_schema(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py index d3d2cbbc7e7a..79946fbd815d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py @@ -43,7 +43,7 @@ def sample_create_schema(): # Make the request response = client.create_schema(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py index 80f68c21b006..c5a58bbd975e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py 
@@ -37,7 +37,7 @@ async def sample_delete_schema(): ) # Make the request - response = await client.delete_schema(request=request) + await client.delete_schema(request=request) # [END pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py index f9711fb6e745..25c2a59c6996 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py @@ -37,7 +37,7 @@ def sample_delete_schema(): ) # Make the request - response = client.delete_schema(request=request) + client.delete_schema(request=request) # [END pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py index ae9fd6d68412..41e7e103d210 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py @@ -39,7 +39,7 @@ async def sample_get_schema(): # Make the request response = await client.get_schema(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py index 41e2fde074a9..8354c47f8069 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py @@ -39,7 +39,7 @@ def sample_get_schema(): # Make the request response = client.get_schema(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py index a92cb700e8fe..129a05905683 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py @@ -38,6 +38,8 @@ async def sample_list_schemas(): # Make the request page_result = client.list_schemas(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py index 58beed28c44e..ef7972bbcf48 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py @@ -38,6 +38,8 @@ def sample_list_schemas(): # Make the request page_result = 
client.list_schemas(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py index f32a665fa4ca..ce9829359f5b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py @@ -40,7 +40,7 @@ async def sample_validate_message(): # Make the request response = await client.validate_message(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py index c31c0c4ddfb8..e63a2d5bd258 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py @@ -40,7 +40,7 @@ def sample_validate_message(): # Make the request response = client.validate_message(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py index 
4b73371386bc..0f28f3f8641f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py @@ -43,7 +43,7 @@ async def sample_validate_schema(): # Make the request response = await client.validate_schema(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py index 17455ab2f212..bb0699e18b7c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py @@ -43,7 +43,7 @@ def sample_validate_schema(): # Make the request response = client.validate_schema(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py index 120b9e1f5d0c..7b84de8b49b4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py @@ -38,7 +38,7 @@ async def sample_acknowledge(): ) # Make the request - response = await client.acknowledge(request=request) + 
await client.acknowledge(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py index 9da8a5fd5755..d982f95834ea 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py @@ -38,7 +38,7 @@ def sample_acknowledge(): ) # Make the request - response = client.acknowledge(request=request) + client.acknowledge(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py index ca45144e8f09..4242ade0610c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py @@ -40,7 +40,7 @@ async def sample_create_snapshot(): # Make the request response = await client.create_snapshot(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py index f60d35d4dbb0..20624ebce357 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py @@ -40,7 +40,7 @@ def sample_create_snapshot(): # Make the request response = client.create_snapshot(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py index 095a7ff0eb99..812893aaf5b6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py @@ -40,7 +40,7 @@ async def sample_create_subscription(): # Make the request response = await client.create_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py index 7495a50c5a6d..853ac7089ec2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py @@ -40,7 +40,7 @@ def sample_create_subscription(): # Make the request response = client.create_subscription(request=request) - # Handle 
response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py index 2fd2f7df3b5d..d648536a4ed3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py @@ -37,7 +37,7 @@ async def sample_delete_snapshot(): ) # Make the request - response = await client.delete_snapshot(request=request) + await client.delete_snapshot(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py index 8315700fc64e..748962042ed2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py @@ -37,7 +37,7 @@ def sample_delete_snapshot(): ) # Make the request - response = client.delete_snapshot(request=request) + client.delete_snapshot(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py index 4394089f5127..f00b9676f04d 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py @@ -37,7 +37,7 @@ async def sample_delete_subscription(): ) # Make the request - response = await client.delete_subscription(request=request) + await client.delete_subscription(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py index 031880b9c522..b19a27ac2866 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py @@ -37,7 +37,7 @@ def sample_delete_subscription(): ) # Make the request - response = client.delete_subscription(request=request) + client.delete_subscription(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py index 8f1bf92f7e0e..bab081ce697f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py @@ -39,7 +39,7 @@ async def sample_get_snapshot(): # Make the request response = await client.get_snapshot(request=request) - # Handle response + # Handle the 
response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py index 4abcf326e755..396b71b12796 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py @@ -39,7 +39,7 @@ def sample_get_snapshot(): # Make the request response = client.get_snapshot(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py index 3908cb934124..a744d90e2f5c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py @@ -39,7 +39,7 @@ async def sample_get_subscription(): # Make the request response = await client.get_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py index 85065ae24559..f2a0e3cbb7a7 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py @@ -39,7 +39,7 @@ def sample_get_subscription(): # Make the request response = client.get_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py index 262303b38e69..392a89e1f225 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py @@ -38,6 +38,8 @@ async def sample_list_snapshots(): # Make the request page_result = client.list_snapshots(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py index 0acdc7f43b39..596e97adf748 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py @@ -38,6 +38,8 @@ def sample_list_snapshots(): # Make the request page_result = client.list_snapshots(request=request) + + # Handle the response for response in page_result: print(response) diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py index ae2f4c12fc74..6faff29c46d2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py @@ -38,6 +38,8 @@ async def sample_list_subscriptions(): # Make the request page_result = client.list_subscriptions(request=request) + + # Handle the response async for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py index a173fa081f46..beca817b24dd 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py @@ -38,6 +38,8 @@ def sample_list_subscriptions(): # Make the request page_result = client.list_subscriptions(request=request) + + # Handle the response for response in page_result: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py index acc6c2924d37..91c2ab3043fa 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py @@ -39,7 +39,7 @@ async def sample_modify_ack_deadline(): ) # Make the request - response = await client.modify_ack_deadline(request=request) + await client.modify_ack_deadline(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py index 359b10f080f5..f3d2b2032e4b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py @@ -39,7 +39,7 @@ def sample_modify_ack_deadline(): ) # Make the request - response = client.modify_ack_deadline(request=request) + client.modify_ack_deadline(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py index 6ea1fb283cf7..33281deb1f0c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py @@ -37,7 +37,7 @@ async def sample_modify_push_config(): ) # Make the request - response = await client.modify_push_config(request=request) + await client.modify_push_config(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async] diff 
--git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py index 2c127a9fd35c..5a64461b0c92 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py @@ -37,7 +37,7 @@ def sample_modify_push_config(): ) # Make the request - response = client.modify_push_config(request=request) + client.modify_push_config(request=request) # [END pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py index fb31d074533d..33ddc17a402d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py @@ -40,7 +40,7 @@ async def sample_pull(): # Make the request response = await client.pull(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_Pull_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py index 4a1d380fb1d3..787669650bfe 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py @@ 
-40,7 +40,7 @@ def sample_pull(): # Make the request response = client.pull(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_Pull_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py index cf2c53aee75e..365797a90e21 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py @@ -39,7 +39,7 @@ async def sample_seek(): # Make the request response = await client.seek(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_Seek_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py index 38d9f22b9baf..796316c70e84 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py @@ -39,7 +39,7 @@ def sample_seek(): # Make the request response = client.seek(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_Seek_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py index d3e1a5166852..b0b9077fc1d0 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py @@ -42,12 +42,15 @@ async def sample_streaming_pull(): # Here we create a generator that yields a single `request` for # demonstrative purposes. requests = [request] + def request_generator(): for request in requests: yield request # Make the request stream = await client.streaming_pull(requests=request_generator()) + + # Handle the response async for response in stream: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py index 8765f70427a0..b2cf9b1649de 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py @@ -42,12 +42,15 @@ def sample_streaming_pull(): # Here we create a generator that yields a single `request` for # demonstrative purposes. 
requests = [request] + def request_generator(): for request in requests: yield request # Make the request stream = client.streaming_pull(requests=request_generator()) + + # Handle the response for response in stream: print(response) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py index 3dc78eb4260a..a71c967744fb 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py @@ -38,7 +38,7 @@ async def sample_update_snapshot(): # Make the request response = await client.update_snapshot(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py index adfd50ef85e4..129adfc0364e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py @@ -38,7 +38,7 @@ def sample_update_snapshot(): # Make the request response = client.update_snapshot(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py index 59d32c59b1e7..b103a906d4b3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py @@ -43,7 +43,7 @@ async def sample_update_subscription(): # Make the request response = await client.update_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py index f29e54be5414..bd73a799439c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py @@ -43,7 +43,7 @@ def sample_update_subscription(): # Make the request response = client.update_subscription(request=request) - # Handle response + # Handle the response print(response) # [END pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json index c35ef2c6740f..b975a4c94e50 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json @@ -366,12 +366,12 @@ "regionTag": 
"pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -386,11 +386,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -408,12 +410,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -428,11 +430,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -451,12 +455,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -471,11 +475,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -493,12 +499,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -513,11 +519,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -536,12 +544,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopics_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -556,11 +564,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": 
"REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -578,12 +588,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopics_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -598,11 +608,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1062,12 +1074,12 @@ "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1082,11 +1094,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1104,12 +1118,12 @@ "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1124,11 +1138,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1936,12 +1952,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -1956,11 +1972,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -1978,12 +1996,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 
27, "type": "SHORT" }, @@ -1998,11 +2016,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2021,12 +2041,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2041,11 +2061,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2063,12 +2085,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync", "segments": [ { - "end": 43, + "end": 45, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 45, "start": 27, "type": "SHORT" }, @@ -2083,11 +2105,13 @@ "type": "REQUEST_INITIALIZATION" }, { + "end": 41, "start": 39, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 46, + "start": 42, "type": "RESPONSE_HANDLING" } ] @@ -2454,12 +2478,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async", "segments": [ { - "end": 53, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 56, "start": 27, "type": "SHORT" }, @@ -2469,16 +2493,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 49, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 57, + "start": 53, "type": "RESPONSE_HANDLING" } ] @@ -2496,12 +2522,12 @@ "regionTag": "pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync", "segments": [ { - "end": 53, + "end": 56, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 56, "start": 27, "type": "SHORT" }, @@ -2511,16 +2537,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 48, + "end": 49, "start": 34, "type": "REQUEST_INITIALIZATION" }, { - "start": 49, + "end": 52, + "start": 
50, "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 57, + "start": 53, "type": "RESPONSE_HANDLING" } ] From 40705b399505cc9a3176afb828176c8236e5bb20 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 8 Feb 2022 16:16:55 -0800 Subject: [PATCH 0764/1197] samples: create subscription with filtering enabled (#580) * samples: create subscription with filtering enabled * lint --- .../samples/snippets/subscriber.py | 45 +++++++++++++++++-- .../samples/snippets/subscriber_test.py | 26 +++++++++++ 2 files changed, 67 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index f44f82c4a376..dad841600d7f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -190,7 +190,7 @@ def create_push_subscription( def create_subscription_with_ordering( project_id: str, topic_id: str, subscription_id: str ) -> None: - """Create a subscription with dead letter policy.""" + """Create a subscription with ordering enabled.""" # [START pubsub_enable_subscription_ordering] from google.cloud import pubsub_v1 @@ -216,6 +216,32 @@ def create_subscription_with_ordering( # [END pubsub_enable_subscription_ordering] +def create_subscription_with_filtering( + project_id: str, topic_id: str, subscription_id: str, filter: str, +) -> None: + """Create a subscription with filtering enabled.""" + # [START pubsub_create_subscription_with_filter] + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing topic. 
+ # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + # filter = "attributes.author=\"unknown\"" + + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + with subscriber: + subscription = subscriber.create_subscription( + request={"name": subscription_path, "topic": topic_path, "filter": filter} + ) + print(f"Created subscription with filtering enabled: {subscription}") + # [END pubsub_create_subscription_with_filter] + + def delete_subscription(project_id: str, subscription_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] @@ -741,7 +767,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: # [END pubsub_dead_letter_delivery_attempt] -if __name__ == "__main__": +if __name__ == "__main__": # noqa parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) @@ -785,6 +811,13 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_subscription_with_ordering_parser.add_argument("topic_id") create_subscription_with_ordering_parser.add_argument("subscription_id") + create_subscription_with_filtering_parser = subparsers.add_parser( + "create-with-filtering", help=create_subscription_with_filtering.__doc__ + ) + create_subscription_with_filtering_parser.add_argument("topic_id") + create_subscription_with_filtering_parser.add_argument("subscription_id") + create_subscription_with_filtering_parser.add_argument("filter") + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -888,17 +921,21 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: ) elif args.command == "create-push": create_push_subscription( - 
args.project_id, args.topic_id, args.subscription_id, args.endpoint, + args.project_id, args.topic_id, args.subscription_id, args.endpoint ) elif args.command == "create-with-ordering": create_subscription_with_ordering( args.project_id, args.topic_id, args.subscription_id ) + elif args.command == "create-with-filtering": + create_subscription_with_filtering( + args.project_id, args.topic_id, args.subscription_id, args.filter + ) elif args.command == "delete": delete_subscription(args.project_id, args.subscription_id) elif args.command == "update-push": update_push_subscription( - args.project_id, args.topic_id, args.subscription_id, args.endpoint, + args.project_id, args.topic_id, args.subscription_id, args.endpoint ) elif args.command == "update-dead-letter-policy": update_subscription_with_dead_letter_policy( diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 6ad3da4faa88..9fcb1c1192fe 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -43,6 +43,7 @@ NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2" DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 UPDATED_MAX_DELIVERY_ATTEMPTS = 20 +FILTER = 'attributes.author="unknown"' C = TypeVar("C", bound=Callable[..., Any]) @@ -385,6 +386,31 @@ def test_create_subscription_with_ordering( assert "enable_message_ordering: true" in out +def test_create_subscription_with_filtering( + subscriber_client: pubsub_v1.SubscriberClient, + subscription_admin: str, + capsys: CaptureFixture[str], +) -> None: + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_subscription_with_filtering( + PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, FILTER + ) + + out, _ = 
capsys.readouterr() + assert "Created subscription with filtering enabled" in out + assert f"{subscription_admin}" in out + assert '"attributes.author=\\"unknown\\""' in out + + def test_create_push_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, From d56fd339fe9def47a29aa056a452194b0d4cb761 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 11 Feb 2022 14:09:17 -0500 Subject: [PATCH 0765/1197] chore: removing misspelled field, add correctly spelled field (#583) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix!: removing misspelled field, add correctly spelled field PiperOrigin-RevId: 428023165 Source-Link: https://github.com/googleapis/googleapis/commit/63d374da58fba5cf1308cf1df74b0b2afa1454f1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/6be9b6c6b6c85face18adbdbdd3fc06356810b4a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNmJlOWI2YzZiNmM4NWZhY2UxOGFkYmRiZGQzZmMwNjM1NjgxMGI0YSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 5750f495c2fc..a07abae0fead 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1123,7 +1123,7 @@ class StreamingPullResponse(proto.Message): received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. This will not be empty. 
- acknowlege_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): + acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): This field will only be set if ``enable_exactly_once_delivery`` is set to ``true``. modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): @@ -1187,8 +1187,8 @@ class SubscriptionProperties(proto.Message): received_messages = proto.RepeatedField( proto.MESSAGE, number=1, message="ReceivedMessage", ) - acknowlege_confirmation = proto.Field( - proto.MESSAGE, number=2, message=AcknowledgeConfirmation, + acknowledge_confirmation = proto.Field( + proto.MESSAGE, number=5, message=AcknowledgeConfirmation, ) modify_ack_deadline_confirmation = proto.Field( proto.MESSAGE, number=3, message=ModifyAckDeadlineConfirmation, From 36776129638cf0b683b966dac00a44dd88de9204 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 14 Feb 2022 16:53:01 +0100 Subject: [PATCH 0766/1197] chore(deps): update dependency pytest to v7.0.1 (#584) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index b54ed4d838da..1fc2a0443845 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.11.1 -pytest==7.0.0 +pytest==7.0.1 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 4012dad4f85beb24eedadd80beb6e03edc59ebf4 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Mon, 14 Feb 2022 21:20:56 -0800 Subject: [PATCH 0767/1197] samples: handle empty response in sync pull samples (#586) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git 
a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index dad841600d7f..7bc124ca8e9f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -605,6 +605,9 @@ def synchronous_pull(project_id: str, subscription_id: str) -> None: retry=retry.Retry(deadline=300), ) + if len(response.received_messages) == 0: + return + ack_ids = [] for received_message in response.received_messages: print(f"Received: {received_message.message.data}.") @@ -651,6 +654,9 @@ def synchronous_pull_with_lease_management( retry=retry.Retry(deadline=300), ) + if len(response.received_messages) == 0: + return + # Start a process for each message based on its size modulo 10. for message in response.received_messages: process = multiprocessing.Process( From 7f7d5b721c66d3c83c820f730b8a5c824795418f Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Tue, 15 Feb 2022 09:16:45 -0700 Subject: [PATCH 0768/1197] fix(deps): move libcst to extras (#585) `libcst` is only needed to run the fixup script. --- packages/google-cloud-pubsub/UPGRADING.md | 4 ++-- packages/google-cloud-pubsub/setup.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/UPGRADING.md b/packages/google-cloud-pubsub/UPGRADING.md index 3033e3fd4ad1..777dd7ae6963 100644 --- a/packages/google-cloud-pubsub/UPGRADING.md +++ b/packages/google-cloud-pubsub/UPGRADING.md @@ -24,10 +24,10 @@ The 2.0.0 release requires Python 3.6+. Almost all methods that send requests to the backend expect request objects. We provide a script that will convert most common use cases. -* Install the library +* Install the library with the `libcst` extra. 
```py -python3 -m pip install google-cloud-pubsub +python3 -m pip install google-cloud-pubsub[libcst] ``` * The script `fixup_pubsub_v1_keywords.py` is shipped with the library. It expects diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e35af7289954..666fa61aa8d9 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -34,11 +34,10 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - "libcst >= 0.3.10", "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ] -extras = {} +extras = {"libcst": "libcst >= 0.3.10"} # Setup boilerplate below this line. From a73e3c82e562ffb11f275c0c420f38ce6453592c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 25 Feb 2022 16:53:15 -0500 Subject: [PATCH 0769/1197] chore: use gapic-generator-python 0.63.4 (#589) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.63.4 chore: fix snippet region tag format chore: fix docstring code block formatting PiperOrigin-RevId: 430730865 Source-Link: https://github.com/googleapis/googleapis/commit/ea5800229f73f94fd7204915a86ed09dcddf429a Source-Link: https://github.com/googleapis/googleapis-gen/commit/ca893ff8af25fc7fe001de1405a517d80446ecca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2E4OTNmZjhhZjI1ZmM3ZmUwMDFkZTE0MDVhNTE3ZDgwNDQ2ZWNjYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: delete duplicates Co-authored-by: Owl Bot Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> --- 
.../services/publisher/async_client.py | 18 +- .../pubsub_v1/services/publisher/client.py | 27 +- .../services/schema_service/async_client.py | 12 +- .../services/schema_service/client.py | 18 +- .../services/subscriber/async_client.py | 32 +-- .../pubsub_v1/services/subscriber/client.py | 48 ++-- ...generated_publisher_create_topic_async.py} | 4 +- ..._generated_publisher_create_topic_sync.py} | 4 +- ...generated_publisher_delete_topic_async.py} | 4 +- ..._generated_publisher_delete_topic_sync.py} | 4 +- ...ed_publisher_detach_subscription_async.py} | 4 +- ...ted_publisher_detach_subscription_sync.py} | 4 +- ...v1_generated_publisher_get_topic_async.py} | 4 +- ..._v1_generated_publisher_get_topic_sync.py} | 4 +- ...d_publisher_list_topic_snapshots_async.py} | 4 +- ...ed_publisher_list_topic_snapshots_sync.py} | 4 +- ...blisher_list_topic_subscriptions_async.py} | 4 +- ...ublisher_list_topic_subscriptions_sync.py} | 4 +- ..._generated_publisher_list_topics_async.py} | 4 +- ...1_generated_publisher_list_topics_sync.py} | 4 +- ...b_v1_generated_publisher_publish_async.py} | 4 +- ...ub_v1_generated_publisher_publish_sync.py} | 4 +- ...generated_publisher_update_topic_async.py} | 4 +- ..._generated_publisher_update_topic_sync.py} | 4 +- ...ted_schema_service_create_schema_async.py} | 4 +- ...ated_schema_service_create_schema_sync.py} | 4 +- ...ted_schema_service_delete_schema_async.py} | 4 +- ...ated_schema_service_delete_schema_sync.py} | 4 +- ...erated_schema_service_get_schema_async.py} | 4 +- ...nerated_schema_service_get_schema_sync.py} | 4 +- ...ated_schema_service_list_schemas_async.py} | 4 +- ...rated_schema_service_list_schemas_sync.py} | 4 +- ..._schema_service_validate_message_async.py} | 4 +- ...d_schema_service_validate_message_sync.py} | 4 +- ...d_schema_service_validate_schema_async.py} | 4 +- ...ed_schema_service_validate_schema_sync.py} | 4 +- ...generated_subscriber_acknowledge_async.py} | 4 +- ..._generated_subscriber_acknowledge_sync.py} | 4 +- 
...rated_subscriber_create_snapshot_async.py} | 4 +- ...erated_subscriber_create_snapshot_sync.py} | 4 +- ...d_subscriber_create_subscription_async.py} | 4 +- ...ed_subscriber_create_subscription_sync.py} | 4 +- ...rated_subscriber_delete_snapshot_async.py} | 4 +- ...erated_subscriber_delete_snapshot_sync.py} | 4 +- ...d_subscriber_delete_subscription_async.py} | 4 +- ...ed_subscriber_delete_subscription_sync.py} | 4 +- ...enerated_subscriber_get_snapshot_async.py} | 4 +- ...generated_subscriber_get_snapshot_sync.py} | 4 +- ...ated_subscriber_get_subscription_async.py} | 4 +- ...rated_subscriber_get_subscription_sync.py} | 4 +- ...erated_subscriber_list_snapshots_async.py} | 4 +- ...nerated_subscriber_list_snapshots_sync.py} | 4 +- ...ed_subscriber_list_subscriptions_async.py} | 4 +- ...ted_subscriber_list_subscriptions_sync.py} | 4 +- ...d_subscriber_modify_ack_deadline_async.py} | 4 +- ...ed_subscriber_modify_ack_deadline_sync.py} | 4 +- ...ed_subscriber_modify_push_config_async.py} | 4 +- ...ted_subscriber_modify_push_config_sync.py} | 4 +- ...sub_v1_generated_subscriber_pull_async.py} | 4 +- ...bsub_v1_generated_subscriber_pull_sync.py} | 4 +- ...sub_v1_generated_subscriber_seek_async.py} | 4 +- ...bsub_v1_generated_subscriber_seek_sync.py} | 4 +- ...erated_subscriber_streaming_pull_async.py} | 4 +- ...nerated_subscriber_streaming_pull_sync.py} | 4 +- ...rated_subscriber_update_snapshot_async.py} | 4 +- ...erated_subscriber_update_snapshot_sync.py} | 4 +- ...d_subscriber_update_subscription_async.py} | 4 +- ...ed_subscriber_update_subscription_sync.py} | 4 +- .../snippet_metadata_pubsub_v1.json | 248 +++++++++--------- 69 files changed, 310 insertions(+), 341 deletions(-) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_create_topic_async.py => pubsub_v1_generated_publisher_create_topic_async.py} (91%) rename 
packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_create_topic_sync.py => pubsub_v1_generated_publisher_create_topic_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_delete_topic_async.py => pubsub_v1_generated_publisher_delete_topic_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py => pubsub_v1_generated_publisher_delete_topic_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py => pubsub_v1_generated_publisher_detach_subscription_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py => pubsub_v1_generated_publisher_detach_subscription_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_get_topic_async.py => pubsub_v1_generated_publisher_get_topic_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_get_topic_sync.py => pubsub_v1_generated_publisher_get_topic_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py => pubsub_v1_generated_publisher_list_topic_snapshots_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py => pubsub_v1_generated_publisher_list_topic_snapshots_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py => pubsub_v1_generated_publisher_list_topic_subscriptions_async.py} (90%) rename 
packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py => pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_list_topics_async.py => pubsub_v1_generated_publisher_list_topics_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_list_topics_sync.py => pubsub_v1_generated_publisher_list_topics_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_publish_async.py => pubsub_v1_generated_publisher_publish_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_publish_sync.py => pubsub_v1_generated_publisher_publish_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_update_topic_async.py => pubsub_v1_generated_publisher_update_topic_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_publisher_update_topic_sync.py => pubsub_v1_generated_publisher_update_topic_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_create_schema_async.py => pubsub_v1_generated_schema_service_create_schema_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py => pubsub_v1_generated_schema_service_create_schema_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py => pubsub_v1_generated_schema_service_delete_schema_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py => 
pubsub_v1_generated_schema_service_delete_schema_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_get_schema_async.py => pubsub_v1_generated_schema_service_get_schema_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py => pubsub_v1_generated_schema_service_get_schema_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py => pubsub_v1_generated_schema_service_list_schemas_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py => pubsub_v1_generated_schema_service_list_schemas_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_validate_message_async.py => pubsub_v1_generated_schema_service_validate_message_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py => pubsub_v1_generated_schema_service_validate_message_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py => pubsub_v1_generated_schema_service_validate_schema_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py => pubsub_v1_generated_schema_service_validate_schema_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py => pubsub_v1_generated_subscriber_acknowledge_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py => pubsub_v1_generated_subscriber_acknowledge_sync.py} (91%) 
rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py => pubsub_v1_generated_subscriber_create_snapshot_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py => pubsub_v1_generated_subscriber_create_snapshot_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py => pubsub_v1_generated_subscriber_create_subscription_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py => pubsub_v1_generated_subscriber_create_subscription_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py => pubsub_v1_generated_subscriber_delete_snapshot_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py => pubsub_v1_generated_subscriber_delete_snapshot_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py => pubsub_v1_generated_subscriber_delete_subscription_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py => pubsub_v1_generated_subscriber_delete_subscription_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py => pubsub_v1_generated_subscriber_get_snapshot_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py => pubsub_v1_generated_subscriber_get_snapshot_sync.py} (91%) rename 
packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py => pubsub_v1_generated_subscriber_get_subscription_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py => pubsub_v1_generated_subscriber_get_subscription_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py => pubsub_v1_generated_subscriber_list_snapshots_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py => pubsub_v1_generated_subscriber_list_snapshots_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py => pubsub_v1_generated_subscriber_list_subscriptions_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py => pubsub_v1_generated_subscriber_list_subscriptions_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py => pubsub_v1_generated_subscriber_modify_ack_deadline_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py => pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py => pubsub_v1_generated_subscriber_modify_push_config_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py => pubsub_v1_generated_subscriber_modify_push_config_sync.py} (90%) rename 
packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_pull_async.py => pubsub_v1_generated_subscriber_pull_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_pull_sync.py => pubsub_v1_generated_subscriber_pull_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_seek_async.py => pubsub_v1_generated_subscriber_seek_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_seek_sync.py => pubsub_v1_generated_subscriber_seek_sync.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py => pubsub_v1_generated_subscriber_streaming_pull_async.py} (92%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py => pubsub_v1_generated_subscriber_streaming_pull_sync.py} (92%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py => pubsub_v1_generated_subscriber_update_snapshot_async.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py => pubsub_v1_generated_subscriber_update_snapshot_sync.py} (90%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py => pubsub_v1_generated_subscriber_update_subscription_async.py} (91%) rename packages/google-cloud-pubsub/samples/generated_samples/{pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py => pubsub_v1_generated_subscriber_update_subscription_sync.py} (91%) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 66b9f71c807f..83b492eca1fa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -221,7 +221,7 @@ async def create_topic( (https://cloud.google.com/pubsub/docs/admin#resource_names). - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -325,7 +325,7 @@ async def update_topic( properties of a topic are not modifiable. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -409,7 +409,7 @@ async def publish( the topic does not exist. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -520,7 +520,7 @@ async def get_topic( ) -> pubsub.Topic: r"""Gets the configuration of a topic. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -619,7 +619,7 @@ async def list_topics( ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -731,7 +731,7 @@ async def list_topic_subscriptions( topic. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -849,7 +849,7 @@ async def list_topic_snapshots( in an existing subscription to the state captured by a snapshot. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -967,7 +967,7 @@ async def delete_topic( field is set to ``_deleted-topic_``. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1061,7 +1061,7 @@ async def detach_subscription( will stop. - .. code-block:: + .. 
code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 2492a0854045..caa88f1dfcf2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -469,8 +469,7 @@ def create_topic( (https://cloud.google.com/pubsub/docs/admin#resource_names). - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -566,8 +565,7 @@ def update_topic( properties of a topic are not modifiable. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -644,8 +642,7 @@ def publish( the topic does not exist. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -742,8 +739,7 @@ def get_topic( ) -> pubsub.Topic: r"""Gets the configuration of a topic. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -832,8 +828,7 @@ def list_topics( ) -> pagers.ListTopicsPager: r"""Lists matching topics. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -935,8 +930,7 @@ def list_topic_subscriptions( topic. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1044,8 +1038,7 @@ def list_topic_snapshots( in an existing subscription to the state captured by a snapshot. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1153,8 +1146,7 @@ def delete_topic( field is set to ``_deleted-topic_``. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1240,8 +1232,7 @@ def detach_subscription( will stop. - - .. code-block:: + .. 
code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 68997e326c0a..3e226deecda4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -218,7 +218,7 @@ async def create_schema( ) -> gp_schema.Schema: r"""Creates a schema. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -334,7 +334,7 @@ async def get_schema( ) -> schema.Schema: r"""Gets a schema. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -421,7 +421,7 @@ async def list_schemas( ) -> pagers.ListSchemasAsyncPager: r"""Lists schemas in a project. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -520,7 +520,7 @@ async def delete_schema( ) -> None: r"""Deletes a schema. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -601,7 +601,7 @@ async def validate_schema( ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -703,7 +703,7 @@ async def validate_message( ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. - .. code-block:: + .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index c296c121294f..e795e5cfa298 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -410,8 +410,7 @@ def create_schema( ) -> gp_schema.Schema: r"""Creates a schema. - - .. code-block:: + .. 
code-block:: python from google import pubsub_v1 @@ -527,8 +526,7 @@ def get_schema( ) -> schema.Schema: r"""Gets a schema. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -615,8 +613,7 @@ def list_schemas( ) -> pagers.ListSchemasPager: r"""Lists schemas in a project. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -715,8 +712,7 @@ def delete_schema( ) -> None: r"""Deletes a schema. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -797,8 +793,7 @@ def validate_schema( ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -900,8 +895,7 @@ def validate_message( ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 9f76a8c183a5..eb80fe7c8b52 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -246,7 +246,7 @@ async def create_subscription( request. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -406,7 +406,7 @@ async def get_subscription( ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -509,7 +509,7 @@ async def update_subscription( modifiable. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -591,7 +591,7 @@ async def list_subscriptions( ) -> pagers.ListSubscriptionsAsyncPager: r"""Lists matching subscriptions. - .. code-block:: + .. 
code-block:: python from google import pubsub_v1 @@ -707,7 +707,7 @@ async def delete_subscription( topic unless the same topic is specified. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -806,7 +806,7 @@ async def modify_ack_deadline( used for subsequent messages. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -933,7 +933,7 @@ async def acknowledge( error. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1040,7 +1040,7 @@ async def pull( pending for the given subscription. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1183,7 +1183,7 @@ def streaming_pull( configuring the underlying RPC channel. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1280,7 +1280,7 @@ async def modify_push_config( call regardless of changes to the ``PushConfig``. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1392,7 +1392,7 @@ async def get_snapshot( subscription to the state captured by a snapshot. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1501,7 +1501,7 @@ async def list_snapshots( in an existing subscription to the state captured by a snapshot. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1631,7 +1631,7 @@ async def create_snapshot( request. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1764,7 +1764,7 @@ async def update_snapshot( snapshot. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1857,7 +1857,7 @@ async def delete_snapshot( the same subscription is specified. - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1954,7 +1954,7 @@ async def seek( same topic. - .. code-block:: + .. 
code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 0e6a17f0e961..55797ec2e86a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -484,8 +484,7 @@ def create_subscription( request. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -635,8 +634,7 @@ def get_subscription( ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -729,8 +727,7 @@ def update_subscription( modifiable. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -805,8 +802,7 @@ def list_subscriptions( ) -> pagers.ListSubscriptionsPager: r"""Lists matching subscriptions. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -912,8 +908,7 @@ def delete_subscription( topic unless the same topic is specified. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1004,8 +999,7 @@ def modify_ack_deadline( used for subsequent messages. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1124,8 +1118,7 @@ def acknowledge( error. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1224,8 +1217,7 @@ def pull( pending for the given subscription. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1358,8 +1350,7 @@ def streaming_pull( configuring the underlying RPC channel. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1445,8 +1436,7 @@ def modify_push_config( call regardless of changes to the ``PushConfig``. - - .. code-block:: + .. 
code-block:: python from google import pubsub_v1 @@ -1550,8 +1540,7 @@ def get_snapshot( subscription to the state captured by a snapshot. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1650,8 +1639,7 @@ def list_snapshots( in an existing subscription to the state captured by a snapshot. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1771,8 +1759,7 @@ def create_snapshot( request. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1897,8 +1884,7 @@ def update_snapshot( snapshot. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -1984,8 +1970,7 @@ def delete_snapshot( the same subscription is specified. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 @@ -2074,8 +2059,7 @@ def seek( same topic. - - .. code-block:: + .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py index 0414c9b9eb28..590f52fabe85 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_CreateTopic_async] +# [START pubsub_v1_generated_Publisher_CreateTopic_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_create_topic(): # Handle the response print(response) -# 
[END pubsub_generated_pubsub_v1_Publisher_CreateTopic_async] +# [END pubsub_v1_generated_Publisher_CreateTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py index b8fca80b91dc..98fdc16ace08 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync] +# [START pubsub_v1_generated_Publisher_CreateTopic_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_create_topic(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync] +# [END pubsub_v1_generated_Publisher_CreateTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py index f247e5d38775..5c184293b774 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_async.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async] +# [START pubsub_v1_generated_Publisher_DeleteTopic_async] from google import pubsub_v1 @@ -40,4 +40,4 @@ async def sample_delete_topic(): await client.delete_topic(request=request) -# [END pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async] +# [END pubsub_v1_generated_Publisher_DeleteTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py index d5bc0975b67a..9ae341d44bfc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync] +# [START pubsub_v1_generated_Publisher_DeleteTopic_sync] from google import pubsub_v1 @@ -40,4 +40,4 @@ def sample_delete_topic(): client.delete_topic(request=request) -# [END pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync] +# [END pubsub_v1_generated_Publisher_DeleteTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py similarity index 90% 
rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py index 4ca04e5330c6..28605e555f44 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async] +# [START pubsub_v1_generated_Publisher_DetachSubscription_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_detach_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async] +# [END pubsub_v1_generated_Publisher_DetachSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py index e4deb54e5ac9..7092196d7f2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync] +# [START 
pubsub_v1_generated_Publisher_DetachSubscription_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_detach_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync] +# [END pubsub_v1_generated_Publisher_DetachSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py index e12616cd547f..b9cffa6bba4e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_GetTopic_async] +# [START pubsub_v1_generated_Publisher_GetTopic_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_get_topic(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_GetTopic_async] +# [END pubsub_v1_generated_Publisher_GetTopic_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py index a774c4c65944..fbe481bd29b4 100644 
--- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_get_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_GetTopic_sync] +# [START pubsub_v1_generated_Publisher_GetTopic_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_get_topic(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_GetTopic_sync] +# [END pubsub_v1_generated_Publisher_GetTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py index b6276384721d..e96cabf33c20 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async] +# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_list_topic_snapshots(): async for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async] +# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_async] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py index fb89ff3b6fb9..6d27bffd5722 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync] +# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_list_topic_snapshots(): for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync] +# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py index c61c6bc1e626..85443916ca2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async] +# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_list_topic_subscriptions(): async for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async] +# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py index d559b186627f..c64f2aaacdcc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync] +# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_list_topic_subscriptions(): for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync] +# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py index 2fd742d0252e..a00a89d7ba2c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_ListTopics_async] +# [START pubsub_v1_generated_Publisher_ListTopics_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_list_topics(): async for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Publisher_ListTopics_async] +# [END pubsub_v1_generated_Publisher_ListTopics_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py index 1ea3aacb7f4f..c888aa0dc4ef 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install 
google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_ListTopics_sync] +# [START pubsub_v1_generated_Publisher_ListTopics_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_list_topics(): for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Publisher_ListTopics_sync] +# [END pubsub_v1_generated_Publisher_ListTopics_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py index b783aa247bc1..3aee7dc3c315 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_Publish_async] +# [START pubsub_v1_generated_Publisher_Publish_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_publish(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_Publish_async] +# [END pubsub_v1_generated_Publisher_Publish_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py rename to 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py index d477b936a864..0f35f8377f68 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_publish_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_Publish_sync] +# [START pubsub_v1_generated_Publisher_Publish_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_publish(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_Publish_sync] +# [END pubsub_v1_generated_Publisher_Publish_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py index e11be0a1dd38..cb25a49c97e5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async] +# [START pubsub_v1_generated_Publisher_UpdateTopic_async] from google import pubsub_v1 @@ -45,4 +45,4 @@ async def sample_update_topic(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async] +# [END pubsub_v1_generated_Publisher_UpdateTopic_async] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py index 7671debc2d34..ba33d4a6bca0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync] +# [START pubsub_v1_generated_Publisher_UpdateTopic_sync] from google import pubsub_v1 @@ -45,4 +45,4 @@ def sample_update_topic(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync] +# [END pubsub_v1_generated_Publisher_UpdateTopic_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py index 629152cca865..fbbffef058df 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -23,7 +23,7 @@ 
# python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async] +# [START pubsub_v1_generated_SchemaService_CreateSchema_async] from google import pubsub_v1 @@ -46,4 +46,4 @@ async def sample_create_schema(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async] +# [END pubsub_v1_generated_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py index 79946fbd815d..b8db41e36c2c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync] +# [START pubsub_v1_generated_SchemaService_CreateSchema_sync] from google import pubsub_v1 @@ -46,4 +46,4 @@ def sample_create_schema(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync] +# [END pubsub_v1_generated_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py similarity index 90% rename from 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py index c5a58bbd975e..d6ab4fa73f1c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async] +# [START pubsub_v1_generated_SchemaService_DeleteSchema_async] from google import pubsub_v1 @@ -40,4 +40,4 @@ async def sample_delete_schema(): await client.delete_schema(request=request) -# [END pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async] +# [END pubsub_v1_generated_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py index 25c2a59c6996..136bd5842fa3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync] +# [START pubsub_v1_generated_SchemaService_DeleteSchema_sync] from google import 
pubsub_v1 @@ -40,4 +40,4 @@ def sample_delete_schema(): client.delete_schema(request=request) -# [END pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync] +# [END pubsub_v1_generated_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py index 41e7e103d210..99f56353eae0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_GetSchema_async] +# [START pubsub_v1_generated_SchemaService_GetSchema_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_get_schema(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_GetSchema_async] +# [END pubsub_v1_generated_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py index 8354c47f8069..cda1aa227d7b 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync] +# [START pubsub_v1_generated_SchemaService_GetSchema_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_get_schema(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync] +# [END pubsub_v1_generated_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py index 129a05905683..19c2c13950ad 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async] +# [START pubsub_v1_generated_SchemaService_ListSchemas_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_list_schemas(): async for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async] +# [END pubsub_v1_generated_SchemaService_ListSchemas_async] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py index ef7972bbcf48..6448bae0029a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync] +# [START pubsub_v1_generated_SchemaService_ListSchemas_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_list_schemas(): for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync] +# [END pubsub_v1_generated_SchemaService_ListSchemas_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py index ce9829359f5b..d2b7c4d1d41e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_async.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async] +# [START pubsub_v1_generated_SchemaService_ValidateMessage_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_validate_message(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async] +# [END pubsub_v1_generated_SchemaService_ValidateMessage_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py index e63a2d5bd258..88f9b3404e3b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync] +# [START pubsub_v1_generated_SchemaService_ValidateMessage_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_validate_message(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync] +# [END pubsub_v1_generated_SchemaService_ValidateMessage_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py index 0f28f3f8641f..3a1646618c07 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async] +# [START pubsub_v1_generated_SchemaService_ValidateSchema_async] from google import pubsub_v1 @@ -46,4 +46,4 @@ async def sample_validate_schema(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async] +# [END pubsub_v1_generated_SchemaService_ValidateSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py index bb0699e18b7c..46b3253db867 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -23,7 +23,7 @@ # python3 -m pip 
install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync] +# [START pubsub_v1_generated_SchemaService_ValidateSchema_sync] from google import pubsub_v1 @@ -46,4 +46,4 @@ def sample_validate_schema(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync] +# [END pubsub_v1_generated_SchemaService_ValidateSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py index 7b84de8b49b4..3c8c098e8bc6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async] +# [START pubsub_v1_generated_Subscriber_Acknowledge_async] from google import pubsub_v1 @@ -41,4 +41,4 @@ async def sample_acknowledge(): await client.acknowledge(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async] +# [END pubsub_v1_generated_Subscriber_Acknowledge_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py 
rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py index d982f95834ea..b733b6932c37 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync] +# [START pubsub_v1_generated_Subscriber_Acknowledge_sync] from google import pubsub_v1 @@ -41,4 +41,4 @@ def sample_acknowledge(): client.acknowledge(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync] +# [END pubsub_v1_generated_Subscriber_Acknowledge_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py index 4242ade0610c..9d777ec71e2d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async] +# [START pubsub_v1_generated_Subscriber_CreateSnapshot_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_create_snapshot(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async] +# [END 
pubsub_v1_generated_Subscriber_CreateSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py index 20624ebce357..e995d8cc879d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync] +# [START pubsub_v1_generated_Subscriber_CreateSnapshot_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_create_snapshot(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync] +# [END pubsub_v1_generated_Subscriber_CreateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py index 812893aaf5b6..74da998b5a96 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async] +# [START pubsub_v1_generated_Subscriber_CreateSubscription_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_create_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async] +# [END pubsub_v1_generated_Subscriber_CreateSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py index 853ac7089ec2..39aa04a2e73d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync] +# [START pubsub_v1_generated_Subscriber_CreateSubscription_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_create_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync] +# [END pubsub_v1_generated_Subscriber_CreateSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py index d648536a4ed3..314a97621f39 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async] +# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_async] from google import pubsub_v1 @@ -40,4 +40,4 @@ async def sample_delete_snapshot(): await client.delete_snapshot(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async] +# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py index 748962042ed2..984bdb18fcba 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START 
pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync] +# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] from google import pubsub_v1 @@ -40,4 +40,4 @@ def sample_delete_snapshot(): client.delete_snapshot(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync] +# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py index f00b9676f04d..c56bb42912c5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async] +# [START pubsub_v1_generated_Subscriber_DeleteSubscription_async] from google import pubsub_v1 @@ -40,4 +40,4 @@ async def sample_delete_subscription(): await client.delete_subscription(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async] +# [END pubsub_v1_generated_Subscriber_DeleteSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py similarity index 90% rename from 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py index b19a27ac2866..c2e2130c1b87 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync] +# [START pubsub_v1_generated_Subscriber_DeleteSubscription_sync] from google import pubsub_v1 @@ -40,4 +40,4 @@ def sample_delete_subscription(): client.delete_subscription(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync] +# [END pubsub_v1_generated_Subscriber_DeleteSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py index bab081ce697f..99c6cb5b1c11 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async] +# [START pubsub_v1_generated_Subscriber_GetSnapshot_async] from google import pubsub_v1 @@ 
-42,4 +42,4 @@ async def sample_get_snapshot(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async] +# [END pubsub_v1_generated_Subscriber_GetSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py index 396b71b12796..9ba4b4a0c3c7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync] +# [START pubsub_v1_generated_Subscriber_GetSnapshot_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_get_snapshot(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync] +# [END pubsub_v1_generated_Subscriber_GetSnapshot_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py index a744d90e2f5c..a3db7419d4d2 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async] +# [START pubsub_v1_generated_Subscriber_GetSubscription_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_get_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async] +# [END pubsub_v1_generated_Subscriber_GetSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py index f2a0e3cbb7a7..c6ed92a10b16 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync] +# [START pubsub_v1_generated_Subscriber_GetSubscription_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_get_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync] +# [END pubsub_v1_generated_Subscriber_GetSubscription_sync] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py index 392a89e1f225..5f12da1f53e1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async] +# [START pubsub_v1_generated_Subscriber_ListSnapshots_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_list_snapshots(): async for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async] +# [END pubsub_v1_generated_Subscriber_ListSnapshots_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py index 596e97adf748..7b5189965d32 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync] +# [START pubsub_v1_generated_Subscriber_ListSnapshots_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_list_snapshots(): for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync] +# [END pubsub_v1_generated_Subscriber_ListSnapshots_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py index 6faff29c46d2..6a90654d8017 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async] +# [START pubsub_v1_generated_Subscriber_ListSubscriptions_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_list_subscriptions(): async for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async] +# [END pubsub_v1_generated_Subscriber_ListSubscriptions_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py index beca817b24dd..d7b71e573165 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync] +# [START pubsub_v1_generated_Subscriber_ListSubscriptions_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_list_subscriptions(): for response in page_result: print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync] +# [END pubsub_v1_generated_Subscriber_ListSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py index 91c2ab3043fa..60e2715d1448 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -23,7 +23,7 @@ # python3 -m pip install 
google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async] +# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_modify_ack_deadline(): await client.modify_ack_deadline(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async] +# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py index f3d2b2032e4b..43247da2ca7e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync] +# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_modify_ack_deadline(): client.modify_ack_deadline(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync] +# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py similarity index 90% rename from 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py index 33281deb1f0c..aee1749050b4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async] +# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_async] from google import pubsub_v1 @@ -40,4 +40,4 @@ async def sample_modify_push_config(): await client.modify_push_config(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async] +# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py index 5a64461b0c92..93dd19d0c960 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync] +# [START 
pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] from google import pubsub_v1 @@ -40,4 +40,4 @@ def sample_modify_push_config(): client.modify_push_config(request=request) -# [END pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync] +# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py index 33ddc17a402d..16b84d68dcb4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_Pull_async] +# [START pubsub_v1_generated_Subscriber_Pull_async] from google import pubsub_v1 @@ -43,4 +43,4 @@ async def sample_pull(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_Pull_async] +# [END pubsub_v1_generated_Subscriber_Pull_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py index 787669650bfe..a62cbfa8f8ee 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_Pull_sync] +# [START pubsub_v1_generated_Subscriber_Pull_sync] from google import pubsub_v1 @@ -43,4 +43,4 @@ def sample_pull(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_Pull_sync] +# [END pubsub_v1_generated_Subscriber_Pull_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py index 365797a90e21..7b22250ee5a2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_Seek_async] +# [START pubsub_v1_generated_Subscriber_Seek_async] from google import pubsub_v1 @@ -42,4 +42,4 @@ async def sample_seek(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_Seek_async] +# [END pubsub_v1_generated_Subscriber_Seek_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py similarity index 91% rename from 
packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py index 796316c70e84..2608a84b8b8a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_Seek_sync] +# [START pubsub_v1_generated_Subscriber_Seek_sync] from google import pubsub_v1 @@ -42,4 +42,4 @@ def sample_seek(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_Seek_sync] +# [END pubsub_v1_generated_Subscriber_Seek_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py similarity index 92% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py index b0b9077fc1d0..ab050f8991e0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async] +# [START pubsub_v1_generated_Subscriber_StreamingPull_async] from google import pubsub_v1 @@ -54,4 +54,4 @@ def request_generator(): async for response in stream: print(response) -# [END 
pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async] +# [END pubsub_v1_generated_Subscriber_StreamingPull_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py similarity index 92% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py index b2cf9b1649de..b4d8616d8c63 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync] +# [START pubsub_v1_generated_Subscriber_StreamingPull_sync] from google import pubsub_v1 @@ -54,4 +54,4 @@ def request_generator(): for response in stream: print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync] +# [END pubsub_v1_generated_Subscriber_StreamingPull_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py index a71c967744fb..568849217682 100644 --- 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async] +# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_async] from google import pubsub_v1 @@ -41,4 +41,4 @@ async def sample_update_snapshot(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async] +# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py similarity index 90% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py index 129adfc0364e..ece25c54c4be 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync] +# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] from google import pubsub_v1 @@ -41,4 +41,4 @@ def sample_update_snapshot(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync] +# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] diff --git 
a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py index b103a906d4b3..59caa42901da 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async] +# [START pubsub_v1_generated_Subscriber_UpdateSubscription_async] from google import pubsub_v1 @@ -46,4 +46,4 @@ async def sample_update_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async] +# [END pubsub_v1_generated_Subscriber_UpdateSubscription_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py similarity index 91% rename from packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py rename to packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py index bd73a799439c..d0148bdaeb1b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -23,7 +23,7 @@ # python3 -m pip install google-cloud-pubsub -# [START pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync] +# [START pubsub_v1_generated_Subscriber_UpdateSubscription_sync] from google import pubsub_v1 @@ -46,4 +46,4 @@ def sample_update_subscription(): # Handle the response print(response) -# [END pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync] +# [END pubsub_v1_generated_Subscriber_UpdateSubscription_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json index b975a4c94e50..17a34496456e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json @@ -10,8 +10,8 @@ "shortName": "CreateTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_create_topic_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_CreateTopic_async", + "file": "pubsub_v1_generated_publisher_create_topic_async.py", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", "segments": [ { "end": 44, @@ -54,8 +54,8 @@ "shortName": "CreateTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_create_topic_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_CreateTopic_sync", + "file": "pubsub_v1_generated_publisher_create_topic_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", "segments": [ { "end": 44, @@ -99,8 +99,8 @@ "shortName": "DeleteTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_delete_topic_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_DeleteTopic_async", + "file": "pubsub_v1_generated_publisher_delete_topic_async.py", + "regionTag": 
"pubsub_v1_generated_Publisher_DeleteTopic_async", "segments": [ { "end": 42, @@ -141,8 +141,8 @@ "shortName": "DeleteTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_delete_topic_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_DeleteTopic_sync", + "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", "segments": [ { "end": 42, @@ -184,8 +184,8 @@ "shortName": "DetachSubscription" } }, - "file": "pubsub_generated_pubsub_v1_publisher_detach_subscription_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_DetachSubscription_async", + "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", "segments": [ { "end": 44, @@ -228,8 +228,8 @@ "shortName": "DetachSubscription" } }, - "file": "pubsub_generated_pubsub_v1_publisher_detach_subscription_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_DetachSubscription_sync", + "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", "segments": [ { "end": 44, @@ -273,8 +273,8 @@ "shortName": "GetTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_get_topic_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_GetTopic_async", + "file": "pubsub_v1_generated_publisher_get_topic_async.py", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", "segments": [ { "end": 44, @@ -317,8 +317,8 @@ "shortName": "GetTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_get_topic_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_GetTopic_sync", + "file": "pubsub_v1_generated_publisher_get_topic_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", "segments": [ { "end": 44, @@ -362,8 +362,8 @@ "shortName": "ListTopicSnapshots" } }, - "file": 
"pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_async", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", "segments": [ { "end": 45, @@ -406,8 +406,8 @@ "shortName": "ListTopicSnapshots" } }, - "file": "pubsub_generated_pubsub_v1_publisher_list_topic_snapshots_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSnapshots_sync", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", "segments": [ { "end": 45, @@ -451,8 +451,8 @@ "shortName": "ListTopicSubscriptions" } }, - "file": "pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_async", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", "segments": [ { "end": 45, @@ -495,8 +495,8 @@ "shortName": "ListTopicSubscriptions" } }, - "file": "pubsub_generated_pubsub_v1_publisher_list_topic_subscriptions_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopicSubscriptions_sync", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", "segments": [ { "end": 45, @@ -540,8 +540,8 @@ "shortName": "ListTopics" } }, - "file": "pubsub_generated_pubsub_v1_publisher_list_topics_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopics_async", + "file": "pubsub_v1_generated_publisher_list_topics_async.py", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", "segments": [ { "end": 45, @@ -584,8 +584,8 @@ "shortName": "ListTopics" } }, - "file": 
"pubsub_generated_pubsub_v1_publisher_list_topics_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_ListTopics_sync", + "file": "pubsub_v1_generated_publisher_list_topics_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", "segments": [ { "end": 45, @@ -629,8 +629,8 @@ "shortName": "Publish" } }, - "file": "pubsub_generated_pubsub_v1_publisher_publish_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_Publish_async", + "file": "pubsub_v1_generated_publisher_publish_async.py", + "regionTag": "pubsub_v1_generated_Publisher_Publish_async", "segments": [ { "end": 44, @@ -673,8 +673,8 @@ "shortName": "Publish" } }, - "file": "pubsub_generated_pubsub_v1_publisher_publish_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_Publish_sync", + "file": "pubsub_v1_generated_publisher_publish_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", "segments": [ { "end": 44, @@ -718,8 +718,8 @@ "shortName": "UpdateTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_update_topic_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_UpdateTopic_async", + "file": "pubsub_v1_generated_publisher_update_topic_async.py", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", "segments": [ { "end": 47, @@ -762,8 +762,8 @@ "shortName": "UpdateTopic" } }, - "file": "pubsub_generated_pubsub_v1_publisher_update_topic_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Publisher_UpdateTopic_sync", + "file": "pubsub_v1_generated_publisher_update_topic_sync.py", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", "segments": [ { "end": 47, @@ -807,8 +807,8 @@ "shortName": "CreateSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_create_schema_async.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_CreateSchema_async", + "file": "pubsub_v1_generated_schema_service_create_schema_async.py", + "regionTag": 
"pubsub_v1_generated_SchemaService_CreateSchema_async", "segments": [ { "end": 48, @@ -851,8 +851,8 @@ "shortName": "CreateSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_create_schema_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_CreateSchema_sync", + "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", "segments": [ { "end": 48, @@ -896,8 +896,8 @@ "shortName": "DeleteSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_delete_schema_async.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_async", + "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", "segments": [ { "end": 42, @@ -938,8 +938,8 @@ "shortName": "DeleteSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_delete_schema_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_DeleteSchema_sync", + "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", "segments": [ { "end": 42, @@ -981,8 +981,8 @@ "shortName": "GetSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_get_schema_async.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_GetSchema_async", + "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", "segments": [ { "end": 44, @@ -1025,8 +1025,8 @@ "shortName": "GetSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_get_schema_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_GetSchema_sync", + "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", "segments": [ { "end": 44, @@ -1070,8 +1070,8 @@ "shortName": "ListSchemas" } }, - "file": 
"pubsub_generated_pubsub_v1_schema_service_list_schemas_async.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ListSchemas_async", + "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", "segments": [ { "end": 45, @@ -1114,8 +1114,8 @@ "shortName": "ListSchemas" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_list_schemas_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ListSchemas_sync", + "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", "segments": [ { "end": 45, @@ -1159,8 +1159,8 @@ "shortName": "ValidateMessage" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_validate_message_async.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_async", + "file": "pubsub_v1_generated_schema_service_validate_message_async.py", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", "segments": [ { "end": 45, @@ -1203,8 +1203,8 @@ "shortName": "ValidateMessage" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_validate_message_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateMessage_sync", + "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", "segments": [ { "end": 45, @@ -1248,8 +1248,8 @@ "shortName": "ValidateSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_validate_schema_async.py", - "regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_async", + "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", "segments": [ { "end": 48, @@ -1292,8 +1292,8 @@ "shortName": "ValidateSchema" } }, - "file": "pubsub_generated_pubsub_v1_schema_service_validate_schema_sync.py", - 
"regionTag": "pubsub_generated_pubsub_v1_SchemaService_ValidateSchema_sync", + "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", "segments": [ { "end": 48, @@ -1337,8 +1337,8 @@ "shortName": "Acknowledge" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_acknowledge_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Acknowledge_async", + "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", "segments": [ { "end": 43, @@ -1379,8 +1379,8 @@ "shortName": "Acknowledge" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_acknowledge_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Acknowledge_sync", + "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", "segments": [ { "end": 43, @@ -1422,8 +1422,8 @@ "shortName": "CreateSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_create_snapshot_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_async", + "file": "pubsub_v1_generated_subscriber_create_snapshot_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_async", "segments": [ { "end": 45, @@ -1466,8 +1466,8 @@ "shortName": "CreateSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_create_snapshot_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSnapshot_sync", + "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", "segments": [ { "end": 45, @@ -1511,8 +1511,8 @@ "shortName": "CreateSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_create_subscription_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_async", + "file": 
"pubsub_v1_generated_subscriber_create_subscription_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", "segments": [ { "end": 45, @@ -1555,8 +1555,8 @@ "shortName": "CreateSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_create_subscription_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_CreateSubscription_sync", + "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", "segments": [ { "end": 45, @@ -1600,8 +1600,8 @@ "shortName": "DeleteSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_delete_snapshot_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_async", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", "segments": [ { "end": 42, @@ -1642,8 +1642,8 @@ "shortName": "DeleteSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_delete_snapshot_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSnapshot_sync", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", "segments": [ { "end": 42, @@ -1685,8 +1685,8 @@ "shortName": "DeleteSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_delete_subscription_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_async", + "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", "segments": [ { "end": 42, @@ -1727,8 +1727,8 @@ "shortName": "DeleteSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_delete_subscription_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_DeleteSubscription_sync", + "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", + 
"regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", "segments": [ { "end": 42, @@ -1770,8 +1770,8 @@ "shortName": "GetSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_get_snapshot_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_async", + "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", "segments": [ { "end": 44, @@ -1814,8 +1814,8 @@ "shortName": "GetSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_get_snapshot_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSnapshot_sync", + "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", "segments": [ { "end": 44, @@ -1859,8 +1859,8 @@ "shortName": "GetSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_get_subscription_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSubscription_async", + "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", "segments": [ { "end": 44, @@ -1903,8 +1903,8 @@ "shortName": "GetSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_get_subscription_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_GetSubscription_sync", + "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", "segments": [ { "end": 44, @@ -1948,8 +1948,8 @@ "shortName": "ListSnapshots" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_list_snapshots_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_async", + "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", "segments": [ { "end": 45, @@ -1992,8 +1992,8 @@ "shortName": "ListSnapshots" } 
}, - "file": "pubsub_generated_pubsub_v1_subscriber_list_snapshots_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSnapshots_sync", + "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", "segments": [ { "end": 45, @@ -2037,8 +2037,8 @@ "shortName": "ListSubscriptions" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_list_subscriptions_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_async", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", "segments": [ { "end": 45, @@ -2081,8 +2081,8 @@ "shortName": "ListSubscriptions" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_list_subscriptions_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ListSubscriptions_sync", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", "segments": [ { "end": 45, @@ -2126,8 +2126,8 @@ "shortName": "ModifyAckDeadline" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_async", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", "segments": [ { "end": 44, @@ -2168,8 +2168,8 @@ "shortName": "ModifyAckDeadline" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_modify_ack_deadline_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyAckDeadline_sync", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", "segments": [ { "end": 44, @@ -2211,8 +2211,8 @@ "shortName": "ModifyPushConfig" } }, - "file": 
"pubsub_generated_pubsub_v1_subscriber_modify_push_config_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_async", + "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", "segments": [ { "end": 42, @@ -2253,8 +2253,8 @@ "shortName": "ModifyPushConfig" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_modify_push_config_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_ModifyPushConfig_sync", + "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", "segments": [ { "end": 42, @@ -2296,8 +2296,8 @@ "shortName": "Pull" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_pull_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Pull_async", + "file": "pubsub_v1_generated_subscriber_pull_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", "segments": [ { "end": 45, @@ -2340,8 +2340,8 @@ "shortName": "Pull" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_pull_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Pull_sync", + "file": "pubsub_v1_generated_subscriber_pull_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_sync", "segments": [ { "end": 45, @@ -2385,8 +2385,8 @@ "shortName": "Seek" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_seek_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Seek_async", + "file": "pubsub_v1_generated_subscriber_seek_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", "segments": [ { "end": 44, @@ -2429,8 +2429,8 @@ "shortName": "Seek" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_seek_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_Seek_sync", + "file": "pubsub_v1_generated_subscriber_seek_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", "segments": [ { "end": 44, @@ 
-2474,8 +2474,8 @@ "shortName": "StreamingPull" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_streaming_pull_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_StreamingPull_async", + "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", "segments": [ { "end": 56, @@ -2518,8 +2518,8 @@ "shortName": "StreamingPull" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_streaming_pull_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_StreamingPull_sync", + "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_sync", "segments": [ { "end": 56, @@ -2563,8 +2563,8 @@ "shortName": "UpdateSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_update_snapshot_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_async", + "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", "segments": [ { "end": 43, @@ -2607,8 +2607,8 @@ "shortName": "UpdateSnapshot" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_update_snapshot_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSnapshot_sync", + "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", "segments": [ { "end": 43, @@ -2652,8 +2652,8 @@ "shortName": "UpdateSubscription" } }, - "file": "pubsub_generated_pubsub_v1_subscriber_update_subscription_async.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_async", + "file": "pubsub_v1_generated_subscriber_update_subscription_async.py", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", "segments": [ { "end": 48, @@ -2696,8 +2696,8 @@ "shortName": "UpdateSubscription" } }, - "file": 
"pubsub_generated_pubsub_v1_subscriber_update_subscription_sync.py", - "regionTag": "pubsub_generated_pubsub_v1_Subscriber_UpdateSubscription_sync", + "file": "pubsub_v1_generated_subscriber_update_subscription_sync.py", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", "segments": [ { "end": 48, From 6a607fccdf2b62dd17ec02bf68a9da822ff1dd8d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Feb 2022 05:30:19 -0500 Subject: [PATCH 0770/1197] chore: update copyright year to 2022 (#590) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update copyright year to 2022 PiperOrigin-RevId: 431037888 Source-Link: https://github.com/googleapis/googleapis/commit/b3397f5febbf21dfc69b875ddabaf76bee765058 Source-Link: https://github.com/googleapis/googleapis-gen/commit/510b54e1cdefd53173984df16645081308fe897e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTEwYjU0ZTFjZGVmZDUzMTczOTg0ZGYxNjY0NTA4MTMwOGZlODk3ZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/google/pubsub/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/__init__.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/services/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/async_client.py | 2 +- .../google/pubsub_v1/services/publisher/client.py | 2 +- .../google/pubsub_v1/services/publisher/pagers.py | 2 +- .../google/pubsub_v1/services/publisher/transports/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/transports/base.py | 2 +- .../google/pubsub_v1/services/publisher/transports/grpc.py | 2 +- 
.../pubsub_v1/services/publisher/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/schema_service/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/async_client.py | 2 +- .../google/pubsub_v1/services/schema_service/client.py | 2 +- .../google/pubsub_v1/services/schema_service/pagers.py | 2 +- .../pubsub_v1/services/schema_service/transports/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/base.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/grpc.py | 2 +- .../services/schema_service/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/subscriber/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/async_client.py | 2 +- .../google/pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/services/subscriber/pagers.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/base.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/grpc.py | 2 +- .../pubsub_v1/services/subscriber/transports/grpc_asyncio.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_async.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_sync.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_sync.py | 2 +- 
...sub_v1_generated_publisher_list_topic_subscriptions_async.py | 2 +- ...bsub_v1_generated_publisher_list_topic_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_sync.py | 2 +- .../pubsub_v1_generated_publisher_publish_async.py | 2 +- .../pubsub_v1_generated_publisher_publish_sync.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_sync.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_async.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_sync.py | 2 +- ...pubsub_v1_generated_schema_service_validate_message_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_message_sync.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_sync.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_async.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_sync.py | 2 
+- .../pubsub_v1_generated_subscriber_get_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_sync.py | 2 +- .../pubsub_v1_generated_subscriber_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_seek_async.py | 2 +- .../pubsub_v1_generated_subscriber_seek_sync.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_sync.py | 2 +- .../google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py | 2 +- packages/google-cloud-pubsub/tests/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/gapic/__init__.py | 2 +- .../google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 2 +- 100 files changed, 100 insertions(+), 100 deletions(-) diff --git 
a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index dfa5c7e0434f..9fc4e6feb824 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index bc78db26fcce..75c41bf215cc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py index 98e50425da44..56fb64a17a88 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 83b492eca1fa..b818e6cbbd04 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index caa88f1dfcf2..e4eb074c5f89 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index 50096517d3ab..1e095181f7be 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index 34066edc3e4a..e73fe8901f80 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 818912f1f95e..a3ea34af6fe5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index ca63b4445643..43fae36d62f4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 14bc0a15bbac..3729275f9c24 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py index 523d5b5f5a5c..2609e9ecd722 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 3e226deecda4..17c2bdaa6cf9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index e795e5cfa298..825aa39980a5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index 43d520a2686d..965778d459af 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py index 81ebf8d1c566..6c2d9460a073 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 5bf62b1bcf4e..97b85afa77df 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 4f8863da9a9c..6d5290e0dead 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 56450ac8552e..206b0eb26da4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py index 0961d69d1bca..1d7599467227 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index eb80fe7c8b52..265c7dc75a17 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 55797ec2e86a..01499a881a36 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index ffd17c840cac..cb3896bb59fa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index 023406c8f58c..f71cdecd4a4c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 1bd7b288e313..3cd6e7972bf1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index b6ef6c6c4617..130dca2c1f50 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 9e3e712125f5..a6a19fb6ae70 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index ebc8b53994de..888e2184aba7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index a07abae0fead..3b79e8eca06e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 3e389af46e38..b0a4d4e0a2f5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py index 590f52fabe85..e79f28c983b3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py index 98fdc16ace08..6a6f04a271f4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py index 5c184293b774..2a0148abbad1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py index 9ae341d44bfc..376a93ba085e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py index 28605e555f44..6fb8d4e7d3e7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py index 7092196d7f2f..7c36e4df1291 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py index b9cffa6bba4e..87904db2b378 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py index fbe481bd29b4..2f28cef0a8a2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py index e96cabf33c20..b6388f7f5722 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py index 6d27bffd5722..f7f3a61ec9bf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py index 85443916ca2f..59b35194b459 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py index c64f2aaacdcc..d7dffa0e2d3b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py index a00a89d7ba2c..0d0f10a9896f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py index c888aa0dc4ef..cffdd77a49af 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py index 3aee7dc3c315..98bfc618e64e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py index 0f35f8377f68..650440a78436 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py index cb25a49c97e5..473144d07caa 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py index ba33d4a6bca0..5a9838c2acfd 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py index fbbffef058df..9f979072528b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py index b8db41e36c2c..798194050d75 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py index d6ab4fa73f1c..6d5e8f7345ca 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py index 136bd5842fa3..2e516b97aa77 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py index 99f56353eae0..10db352c3abf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py index cda1aa227d7b..7d3cdf6d1d44 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py index 19c2c13950ad..a1c9be6ee6e2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py index 6448bae0029a..4604da242389 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py index d2b7c4d1d41e..94a699e53de0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py index 88f9b3404e3b..26e32efa13d8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py index 3a1646618c07..86647c7bd65f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py index 46b3253db867..102fb75edc03 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py index 3c8c098e8bc6..8f87241a1f7e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py index b733b6932c37..a56c55a33c73 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py index 9d777ec71e2d..6e2d4538771d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py index e995d8cc879d..b6145acb903f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py index 74da998b5a96..4c63c47cd594 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py index 39aa04a2e73d..6e37969f1f8c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py index 314a97621f39..26e2c7aa783e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py index 984bdb18fcba..f2538ddb0ca3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py index c56bb42912c5..f310d24b2869 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py index c2e2130c1b87..c601dd6633b2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py index 99c6cb5b1c11..3a56e4fbbe0e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py index 9ba4b4a0c3c7..3a6cd24ca08a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py index a3db7419d4d2..7ad71832664f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py index c6ed92a10b16..d883e085dfbe 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py index 5f12da1f53e1..edc7976a1293 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py index 7b5189965d32..e67ca2a39633 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py index 6a90654d8017..01c45577a7f2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py index d7b71e573165..272b0408d9cf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py index 60e2715d1448..b85c2033ff49 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py index 43247da2ca7e..ac0805db437c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py index aee1749050b4..662823a1d682 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py index 93dd19d0c960..a7499941c486 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py index 16b84d68dcb4..113f3ddfcffe 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py index a62cbfa8f8ee..abb47bfa16b2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py index 7b22250ee5a2..062c69409de9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py index 2608a84b8b8a..f28570e7c779 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py index ab050f8991e0..64c1e37483c0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py index b4d8616d8c63..0aa02fa40cdc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py index 568849217682..f07bca1f5d76 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py index ece25c54c4be..7afe32ec2259 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py index 59caa42901da..5a0410ec36cb 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py index d0148bdaeb1b..75d6e8a95299 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 039fa1e8f64b..3b6d3d378238 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -1,6 +1,6 @@ #! 
/usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/tests/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-pubsub/tests/__init__.py +++ b/packages/google-cloud-pubsub/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-pubsub/tests/unit/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py index 4de65971c238..e8e1c3845db5 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index ab9947d7a443..a8c963d5ac88 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index fc2090743709..ed5bca9a173a 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 7ba54cc6de39..c66d92404c81 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 739451e38fd44858a11fa8136d9003c8c3c5b493 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Mar 2022 11:49:19 -0500 Subject: [PATCH 0771/1197] chore(deps): update actions/setup-python action to v3 (#595) Source-Link: https://github.com/googleapis/synthtool/commit/571ee2c3b26182429eddcf115122ee545d7d3787 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:09af371bb7d8ebbaef620bfc76c0a3a42da96d75f4821409b54f3466d4ebbd3c Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 3 +-- packages/google-cloud-pubsub/.github/workflows/docs.yml | 4 ++-- packages/google-cloud-pubsub/.github/workflows/lint.yml | 2 +- packages/google-cloud-pubsub/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index b668c04d5d65..2a654fafc0d6 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 - + digest: sha256:09af371bb7d8ebbaef620bfc76c0a3a42da96d75f4821409b54f3466d4ebbd3c diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index f7b8344c4500..cca4e98bf236 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index 1e8b05c3d7ff..f687324ef2eb 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 074ee2504ca5..d3003e09e0c6 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: 
actions/checkout@v2 - name: Setup Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: "3.10" - name: Install coverage From f8b2c1196e6bbedb2102ee6b6a029a54d577112e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 3 Mar 2022 00:24:26 +0000 Subject: [PATCH 0772/1197] chore(deps): update actions/checkout action to v3 (#597) Source-Link: https://github.com/googleapis/synthtool/commit/ca879097772aeec2cbb971c3cea8ecc81522b68a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.github/workflows/docs.yml | 4 ++-- packages/google-cloud-pubsub/.github/workflows/lint.yml | 2 +- packages/google-cloud-pubsub/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 2a654fafc0d6..480226ac08a9 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:09af371bb7d8ebbaef620bfc76c0a3a42da96d75f4821409b54f3466d4ebbd3c + digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index cca4e98bf236..b46d7305d8cf 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index f687324ef2eb..f512a4960beb 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index d3003e09e0c6..e87fe5b7b79a 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: python: ['3.6', '3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Setup Python uses: actions/setup-python@v3 with: @@ -37,7 +37,7 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: 
Setup Python uses: actions/setup-python@v3 with: From dd95c226ee9eb195ac479a74fe89d839b4d0db28 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Fri, 4 Mar 2022 09:42:08 -0500 Subject: [PATCH 0773/1197] feat: exactly-once delivery support (#550) --- .../google/cloud/pubsub_v1/proto/pubsub.proto | 1 + .../subscriber/_protocol/dispatcher.py | 145 ++- .../pubsub_v1/subscriber/_protocol/leaser.py | 7 +- .../subscriber/_protocol/requests.py | 7 + .../_protocol/streaming_pull_manager.py | 289 +++++- .../cloud/pubsub_v1/subscriber/exceptions.py | 44 + .../cloud/pubsub_v1/subscriber/futures.py | 45 +- .../cloud/pubsub_v1/subscriber/message.py | 190 +++- .../google/cloud/pubsub_v1/types.py | 7 + packages/google-cloud-pubsub/noxfile.py | 10 +- packages/google-cloud-pubsub/setup.py | 1 + .../pubsub_v1/subscriber/test_dispatcher.py | 228 ++++- .../subscriber/test_futures_subscriber.py | 31 + .../unit/pubsub_v1/subscriber/test_leaser.py | 23 +- .../unit/pubsub_v1/subscriber/test_message.py | 103 ++- .../subscriber/test_messages_on_hold.py | 8 +- .../subscriber/test_streaming_pull_manager.py | 850 +++++++++++++++++- .../subscriber/test_subscriber_client.py | 2 +- 18 files changed, 1837 insertions(+), 154 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto index c5cb855d67fd..716c7ba05cb4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/proto/pubsub.proto @@ -1164,6 +1164,7 @@ message StreamingPullRequest { message StreamingPullResponse { // Subscription properties sent as part of the response. message SubscriptionProperties { + bool exactly_once_delivery_enabled = 1; // True iff message ordering is enabled for this subscription. 
bool message_ordering_enabled = 2; } diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 885210fc6a19..6ab5165d17b3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -15,17 +15,19 @@ from __future__ import absolute_import from __future__ import division +import functools import itertools import logging import math +import time import threading import typing from typing import List, Optional, Sequence, Union import warnings +from google.api_core.retry import exponential_sleep_generator from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests -from google.pubsub_v1 import types as gapic_types if typing.TYPE_CHECKING: # pragma: NO COVER import queue @@ -66,6 +68,14 @@ IDs at a time. """ +_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS = 1 +"""The time to wait for the first retry of failed acks and modacks when exactly-once +delivery is enabled.""" + +_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS = 10 * 60 +"""The maximum amount of time in seconds to retry failed acks and modacks when +exactly-once delivery is enabled.""" + class Dispatcher(object): def __init__(self, manager: "StreamingPullManager", queue: "queue.Queue"): @@ -168,17 +178,66 @@ def ack(self, items: Sequence[requests.AckRequest]) -> None: # We must potentially split the request into multiple smaller requests # to avoid the server-side max request size limit. 
- ack_ids = (item.ack_id for item in items) + items_gen = iter(items) + ack_ids_gen = (item.ack_id for item in items) total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) for _ in range(total_chunks): - request = gapic_types.StreamingPullRequest( - ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE) + ack_reqs_dict = { + req.ack_id: req + for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) + } + requests_completed, requests_to_retry = self._manager.send_unary_ack( + ack_ids=list(itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE)), + ack_reqs_dict=ack_reqs_dict, + ) + + # Remove the completed messages from lease management. + self.drop(requests_completed) + + # Retry on a separate thread so the dispatcher thread isn't blocked + # by sleeps. + if requests_to_retry: + self._start_retry_thread( + "Thread-RetryAcks", + functools.partial(self._retry_acks, requests_to_retry), + ) + + def _start_retry_thread(self, thread_name, thread_target): + # note: if the thread is *not* a daemon, a memory leak exists due to a cpython issue. + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 + # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 + retry_thread = threading.Thread( + name=thread_name, target=thread_target, daemon=True, + ) + # The thread finishes when the requests succeed or eventually fail with + # a back-end timeout error or other permanent failure. + retry_thread.start() + + def _retry_acks(self, requests_to_retry): + retry_delay_gen = exponential_sleep_generator( + initial=_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + maximum=_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + ) + while requests_to_retry: + time_to_wait = next(retry_delay_gen) + _LOGGER.debug( + f"Retrying {len(requests_to_retry)} ack(s) after delay of " + + str(time_to_wait) + + " seconds" ) - self._manager.send(request) + time.sleep(time_to_wait) - # Remove the message from lease management.
- self.drop(items) + ack_reqs_dict = {req.ack_id: req for req in requests_to_retry} + requests_completed, requests_to_retry = self._manager.send_unary_ack( + ack_ids=[req.ack_id for req in requests_to_retry], + ack_reqs_dict=ack_reqs_dict, + ) + assert ( + len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE + ), "Too many requests to be retried." + # Remove the completed messages from lease management. + self.drop(requests_completed) def drop( self, @@ -215,16 +274,58 @@ def modify_ack_deadline(self, items: Sequence[requests.ModAckRequest]) -> None: """ # We must potentially split the request into multiple smaller requests # to avoid the server-side max request size limit. - ack_ids = (item.ack_id for item in items) - seconds = (item.seconds for item in items) + items_gen = iter(items) + ack_ids_gen = (item.ack_id for item in items) + deadline_seconds_gen = (item.seconds for item in items) total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) for _ in range(total_chunks): - request = gapic_types.StreamingPullRequest( - modify_deadline_ack_ids=itertools.islice(ack_ids, _ACK_IDS_BATCH_SIZE), - modify_deadline_seconds=itertools.islice(seconds, _ACK_IDS_BATCH_SIZE), + ack_reqs_dict = { + req.ack_id: req + for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) + } + # no further work needs to be done for `requests_to_retry` + requests_completed, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=list( + itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE) + ), + modify_deadline_seconds=list( + itertools.islice(deadline_seconds_gen, _ACK_IDS_BATCH_SIZE) + ), + ack_reqs_dict=ack_reqs_dict, + ) + assert ( + len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE + ), "Too many requests to be retried." + + # Retry on a separate thread so the dispatcher thread isn't blocked + # by sleeps. 
+ if requests_to_retry: + self._start_retry_thread( + "Thread-RetryModAcks", + functools.partial(self._retry_modacks, requests_to_retry), + ) + + def _retry_modacks(self, requests_to_retry): + retry_delay_gen = exponential_sleep_generator( + initial=_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + maximum=_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, + ) + while requests_to_retry: + time_to_wait = next(retry_delay_gen) + _LOGGER.debug( + f"Retrying {len(requests_to_retry)} modack(s) after delay of " + + str(time_to_wait) + + " seconds" + ) + time.sleep(time_to_wait) + + ack_reqs_dict = {req.ack_id: req for req in requests_to_retry} + requests_completed, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=[req.ack_id for req in requests_to_retry], + modify_deadline_seconds=[req.seconds for req in requests_to_retry], + ack_reqs_dict=ack_reqs_dict, ) - self._manager.send(request) def nack(self, items: Sequence[requests.NackRequest]) -> None: """Explicitly deny receipt of messages. @@ -233,6 +334,20 @@ def nack(self, items: Sequence[requests.NackRequest]) -> None: items: The items to deny.
""" self.modify_ack_deadline( - [requests.ModAckRequest(ack_id=item.ack_id, seconds=0) for item in items] + [ + requests.ModAckRequest( + ack_id=item.ack_id, seconds=0, future=item.future + ) + for item in items + ] + ) + self.drop( + [ + requests.DropRequest( + ack_id=item.ack_id, + byte_size=item.byte_size, + ordering_key=item.ordering_key, + ) + for item in items + ] ) - self.drop([requests.DropRequest(*item) for item in items]) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index bfa1b5a492a0..de110e9920ef 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -181,7 +181,7 @@ def maintain_leases(self) -> None: for item in to_drop: leased_messages.pop(item.ack_id) - # Create a streaming pull request. + # Create a modack request. # We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. ack_ids = leased_messages.keys() @@ -194,9 +194,8 @@ def maintain_leases(self) -> None: # way for ``send_request`` to fail when the consumer # is inactive. assert self._manager.dispatcher is not None - self._manager.dispatcher.modify_ack_deadline( - [requests.ModAckRequest(ack_id, deadline) for ack_id in ack_ids] - ) + ack_id_gen = (ack_id for ack_id in ack_ids) + self._manager._send_lease_modacks(ack_id_gen, deadline) # Now wait an appropriate period of time and do this again. 
# diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index 7481d95a9f36..9cd387545909 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -12,8 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. +import typing from typing import NamedTuple, Optional +if typing.TYPE_CHECKING: # pragma: NO COVER + from google.cloud.pubsub_v1.subscriber import futures + # Namedtuples for management requests. Used by the Message class to communicate # items of work back to the policy. @@ -22,6 +26,7 @@ class AckRequest(NamedTuple): byte_size: int time_to_ack: float ordering_key: Optional[str] + future: Optional["futures.Future"] class DropRequest(NamedTuple): @@ -39,9 +44,11 @@ class LeaseRequest(NamedTuple): class ModAckRequest(NamedTuple): ack_id: str seconds: float + future: Optional["futures.Future"] class NackRequest(NamedTuple): ack_id: str byte_size: int ordering_key: Optional[str] + future: Optional["futures.Future"] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index d207718fc001..5a9d080265e4 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -20,7 +20,7 @@ import logging import threading import typing -from typing import Any, Callable, Iterable, List, Optional, Union +from typing import Any, Callable, Iterable, List, Optional, Tuple, Union import uuid import grpc # type: ignore @@ -34,12 +34,22 @@ from 
google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeError, + AcknowledgeStatus, +) import google.cloud.pubsub_v1.subscriber.message +from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber.scheduler import ThreadScheduler from google.pubsub_v1 import types as gapic_types +from grpc_status import rpc_status # type: ignore +from google.rpc.error_details_pb2 import ErrorInfo # type: ignore +from google.rpc import code_pb2 # type: ignore +from google.rpc import status_pb2 if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import subscriber + from google.protobuf.internal import containers _LOGGER = logging.getLogger(__name__) @@ -60,6 +70,11 @@ _RESUME_THRESHOLD = 0.8 """The load threshold below which to resume the incoming message stream.""" +_MIN_ACK_DEADLINE_SECS_WHEN_EXACTLY_ONCE_ENABLED = 60 +"""The minimum ack_deadline, in seconds, for when exactly_once is enabled for +a subscription. We do this to reduce premature ack expiration. +""" + def _wrap_as_exception(maybe_exception: Any) -> BaseException: """Wrap an object as a Python exception, if needed. 
@@ -104,6 +119,86 @@ def _wrap_callback_errors( on_callback_error(exc) + + +def _get_status(exc: exceptions.GoogleAPICallError,) -> Optional["status_pb2.Status"]: + if not exc.response: + _LOGGER.debug("No response obj in errored RPC call.") + return None + try: + return rpc_status.from_call(exc.response) + # Possible "If the gRPC call’s code or details are inconsistent + # with the status code and message inside of the + # google.rpc.status.Status" + except ValueError: + _LOGGER.debug("ValueError when parsing ErrorInfo.", exc_info=True) + return None + + +def _get_ack_errors( + exc: exceptions.GoogleAPICallError, +) -> Optional["containers.ScalarMap"]: + status = _get_status(exc) + if not status: + _LOGGER.debug("Unable to get status of errored RPC.") + return None + for detail in status.details: + info = ErrorInfo() + if not (detail.Is(ErrorInfo.DESCRIPTOR) and detail.Unpack(info)): + _LOGGER.debug("Unable to unpack ErrorInfo.") + return None + return info.metadata + return None + + +def _process_requests( + error_status: Optional["status_pb2.Status"], + ack_reqs_dict: "containers.ScalarMap", + errors_dict: Optional["containers.ScalarMap"], +): + """Process requests by referring to error_status and errors_dict. + + The errors returned by the server as `error_status` or in `errors_dict` + are used to complete the request futures in `ack_reqs_dict` (with a success + or exception) or to return requests for further retries.
+ """ + requests_completed = [] + requests_to_retry = [] + for ack_id in ack_reqs_dict: + if errors_dict and ack_id in errors_dict: + exactly_once_error = errors_dict[ack_id] + if exactly_once_error.startswith("TRANSIENT_"): + requests_to_retry.append(ack_reqs_dict[ack_id]) + else: + if exactly_once_error == "PERMANENT_FAILURE_INVALID_ACK_ID": + exc = AcknowledgeError(AcknowledgeStatus.INVALID_ACK_ID, info=None) + else: + exc = AcknowledgeError(AcknowledgeStatus.OTHER, exactly_once_error) + + future = ack_reqs_dict[ack_id].future + future.set_exception(exc) + requests_completed.append(ack_reqs_dict[ack_id]) + elif error_status: + # Only permanent errors are expected here b/c retriable errors are + # retried at the lower, GRPC level. + if error_status.code == code_pb2.PERMISSION_DENIED: + exc = AcknowledgeError(AcknowledgeStatus.PERMISSION_DENIED, info=None) + elif error_status.code == code_pb2.FAILED_PRECONDITION: + exc = AcknowledgeError(AcknowledgeStatus.FAILED_PRECONDITION, info=None) + else: + exc = AcknowledgeError(AcknowledgeStatus.OTHER, str(error_status)) + future = ack_reqs_dict[ack_id].future + future.set_exception(exc) + requests_completed.append(ack_reqs_dict[ack_id]) + elif ack_reqs_dict[ack_id].future: + future = ack_reqs_dict[ack_id].future + # success + future.set_result(AcknowledgeStatus.SUCCESS) + requests_completed.append(ack_reqs_dict[ack_id]) + else: + requests_completed.append(ack_reqs_dict[ack_id]) + + return requests_completed, requests_to_retry + + class StreamingPullManager(object): """The streaming pull manager coordinates pulling messages from Pub/Sub, leasing them, and scheduling them to be processed. 
@@ -148,6 +243,7 @@ def __init__( ): self._client = client self._subscription = subscription + self._exactly_once_enabled = False self._flow_control = flow_control self._use_legacy_flow_control = use_legacy_flow_control self._await_callbacks_on_shutdown = await_callbacks_on_shutdown @@ -159,6 +255,8 @@ def __init__( self._closing = threading.Lock() self._closed = False self._close_callbacks: List[Callable[["StreamingPullManager", Any], Any]] = [] + # Guarded by self._exactly_once_enabled_lock + self._send_new_ack_deadline = False # A shutdown thread is created on intentional shutdown. self._regular_shutdown_thread: Optional[threading.Thread] = None @@ -189,6 +287,12 @@ def __init__( # currently on hold. self._pause_resume_lock = threading.Lock() + # A lock guarding the self._exactly_once_enabled variable. We may also + # acquire the self._ack_deadline_lock while this lock is held, but not + # the reverse. So, we maintain a simple ordering of these two locks to + # prevent deadlocks. + self._exactly_once_enabled_lock = threading.Lock() + # A lock protecting the current ACK deadline used in the lease management. This # value can be potentially updated both by the leaser thread and by the message # consumer thread when invoking the internal _on_response() callback. @@ -273,6 +377,22 @@ def _obtain_ack_deadline(self, maybe_update: bool) -> float: histogram.MIN_ACK_DEADLINE, ) self._ack_deadline = min(self._ack_deadline, flow_control_setting) + + # If the user explicitly sets a min ack_deadline, respect it. + if self.flow_control.min_duration_per_lease_extension > 0: + # The setting in flow control could be too high, adjust if needed. + flow_control_setting = min( + self.flow_control.min_duration_per_lease_extension, + histogram.MAX_ACK_DEADLINE, + ) + self._ack_deadline = max(self._ack_deadline, flow_control_setting) + elif self._exactly_once_enabled: + # Higher minimum ack_deadline for subscriptions with + # exactly-once delivery enabled. 
+ self._ack_deadline = max( + self._ack_deadline, _MIN_ACK_DEADLINE_SECS_WHEN_EXACTLY_ONCE_ENABLED + ) + return self._ack_deadline @property @@ -311,7 +431,7 @@ def load(self) -> float: ) def add_close_callback( - self, callback: Callable[["StreamingPullManager", Any], Any], + self, callback: Callable[["StreamingPullManager", Any], Any] ) -> None: """Schedules a callable when the manager closes. @@ -435,49 +555,88 @@ def _schedule_message_on_hold( assert self._callback is not None self._scheduler.schedule(self._callback, msg) - def _send_unary_request(self, request: gapic_types.StreamingPullRequest) -> None: + def send_unary_ack( + self, ack_ids, ack_reqs_dict + ) -> Tuple[List[requests.AckRequest], List[requests.AckRequest]]: """Send a request using a separate unary request instead of over the stream. - Args: - request: The stream request to be mapped into unary requests. + If a RetryError occurs, the manager shutdown is triggered, and the + error is re-raised. """ - if request.ack_ids: - self._client.acknowledge( # type: ignore - subscription=self._subscription, ack_ids=list(request.ack_ids) + assert ack_ids + assert len(ack_ids) == len(ack_reqs_dict) + + error_status = None + ack_errors_dict = None + try: + self._client.acknowledge(subscription=self._subscription, ack_ids=ack_ids) + except exceptions.GoogleAPICallError as exc: + _LOGGER.debug( + "Exception while sending unary RPC. This is typically " + "non-fatal as stream requests are best-effort.", + exc_info=True, ) + error_status = _get_status(exc) + ack_errors_dict = _get_ack_errors(exc) + except exceptions.RetryError as exc: + status = status_pb2.Status() + status.code = code_pb2.DEADLINE_EXCEEDED + # Makes sure to complete futures so they don't block forever. + _process_requests(status, ack_reqs_dict, None) + _LOGGER.debug( + "RetryError while sending unary RPC. 
Waiting on a transient " + "error resolution for too long, will now trigger shutdown.", + exc_info=False, + ) + # The underlying channel has been suffering from a retryable error + # for too long, time to give up and shut the streaming pull down. + self._on_rpc_done(exc) + raise - if request.modify_deadline_ack_ids: + requests_completed, requests_to_retry = _process_requests( + error_status, ack_reqs_dict, ack_errors_dict + ) + return requests_completed, requests_to_retry + + def send_unary_modack( + self, modify_deadline_ack_ids, modify_deadline_seconds, ack_reqs_dict + ) -> Tuple[List[requests.ModAckRequest], List[requests.ModAckRequest]]: + """Send a request using a separate unary request instead of over the stream. + + If a RetryError occurs, the manager shutdown is triggered, and the + error is re-raised. + """ + assert modify_deadline_ack_ids + + error_status = None + modack_errors_dict = None + try: # Send ack_ids with the same deadline seconds together. deadline_to_ack_ids = collections.defaultdict(list) - for n, ack_id in enumerate(request.modify_deadline_ack_ids): - deadline = request.modify_deadline_seconds[n] + for n, ack_id in enumerate(modify_deadline_ack_ids): + deadline = modify_deadline_seconds[n] deadline_to_ack_ids[deadline].append(ack_id) for deadline, ack_ids in deadline_to_ack_ids.items(): - self._client.modify_ack_deadline( # type: ignore + self._client.modify_ack_deadline( subscription=self._subscription, ack_ids=ack_ids, ack_deadline_seconds=deadline, ) - - _LOGGER.debug("Sent request(s) over unary RPC.") - - def send(self, request: gapic_types.StreamingPullRequest) -> None: - """Queue a request to be sent to the RPC. - - If a RetryError occurs, the manager shutdown is triggered, and the - error is re-raised. - """ - try: - self._send_unary_request(request) - except exceptions.GoogleAPICallError: + except exceptions.GoogleAPICallError as exc: _LOGGER.debug( "Exception while sending unary RPC. 
This is typically " "non-fatal as stream requests are best-effort.", exc_info=True, ) + error_status = _get_status(exc) + modack_errors_dict = _get_ack_errors(exc) except exceptions.RetryError as exc: + status = status_pb2.Status() + status.code = code_pb2.DEADLINE_EXCEEDED + # Makes sure to complete futures so they don't block forever. + _process_requests(status, ack_reqs_dict, None) _LOGGER.debug( "RetryError while sending unary RPC. Waiting on a transient " "error resolution for too long, will now trigger shutdown.", @@ -488,14 +647,34 @@ def send(self, request: gapic_types.StreamingPullRequest) -> None: self._on_rpc_done(exc) raise + requests_completed, requests_to_retry = _process_requests( + error_status, ack_reqs_dict, modack_errors_dict + ) + return requests_completed, requests_to_retry + def heartbeat(self) -> bool: - """Sends an empty request over the streaming pull RPC. + """Sends a heartbeat request over the streaming pull RPC. + + The request is empty by default, but may contain the current ack_deadline + if the self._exactly_once_enabled flag has changed. Returns: If a heartbeat request has actually been sent. """ if self._rpc is not None and self._rpc.is_active: - self._rpc.send(gapic_types.StreamingPullRequest()) + send_new_ack_deadline = False + with self._exactly_once_enabled_lock: + send_new_ack_deadline = self._send_new_ack_deadline + self._send_new_ack_deadline = False + + if send_new_ack_deadline: + request = gapic_types.StreamingPullRequest( + stream_ack_deadline_seconds=self.ack_deadline + ) + else: + request = gapic_types.StreamingPullRequest() + + self._rpc.send(request) return True return False @@ -700,6 +879,42 @@ def _get_initial_request( # Return the initial request. 
return request + def _send_lease_modacks(self, ack_ids: Iterable[str], ack_deadline: float): + exactly_once_enabled = False + with self._exactly_once_enabled_lock: + exactly_once_enabled = self._exactly_once_enabled + if exactly_once_enabled: + items = [] + for ack_id in ack_ids: + future = futures.Future() + request = requests.ModAckRequest(ack_id, ack_deadline, future) + items.append(request) + + assert self._dispatcher is not None + self._dispatcher.modify_ack_deadline(items) + + for req in items: + try: + assert req.future is not None + req.future.result() + except AcknowledgeError: + _LOGGER.warning( + "AcknowledgeError when lease-modacking a message.", + exc_info=True, + ) + else: + items = [ + requests.ModAckRequest(ack_id, self.ack_deadline, None) + for ack_id in ack_ids + ] + assert self._dispatcher is not None + self._dispatcher.modify_ack_deadline(items) + + def _exactly_once_delivery_enabled(self) -> bool: + """Whether exactly-once delivery is enabled for the subscription.""" + with self._exactly_once_enabled_lock: + return self._exactly_once_enabled + def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: """Process all received Pub/Sub messages. @@ -730,15 +945,24 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: self._on_hold_bytes, ) + with self._exactly_once_enabled_lock: + if ( + response.subscription_properties.exactly_once_delivery_enabled + != self._exactly_once_enabled + ): + self._exactly_once_enabled = ( + response.subscription_properties.exactly_once_delivery_enabled + ) + # Update ack_deadline, whose minimum depends on self._exactly_once_enabled + # This method acquires the self._ack_deadline_lock lock. + self._obtain_ack_deadline(maybe_update=True) + self._send_new_ack_deadline = True + # Immediately (i.e. without waiting for the auto lease management) # modack the messages we received, as this tells the server that we've # received them. 
- items = [ - requests.ModAckRequest(message.ack_id, self.ack_deadline) - for message in received_messages - ] - assert self._dispatcher is not None - self._dispatcher.modify_ack_deadline(items) + ack_id_gen = (message.ack_id for message in received_messages) + self._send_lease_modacks(ack_id_gen, self.ack_deadline) with self._pause_resume_lock: assert self._scheduler is not None @@ -750,6 +974,7 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: received_message.ack_id, received_message.delivery_attempt, self._scheduler.queue, + self._exactly_once_delivery_enabled, ) self._messages_on_hold.put(message) self._on_hold_bytes += message.size diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py new file mode 100644 index 000000000000..a5dad31a9998 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/exceptions.py @@ -0,0 +1,44 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import + +from enum import Enum +from google.api_core.exceptions import GoogleAPICallError +from typing import Optional + + +class AcknowledgeStatus(Enum): + SUCCESS = 1 + PERMISSION_DENIED = 2 + FAILED_PRECONDITION = 3 + INVALID_ACK_ID = 4 + OTHER = 5 + + +class AcknowledgeError(GoogleAPICallError): + """Error during ack/modack/nack operation on exactly-once-enabled subscription.""" + + def __init__(self, error_code: AcknowledgeStatus, info: Optional[str]): + self.error_code = error_code + self.info = info + message = None + if info: + message = str(self.error_code) + " : " + str(self.info) + else: + message = str(self.error_code) + super(AcknowledgeError, self).__init__(message) + + +__all__ = ("AcknowledgeError",) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index a024ba698093..f043b7eb517e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -16,9 +16,10 @@ import typing from typing import Any +from typing import Union from google.cloud.pubsub_v1 import futures - +from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1.subscriber._protocol.streaming_pull_manager import ( @@ -80,3 +81,45 @@ def cancelled(self) -> bool: ``True`` if the subscription has been cancelled. """ return self.__cancelled + + +class Future(futures.Future): + """This future object is for subscribe-side calls. + + Calling :meth:`result` will resolve the future by returning the message + ID, unless an error occurs. + """ + + def cancel(self) -> bool: + """Actions in Pub/Sub generally may not be canceled. + + This method always returns ``False``. 
+ """ + return False + + def cancelled(self) -> bool: + """Actions in Pub/Sub generally may not be canceled. + + This method always returns ``False``. + """ + return False + + def result(self, timeout: Union[int, float] = None) -> AcknowledgeStatus: + """Return a success code or raise an exception. + + This blocks until the operation completes successfully and + returns the error code unless an exception is raised. + + Args: + timeout: The number of seconds before this call + times out and raises TimeoutError. + + Returns: + AcknowledgeStatus.SUCCESS if the operation succeeded. + + Raises: + concurrent.futures.TimeoutError: If the request times out. + AcknowledgeError: If the operation did not succeed for another + reason. + """ + return super().result(timeout=timeout) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 2d72bba57f3a..5744aa71ca00 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -19,9 +19,12 @@ import math import time import typing -from typing import Optional +from typing import Optional, Callable from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber import futures +from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus + if typing.TYPE_CHECKING: # pragma: NO COVER import datetime @@ -87,6 +90,7 @@ def __init__( ack_id: str, delivery_attempt: int, request_queue: "queue.Queue", + exactly_once_delivery_enabled_func: Callable[[], bool] = lambda: False, ): """Construct the Message. @@ -110,11 +114,14 @@ def __init__( request_queue: A queue provided by the policy that can accept requests; the policy is responsible for handling those requests. 
+ exactly_once_delivery_enabled_func: + A Callable that returns whether exactly-once delivery is currently-enabled. Defaults to a lambda that always returns False. """ self._message = message self._ack_id = ack_id self._delivery_attempt = delivery_attempt if delivery_attempt > 0 else None self._request_queue = request_queue + self._exactly_once_delivery_enabled_func = exactly_once_delivery_enabled_func self.message_id = message.message_id # The instantiation time is the time that this message @@ -233,7 +240,10 @@ def ack(self) -> None: .. warning:: Acks in Pub/Sub are best effort. You should always ensure that your processing code is idempotent, as you may - receive any given message more than once. + receive any given message more than once. If you need strong + guarantees about acks and re-deliveres, enable exactly-once + delivery on your subscription and use the `ack_with_response` + method instead. """ time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( @@ -242,9 +252,63 @@ def ack(self) -> None: byte_size=self.size, time_to_ack=time_to_ack, ordering_key=self.ordering_key, + future=None, ) ) + def ack_with_response(self) -> "futures.Future": + """Acknowledge the given message. + + Acknowledging a message in Pub/Sub means that you are done + with it, and it will not be delivered to this subscription again. + You should avoid acknowledging messages until you have + *finished* processing them, so that in the event of a failure, + you receive the message again. + + If exactly-once delivery is enabled on the subscription, the + future returned by this method tracks the state of acknowledgement + operation. If the future completes successfully, the message is + guaranteed NOT to be re-delivered. Otherwise, the future will + contain an exception with more details about the failure and the + message may be re-delivered. 
+ + If exactly-once delivery is NOT enabled on the subscription, the + future returns immediately with an AcknowledgeStatus.SUCCESS. + Since acks in Cloud Pub/Sub are best effort when exactly-once + delivery is disabled, the message may be re-delivered. Because + re-deliveries are possible, you should ensure that your processing + code is idempotent, as you may receive any given message more than + once. + + Returns: + A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` + instance that conforms to Python Standard library's + :class:`~concurrent.futures.Future` interface (but not an + instance of that class). Call `result()` to get the result + of the operation; upon success, a + pubsub_v1.subscriber.exceptions.AcknowledgeStatus.SUCCESS + will be returned and upon an error, an + pubsub_v1.subscriber.exceptions.AcknowledgeError exception + will be thrown. + """ + future = futures.Future() + req_future = None + if self._exactly_once_delivery_enabled_func(): + req_future = future + else: + future.set_result(AcknowledgeStatus.SUCCESS) + time_to_ack = math.ceil(time.time() - self._received_timestamp) + self._request_queue.put( + requests.AckRequest( + ack_id=self._ack_id, + byte_size=self.size, + time_to_ack=time_to_ack, + ordering_key=self.ordering_key, + future=req_future, + ) + ) + return future + def drop(self) -> None: """Release the message from lease management. @@ -269,8 +333,8 @@ def modify_ack_deadline(self, seconds: int) -> None: New deadline will be the given value of seconds from now. - The default implementation handles this for you; you should not need - to manually deal with setting ack deadlines. The exception case is + The default implementation handles automatically modacking received messages for you; + you should not need to manually deal with setting ack deadlines. The exception case is if you are implementing your own custom subclass of :class:`~.pubsub_v1.subcriber._consumer.Consumer`. 
@@ -281,16 +345,126 @@ def modify_ack_deadline(self, seconds: int) -> None: against. """ self._request_queue.put( - requests.ModAckRequest(ack_id=self._ack_id, seconds=seconds) + requests.ModAckRequest(ack_id=self._ack_id, seconds=seconds, future=None) ) + def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": + """Resets the deadline for acknowledgement and returns the response + status via a future. + + New deadline will be the given value of seconds from now. + + The default implementation handles automatically modacking received messages for you; + you should not need to manually deal with setting ack deadlines. The exception case is + if you are implementing your own custom subclass of + :class:`~.pubsub_v1.subscriber._consumer.Consumer`. + + If exactly-once delivery is enabled on the subscription, the + future returned by this method tracks the state of the + modify-ack-deadline operation. If the future completes successfully, + the message is guaranteed NOT to be re-delivered within the new deadline. + Otherwise, the future will contain an exception with more details about + the failure and the message will be redelivered according to its + currently-set ack deadline. + + If exactly-once delivery is NOT enabled on the subscription, the + future returns immediately with an AcknowledgeStatus.SUCCESS. + Since modify-ack-deadline operations in Cloud Pub/Sub are best effort + when exactly-once delivery is disabled, the message may be re-delivered + within the set deadline. + + Args: + seconds: + The number of seconds to set the lease deadline to. This should be + between 0 and 600. Due to network latency, values below 10 are advised + against. + Returns: + A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` + instance that conforms to Python Standard library's + :class:`~concurrent.futures.Future` interface (but not an + instance of that class).
Call `result()` to get the result + of the operation; upon success, a + pubsub_v1.subscriber.exceptions.AcknowledgeStatus.SUCCESS + will be returned and upon an error, an + pubsub_v1.subscriber.exceptions.AcknowledgeError exception + will be thrown. + + """ + future = futures.Future() + req_future = None + if self._exactly_once_delivery_enabled_func(): + req_future = future + else: + future.set_result(AcknowledgeStatus.SUCCESS) + + self._request_queue.put( + requests.ModAckRequest( + ack_id=self._ack_id, seconds=seconds, future=req_future + ) + ) + + return future + def nack(self) -> None: - """Decline to acknowldge the given message. + """Decline to acknowledge the given message. - This will cause the message to be re-delivered to the subscription. + This will cause the message to be re-delivered to subscribers. Re-deliveries + may take place immediately or after a delay, and may arrive at this subscriber + or another. """ self._request_queue.put( requests.NackRequest( - ack_id=self._ack_id, byte_size=self.size, ordering_key=self.ordering_key + ack_id=self._ack_id, + byte_size=self.size, + ordering_key=self.ordering_key, + future=None, + ) + ) + + def nack_with_response(self) -> "futures.Future": + """Decline to acknowledge the given message, returning the response status via + a future. + + This will cause the message to be re-delivered to subscribers. Re-deliveries + may take place immediately or after a delay, and may arrive at this subscriber + or another. + + If exactly-once delivery is enabled on the subscription, the + future returned by this method tracks the state of the + nack operation. If the future completes successfully, + the future's result will be an AcknowledgeStatus.SUCCESS. + Otherwise, the future will contain an exception with more details about + the failure. + + If exactly-once delivery is NOT enabled on the subscription, the + future returns immediately with an AcknowledgeStatus.SUCCESS. 
+ + Returns: + A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` + instance that conforms to Python Standard library's + :class:`~concurrent.futures.Future` interface (but not an + instance of that class). Call `result()` to get the result + of the operation; upon success, a + pubsub_v1.subscriber.exceptions.AcknowledgeStatus.SUCCESS + will be returned and upon an error, an + pubsub_v1.subscriber.exceptions.AcknowledgeError exception + will be thrown. + + """ + future = futures.Future() + req_future = None + if self._exactly_once_delivery_enabled_func(): + req_future = future + else: + future.set_result(AcknowledgeStatus.SUCCESS) + + self._request_queue.put( + requests.NackRequest( + ack_id=self._ack_id, + byte_size=self.size, + ordering_key=self.ordering_key, + future=req_future, ) ) + + return future diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index e843a6da91a3..109d4aadc8a7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -162,6 +162,13 @@ class FlowControl(NamedTuple): "before dropping it from the lease management." ) + min_duration_per_lease_extension: float = 0 + ( + "The min amount of time in seconds for a single lease extension attempt. " + "Must be between 10 and 600 (inclusive). Ignored by default, but set to " + "60 seconds if the subscription has exactly-once delivery enabled." + ) + max_duration_per_lease_extension: float = 0 # disabled by default ( "The max amount of time in seconds for a single lease extension attempt. 
" diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index e9fea8af8493..ba16d5a469e0 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -146,6 +146,9 @@ def default(session): session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. + # THe following flags are useful during development: + # "-s" -> show print() statement output + # "-k " -> filter test cases session.run( "py.test", "--quiet", @@ -156,6 +159,7 @@ def default(session): "--cov-config=.coveragerc", "--cov-report=", "--cov-fail-under=0", + "-s", os.path.join("tests", "unit"), *session.posargs, ) @@ -200,6 +204,9 @@ def system(session): session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. + # THe following flags are useful during development: + # "-s" -> show print() statement output + # "-k " -> filter test cases if system_test_exists: session.run( "py.test", @@ -226,7 +233,8 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + # Tip: The "-i" flag lets you ignore errors with specific files. 
+ session.run("coverage", "report", "-i", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 666fa61aa8d9..5423bbb0ddfe 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -36,6 +36,7 @@ "google-api-core[grpc] >= 1.28.0, <3.0.0dev", "proto-plus >= 1.7.1", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", + "grpcio-status >= 1.16.0", ] extras = {"libcst": "libcst >= 0.3.10"} diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 539ae40c7c2c..bbc6170e2496 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -15,13 +15,12 @@ import collections import queue import threading -import warnings from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.subscriber import futures import mock import pytest @@ -30,11 +29,11 @@ @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest("0", 0, 0, ""), "ack"), + (requests.AckRequest("0", 0, 0, "", None), "ack"), (requests.DropRequest("0", 0, ""), "drop"), (requests.LeaseRequest("0", 0, ""), "lease"), - (requests.ModAckRequest("0", 0), "modify_ack_deadline"), - (requests.NackRequest("0", 0, ""), "nack"), + (requests.ModAckRequest("0", 0, None), "modify_ack_deadline"), + (requests.NackRequest("0", 0, "", None), "nack"), ], ) def test_dispatch_callback_active_manager(item, method_name): @@ -54,11 +53,11 @@ def 
test_dispatch_callback_active_manager(item, method_name): @pytest.mark.parametrize( "item,method_name", [ - (requests.AckRequest("0", 0, 0, ""), "ack"), + (requests.AckRequest("0", 0, 0, "", None), "ack"), (requests.DropRequest("0", 0, ""), "drop"), (requests.LeaseRequest("0", 0, ""), "lease"), - (requests.ModAckRequest("0", 0), "modify_ack_deadline"), - (requests.NackRequest("0", 0, ""), "nack"), + (requests.ModAckRequest("0", 0, None), "modify_ack_deadline"), + (requests.NackRequest("0", 0, "", None), "nack"), ], ) def test_dispatch_callback_inactive_manager(item, method_name): @@ -76,24 +75,15 @@ def test_dispatch_callback_inactive_manager(item, method_name): method.assert_called_once_with([item]) -def test_dispatch_callback_inactive_manager_unknown_request(): +def test_unknown_request_type(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True ) - manager.is_active = False dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - FooType = type("FooType", (), {}) - items = [FooType()] - - with warnings.catch_warnings(record=True) as warned: - dispatcher_.dispatch_callback(items) - - assert len(warned) == 1 - assert issubclass(warned[0].category, RuntimeWarning) - warning_msg = str(warned[0].message) - assert "unknown request item" in warning_msg - assert "FooType" in warning_msg + items = ["a random string, not a known request type"] + manager.send_unary_ack.return_value = (items, []) + dispatcher_.dispatch_callback(items) def test_ack(): @@ -104,13 +94,18 @@ def test_ack(): items = [ requests.AckRequest( - ack_id="ack_id_string", byte_size=0, time_to_ack=20, ordering_key="" + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, ) ] + manager.send_unary_ack.return_value = (items, []) dispatcher_.ack(items) - manager.send.assert_called_once_with( - gapic_types.StreamingPullRequest(ack_ids=["ack_id_string"]) + manager.send_unary_ack.assert_called_once_with( + 
ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} ) manager.leaser.remove.assert_called_once_with(items) @@ -126,13 +121,18 @@ def test_ack_no_time(): items = [ requests.AckRequest( - ack_id="ack_id_string", byte_size=0, time_to_ack=None, ordering_key="" + ack_id="ack_id_string", + byte_size=0, + time_to_ack=None, + ordering_key="", + future=None, ) ] + manager.send_unary_ack.return_value = (items, []) dispatcher_.ack(items) - manager.send.assert_called_once_with( - gapic_types.StreamingPullRequest(ack_ids=["ack_id_string"]) + manager.send_unary_ack.assert_called_once_with( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} ) manager.ack_histogram.add.assert_not_called() @@ -147,27 +147,152 @@ def test_ack_splitting_large_payload(): items = [ # use realistic lengths for ACK IDs (max 176 bytes) requests.AckRequest( - ack_id=str(i).zfill(176), byte_size=0, time_to_ack=20, ordering_key="" + ack_id=str(i).zfill(176), + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, ) for i in range(5001) ] + manager.send_unary_ack.return_value = (items, []) dispatcher_.ack(items) - calls = manager.send.call_args_list + calls = manager.send_unary_ack.call_args_list assert len(calls) == 3 all_ack_ids = {item.ack_id for item in items} sent_ack_ids = collections.Counter() for call in calls: - message = call.args[0] - assert message._pb.ByteSize() <= 524288 # server-side limit (2**19) - sent_ack_ids.update(message.ack_ids) + ack_ids = call[1]["ack_ids"] + assert len(ack_ids) <= dispatcher._ACK_IDS_BATCH_SIZE + sent_ack_ids.update(ack_ids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been ACK-ed assert sent_ack_ids.most_common(1)[0][1] == 1 # each message ACK-ed exactly once +def test_retry_acks_in_new_thread(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [ + 
requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + ) + ] + # failure triggers creation of new retry thread + manager.send_unary_ack.side_effect = [([], items)] + with mock.patch("time.sleep", return_value=None): + with mock.patch.object(threading, "Thread", autospec=True) as Thread: + dispatcher_.ack(items) + + assert len(Thread.mock_calls) == 2 + ctor_call = Thread.mock_calls[0] + assert ctor_call.kwargs["name"] == "Thread-RetryAcks" + assert ctor_call.kwargs["target"].args[0] == items + assert ctor_call.kwargs["daemon"] + + +def test_retry_acks(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + ) + ] + # first and second `send_unary_ack` calls fail, third one succeeds + manager.send_unary_ack.side_effect = [([], items), ([], items), (items, [])] + with mock.patch("time.sleep", return_value=None): + dispatcher_._retry_acks(items) + + manager.send_unary_ack.assert_has_calls( + [ + mock.call( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} + ), + mock.call( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} + ), + mock.call( + ack_ids=["ack_id_string"], ack_reqs_dict={"ack_id_string": items[0]} + ), + ] + ) + + +def test_retry_modacks_in_new_thread(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=20, future=f,)] + # failure triggers creation of new retry thread + manager.send_unary_modack.side_effect = [([], items)] + with mock.patch("time.sleep", return_value=None): + with 
mock.patch.object(threading, "Thread", autospec=True) as Thread: + dispatcher_.modify_ack_deadline(items) + + assert len(Thread.mock_calls) == 2 + ctor_call = Thread.mock_calls[0] + assert ctor_call.kwargs["name"] == "Thread-RetryModAcks" + assert ctor_call.kwargs["target"].args[0] == items + assert ctor_call.kwargs["daemon"] + + +def test_retry_modacks(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + f = futures.Future() + items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=20, future=f,)] + # first and second calls fail, third one succeeds + manager.send_unary_modack.side_effect = [([], items), ([], items), (items, [])] + with mock.patch("time.sleep", return_value=None): + dispatcher_._retry_modacks(items) + + manager.send_unary_modack.assert_has_calls( + [ + mock.call( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[20], + ack_reqs_dict={"ack_id_string": items[0]}, + ), + mock.call( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[20], + ack_reqs_dict={"ack_id_string": items[0]}, + ), + mock.call( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[20], + ack_reqs_dict={"ack_id_string": items[0]}, + ), + ] + ) + + def test_lease(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -224,14 +349,21 @@ def test_nack(): dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) items = [ - requests.NackRequest(ack_id="ack_id_string", byte_size=10, ordering_key="") + requests.NackRequest( + ack_id="ack_id_string", byte_size=10, ordering_key="", future=None + ) ] + manager.send_unary_modack.return_value = (items, []) dispatcher_.nack(items) - manager.send.assert_called_once_with( - gapic_types.StreamingPullRequest( - modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[0] - ) + 
manager.send_unary_modack.assert_called_once_with( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[0], + ack_reqs_dict={ + "ack_id_string": requests.ModAckRequest( + ack_id="ack_id_string", seconds=0, future=None + ) + }, ) @@ -241,13 +373,14 @@ def test_modify_ack_deadline(): ) dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) - items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=60)] + items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=60, future=None)] + manager.send_unary_modack.return_value = (items, []) dispatcher_.modify_ack_deadline(items) - manager.send.assert_called_once_with( - gapic_types.StreamingPullRequest( - modify_deadline_ack_ids=["ack_id_string"], modify_deadline_seconds=[60] - ) + manager.send_unary_modack.assert_called_once_with( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[60], + ack_reqs_dict={"ack_id_string": items[0]}, ) @@ -259,21 +392,22 @@ def test_modify_ack_deadline_splitting_large_payload(): items = [ # use realistic lengths for ACK IDs (max 176 bytes) - requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60) + requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60, future=None) for i in range(5001) ] + manager.send_unary_modack.return_value = (items, []) dispatcher_.modify_ack_deadline(items) - calls = manager.send.call_args_list + calls = manager.send_unary_modack.call_args_list assert len(calls) == 3 all_ack_ids = {item.ack_id for item in items} sent_ack_ids = collections.Counter() for call in calls: - message = call.args[0] - assert message._pb.ByteSize() <= 524288 # server-side limit (2**19) - sent_ack_ids.update(message.modify_deadline_ack_ids) + modack_ackids = call[1]["modify_deadline_ack_ids"] + assert len(modack_ackids) <= dispatcher._ACK_IDS_BATCH_SIZE + sent_ack_ids.update(modack_ackids) assert set(sent_ack_ids) == all_ack_ids # all messages should have been MODACK-ed assert sent_ack_ids.most_common(1)[0][1] == 1 # each 
message MODACK-ed exactly once diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 5411674c0082..9f71109e711a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -19,6 +19,10 @@ from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeError, + AcknowledgeStatus, +) class TestStreamingPullFuture(object): @@ -76,3 +80,30 @@ def test_cancel(self): manager.close.assert_called_once() assert future.cancelled() + + +class TestFuture(object): + def test_cancel(self): + future = futures.Future() + assert future.cancel() is False + + def test_cancelled(self): + future = futures.Future() + assert future.cancelled() is False + + def test_result_on_success(self): + future = futures.Future() + future.set_result(AcknowledgeStatus.SUCCESS) + assert future.result() == AcknowledgeStatus.SUCCESS + + def test_result_on_failure(self): + future = futures.Future() + future.set_exception( + AcknowledgeError( + AcknowledgeStatus.PERMISSION_DENIED, "Something bad happened." + ) + ) + with pytest.raises(AcknowledgeError) as e: + future.result() + assert e.value.error_code == AcknowledgeStatus.PERMISSION_DENIED + assert e.value.info == "Something bad happened." 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index f389e5205fe7..890c3c947cce 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -102,7 +102,7 @@ def test_maintain_leases_inactive_manager(caplog): leaser_.maintain_leases() # Leases should still be maintained even if the manager is inactive. - manager.dispatcher.modify_ack_deadline.assert_called() + manager._send_lease_modacks.assert_called() assert "exiting" in caplog.text @@ -138,9 +138,11 @@ def test_maintain_leases_ack_ids(): leaser_.maintain_leases() - manager.dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest(ack_id="my ack id", seconds=10)] - ) + assert len(manager._send_lease_modacks.mock_calls) == 1 + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["my ack id"] + assert call.args[1] == 10 def test_maintain_leases_no_ack_ids(): @@ -182,14 +184,11 @@ def test_maintain_leases_outdated_items(time): leaser_.maintain_leases() # ack2, ack3, and ack4 should be renewed. ack1 should've been dropped - modacks = manager.dispatcher.modify_ack_deadline.call_args.args[0] - expected = [ - requests.ModAckRequest(ack_id="ack2", seconds=10), - requests.ModAckRequest(ack_id="ack3", seconds=10), - requests.ModAckRequest(ack_id="ack4", seconds=10), - ] - # Use sorting to allow for ordering variance. 
- assert sorted(modacks) == sorted(expected) + assert len(manager._send_lease_modacks.mock_calls) == 1 + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["ack2", "ack3", "ack4"] + assert call.args[1] == 10 manager.dispatcher.drop.assert_called_once_with( [requests.DropRequest(ack_id="ack1", byte_size=50, ordering_key="")] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index e3c14c93ca41..f5c7bf3c7fab 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1.subscriber._protocol import requests from google.protobuf import timestamp_pb2 from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) @@ -32,7 +33,14 @@ PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 -def create_message(data, ack_id="ACKID", delivery_attempt=0, ordering_key="", **attrs): +def create_message( + data, + ack_id="ACKID", + delivery_attempt=0, + ordering_key="", + exactly_once_delivery_enabled=False, + **attrs +): with mock.patch.object(time, "time") as time_: time_.return_value = RECEIVED_SECONDS gapic_pubsub_message = gapic_types.PubsubMessage( @@ -51,6 +59,7 @@ def create_message(data, ack_id="ACKID", delivery_attempt=0, ordering_key="", ** ack_id=ack_id, delivery_attempt=delivery_attempt, request_queue=queue.Queue(), + exactly_once_delivery_enabled_func=lambda: exactly_once_delivery_enabled, ) return msg @@ -127,6 +136,42 @@ def test_ack(): byte_size=30, time_to_ack=mock.ANY, ordering_key="", + future=None, + ) + ) + check_call_types(put, requests.AckRequest) + + +def 
test_ack_with_response_exactly_once_delivery_disabled(): + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.ack_with_response() + put.assert_called_once_with( + requests.AckRequest( + ack_id="bogus_ack_id", + byte_size=30, + time_to_ack=mock.ANY, + ordering_key="", + future=None, + ) + ) + assert future.result() == AcknowledgeStatus.SUCCESS + check_call_types(put, requests.AckRequest) + + +def test_ack_with_response_exactly_once_delivery_enabled(): + msg = create_message( + b"foo", ack_id="bogus_ack_id", exactly_once_delivery_enabled=True + ) + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.ack_with_response() + put.assert_called_once_with( + requests.AckRequest( + ack_id="bogus_ack_id", + byte_size=30, + time_to_ack=mock.ANY, + ordering_key="", + future=future, ) ) check_call_types(put, requests.AckRequest) @@ -147,7 +192,30 @@ def test_modify_ack_deadline(): with mock.patch.object(msg._request_queue, "put") as put: msg.modify_ack_deadline(60) put.assert_called_once_with( - requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60) + requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=None) + ) + check_call_types(put, requests.ModAckRequest) + + +def test_modify_ack_deadline_with_response_exactly_once_delivery_disabled(): + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.modify_ack_deadline_with_response(60) + put.assert_called_once_with( + requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=None) + ) + assert future.result() == AcknowledgeStatus.SUCCESS + check_call_types(put, requests.ModAckRequest) + + +def test_modify_ack_deadline_with_response_exactly_once_delivery_enabled(): + msg = create_message( + b"foo", ack_id="bogus_ack_id", exactly_once_delivery_enabled=True + ) + with mock.patch.object(msg._request_queue, "put") as put: + future = 
msg.modify_ack_deadline_with_response(60) + put.assert_called_once_with( + requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=future) ) check_call_types(put, requests.ModAckRequest) @@ -157,7 +225,36 @@ def test_nack(): with mock.patch.object(msg._request_queue, "put") as put: msg.nack() put.assert_called_once_with( - requests.NackRequest(ack_id="bogus_ack_id", byte_size=30, ordering_key="") + requests.NackRequest( + ack_id="bogus_ack_id", byte_size=30, ordering_key="", future=None + ) + ) + check_call_types(put, requests.NackRequest) + + +def test_nack_with_response_exactly_once_delivery_disabled(): + msg = create_message(b"foo", ack_id="bogus_ack_id") + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.nack_with_response() + put.assert_called_once_with( + requests.NackRequest( + ack_id="bogus_ack_id", byte_size=30, ordering_key="", future=None + ) + ) + assert future.result() == AcknowledgeStatus.SUCCESS + check_call_types(put, requests.NackRequest) + + +def test_nack_with_response_exactly_once_delivery_enabled(): + msg = create_message( + b"foo", ack_id="bogus_ack_id", exactly_once_delivery_enabled=True + ) + with mock.patch.object(msg._request_queue, "put") as put: + future = msg.nack_with_response() + put.assert_called_once_with( + requests.NackRequest( + ack_id="bogus_ack_id", byte_size=30, ordering_key="", future=future + ) ) check_call_types(put, requests.NackRequest) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 797430e0780a..0d28ec447c7c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -21,7 +21,13 @@ def make_message(ack_id, ordering_key): proto_msg = gapic_types.PubsubMessage(data=b"Q", ordering_key=ordering_key) - return 
message.Message(proto_msg._pb, ack_id, 0, queue.Queue()) + return message.Message( + proto_msg._pb, + ack_id, + 0, + queue.Queue(), + exactly_once_delivery_enabled_func=lambda: False, # pragma: NO COVER + ) def test_init(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 42c14c47d7ff..9e8d6c5ed903 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -33,8 +33,13 @@ from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.cloud.pubsub_v1.subscriber import exceptions as subscriber_exceptions +from google.cloud.pubsub_v1.subscriber import futures from google.pubsub_v1 import types as gapic_types import grpc +from google.rpc import status_pb2 +from google.rpc import code_pb2 +from google.rpc import error_details_pb2 @pytest.mark.parametrize( @@ -122,6 +127,16 @@ def make_manager(**kwargs): ) +def complete_modify_ack_deadline_calls(dispatcher): + def complete_futures(*args, **kwargs): + modack_requests = args[0] + for req in modack_requests: + if req.future: + req.future.set_result(subscriber_exceptions.AcknowledgeStatus.SUCCESS) + + dispatcher.modify_ack_deadline.side_effect = complete_futures + + def fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10): """Add a simplified fake add() method to a leaser instance. @@ -144,8 +159,11 @@ def test__obtain_ack_deadline_no_custom_flow_control_setting(): manager = make_manager() - # Make sure that max_duration_per_lease_extension is disabled. 
- manager._flow_control = types.FlowControl(max_duration_per_lease_extension=0) + # Make sure that min_duration_per_lease_extension and + # max_duration_per_lease_extension is disabled. + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=0, max_duration_per_lease_extension=0 + ) deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MIN_ACK_DEADLINE @@ -175,6 +193,20 @@ def test__obtain_ack_deadline_with_max_duration_per_lease_extension(): assert deadline == histogram.MIN_ACK_DEADLINE + 1 +def test__obtain_ack_deadline_with_min_duration_per_lease_extension(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MAX_ACK_DEADLINE + ) + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE) # make p99 value small + + # The deadline configured in flow control should prevail. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MAX_ACK_DEADLINE + + def test__obtain_ack_deadline_with_max_duration_per_lease_extension_too_low(): from google.cloud.pubsub_v1.subscriber._protocol import histogram @@ -182,13 +214,58 @@ def test__obtain_ack_deadline_with_max_duration_per_lease_extension_too_low(): manager._flow_control = types.FlowControl( max_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE - 1 ) - manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 3) # make p99 value large # The deadline configured in flow control should be adjusted to the minimum allowed. 
deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MIN_ACK_DEADLINE +def test__obtain_ack_deadline_with_min_duration_per_lease_extension_too_high(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MAX_ACK_DEADLINE + 1 + ) + + # The deadline configured in flow control should be adjusted to the maximum allowed. + deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MAX_ACK_DEADLINE + + +def test__obtain_ack_deadline_with_exactly_once_enabled(): + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=0 # leave as default value + ) + manager._exactly_once_enabled = True + manager.ack_histogram.add( + 10 + ) # reduce p99 value below 60s min for exactly_once subscriptions + + deadline = manager._obtain_ack_deadline(maybe_update=True) + # Since the 60-second min ack_deadline value for exactly_once subscriptions + # seconds is higher than the histogram value, the deadline should be 60 sec. + assert deadline == 60 + + +def test__obtain_ack_deadline_with_min_duration_per_lease_extension_with_exactly_once_enabled(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager = make_manager() + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MAX_ACK_DEADLINE + ) + manager._exactly_once_enabled = True + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE) # make p99 value small + + # The deadline configured in flow control should prevail. + deadline = manager._obtain_ack_deadline(maybe_update=True) + # User-defined custom min ack_deadline value takes precedence over + # exactly_once default of 60 seconds. 
+ assert deadline == histogram.MAX_ACK_DEADLINE + + def test__obtain_ack_deadline_no_value_update(): manager = make_manager() @@ -426,21 +503,38 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): assert manager._on_hold_bytes == 0 # should be auto-corrected -def test_send_unary(): +def test_send_unary_ack(): manager = make_manager() - manager.send( - gapic_types.StreamingPullRequest( - ack_ids=["ack_id1", "ack_id2"], - modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], - modify_deadline_seconds=[10, 20, 20], - ) - ) + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) manager._client.acknowledge.assert_called_once_with( subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] ) + +def test_send_unary_modack(): + manager = make_manager() + + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=None), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=None), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=None), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ack_reqs_dict=ack_reqs_dict, + ) + manager._client.modify_ack_deadline.assert_has_calls( [ mock.call( @@ -458,29 +552,61 @@ def test_send_unary(): ) -def test_send_unary_empty(): +def test_send_unary_ack_api_call_error(caplog): + caplog.set_level(logging.DEBUG) + manager = make_manager() - manager.send(gapic_types.StreamingPullRequest()) + error = exceptions.GoogleAPICallError("The front fell off") + manager._client.acknowledge.side_effect = error - manager._client.acknowledge.assert_not_called() - 
manager._client.modify_ack_deadline.assert_not_called() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", byte_size=0, time_to_ack=20, ordering_key="", future=None + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) + + assert "The front fell off" in caplog.text -def test_send_unary_api_call_error(caplog): +def test_send_unary_modack_api_call_error(caplog): caplog.set_level(logging.DEBUG) manager = make_manager() error = exceptions.GoogleAPICallError("The front fell off") - manager._client.acknowledge.side_effect = error - - manager.send(gapic_types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager._client.modify_ack_deadline.side_effect = error + + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=futures.Future(), + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=futures.Future(), + ), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id_string"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) assert "The front fell off" in caplog.text -def test_send_unary_retry_error(caplog): +def test_send_unary_ack_retry_error(caplog): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() @@ -490,11 +616,68 @@ def test_send_unary_retry_error(caplog): ) manager._client.acknowledge.side_effect = error + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + with 
pytest.raises(exceptions.RetryError): + manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) + + assert "RetryError while sending unary RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert isinstance(future1.exception(), subscriber_exceptions.AcknowledgeError) + assert ( + future1.exception().error_code is subscriber_exceptions.AcknowledgeStatus.OTHER + ) + assert isinstance(future2.exception(), subscriber_exceptions.AcknowledgeError) + assert ( + future2.exception().error_code is subscriber_exceptions.AcknowledgeStatus.OTHER + ) + + +def test_send_unary_modack_retry_error(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=future) + } with pytest.raises(exceptions.RetryError): - manager.send(gapic_types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"])) + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) assert "RetryError while sending unary RPC" in caplog.text assert "signaled streaming pull manager shutdown" in caplog.text + assert isinstance(future.exception(), subscriber_exceptions.AcknowledgeError) + assert ( + future.exception().error_code is subscriber_exceptions.AcknowledgeStatus.OTHER + ) def test_heartbeat(): @@ -519,6 +702,23 @@ def test_heartbeat_inactive(): assert not result +def test_heartbeat_stream_ack_deadline_seconds(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = True + # Send new ack deadline with next heartbeat. 
+ manager._send_new_ack_deadline = True + + result = manager.heartbeat() + + manager._rpc.send.assert_called_once_with( + gapic_types.StreamingPullRequest(stream_ack_deadline_seconds=10) + ) + assert result + # Set to false after a send is initiated. + assert not manager._send_new_ack_deadline + + @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) @mock.patch("google.api_core.bidi.BackgroundConsumer", autospec=True) @mock.patch("google.cloud.pubsub_v1.subscriber._protocol.leaser.Leaser", autospec=True) @@ -860,7 +1060,127 @@ def test__on_response_modifies_ack_deadline(): manager._on_response(response) dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest("ack_1", 18), requests.ModAckRequest("ack_2", 18)] + [ + requests.ModAckRequest("ack_1", 18, None), + requests.ModAckRequest("ack_2", 18, None), + ] + ) + + +def test__on_response_modifies_ack_deadline_with_exactly_once_min_lease(): + # exactly_once is disabled by default. + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + complete_modify_ack_deadline_calls(dispatcher) + + # make p99 value smaller than exactly_once min lease + manager.ack_histogram.add(10) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + # Set up the response with the first set of messages and exactly_once not + # enabled. + response1 = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack_1", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="ack_2", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), + ), + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=False + ), + ) + + # Set up the response with the second set of messages and exactly_once enabled. 
+ response2 = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack_3", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="ack_4", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), + ), + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=True + ), + ) + + # exactly_once is still disabled b/c subscription_properties says so + manager._on_response(response1) + + # expect mod-acks are called with histogram-based lease value + assert len(dispatcher.modify_ack_deadline.mock_calls) == 1 + call = dispatcher.modify_ack_deadline.mock_calls[0] + assert call.args[0] == [ + requests.ModAckRequest("ack_1", 10, None), + requests.ModAckRequest("ack_2", 10, None), + ] + + # exactly_once should be enabled after this request b/c subscription_properties says so + manager._on_response(response2) + + # expect mod-acks called with 60 sec min lease value for exactly_once subscriptions + # ignore the futures here + assert len(dispatcher.modify_ack_deadline.mock_calls) == 2 + call = dispatcher.modify_ack_deadline.mock_calls[1] + modack_reqs = call.args[0] + assert modack_reqs[0].ack_id == "ack_3" + assert modack_reqs[0].seconds == 60 + assert modack_reqs[1].ack_id == "ack_4" + assert modack_reqs[1].seconds == 60 + + +def test__on_response_send_ack_deadline_after_enabling_exactly_once(): + # exactly_once is disabled by default. 
+ manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + complete_modify_ack_deadline_calls(dispatcher) + + # set up an active RPC + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = True + + # make p99 value smaller than exactly_once min lease + manager.ack_histogram.add(10) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + # Set up the response with the a message and exactly_once enabled. + response2 = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack_1", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ) + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=True + ), + ) + + # exactly_once should be enabled after this request b/c subscription_properties says so + # when exactly_once is enabled or disabled, we send a new ack_deadline via + # the heartbeat + # should satisfy assertion 1 + manager._on_response(response2) + + # simulate periodic heartbeat trigger + heartbeat_request_sent = manager.heartbeat() + assert heartbeat_request_sent + + # heartbeat request is sent with the 60 sec min lease value for exactly_once subscriptions + manager._rpc.send.assert_called_once_with( + gapic_types.StreamingPullRequest(stream_ack_deadline_seconds=60) ) @@ -890,7 +1210,10 @@ def test__on_response_no_leaser_overload(): manager._on_response(response) dispatcher.modify_ack_deadline.assert_called_once_with( - [requests.ModAckRequest("fack", 10), requests.ModAckRequest("back", 10)] + [ + requests.ModAckRequest("fack", 10, None), + requests.ModAckRequest("back", 10, None), + ] ) schedule_calls = scheduler.schedule.mock_calls @@ -937,9 +1260,9 @@ def test__on_response_with_leaser_overload(): # deadline extended, even those not dispatched to callbacks 
dispatcher.modify_ack_deadline.assert_called_once_with( [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), + requests.ModAckRequest("fack", 10, None), + requests.ModAckRequest("back", 10, None), + requests.ModAckRequest("zack", 10, None), ] ) @@ -1017,9 +1340,9 @@ def test__on_response_with_ordering_keys(): # deadline extended, even those not dispatched to callbacks. dispatcher.modify_ack_deadline.assert_called_once_with( [ - requests.ModAckRequest("fack", 10), - requests.ModAckRequest("back", 10), - requests.ModAckRequest("zack", 10), + requests.ModAckRequest("fack", 10, None), + requests.ModAckRequest("back", 10, None), + requests.ModAckRequest("zack", 10, None), ] ) @@ -1058,6 +1381,109 @@ def test__on_response_with_ordering_keys(): assert manager._messages_on_hold.get() is None +def test__on_response_enable_exactly_once(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + complete_modify_ack_deadline_calls(dispatcher) + + # Set up the messages. 
+ response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ) + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=True + ), + ) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + # exactly_once should be enabled + manager._on_response(response) + + assert manager._exactly_once_delivery_enabled() + # new deadline for exactly_once subscriptions should be used + assert manager.ack_deadline == 60 + + +def test__on_response_disable_exactly_once(): + from google.cloud.pubsub_v1.subscriber._protocol import histogram + + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + manager._flow_control = types.FlowControl( + min_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE + ) + # enable exactly_once + manager._exactly_once_enabled = True + + # Set up the messages. + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ) + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=False + ), + ) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + # exactly_once should be disabled + manager._on_response(response) + + assert not manager._exactly_once_enabled + # The deadline configured in flow control should be used, not the + # exactly_once minimum since exactly_once has been disabled. 
+ deadline = manager._obtain_ack_deadline(maybe_update=True) + assert deadline == histogram.MIN_ACK_DEADLINE + + +def test__on_response_exactly_once_immediate_modacks_fail(): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + def complete_futures_with_error(*args, **kwargs): + modack_requests = args[0] + for req in modack_requests: + req.future.set_exception( + subscriber_exceptions.AcknowledgeError( + subscriber_exceptions.AcknowledgeStatus.SUCCESS, None + ) + ) + + dispatcher.modify_ack_deadline.side_effect = complete_futures_with_error + + # Set up the messages. + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ) + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=True + ), + ) + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + + # exactly_once should be enabled + manager._on_response(response) + # exceptions are logged, but otherwise no effect + + def test__should_recover_true(): manager = make_manager() @@ -1130,3 +1556,369 @@ def test_activate_ordering_keys_stopped_scheduler(): manager.activate_ordering_keys(["key1", "key2"]) manager._messages_on_hold.activate_ordering_keys.assert_not_called() + + +@mock.patch("grpc_status.rpc_status.from_call") +@mock.patch("google.protobuf.any_pb2.Any.Unpack") +def test_get_ack_errors_unable_to_unpack(from_call, unpack): + st = status_pb2.Status() + st.code = code_pb2.Code.INTERNAL + st.message = "qmsg" + error_info = error_details_pb2.ErrorInfo() + error_info.metadata["ack_1"] = "error1" + st.details.add().Pack(error_info) + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.return_value = st + # 
Unpack() failed + unpack.return_value = None + + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_no_response_obj(from_call): + exception = exceptions.InternalServerError("msg", errors=(), response=None) + # No response obj + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_from_call_returned_none(from_call): + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.return_value = None + # rpc_status.from_call() returned None + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_value_error_thrown(from_call): + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = ValueError("val error msg") + # ValueError thrown, so return None + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_no_error_details(from_call): + st = status_pb2.Status() + st.code = code_pb2.Code.INTERNAL + st.message = "qmsg" + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = None + from_call.return_value = st + # status has no details to extract exactly-once error info from + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_detail_not_error_info(from_call): + st = status_pb2.Status() + st.code = code_pb2.Code.INTERNAL + st.message = "qmsg" + # pack a dummy status instead of an ErrorInfo + dummy_status = status_pb2.Status() + 
st.details.add().Pack(dummy_status) + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = None + from_call.return_value = st + assert not streaming_pull_manager._get_ack_errors(exception) + + +@mock.patch("grpc_status.rpc_status.from_call") +def test_get_ack_errors_happy_case(from_call): + st = status_pb2.Status() + st.code = code_pb2.Code.INTERNAL + st.message = "qmsg" + error_info = error_details_pb2.ErrorInfo() + error_info.metadata["ack_1"] = "error1" + st.details.add().Pack(error_info) + mock_gprc_call = mock.Mock(spec=grpc.Call) + exception = exceptions.InternalServerError( + "msg", errors=(), response=mock_gprc_call + ) + from_call.side_effect = None + from_call.return_value = st + # happy case - errors returned in a map + ack_errors = streaming_pull_manager._get_ack_errors(exception) + assert ack_errors + assert ack_errors["ack_1"] == "error1" + + +def test_process_requests_no_requests(): + # no requests so no items in results lists + ack_reqs_dict = {} + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert not requests_completed + assert not requests_to_retry + + +def test_process_requests_error_dict_is_none(): + # it's valid to pass in `None` for `errors_dict` + ack_reqs_dict = {} + errors_dict = None + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert not requests_completed + assert not requests_to_retry + + +def test_process_requests_no_errors_has_no_future(): + # no errors so request should be completed, even with no future + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, 
ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + +def test_process_requests_no_errors(): + # no errors so request and its future should be completed + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert future.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert not requests_to_retry + + +def test_process_requests_permanent_error_raises_exception(): + # a permanent error raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "PERMANENT_FAILURE_INVALID_ACK_ID"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID + ) + assert not requests_to_retry + + +def test_process_requests_transient_error_returns_request(): + # a transient error returns the request in `requests_to_retry` + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "TRANSIENT_FAILURE_INVALID_ACK_ID"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert not requests_completed + assert requests_to_retry[0].ack_id == "ackid1" + assert 
not future.done() + + +def test_process_requests_unknown_error_raises_exception(): + # an unknown error raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "unknown_error"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert exc_info.value.error_code == subscriber_exceptions.AcknowledgeStatus.OTHER + assert exc_info.value.info == "unknown_error" + assert not requests_to_retry + + +def test_process_requests_permission_denied_error_status_raises_exception(): + # a permission-denied error status raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.PERMISSION_DENIED + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.PERMISSION_DENIED + ) + assert exc_info.value.info is None + assert not requests_to_retry + + +def test_process_requests_failed_precondition_error_status_raises_exception(): + # a failed-precondition error status raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.FAILED_PRECONDITION + requests_completed, requests_to_retry = 
streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.FAILED_PRECONDITION + ) + assert exc_info.value.info is None + assert not requests_to_retry + + +def test_process_requests_other_error_status_raises_exception(): + # an unrecognized error status raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.OUT_OF_RANGE + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert exc_info.value.error_code == subscriber_exceptions.AcknowledgeStatus.OTHER + assert not requests_to_retry + + +def test_process_requests_mixed_success_and_failure_acks(): + # mixed success and failure (acks) + future1 = futures.Future() + future2 = futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ackid2": requests.AckRequest( + ack_id="ackid2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + "ackid3": requests.AckRequest( + ack_id="ackid3", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future3, + ), + } + errors_dict = { + "ackid1": "PERMANENT_FAILURE_INVALID_ACK_ID", + "ackid2": "TRANSIENT_FAILURE_INVALID_ACK_ID", + } + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + # message with ack_id 'ackid1' fails with an 
exception + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future1.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID + ) + # message with ack_id 'ackid2' is to be retried + assert requests_to_retry[0].ack_id == "ackid2" + assert not requests_to_retry[0].future.done() + # message with ack_id 'ackid3' succeeds + assert requests_completed[1].ack_id == "ackid3" + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_process_requests_mixed_success_and_failure_modacks(): + # mixed success and failure (modacks) + future1 = futures.Future() + future2 = futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=future1), + "ackid2": requests.ModAckRequest(ack_id="ackid2", seconds=60, future=future2), + "ackid3": requests.ModAckRequest(ack_id="ackid3", seconds=60, future=future3), + } + errors_dict = { + "ackid1": "PERMANENT_FAILURE_INVALID_ACK_ID", + "ackid2": "TRANSIENT_FAILURE_INVALID_ACK_ID", + } + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + # message with ack_id 'ackid1' fails with an exception + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future1.result() + assert ( + exc_info.value.error_code + == subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID + ) + # message with ack_id 'ackid2' is to be retried + assert requests_to_retry[0].ack_id == "ackid2" + assert not requests_to_retry[0].future.done() + # message with ack_id 'ackid3' succeeds + assert requests_completed[1].ack_id == "ackid3" + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 1f60b536d554..793ceca3c16c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -205,7 +205,7 @@ def test_context_manager_raises_if_closed(creds): expetect_msg = r"(?i).*closed.*cannot.*context manager.*" with pytest.raises(RuntimeError, match=expetect_msg): with client: - pass + pass # pragma: NO COVER def test_api_property_deprecated(creds): From 63105101dc8fe2594869a78ecb245a99c548454b Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Fri, 4 Mar 2022 10:12:51 -0500 Subject: [PATCH 0774/1197] samples: sample for receiving messages with exactly-once delivery enabled (#588) * Add receive_messages_with_exactly_once_delivery_enabled sample with its own region tag * Address Tianzi and Mahesh's comments. * Add code for arg parsing / integrate sample with infra * Add sample test * Reformat and remove min lease extension period setting from sample * Address Tianzi's comments. * Fix import of subscriber exceptions. 
--- .../samples/snippets/subscriber.py | 70 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 28 ++++++++ 2 files changed, 98 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 7bc124ca8e9f..ada70a02dc41 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -580,6 +580,61 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: # [END pubsub_subscriber_blocking_shutdown] +def receive_messages_with_exactly_once_delivery_enabled( + project_id: str, subscription_id: str, timeout: Optional[float] = None +) -> None: + """Receives messages from a pull subscription with exactly-once delivery enabled.""" + # [START pubsub_subscriber_exactly_once] + from concurrent.futures import TimeoutError + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1.subscriber import exceptions as sub_exceptions + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + subscriber = pubsub_v1.SubscriberClient() + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + print(f"Received {message}.") + + # Use `ack_with_response()` instead of `ack()` to get a future that tracks + # the result of the acknowledge call. When exactly-once delivery is enabled + # on the subscription, the message is guaranteed to not be delivered again + # if the ack future succeeds. + ack_future = message.ack_with_response() + + try: + # Block on result of acknowledge call. 
+ # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + ack_future.result(timeout=timeout) + print(f"Ack for message {message.message_id} successful.") + except sub_exceptions.AcknowledgeError as e: + print( + f"Ack for message {message.message_id} failed with error: {e.error_code}" + ) + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + # [END pubsub_subscriber_exactly_once] + + def synchronous_pull(project_id: str, subscription_id: str) -> None: """Pulling messages synchronously.""" # [START pubsub_subscriber_sync_pull] @@ -881,6 +936,17 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: "timeout", default=None, type=float, nargs="?" ) + receive_messages_with_exactly_once_delivery_enabled_parser = subparsers.add_parser( + "receive-messages-with-exactly-once-delivery-enabled", + help=receive_messages_with_exactly_once_delivery_enabled.__doc__, + ) + receive_messages_with_exactly_once_delivery_enabled_parser.add_argument( + "subscription_id" + ) + receive_messages_with_exactly_once_delivery_enabled_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) + synchronous_pull_parser = subparsers.add_parser( "receive-synchronously", help=synchronous_pull.__doc__ ) @@ -967,6 +1033,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: receive_messages_with_blocking_shutdown( args.project_id, args.subscription_id, args.timeout ) + elif args.command == "receive-messages-with-exactly-once-delivery-enabled": + receive_messages_with_exactly_once_delivery_enabled( + args.project_id, args.subscription_id, args.timeout + ) elif args.command == "receive-synchronously": synchronous_pull(args.project_id, args.subscription_id) elif args.command == "receive-synchronously-with-lease": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 9fcb1c1192fe..614633664f2e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -624,6 +624,34 @@ def eventually_consistent_test() -> None: eventually_consistent_test() +def test_receive_messages_with_exactly_once_delivery_enabled( + publisher_client: pubsub_v1.PublisherClient, + topic: str, + subscription_async: str, + capsys: CaptureFixture[str], +) -> None: + + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + ) + + @typed_backoff + def eventually_consistent_test() -> None: + _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_exactly_once_delivery_enabled( + PROJECT_ID, SUBSCRIPTION_ASYNC, 10 + ) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async in out + assert "Received" in out + assert "Ack" in out + + eventually_consistent_test() + + def test_listen_for_errors( publisher_client: pubsub_v1.PublisherClient, topic: str, From 0ba16b84219f230a1216269f5b52715229b15253 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Fri, 4 Mar 2022 07:34:54 -0800 Subject: [PATCH 
0775/1197] samples: create subscription with exactly once delivery (#592) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * samples: create subscription with exactly once delivery * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Prad Nelluru Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/noxfile.py | 10 +---- .../samples/snippets/subscriber.py | 44 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 25 +++++++++++ 3 files changed, 70 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index ba16d5a469e0..e9fea8af8493 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -146,9 +146,6 @@ def default(session): session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. - # THe following flags are useful during development: - # "-s" -> show print() statement output - # "-k " -> filter test cases session.run( "py.test", "--quiet", @@ -159,7 +156,6 @@ def default(session): "--cov-config=.coveragerc", "--cov-report=", "--cov-fail-under=0", - "-s", os.path.join("tests", "unit"), *session.posargs, ) @@ -204,9 +200,6 @@ def system(session): session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. - # THe following flags are useful during development: - # "-s" -> show print() statement output - # "-k " -> filter test cases if system_test_exists: session.run( "py.test", @@ -233,8 +226,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - # Tip: The "-i" flag lets you ignore errors with specific files. 
- session.run("coverage", "report", "-i", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index ada70a02dc41..5a9d0a7a5c42 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -242,6 +242,37 @@ def create_subscription_with_filtering( # [END pubsub_create_subscription_with_filter] +def create_subscription_with_exactly_once_delivery( + project_id: str, topic_id: str, subscription_id: str +) -> None: + """Create a subscription with exactly once delivery enabled.""" + # [START pubsub_create_subscription_with_exactly_once_delivery] + from google.cloud import pubsub_v1 + + # TODO(developer): Choose an existing topic. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + with subscriber: + subscription = subscriber.create_subscription( + request={ + "name": subscription_path, + "topic": topic_path, + "enable_exactly_once_delivery": True, + } + ) + print( + f"Created subscription with exactly once delivery enabled: {subscription}" + ) + # [END pubsub_create_subscription_with_exactly_once_delivery] + + def delete_subscription(project_id: str, subscription_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] @@ -879,6 +910,15 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_subscription_with_filtering_parser.add_argument("subscription_id") create_subscription_with_filtering_parser.add_argument("filter") + 
create_subscription_with_exactly_once_delivery_parser = subparsers.add_parser( + "create-with-exactly-once", + help=create_subscription_with_exactly_once_delivery.__doc__, + ) + create_subscription_with_exactly_once_delivery_parser.add_argument("topic_id") + create_subscription_with_exactly_once_delivery_parser.add_argument( + "subscription_id" + ) + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -1003,6 +1043,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_subscription_with_filtering( args.project_id, args.topic_id, args.subscription_id, args.filter ) + elif args.command == "create-with-exactly-once": + create_subscription_with_exactly_once_delivery( + args.project_id, args.topic_id, args.subscription_id + ) elif args.command == "delete": delete_subscription(args.project_id, args.subscription_id) elif args.command == "update-push": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 614633664f2e..34a42cac4348 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -411,6 +411,31 @@ def test_create_subscription_with_filtering( assert '"attributes.author=\\"unknown\\""' in out +def test_create_subscription_with_exactly_once_delivery( + subscriber_client: pubsub_v1.SubscriberClient, + subscription_admin: str, + capsys: CaptureFixture[str], +) -> None: + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_subscription_with_exactly_once_delivery( + PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN + ) + + out, _ = capsys.readouterr() + assert "Created subscription with exactly once 
delivery enabled" in out + assert f"{subscription_admin}" in out + assert "enable_exactly_once_delivery: true" in out + + def test_create_push_subscription( subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str, From 039fba6ec8d51ca345c8a936f7b4398d299c38db Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 4 Mar 2022 11:12:36 -0500 Subject: [PATCH 0776/1197] chore(main): release 2.10.0 (#603) * chore(main): release 2.10.0 * Add samples PRs to release notes. Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Prad Nelluru --- packages/google-cloud-pubsub/CHANGELOG.md | 31 +++++++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 00a8aae50a8f..57732bacf53f 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,37 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.10.0](https://github.com/googleapis/python-pubsub/compare/v2.9.0...v2.10.0) (2022-03-04) + + +### Features + +* add api key support ([#571](https://github.com/googleapis/python-pubsub/issues/571)) ([cdda762](https://github.com/googleapis/python-pubsub/commit/cdda762f6d15d96f5e2d7fac975f3494dc49eaa9)) +* add exactly once delivery flag ([#577](https://github.com/googleapis/python-pubsub/issues/577)) ([d6614e2](https://github.com/googleapis/python-pubsub/commit/d6614e274328c58449e67dfc788e2e7986c0c10b)) +* add support for exactly once delivery ([#578](https://github.com/googleapis/python-pubsub/issues/578)) ([95a86fa](https://github.com/googleapis/python-pubsub/commit/95a86fa5f528701b760064f0cece0efa4e60cd44)) +* exactly-once delivery support ([#550](https://github.com/googleapis/python-pubsub/issues/550)) 
([2fb6e15](https://github.com/googleapis/python-pubsub/commit/2fb6e1533192ae81dceee5c71283169a0a85a015)) + + +### Bug Fixes + +* **deps:** move libcst to extras ([#585](https://github.com/googleapis/python-pubsub/issues/585)) ([0846762](https://github.com/googleapis/python-pubsub/commit/084676243ca4afd54cda601e589b80883f9703a3)) +* refactor client classes for safer type checking ([#552](https://github.com/googleapis/python-pubsub/issues/552)) ([7f705be](https://github.com/googleapis/python-pubsub/commit/7f705beb927383f14b9d56f0341ee0de101f7c05)) +* resolve DuplicateCredentialArgs error when using credentials_file ([8ca8cf2](https://github.com/googleapis/python-pubsub/commit/8ca8cf27333baf823a1dffd081e63079f1a12625)) + + +### Samples +* samples: create subscription with filtering enabled [#580](https://github.com/googleapis/python-pubsub/pull/580) +* samples: handle empty response in sync pull samples [#586](https://github.com/googleapis/python-pubsub/pull/586) +* samples: sample for receiving messages with exactly-once delivery enabled [#588](https://github.com/googleapis/python-pubsub/pull/588) +* samples: create subscription with exactly once delivery [#592](https://github.com/googleapis/python-pubsub/pull/592) +(https://github.com/googleapis/python-pubsub/pull/588 + + +### Documentation + +* add autogenerated code snippets ([aa3754c](https://github.com/googleapis/python-pubsub/commit/aa3754cf432bd02be2734a23a32d5b36cd216aee)) +* Docs have inconsistent default values for max_latency and max_bytes ([#572](https://github.com/googleapis/python-pubsub/issues/572)) ([d136dfd](https://github.com/googleapis/python-pubsub/commit/d136dfdb69ebeebd1411a1415f863b94d07078f0)) + ## [2.9.0](https://www.github.com/googleapis/python-pubsub/compare/v2.8.0...v2.9.0) (2021-11-10) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 5423bbb0ddfe..e1b259bd6321 100644 --- a/packages/google-cloud-pubsub/setup.py +++ 
b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.9.0" +version = "2.10.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From e1797c695377269f865253ec4d38ca1e575a05c6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 4 Mar 2022 14:32:23 -0500 Subject: [PATCH 0777/1197] chore: Adding support for pytest-xdist and pytest-parallel (#602) Source-Link: https://github.com/googleapis/synthtool/commit/82f5cb283efffe96e1b6cd634738e0e7de2cd90a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 2 +- .../samples/snippets/noxfile.py | 80 +++++++++++-------- 2 files changed, 47 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 480226ac08a9..7e08e05a380c 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6162c384d685c5fe22521d3f37f6fc732bf99a085f6d47b677dbcae97fc21392 + digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 20cdfc620138..4c808af73ea2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -188,42 +188,54 @@ def _session_tests( # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") test_list.extend(glob.glob("tests")) + if len(test_list) == 0: print("No tests found, skipping directory.") - else: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + return + + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + concurrent_args = [] + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + with open("requirements.txt") as rfile: + packages = rfile.read() + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + with open("requirements-test.txt") as rtfile: + packages += rtfile.read() + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + if "pytest-parallel" in packages: + concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + elif "pytest-xdist" in packages: + concurrent_args.extend(['-n', 'auto']) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs + concurrent_args), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 548d7d1c9c21f8986357263b3f5e1e24280f04ef Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 4 Mar 2022 16:14:58 -0500 Subject: [PATCH 0778/1197] fix(deps): require google-api-core>=1.31.5, >=2.3.2 (#600) fix(deps): require proto-plus>=1.15.0 --- packages/google-cloud-pubsub/setup.py | 4 ++-- packages/google-cloud-pubsub/testing/constraints-3.6.txt | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index e1b259bd6321..1ac6760aed47 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -33,8 +33,8 @@ # NOTE: Maintainers, please do not require google-api-core>=2.x.x # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - "proto-plus >= 1.7.1", + "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "proto-plus >= 1.15.0", "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", "grpcio-status >= 1.16.0", ] diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt index b89267633dee..0ce29f32c95e 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.6.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.6.txt @@ -5,7 +5,7 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 grpcio==1.38.1 -google-api-core==1.28.0 +google-api-core==1.31.5 libcst==0.3.10 -proto-plus==1.7.1 +proto-plus==1.15.0 grpc-google-iam-v1==0.12.3 From 75ab838c272e132da00f13ce9616c13c6f538227 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 5 Mar 2022 00:22:24 +0000 Subject: [PATCH 0779/1197] 
chore(deps): update actions/download-artifact action to v3 (#605) Source-Link: https://github.com/googleapis/synthtool/commit/38e11ad1104dcc1e63b52691ddf2fe4015d06955 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.github/workflows/unittest.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 7e08e05a380c..44c78f7cc12d 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5d8da01438ece4021d135433f2cf3227aa39ef0eaccc941d62aa35e6902832ae + digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index e87fe5b7b79a..e5be6edbd54d 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -26,7 +26,7 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v3 with: name: coverage-artifacts path: .coverage-${{ matrix.python }} @@ -47,7 +47,7 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v2 + uses: actions/download-artifact@v3 with: name: coverage-artifacts path: .coverage-results/ From f756f84f47c9fd2c1c85131668902317caf7d8bb Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sat, 5 Mar 
2022 17:56:15 +0100 Subject: [PATCH 0780/1197] chore(deps): update dependency google-cloud-pubsub to v2.10.0 (#606) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 40078e73f2b8..f47d14979f91 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.9.0 +google-cloud-pubsub==2.10.0 avro==1.11.0 From 7e2ee732a5bc1f15f30a79561e7a9e6418fd9288 Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Tue, 8 Mar 2022 17:10:05 -0500 Subject: [PATCH 0781/1197] feat: retry temporary GRPC statuses for ack/modack/nack when exactly-once delivery is enabled (#607) We need to do this because [only UNAVAILABLE](https://github.com/googleapis/googleapis/blob/eb0700c6f29ca94f460307f201eb605744f055cb/google/pubsub/v1/pubsub_grpc_service_config.json#L221) is retried for acks/modacks/nacks at the GRPC level. With this CL, we extend the higher-level, manual retry mechanism for these RPCs to all the ones considered temporary for the Publish RPC. The new list of retriable codes is for these RPCs when exactly-once delivery is enabled is: DEADLINE_EXCEEDED, RESOURCE_EXHAUSTED, ABORTED, INTERNAL, UNAVAILABLE. 
--- .../_protocol/streaming_pull_manager.py | 29 +++++++++++++--- .../subscriber/test_streaming_pull_manager.py | 34 ++++++++++++++++++- 2 files changed, 58 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 5a9d080265e4..e098491fe7d3 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -75,6 +75,14 @@ a subscription. We do this to reduce premature ack expiration. """ +_EXACTLY_ONCE_DELIVERY_TEMPORARY_RETRY_ERRORS = { + code_pb2.DEADLINE_EXCEEDED, + code_pb2.RESOURCE_EXHAUSTED, + code_pb2.ABORTED, + code_pb2.INTERNAL, + code_pb2.UNAVAILABLE, +} + def _wrap_as_exception(maybe_exception: Any) -> BaseException: """Wrap an object as a Python exception, if needed. @@ -163,6 +171,8 @@ def _process_requests( requests_completed = [] requests_to_retry = [] for ack_id in ack_reqs_dict: + # Handle special errors returned for ack/modack RPCs via the ErrorInfo + # sidecar metadata when exactly-once delivery is enabled. if errors_dict and ack_id in errors_dict: exactly_once_error = errors_dict[ack_id] if exactly_once_error.startswith("TRANSIENT_"): @@ -176,9 +186,14 @@ def _process_requests( future = ack_reqs_dict[ack_id].future future.set_exception(exc) requests_completed.append(ack_reqs_dict[ack_id]) + # Temporary GRPC errors are retried + elif ( + error_status + and error_status.code in _EXACTLY_ONCE_DELIVERY_TEMPORARY_RETRY_ERRORS + ): + requests_to_retry.append(ack_reqs_dict[ack_id]) + # Other GRPC errors are NOT retried elif error_status: - # Only permanent errors are expected here b/c retriable errors are - # retried at the lower, GRPC level. 
if error_status.code == code_pb2.PERMISSION_DENIED: exc = AcknowledgeError(AcknowledgeStatus.PERMISSION_DENIED, info=None) elif error_status.code == code_pb2.FAILED_PRECONDITION: @@ -188,11 +203,13 @@ def _process_requests( future = ack_reqs_dict[ack_id].future future.set_exception(exc) requests_completed.append(ack_reqs_dict[ack_id]) + # Since no error occurred, requests with futures are completed successfully. elif ack_reqs_dict[ack_id].future: future = ack_reqs_dict[ack_id].future # success future.set_result(AcknowledgeStatus.SUCCESS) requests_completed.append(ack_reqs_dict[ack_id]) + # All other requests are considered completed. else: requests_completed.append(ack_reqs_dict[ack_id]) @@ -580,7 +597,9 @@ def send_unary_ack( ack_errors_dict = _get_ack_errors(exc) except exceptions.RetryError as exc: status = status_pb2.Status() - status.code = code_pb2.DEADLINE_EXCEEDED + # Choose a non-retriable error code so the futures fail with + # exceptions. + status.code = code_pb2.UNKNOWN # Makes sure to complete futures so they don't block forever. _process_requests(status, ack_reqs_dict, None) _LOGGER.debug( @@ -634,7 +653,9 @@ def send_unary_modack( modack_errors_dict = _get_ack_errors(exc) except exceptions.RetryError as exc: status = status_pb2.Status() - status.code = code_pb2.DEADLINE_EXCEEDED + # Choose a non-retriable error code so the futures fail with + # exceptions. + status.code = code_pb2.UNKNOWN # Makes sure to complete futures so they don't block forever. 
_process_requests(status, ack_reqs_dict, None) _LOGGER.debug( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 9e8d6c5ed903..36f82b621926 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1735,7 +1735,7 @@ def test_process_requests_permanent_error_raises_exception(): assert not requests_to_retry -def test_process_requests_transient_error_returns_request(): +def test_process_requests_transient_error_returns_request_for_retrying(): # a transient error returns the request in `requests_to_retry` future = futures.Future() ack_reqs_dict = { @@ -1772,6 +1772,38 @@ def test_process_requests_unknown_error_raises_exception(): assert not requests_to_retry +def test_process_requests_retriable_error_status_returns_request_for_retrying(): + # a retriable error status returns the request in `requests_to_retry` + retriable_errors = [ + code_pb2.DEADLINE_EXCEEDED, + code_pb2.RESOURCE_EXHAUSTED, + code_pb2.ABORTED, + code_pb2.INTERNAL, + code_pb2.UNAVAILABLE, + ] + + for retriable_error in retriable_errors: + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future, + ) + } + st = status_pb2.Status() + st.code = retriable_error + ( + requests_completed, + requests_to_retry, + ) = streaming_pull_manager._process_requests(st, ack_reqs_dict, None) + assert not requests_completed + assert requests_to_retry[0].ack_id == "ackid1" + assert not future.done() + + def test_process_requests_permission_denied_error_status_raises_exception(): # a permission-denied error status raises an exception future = futures.Future() From e74381036903a97414fe323706949f4e71f15d2f Mon Sep 17 
00:00:00 2001 From: Prad Nelluru Date: Wed, 9 Mar 2022 13:48:08 -0500 Subject: [PATCH 0782/1197] feat: return singleton success future for exactly-once methods in Message (#608) * Return singleton success future for exactly-once methods in subscriber.Message --- .../cloud/pubsub_v1/subscriber/message.py | 24 ++++++++++++------- .../unit/pubsub_v1/subscriber/test_message.py | 3 +++ 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 5744aa71ca00..ab17bab781fe 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -40,6 +40,9 @@ attributes: {} }}""" +_SUCCESS_FUTURE = futures.Future() +_SUCCESS_FUTURE.set_result(AcknowledgeStatus.SUCCESS) + def _indent(lines: str, prefix: str = " ") -> str: """Indent some text. @@ -291,12 +294,13 @@ def ack_with_response(self) -> "futures.Future": pubsub_v1.subscriber.exceptions.AcknowledgeError exception will be thrown. """ - future = futures.Future() - req_future = None + req_future: Optional[futures.Future] if self._exactly_once_delivery_enabled_func(): + future = futures.Future() req_future = future else: - future.set_result(AcknowledgeStatus.SUCCESS) + future = _SUCCESS_FUTURE + req_future = None time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( requests.AckRequest( @@ -390,12 +394,13 @@ def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": will be thrown. 
""" - future = futures.Future() - req_future = None + req_future: Optional[futures.Future] if self._exactly_once_delivery_enabled_func(): + future = futures.Future() req_future = future else: - future.set_result(AcknowledgeStatus.SUCCESS) + future = _SUCCESS_FUTURE + req_future = None self._request_queue.put( requests.ModAckRequest( @@ -451,12 +456,13 @@ def nack_with_response(self) -> "futures.Future": will be thrown. """ - future = futures.Future() - req_future = None + req_future: Optional[futures.Future] if self._exactly_once_delivery_enabled_func(): + future = futures.Future() req_future = future else: - future.set_result(AcknowledgeStatus.SUCCESS) + future = _SUCCESS_FUTURE + req_future = None self._request_queue.put( requests.NackRequest( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index f5c7bf3c7fab..0debabaf3476 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -156,6 +156,7 @@ def test_ack_with_response_exactly_once_delivery_disabled(): ) ) assert future.result() == AcknowledgeStatus.SUCCESS + assert future == message._SUCCESS_FUTURE check_call_types(put, requests.AckRequest) @@ -205,6 +206,7 @@ def test_modify_ack_deadline_with_response_exactly_once_delivery_disabled(): requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=None) ) assert future.result() == AcknowledgeStatus.SUCCESS + assert future == message._SUCCESS_FUTURE check_call_types(put, requests.ModAckRequest) @@ -242,6 +244,7 @@ def test_nack_with_response_exactly_once_delivery_disabled(): ) ) assert future.result() == AcknowledgeStatus.SUCCESS + assert future == message._SUCCESS_FUTURE check_call_types(put, requests.NackRequest) From b0b301c5fa7de602d11456222553986330e7bcfb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Mar 2022 18:39:16 -0500 Subject: [PATCH 0783/1197] chore(main): release 2.11.0 (#610) --- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 57732bacf53f..bac55a5dc006 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.11.0](https://github.com/googleapis/python-pubsub/compare/v2.10.0...v2.11.0) (2022-03-09) + + +### Features + +* retry temporary GRPC statuses for ack/modack/nack when exactly-once delivery is enabled ([#607](https://github.com/googleapis/python-pubsub/issues/607)) ([a91bed8](https://github.com/googleapis/python-pubsub/commit/a91bed829c9040fcc6c1e70b99b66188ac4ded40)) +* return singleton success future for exactly-once methods in Message ([#608](https://github.com/googleapis/python-pubsub/issues/608)) ([253ced2](https://github.com/googleapis/python-pubsub/commit/253ced28f308450c7a1a93cc38f6d101ecd7d4c0)) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.31.5, >=2.3.2 ([#600](https://github.com/googleapis/python-pubsub/issues/600)) ([1608b7f](https://github.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e)) +* **deps:** require proto-plus>=1.15.0 ([1608b7f](https://github.com/googleapis/python-pubsub/commit/1608b7ffdd5b5db87e1e55fde763440ca9a4086e)) + ## [2.10.0](https://github.com/googleapis/python-pubsub/compare/v2.9.0...v2.10.0) (2022-03-04) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 1ac6760aed47..8624885b53dc 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" 
description = "Google Cloud Pub/Sub API client library" -version = "2.10.0" +version = "2.11.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 91bd83cb81bc28338634c1d043d7bd35f05e1ee0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 10 Mar 2022 11:38:27 +0100 Subject: [PATCH 0784/1197] chore(deps): update dependency google-cloud-pubsub to v2.11.0 (#611) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f47d14979f91..ac58c1298be7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.10.0 +google-cloud-pubsub==2.11.0 avro==1.11.0 From 2e5b778716b6a5ced7480dea61bbcbf24841564b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 14 Mar 2022 09:37:03 -0400 Subject: [PATCH 0785/1197] ci: use python 3.7 for pubsublite samples testing (#615) --- .../.kokoro/presubmit-against-pubsublite-samples.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh index ff143a3941c3..1078a5f5ea31 100755 --- a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh @@ -75,9 +75,9 @@ for file in python-pubsublite/samples/**/requirements.txt; do echo "- testing $file" echo "------------------------------------------------------------" - # Use pytest to execute tests for py-3.6 - python3.6 -m venv py-3.6 - source py-3.6/bin/activate + # Use pytest to execute tests for py-3.7 + python3.7 -m venv py-3.7 
+ source py-3.7/bin/activate # Install python-pubsublite samples tests requirements. python -m pip install --upgrade pip python -m pip install -r requirements.txt -q @@ -87,8 +87,8 @@ for file in python-pubsublite/samples/**/requirements.txt; do python -m pytest quickstart_test.py EXIT=$? - deactivate py-3.6 - rm -rf py-3.6/ + deactivate py-3.7 + rm -rf py-3.7/ if [[ $EXIT -ne 0 ]]; then RTN=1 From 9eb59bc7df1dcd89c61849102203604c669144b4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 14 Mar 2022 15:06:34 +0100 Subject: [PATCH 0786/1197] chore(deps): update dependency pytest to v7.1.0 (#614) Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 1fc2a0443845..cd94c7525ef9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.11.1 -pytest==7.0.1 +pytest==7.1.0 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 1633169e820b70c4ae2e5784f035675a1d4a8f67 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 23 Mar 2022 11:29:01 -0400 Subject: [PATCH 0787/1197] fix: mypy errors (#622) * fix: mypy errors * fixing linter errors * addressing review comments * fixing test coverage * fixing test comments * fixing test comments * linting for test * fixing test comment --- .../_protocol/streaming_pull_manager.py | 19 +++--- .../subscriber/test_streaming_pull_manager.py | 66 +++++++++++++++++++ 2 files changed, 75 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e098491fe7d3..4d9097ff9cf8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -20,7 +20,7 @@ import logging import threading import typing -from typing import Any, Callable, Iterable, List, Optional, Tuple, Union +from typing import Any, Dict, Callable, Iterable, List, Optional, Tuple, Union import uuid import grpc # type: ignore @@ -49,7 +49,6 @@ if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import subscriber - from google.protobuf.internal import containers _LOGGER = logging.getLogger(__name__) @@ -141,9 +140,7 @@ def _get_status(exc: exceptions.GoogleAPICallError,) -> Optional["status_pb2.Sta return None -def _get_ack_errors( - exc: exceptions.GoogleAPICallError, -) -> Optional["containers.ScalarMap"]: +def _get_ack_errors(exc: exceptions.GoogleAPICallError,) -> Optional[Dict[str, str]]: status = _get_status(exc) if not status: _LOGGER.debug("Unable to get status of errored RPC.") @@ -159,8 +156,8 @@ def _get_ack_errors( def _process_requests( error_status: Optional["status_pb2.Status"], - ack_reqs_dict: "containers.ScalarMap", - errors_dict: Optional["containers.ScalarMap"], + ack_reqs_dict: Dict[str, requests.AckRequest], + errors_dict: Optional[Dict[str, str]], ): """Process requests by referring to error_status and errors_dict. 
@@ -182,9 +179,9 @@ def _process_requests( exc = AcknowledgeError(AcknowledgeStatus.INVALID_ACK_ID, info=None) else: exc = AcknowledgeError(AcknowledgeStatus.OTHER, exactly_once_error) - future = ack_reqs_dict[ack_id].future - future.set_exception(exc) + if future is not None: + future.set_exception(exc) requests_completed.append(ack_reqs_dict[ack_id]) # Temporary GRPC errors are retried elif ( @@ -201,12 +198,14 @@ def _process_requests( else: exc = AcknowledgeError(AcknowledgeStatus.OTHER, str(error_status)) future = ack_reqs_dict[ack_id].future - future.set_exception(exc) + if future is not None: + future.set_exception(exc) requests_completed.append(ack_reqs_dict[ack_id]) # Since no error occurred, requests with futures are completed successfully. elif ack_reqs_dict[ack_id].future: future = ack_reqs_dict[ack_id].future # success + assert future is not None future.set_result(AcknowledgeStatus.SUCCESS) requests_completed.append(ack_reqs_dict[ack_id]) # All other requests are considered completed. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 36f82b621926..e9554dedad8f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1713,6 +1713,21 @@ def test_process_requests_no_errors(): assert not requests_to_retry +def test_process_requests_no_errors_no_future(): + # no errors, request should be completed, even when future is None. 
+ ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + errors_dict = {} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + def test_process_requests_permanent_error_raises_exception(): # a permanent error raises an exception future = futures.Future() @@ -1735,6 +1750,40 @@ def test_process_requests_permanent_error_raises_exception(): assert not requests_to_retry +def test_process_requests_permanent_error_other_raises_exception(): + # a permanent error of other raises an exception + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=future + ) + } + errors_dict = {"ackid1": "PERMANENT_FAILURE_OTHER"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + with pytest.raises(subscriber_exceptions.AcknowledgeError) as exc_info: + future.result() + assert exc_info.value.error_code == subscriber_exceptions.AcknowledgeStatus.OTHER + assert not requests_to_retry + + +def test_process_requests_permanent_error_other_raises_exception_no_future(): + # with a permanent error, request is completed even when future is None. 
+ ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + errors_dict = {"ackid1": "PERMANENT_FAILURE_OTHER"} + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + None, ack_reqs_dict, errors_dict + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + def test_process_requests_transient_error_returns_request_for_retrying(): # a transient error returns the request in `requests_to_retry` future = futures.Future() @@ -1872,6 +1921,23 @@ def test_process_requests_other_error_status_raises_exception(): assert not requests_to_retry +def test_process_requests_other_error_status_raises_exception_no_future(): + # with an unrecognized error status, requests are completed, even when + # future is None. + ack_reqs_dict = { + "ackid1": requests.AckRequest( + ack_id="ackid1", byte_size=0, time_to_ack=20, ordering_key="", future=None + ) + } + st = status_pb2.Status() + st.code = code_pb2.Code.OUT_OF_RANGE + requests_completed, requests_to_retry = streaming_pull_manager._process_requests( + st, ack_reqs_dict, None + ) + assert requests_completed[0].ack_id == "ackid1" + assert not requests_to_retry + + def test_process_requests_mixed_success_and_failure_acks(): # mixed success and failure (acks) future1 = futures.Future() From c751ad9b11cba6f031f166a4bdf62345823c9ef1 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Wed, 23 Mar 2022 09:33:10 -0700 Subject: [PATCH 0788/1197] samples(test): correct subscription type in test (#619) --- .../samples/snippets/subscriber_test.py | 89 ++++++++++++++----- 1 file changed, 69 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 34a42cac4348..fd5d8d768508 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -16,7 +16,7 @@ import re import sys import time -from typing import Any, Callable, cast, Generator, TypeVar +from typing import Any, Callable, cast, Generator, List, TypeVar import uuid from _pytest.capture import CaptureFixture @@ -35,10 +35,12 @@ PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = f"subscription-test-topic-{PY_VERSION}-{UUID}" DEAD_LETTER_TOPIC = f"subscription-test-dead-letter-topic-{PY_VERSION}-{UUID}" +EOD_TOPIC = f"subscription-test-eod-topic-{PY_VERSION}-{UUID}" SUBSCRIPTION_ADMIN = f"subscription-test-subscription-admin-{PY_VERSION}-{UUID}" SUBSCRIPTION_ASYNC = f"subscription-test-subscription-async-{PY_VERSION}-{UUID}" SUBSCRIPTION_SYNC = f"subscription-test-subscription-sync-{PY_VERSION}-{UUID}" SUBSCRIPTION_DLQ = f"subscription-test-subscription-dlq-{PY_VERSION}-{UUID}" +SUBSCRIPTION_EOD = f"subscription-test-subscription-eod-{PY_VERSION}-{UUID}" ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push" NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2" DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 @@ -85,6 +87,22 @@ def dead_letter_topic( publisher_client.delete_topic(request={"topic": dead_letter_topic.name}) +@pytest.fixture(scope="module") +def exactly_once_delivery_topic( + publisher_client: pubsub_v1.PublisherClient, +) -> Generator[str, None, None]: + topic_path = publisher_client.topic_path(PROJECT_ID, EOD_TOPIC) + + try: + topic = publisher_client.get_topic(request={"topic": topic_path}) + except NotFound: + topic = publisher_client.create_topic(request={"name": topic_path}) + + yield topic.name + + publisher_client.delete_topic(request={"topic": topic.name}) + + @pytest.fixture(scope="module") def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client = pubsub_v1.SubscriberClient() @@ -202,16 +220,45 @@ def subscription_dlq( subscriber_client.delete_subscription(request={"subscription": subscription.name}) 
+@pytest.fixture(scope="module") +def subscription_eod( + subscriber_client: pubsub_v1.SubscriberClient, exactly_once_delivery_topic: str +) -> Generator[str, None, None]: + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_EOD + ) + + try: + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + subscription = subscriber_client.create_subscription( + request={ + "name": subscription_path, + "topic": exactly_once_delivery_topic, + "enable_exactly_once_delivery": True + } + ) + + yield subscription.name + + subscriber_client.delete_subscription(request={"subscription": subscription.name}) + + def _publish_messages( publisher_client: pubsub_v1.PublisherClient, topic: str, message_num: int = 5, **attrs: Any, -) -> None: +) -> List[str]: + message_ids = [] for n in range(message_num): data = f"message {n}".encode("utf-8") publish_future = publisher_client.publish(topic, data, **attrs) - publish_future.result() + message_ids.append(publish_future.result()) + return message_ids def test_list_in_topic(subscription_admin: str, capsys: CaptureFixture[str]) -> None: @@ -307,7 +354,7 @@ def test_receive_with_delivery_attempts( # We keep retrying up to 10 minutes for mitigating the flakiness. 
@typed_backoff def run_sample() -> None: - _publish_messages(publisher_client, topic) + _ = _publish_messages(publisher_client, topic) subscriber.receive_messages_with_delivery_attempts( PROJECT_ID, SUBSCRIPTION_DLQ, 90 @@ -413,11 +460,11 @@ def test_create_subscription_with_filtering( def test_create_subscription_with_exactly_once_delivery( subscriber_client: pubsub_v1.SubscriberClient, - subscription_admin: str, + subscription_eod: str, capsys: CaptureFixture[str], ) -> None: subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN + PROJECT_ID, SUBSCRIPTION_EOD ) try: subscriber_client.delete_subscription( @@ -427,12 +474,12 @@ def test_create_subscription_with_exactly_once_delivery( pass subscriber.create_subscription_with_exactly_once_delivery( - PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN + PROJECT_ID, EOD_TOPIC, SUBSCRIPTION_EOD ) out, _ = capsys.readouterr() assert "Created subscription with exactly once delivery enabled" in out - assert f"{subscription_admin}" in out + assert f"{subscription_eod}" in out assert "enable_exactly_once_delivery: true" in out @@ -521,7 +568,7 @@ def test_receive( @typed_backoff def eventually_consistent_test() -> None: - _publish_messages(publisher_client, topic) + _ = _publish_messages(publisher_client, topic) subscriber.receive_messages(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) @@ -546,7 +593,7 @@ def test_receive_with_custom_attributes( @typed_backoff def eventually_consistent_test() -> None: - _publish_messages(publisher_client, topic, origin="python-sample") + _ = _publish_messages(publisher_client, topic, origin="python-sample") subscriber.receive_messages_with_custom_attributes( PROJECT_ID, SUBSCRIPTION_ASYNC, 5 @@ -574,7 +621,7 @@ def test_receive_with_flow_control( @typed_backoff def eventually_consistent_test() -> None: - _publish_messages(publisher_client, topic) + _ = _publish_messages(publisher_client, topic) subscriber.receive_messages_with_flow_control(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) @@ 
-604,7 +651,7 @@ def test_receive_with_blocking_shutdown( @typed_backoff def eventually_consistent_test() -> None: - _publish_messages(publisher_client, topic, message_num=3) + _ = _publish_messages(publisher_client, topic, message_num=3) subscriber.receive_messages_with_blocking_shutdown( PROJECT_ID, SUBSCRIPTION_ASYNC, timeout=5.0 @@ -651,8 +698,8 @@ def eventually_consistent_test() -> None: def test_receive_messages_with_exactly_once_delivery_enabled( publisher_client: pubsub_v1.PublisherClient, - topic: str, - subscription_async: str, + exactly_once_delivery_topic: str, + subscription_eod: str, capsys: CaptureFixture[str], ) -> None: @@ -662,17 +709,19 @@ def test_receive_messages_with_exactly_once_delivery_enabled( @typed_backoff def eventually_consistent_test() -> None: - _publish_messages(publisher_client, topic) + message_ids = _publish_messages(publisher_client, exactly_once_delivery_topic) subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_ASYNC, 10 + PROJECT_ID, SUBSCRIPTION_EOD, 10 ) out, _ = capsys.readouterr() assert "Listening" in out - assert subscription_async in out + assert subscription_eod in out assert "Received" in out assert "Ack" in out + for message_id in message_ids: + assert message_id in out eventually_consistent_test() @@ -690,7 +739,7 @@ def test_listen_for_errors( @typed_backoff def eventually_consistent_test() -> None: - _publish_messages(publisher_client, topic) + _ = _publish_messages(publisher_client, topic) subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) @@ -707,7 +756,7 @@ def test_receive_synchronously( subscription_sync: str, capsys: CaptureFixture[str], ) -> None: - _publish_messages(publisher_client, topic) + _ = _publish_messages(publisher_client, topic) subscriber.synchronous_pull(PROJECT_ID, SUBSCRIPTION_SYNC) @@ -731,7 +780,7 @@ def test_receive_synchronously_with_lease( @typed_backoff def run_sample() -> None: - _publish_messages(publisher_client, topic, 
message_num=10) + _ = _publish_messages(publisher_client, topic, message_num=10) # Pausing 10s to allow the subscriber to establish the connection # because sync pull often returns fewer messages than requested. # The intention is to fix flaky tests reporting errors like From e5e4bd76e8b19e6288789dacfe51eb70009c26cd Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Wed, 23 Mar 2022 14:20:44 -0400 Subject: [PATCH 0789/1197] feat: increase GRPC max metadata size to 4 MB (#623) * Ack/modack operations on subscriptions with exactly-once delivery enabled may return up to 4 MB of metadata (about ack/modack success/failure) * It's unclear what the current limit is (I searched far and wide), but it seems to be lower than 4 MB. So, we're currently seeing subscription StreamingPull stream closures as a result of this low limit. We aim to keep subscription the same for subscribers to exactly-once subscriptions, even if they don't use the new *with_response methods, so this fix ensures that. --- .../google/pubsub_v1/services/publisher/transports/grpc.py | 1 + .../pubsub_v1/services/publisher/transports/grpc_asyncio.py | 1 + .../pubsub_v1/services/schema_service/transports/grpc.py | 1 + .../services/schema_service/transports/grpc_asyncio.py | 1 + .../google/pubsub_v1/services/subscriber/transports/grpc.py | 1 + .../pubsub_v1/services/subscriber/transports/grpc_asyncio.py | 1 + packages/google-cloud-pubsub/owlbot.py | 1 + .../tests/unit/gapic/pubsub_v1/test_publisher.py | 5 +++++ .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 5 +++++ .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 5 +++++ 10 files changed, 22 insertions(+) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 43fae36d62f4..f4f9831c93b0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -173,6 +173,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 3729275f9c24..878a9e3471f5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -218,6 +218,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 6d5290e0dead..1aadad53d4ae 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -173,6 +173,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 206b0eb26da4..c182bfe8c020 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -218,6 +218,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 130dca2c1f50..de7ff05c37d5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -175,6 +175,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index a6a19fb6ae70..ce2af7afa781 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -220,6 +220,7 @@ def __init__( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 570d61ca4260..908d3c4faa0e 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -72,6 +72,7 @@ """options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ]""", 
flags=re.MULTILINE | re.DOTALL, diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index a8c963d5ac88..065b6277e96b 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -606,6 +606,7 @@ def test_publisher_client_create_channel_credentials_file( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -2909,6 +2910,7 @@ def test_publisher_transport_create_channel(transport_class, grpc_helpers): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -2939,6 +2941,7 @@ def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -3044,6 +3047,7 @@ def test_publisher_transport_channel_mtls_with_client_cert_source(transport_clas options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -3089,6 +3093,7 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index ed5bca9a173a..d166ce656c37 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -634,6 +634,7 @@ def test_schema_service_client_create_channel_credentials_file( options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -2175,6 +2176,7 @@ def test_schema_service_transport_create_channel(transport_class, grpc_helpers): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -2208,6 +2210,7 @@ def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_cla options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -2316,6 +2319,7 @@ def test_schema_service_transport_channel_mtls_with_client_cert_source(transport options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -2364,6 +2368,7 @@ def test_schema_service_transport_channel_mtls_with_adc(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index c66d92404c81..1157ca1bdbfc 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -610,6 +610,7 @@ def test_subscriber_client_create_channel_credentials_file( options=[ 
("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -4204,6 +4205,7 @@ def test_subscriber_transport_create_channel(transport_class, grpc_helpers): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -4234,6 +4236,7 @@ def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -4339,6 +4342,7 @@ def test_subscriber_transport_channel_mtls_with_client_cert_source(transport_cla options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) @@ -4384,6 +4388,7 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), ("grpc.keepalive_time_ms", 30000), ], ) From 0fb327359fcf70c6e901a93dec80994e72013bcd Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 25 Mar 2022 15:20:02 +0100 Subject: [PATCH 0790/1197] chore(deps): update dependency pytest to v7.1.1 (#618) Co-authored-by: Tianzi Cai Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index cd94c7525ef9..49d72e9e4cc0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ 
b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.11.1 -pytest==7.1.0 +pytest==7.1.1 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From e64f5b45e3c5c12a4b420e5452f0f4a8227755a9 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 28 Mar 2022 20:02:19 -0400 Subject: [PATCH 0791/1197] chore(python): use black==22.3.0 (#627) Source-Link: https://github.com/googleapis/synthtool/commit/6fab84af09f2cf89a031fd8671d1def6b2931b11 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/docs/conf.py | 13 +- .../pubsub_v1/publisher/_sequencer/base.py | 12 +- .../publisher/_sequencer/ordered_sequencer.py | 72 +- .../_sequencer/unordered_sequencer.py | 34 +- .../cloud/pubsub_v1/publisher/client.py | 25 +- .../cloud/pubsub_v1/publisher/exceptions.py | 8 +- .../subscriber/_protocol/dispatcher.py | 4 +- .../subscriber/_protocol/heartbeater.py | 3 +- .../subscriber/_protocol/histogram.py | 3 +- .../subscriber/_protocol/messages_on_hold.py | 5 +- .../_protocol/streaming_pull_manager.py | 8 +- .../services/publisher/async_client.py | 97 +- .../pubsub_v1/services/publisher/client.py | 164 ++- .../services/publisher/transports/base.py | 6 +- .../services/publisher/transports/grpc.py | 3 +- .../services/schema_service/async_client.py | 66 +- .../services/schema_service/client.py | 115 +- .../schema_service/transports/base.py | 30 +- .../schema_service/transports/grpc.py | 3 +- .../services/subscriber/async_client.py | 133 +- .../pubsub_v1/services/subscriber/client.py | 200 ++- .../services/subscriber/transports/base.py | 6 +- .../services/subscriber/transports/grpc.py | 3 +- .../google/pubsub_v1/types/pubsub.py | 628 ++++++-- .../google/pubsub_v1/types/schema.py | 124 +- 
packages/google-cloud-pubsub/noxfile.py | 12 +- .../samples/snippets/noxfile.py | 2 +- packages/google-cloud-pubsub/tests/system.py | 2 +- .../unit/gapic/pubsub_v1/test_publisher.py | 1099 ++++++++++---- .../gapic/pubsub_v1/test_schema_service.py | 552 +++++-- .../unit/gapic/pubsub_v1/test_subscriber.py | 1281 ++++++++++++----- .../sequencer/test_ordered_sequencer.py | 2 +- .../sequencer/test_unordered_sequencer.py | 2 +- .../publisher/test_publisher_client.py | 2 +- .../pubsub_v1/subscriber/test_dispatcher.py | 16 +- 36 files changed, 3618 insertions(+), 1119 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 44c78f7cc12d..87dd00611576 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 34fa14a847ae..9245a8edfc51 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -314,7 +314,13 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ - (root_doc, "google-cloud-pubsub", "google-cloud-pubsub Documentation", [author], 1,) + ( + root_doc, + "google-cloud-pubsub", + "google-cloud-pubsub Documentation", + [author], + 1, + ) ] # If true, show URL addresses after external links. 
@@ -355,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index 7a0c28e45571..58ec5a571bb8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -27,21 +27,21 @@ class Sequencer(metaclass=abc.ABCMeta): """The base class for sequencers for Pub/Sub publishing. A sequencer - sequences messages to be published. + sequences messages to be published. """ @abc.abstractmethod def is_finished(self) -> bool: # pragma: NO COVER - """ Whether the sequencer is finished and should be cleaned up. + """Whether the sequencer is finished and should be cleaned up. - Returns: - bool: Whether the sequencer is finished and should be cleaned up. + Returns: + bool: Whether the sequencer is finished and should be cleaned up. """ raise NotImplementedError @abc.abstractmethod def unpause(self) -> None: # pragma: NO COVER - """ Unpauses this sequencer. + """Unpauses this sequencer. Raises: RuntimeError: @@ -56,7 +56,7 @@ def publish( retry: "OptionalRetry" = gapic_v1.method.DEFAULT, timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, ) -> "futures.Future": # pragma: NO COVER - """ Publish message for this ordering key. + """Publish message for this ordering key. 
Args: message: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index 4d44b1a4f19f..30c76a44f4d2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -76,21 +76,21 @@ class _OrderedSequencerStatus(str, enum.Enum): class OrderedSequencer(sequencer_base.Sequencer): - """ Sequences messages into batches ordered by an ordering key for one topic. + """Sequences messages into batches ordered by an ordering key for one topic. - A sequencer always has at least one batch in it, unless paused or stopped. - When no batches remain, the |publishes_done_callback| is called so the - client can perform cleanup. + A sequencer always has at least one batch in it, unless paused or stopped. + When no batches remain, the |publishes_done_callback| is called so the + client can perform cleanup. - Public methods are thread-safe. + Public methods are thread-safe. - Args: - client: - The publisher client used to create this sequencer. - topic: - The topic. The format for this is ``projects/{project}/topics/{topic}``. - ordering_key: - The ordering key for this sequencer. + Args: + client: + The publisher client used to create this sequencer. + topic: + The topic. The format for this is ``projects/{project}/topics/{topic}``. + ordering_key: + The ordering key for this sequencer. """ def __init__(self, client: "PublisherClient", topic: str, ordering_key: str): @@ -107,23 +107,23 @@ def __init__(self, client: "PublisherClient", topic: str, ordering_key: str): self._state = _OrderedSequencerStatus.ACCEPTING_MESSAGES def is_finished(self) -> bool: - """ Whether the sequencer is finished and should be cleaned up. + """Whether the sequencer is finished and should be cleaned up. 
- Returns: - Whether the sequencer is finished and should be cleaned up. + Returns: + Whether the sequencer is finished and should be cleaned up. """ with self._state_lock: return self._state == _OrderedSequencerStatus.FINISHED def stop(self) -> None: - """ Permanently stop this sequencer. + """Permanently stop this sequencer. - This differs from pausing, which may be resumed. Immediately commits - the first batch and cancels the rest. + This differs from pausing, which may be resumed. Immediately commits + the first batch and cancels the rest. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ with self._state_lock: if self._state == _OrderedSequencerStatus.STOPPED: @@ -143,13 +143,13 @@ def stop(self) -> None: batch.cancel(batch_base.BatchCancellationReason.CLIENT_STOPPED) def commit(self) -> None: - """ Commit the first batch, if unpaused. + """Commit the first batch, if unpaused. - If paused or no batches exist, this method does nothing. + If paused or no batches exist, this method does nothing. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ with self._state_lock: if self._state == _OrderedSequencerStatus.STOPPED: @@ -161,11 +161,11 @@ def commit(self) -> None: self._ordered_batches[0].commit() def _batch_done_callback(self, success: bool) -> None: - """ Deal with completion of a batch. + """Deal with completion of a batch. - Called when a batch has finished publishing, with either a success - or a failure. (Temporary failures are retried infinitely when - ordering keys are enabled.) + Called when a batch has finished publishing, with either a success + or a failure. (Temporary failures are retried infinitely when + ordering keys are enabled.) 
""" ensure_cleanup_and_commit_timer_runs = False with self._state_lock: @@ -209,10 +209,10 @@ def _batch_done_callback(self, success: bool) -> None: self._client.ensure_cleanup_and_commit_timer_runs() def _pause(self) -> None: - """ Pause this sequencer: set state to paused, cancel all batches, and - clear the list of ordered batches. + """Pause this sequencer: set state to paused, cancel all batches, and + clear the list of ordered batches. - _state_lock must be taken before calling this method. + _state_lock must be taken before calling this method. """ assert ( self._state != _OrderedSequencerStatus.FINISHED @@ -225,7 +225,7 @@ def _pause(self) -> None: self._ordered_batches.clear() def unpause(self) -> None: - """ Unpause this sequencer. + """Unpause this sequencer. Raises: RuntimeError: @@ -241,7 +241,7 @@ def _create_batch( commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "_batch.thread.Batch": - """ Create a new batch using the client's batch class and other stored + """Create a new batch using the client's batch class and other stored settings. Args: @@ -266,7 +266,7 @@ def publish( retry: "OptionalRetry" = gapic_v1.method.DEFAULT, timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> futures.Future: - """ Publish message for this ordering key. + """Publish message for this ordering key. 
Args: message: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index 7f2f136105b8..7d57aa8218b9 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -30,9 +30,9 @@ class UnorderedSequencer(base.Sequencer): - """ Sequences messages into batches for one topic without any ordering. + """Sequences messages into batches for one topic without any ordering. - Public methods are NOT thread-safe. + Public methods are NOT thread-safe. """ def __init__(self, client: "PublisherClient", topic: str): @@ -42,10 +42,10 @@ def __init__(self, client: "PublisherClient", topic: str): self._stopped = False def is_finished(self) -> bool: - """ Whether the sequencer is finished and should be cleaned up. + """Whether the sequencer is finished and should be cleaned up. - Returns: - Whether the sequencer is finished and should be cleaned up. + Returns: + Whether the sequencer is finished and should be cleaned up. """ # TODO: Implement. Not implementing yet because of possible performance # impact due to extra locking required. This does mean that @@ -54,13 +54,13 @@ def is_finished(self) -> bool: return False def stop(self) -> None: - """ Stop the sequencer. + """Stop the sequencer. - Subsequent publishes will fail. + Subsequent publishes will fail. - Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ if self._stopped: raise RuntimeError("Unordered sequencer already stopped.") @@ -68,11 +68,11 @@ def stop(self) -> None: self._stopped = True def commit(self) -> None: - """ Commit the batch. + """Commit the batch. 
- Raises: - RuntimeError: - If called after stop() has already been called. + Raises: + RuntimeError: + If called after stop() has already been called. """ if self._stopped: raise RuntimeError("Unordered sequencer already stopped.") @@ -86,7 +86,7 @@ def commit(self) -> None: self._current_batch = None def unpause(self) -> typing.NoReturn: - """ Not relevant for this class. """ + """Not relevant for this class.""" raise NotImplementedError def _create_batch( @@ -94,7 +94,7 @@ def _create_batch( commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "_batch.thread.Batch": - """ Create a new batch using the client's batch class and other stored + """Create a new batch using the client's batch class and other stored settings. Args: @@ -119,7 +119,7 @@ def publish( retry: "OptionalRetry" = gapic_v1.method.DEFAULT, timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "futures.Future": - """ Batch message into existing or new batch. + """Batch message into existing or new batch. Args: message: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 43305afcc181..e3266e57f854 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -215,8 +215,8 @@ def api(self): return super() def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> SequencerType: - """ Get an existing sequencer or create a new one given the (topic, - ordering_key) pair. + """Get an existing sequencer or create a new one given the (topic, + ordering_key) pair. 
""" sequencer_key = (topic, ordering_key) sequencer = self._sequencers.get(sequencer_key) @@ -232,7 +232,7 @@ def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> SequencerTy return sequencer def resume_publish(self, topic: str, ordering_key: str) -> None: - """ Resume publish on an ordering key that has had unrecoverable errors. + """Resume publish on an ordering key that has had unrecoverable errors. Args: topic: The topic to publish messages to. @@ -403,9 +403,9 @@ def on_publish_done(future): # use the default retry for the publish GRPC method as a base transport = self._transport base_retry = transport._wrapped_methods[transport.publish]._retry - retry = base_retry.with_deadline(2.0 ** 32) + retry = base_retry.with_deadline(2.0**32) else: - retry = retry.with_deadline(2.0 ** 32) + retry = retry.with_deadline(2.0**32) # Delegate the publishing to the sequencer. sequencer = self._get_or_create_sequencer(topic, ordering_key) @@ -419,18 +419,18 @@ def on_publish_done(future): return future def ensure_cleanup_and_commit_timer_runs(self) -> None: - """ Ensure a cleanup/commit timer thread is running. + """Ensure a cleanup/commit timer thread is running. - If a cleanup/commit timer thread is already running, this does nothing. + If a cleanup/commit timer thread is already running, this does nothing. """ with self._batch_lock: self._ensure_commit_timer_runs_no_lock() def _ensure_commit_timer_runs_no_lock(self) -> None: - """ Ensure a commit timer thread is running, without taking - _batch_lock. + """Ensure a commit timer thread is running, without taking + _batch_lock. - _batch_lock must be held before calling this method. + _batch_lock must be held before calling this method. 
""" if not self._commit_thread and self.batch_settings.max_latency < float("inf"): self._start_commit_thread() @@ -448,8 +448,7 @@ def _start_commit_thread(self) -> None: self._commit_thread.start() def _wait_and_commit_sequencers(self) -> None: - """ Wait up to the batching timeout, and commit all sequencers. - """ + """Wait up to the batching timeout, and commit all sequencers.""" # Sleep for however long we should be waiting. time.sleep(self.batch_settings.max_latency) _LOGGER.debug("Commit thread is waking up") @@ -461,7 +460,7 @@ def _wait_and_commit_sequencers(self) -> None: self._commit_thread = None def _commit_sequencers(self) -> None: - """ Clean up finished sequencers and commit the rest. """ + """Clean up finished sequencers and commit the rest.""" finished_sequencer_keys = [ key for key, sequencer in self._sequencers.items() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py index ff0f0713d14b..f2b65299e1f2 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/exceptions.py @@ -27,10 +27,10 @@ class MessageTooLargeError(ValueError): class PublishToPausedOrderingKeyException(Exception): - """ Publish attempted to paused ordering key. To resume publishing, call - the resumePublish method on the publisher Client object with this - ordering key. Ordering keys are paused if an unrecoverable error - occurred during publish of a batch for that key. + """Publish attempted to paused ordering key. To resume publishing, call + the resumePublish method on the publisher Client object with this + ordering key. Ordering keys are paused if an unrecoverable error + occurred during publish of a batch for that key. 
""" def __init__(self, ordering_key: str): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 6ab5165d17b3..916161616579 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -208,7 +208,9 @@ def _start_retry_thread(self, thread_name, thread_target): # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-829910303 # https://github.com/googleapis/python-pubsub/issues/395#issuecomment-830092418 retry_thread = threading.Thread( - name=thread_name, target=thread_target, daemon=True, + name=thread_name, + target=thread_target, + daemon=True, ) # The thread finishes when the requests succeed or eventually fail with # a back-end timeout error or other permanent failure. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 0ab03ddf968e..7fa964ff622e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -42,8 +42,7 @@ def __init__(self, manager: "StreamingPullManager", period: int = _DEFAULT_PERIO self._period = period def heartbeat(self) -> None: - """Periodically send streaming pull heartbeats. 
- """ + """Periodically send streaming pull heartbeats.""" while not self._stop_event.is_set(): if self._manager.heartbeat(): _LOGGER.debug("Sent heartbeat.") diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py index 7ffa4b3a0b42..d922bbf685c7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/histogram.py @@ -72,8 +72,7 @@ def __len__(self) -> int: return self._len def __contains__(self, needle: int) -> bool: - """Return ``True`` if needle is present in the histogram, ``False`` otherwise. - """ + """Return ``True`` if needle is present in the histogram, ``False`` otherwise.""" return needle in self._data def __repr__(self): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py index 82d5ca376dec..5c3cc1a75e8e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py @@ -21,8 +21,7 @@ class MessagesOnHold(object): - """Tracks messages on hold by ordering key. Not thread-safe. - """ + """Tracks messages on hold by ordering key. Not thread-safe.""" def __init__(self): self._size = 0 @@ -58,7 +57,7 @@ def size(self) -> int: return self._size def get(self) -> Optional["subscriber.message.Message"]: - """ Gets a message from the on-hold queue. A message with an ordering + """Gets a message from the on-hold queue. A message with an ordering key wont be returned if there's another message with the same key in flight. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 4d9097ff9cf8..9fb489967cfe 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -126,7 +126,9 @@ def _wrap_callback_errors( on_callback_error(exc) -def _get_status(exc: exceptions.GoogleAPICallError,) -> Optional["status_pb2.Status"]: +def _get_status( + exc: exceptions.GoogleAPICallError, +) -> Optional["status_pb2.Status"]: if not exc.response: _LOGGER.debug("No response obj in errored RPC call.") return None @@ -140,7 +142,9 @@ def _get_status(exc: exceptions.GoogleAPICallError,) -> Optional["status_pb2.Sta return None -def _get_ack_errors(exc: exceptions.GoogleAPICallError,) -> Optional[Dict[str, str]]: +def _get_ack_errors( + exc: exceptions.GoogleAPICallError, +) -> Optional[Dict[str, str]]: status = _get_status(exc) if not status: _LOGGER.debug("Unable to get status of errored RPC.") diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index b818e6cbbd04..1262e6abdeae 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -308,7 +308,12 @@ def sample_create_topic(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -390,7 +395,12 @@ def sample_update_topic(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -504,7 +514,12 @@ def sample_publish(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -603,7 +618,12 @@ def sample_get_topic(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -707,12 +727,20 @@ def sample_list_topics(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListTopicsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -821,12 +849,20 @@ def sample_list_topic_subscriptions(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListTopicSubscriptionsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. 
@@ -939,12 +975,20 @@ def sample_list_topic_snapshots(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListTopicSnapshotsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1043,7 +1087,10 @@ def sample_delete_topic(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def detach_subscription( @@ -1126,7 +1173,12 @@ def sample_detach_subscription(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1235,7 +1287,12 @@ async def set_iam_policy( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1345,7 +1402,12 @@ async def get_iam_policy( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1400,7 +1462,12 @@ async def test_iam_permissions( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index e4eb074c5f89..7e9bdeb83a06 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -61,7 +61,10 @@ class PublisherClientMeta(type): _transport_registry["grpc"] = PublisherGrpcTransport _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[PublisherTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[PublisherTransport]: """Returns an appropriate transport class. @@ -182,10 +185,14 @@ def transport(self) -> PublisherTransport: return self._transport @staticmethod - def schema_path(project: str, schema: str,) -> str: + def schema_path( + project: str, + schema: str, + ) -> str: """Returns a fully-qualified schema string.""" return "projects/{project}/schemas/{schema}".format( - project=project, schema=schema, + project=project, + schema=schema, ) @staticmethod @@ -195,10 +202,14 @@ def parse_schema_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def subscription_path(project: str, subscription: str,) -> str: + def subscription_path( + project: str, + subscription: str, + ) -> str: """Returns a fully-qualified subscription string.""" return "projects/{project}/subscriptions/{subscription}".format( - project=project, subscription=subscription, + project=project, + subscription=subscription, ) @staticmethod @@ -210,9 +221,15 @@ def parse_subscription_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def topic_path(project: str, topic: str,) -> str: + def topic_path( + project: str, + topic: str, + ) -> str: """Returns a fully-qualified topic string.""" - return 
"projects/{project}/topics/{topic}".format(project=project, topic=topic,) + return "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) @staticmethod def parse_topic_path(path: str) -> Dict[str, str]: @@ -221,7 +238,9 @@ def parse_topic_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -234,9 +253,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -245,9 +268,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -256,9 +283,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return 
"projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -267,10 +298,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -548,7 +583,12 @@ def sample_create_topic(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -623,7 +663,12 @@ def sample_update_topic(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -723,7 +768,12 @@ def sample_publish(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -812,7 +862,12 @@ def sample_get_topic(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -906,12 +961,20 @@ def sample_list_topics(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListTopicsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1010,12 +1073,20 @@ def sample_list_topic_subscriptions(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListTopicSubscriptionsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1118,12 +1189,20 @@ def sample_list_topic_snapshots(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListTopicSnapshotsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1214,7 +1293,10 @@ def sample_delete_topic(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def detach_subscription( @@ -1290,7 +1372,12 @@ def sample_detach_subscription(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1416,7 +1503,12 @@ def set_iam_policy( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1530,7 +1622,12 @@ def get_iam_policy( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1586,7 +1683,12 @@ def test_iam_permissions( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index a3ea34af6fe5..7b3d2a6d201f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -271,9 +271,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index f4f9831c93b0..a8b1db159b00 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -230,8 +230,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 17c2bdaa6cf9..f57b64e3811b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -318,7 +318,12 @@ def sample_create_schema(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -405,7 +410,12 @@ def sample_get_schema(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -498,12 +508,20 @@ def sample_list_schemas(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListSchemasAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -586,7 +604,10 @@ def sample_delete_schema(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def validate_schema( @@ -688,7 +709,12 @@ def sample_validate_schema(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -757,7 +783,12 @@ def sample_validate_message(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -865,7 +896,12 @@ async def set_iam_policy( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -974,7 +1010,12 @@ async def get_iam_policy( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1028,7 +1069,12 @@ async def test_iam_permissions( ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 825aa39980a5..b150b2681760 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -56,7 +56,10 @@ class SchemaServiceClientMeta(type): _transport_registry["grpc"] = SchemaServiceGrpcTransport _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[SchemaServiceTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[SchemaServiceTransport]: """Returns an appropriate transport class. 
Args: @@ -161,10 +164,14 @@ def transport(self) -> SchemaServiceTransport: return self._transport @staticmethod - def schema_path(project: str, schema: str,) -> str: + def schema_path( + project: str, + schema: str, + ) -> str: """Returns a fully-qualified schema string.""" return "projects/{project}/schemas/{schema}".format( - project=project, schema=schema, + project=project, + schema=schema, ) @staticmethod @@ -174,7 +181,9 @@ def parse_schema_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -187,9 +196,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -198,9 +211,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -209,9 +226,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + 
def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -220,10 +241,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -510,7 +535,12 @@ def sample_create_schema(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -597,7 +627,12 @@ def sample_get_schema(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -690,12 +725,20 @@ def sample_list_schemas(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListSchemasPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -778,7 +821,10 @@ def sample_delete_schema(): # Send the request. 
rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def validate_schema( @@ -880,7 +926,12 @@ def sample_validate_schema(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -950,7 +1001,12 @@ def sample_validate_message(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1071,7 +1127,12 @@ def set_iam_policy( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1180,7 +1241,12 @@ def get_iam_policy( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1234,7 +1300,12 @@ def test_iam_permissions( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 97b85afa77df..58a2e1265851 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -128,31 +128,43 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.create_schema: gapic_v1.method.wrap_method( - self.create_schema, default_timeout=None, client_info=client_info, + self.create_schema, + default_timeout=None, + client_info=client_info, ), self.get_schema: gapic_v1.method.wrap_method( - self.get_schema, default_timeout=None, client_info=client_info, + self.get_schema, + default_timeout=None, + client_info=client_info, ), self.list_schemas: gapic_v1.method.wrap_method( - self.list_schemas, default_timeout=None, client_info=client_info, + self.list_schemas, + default_timeout=None, + client_info=client_info, ), self.delete_schema: gapic_v1.method.wrap_method( - self.delete_schema, default_timeout=None, client_info=client_info, + self.delete_schema, + default_timeout=None, + client_info=client_info, ), self.validate_schema: gapic_v1.method.wrap_method( - self.validate_schema, default_timeout=None, client_info=client_info, + self.validate_schema, + default_timeout=None, + client_info=client_info, ), self.validate_message: gapic_v1.method.wrap_method( - self.validate_message, default_timeout=None, client_info=client_info, + self.validate_message, + default_timeout=None, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! """ raise NotImplementedError() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 1aadad53d4ae..c8bf265f9f7f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -230,8 +230,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 265c7dc75a17..3acfac4e0712 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -390,7 +390,12 @@ def sample_create_subscription(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -491,7 +496,12 @@ def sample_get_subscription(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -575,7 +585,12 @@ def sample_update_subscription(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -679,12 +694,20 @@ def sample_list_subscriptions(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListSubscriptionsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -784,7 +807,10 @@ def sample_delete_subscription(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def modify_ack_deadline( @@ -910,7 +936,10 @@ def sample_modify_ack_deadline(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def acknowledge( @@ -1021,7 +1050,10 @@ def sample_acknowledge(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def pull( @@ -1160,7 +1192,12 @@ def sample_pull(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1256,7 +1293,12 @@ def request_generator(): ) # Send the request. 
- response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1371,7 +1413,10 @@ def sample_modify_push_config(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def get_snapshot( @@ -1480,7 +1525,12 @@ def sample_get_snapshot(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1589,12 +1639,20 @@ def sample_list_snapshots(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListSnapshotsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1741,7 +1799,12 @@ def sample_create_snapshot(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1831,7 +1894,12 @@ def sample_update_snapshot(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1932,7 +2000,10 @@ def sample_delete_snapshot(): # Send the request. 
await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def seek( @@ -2017,7 +2088,12 @@ def sample_seek(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2125,7 +2201,12 @@ async def set_iam_policy( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2234,7 +2315,12 @@ async def get_iam_policy( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2288,7 +2374,12 @@ async def test_iam_permissions( ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 01499a881a36..e208327a2569 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -61,7 +61,10 @@ class SubscriberClientMeta(type): _transport_registry["grpc"] = SubscriberGrpcTransport _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport - def get_transport_class(cls, label: str = None,) -> Type[SubscriberTransport]: + def get_transport_class( + cls, + label: str = None, + ) -> Type[SubscriberTransport]: """Returns an appropriate transport class. @@ -184,10 +187,14 @@ def transport(self) -> SubscriberTransport: return self._transport @staticmethod - def snapshot_path(project: str, snapshot: str,) -> str: + def snapshot_path( + project: str, + snapshot: str, + ) -> str: """Returns a fully-qualified snapshot string.""" return "projects/{project}/snapshots/{snapshot}".format( - project=project, snapshot=snapshot, + project=project, + snapshot=snapshot, ) @staticmethod @@ -197,10 +204,14 @@ def parse_snapshot_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def subscription_path(project: str, subscription: str,) -> str: + def subscription_path( + project: str, + subscription: str, + ) -> str: """Returns a fully-qualified subscription string.""" return "projects/{project}/subscriptions/{subscription}".format( - project=project, subscription=subscription, + project=project, + subscription=subscription, ) @staticmethod @@ -212,9 +223,15 @@ def parse_subscription_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def topic_path(project: str, topic: str,) -> str: + def topic_path( + project: str, + topic: str, + ) -> str: """Returns a fully-qualified topic string.""" - return 
"projects/{project}/topics/{topic}".format(project=project, topic=topic,) + return "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) @staticmethod def parse_topic_path(path: str) -> Dict[str, str]: @@ -223,7 +240,9 @@ def parse_topic_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -236,9 +255,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -247,9 +270,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -258,9 +285,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return 
"projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -269,10 +300,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -618,7 +653,12 @@ def sample_create_subscription(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -709,7 +749,12 @@ def sample_get_subscription(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -786,7 +831,12 @@ def sample_update_subscription(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -880,12 +930,20 @@ def sample_list_subscriptions(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. 
response = pagers.ListSubscriptionsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -977,7 +1035,10 @@ def sample_delete_subscription(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def modify_ack_deadline( @@ -1095,7 +1156,10 @@ def sample_modify_ack_deadline(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def acknowledge( @@ -1198,7 +1262,10 @@ def sample_acknowledge(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def pull( @@ -1327,7 +1394,12 @@ def sample_pull(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1412,7 +1484,12 @@ def request_generator(): rpc = self._transport._wrapped_methods[self._transport.streaming_pull] # Send the request. - response = rpc(requests, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1519,7 +1596,10 @@ def sample_modify_push_config(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def get_snapshot( @@ -1618,7 +1698,12 @@ def sample_get_snapshot(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1717,12 +1802,20 @@ def sample_list_snapshots(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListSnapshotsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1861,7 +1954,12 @@ def sample_create_snapshot(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1944,7 +2042,12 @@ def sample_update_snapshot(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2037,7 +2140,10 @@ def sample_delete_snapshot(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def seek( @@ -2113,7 +2219,12 @@ def sample_seek(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2238,7 +2349,12 @@ def set_iam_policy( ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2351,7 +2467,12 @@ def get_iam_policy( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -2406,7 +2527,12 @@ def test_iam_permissions( ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 3cd6e7972bf1..835e1984e805 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -373,9 +373,9 @@ def _prep_wrapped_messages(self, client_info): def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index de7ff05c37d5..d7f755f7eaa5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -232,8 +232,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 3b79e8eca06e..c344dbe3df34 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -88,7 +88,10 @@ class MessageStoragePolicy(proto.Message): not a valid configuration. """ - allowed_persistence_regions = proto.RepeatedField(proto.STRING, number=1,) + allowed_persistence_regions = proto.RepeatedField( + proto.STRING, + number=1, + ) class SchemaSettings(proto.Message): @@ -105,8 +108,15 @@ class SchemaSettings(proto.Message): The encoding of messages validated against ``schema``. """ - schema = proto.Field(proto.STRING, number=1,) - encoding = proto.Field(proto.ENUM, number=2, enum=gp_schema.Encoding,) + schema = proto.Field( + proto.STRING, + number=1, + ) + encoding = proto.Field( + proto.ENUM, + number=2, + enum=gp_schema.Encoding, + ) class Topic(proto.Message): @@ -157,16 +167,37 @@ class Topic(proto.Message): days or less than 10 minutes. 
""" - name = proto.Field(proto.STRING, number=1,) - labels = proto.MapField(proto.STRING, proto.STRING, number=2,) + name = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) message_storage_policy = proto.Field( - proto.MESSAGE, number=3, message="MessageStoragePolicy", + proto.MESSAGE, + number=3, + message="MessageStoragePolicy", + ) + kms_key_name = proto.Field( + proto.STRING, + number=5, + ) + schema_settings = proto.Field( + proto.MESSAGE, + number=6, + message="SchemaSettings", + ) + satisfies_pzs = proto.Field( + proto.BOOL, + number=7, ) - kms_key_name = proto.Field(proto.STRING, number=5,) - schema_settings = proto.Field(proto.MESSAGE, number=6, message="SchemaSettings",) - satisfies_pzs = proto.Field(proto.BOOL, number=7,) message_retention_duration = proto.Field( - proto.MESSAGE, number=8, message=duration_pb2.Duration, + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, ) @@ -213,13 +244,28 @@ class PubsubMessage(proto.Message): same ``ordering_key`` value. """ - data = proto.Field(proto.BYTES, number=1,) - attributes = proto.MapField(proto.STRING, proto.STRING, number=2,) - message_id = proto.Field(proto.STRING, number=3,) + data = proto.Field( + proto.BYTES, + number=1, + ) + attributes = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + message_id = proto.Field( + proto.STRING, + number=3, + ) publish_time = proto.Field( - proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + ordering_key = proto.Field( + proto.STRING, + number=5, ) - ordering_key = proto.Field(proto.STRING, number=5,) class GetTopicRequest(proto.Message): @@ -231,7 +277,10 @@ class GetTopicRequest(proto.Message): ``projects/{project}/topics/{topic}``. 
""" - topic = proto.Field(proto.STRING, number=1,) + topic = proto.Field( + proto.STRING, + number=1, + ) class UpdateTopicRequest(proto.Message): @@ -249,9 +298,15 @@ class UpdateTopicRequest(proto.Message): policy configured at the project or organization level. """ - topic = proto.Field(proto.MESSAGE, number=1, message="Topic",) + topic = proto.Field( + proto.MESSAGE, + number=1, + message="Topic", + ) update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, ) @@ -266,8 +321,15 @@ class PublishRequest(proto.Message): Required. The messages to publish. """ - topic = proto.Field(proto.STRING, number=1,) - messages = proto.RepeatedField(proto.MESSAGE, number=2, message="PubsubMessage",) + topic = proto.Field( + proto.STRING, + number=1, + ) + messages = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="PubsubMessage", + ) class PublishResponse(proto.Message): @@ -281,7 +343,10 @@ class PublishResponse(proto.Message): within the topic. """ - message_ids = proto.RepeatedField(proto.STRING, number=1,) + message_ids = proto.RepeatedField( + proto.STRING, + number=1, + ) class ListTopicsRequest(proto.Message): @@ -300,9 +365,18 @@ class ListTopicsRequest(proto.Message): next page of data. 
""" - project = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) + project = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) class ListTopicsResponse(proto.Message): @@ -321,8 +395,15 @@ class ListTopicsResponse(proto.Message): def raw_page(self): return self - topics = proto.RepeatedField(proto.MESSAGE, number=1, message="Topic",) - next_page_token = proto.Field(proto.STRING, number=2,) + topics = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Topic", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class ListTopicSubscriptionsRequest(proto.Message): @@ -343,9 +424,18 @@ class ListTopicSubscriptionsRequest(proto.Message): that the system should return the next page of data. """ - topic = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) + topic = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) class ListTopicSubscriptionsResponse(proto.Message): @@ -365,8 +455,14 @@ class ListTopicSubscriptionsResponse(proto.Message): def raw_page(self): return self - subscriptions = proto.RepeatedField(proto.STRING, number=1,) - next_page_token = proto.Field(proto.STRING, number=2,) + subscriptions = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class ListTopicSnapshotsRequest(proto.Message): @@ -385,9 +481,18 @@ class ListTopicSnapshotsRequest(proto.Message): that the system should return the next page of data. 
""" - topic = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) + topic = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) class ListTopicSnapshotsResponse(proto.Message): @@ -407,8 +512,14 @@ class ListTopicSnapshotsResponse(proto.Message): def raw_page(self): return self - snapshots = proto.RepeatedField(proto.STRING, number=1,) - next_page_token = proto.Field(proto.STRING, number=2,) + snapshots = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class DeleteTopicRequest(proto.Message): @@ -420,7 +531,10 @@ class DeleteTopicRequest(proto.Message): ``projects/{project}/topics/{topic}``. """ - topic = proto.Field(proto.STRING, number=1,) + topic = proto.Field( + proto.STRING, + number=1, + ) class DetachSubscriptionRequest(proto.Message): @@ -432,7 +546,10 @@ class DetachSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{subscription}``. """ - subscription = proto.Field(proto.STRING, number=1,) + subscription = proto.Field( + proto.STRING, + number=1, + ) class DetachSubscriptionResponse(proto.Message): @@ -585,28 +702,72 @@ class Subscription(proto.Message): server; it is ignored if it is set in any requests. 
""" - name = proto.Field(proto.STRING, number=1,) - topic = proto.Field(proto.STRING, number=2,) - push_config = proto.Field(proto.MESSAGE, number=4, message="PushConfig",) - ack_deadline_seconds = proto.Field(proto.INT32, number=5,) - retain_acked_messages = proto.Field(proto.BOOL, number=7,) + name = proto.Field( + proto.STRING, + number=1, + ) + topic = proto.Field( + proto.STRING, + number=2, + ) + push_config = proto.Field( + proto.MESSAGE, + number=4, + message="PushConfig", + ) + ack_deadline_seconds = proto.Field( + proto.INT32, + number=5, + ) + retain_acked_messages = proto.Field( + proto.BOOL, + number=7, + ) message_retention_duration = proto.Field( - proto.MESSAGE, number=8, message=duration_pb2.Duration, + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + enable_message_ordering = proto.Field( + proto.BOOL, + number=10, ) - labels = proto.MapField(proto.STRING, proto.STRING, number=9,) - enable_message_ordering = proto.Field(proto.BOOL, number=10,) expiration_policy = proto.Field( - proto.MESSAGE, number=11, message="ExpirationPolicy", + proto.MESSAGE, + number=11, + message="ExpirationPolicy", + ) + filter = proto.Field( + proto.STRING, + number=12, ) - filter = proto.Field(proto.STRING, number=12,) dead_letter_policy = proto.Field( - proto.MESSAGE, number=13, message="DeadLetterPolicy", + proto.MESSAGE, + number=13, + message="DeadLetterPolicy", + ) + retry_policy = proto.Field( + proto.MESSAGE, + number=14, + message="RetryPolicy", + ) + detached = proto.Field( + proto.BOOL, + number=15, + ) + enable_exactly_once_delivery = proto.Field( + proto.BOOL, + number=16, ) - retry_policy = proto.Field(proto.MESSAGE, number=14, message="RetryPolicy",) - detached = proto.Field(proto.BOOL, number=15,) - enable_exactly_once_delivery = proto.Field(proto.BOOL, number=16,) topic_message_retention_duration = proto.Field( - proto.MESSAGE, number=17, 
message=duration_pb2.Duration, + proto.MESSAGE, + number=17, + message=duration_pb2.Duration, ) @@ -638,10 +799,14 @@ class RetryPolicy(proto.Message): """ minimum_backoff = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, ) maximum_backoff = proto.Field( - proto.MESSAGE, number=2, message=duration_pb2.Duration, + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, ) @@ -684,8 +849,14 @@ class DeadLetterPolicy(proto.Message): If this parameter is 0, a default value of 5 is used. """ - dead_letter_topic = proto.Field(proto.STRING, number=1,) - max_delivery_attempts = proto.Field(proto.INT32, number=2,) + dead_letter_topic = proto.Field( + proto.STRING, + number=1, + ) + max_delivery_attempts = proto.Field( + proto.INT32, + number=2, + ) class ExpirationPolicy(proto.Message): @@ -703,7 +874,11 @@ class ExpirationPolicy(proto.Message): associated resource never expires. """ - ttl = proto.Field(proto.MESSAGE, number=1, message=duration_pb2.Duration,) + ttl = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) class PushConfig(proto.Message): @@ -779,13 +954,29 @@ class OidcToken(proto.Message): will be used. 
""" - service_account_email = proto.Field(proto.STRING, number=1,) - audience = proto.Field(proto.STRING, number=2,) - - push_endpoint = proto.Field(proto.STRING, number=1,) - attributes = proto.MapField(proto.STRING, proto.STRING, number=2,) + service_account_email = proto.Field( + proto.STRING, + number=1, + ) + audience = proto.Field( + proto.STRING, + number=2, + ) + + push_endpoint = proto.Field( + proto.STRING, + number=1, + ) + attributes = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) oidc_token = proto.Field( - proto.MESSAGE, number=3, oneof="authentication_method", message=OidcToken, + proto.MESSAGE, + number=3, + oneof="authentication_method", + message=OidcToken, ) @@ -819,9 +1010,19 @@ class ReceivedMessage(proto.Message): will be 0. """ - ack_id = proto.Field(proto.STRING, number=1,) - message = proto.Field(proto.MESSAGE, number=2, message="PubsubMessage",) - delivery_attempt = proto.Field(proto.INT32, number=3,) + ack_id = proto.Field( + proto.STRING, + number=1, + ) + message = proto.Field( + proto.MESSAGE, + number=2, + message="PubsubMessage", + ) + delivery_attempt = proto.Field( + proto.INT32, + number=3, + ) class GetSubscriptionRequest(proto.Message): @@ -833,7 +1034,10 @@ class GetSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{sub}``. """ - subscription = proto.Field(proto.STRING, number=1,) + subscription = proto.Field( + proto.STRING, + number=1, + ) class UpdateSubscriptionRequest(proto.Message): @@ -848,9 +1052,15 @@ class UpdateSubscriptionRequest(proto.Message): specified and non-empty. 
""" - subscription = proto.Field(proto.MESSAGE, number=1, message="Subscription",) + subscription = proto.Field( + proto.MESSAGE, + number=1, + message="Subscription", + ) update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, ) @@ -870,9 +1080,18 @@ class ListSubscriptionsRequest(proto.Message): the system should return the next page of data. """ - project = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) + project = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) class ListSubscriptionsResponse(proto.Message): @@ -892,9 +1111,14 @@ def raw_page(self): return self subscriptions = proto.RepeatedField( - proto.MESSAGE, number=1, message="Subscription", + proto.MESSAGE, + number=1, + message="Subscription", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, ) - next_page_token = proto.Field(proto.STRING, number=2,) class DeleteSubscriptionRequest(proto.Message): @@ -906,7 +1130,10 @@ class DeleteSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{sub}``. """ - subscription = proto.Field(proto.STRING, number=1,) + subscription = proto.Field( + proto.STRING, + number=1, + ) class ModifyPushConfigRequest(proto.Message): @@ -926,8 +1153,15 @@ class ModifyPushConfigRequest(proto.Message): not called. """ - subscription = proto.Field(proto.STRING, number=1,) - push_config = proto.Field(proto.MESSAGE, number=2, message="PushConfig",) + subscription = proto.Field( + proto.STRING, + number=1, + ) + push_config = proto.Field( + proto.MESSAGE, + number=2, + message="PushConfig", + ) class PullRequest(proto.Message): @@ -954,9 +1188,18 @@ class PullRequest(proto.Message): than the number specified. 
""" - subscription = proto.Field(proto.STRING, number=1,) - return_immediately = proto.Field(proto.BOOL, number=2,) - max_messages = proto.Field(proto.INT32, number=3,) + subscription = proto.Field( + proto.STRING, + number=1, + ) + return_immediately = proto.Field( + proto.BOOL, + number=2, + ) + max_messages = proto.Field( + proto.INT32, + number=3, + ) class PullResponse(proto.Message): @@ -972,7 +1215,9 @@ class PullResponse(proto.Message): """ received_messages = proto.RepeatedField( - proto.MESSAGE, number=1, message="ReceivedMessage", + proto.MESSAGE, + number=1, + message="ReceivedMessage", ) @@ -998,9 +1243,18 @@ class ModifyAckDeadlineRequest(proto.Message): seconds (10 minutes). """ - subscription = proto.Field(proto.STRING, number=1,) - ack_ids = proto.RepeatedField(proto.STRING, number=4,) - ack_deadline_seconds = proto.Field(proto.INT32, number=3,) + subscription = proto.Field( + proto.STRING, + number=1, + ) + ack_ids = proto.RepeatedField( + proto.STRING, + number=4, + ) + ack_deadline_seconds = proto.Field( + proto.INT32, + number=3, + ) class AcknowledgeRequest(proto.Message): @@ -1017,8 +1271,14 @@ class AcknowledgeRequest(proto.Message): ``Pull`` response. Must not be empty. """ - subscription = proto.Field(proto.STRING, number=1,) - ack_ids = proto.RepeatedField(proto.STRING, number=2,) + subscription = proto.Field( + proto.STRING, + number=1, + ) + ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) class StreamingPullRequest(proto.Message): @@ -1105,14 +1365,38 @@ class StreamingPullRequest(proto.Message): ``INVALID_ARGUMENT``. 
""" - subscription = proto.Field(proto.STRING, number=1,) - ack_ids = proto.RepeatedField(proto.STRING, number=2,) - modify_deadline_seconds = proto.RepeatedField(proto.INT32, number=3,) - modify_deadline_ack_ids = proto.RepeatedField(proto.STRING, number=4,) - stream_ack_deadline_seconds = proto.Field(proto.INT32, number=5,) - client_id = proto.Field(proto.STRING, number=6,) - max_outstanding_messages = proto.Field(proto.INT64, number=7,) - max_outstanding_bytes = proto.Field(proto.INT64, number=8,) + subscription = proto.Field( + proto.STRING, + number=1, + ) + ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + modify_deadline_seconds = proto.RepeatedField( + proto.INT32, + number=3, + ) + modify_deadline_ack_ids = proto.RepeatedField( + proto.STRING, + number=4, + ) + stream_ack_deadline_seconds = proto.Field( + proto.INT32, + number=5, + ) + client_id = proto.Field( + proto.STRING, + number=6, + ) + max_outstanding_messages = proto.Field( + proto.INT64, + number=7, + ) + max_outstanding_bytes = proto.Field( + proto.INT64, + number=8, + ) class StreamingPullResponse(proto.Message): @@ -1149,9 +1433,18 @@ class AcknowledgeConfirmation(proto.Message): order. """ - ack_ids = proto.RepeatedField(proto.STRING, number=1,) - invalid_ack_ids = proto.RepeatedField(proto.STRING, number=2,) - unordered_ack_ids = proto.RepeatedField(proto.STRING, number=3,) + ack_ids = proto.RepeatedField( + proto.STRING, + number=1, + ) + invalid_ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + unordered_ack_ids = proto.RepeatedField( + proto.STRING, + number=3, + ) class ModifyAckDeadlineConfirmation(proto.Message): r"""Acknowledgement IDs sent in one or more previous requests to @@ -1166,8 +1459,14 @@ class ModifyAckDeadlineConfirmation(proto.Message): expired. 
""" - ack_ids = proto.RepeatedField(proto.STRING, number=1,) - invalid_ack_ids = proto.RepeatedField(proto.STRING, number=2,) + ack_ids = proto.RepeatedField( + proto.STRING, + number=1, + ) + invalid_ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) class SubscriptionProperties(proto.Message): r"""Subscription properties sent as part of the response. @@ -1181,20 +1480,34 @@ class SubscriptionProperties(proto.Message): subscription. """ - exactly_once_delivery_enabled = proto.Field(proto.BOOL, number=1,) - message_ordering_enabled = proto.Field(proto.BOOL, number=2,) + exactly_once_delivery_enabled = proto.Field( + proto.BOOL, + number=1, + ) + message_ordering_enabled = proto.Field( + proto.BOOL, + number=2, + ) received_messages = proto.RepeatedField( - proto.MESSAGE, number=1, message="ReceivedMessage", + proto.MESSAGE, + number=1, + message="ReceivedMessage", ) acknowledge_confirmation = proto.Field( - proto.MESSAGE, number=5, message=AcknowledgeConfirmation, + proto.MESSAGE, + number=5, + message=AcknowledgeConfirmation, ) modify_ack_deadline_confirmation = proto.Field( - proto.MESSAGE, number=3, message=ModifyAckDeadlineConfirmation, + proto.MESSAGE, + number=3, + message=ModifyAckDeadlineConfirmation, ) subscription_properties = proto.Field( - proto.MESSAGE, number=4, message=SubscriptionProperties, + proto.MESSAGE, + number=4, + message=SubscriptionProperties, ) @@ -1226,9 +1539,19 @@ class CreateSnapshotRequest(proto.Message): Creating and managing labels. """ - name = proto.Field(proto.STRING, number=1,) - subscription = proto.Field(proto.STRING, number=2,) - labels = proto.MapField(proto.STRING, proto.STRING, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + subscription = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) class UpdateSnapshotRequest(proto.Message): @@ -1243,9 +1566,15 @@ class UpdateSnapshotRequest(proto.Message): and non-empty. 
""" - snapshot = proto.Field(proto.MESSAGE, number=1, message="Snapshot",) + snapshot = proto.Field( + proto.MESSAGE, + number=1, + message="Snapshot", + ) update_mask = proto.Field( - proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, ) @@ -1281,10 +1610,24 @@ class Snapshot(proto.Message): (https://cloud.google.com/pubsub/docs/labels). """ - name = proto.Field(proto.STRING, number=1,) - topic = proto.Field(proto.STRING, number=2,) - expire_time = proto.Field(proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp,) - labels = proto.MapField(proto.STRING, proto.STRING, number=4,) + name = proto.Field( + proto.STRING, + number=1, + ) + topic = proto.Field( + proto.STRING, + number=2, + ) + expire_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) class GetSnapshotRequest(proto.Message): @@ -1296,7 +1639,10 @@ class GetSnapshotRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. """ - snapshot = proto.Field(proto.STRING, number=1,) + snapshot = proto.Field( + proto.STRING, + number=1, + ) class ListSnapshotsRequest(proto.Message): @@ -1315,9 +1661,18 @@ class ListSnapshotsRequest(proto.Message): the next page of data. 
""" - project = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) + project = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) class ListSnapshotsResponse(proto.Message): @@ -1336,8 +1691,15 @@ class ListSnapshotsResponse(proto.Message): def raw_page(self): return self - snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message="Snapshot",) - next_page_token = proto.Field(proto.STRING, number=2,) + snapshots = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Snapshot", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class DeleteSnapshotRequest(proto.Message): @@ -1349,7 +1711,10 @@ class DeleteSnapshotRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. """ - snapshot = proto.Field(proto.STRING, number=1,) + snapshot = proto.Field( + proto.STRING, + number=1, + ) class SeekRequest(proto.Message): @@ -1389,16 +1754,25 @@ class SeekRequest(proto.Message): This field is a member of `oneof`_ ``target``. """ - subscription = proto.Field(proto.STRING, number=1,) + subscription = proto.Field( + proto.STRING, + number=1, + ) time = proto.Field( - proto.MESSAGE, number=2, oneof="target", message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=2, + oneof="target", + message=timestamp_pb2.Timestamp, + ) + snapshot = proto.Field( + proto.STRING, + number=3, + oneof="target", ) - snapshot = proto.Field(proto.STRING, number=3, oneof="target",) class SeekResponse(proto.Message): - r"""Response for the ``Seek`` method (this response is empty). 
- """ + r"""Response for the ``Seek`` method (this response is empty).""" __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index b0a4d4e0a2f5..59fe3aa3e5b8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -72,9 +72,19 @@ class Type(proto.Enum): PROTOCOL_BUFFER = 1 AVRO = 2 - name = proto.Field(proto.STRING, number=1,) - type_ = proto.Field(proto.ENUM, number=2, enum=Type,) - definition = proto.Field(proto.STRING, number=3,) + name = proto.Field( + proto.STRING, + number=1, + ) + type_ = proto.Field( + proto.ENUM, + number=2, + enum=Type, + ) + definition = proto.Field( + proto.STRING, + number=3, + ) class CreateSchemaRequest(proto.Message): @@ -99,9 +109,19 @@ class CreateSchemaRequest(proto.Message): for resource name constraints. """ - parent = proto.Field(proto.STRING, number=1,) - schema = proto.Field(proto.MESSAGE, number=2, message="Schema",) - schema_id = proto.Field(proto.STRING, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + schema = proto.Field( + proto.MESSAGE, + number=2, + message="Schema", + ) + schema_id = proto.Field( + proto.STRING, + number=3, + ) class GetSchemaRequest(proto.Message): @@ -117,8 +137,15 @@ class GetSchemaRequest(proto.Message): ``definition``. Set to ``FULL`` to retrieve all fields. """ - name = proto.Field(proto.STRING, number=1,) - view = proto.Field(proto.ENUM, number=2, enum="SchemaView",) + name = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.ENUM, + number=2, + enum="SchemaView", + ) class ListSchemasRequest(proto.Message): @@ -141,10 +168,23 @@ class ListSchemasRequest(proto.Message): next page of data. 
""" - parent = proto.Field(proto.STRING, number=1,) - view = proto.Field(proto.ENUM, number=2, enum="SchemaView",) - page_size = proto.Field(proto.INT32, number=3,) - page_token = proto.Field(proto.STRING, number=4,) + parent = proto.Field( + proto.STRING, + number=1, + ) + view = proto.Field( + proto.ENUM, + number=2, + enum="SchemaView", + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + page_token = proto.Field( + proto.STRING, + number=4, + ) class ListSchemasResponse(proto.Message): @@ -163,8 +203,15 @@ class ListSchemasResponse(proto.Message): def raw_page(self): return self - schemas = proto.RepeatedField(proto.MESSAGE, number=1, message="Schema",) - next_page_token = proto.Field(proto.STRING, number=2,) + schemas = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Schema", + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) class DeleteSchemaRequest(proto.Message): @@ -176,7 +223,10 @@ class DeleteSchemaRequest(proto.Message): ``projects/{project}/schemas/{schema}``. """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class ValidateSchemaRequest(proto.Message): @@ -190,13 +240,19 @@ class ValidateSchemaRequest(proto.Message): Required. The schema object to validate. """ - parent = proto.Field(proto.STRING, number=1,) - schema = proto.Field(proto.MESSAGE, number=2, message="Schema",) + parent = proto.Field( + proto.STRING, + number=1, + ) + schema = proto.Field( + proto.MESSAGE, + number=2, + message="Schema", + ) class ValidateSchemaResponse(proto.Message): - r"""Response for the ``ValidateSchema`` method. Empty for now. - """ + r"""Response for the ``ValidateSchema`` method. 
Empty for now.""" class ValidateMessageRequest(proto.Message): @@ -229,18 +285,34 @@ class ValidateMessageRequest(proto.Message): The encoding expected for messages """ - parent = proto.Field(proto.STRING, number=1,) - name = proto.Field(proto.STRING, number=2, oneof="schema_spec",) + parent = proto.Field( + proto.STRING, + number=1, + ) + name = proto.Field( + proto.STRING, + number=2, + oneof="schema_spec", + ) schema = proto.Field( - proto.MESSAGE, number=3, oneof="schema_spec", message="Schema", + proto.MESSAGE, + number=3, + oneof="schema_spec", + message="Schema", + ) + message = proto.Field( + proto.BYTES, + number=4, + ) + encoding = proto.Field( + proto.ENUM, + number=5, + enum="Encoding", ) - message = proto.Field(proto.BYTES, number=4,) - encoding = proto.Field(proto.ENUM, number=5, enum="Encoding",) class ValidateMessageResponse(proto.Message): - r"""Response for the ``ValidateMessage`` method. Empty for now. - """ + r"""Response for the ``ValidateMessage`` method. Empty for now.""" __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index e9fea8af8493..a97d5ffb4f5f 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -24,7 +24,7 @@ import nox -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" @@ -66,7 +66,8 @@ def mypy(session): # Version 2.1.1 of google-api-core version is the first type-checked release. # Version 2.2.0 of google-cloud-core version is the first type-checked release. 
session.install( - "google-api-core[grpc]>=2.1.1", "google-cloud-core>=2.2.0", + "google-api-core[grpc]>=2.1.1", + "google-cloud-core>=2.2.0", ) # TODO: Only check the hand-written layer, the generated code does not pass @@ -106,7 +107,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *BLACK_PATHS, ) session.run("flake8", "google", "tests") @@ -116,7 +119,8 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *BLACK_PATHS, ) diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 4c808af73ea2..949e0fde9ae1 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -29,7 +29,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" # Copy `noxfile_config.py` to your directory and modify it instead. diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 1cf67ed96fad..aa6ca486fbc9 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -104,7 +104,7 @@ def test_publish_large_messages(publisher, topic_path, cleanup): # cases well. # Mind that the total PublishRequest size must still be smaller than # 10 * 1024 * 1024 bytes in order to not exceed the max request body size limit. 
- msg_data = b"x" * (2 * 10 ** 6) + msg_data = b"x" * (2 * 10**6) publisher.batch_settings = types.BatchSettings( max_bytes=11 * 1000 * 1000, # more than the server limit of 10 ** 7 diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 065b6277e96b..36527c10d0dd 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -86,7 +86,13 @@ def test__get_default_mtls_endpoint(): assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + PublisherClient, + PublisherAsyncClient, + ], +) def test_publisher_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -126,7 +132,13 @@ def test_publisher_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + PublisherClient, + PublisherAsyncClient, + ], +) def test_publisher_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -479,7 +491,9 @@ def test_publisher_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -612,10 +626,17 @@ def test_publisher_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [pubsub.Topic, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Topic, + dict, + ], +) def test_create_topic(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -626,7 +647,9 @@ def test_create_topic(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.create_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( - name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, ) response = client.create_topic(request) @@ -646,7 +669,8 @@ def test_create_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -662,7 +686,8 @@ async def test_create_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.Topic ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -699,7 +724,9 @@ async def test_create_topic_async_from_dict(): def test_create_topic_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -719,12 +746,17 @@ def test_create_topic_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -744,11 +776,16 @@ async def test_create_topic_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_create_topic_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_topic), "__call__") as call: @@ -756,7 +793,9 @@ def test_create_topic_flattened(): call.return_value = pubsub.Topic() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_topic(name="name_value",) + client.create_topic( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -768,19 +807,24 @@ def test_create_topic_flattened(): def test_create_topic_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.create_topic( - pubsub.Topic(), name="name_value", + pubsub.Topic(), + name="name_value", ) @pytest.mark.asyncio async def test_create_topic_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: @@ -790,7 +834,9 @@ async def test_create_topic_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.create_topic(name="name_value",) + response = await client.create_topic( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -803,20 +849,30 @@ async def test_create_topic_flattened_async(): @pytest.mark.asyncio async def test_create_topic_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.create_topic( - pubsub.Topic(), name="name_value", + pubsub.Topic(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [pubsub.UpdateTopicRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateTopicRequest, + dict, + ], +) def test_update_topic(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -827,7 +883,9 @@ def test_update_topic(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.update_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( - name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, ) response = client.update_topic(request) @@ -847,7 +905,8 @@ def test_update_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -863,7 +922,8 @@ async def test_update_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -900,7 +960,9 @@ async def test_update_topic_async_from_dict(): def test_update_topic_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -920,12 +982,17 @@ def test_update_topic_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic.name=topic.name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_update_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -945,13 +1012,23 @@ async def test_update_topic_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic.name=topic.name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic.name=topic.name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [pubsub.PublishRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PublishRequest, + dict, + ], +) def test_publish(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -961,7 +1038,9 @@ def test_publish(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = pubsub.PublishResponse(message_ids=["message_ids_value"],) + call.return_value = pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) response = client.publish(request) # Establish that the underlying gRPC stub method was called. @@ -978,7 +1057,8 @@ def test_publish_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -994,7 +1074,8 @@ async def test_publish_async( transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1005,7 +1086,9 @@ async def test_publish_async( with mock.patch.object(type(client.transport.publish), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.PublishResponse(message_ids=["message_ids_value"],) + pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) ) response = await client.publish(request) @@ -1025,7 +1108,9 @@ async def test_publish_async_from_dict(): def test_publish_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1045,12 +1130,17 @@ def test_publish_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_publish_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1072,11 +1162,16 @@ async def test_publish_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] def test_publish_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: @@ -1085,7 +1180,8 @@ def test_publish_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.publish( - topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")], + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], ) # Establish that the underlying call was made with the expected @@ -1101,7 +1197,9 @@ def test_publish_flattened(): def test_publish_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1115,7 +1213,9 @@ def test_publish_flattened_error(): @pytest.mark.asyncio async def test_publish_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: @@ -1128,7 +1228,8 @@ async def test_publish_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.publish( - topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")], + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], ) # Establish that the underlying call was made with the expected @@ -1145,7 +1246,9 @@ async def test_publish_flattened_async(): @pytest.mark.asyncio async def test_publish_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1157,10 +1260,17 @@ async def test_publish_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.GetTopicRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetTopicRequest, + dict, + ], +) def test_get_topic(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1171,7 +1281,9 @@ def test_get_topic(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.get_topic), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = pubsub.Topic( - name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, ) response = client.get_topic(request) @@ -1191,7 +1303,8 @@ def test_get_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1207,7 +1320,8 @@ async def test_get_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1244,7 +1358,9 @@ async def test_get_topic_async_from_dict(): def test_get_topic_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1264,12 +1380,17 @@ def test_get_topic_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1289,11 +1410,16 @@ async def test_get_topic_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] def test_get_topic_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: @@ -1301,7 +1427,9 @@ def test_get_topic_flattened(): call.return_value = pubsub.Topic() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_topic(topic="topic_value",) + client.get_topic( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1313,19 +1441,24 @@ def test_get_topic_flattened(): def test_get_topic_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_topic( - pubsub.GetTopicRequest(), topic="topic_value", + pubsub.GetTopicRequest(), + topic="topic_value", ) @pytest.mark.asyncio async def test_get_topic_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_topic), "__call__") as call: @@ -1335,7 +1468,9 @@ async def test_get_topic_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_topic(topic="topic_value",) + response = await client.get_topic( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1348,20 +1483,30 @@ async def test_get_topic_flattened_async(): @pytest.mark.asyncio async def test_get_topic_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_topic( - pubsub.GetTopicRequest(), topic="topic_value", + pubsub.GetTopicRequest(), + topic="topic_value", ) -@pytest.mark.parametrize("request_type", [pubsub.ListTopicsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicsRequest, + dict, + ], +) def test_list_topics(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1390,7 +1535,8 @@ def test_list_topics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1406,7 +1552,8 @@ async def test_list_topics_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1417,7 +1564,9 @@ async def test_list_topics_async( with mock.patch.object(type(client.transport.list_topics), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListTopicsResponse(next_page_token="next_page_token_value",) + pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_topics(request) @@ -1437,7 +1586,9 @@ async def test_list_topics_async_from_dict(): def test_list_topics_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1457,12 +1608,17 @@ def test_list_topics_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "project=project/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_topics_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1484,11 +1640,16 @@ async def test_list_topics_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "project=project/value", + ) in kw["metadata"] def test_list_topics_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1496,7 +1657,9 @@ def test_list_topics_flattened(): call.return_value = pubsub.ListTopicsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_topics(project="project_value",) + client.list_topics( + project="project_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1508,19 +1671,24 @@ def test_list_topics_flattened(): def test_list_topics_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_topics( - pubsub.ListTopicsRequest(), project="project_value", + pubsub.ListTopicsRequest(), + project="project_value", ) @pytest.mark.asyncio async def test_list_topics_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1532,7 +1700,9 @@ async def test_list_topics_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_topics(project="project_value",) + response = await client.list_topics( + project="project_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1545,19 +1715,23 @@ async def test_list_topics_flattened_async(): @pytest.mark.asyncio async def test_list_topics_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_topics( - pubsub.ListTopicsRequest(), project="project_value", + pubsub.ListTopicsRequest(), + project="project_value", ) def test_list_topics_pager(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1565,12 +1739,29 @@ def test_list_topics_pager(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicsResponse( - topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], next_page_token="abc", ), - pubsub.ListTopicsResponse(topics=[], next_page_token="def",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), RuntimeError, ) @@ -1589,7 +1780,8 @@ def test_list_topics_pager(transport_name: str = "grpc"): def test_list_topics_pages(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1597,12 +1789,29 @@ def test_list_topics_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicsResponse( - topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], next_page_token="abc", ), - pubsub.ListTopicsResponse(topics=[], next_page_token="def",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), RuntimeError, ) pages = list(client.list_topics(request={}).pages) @@ -1612,7 +1821,9 @@ def test_list_topics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topics_async_pager(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1621,15 +1832,34 @@ async def test_list_topics_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicsResponse( - topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], next_page_token="abc", ), - pubsub.ListTopicsResponse(topics=[], next_page_token="def",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), RuntimeError, ) - async_pager = await client.list_topics(request={},) + async_pager = await client.list_topics( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1641,7 +1871,9 @@ async def test_list_topics_async_pager(): @pytest.mark.asyncio async def test_list_topics_async_pages(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1650,12 +1882,29 @@ async def test_list_topics_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicsResponse( - topics=[pubsub.Topic(), pubsub.Topic(), pubsub.Topic(),], + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], next_page_token="abc", ), - pubsub.ListTopicsResponse(topics=[], next_page_token="def",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(),], next_page_token="ghi",), - pubsub.ListTopicsResponse(topics=[pubsub.Topic(), pubsub.Topic(),],), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), RuntimeError, ) pages = [] @@ -1665,10 +1914,17 @@ async def test_list_topics_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [pubsub.ListTopicSubscriptionsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSubscriptionsRequest, + dict, + ], +) def test_list_topic_subscriptions(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1701,7 +1957,8 @@ def test_list_topic_subscriptions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1719,7 +1976,8 @@ async def test_list_topic_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1756,7 +2014,9 @@ async def test_list_topic_subscriptions_async_from_dict(): def test_list_topic_subscriptions_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1778,12 +2038,17 @@ def test_list_topic_subscriptions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_topic_subscriptions_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1807,11 +2072,16 @@ async def test_list_topic_subscriptions_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] def test_list_topic_subscriptions_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1821,7 +2091,9 @@ def test_list_topic_subscriptions_flattened(): call.return_value = pubsub.ListTopicSubscriptionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_topic_subscriptions(topic="topic_value",) + client.list_topic_subscriptions( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1833,19 +2105,24 @@ def test_list_topic_subscriptions_flattened(): def test_list_topic_subscriptions_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_topic_subscriptions( - pubsub.ListTopicSubscriptionsRequest(), topic="topic_value", + pubsub.ListTopicSubscriptionsRequest(), + topic="topic_value", ) @pytest.mark.asyncio async def test_list_topic_subscriptions_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1859,7 +2136,9 @@ async def test_list_topic_subscriptions_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_topic_subscriptions(topic="topic_value",) + response = await client.list_topic_subscriptions( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1872,19 +2151,23 @@ async def test_list_topic_subscriptions_flattened_async(): @pytest.mark.asyncio async def test_list_topic_subscriptions_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_topic_subscriptions( - pubsub.ListTopicSubscriptionsRequest(), topic="topic_value", + pubsub.ListTopicSubscriptionsRequest(), + topic="topic_value", ) def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1894,15 +2177,29 @@ def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(), str(), str(),], next_page_token="abc", + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], next_page_token="def", + subscriptions=[ + str(), + ], + next_page_token="ghi", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(),], next_page_token="ghi", + subscriptions=[ + str(), + str(), + ], ), - pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), RuntimeError, ) @@ -1921,7 +2218,8 @@ def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1931,15 +2229,29 @@ def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(), str(), str(),], next_page_token="abc", + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], next_page_token="def", + subscriptions=[], + next_page_token="def", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(),], next_page_token="ghi", + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], ), - pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), RuntimeError, ) pages = list(client.list_topic_subscriptions(request={}).pages) @@ -1949,7 +2261,9 @@ def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pager(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1960,18 +2274,34 @@ async def test_list_topic_subscriptions_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(), str(), str(),], next_page_token="abc", + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], next_page_token="def", + subscriptions=[], + next_page_token="def", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(),], next_page_token="ghi", + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], ), - pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), RuntimeError, ) - async_pager = await client.list_topic_subscriptions(request={},) + async_pager = await client.list_topic_subscriptions( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1983,7 +2313,9 @@ async def test_list_topic_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pages(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1994,15 +2326,29 @@ async def test_list_topic_subscriptions_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(), str(), str(),], next_page_token="abc", + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], next_page_token="def", + subscriptions=[], + next_page_token="def", ), pubsub.ListTopicSubscriptionsResponse( - subscriptions=[str(),], next_page_token="ghi", + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], ), - pubsub.ListTopicSubscriptionsResponse(subscriptions=[str(), str(),],), RuntimeError, ) pages = [] @@ -2012,10 +2358,17 @@ async def test_list_topic_subscriptions_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [pubsub.ListTopicSnapshotsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSnapshotsRequest, + dict, + ], +) def test_list_topic_snapshots(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2028,7 +2381,8 @@ def test_list_topic_snapshots(request_type, transport: str = "grpc"): ) as call: # Designate an appropriate return value for the call. call.return_value = pubsub.ListTopicSnapshotsResponse( - snapshots=["snapshots_value"], next_page_token="next_page_token_value", + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", ) response = client.list_topic_snapshots(request) @@ -2047,7 +2401,8 @@ def test_list_topic_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2065,7 +2420,8 @@ async def test_list_topic_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2079,7 +2435,8 @@ async def test_list_topic_snapshots_async( # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( pubsub.ListTopicSnapshotsResponse( - snapshots=["snapshots_value"], next_page_token="next_page_token_value", + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", ) ) response = await client.list_topic_snapshots(request) @@ -2101,7 +2458,9 @@ async def test_list_topic_snapshots_async_from_dict(): def test_list_topic_snapshots_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2123,12 +2482,17 @@ def test_list_topic_snapshots_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_topic_snapshots_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2152,11 +2516,16 @@ async def test_list_topic_snapshots_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] def test_list_topic_snapshots_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2166,7 +2535,9 @@ def test_list_topic_snapshots_flattened(): call.return_value = pubsub.ListTopicSnapshotsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_topic_snapshots(topic="topic_value",) + client.list_topic_snapshots( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2178,19 +2549,24 @@ def test_list_topic_snapshots_flattened(): def test_list_topic_snapshots_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): client.list_topic_snapshots( - pubsub.ListTopicSnapshotsRequest(), topic="topic_value", + pubsub.ListTopicSnapshotsRequest(), + topic="topic_value", ) @pytest.mark.asyncio async def test_list_topic_snapshots_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2204,7 +2580,9 @@ async def test_list_topic_snapshots_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_topic_snapshots(topic="topic_value",) + response = await client.list_topic_snapshots( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2217,19 +2595,23 @@ async def test_list_topic_snapshots_flattened_async(): @pytest.mark.asyncio async def test_list_topic_snapshots_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_topic_snapshots( - pubsub.ListTopicSnapshotsRequest(), topic="topic_value", + pubsub.ListTopicSnapshotsRequest(), + topic="topic_value", ) def test_list_topic_snapshots_pager(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2239,13 +2621,29 @@ def test_list_topic_snapshots_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( pubsub.ListTopicSnapshotsResponse( - snapshots=[str(), str(), str(),], next_page_token="abc", + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", ), - pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListTopicSnapshotsResponse( - snapshots=[str(),], next_page_token="ghi", + snapshots=[], + next_page_token="def", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], ), - pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), RuntimeError, ) @@ -2264,7 +2662,8 @@ def test_list_topic_snapshots_pager(transport_name: str = "grpc"): def test_list_topic_snapshots_pages(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2274,13 +2673,29 @@ def test_list_topic_snapshots_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSnapshotsResponse( - snapshots=[str(), str(), str(),], next_page_token="abc", + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token="def", ), - pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListTopicSnapshotsResponse( - snapshots=[str(),], next_page_token="ghi", + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], ), - pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), RuntimeError, ) pages = list(client.list_topic_snapshots(request={}).pages) @@ -2290,7 +2705,9 @@ def test_list_topic_snapshots_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pager(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2301,16 +2718,34 @@ async def test_list_topic_snapshots_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSnapshotsResponse( - snapshots=[str(), str(), str(),], next_page_token="abc", + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", ), - pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListTopicSnapshotsResponse( - snapshots=[str(),], next_page_token="ghi", + snapshots=[], + next_page_token="def", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], ), - pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), RuntimeError, ) - async_pager = await client.list_topic_snapshots(request={},) + async_pager = await client.list_topic_snapshots( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -2322,7 +2757,9 @@ async def test_list_topic_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pages(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2333,13 +2770,29 @@ async def test_list_topic_snapshots_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListTopicSnapshotsResponse( - snapshots=[str(), str(), str(),], next_page_token="abc", + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token="def", ), - pubsub.ListTopicSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListTopicSnapshotsResponse( - snapshots=[str(),], next_page_token="ghi", + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], ), - pubsub.ListTopicSnapshotsResponse(snapshots=[str(), str(),],), RuntimeError, ) pages = [] @@ -2349,10 +2802,17 @@ async def test_list_topic_snapshots_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [pubsub.DeleteTopicRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteTopicRequest, + dict, + ], +) def test_delete_topic(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2378,7 +2838,8 @@ def test_delete_topic_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2394,7 +2855,8 @@ async def test_delete_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2422,7 +2884,9 @@ async def test_delete_topic_async_from_dict(): def test_delete_topic_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2442,12 +2906,17 @@ def test_delete_topic_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_topic_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2467,11 +2936,16 @@ async def test_delete_topic_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "topic=topic/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "topic=topic/value", + ) in kw["metadata"] def test_delete_topic_flattened(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: @@ -2479,7 +2953,9 @@ def test_delete_topic_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_topic(topic="topic_value",) + client.delete_topic( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2491,19 +2967,24 @@ def test_delete_topic_flattened(): def test_delete_topic_flattened_error(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_topic( - pubsub.DeleteTopicRequest(), topic="topic_value", + pubsub.DeleteTopicRequest(), + topic="topic_value", ) @pytest.mark.asyncio async def test_delete_topic_flattened_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: @@ -2513,7 +2994,9 @@ async def test_delete_topic_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_topic(topic="topic_value",) + response = await client.delete_topic( + topic="topic_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -2526,20 +3009,30 @@ async def test_delete_topic_flattened_async(): @pytest.mark.asyncio async def test_delete_topic_flattened_error_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_topic( - pubsub.DeleteTopicRequest(), topic="topic_value", + pubsub.DeleteTopicRequest(), + topic="topic_value", ) -@pytest.mark.parametrize("request_type", [pubsub.DetachSubscriptionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DetachSubscriptionRequest, + dict, + ], +) def test_detach_subscription(request_type, transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2567,7 +3060,8 @@ def test_detach_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2585,7 +3079,8 @@ async def test_detach_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2617,7 +3112,9 @@ async def test_detach_subscription_async_from_dict(): def test_detach_subscription_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2639,14 +3136,17 @@ def test_detach_subscription_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_detach_subscription_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2670,9 +3170,10 @@ async def test_detach_subscription_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_credentials_transport_error(): @@ -2682,7 +3183,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2702,7 +3204,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = PublisherClient(client_options=options, transport=transport,) + client = PublisherClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -2718,7 +3223,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = PublisherClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2748,7 +3254,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -2760,8 +3269,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.PublisherGrpcTransport,) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PublisherGrpcTransport, + ) def test_publisher_base_transport_error(): @@ -2817,7 +3331,8 @@ def test_publisher_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.PublisherTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2858,7 +3373,10 @@ def test_publisher_auth_adc(): @pytest.mark.parametrize( "transport_class", - [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport,], + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + ], ) def test_publisher_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use @@ -2985,7 +3503,8 @@ def test_publisher_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.PublisherGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2997,7 +3516,8 @@ def test_publisher_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.PublisherGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3104,7 +3624,8 @@ def test_schema_path(): project = "squid" schema = "clam" expected = "projects/{project}/schemas/{schema}".format( - project=project, schema=schema, + project=project, + schema=schema, ) actual = PublisherClient.schema_path(project, schema) assert expected == actual @@ -3126,7 +3647,8 @@ def test_subscription_path(): project = "oyster" subscription = "nudibranch" expected = "projects/{project}/subscriptions/{subscription}".format( - project=project, subscription=subscription, + project=project, + subscription=subscription, ) actual = PublisherClient.subscription_path(project, subscription) assert expected == actual @@ -3147,7 +3669,10 @@ def test_parse_subscription_path(): def test_topic_path(): project = "winkle" topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) actual = PublisherClient.topic_path(project, topic) assert expected == actual @@ -3186,7 +3711,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = PublisherClient.common_folder_path(folder) assert expected == actual @@ -3204,7 +3731,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = PublisherClient.common_organization_path(organization) assert expected == actual @@ -3222,7 +3751,9 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = PublisherClient.common_project_path(project) assert expected == actual @@ -3242,7 +3773,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = PublisherClient.common_location_path(project, location) assert expected == actual @@ -3267,7 +3799,8 @@ def test_client_with_default_client_info(): transports.PublisherTransport, "_prep_wrapped_messages" ) as prep: client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3276,14 +3809,16 @@ def test_client_with_default_client_info(): ) as prep: transport_class = PublisherClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) def test_set_iam_policy(transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3293,7 +3828,10 @@ def test_set_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.set_iam_policy(request) @@ -3314,7 +3852,8 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3325,7 +3864,10 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.set_iam_policy(request) @@ -3345,7 +3887,9 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3365,12 +3909,17 @@ def test_set_iam_policy_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3390,11 +3939,16 @@ async def test_set_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_set_iam_policy_from_dict(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -3411,7 +3965,9 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
@@ -3428,7 +3984,8 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3438,7 +3995,10 @@ def test_get_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.get_iam_policy(request) @@ -3459,7 +4019,8 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3470,7 +4031,10 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.get_iam_policy(request) @@ -3490,7 +4054,9 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3510,12 +4076,17 @@ def test_get_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3535,11 +4106,16 @@ async def test_get_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_get_iam_policy_from_dict(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -3556,7 +4132,9 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -3573,7 +4151,8 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3606,7 +4185,8 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3639,7 +4219,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3661,12 +4243,17 @@ def test_test_iam_permissions_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3690,11 +4277,16 @@ async def test_test_iam_permissions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_test_iam_permissions_from_dict(): - client = PublisherClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" @@ -3713,7 +4305,9 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): - client = PublisherAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" @@ -3735,7 +4329,8 @@ async def test_test_iam_permissions_from_dict_async(): @pytest.mark.asyncio async def test_transport_close_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index d166ce656c37..19a2f3ab8b85 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -89,7 +89,11 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] + "client_class", + [ + SchemaServiceClient, + SchemaServiceAsyncClient, + ], ) def test_schema_service_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() @@ -131,7 +135,11 @@ def test_schema_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [SchemaServiceClient, SchemaServiceAsyncClient,] + "client_class", + [ + SchemaServiceClient, + SchemaServiceAsyncClient, + ], ) def test_schema_service_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() @@ -495,7 +503,9 @@ def test_schema_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. 
- options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -640,10 +650,17 @@ def test_schema_service_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [gp_schema.CreateSchemaRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CreateSchemaRequest, + dict, + ], +) def test_create_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -676,7 +693,8 @@ def test_create_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -692,7 +710,8 @@ async def test_create_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -729,7 +748,9 @@ async def test_create_schema_async_from_dict(): def test_create_schema_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -749,7 +770,10 @@ def test_create_schema_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -776,11 +800,16 @@ async def test_create_schema_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_create_schema_flattened(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_schema), "__call__") as call: @@ -810,7 +839,9 @@ def test_create_schema_flattened(): def test_create_schema_flattened_error(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -875,10 +906,17 @@ async def test_create_schema_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [schema.GetSchemaRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + schema.GetSchemaRequest, + dict, + ], +) def test_get_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -911,7 +949,8 @@ def test_get_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -927,7 +966,8 @@ async def test_get_schema_async( transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -964,7 +1004,9 @@ async def test_get_schema_async_from_dict(): def test_get_schema_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -984,7 +1026,10 @@ def test_get_schema_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1011,11 +1056,16 @@ async def test_get_schema_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_get_schema_flattened(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: @@ -1023,7 +1073,9 @@ def test_get_schema_flattened(): call.return_value = schema.Schema() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_schema(name="name_value",) + client.get_schema( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1035,13 +1087,16 @@ def test_get_schema_flattened(): def test_get_schema_flattened_error(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_schema( - schema.GetSchemaRequest(), name="name_value", + schema.GetSchemaRequest(), + name="name_value", ) @@ -1059,7 +1114,9 @@ async def test_get_schema_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_schema(name="name_value",) + response = await client.get_schema( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1080,14 +1137,22 @@ async def test_get_schema_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_schema( - schema.GetSchemaRequest(), name="name_value", + schema.GetSchemaRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [schema.ListSchemasRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemasRequest, + dict, + ], +) def test_list_schemas(request_type, transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1116,7 +1181,8 @@ def test_list_schemas_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1132,7 +1198,8 @@ async def test_list_schemas_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1143,7 +1210,9 @@ async def test_list_schemas_async( with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.ListSchemasResponse(next_page_token="next_page_token_value",) + schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_schemas(request) @@ -1163,7 +1232,9 @@ async def test_list_schemas_async_from_dict(): def test_list_schemas_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1183,7 +1254,10 @@ def test_list_schemas_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1212,11 +1286,16 @@ async def test_list_schemas_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_list_schemas_flattened(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1224,7 +1303,9 @@ def test_list_schemas_flattened(): call.return_value = schema.ListSchemasResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.list_schemas(parent="parent_value",) + client.list_schemas( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1236,13 +1317,16 @@ def test_list_schemas_flattened(): def test_list_schemas_flattened_error(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_schemas( - schema.ListSchemasRequest(), parent="parent_value", + schema.ListSchemasRequest(), + parent="parent_value", ) @@ -1262,7 +1346,9 @@ async def test_list_schemas_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_schemas(parent="parent_value",) + response = await client.list_schemas( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1283,13 +1369,15 @@ async def test_list_schemas_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_schemas( - schema.ListSchemasRequest(), parent="parent_value", + schema.ListSchemasRequest(), + parent="parent_value", ) def test_list_schemas_pager(transport_name: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1297,14 +1385,29 @@ def test_list_schemas_pager(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( schema.ListSchemasResponse( - schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], next_page_token="abc", ), - schema.ListSchemasResponse(schemas=[], next_page_token="def",), schema.ListSchemasResponse( - schemas=[schema.Schema(),], next_page_token="ghi", + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], ), - schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), RuntimeError, ) @@ -1323,7 +1426,8 @@ def test_list_schemas_pager(transport_name: str = "grpc"): def test_list_schemas_pages(transport_name: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1331,14 +1435,29 @@ def test_list_schemas_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( schema.ListSchemasResponse( - schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], next_page_token="abc", ), - schema.ListSchemasResponse(schemas=[], next_page_token="def",), schema.ListSchemasResponse( - schemas=[schema.Schema(),], next_page_token="ghi", + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], ), - schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), RuntimeError, ) pages = list(client.list_schemas(request={}).pages) @@ -1348,7 +1467,9 @@ def test_list_schemas_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_schemas_async_pager(): - client = SchemaServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1357,17 +1478,34 @@ async def test_list_schemas_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( schema.ListSchemasResponse( - schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], next_page_token="abc", ), - schema.ListSchemasResponse(schemas=[], next_page_token="def",), schema.ListSchemasResponse( - schemas=[schema.Schema(),], next_page_token="ghi", + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], ), - schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), RuntimeError, ) - async_pager = await client.list_schemas(request={},) + async_pager = await client.list_schemas( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1379,7 +1517,9 @@ async def test_list_schemas_async_pager(): @pytest.mark.asyncio async def test_list_schemas_async_pages(): - client = SchemaServiceAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1388,14 +1528,29 @@ async def test_list_schemas_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( schema.ListSchemasResponse( - schemas=[schema.Schema(), schema.Schema(), schema.Schema(),], + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], next_page_token="abc", ), - schema.ListSchemasResponse(schemas=[], next_page_token="def",), schema.ListSchemasResponse( - schemas=[schema.Schema(),], next_page_token="ghi", + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], ), - schema.ListSchemasResponse(schemas=[schema.Schema(), schema.Schema(),],), RuntimeError, ) pages = [] @@ -1405,10 +1560,17 @@ async def test_list_schemas_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [schema.DeleteSchemaRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRequest, + dict, + ], +) def test_delete_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1434,7 +1596,8 @@ def test_delete_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1450,7 +1613,8 @@ async def test_delete_schema_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1478,7 +1642,9 @@ async def test_delete_schema_async_from_dict(): def test_delete_schema_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1498,7 +1664,10 @@ def test_delete_schema_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1525,11 +1694,16 @@ async def test_delete_schema_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_delete_schema_flattened(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: @@ -1537,7 +1711,9 @@ def test_delete_schema_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.delete_schema(name="name_value",) + client.delete_schema( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1549,13 +1725,16 @@ def test_delete_schema_flattened(): def test_delete_schema_flattened_error(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_schema( - schema.DeleteSchemaRequest(), name="name_value", + schema.DeleteSchemaRequest(), + name="name_value", ) @@ -1573,7 +1752,9 @@ async def test_delete_schema_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_schema(name="name_value",) + response = await client.delete_schema( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1594,14 +1775,22 @@ async def test_delete_schema_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.delete_schema( - schema.DeleteSchemaRequest(), name="name_value", + schema.DeleteSchemaRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [gp_schema.ValidateSchemaRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.ValidateSchemaRequest, + dict, + ], +) def test_validate_schema(request_type, transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1627,7 +1816,8 @@ def test_validate_schema_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1643,7 +1833,8 @@ async def test_validate_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1673,7 +1864,9 @@ async def test_validate_schema_async_from_dict(): def test_validate_schema_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -1693,7 +1886,10 @@ def test_validate_schema_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1722,11 +1918,16 @@ async def test_validate_schema_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_validate_schema_flattened(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: @@ -1735,7 +1936,8 @@ def test_validate_schema_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.validate_schema( - parent="parent_value", schema=gp_schema.Schema(name="name_value"), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -1751,7 +1953,9 @@ def test_validate_schema_flattened(): def test_validate_schema_flattened_error(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1780,7 +1984,8 @@ async def test_validate_schema_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.validate_schema( - parent="parent_value", schema=gp_schema.Schema(name="name_value"), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -1811,10 +2016,17 @@ async def test_validate_schema_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [schema.ValidateMessageRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + schema.ValidateMessageRequest, + dict, + ], +) def test_validate_message(request_type, transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1840,7 +2052,8 @@ def test_validate_message_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1856,7 +2069,8 @@ async def test_validate_message_async( transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1886,7 +2100,9 @@ async def test_validate_message_async_from_dict(): def test_validate_message_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1906,7 +2122,10 @@ def test_validate_message_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1935,7 +2154,10 @@ async def test_validate_message_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent/value", + ) in kw["metadata"] def test_credentials_transport_error(): @@ -1945,7 +2167,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. 
@@ -1965,7 +2188,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = SchemaServiceClient(client_options=options, transport=transport,) + client = SchemaServiceClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -1981,7 +2207,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SchemaServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2026,8 +2253,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.SchemaServiceGrpcTransport,) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SchemaServiceGrpcTransport, + ) def test_schema_service_base_transport_error(): @@ -2080,7 +2312,8 @@ def test_schema_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SchemaServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -2254,7 +2487,8 @@ def test_schema_service_grpc_transport_channel(): # Check that channel is used if provided. 
transport = transports.SchemaServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2266,7 +2500,8 @@ def test_schema_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. transport = transports.SchemaServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2379,7 +2614,8 @@ def test_schema_path(): project = "squid" schema = "clam" expected = "projects/{project}/schemas/{schema}".format( - project=project, schema=schema, + project=project, + schema=schema, ) actual = SchemaServiceClient.schema_path(project, schema) assert expected == actual @@ -2419,7 +2655,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = SchemaServiceClient.common_folder_path(folder) assert expected == actual @@ -2437,7 +2675,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "winkle" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = SchemaServiceClient.common_organization_path(organization) assert expected == actual @@ -2455,7 +2695,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "scallop" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = SchemaServiceClient.common_project_path(project) assert expected == actual @@ -2475,7 +2717,8 @@ def test_common_location_path(): project = "squid" location = 
"clam" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = SchemaServiceClient.common_location_path(project, location) assert expected == actual @@ -2500,7 +2743,8 @@ def test_client_with_default_client_info(): transports.SchemaServiceTransport, "_prep_wrapped_messages" ) as prep: client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -2509,14 +2753,16 @@ def test_client_with_default_client_info(): ) as prep: transport_class = SchemaServiceClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) def test_set_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2526,7 +2772,10 @@ def test_set_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.set_iam_policy(request) @@ -2547,7 +2796,8 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2558,7 +2808,10 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.set_iam_policy(request) @@ -2578,7 +2831,9 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2598,7 +2853,10 @@ def test_set_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2625,11 +2883,16 @@ async def test_set_iam_policy_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_set_iam_policy_from_dict(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -2665,7 +2928,8 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2675,7 +2939,10 @@ def test_get_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.get_iam_policy(request) @@ -2696,7 +2963,8 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2707,7 +2975,10 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.get_iam_policy(request) @@ -2727,7 +2998,9 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2747,7 +3020,10 @@ def test_get_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2774,11 +3050,16 @@ async def test_get_iam_policy_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_get_iam_policy_from_dict(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -2814,7 +3095,8 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2847,7 +3129,8 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2880,7 +3163,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2902,7 +3187,10 @@ def test_test_iam_permissions_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2933,11 +3221,16 @@ async def test_test_iam_permissions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_test_iam_permissions_from_dict(): - client = SchemaServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" @@ -2980,7 +3273,8 @@ async def test_test_iam_permissions_from_dict_async(): @pytest.mark.asyncio async def test_transport_close_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 1157ca1bdbfc..0455855de74d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -88,7 +88,13 @@ def test__get_default_mtls_endpoint(): assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + SubscriberClient, + 
SubscriberAsyncClient, + ], +) def test_subscriber_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -128,7 +134,13 @@ def test_subscriber_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient,]) +@pytest.mark.parametrize( + "client_class", + [ + SubscriberClient, + SubscriberAsyncClient, + ], +) def test_subscriber_client_from_service_account_file(client_class): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( @@ -483,7 +495,9 @@ def test_subscriber_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -616,10 +630,17 @@ def test_subscriber_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [pubsub.Subscription, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Subscription, + dict, + ], +) def test_create_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -664,7 +685,8 @@ def test_create_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -682,7 +704,8 @@ async def test_create_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.Subscription ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -731,7 +754,9 @@ async def test_create_subscription_async_from_dict(): def test_create_subscription_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -753,12 +778,17 @@ def test_create_subscription_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -780,11 +810,16 @@ async def test_create_subscription_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_create_subscription_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -820,7 +855,9 @@ def test_create_subscription_flattened(): def test_create_subscription_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -836,7 +873,9 @@ def test_create_subscription_flattened_error(): @pytest.mark.asyncio async def test_create_subscription_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -875,7 +914,9 @@ async def test_create_subscription_flattened_async(): @pytest.mark.asyncio async def test_create_subscription_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -889,10 +930,17 @@ async def test_create_subscription_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.GetSubscriptionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSubscriptionRequest, + dict, + ], +) def test_get_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -935,7 +983,8 @@ def test_get_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -951,7 +1000,8 @@ async def test_get_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -998,7 +1048,9 @@ async def test_get_subscription_async_from_dict(): def test_get_subscription_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1018,14 +1070,17 @@ def test_get_subscription_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1045,13 +1100,16 @@ async def test_get_subscription_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_get_subscription_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: @@ -1059,7 +1117,9 @@ def test_get_subscription_flattened(): call.return_value = pubsub.Subscription() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_subscription(subscription="subscription_value",) + client.get_subscription( + subscription="subscription_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -1071,19 +1131,24 @@ def test_get_subscription_flattened(): def test_get_subscription_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_subscription( - pubsub.GetSubscriptionRequest(), subscription="subscription_value", + pubsub.GetSubscriptionRequest(), + subscription="subscription_value", ) @pytest.mark.asyncio async def test_get_subscription_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: @@ -1093,7 +1158,9 @@ async def test_get_subscription_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_subscription(subscription="subscription_value",) + response = await client.get_subscription( + subscription="subscription_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1106,20 +1173,30 @@ async def test_get_subscription_flattened_async(): @pytest.mark.asyncio async def test_get_subscription_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.get_subscription( - pubsub.GetSubscriptionRequest(), subscription="subscription_value", + pubsub.GetSubscriptionRequest(), + subscription="subscription_value", ) -@pytest.mark.parametrize("request_type", [pubsub.UpdateSubscriptionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSubscriptionRequest, + dict, + ], +) def test_update_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1164,7 +1241,8 @@ def test_update_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1182,7 +1260,8 @@ async def test_update_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1231,7 +1310,9 @@ async def test_update_subscription_async_from_dict(): def test_update_subscription_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -1261,7 +1342,9 @@ def test_update_subscription_field_headers(): @pytest.mark.asyncio async def test_update_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1289,10 +1372,17 @@ async def test_update_subscription_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [pubsub.ListSubscriptionsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSubscriptionsRequest, + dict, + ], +) def test_list_subscriptions(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1323,7 +1413,8 @@ def test_list_subscriptions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1341,7 +1432,8 @@ async def test_list_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1354,7 +1446,9 @@ async def test_list_subscriptions_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListSubscriptionsResponse(next_page_token="next_page_token_value",) + pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_subscriptions(request) @@ -1374,7 +1468,9 @@ async def test_list_subscriptions_async_from_dict(): def test_list_subscriptions_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1396,12 +1492,17 @@ def test_list_subscriptions_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "project=project/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_subscriptions_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1425,11 +1526,16 @@ async def test_list_subscriptions_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "project=project/value", + ) in kw["metadata"] def test_list_subscriptions_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1439,7 +1545,9 @@ def test_list_subscriptions_flattened(): call.return_value = pubsub.ListSubscriptionsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_subscriptions(project="project_value",) + client.list_subscriptions( + project="project_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1451,19 +1559,24 @@ def test_list_subscriptions_flattened(): def test_list_subscriptions_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_subscriptions( - pubsub.ListSubscriptionsRequest(), project="project_value", + pubsub.ListSubscriptionsRequest(), + project="project_value", ) @pytest.mark.asyncio async def test_list_subscriptions_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1477,7 +1590,9 @@ async def test_list_subscriptions_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_subscriptions(project="project_value",) + response = await client.list_subscriptions( + project="project_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1490,19 +1605,23 @@ async def test_list_subscriptions_flattened_async(): @pytest.mark.asyncio async def test_list_subscriptions_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.list_subscriptions( - pubsub.ListSubscriptionsRequest(), project="project_value", + pubsub.ListSubscriptionsRequest(), + project="project_value", ) def test_list_subscriptions_pager(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1519,12 +1638,21 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", ), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], ), RuntimeError, ) @@ -1544,7 +1672,8 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): def test_list_subscriptions_pages(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1561,12 +1690,21 @@ def test_list_subscriptions_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", ), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], ), RuntimeError, ) @@ -1577,7 +1715,9 @@ def test_list_subscriptions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_subscriptions_async_pager(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1595,16 +1735,27 @@ async def test_list_subscriptions_async_pager(): ], next_page_token="abc", ), - pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", ), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], ), RuntimeError, ) - async_pager = await client.list_subscriptions(request={},) + async_pager = await client.list_subscriptions( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -1616,7 +1767,9 @@ async def test_list_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_subscriptions_async_pages(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1634,12 +1787,21 @@ async def test_list_subscriptions_async_pages(): ], next_page_token="abc", ), - pubsub.ListSubscriptionsResponse(subscriptions=[], next_page_token="def",), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(),], next_page_token="ghi", + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", ), pubsub.ListSubscriptionsResponse( - subscriptions=[pubsub.Subscription(), pubsub.Subscription(),], + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], ), RuntimeError, ) @@ -1650,10 +1812,17 @@ async def test_list_subscriptions_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [pubsub.DeleteSubscriptionRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSubscriptionRequest, + dict, + ], +) def test_delete_subscription(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1681,7 +1850,8 @@ def test_delete_subscription_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1699,7 +1869,8 @@ async def test_delete_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1729,7 +1900,9 @@ async def test_delete_subscription_async_from_dict(): def test_delete_subscription_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1751,14 +1924,17 @@ def test_delete_subscription_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_subscription_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1780,13 +1956,16 @@ async def test_delete_subscription_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_delete_subscription_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1796,7 +1975,9 @@ def test_delete_subscription_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_subscription(subscription="subscription_value",) + client.delete_subscription( + subscription="subscription_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1808,19 +1989,24 @@ def test_delete_subscription_flattened(): def test_delete_subscription_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_subscription( - pubsub.DeleteSubscriptionRequest(), subscription="subscription_value", + pubsub.DeleteSubscriptionRequest(), + subscription="subscription_value", ) @pytest.mark.asyncio async def test_delete_subscription_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1832,7 +2018,9 @@ async def test_delete_subscription_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_subscription(subscription="subscription_value",) + response = await client.delete_subscription( + subscription="subscription_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1845,20 +2033,30 @@ async def test_delete_subscription_flattened_async(): @pytest.mark.asyncio async def test_delete_subscription_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_subscription( - pubsub.DeleteSubscriptionRequest(), subscription="subscription_value", + pubsub.DeleteSubscriptionRequest(), + subscription="subscription_value", ) -@pytest.mark.parametrize("request_type", [pubsub.ModifyAckDeadlineRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyAckDeadlineRequest, + dict, + ], +) def test_modify_ack_deadline(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1886,7 +2084,8 @@ def test_modify_ack_deadline_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1904,7 +2103,8 @@ async def test_modify_ack_deadline_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1934,7 +2134,9 @@ async def test_modify_ack_deadline_async_from_dict(): def test_modify_ack_deadline_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1956,14 +2158,17 @@ def test_modify_ack_deadline_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_modify_ack_deadline_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -1985,13 +2190,16 @@ async def test_modify_ack_deadline_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_modify_ack_deadline_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2023,7 +2231,9 @@ def test_modify_ack_deadline_flattened(): def test_modify_ack_deadline_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2038,7 +2248,9 @@ def test_modify_ack_deadline_flattened_error(): @pytest.mark.asyncio async def test_modify_ack_deadline_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2073,7 +2285,9 @@ async def test_modify_ack_deadline_flattened_async(): @pytest.mark.asyncio async def test_modify_ack_deadline_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2086,10 +2300,17 @@ async def test_modify_ack_deadline_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.AcknowledgeRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.AcknowledgeRequest, + dict, + ], +) def test_acknowledge(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2115,7 +2336,8 @@ def test_acknowledge_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2131,7 +2353,8 @@ async def test_acknowledge_async( transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2159,7 +2382,9 @@ async def test_acknowledge_async_from_dict(): def test_acknowledge_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2179,14 +2404,17 @@ def test_acknowledge_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] - - + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] + + @pytest.mark.asyncio async def test_acknowledge_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2206,13 +2434,16 @@ async def test_acknowledge_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_acknowledge_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: @@ -2221,7 +2452,8 @@ def test_acknowledge_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.acknowledge( - subscription="subscription_value", ack_ids=["ack_ids_value"], + subscription="subscription_value", + ack_ids=["ack_ids_value"], ) # Establish that the underlying call was made with the expected @@ -2237,7 +2469,9 @@ def test_acknowledge_flattened(): def test_acknowledge_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2251,7 +2485,9 @@ def test_acknowledge_flattened_error(): @pytest.mark.asyncio async def test_acknowledge_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: @@ -2262,7 +2498,8 @@ async def test_acknowledge_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.acknowledge( - subscription="subscription_value", ack_ids=["ack_ids_value"], + subscription="subscription_value", + ack_ids=["ack_ids_value"], ) # Establish that the underlying call was made with the expected @@ -2279,7 +2516,9 @@ async def test_acknowledge_flattened_async(): @pytest.mark.asyncio async def test_acknowledge_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2291,10 +2530,17 @@ async def test_acknowledge_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.PullRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PullRequest, + dict, + ], +) def test_pull(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2320,7 +2566,8 @@ def test_pull_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2336,7 +2583,8 @@ async def test_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.PullRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2364,7 +2612,9 @@ async def test_pull_async_from_dict(): def test_pull_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2384,14 +2634,17 @@ def test_pull_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_pull_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2411,13 +2664,16 @@ async def test_pull_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_pull_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: @@ -2449,7 +2705,9 @@ def test_pull_flattened(): def test_pull_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2464,7 +2722,9 @@ def test_pull_flattened_error(): @pytest.mark.asyncio async def test_pull_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.pull), "__call__") as call: @@ -2499,7 +2759,9 @@ async def test_pull_flattened_async(): @pytest.mark.asyncio async def test_pull_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2512,10 +2774,17 @@ async def test_pull_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.StreamingPullRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.StreamingPullRequest, + dict, + ], +) def test_streaming_pull(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2544,7 +2813,8 @@ async def test_streaming_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2576,10 +2846,17 @@ async def test_streaming_pull_async_from_dict(): await test_streaming_pull_async(request_type=dict) -@pytest.mark.parametrize("request_type", [pubsub.ModifyPushConfigRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyPushConfigRequest, + dict, + ], +) def test_modify_push_config(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional 
in proto3 as far as the runtime is concerned, @@ -2607,7 +2884,8 @@ def test_modify_push_config_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2625,7 +2903,8 @@ async def test_modify_push_config_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2655,7 +2934,9 @@ async def test_modify_push_config_async_from_dict(): def test_modify_push_config_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2677,14 +2958,17 @@ def test_modify_push_config_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_modify_push_config_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -2706,13 +2990,16 @@ async def test_modify_push_config_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_modify_push_config_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2740,7 +3027,9 @@ def test_modify_push_config_flattened(): def test_modify_push_config_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2754,7 +3043,9 @@ def test_modify_push_config_flattened_error(): @pytest.mark.asyncio async def test_modify_push_config_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2785,7 +3076,9 @@ async def test_modify_push_config_flattened_async(): @pytest.mark.asyncio async def test_modify_push_config_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2797,10 +3090,17 @@ async def test_modify_push_config_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.GetSnapshotRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSnapshotRequest, + dict, + ], +) def test_get_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2810,7 +3110,10 @@ def test_get_snapshot(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + call.return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) response = client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -2828,7 +3131,8 @@ def test_get_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2844,7 +3148,8 @@ async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2855,7 +3160,10 @@ async def test_get_snapshot_async( with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Snapshot(name="name_value", topic="topic_value",) + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) ) response = await client.get_snapshot(request) @@ -2876,7 +3184,9 @@ async def test_get_snapshot_async_from_dict(): def test_get_snapshot_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2896,12 +3206,17 @@ def test_get_snapshot_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "snapshot=snapshot/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -2921,11 +3236,16 @@ async def test_get_snapshot_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "snapshot=snapshot/value", + ) in kw["metadata"] def test_get_snapshot_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: @@ -2933,7 +3253,9 @@ def test_get_snapshot_flattened(): call.return_value = pubsub.Snapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_snapshot(snapshot="snapshot_value",) + client.get_snapshot( + snapshot="snapshot_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2945,19 +3267,24 @@ def test_get_snapshot_flattened(): def test_get_snapshot_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_snapshot( - pubsub.GetSnapshotRequest(), snapshot="snapshot_value", + pubsub.GetSnapshotRequest(), + snapshot="snapshot_value", ) @pytest.mark.asyncio async def test_get_snapshot_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: @@ -2967,7 +3294,9 @@ async def test_get_snapshot_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_snapshot(snapshot="snapshot_value",) + response = await client.get_snapshot( + snapshot="snapshot_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2980,20 +3309,30 @@ async def test_get_snapshot_flattened_async(): @pytest.mark.asyncio async def test_get_snapshot_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.get_snapshot( - pubsub.GetSnapshotRequest(), snapshot="snapshot_value", + pubsub.GetSnapshotRequest(), + snapshot="snapshot_value", ) -@pytest.mark.parametrize("request_type", [pubsub.ListSnapshotsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSnapshotsRequest, + dict, + ], +) def test_list_snapshots(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3022,7 +3361,8 @@ def test_list_snapshots_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3038,7 +3378,8 @@ async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3049,7 +3390,9 @@ async def test_list_snapshots_async( with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListSnapshotsResponse(next_page_token="next_page_token_value",) + pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) ) response = await client.list_snapshots(request) @@ -3069,7 +3412,9 @@ async def test_list_snapshots_async_from_dict(): def test_list_snapshots_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3089,12 +3434,17 @@ def test_list_snapshots_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "project=project/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_list_snapshots_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3116,11 +3466,16 @@ async def test_list_snapshots_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "project=project/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "project=project/value", + ) in kw["metadata"] def test_list_snapshots_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3128,7 +3483,9 @@ def test_list_snapshots_flattened(): call.return_value = pubsub.ListSnapshotsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_snapshots(project="project_value",) + client.list_snapshots( + project="project_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -3140,19 +3497,24 @@ def test_list_snapshots_flattened(): def test_list_snapshots_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_snapshots( - pubsub.ListSnapshotsRequest(), project="project_value", + pubsub.ListSnapshotsRequest(), + project="project_value", ) @pytest.mark.asyncio async def test_list_snapshots_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3164,7 +3526,9 @@ async def test_list_snapshots_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_snapshots(project="project_value",) + response = await client.list_snapshots( + project="project_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3177,19 +3541,23 @@ async def test_list_snapshots_flattened_async(): @pytest.mark.asyncio async def test_list_snapshots_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): await client.list_snapshots( - pubsub.ListSnapshotsRequest(), project="project_value", + pubsub.ListSnapshotsRequest(), + project="project_value", ) def test_list_snapshots_pager(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3197,15 +3565,28 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], next_page_token="abc", ), - pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", ), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], ), RuntimeError, ) @@ -3225,7 +3606,8 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): def test_list_snapshots_pages(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3233,15 +3615,28 @@ def test_list_snapshots_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], next_page_token="abc", ), - pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + snapshots=[], + next_page_token="def", ), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], ), RuntimeError, ) @@ -3252,7 +3647,9 @@ def test_list_snapshots_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_snapshots_async_pager(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3261,19 +3658,34 @@ async def test_list_snapshots_async_pager(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], next_page_token="abc", ), - pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + snapshots=[], + next_page_token="def", ), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], ), RuntimeError, ) - async_pager = await client.list_snapshots(request={},) + async_pager = await client.list_snapshots( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] async for response in async_pager: @@ -3285,7 +3697,9 @@ async def test_list_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_snapshots_async_pages(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials,) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3294,15 +3708,28 @@ async def test_list_snapshots_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], next_page_token="abc", ), - pubsub.ListSnapshotsResponse(snapshots=[], next_page_token="def",), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(),], next_page_token="ghi", + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", ), pubsub.ListSnapshotsResponse( - snapshots=[pubsub.Snapshot(), pubsub.Snapshot(),], + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], ), RuntimeError, ) @@ -3313,10 +3740,17 @@ async def test_list_snapshots_async_pages(): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [pubsub.CreateSnapshotRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.CreateSnapshotRequest, + dict, + ], +) def test_create_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3326,7 +3760,10 @@ def test_create_snapshot(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + call.return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) response = client.create_snapshot(request) # Establish that the underlying gRPC stub method was called. 
@@ -3344,7 +3781,8 @@ def test_create_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3360,7 +3798,8 @@ async def test_create_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3371,7 +3810,10 @@ async def test_create_snapshot_async( with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Snapshot(name="name_value", topic="topic_value",) + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) ) response = await client.create_snapshot(request) @@ -3392,7 +3834,9 @@ async def test_create_snapshot_async_from_dict(): def test_create_snapshot_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3412,12 +3856,17 @@ def test_create_snapshot_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_create_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3437,11 +3886,16 @@ async def test_create_snapshot_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name/value", + ) in kw["metadata"] def test_create_snapshot_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: @@ -3450,7 +3904,8 @@ def test_create_snapshot_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_snapshot( - name="name_value", subscription="subscription_value", + name="name_value", + subscription="subscription_value", ) # Establish that the underlying call was made with the expected @@ -3466,7 +3921,9 @@ def test_create_snapshot_flattened(): def test_create_snapshot_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3480,7 +3937,9 @@ def test_create_snapshot_flattened_error(): @pytest.mark.asyncio async def test_create_snapshot_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: @@ -3491,7 +3950,8 @@ async def test_create_snapshot_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_snapshot( - name="name_value", subscription="subscription_value", + name="name_value", + subscription="subscription_value", ) # Establish that the underlying call was made with the expected @@ -3508,7 +3968,9 @@ async def test_create_snapshot_flattened_async(): @pytest.mark.asyncio async def test_create_snapshot_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -3520,10 +3982,17 @@ async def test_create_snapshot_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [pubsub.UpdateSnapshotRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSnapshotRequest, + dict, + ], +) def test_update_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3533,7 +4002,10 @@ def test_update_snapshot(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot(name="name_value", topic="topic_value",) + call.return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) response = client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. @@ -3551,7 +4023,8 @@ def test_update_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3567,7 +4040,8 @@ async def test_update_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3578,7 +4052,10 @@ async def test_update_snapshot_async( with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Snapshot(name="name_value", topic="topic_value",) + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) ) response = await client.update_snapshot(request) @@ -3599,7 +4076,9 @@ async def test_update_snapshot_async_from_dict(): def test_update_snapshot_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3619,14 +4098,17 @@ def test_update_snapshot_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "snapshot.name=snapshot.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "snapshot.name=snapshot.name/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_update_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
@@ -3646,15 +4128,23 @@ async def test_update_snapshot_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "snapshot.name=snapshot.name/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "snapshot.name=snapshot.name/value", + ) in kw["metadata"] -@pytest.mark.parametrize("request_type", [pubsub.DeleteSnapshotRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSnapshotRequest, + dict, + ], +) def test_delete_snapshot(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3680,7 +4170,8 @@ def test_delete_snapshot_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3696,7 +4187,8 @@ async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3724,7 +4216,9 @@ async def test_delete_snapshot_async_from_dict(): def test_delete_snapshot_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3744,12 +4238,17 @@ def test_delete_snapshot_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "snapshot=snapshot/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_delete_snapshot_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3769,11 +4268,16 @@ async def test_delete_snapshot_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "snapshot=snapshot/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "snapshot=snapshot/value", + ) in kw["metadata"] def test_delete_snapshot_flattened(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: @@ -3781,7 +4285,9 @@ def test_delete_snapshot_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_snapshot(snapshot="snapshot_value",) + client.delete_snapshot( + snapshot="snapshot_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3793,19 +4299,24 @@ def test_delete_snapshot_flattened(): def test_delete_snapshot_flattened_error(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_snapshot( - pubsub.DeleteSnapshotRequest(), snapshot="snapshot_value", + pubsub.DeleteSnapshotRequest(), + snapshot="snapshot_value", ) @pytest.mark.asyncio async def test_delete_snapshot_flattened_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: @@ -3815,7 +4326,9 @@ async def test_delete_snapshot_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_snapshot(snapshot="snapshot_value",) + response = await client.delete_snapshot( + snapshot="snapshot_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -3828,20 +4341,30 @@ async def test_delete_snapshot_flattened_async(): @pytest.mark.asyncio async def test_delete_snapshot_flattened_error_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): await client.delete_snapshot( - pubsub.DeleteSnapshotRequest(), snapshot="snapshot_value", + pubsub.DeleteSnapshotRequest(), + snapshot="snapshot_value", ) -@pytest.mark.parametrize("request_type", [pubsub.SeekRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + pubsub.SeekRequest, + dict, + ], +) def test_seek(request_type, transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3867,7 +4390,8 @@ def test_seek_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3883,7 +4407,8 @@ async def test_seek_async( transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -3911,7 +4436,9 @@ async def test_seek_async_from_dict(): def test_seek_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3931,14 +4458,17 @@ def test_seek_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_seek_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -3958,9 +4488,10 @@ async def test_seek_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "subscription=subscription/value",) in kw[ - "metadata" - ] + assert ( + "x-goog-request-params", + "subscription=subscription/value", + ) in kw["metadata"] def test_credentials_transport_error(): @@ -3970,7 +4501,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -3990,7 +4522,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = SubscriberClient(client_options=options, transport=transport,) + client = SubscriberClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. options = mock.Mock() @@ -4006,7 +4541,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = SubscriberClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -4036,7 +4572,10 @@ def test_transport_get_channel(): @pytest.mark.parametrize( "transport_class", - [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + ], ) def test_transport_adc(transport_class): # Test default credentials are used if not provided. @@ -4048,8 +4587,13 @@ def test_transport_adc(transport_class): def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
- client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.SubscriberGrpcTransport,) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SubscriberGrpcTransport, + ) def test_subscriber_base_transport_error(): @@ -4112,7 +4656,8 @@ def test_subscriber_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SubscriberTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( "credentials.json", @@ -4153,7 +4698,10 @@ def test_subscriber_auth_adc(): @pytest.mark.parametrize( "transport_class", - [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport,], + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + ], ) def test_subscriber_transport_auth_adc(transport_class): # If credentials and host are not provided, the transport class should use @@ -4280,7 +4828,8 @@ def test_subscriber_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.SubscriberGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -4292,7 +4841,8 @@ def test_subscriber_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.SubscriberGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -4399,7 +4949,8 @@ def test_snapshot_path(): project = "squid" snapshot = "clam" expected = "projects/{project}/snapshots/{snapshot}".format( - project=project, snapshot=snapshot, + project=project, + snapshot=snapshot, ) actual = SubscriberClient.snapshot_path(project, snapshot) assert expected == actual @@ -4421,7 +4972,8 @@ def test_subscription_path(): project = "oyster" subscription = "nudibranch" expected = "projects/{project}/subscriptions/{subscription}".format( - project=project, subscription=subscription, + project=project, + subscription=subscription, ) actual = SubscriberClient.subscription_path(project, subscription) assert expected == actual @@ -4442,7 +4994,10 @@ def test_parse_subscription_path(): def test_topic_path(): project = "winkle" topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic,) + expected = "projects/{project}/topics/{topic}".format( + project=project, + topic=topic, + ) actual = SubscriberClient.topic_path(project, topic) assert expected == actual @@ -4481,7 +5036,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = SubscriberClient.common_folder_path(folder) assert expected == actual @@ -4499,7 +5056,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = SubscriberClient.common_organization_path(organization) assert expected == actual @@ -4517,7 +5076,9 @@ def 
test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = "projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = SubscriberClient.common_project_path(project) assert expected == actual @@ -4537,7 +5098,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = SubscriberClient.common_location_path(project, location) assert expected == actual @@ -4562,7 +5124,8 @@ def test_client_with_default_client_info(): transports.SubscriberTransport, "_prep_wrapped_messages" ) as prep: client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -4571,14 +5134,16 @@ def test_client_with_default_client_info(): ) as prep: transport_class = SubscriberClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) def test_set_iam_policy(transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4588,7 +5153,10 @@ def test_set_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.set_iam_policy(request) @@ -4609,7 +5177,8 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4620,7 +5189,10 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.set_iam_policy(request) @@ -4640,7 +5212,9 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): def test_set_iam_policy_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4660,12 +5234,17 @@ def test_set_iam_policy_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4685,11 +5264,16 @@ async def test_set_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_set_iam_policy_from_dict(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -4706,7 +5290,9 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
@@ -4723,7 +5309,8 @@ async def test_set_iam_policy_from_dict_async(): def test_get_iam_policy(transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4733,7 +5320,10 @@ def test_get_iam_policy(transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + call.return_value = policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) response = client.get_iam_policy(request) @@ -4754,7 +5344,8 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4765,7 +5356,10 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) + policy_pb2.Policy( + version=774, + etag=b"etag_blob", + ) ) response = await client.get_iam_policy(request) @@ -4785,7 +5379,9 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): def test_get_iam_policy_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4805,12 +5401,17 @@ def test_get_iam_policy_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4830,11 +5431,16 @@ async def test_get_iam_policy_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_get_iam_policy_from_dict(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -4851,7 +5457,9 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. @@ -4868,7 +5476,8 @@ async def test_get_iam_policy_from_dict_async(): def test_test_iam_permissions(transport: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4901,7 +5510,8 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -4934,7 +5544,9 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): def test_test_iam_permissions_field_headers(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4956,12 +5568,17 @@ def test_test_iam_permissions_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. @@ -4985,11 +5602,16 @@ async def test_test_iam_permissions_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "resource=resource/value", + ) in kw["metadata"] def test_test_iam_permissions_from_dict(): - client = SubscriberClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" @@ -5008,7 +5630,9 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): - client = SubscriberAsyncClient(credentials=ga_credentials.AnonymousCredentials(),) + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( type(client.transport.test_iam_permissions), "__call__" @@ -5030,7 +5654,8 @@ async def test_test_iam_permissions_from_dict_async(): @pytest.mark.asyncio async def test_transport_close_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close" diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 09795d37b236..7384af2a2191 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -25,7 +25,7 @@ def create_message(): - return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) def create_client(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 486cba5f77da..ee0cfab83e4a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -24,7 +24,7 @@ def create_message(): - return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": u"baz"}) + return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) def create_client(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 20d5b328c4a3..372f53015ad2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -308,7 +308,7 @@ def test_publish_with_ordering_key_uses_extended_retry_deadline(creds): _, kwargs = batch_class.call_args batch_commit_retry = kwargs["commit_retry"] - expected_retry = custom_retry.with_deadline(2.0 ** 32) + expected_retry = custom_retry.with_deadline(2.0**32) _assert_retries_equal(batch_commit_retry, expected_retry) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index bbc6170e2496..c1de19e65cdf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -245,7 +245,13 @@ def test_retry_modacks_in_new_thread(): dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) f = futures.Future() - items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=20, future=f,)] + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=20, + future=f, + ) + ] # failure triggers creation of new retry thread manager.send_unary_modack.side_effect = [([], items)] with mock.patch("time.sleep", return_value=None): @@ -266,7 +272,13 @@ def test_retry_modacks(): dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) f = futures.Future() - items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=20, future=f,)] + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=20, + future=f, + ) + ] # first and second calls fail, third one succeeds manager.send_unary_modack.side_effect = [([], items), ([], items), (items, [])] with mock.patch("time.sleep", 
return_value=None): From 96cdb52d2fb48f2bfda0c7b964d4f9fdbe50f861 Mon Sep 17 00:00:00 2001 From: Tianzi Cai Date: Tue, 29 Mar 2022 12:09:17 -0700 Subject: [PATCH 0792/1197] samples(test): use regional publisher and remove backoff on exactly once delivery test (#628) --- .../samples/snippets/subscriber_test.py | 36 +++++++++---------- 1 file changed, 16 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index fd5d8d768508..159aa3b541a3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -57,6 +57,13 @@ def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield pubsub_v1.PublisherClient() +@pytest.fixture(scope="module") +def regional_publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: + client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} + publisher = pubsub_v1.PublisherClient(client_options=client_options) + yield publisher + + @pytest.fixture(scope="module") def topic(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC) @@ -697,33 +704,22 @@ def eventually_consistent_test() -> None: def test_receive_messages_with_exactly_once_delivery_enabled( - publisher_client: pubsub_v1.PublisherClient, + regional_publisher_client: pubsub_v1.PublisherClient, exactly_once_delivery_topic: str, subscription_eod: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), - ) + message_ids = _publish_messages(regional_publisher_client, exactly_once_delivery_topic) - @typed_backoff - def eventually_consistent_test() -> None: - message_ids = _publish_messages(publisher_client, exactly_once_delivery_topic) - - 
subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_EOD, 10 - ) - - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_eod in out - assert "Received" in out - assert "Ack" in out - for message_id in message_ids: - assert message_id in out + subscriber.receive_messages_with_exactly_once_delivery_enabled( + PROJECT_ID, SUBSCRIPTION_EOD, 10 + ) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert subscription_eod in out + for message_id in message_ids: + assert message_id in out def test_listen_for_errors( From a04f8e00e6730c6157d248d93f746d3ee16f8baf Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Tue, 29 Mar 2022 15:57:32 -0400 Subject: [PATCH 0793/1197] fix: process ErrorInfo / GRPC errors for ack/modack only when exactly-once delivery is enabled (#626) * Process EOS/GRPC errors for ack/modack only when EOS is enabled; don't retry temporary errors for these RPCS when EOS is disabled. * Add more tests for coverage * Reformat tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py * Reformat with new version of black Co-authored-by: Anthonios Partheniou Co-authored-by: Tianzi Cai --- .../_protocol/streaming_pull_manager.py | 86 ++++- .../subscriber/test_streaming_pull_manager.py | 341 +++++++++++++++++- 2 files changed, 404 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 9fb489967cfe..ae3635892f7b 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -163,7 +163,8 @@ def _process_requests( ack_reqs_dict: Dict[str, requests.AckRequest], errors_dict: Optional[Dict[str, str]], ): - """Process requests by referring to 
error_status and errors_dict. + """Process requests when exactly-once delivery is enabled by referring to + error_status and errors_dict. The errors returned by the server in as `error_status` or in `errors_dict` are used to complete the request futures in `ack_reqs_dict` (with a success @@ -599,14 +600,23 @@ def send_unary_ack( error_status = _get_status(exc) ack_errors_dict = _get_ack_errors(exc) except exceptions.RetryError as exc: - status = status_pb2.Status() - # Choose a non-retriable error code so the futures fail with - # exceptions. - status.code = code_pb2.UNKNOWN + exactly_once_delivery_enabled = self._exactly_once_delivery_enabled() # Makes sure to complete futures so they don't block forever. - _process_requests(status, ack_reqs_dict, None) + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + if exactly_once_delivery_enabled: + e = AcknowledgeError( + AcknowledgeStatus.OTHER, "RetryError while sending ack RPC." + ) + req.future.set_exception(e) + else: + req.future.set_result(AcknowledgeStatus.SUCCESS) + _LOGGER.debug( - "RetryError while sending unary RPC. Waiting on a transient " + "RetryError while sending ack RPC. Waiting on a transient " "error resolution for too long, will now trigger shutdown.", exc_info=False, ) @@ -615,9 +625,23 @@ def send_unary_ack( self._on_rpc_done(exc) raise - requests_completed, requests_to_retry = _process_requests( - error_status, ack_reqs_dict, ack_errors_dict - ) + if self._exactly_once_delivery_enabled(): + requests_completed, requests_to_retry = _process_requests( + error_status, ack_reqs_dict, ack_errors_dict + ) + else: + requests_completed = [] + requests_to_retry = [] + # When exactly-once delivery is NOT enabled, acks/modacks are considered + # best-effort. So, they always succeed even if the RPC fails. 
+ for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + req.future.set_result(AcknowledgeStatus.SUCCESS) + requests_completed.append(req) + return requests_completed, requests_to_retry def send_unary_modack( @@ -655,14 +679,24 @@ def send_unary_modack( error_status = _get_status(exc) modack_errors_dict = _get_ack_errors(exc) except exceptions.RetryError as exc: - status = status_pb2.Status() - # Choose a non-retriable error code so the futures fail with - # exceptions. - status.code = code_pb2.UNKNOWN + exactly_once_delivery_enabled = self._exactly_once_delivery_enabled() # Makes sure to complete futures so they don't block forever. - _process_requests(status, ack_reqs_dict, None) + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + if exactly_once_delivery_enabled: + e = AcknowledgeError( + AcknowledgeStatus.OTHER, + "RetryError while sending modack RPC.", + ) + req.future.set_exception(e) + else: + req.future.set_result(AcknowledgeStatus.SUCCESS) + _LOGGER.debug( - "RetryError while sending unary RPC. Waiting on a transient " + "RetryError while sending modack RPC. Waiting on a transient " "error resolution for too long, will now trigger shutdown.", exc_info=False, ) @@ -671,9 +705,23 @@ def send_unary_modack( self._on_rpc_done(exc) raise - requests_completed, requests_to_retry = _process_requests( - error_status, ack_reqs_dict, modack_errors_dict - ) + if self._exactly_once_delivery_enabled(): + requests_completed, requests_to_retry = _process_requests( + error_status, ack_reqs_dict, modack_errors_dict + ) + else: + requests_completed = [] + requests_to_retry = [] + # When exactly-once delivery is NOT enabled, acks/modacks are considered + # best-effort. 
So, they always succeed even if the RPC fails. + for req in ack_reqs_dict.values(): + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + if req.future: + req.future.set_result(AcknowledgeStatus.SUCCESS) + requests_completed.append(req) + return requests_completed, requests_to_retry def heartbeat(self) -> bool: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index e9554dedad8f..8a14609519ff 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -521,6 +521,67 @@ def test_send_unary_ack(): ) +def test_send_unary_ack_exactly_once_enabled_with_futures(): + manager = make_manager() + manager._exactly_once_enabled = True + + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) + + manager._client.acknowledge.assert_called_once_with( + subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] + ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_send_unary_ack_exactly_once_disabled_with_futures(): + manager = make_manager() + + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + 
ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + manager.send_unary_ack(ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict) + + manager._client.acknowledge.assert_called_once_with( + subscription=manager._subscription, ack_ids=["ack_id1", "ack_id2"] + ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + def test_send_unary_modack(): manager = make_manager() @@ -552,6 +613,81 @@ def test_send_unary_modack(): ) +def test_send_unary_modack_exactly_once_enabled_with_futures(): + manager = make_manager() + manager._exactly_once_enabled = True + + future1 = futures.Future() + future2 = futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=future1), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=future2), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=future3), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ack_reqs_dict=ack_reqs_dict, + ) + + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3"], + ack_deadline_seconds=10, + ), + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id4", "ack_id5"], + ack_deadline_seconds=20, + ), + ], + any_order=True, + ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_send_unary_modack_exactly_once_disabled_with_futures(): + manager = make_manager() + + future1 = futures.Future() + future2 = 
futures.Future() + future3 = futures.Future() + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=future1), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=future2), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=future3), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=[10, 20, 20], + ack_reqs_dict=ack_reqs_dict, + ) + + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3"], + ack_deadline_seconds=10, + ), + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id4", "ack_id5"], + ack_deadline_seconds=20, + ), + ], + any_order=True, + ) + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + def test_send_unary_ack_api_call_error(caplog): caplog.set_level(logging.DEBUG) @@ -606,10 +742,123 @@ def test_send_unary_modack_api_call_error(caplog): assert "The front fell off" in caplog.text -def test_send_unary_ack_retry_error(caplog): +def test_send_unary_ack_retry_error_exactly_once_disabled_no_futures(caplog): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = False + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error + + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_ack( + ack_ids=["ack_id1", 
"ack_id2"], ack_reqs_dict=ack_reqs_dict + ) + + assert "RetryError while sending ack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + +def test_send_unary_ack_retry_error_exactly_once_disabled_with_futures(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = False + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error + + future1 = futures.Future() + future2 = futures.Future() + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future1, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=future2, + ), + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) + + assert "RetryError while sending ack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert future1.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_send_unary_ack_retry_error_exactly_once_enabled_no_futures(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.acknowledge.side_effect = error + + ack_reqs_dict = { + "ack_id1": requests.AckRequest( + ack_id="ack_id1", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + "ack_id2": requests.AckRequest( + ack_id="ack_id2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + ), + } + with pytest.raises(exceptions.RetryError): + 
manager.send_unary_ack( + ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict + ) + + assert "RetryError while sending ack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + +def test_send_unary_ack_retry_error_exactly_once_enabled_with_futures(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True error = exceptions.RetryError( "Too long a transient error", cause=Exception("Out of time!") @@ -639,7 +888,7 @@ def test_send_unary_ack_retry_error(caplog): ack_ids=["ack_id1", "ack_id2"], ack_reqs_dict=ack_reqs_dict ) - assert "RetryError while sending unary RPC" in caplog.text + assert "RetryError while sending ack RPC" in caplog.text assert "signaled streaming pull manager shutdown" in caplog.text assert isinstance(future1.exception(), subscriber_exceptions.AcknowledgeError) assert ( @@ -651,10 +900,94 @@ def test_send_unary_ack_retry_error(caplog): ) -def test_send_unary_modack_retry_error(caplog): +def test_send_unary_modack_retry_error_exactly_once_disabled_no_future(caplog): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = False + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=None) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + +def test_send_unary_modack_retry_error_exactly_once_disabled_with_futures( + caplog, +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() + 
manager._exactly_once_enabled = False + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + future = futures.Future() + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=future) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + assert future.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +def test_send_unary_modack_retry_error_exactly_once_enabled_no_futures( + caplog, +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True + + error = exceptions.RetryError( + "Too long a transient error", cause=Exception("Out of time!") + ) + manager._client.modify_ack_deadline.side_effect = error + + ack_reqs_dict = { + "ackid1": requests.ModAckRequest(ack_id="ackid1", seconds=60, future=None) + } + with pytest.raises(exceptions.RetryError): + manager.send_unary_modack( + modify_deadline_ack_ids=["ackid1"], + modify_deadline_seconds=[0], + ack_reqs_dict=ack_reqs_dict, + ) + + assert "RetryError while sending modack RPC" in caplog.text + assert "signaled streaming pull manager shutdown" in caplog.text + + +def test_send_unary_modack_retry_error_exactly_once_enabled_with_futures( + caplog, +): + caplog.set_level(logging.DEBUG) + + manager, _, _, _, _, _ = make_running_manager() + manager._exactly_once_enabled = True error = exceptions.RetryError( "Too long a transient error", cause=Exception("Out of time!") @@ -672,7 +1005,7 @@ def test_send_unary_modack_retry_error(caplog): ack_reqs_dict=ack_reqs_dict, ) - assert "RetryError while sending unary RPC" in caplog.text + assert "RetryError 
while sending modack RPC" in caplog.text assert "signaled streaming pull manager shutdown" in caplog.text assert isinstance(future.exception(), subscriber_exceptions.AcknowledgeError) assert ( From b3007e22768a8e5cb74293c633ee139d51a96be4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 30 Mar 2022 16:48:16 +0000 Subject: [PATCH 0794/1197] chore(python): add E231 to .flake8 ignore list (#629) Source-Link: https://github.com/googleapis/synthtool/commit/7ff4aad2ec5af0380e8bd6da1fa06eaadf24ec81 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 --- packages/google-cloud-pubsub/.flake8 | 2 +- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 29227d4cf419..2e438749863d 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 87dd00611576..9e0a9356b6eb 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7cffbc10910c3ab1b852c05114a08d374c195a81cdec1d4a67a1d129331d0bfe + digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 From 09de20db398aa5e0d1cbc93fd753cd9babcc5abc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 01:48:13 +0000 Subject: [PATCH 0795/1197] chore(python): update .pre-commit-config.yaml to use black==22.3.0 (#631) Source-Link: https://github.com/googleapis/synthtool/commit/7804ade3daae0d66649bee8df6c55484c6580b8d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-pubsub/.pre-commit-config.yaml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 9e0a9356b6eb..22cc254afa2c 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:462782b0b492346b2d9099aaff52206dd30bc8e031ea97082e6facecc2373244 + digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d +# created: 2022-03-30T23:44:26.560599165Z diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 62eb5a77d9a3..46d237160f6d 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 From 732fdbe9bcd4a795f60bb9d971871fa3be3f2d1b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 03:04:12 +0000 Subject: [PATCH 0796/1197] chore(python): Enable size-label bot (#632) Source-Link: https://github.com/googleapis/synthtool/commit/06e82790dd719a165ad32b8a06f8f6ec3e3cae0f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.github/auto-label.yaml | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-pubsub/.github/auto-label.yaml diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 22cc254afa2c..58a0b153bf0e 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:eede5672562a32821444a8e803fb984a6f61f2237ea3de229d2de24453f4ae7d -# created: 2022-03-30T23:44:26.560599165Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/packages/google-cloud-pubsub/.github/auto-label.yaml b/packages/google-cloud-pubsub/.github/auto-label.yaml new file mode 100644 index 000000000000..09c8d735b456 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/auto-label.yaml @@ -0,0 +1,2 @@ +requestsize: + enabled: true From d22c104659fb7e9967da0bb683cd497e63e08853 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 1 Apr 2022 19:40:13 +0000 Subject: [PATCH 0797/1197] chore(python): refactor unit / system test dependency install (#634) Source-Link: https://github.com/googleapis/synthtool/commit/993985f0fc4b37152e588f0549bcbdaf34666023 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-pubsub/noxfile.py | 109 ++++++++++++++---- 2 files changed, 89 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 58a0b153bf0e..fa5762290c5b 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd +# created: 2022-04-01T15:48:07.524222836Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index a97d5ffb4f5f..59fd2a1b8719 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -20,18 +20,44 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ + "psutil", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -131,23 +157,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -171,6 +215,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -193,15 +266,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", "psutil", "-c", constraints_path - ) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: From 51baa31bd68db4b10a1c8fff620bb29c4adf2228 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 5 Apr 2022 15:03:36 -0400 Subject: [PATCH 0798/1197] Revert "chore(python): refactor unit / system test dependency install (#634)" (#637) This reverts commit 6cf785c03c7bbf62dbb8f98e5a7cbb4adf98f1bf. 
--- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-pubsub/noxfile.py | 109 ++++-------------- 2 files changed, 24 insertions(+), 89 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index fa5762290c5b..58a0b153bf0e 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1894490910e891a385484514b22eb5133578897eb5b3c380e6d8ad475c6647cd -# created: 2022-04-01T15:48:07.524222836Z + digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce +# created: 2022-04-01T01:42:03.609279246Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 59fd2a1b8719..a97d5ffb4f5f 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -20,44 +20,18 @@ import os import pathlib import shutil -import warnings import nox + BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" DEFAULT_PYTHON_VERSION = "3.8" - -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] -UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", -] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", - "pytest", - "google-cloud-testutils", -] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ - "psutil", -] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", 
"3.9", "3.10"] CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -157,41 +131,23 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -def install_unittest_dependencies(session, *constraints): - standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES - session.install(*standard_deps, *constraints) - - if UNIT_TEST_EXTERNAL_DEPENDENCIES: - warnings.warn( - "'unit_test_external_dependencies' is deprecated. Instead, please " - "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", - DeprecationWarning, - ) - session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_LOCAL_DEPENDENCIES: - session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) - - if UNIT_TEST_EXTRAS_BY_PYTHON: - extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif UNIT_TEST_EXTRAS: - extras = UNIT_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - install_unittest_dependencies(session, "-c", constraints_path) + session.install( + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", + "-c", + constraints_path, + ) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -215,35 +171,6 @@ def unit(session): default(session) -def install_systemtest_dependencies(session, *constraints): - - # Use pre-release gRPC for system tests. 
- session.install("--pre", "grpcio") - - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: - session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_LOCAL_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_DEPENDENCIES: - session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) - - if SYSTEM_TEST_EXTRAS_BY_PYTHON: - extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) - elif SYSTEM_TEST_EXTRAS: - extras = SYSTEM_TEST_EXTRAS - else: - extras = [] - - if extras: - session.install("-e", f".[{','.join(extras)}]", *constraints) - else: - session.install("-e", ".", *constraints) - - @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -266,7 +193,15 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - install_systemtest_dependencies(session, "-c", constraints_path) + # Use pre-release gRPC for system tests. + session.install("--pre", "grpcio") + + # Install all test dependencies, then install this package into the + # virtualenv's dist-packages. + session.install( + "mock", "pytest", "google-cloud-testutils", "psutil", "-c", constraints_path + ) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: From 96819e8ae3ef247172d3afbec71969f28184fc60 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Wed, 6 Apr 2022 09:15:10 -0600 Subject: [PATCH 0799/1197] chore: allow releases from previous major versions (#639) --- packages/google-cloud-pubsub/.github/release-please.yml | 9 +++++++++ packages/google-cloud-pubsub/owlbot.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.github/release-please.yml b/packages/google-cloud-pubsub/.github/release-please.yml index 466597e5b196..29601ad4692c 100644 --- a/packages/google-cloud-pubsub/.github/release-please.yml +++ b/packages/google-cloud-pubsub/.github/release-please.yml @@ -1,2 +1,11 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v1 + handleGHRelease: true + releaseType: python +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 908d3c4faa0e..af5b972d25a6 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -367,7 +367,7 @@ system_test_external_dependencies=["psutil"], ) s.move(templated_files, excludes=[".coveragerc", ".github/CODEOWNERS"]) - +python.configure_previous_major_version_branches() # ---------------------------------------------------------------------------- # Samples templates # ---------------------------------------------------------------------------- From 4590103f8bccd998e7c01b23af02c1f6f955fb86 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 13:40:48 -0400 Subject: [PATCH 0800/1197] chore(python): add license header to auto-label.yaml (#640) chore: refactor noxfile.py Source-Link: 
https://github.com/googleapis/synthtool/commit/eb78c980b52c7c6746d2edb77d9cf7aaa99a2aab Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/auto-label.yaml | 13 +++ packages/google-cloud-pubsub/noxfile.py | 109 ++++++++++++++---- 3 files changed, 102 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 58a0b153bf0e..bc893c979e20 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b3500c053313dc34e07b1632ba9e4e589f4f77036a7cf39e1fe8906811ae0fce -# created: 2022-04-01T01:42:03.609279246Z + digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 +# created: 2022-04-06T10:30:21.687684602Z diff --git a/packages/google-cloud-pubsub/.github/auto-label.yaml b/packages/google-cloud-pubsub/.github/auto-label.yaml index 09c8d735b456..41bff0b5375a 100644 --- a/packages/google-cloud-pubsub/.github/auto-label.yaml +++ b/packages/google-cloud-pubsub/.github/auto-label.yaml @@ -1,2 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. requestsize: enabled: true diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index a97d5ffb4f5f..59fd2a1b8719 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -20,18 +20,44 @@ import os import pathlib import shutil +import warnings import nox - BLACK_VERSION = "black==22.3.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ + "psutil", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -131,23 +157,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -171,6 +215,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -193,15 +266,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", "psutil", "-c", constraints_path - ) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: From 1d10bf2bd5d40337e1e1b740dc4449ca65d3c57d Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 6 Apr 2022 15:57:01 -0400 Subject: [PATCH 0801/1197] CI: add flaky to exactly-once sample test and add a non-regional test (#638) * add flaky annotation to regional_endpoint test * add new global exactly-once test --- .../samples/snippets/subscriber_test.py | 25 +++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 159aa3b541a3..708a6d1ed016 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -43,6 +43,7 @@ SUBSCRIPTION_EOD = f"subscription-test-subscription-eod-{PY_VERSION}-{UUID}" ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push" NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2" +REGIONAL_ENDPOINT = "us-east1-pubsub.googleapis.com:443" DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 UPDATED_MAX_DELIVERY_ATTEMPTS = 20 FILTER = 'attributes.author="unknown"' @@ -59,7 +60,7 @@ def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: @pytest.fixture(scope="module") def regional_publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: - client_options = {"api_endpoint": "us-east1-pubsub.googleapis.com:443"} + client_options = {"api_endpoint": REGIONAL_ENDPOINT} publisher = pubsub_v1.PublisherClient(client_options=client_options) yield publisher @@ -703,7 +704,8 @@ def eventually_consistent_test() -> None: eventually_consistent_test() -def test_receive_messages_with_exactly_once_delivery_enabled( +@typed_flaky +def test_receive_messages_with_exactly_once_delivery_enabled_regional_endpoint( regional_publisher_client: pubsub_v1.PublisherClient, exactly_once_delivery_topic: str, subscription_eod: str, @@ -722,6 
+724,25 @@ def test_receive_messages_with_exactly_once_delivery_enabled( assert message_id in out +def test_receive_messages_with_exactly_once_delivery_enabled( + publisher_client: pubsub_v1.PublisherClient, + exactly_once_delivery_topic: str, + subscription_eod: str, + capsys: CaptureFixture[str], +) -> None: + + message_ids = _publish_messages(publisher_client, exactly_once_delivery_topic) + + subscriber.receive_messages_with_exactly_once_delivery_enabled( + PROJECT_ID, SUBSCRIPTION_EOD, 10 + ) + + out, _ = capsys.readouterr() + assert subscription_eod in out + for message_id in message_ids: + assert message_id in out + + def test_listen_for_errors( publisher_client: pubsub_v1.PublisherClient, topic: str, From 91a2ca9f3a84eb96ff2a649bbb8d73215b8e3672 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 6 Apr 2022 17:47:24 -0400 Subject: [PATCH 0802/1197] chore(main): release 2.12.0 (#642) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 13 +++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bac55a5dc006..41f185ed6422 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,19 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.12.0](https://github.com/googleapis/python-pubsub/compare/v2.11.0...v2.12.0) (2022-04-06) + + +### Features + +* increase GRPC max metadata size to 4 MB ([#623](https://github.com/googleapis/python-pubsub/issues/623)) ([54b9e07](https://github.com/googleapis/python-pubsub/commit/54b9e07401b7309f16ecfe2a7afc36ea69f24a9c)) + + +### Bug Fixes + +* mypy errors ([#622](https://github.com/googleapis/python-pubsub/issues/622)) 
([dab13d5](https://github.com/googleapis/python-pubsub/commit/dab13d5fb1d723c971cd84ae20f18462e624a26d)) +* process ErrorInfo / GRPC errors for ack/modack only when exactly-once delivery is enabled ([#626](https://github.com/googleapis/python-pubsub/issues/626)) ([cc1953b](https://github.com/googleapis/python-pubsub/commit/cc1953bcf942fb394a92ba50ba615adf822bfe7d)) + ## [2.11.0](https://github.com/googleapis/python-pubsub/compare/v2.10.0...v2.11.0) (2022-03-09) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8624885b53dc..2e978e11a18b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.11.0" +version = "2.12.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From f205aad78e91aeeca3cf57b53d1057194ae32ce0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 7 Apr 2022 17:19:32 +0200 Subject: [PATCH 0803/1197] chore(deps): update dependency google-cloud-pubsub to v2.12.0 (#645) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index ac58c1298be7..38d750077474 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.11.0 +google-cloud-pubsub==2.12.0 avro==1.11.0 From 12b750703dccede6f339c8b9b2df62bda784555e Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 8 Apr 2022 13:10:45 -0400 Subject: [PATCH 0804/1197] fix: fix eod snippets test (#643) adding typed back_off --- .../samples/snippets/subscriber_test.py | 44 +++++++------------ 
1 file changed, 17 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 708a6d1ed016..5a5062564b1f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -246,7 +246,7 @@ def subscription_eod( request={ "name": subscription_path, "topic": exactly_once_delivery_topic, - "enable_exactly_once_delivery": True + "enable_exactly_once_delivery": True, } ) @@ -704,43 +704,33 @@ def eventually_consistent_test() -> None: eventually_consistent_test() -@typed_flaky -def test_receive_messages_with_exactly_once_delivery_enabled_regional_endpoint( +def test_receive_messages_with_exactly_once_delivery_enabled( regional_publisher_client: pubsub_v1.PublisherClient, exactly_once_delivery_topic: str, subscription_eod: str, capsys: CaptureFixture[str], ) -> None: - message_ids = _publish_messages(regional_publisher_client, exactly_once_delivery_topic) - - subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_EOD, 10 + typed_backoff = cast( + Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), ) - out, _ = capsys.readouterr() - assert subscription_eod in out - for message_id in message_ids: - assert message_id in out - - -def test_receive_messages_with_exactly_once_delivery_enabled( - publisher_client: pubsub_v1.PublisherClient, - exactly_once_delivery_topic: str, - subscription_eod: str, - capsys: CaptureFixture[str], -) -> None: + @typed_backoff + def eventually_consistent_test() -> None: + message_ids = _publish_messages( + regional_publisher_client, exactly_once_delivery_topic + ) - message_ids = _publish_messages(publisher_client, exactly_once_delivery_topic) + subscriber.receive_messages_with_exactly_once_delivery_enabled( + PROJECT_ID, SUBSCRIPTION_EOD, 10 + ) - 
subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_EOD, 10 - ) + out, _ = capsys.readouterr() + assert subscription_eod in out + for message_id in message_ids: + assert message_id in out - out, _ = capsys.readouterr() - assert subscription_eod in out - for message_id in message_ids: - assert message_id in out + eventually_consistent_test() def test_listen_for_errors( From 4c1be773f8d97751c1a206a35593d8bbc664ea0a Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Mon, 11 Apr 2022 13:33:32 -0400 Subject: [PATCH 0805/1197] Remove mention of obsolete --use-keywords in version 2.0 migration doc (#635) Fixes https://github.com/googleapis/python-pubsub/issues/630 Co-authored-by: meredithslota Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Tianzi Cai --- packages/google-cloud-pubsub/UPGRADING.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/google-cloud-pubsub/UPGRADING.md b/packages/google-cloud-pubsub/UPGRADING.md index 777dd7ae6963..d2d48d668f05 100644 --- a/packages/google-cloud-pubsub/UPGRADING.md +++ b/packages/google-cloud-pubsub/UPGRADING.md @@ -32,9 +32,6 @@ python3 -m pip install google-cloud-pubsub[libcst] * The script `fixup_pubsub_v1_keywords.py` is shipped with the library. It expects an input directory (with the code to convert) and an empty destination directory. -Optionally, the `--use-keywords` switch can be added to generate flattened keyword -parameters instead of a request dictionary (see the following section for an -explanation). 
```sh $ scripts/fixup_pubsub_v1_keywords.py --input-directory .samples/ --output-directory samples/ From 27735ab95f1f14b439cac5645a9dd81d63858947 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 11 Apr 2022 17:11:22 -0400 Subject: [PATCH 0806/1197] Update sync-repo-settings.yaml (#648) --- packages/google-cloud-pubsub/.github/sync-repo-settings.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index 48639e7e7f0f..c50f7a03f40e 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -13,3 +13,5 @@ branchProtectionRules: - 'Samples - Lint' - 'Samples - Python 3.7' - 'Samples - Python 3.8' + - 'Samples - Python 3.9' + - 'Samples - Python 3.10' From c670b47cf076ed6de717b6220eeeb6e6ab4baefc Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 13 Apr 2022 17:08:32 -0400 Subject: [PATCH 0807/1197] Samples(test): Extend timeout eod receive test (#653) * Samples(test): Mark eod test super_flaky and extend timeout --- .../samples/snippets/subscriber_test.py | 28 ++++++++----------- 1 file changed, 11 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 5a5062564b1f..868720fd4dbd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -51,6 +51,7 @@ C = TypeVar("C", bound=Callable[..., Any]) typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) +typed_super_flaky = cast(Callable[[C], C], flaky(max_runs=10, min_passes=10)) @pytest.fixture(scope="module") @@ -704,6 +705,7 @@ def eventually_consistent_test() -> None: 
eventually_consistent_test() +@typed_super_flaky def test_receive_messages_with_exactly_once_delivery_enabled( regional_publisher_client: pubsub_v1.PublisherClient, exactly_once_delivery_topic: str, @@ -711,26 +713,18 @@ def test_receive_messages_with_exactly_once_delivery_enabled( capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + message_ids = _publish_messages( + regional_publisher_client, exactly_once_delivery_topic ) - @typed_backoff - def eventually_consistent_test() -> None: - message_ids = _publish_messages( - regional_publisher_client, exactly_once_delivery_topic - ) - - subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_EOD, 10 - ) - - out, _ = capsys.readouterr() - assert subscription_eod in out - for message_id in message_ids: - assert message_id in out + subscriber.receive_messages_with_exactly_once_delivery_enabled( + PROJECT_ID, SUBSCRIPTION_EOD, 200 + ) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert subscription_eod in out + for message_id in message_ids: + assert message_id in out def test_listen_for_errors( From 1b73f061f679bc071e75adaa109bcbb4ba39fb7a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Apr 2022 11:11:42 -0400 Subject: [PATCH 0808/1197] chore: use gapic-generator-python 0.65.1 (#654) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.1 PiperOrigin-RevId: 441524537 Source-Link: https://github.com/googleapis/googleapis/commit/2a273915b3f70fe86c9d2a75470a0b83e48d0abf Source-Link: https://github.com/googleapis/googleapis-gen/commit/ab6756a48c89b5bcb9fb73443cb8e55d574f4643 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWI2NzU2YTQ4Yzg5YjViY2I5ZmI3MzQ0M2NiOGU1NWQ1NzRmNDY0MyJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../services/publisher/async_client.py | 9 +- .../pubsub_v1/services/publisher/client.py | 9 +- .../services/publisher/transports/base.py | 5 + .../services/publisher/transports/grpc.py | 4 + .../services/schema_service/async_client.py | 2 +- .../services/schema_service/client.py | 2 +- .../schema_service/transports/base.py | 5 + .../schema_service/transports/grpc.py | 4 + .../services/subscriber/async_client.py | 15 +- .../pubsub_v1/services/subscriber/client.py | 26 +- .../services/subscriber/transports/base.py | 5 + .../services/subscriber/transports/grpc.py | 4 + .../google/pubsub_v1/types/pubsub.py | 12 +- .../snippet_metadata_pubsub_v1.json | 2532 ++++++++++++++++- .../unit/gapic/pubsub_v1/test_publisher.py | 130 +- .../gapic/pubsub_v1/test_schema_service.py | 118 +- .../unit/gapic/pubsub_v1/test_subscriber.py | 124 +- 17 files changed, 2706 insertions(+), 300 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 1262e6abdeae..204b534fcbba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -220,7 +220,6 @@ async def create_topic( name rules] (https://cloud.google.com/pubsub/docs/admin#resource_names). - .. code-block:: python from google import pubsub_v1 @@ -329,7 +328,6 @@ async def update_topic( r"""Updates an existing topic. 
Note that certain properties of a topic are not modifiable. - .. code-block:: python from google import pubsub_v1 @@ -418,7 +416,6 @@ async def publish( r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. - .. code-block:: python from google import pubsub_v1 @@ -758,7 +755,6 @@ async def list_topic_subscriptions( r"""Lists the names of the attached subscriptions on this topic. - .. code-block:: python from google import pubsub_v1 @@ -884,7 +880,6 @@ async def list_topic_snapshots( bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1010,7 +1005,6 @@ async def delete_topic( subscriptions to this topic are not deleted, but their ``topic`` field is set to ``_deleted-topic_``. - .. code-block:: python from google import pubsub_v1 @@ -1107,7 +1101,6 @@ async def detach_subscription( the subscription is a push subscription, pushes to the endpoint will stop. - .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 7e9bdeb83a06..5dc77d2a23f6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -17,7 +17,7 @@ import functools import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib @@ -503,7 +503,6 @@ def create_topic( name rules] (https://cloud.google.com/pubsub/docs/admin#resource_names). - .. code-block:: python from google import pubsub_v1 @@ -604,7 +603,6 @@ def update_topic( r"""Updates an existing topic. 
Note that certain properties of a topic are not modifiable. - .. code-block:: python from google import pubsub_v1 @@ -686,7 +684,6 @@ def publish( r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. - .. code-block:: python from google import pubsub_v1 @@ -992,7 +989,6 @@ def list_topic_subscriptions( r"""Lists the names of the attached subscriptions on this topic. - .. code-block:: python from google import pubsub_v1 @@ -1108,7 +1104,6 @@ def list_topic_snapshots( bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1224,7 +1219,6 @@ def delete_topic( subscriptions to this topic are not deleted, but their ``topic`` field is set to ``_deleted-topic_``. - .. code-block:: python from google import pubsub_v1 @@ -1313,7 +1307,6 @@ def detach_subscription( the subscription is a push subscription, pushes to the endpoint will stop. - .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 7b3d2a6d201f..6a14ce03c36a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -86,6 +86,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -391,5 +392,9 @@ def test_iam_permissions( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("PublisherTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index a8b1db159b00..839b78d8d528 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -565,5 +565,9 @@ def test_iam_permissions( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("PublisherGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index f57b64e3811b..169d71fdfea5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index b150b2681760..f50a3a4d6bec 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import os import re -from typing import Dict, 
Optional, Sequence, Tuple, Type, Union +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core import client_options as client_options_lib diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 58a2e1265851..ff1c871438fd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -87,6 +87,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -255,5 +256,9 @@ def test_iam_permissions( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("SchemaServiceTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index c8bf265f9f7f..e8c8d811e217 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -468,5 +468,9 @@ def test_iam_permissions( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("SchemaServiceGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 3acfac4e0712..cc67b66e5d23 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Mapping, Optional, AsyncIterable, Awaitable, @@ -245,7 +246,6 @@ async def create_subscription( Note that for REST API requests, you must specify a name in the request. - .. code-block:: python from google import pubsub_v1 @@ -518,7 +518,6 @@ async def update_subscription( properties of a subscription, such as its topic, are not modifiable. - .. code-block:: python from google import pubsub_v1 @@ -729,7 +728,6 @@ async def delete_subscription( new one has no association with the old subscription or its topic unless the same topic is specified. - .. code-block:: python from google import pubsub_v1 @@ -831,7 +829,6 @@ async def modify_ack_deadline( does not modify the subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - .. code-block:: python from google import pubsub_v1 @@ -961,7 +958,6 @@ async def acknowledge( Acknowledging a message more than once will not result in an error. - .. code-block:: python from google import pubsub_v1 @@ -1071,7 +1067,6 @@ async def pull( ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given subscription. - .. code-block:: python from google import pubsub_v1 @@ -1219,7 +1214,6 @@ def streaming_pull( re-establish the stream. Flow control can be achieved by configuring the underlying RPC channel. - .. code-block:: python from google import pubsub_v1 @@ -1321,7 +1315,6 @@ async def modify_push_config( Messages will accumulate for delivery continuously through the call regardless of changes to the ``PushConfig``. - .. code-block:: python from google import pubsub_v1 @@ -1436,7 +1429,6 @@ async def get_snapshot( acknowledgment state of messages in an existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1550,7 +1542,6 @@ async def list_snapshots( bulk. 
That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1688,7 +1679,6 @@ async def create_snapshot( Note that for REST API requests, you must specify a name in the request. - .. code-block:: python from google import pubsub_v1 @@ -1826,7 +1816,6 @@ async def update_snapshot( existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1924,7 +1913,6 @@ async def delete_snapshot( no association with the old snapshot or its subscription, unless the same subscription is specified. - .. code-block:: python from google import pubsub_v1 @@ -2024,7 +2012,6 @@ async def seek( Note that both the subscription and the snapshot must be on the same topic. - .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index e208327a2569..f635b3295f2b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -17,7 +17,17 @@ import functools import os import re -from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + Optional, + Iterable, + Iterator, + Sequence, + Tuple, + Type, + Union, +) import warnings import pkg_resources @@ -518,7 +528,6 @@ def create_subscription( Note that for REST API requests, you must specify a name in the request. - .. code-block:: python from google import pubsub_v1 @@ -771,7 +780,6 @@ def update_subscription( properties of a subscription, such as its topic, are not modifiable. - .. 
code-block:: python from google import pubsub_v1 @@ -965,7 +973,6 @@ def delete_subscription( new one has no association with the old subscription or its topic unless the same topic is specified. - .. code-block:: python from google import pubsub_v1 @@ -1059,7 +1066,6 @@ def modify_ack_deadline( does not modify the subscription-level ``ackDeadlineSeconds`` used for subsequent messages. - .. code-block:: python from google import pubsub_v1 @@ -1181,7 +1187,6 @@ def acknowledge( Acknowledging a message more than once will not result in an error. - .. code-block:: python from google import pubsub_v1 @@ -1283,7 +1288,6 @@ def pull( ``UNAVAILABLE`` if there are too many concurrent pull requests pending for the given subscription. - .. code-block:: python from google import pubsub_v1 @@ -1421,7 +1425,6 @@ def streaming_pull( re-establish the stream. Flow control can be achieved by configuring the underlying RPC channel. - .. code-block:: python from google import pubsub_v1 @@ -1512,7 +1515,6 @@ def modify_push_config( Messages will accumulate for delivery continuously through the call regardless of changes to the ``PushConfig``. - .. code-block:: python from google import pubsub_v1 @@ -1619,7 +1621,6 @@ def get_snapshot( acknowledgment state of messages in an existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1723,7 +1724,6 @@ def list_snapshots( bulk. That is, you can set the acknowledgment state of messages in an existing subscription to the state captured by a snapshot. - .. code-block:: python from google import pubsub_v1 @@ -1851,7 +1851,6 @@ def create_snapshot( Note that for REST API requests, you must specify a name in the request. - .. code-block:: python from google import pubsub_v1 @@ -1981,7 +1980,6 @@ def update_snapshot( existing subscription to the state captured by a snapshot. - .. 
code-block:: python from google import pubsub_v1 @@ -2072,7 +2070,6 @@ def delete_snapshot( no association with the old snapshot or its subscription, unless the same subscription is specified. - .. code-block:: python from google import pubsub_v1 @@ -2164,7 +2161,6 @@ def seek( Note that both the subscription and the snapshot must be on the same topic. - .. code-block:: python from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 835e1984e805..eec697f16979 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -86,6 +86,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. 
if ":" not in host: host += ":443" @@ -552,5 +553,9 @@ def test_iam_permissions( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("SubscriberTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index d7f755f7eaa5..d9ebcd3a6241 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -824,5 +824,9 @@ def test_iam_permissions( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("SubscriberGrpcTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index c344dbe3df34..4c5841f5cbee 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -132,7 +132,7 @@ class Topic(proto.Message): (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. - labels (Sequence[google.pubsub_v1.types.Topic.LabelsEntry]): + labels (Mapping[str, str]): See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy): @@ -217,7 +217,7 @@ class PubsubMessage(proto.Message): The message data field. If this field is empty, the message must contain at least one attribute. - attributes (Sequence[google.pubsub_v1.types.PubsubMessage.AttributesEntry]): + attributes (Mapping[str, str]): Attributes for this message. If this field is empty, the message must contain non-empty data. 
This can be used to filter messages on the @@ -623,7 +623,7 @@ class Subscription(proto.Message): thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes. - labels (Sequence[google.pubsub_v1.types.Subscription.LabelsEntry]): + labels (Mapping[str, str]): See Creating and managing labels. @@ -891,7 +891,7 @@ class PushConfig(proto.Message): A URL locating the endpoint to which messages should be pushed. For example, a Webhook endpoint might use ``https://example.com/push``. - attributes (Sequence[google.pubsub_v1.types.PushConfig.AttributesEntry]): + attributes (Mapping[str, str]): Endpoint configuration attributes that can be used to control different aspects of the message delivery. @@ -1533,7 +1533,7 @@ class CreateSnapshotRequest(proto.Message): topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. - labels (Sequence[google.pubsub_v1.types.CreateSnapshotRequest.LabelsEntry]): + labels (Mapping[str, str]): See Creating and managing labels. @@ -1605,7 +1605,7 @@ class Snapshot(proto.Message): expire in 4 days. The service will refuse to create a snapshot that would expire in less than 1 hour after creation. - labels (Sequence[google.pubsub_v1.types.Snapshot.LabelsEntry]): + labels (Mapping[str, str]): See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). 
""" diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json index 17a34496456e..0f5906e95364 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json @@ -1,16 +1,61 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.pubsub.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-pubsub" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.create_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "CreateTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" }, + "description": "Sample for CreateTopic", "file": "pubsub_v1_generated_publisher_create_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", "segments": [ { @@ -43,18 +88,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_create_topic_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": 
"google.pubsub_v1.PublisherClient.create_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "CreateTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" }, + "description": "Sample for CreateTopic", "file": "pubsub_v1_generated_publisher_create_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", "segments": [ { @@ -87,19 +168,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_create_topic_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.delete_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "DeleteTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" }, + "description": "Sample for DeleteTopic", "file": "pubsub_v1_generated_publisher_delete_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"pubsub_v1_generated_Publisher_DeleteTopic_async", "segments": [ { @@ -130,18 +246,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_delete_topic_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.delete_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "DeleteTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" }, + "description": "Sample for DeleteTopic", "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", "segments": [ { @@ -172,19 +323,51 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_delete_topic_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.detach_subscription", "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "DetachSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + 
"type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" }, + "description": "Sample for DetachSubscription", "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", "segments": [ { @@ -217,18 +400,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.detach_subscription", "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "DetachSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" }, + "description": "Sample for DetachSubscription", "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", "segments": [ { @@ -261,19 +476,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": 
"PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.get_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "GetTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" }, + "description": "Sample for GetTopic", "file": "pubsub_v1_generated_publisher_get_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", "segments": [ { @@ -306,18 +557,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_get_topic_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.get_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "GetTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" }, + "description": "Sample for GetTopic", "file": "pubsub_v1_generated_publisher_get_topic_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", "segments": [ { @@ -350,19 +637,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_get_topic_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_snapshots", "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "ListTopicSnapshots" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", + "shortName": "list_topic_snapshots" }, + "description": "Sample for ListTopicSnapshots", "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", "segments": [ { @@ -395,18 +718,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_snapshots", "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "ListTopicSnapshots" - } + }, + "parameters": [ 
+ { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", + "shortName": "list_topic_snapshots" }, + "description": "Sample for ListTopicSnapshots", "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", "segments": [ { @@ -439,19 +798,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_subscriptions", "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "ListTopicSubscriptions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", + "shortName": "list_topic_subscriptions" }, + "description": "Sample for ListTopicSubscriptions", "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", "segments": [ { @@ -484,18 +879,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_subscriptions", "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "ListTopicSubscriptions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", + "shortName": "list_topic_subscriptions" }, + "description": "Sample for ListTopicSubscriptions", "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", "segments": [ { @@ -528,19 +959,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topics", "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "ListTopics" - } + }, + 
"parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", + "shortName": "list_topics" }, + "description": "Sample for ListTopics", "file": "pubsub_v1_generated_publisher_list_topics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", "segments": [ { @@ -573,18 +1040,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_list_topics_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topics", "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "ListTopics" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", + "shortName": "list_topics" }, + "description": "Sample for ListTopics", "file": "pubsub_v1_generated_publisher_list_topics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", "segments": [ { @@ -617,19 +1120,59 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"pubsub_v1_generated_publisher_list_topics_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.publish", "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "Publish" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" }, + "description": "Sample for Publish", "file": "pubsub_v1_generated_publisher_publish_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_Publish_async", "segments": [ { @@ -662,18 +1205,58 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_publish_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.publish", "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "Publish" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" }, + "description": "Sample for Publish", "file": "pubsub_v1_generated_publisher_publish_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", "segments": [ { @@ -706,19 +1289,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_publish_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.update_topic", "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "UpdateTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" }, + "description": "Sample for UpdateTopic", "file": "pubsub_v1_generated_publisher_update_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", "segments": [ { @@ -751,18 +1366,50 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_update_topic_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.update_topic", "method": { 
+ "fullName": "google.pubsub.v1.Publisher.UpdateTopic", "service": { + "fullName": "google.pubsub.v1.Publisher", "shortName": "Publisher" }, "shortName": "UpdateTopic" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" }, + "description": "Sample for UpdateTopic", "file": "pubsub_v1_generated_publisher_update_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", "segments": [ { @@ -795,19 +1442,63 @@ "start": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_publisher_update_topic_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "CreateSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" }, + "description": "Sample for CreateSchema", "file": 
"pubsub_v1_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", "segments": [ { @@ -840,18 +1531,62 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_create_schema_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "CreateSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" }, + "description": "Sample for CreateSchema", "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", "segments": [ { @@ -884,19 +1619,54 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", "method": { + "fullName": 
"google.pubsub.v1.SchemaService.DeleteSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "DeleteSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_schema" }, + "description": "Sample for DeleteSchema", "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", "segments": [ { @@ -927,18 +1697,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "DeleteSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_schema" }, + "description": "Sample for DeleteSchema", "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", "segments": [ { @@ -969,19 
+1774,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "GetSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" }, + "description": "Sample for GetSchema", "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", "segments": [ { @@ -1014,18 +1855,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_get_schema_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "GetSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + 
}, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" }, + "description": "Sample for GetSchema", "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", "segments": [ { @@ -1058,19 +1935,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "ListSchemas" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" }, + "description": "Sample for ListSchemas", "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", "segments": [ { @@ -1103,18 +2016,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": 
"google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "ListSchemas" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" }, + "description": "Sample for ListSchemas", "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", "segments": [ { @@ -1147,19 +2096,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_message", "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "ValidateMessage" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" }, + "description": "Sample for ValidateMessage", "file": "pubsub_v1_generated_schema_service_validate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", "segments": [ { @@ -1192,18 +2173,50 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_validate_message_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_message", "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "ValidateMessage" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" }, + "description": "Sample for ValidateMessage", "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", "segments": [ { @@ -1236,19 +2249,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_validate_message_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_schema", 
"method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "ValidateSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" }, + "description": "Sample for ValidateSchema", "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", "segments": [ { @@ -1281,18 +2334,58 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_schema", "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", "service": { + "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, "shortName": "ValidateSchema" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" }, + "description": "Sample for ValidateSchema", "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", "segments": [ { @@ -1325,19 +2418,58 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.acknowledge", "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "Acknowledge" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "acknowledge" }, + "description": "Sample for Acknowledge", "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", "segments": [ { @@ -1368,18 +2500,57 @@ "end": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.acknowledge", "method": { + "fullName": 
"google.pubsub.v1.Subscriber.Acknowledge", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "Acknowledge" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "acknowledge" }, + "description": "Sample for Acknowledge", "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", "segments": [ { @@ -1410,19 +2581,59 @@ "end": 44, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "CreateSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" }, + "description": "Sample for CreateSnapshot", "file": 
"pubsub_v1_generated_subscriber_create_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_async", "segments": [ { @@ -1455,18 +2666,58 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "CreateSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" }, + "description": "Sample for CreateSnapshot", "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", "segments": [ { @@ -1499,19 +2750,67 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", "service": { + "fullName": 
"google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "CreateSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" }, + "description": "Sample for CreateSubscription", "file": "pubsub_v1_generated_subscriber_create_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", "segments": [ { @@ -1544,18 +2843,66 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "CreateSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" }, + "description": "Sample for CreateSubscription", "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", "segments": [ { @@ -1588,19 +2935,54 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "DeleteSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_snapshot" }, + "description": "Sample for DeleteSnapshot", "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", "segments": [ { @@ -1631,18 +3013,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_snapshot", "method": 
{ + "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "DeleteSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_snapshot" }, + "description": "Sample for DeleteSnapshot", "file": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", "segments": [ { @@ -1673,19 +3090,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "DeleteSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_subscription" }, + "description": "Sample for DeleteSubscription", "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": 
"pubsub_v1_generated_Subscriber_DeleteSubscription_async", "segments": [ { @@ -1716,18 +3168,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "DeleteSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_subscription" }, + "description": "Sample for DeleteSubscription", "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", "segments": [ { @@ -1758,19 +3245,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "GetSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": 
"snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" }, + "description": "Sample for GetSnapshot", "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", "segments": [ { @@ -1803,18 +3326,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "GetSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" }, + "description": "Sample for GetSnapshot", "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", "segments": [ { @@ -1847,19 +3406,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": 
"google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "GetSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" }, + "description": "Sample for GetSubscription", "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", "segments": [ { @@ -1892,18 +3487,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "GetSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" }, + "description": "Sample for GetSubscription", "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", "segments": [ { @@ -1936,19 +3567,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_snapshots", "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ListSnapshots" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", + "shortName": "list_snapshots" }, + "description": "Sample for ListSnapshots", "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", "segments": [ { @@ -1981,18 +3648,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_snapshots", "method": { + 
"fullName": "google.pubsub.v1.Subscriber.ListSnapshots", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ListSnapshots" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", + "shortName": "list_snapshots" }, + "description": "Sample for ListSnapshots", "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", "segments": [ { @@ -2025,19 +3728,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_subscriptions", "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ListSubscriptions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", + "shortName": "list_subscriptions" }, + "description": "Sample for ListSubscriptions", 
"file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", "segments": [ { @@ -2070,18 +3809,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_subscriptions", "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ListSubscriptions" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", + "shortName": "list_subscriptions" }, + "description": "Sample for ListSubscriptions", "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", "segments": [ { @@ -2114,19 +3889,62 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_ack_deadline", "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", "service": { + 
"fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ModifyAckDeadline" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_ack_deadline" }, + "description": "Sample for ModifyAckDeadline", "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", "segments": [ { @@ -2157,18 +3975,61 @@ "end": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_ack_deadline", "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ModifyAckDeadline" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_ack_deadline" }, + "description": "Sample for ModifyAckDeadline", 
"file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", "segments": [ { @@ -2199,19 +4060,58 @@ "end": 45, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_push_config", "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ModifyPushConfig" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_push_config" }, + "description": "Sample for ModifyPushConfig", "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", "segments": [ { @@ -2242,18 +4142,57 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_push_config", "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", "service": { + "fullName": 
"google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "ModifyPushConfig" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_push_config" }, + "description": "Sample for ModifyPushConfig", "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", "segments": [ { @@ -2284,19 +4223,63 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.pull", "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "Pull" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" }, + "description": "Sample for Pull", "file": "pubsub_v1_generated_subscriber_pull_async.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", "segments": [ { @@ -2329,18 +4312,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_pull_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.pull", "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "Pull" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" }, + "description": "Sample for Pull", "file": "pubsub_v1_generated_subscriber_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_Pull_sync", "segments": [ { @@ -2373,19 +4400,51 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_pull_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.seek", "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "Seek" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" }, + "description": "Sample for Seek", "file": "pubsub_v1_generated_subscriber_seek_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", "segments": [ { @@ -2418,18 +4477,50 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_seek_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.seek", "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "Seek" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" }, + "description": "Sample for Seek", "file": "pubsub_v1_generated_subscriber_seek_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", "segments": [ { @@ -2462,19 +4553,51 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_seek_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": 
"google.pubsub_v1.SubscriberAsyncClient.streaming_pull", "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "StreamingPull" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" }, + "description": "Sample for StreamingPull", "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", "segments": [ { @@ -2507,18 +4630,50 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.streaming_pull", "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "StreamingPull" - } + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" }, + "description": "Sample for StreamingPull", "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_sync", "segments": [ { @@ -2551,19 +4706,51 @@ "start": 53, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "UpdateSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" }, + "description": "Sample for UpdateSnapshot", "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", "segments": [ { @@ -2596,18 +4783,50 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_snapshot", "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "UpdateSnapshot" - } + }, + "parameters": [ + { + "name": "request", + "type": 
"google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" }, + "description": "Sample for UpdateSnapshot", "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", "segments": [ { @@ -2640,19 +4859,51 @@ "start": 41, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "UpdateSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" }, + "description": "Sample for UpdateSubscription", "file": "pubsub_v1_generated_subscriber_update_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", "segments": [ { @@ -2685,18 +4936,50 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_async.py" }, { + "canonical": 
true, "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_subscription", "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", "service": { + "fullName": "google.pubsub.v1.Subscriber", "shortName": "Subscriber" }, "shortName": "UpdateSubscription" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" }, + "description": "Sample for UpdateSubscription", "file": "pubsub_v1_generated_subscriber_update_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", "segments": [ { @@ -2729,7 +5012,8 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_sync.py" } ] } diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 36527c10d0dd..de81d0c1360e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -87,24 +87,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - PublisherClient, - PublisherAsyncClient, + (PublisherClient, "grpc"), + (PublisherAsyncClient, "grpc_asyncio"), ], ) -def test_publisher_client_from_service_account_info(client_class): +def test_publisher_client_from_service_account_info(client_class, transport_name): creds = 
ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") @pytest.mark.parametrize( @@ -133,27 +133,31 @@ def test_publisher_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - PublisherClient, - PublisherAsyncClient, + (PublisherClient, "grpc"), + (PublisherAsyncClient, "grpc_asyncio"), ], ) -def test_publisher_client_from_service_account_file(client_class): +def test_publisher_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") def test_publisher_client_get_transport_class(): @@ -1862,7 +1866,7 @@ async def test_list_topics_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - 
async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1908,7 +1912,9 @@ async def test_list_topics_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_topics(request={})).pages: + async for page_ in ( + await client.list_topics(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2304,7 +2310,7 @@ async def test_list_topic_subscriptions_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2352,7 +2358,9 @@ async def test_list_topic_subscriptions_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_topic_subscriptions(request={})).pages: + async for page_ in ( + await client.list_topic_subscriptions(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2748,7 +2756,7 @@ async def test_list_topic_snapshots_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2796,7 +2804,9 @@ async def test_list_topic_snapshots_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_topic_snapshots(request={})).pages: + async for page_ in ( + await client.list_topic_snapshots(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -3267,6 +3277,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() 
+@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = PublisherClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = PublisherClient( @@ -3320,6 +3343,14 @@ def test_publisher_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_publisher_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -3478,24 +3509,40 @@ def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): ) -def test_publisher_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_publisher_host_no_port(transport_name): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") -def test_publisher_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_publisher_host_with_port(transport_name): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "pubsub.googleapis.com:8000" + assert client.transport._host == ("pubsub.googleapis.com:8000") def test_publisher_grpc_transport_channel(): @@ -3815,6 +3862,20 @@ def 
test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + def test_set_iam_policy(transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -3832,9 +3893,7 @@ def test_set_iam_policy(transport: str = "grpc"): version=774, etag=b"etag_blob", ) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] @@ -3862,6 +3921,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy( @@ -3869,11 +3929,9 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): etag=b"etag_blob", ) ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -3933,7 +3991,7 @@ async def test_set_iam_policy_field_headers_async(): await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -4326,20 +4384,6 @@ async def test_test_iam_permissions_from_dict_async(): call.assert_called() -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 19a2f3ab8b85..b35750e7f8c3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -89,24 +89,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - SchemaServiceClient, - SchemaServiceAsyncClient, + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), ], ) -def test_schema_service_client_from_service_account_info(client_class): +def test_schema_service_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == 
("pubsub.googleapis.com:443") @pytest.mark.parametrize( @@ -135,27 +135,31 @@ def test_schema_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - SchemaServiceClient, - SchemaServiceAsyncClient, + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), ], ) -def test_schema_service_client_from_service_account_file(client_class): +def test_schema_service_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") def test_schema_service_client_get_transport_class(): @@ -1508,7 +1512,7 @@ async def test_list_schemas_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1554,7 +1558,9 @@ async def test_list_schemas_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_schemas(request={})).pages: + async for page_ in ( + await client.list_schemas(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", 
"ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2251,6 +2257,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = SchemaServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = SchemaServiceClient( @@ -2301,6 +2320,14 @@ def test_schema_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_schema_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2462,24 +2489,40 @@ def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_cla ) -def test_schema_service_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_schema_service_host_no_port(transport_name): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") -def test_schema_service_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_schema_service_host_with_port(transport_name): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com:8000" ), + transport=transport_name, ) - assert 
client.transport._host == "pubsub.googleapis.com:8000" + assert client.transport._host == ("pubsub.googleapis.com:8000") def test_schema_service_grpc_transport_channel(): @@ -2759,6 +2802,20 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + def test_set_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -2776,9 +2833,7 @@ def test_set_iam_policy(transport: str = "grpc"): version=774, etag=b"etag_blob", ) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] @@ -2806,6 +2861,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy( @@ -2813,11 +2869,9 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): etag=b"etag_blob", ) ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -2877,7 +2931,7 @@ async def test_set_iam_policy_field_headers_async(): await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -3270,20 +3324,6 @@ async def test_test_iam_permissions_from_dict_async(): call.assert_called() -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - def test_transport_close(): transports = { "grpc": "_grpc_channel", diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 0455855de74d..d9146f11c6db 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -89,24 +89,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - SubscriberClient, - SubscriberAsyncClient, + (SubscriberClient, "grpc"), + (SubscriberAsyncClient, "grpc_asyncio"), ], ) -def test_subscriber_client_from_service_account_info(client_class): +def test_subscriber_client_from_service_account_info(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = 
client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") @pytest.mark.parametrize( @@ -135,27 +135,31 @@ def test_subscriber_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", + "client_class,transport_name", [ - SubscriberClient, - SubscriberAsyncClient, + (SubscriberClient, "grpc"), + (SubscriberAsyncClient, "grpc_asyncio"), ], ) -def test_subscriber_client_from_service_account_file(client_class): +def test_subscriber_client_from_service_account_file(client_class, transport_name): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") def test_subscriber_client_get_transport_class(): @@ -1758,7 +1762,7 @@ async def test_list_subscriptions_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1806,7 +1810,9 @@ async def test_list_subscriptions_async_pages(): RuntimeError, ) 
pages = [] - async for page_ in (await client.list_subscriptions(request={})).pages: + async for page_ in ( + await client.list_subscriptions(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -3688,7 +3694,7 @@ async def test_list_snapshots_async_pager(): ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -3734,7 +3740,9 @@ async def test_list_snapshots_async_pages(): RuntimeError, ) pages = [] - async for page_ in (await client.list_snapshots(request={})).pages: + async for page_ in ( + await client.list_snapshots(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -4585,6 +4593,19 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = SubscriberClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. 
client = SubscriberClient( @@ -4645,6 +4666,14 @@ def test_subscriber_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_subscriber_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -4803,24 +4832,40 @@ def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): ) -def test_subscriber_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_subscriber_host_no_port(transport_name): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "pubsub.googleapis.com:443" + assert client.transport._host == ("pubsub.googleapis.com:443") -def test_subscriber_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_subscriber_host_with_port(transport_name): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="pubsub.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "pubsub.googleapis.com:8000" + assert client.transport._host == ("pubsub.googleapis.com:8000") def test_subscriber_grpc_transport_channel(): @@ -5140,6 +5185,20 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object( + type(getattr(client.transport, "grpc_channel")), "close" + ) as close: + async 
with client: + close.assert_not_called() + close.assert_called_once() + + def test_set_iam_policy(transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5157,9 +5216,7 @@ def test_set_iam_policy(transport: str = "grpc"): version=774, etag=b"etag_blob", ) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] @@ -5187,6 +5244,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy_pb2.Policy( @@ -5194,11 +5252,9 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): etag=b"etag_blob", ) ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -5258,7 +5314,7 @@ async def test_set_iam_policy_field_headers_async(): await client.set_iam_policy(request) # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) + assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] assert args[0] == request @@ -5651,20 +5707,6 @@ async def test_test_iam_permissions_from_dict_async(): call.assert_called() -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - def test_transport_close(): transports = { "grpc": "_grpc_channel", From 5abfbc099ec610c7986ab684967fc3e02ffb9f22 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 14 Apr 2022 18:41:02 -0400 Subject: [PATCH 0809/1197] docs: mark eod as preview (#657) --- .../cloud/pubsub_v1/subscriber/message.py | 42 ++++++++++++------- .../samples/snippets/subscriber.py | 5 ++- 2 files changed, 31 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index ab17bab781fe..c0a2e70ea965 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -246,7 +246,10 @@ def ack(self) -> None: receive any given message more than once. If you need strong guarantees about acks and re-deliveres, enable exactly-once delivery on your subscription and use the `ack_with_response` - method instead. + method instead. Exactly once delivery is a preview feature. + For more details, see: + https://cloud.google.com/pubsub/docs/exactly-once-delivery." 
+ """ time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( @@ -268,13 +271,6 @@ def ack_with_response(self) -> "futures.Future": *finished* processing them, so that in the event of a failure, you receive the message again. - If exactly-once delivery is enabled on the subscription, the - future returned by this method tracks the state of acknowledgement - operation. If the future completes successfully, the message is - guaranteed NOT to be re-delivered. Otherwise, the future will - contain an exception with more details about the failure and the - message may be re-delivered. - If exactly-once delivery is NOT enabled on the subscription, the future returns immediately with an AcknowledgeStatus.SUCCESS. Since acks in Cloud Pub/Sub are best effort when exactly-once @@ -283,6 +279,16 @@ def ack_with_response(self) -> "futures.Future": code is idempotent, as you may receive any given message more than once. + If exactly-once delivery is enabled on the subscription, the + future returned by this method tracks the state of acknowledgement + operation. If the future completes successfully, the message is + guaranteed NOT to be re-delivered. Otherwise, the future will + contain an exception with more details about the failure and the + message may be re-delivered. + + Exactly once delivery is a preview feature. For more details, + see https://cloud.google.com/pubsub/docs/exactly-once-delivery." + Returns: A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` instance that conforms to Python Standard library's @@ -363,6 +369,12 @@ def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": if you are implementing your own custom subclass of :class:`~.pubsub_v1.subcriber._consumer.Consumer`. + If exactly-once delivery is NOT enabled on the subscription, the + future returns immediately with an AcknowledgeStatus.SUCCESS. 
+ Since modify-ack-deadline operations in Cloud Pub/Sub are best effort + when exactly-once delivery is disabled, the message may be re-delivered + within the set deadline. + If exactly-once delivery is enabled on the subscription, the future returned by this method tracks the state of the modify-ack-deadline operation. If the future completes successfully, @@ -371,11 +383,8 @@ def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": the failure and the message will be redelivered according to its currently-set ack deadline. - If exactly-once delivery is NOT enabled on the subscription, the - future returns immediately with an AcknowledgeStatus.SUCCESS. - Since modify-ack-deadline operations in Cloud Pub/Sub are best effort - when exactly-once delivery is disabled, the message may be re-delivered - within the set deadline. + Exactly once delivery is a preview feature. For more details, + see https://cloud.google.com/pubsub/docs/exactly-once-delivery." Args: seconds: @@ -434,6 +443,9 @@ def nack_with_response(self) -> "futures.Future": may take place immediately or after a delay, and may arrive at this subscriber or another. + If exactly-once delivery is NOT enabled on the subscription, the + future returns immediately with an AcknowledgeStatus.SUCCESS. + If exactly-once delivery is enabled on the subscription, the future returned by this method tracks the state of the nack operation. If the future completes successfully, @@ -441,8 +453,8 @@ def nack_with_response(self) -> "futures.Future": Otherwise, the future will contain an exception with more details about the failure. - If exactly-once delivery is NOT enabled on the subscription, the - future returns immediately with an AcknowledgeStatus.SUCCESS. + Exactly once delivery is a preview feature. For more details, + see https://cloud.google.com/pubsub/docs/exactly-once-delivery." 
Returns: A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 5a9d0a7a5c42..a642bf03a966 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -614,7 +614,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: def receive_messages_with_exactly_once_delivery_enabled( project_id: str, subscription_id: str, timeout: Optional[float] = None ) -> None: - """Receives messages from a pull subscription with exactly-once delivery enabled.""" + """Receives messages from a pull subscription with exactly-once delivery enabled. + This is a preview feature. For more details, see: + https://cloud.google.com/pubsub/docs/exactly-once-delivery." + """ # [START pubsub_subscriber_exactly_once] from concurrent.futures import TimeoutError from google.cloud import pubsub_v1 From e6460c9bc334f96be833a5bbc16f155253474d10 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 18 Apr 2022 16:09:23 -0400 Subject: [PATCH 0810/1197] Docs: fix project_path typo in UPGRADING.md (#660) --- packages/google-cloud-pubsub/UPGRADING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/UPGRADING.md b/packages/google-cloud-pubsub/UPGRADING.md index d2d48d668f05..83081c1ac69e 100644 --- a/packages/google-cloud-pubsub/UPGRADING.md +++ b/packages/google-cloud-pubsub/UPGRADING.md @@ -149,7 +149,7 @@ and now only exist in the relevant client, e.g. 
`subscriber.subscription_path()` The `project_path()` method has been removed from both the publisher and subscriber client, this path must now be constructed manually: ```py -project_path = f"project/{PROJECT_ID}" +project_path = f"projects/{PROJECT_ID}" ``` ## Removed `client_config` Parameter From 056494c339582281690233037e46d9371dbeca3f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 11:40:55 -0400 Subject: [PATCH 0811/1197] chore(python): add nox session to sort python imports (#663) * chore(python): add nox session to sort python imports Source-Link: https://github.com/googleapis/synthtool/commit/1b71c10e20de7ed3f97f692f99a0e3399b67049f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 * update replacement in owlbot.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +-- packages/google-cloud-pubsub/noxfile.py | 27 ++++++++++++++++--- packages/google-cloud-pubsub/owlbot.py | 2 +- .../samples/snippets/noxfile.py | 21 +++++++++++++++ 4 files changed, 48 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index bc893c979e20..7c454abf76f3 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8a5d3f6a2e43ed8293f34e06a2f56931d1e88a2694c3bb11b15df4eb256ad163 -# created: 2022-04-06T10:30:21.687684602Z + digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 +# created: 2022-04-20T23:42:53.970438194Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 59fd2a1b8719..914b76e5b8ab 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -25,7 +25,8 @@ import nox BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" @@ -135,7 +136,7 @@ def lint(session): session.run( "black", "--check", - *BLACK_PATHS, + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -146,7 +147,27 @@ def blacken(session): session.install(BLACK_VERSION) session.run( "black", - *BLACK_PATHS, + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index af5b972d25a6..bb73bd7e4b77 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -378,7 +378,7 @@ # ---------------------------------------------------------------------------- s.replace( "noxfile.py", - r"BLACK_PATHS = \[.*?\]", + r"LINT_PATHS = \[.*?\]", '\g<0>\n\nMYPY_VERSION = "mypy==0.910"', ) s.replace( diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 949e0fde9ae1..38bb0a572b81 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -30,6 +30,7 @@ # WARNING - WARNING - WARNING - WARNING - WARNING BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +169,32 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. 
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # From 28b7c0725a3c33969f944f27741be077a78a36e0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 21 Apr 2022 16:54:27 +0000 Subject: [PATCH 0812/1197] chore(python): use ubuntu 22.04 in docs image (#666) Source-Link: https://github.com/googleapis/synthtool/commit/f15cc72fb401b4861cedebb10af74afe428fb1f8 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.kokoro/docker/docs/Dockerfile | 20 +++++++++++++++++-- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 7c454abf76f3..64f82d6bf4bc 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:00c9d764fd1cd56265f12a5ef4b99a0c9e87cf261018099141e2ca5158890416 -# created: 2022-04-20T23:42:53.970438194Z + digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd +# created: 2022-04-21T15:43:16.246106921Z diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index 4e1b1fb8b5a5..238b87b9d1c9 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] From 441bf789f260714892998b8799e9fb70e8bf0b21 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 22 Apr 2022 15:24:15 -0400 Subject: [PATCH 0813/1197] tests: separate eod creation and receive in parallel tests (#667) * samples (tests): separate eod creation and receive in parallel tests --- .../samples/snippets/subscriber_test.py | 87 ++++++++++++++----- 1 file changed, 64 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 868720fd4dbd..d656c6ce460c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -40,7 +40,12 @@ SUBSCRIPTION_ASYNC = f"subscription-test-subscription-async-{PY_VERSION}-{UUID}" SUBSCRIPTION_SYNC = f"subscription-test-subscription-sync-{PY_VERSION}-{UUID}" SUBSCRIPTION_DLQ = f"subscription-test-subscription-dlq-{PY_VERSION}-{UUID}" -SUBSCRIPTION_EOD = f"subscription-test-subscription-eod-{PY_VERSION}-{UUID}" +SUBSCRIPTION_EOD_FOR_CREATE = ( + f"subscription-test-subscription-eod-for-create-{PY_VERSION}-{UUID}" +) +SUBSCRIPTION_EOD_FOR_RECEIVE = ( + 
f"subscription-test-subscription-eod-for-receive-{PY_VERSION}-{UUID}" +) ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push" NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2" REGIONAL_ENDPOINT = "us-east1-pubsub.googleapis.com:443" @@ -51,7 +56,6 @@ C = TypeVar("C", bound=Callable[..., Any]) typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) -typed_super_flaky = cast(Callable[[C], C], flaky(max_runs=10, min_passes=10)) @pytest.fixture(scope="module") @@ -159,7 +163,8 @@ def subscription_sync( yield subscription.name typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=300), ) @typed_backoff @@ -230,12 +235,39 @@ def subscription_dlq( @pytest.fixture(scope="module") -def subscription_eod( +def subscription_eod_for_receive( subscriber_client: pubsub_v1.SubscriberClient, exactly_once_delivery_topic: str ) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_EOD + PROJECT_ID, SUBSCRIPTION_EOD_FOR_RECEIVE + ) + + try: + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + subscription = subscriber_client.create_subscription( + request={ + "name": subscription_path, + "topic": exactly_once_delivery_topic, + "enable_exactly_once_delivery": True, + } + ) + + yield subscription.name + + subscriber_client.delete_subscription(request={"subscription": subscription.name}) + + +@pytest.fixture(scope="module") +def subscription_eod_for_create( + subscriber_client: pubsub_v1.SubscriberClient, exactly_once_delivery_topic: str +) -> Generator[str, None, None]: + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_EOD_FOR_CREATE ) try: @@ -469,11 +501,11 @@ def test_create_subscription_with_filtering( def test_create_subscription_with_exactly_once_delivery( subscriber_client: 
pubsub_v1.SubscriberClient, - subscription_eod: str, + subscription_eod_for_create: str, capsys: CaptureFixture[str], ) -> None: subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_EOD + PROJECT_ID, SUBSCRIPTION_EOD_FOR_CREATE ) try: subscriber_client.delete_subscription( @@ -483,12 +515,12 @@ def test_create_subscription_with_exactly_once_delivery( pass subscriber.create_subscription_with_exactly_once_delivery( - PROJECT_ID, EOD_TOPIC, SUBSCRIPTION_EOD + PROJECT_ID, EOD_TOPIC, SUBSCRIPTION_EOD_FOR_CREATE ) out, _ = capsys.readouterr() assert "Created subscription with exactly once delivery enabled" in out - assert f"{subscription_eod}" in out + assert f"{subscription_eod_for_create}" in out assert "enable_exactly_once_delivery: true" in out @@ -498,7 +530,8 @@ def test_create_push_subscription( capsys: CaptureFixture[str], ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=60), ) # The scope of `subscription_path` is limited to this function. 
@@ -525,11 +558,13 @@ def eventually_consistent_test() -> None: def test_update_push_suscription( - subscription_admin: str, capsys: CaptureFixture[str], + subscription_admin: str, + capsys: CaptureFixture[str], ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=60), ) @typed_backoff @@ -551,7 +586,8 @@ def test_delete_subscription( subscriber.delete_subscription(PROJECT_ID, SUBSCRIPTION_ADMIN) typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=60), ) @typed_backoff @@ -572,7 +608,8 @@ def test_receive( ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=60), ) @typed_backoff @@ -597,7 +634,8 @@ def test_receive_with_custom_attributes( ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=60), ) @typed_backoff @@ -625,7 +663,8 @@ def test_receive_with_flow_control( ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=300), ) @typed_backoff @@ -655,7 +694,8 @@ def test_receive_with_blocking_shutdown( _shut_down = re.compile(r".*done waiting.*stream shutdown.*", flags=re.IGNORECASE) typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=300), ) @typed_backoff @@ -705,11 +745,10 @@ def eventually_consistent_test() -> None: eventually_consistent_test() -@typed_super_flaky def 
test_receive_messages_with_exactly_once_delivery_enabled( regional_publisher_client: pubsub_v1.PublisherClient, exactly_once_delivery_topic: str, - subscription_eod: str, + subscription_eod_for_receive: str, capsys: CaptureFixture[str], ) -> None: @@ -718,11 +757,11 @@ def test_receive_messages_with_exactly_once_delivery_enabled( ) subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_EOD, 200 + PROJECT_ID, SUBSCRIPTION_EOD_FOR_RECEIVE, 30 ) out, _ = capsys.readouterr() - assert subscription_eod in out + assert subscription_eod_for_receive in out for message_id in message_ids: assert message_id in out @@ -735,7 +774,8 @@ def test_listen_for_errors( ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=60), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=60), ) @typed_backoff @@ -776,7 +816,8 @@ def test_receive_synchronously_with_lease( ) -> None: typed_backoff = cast( - Callable[[C], C], backoff.on_exception(backoff.expo, Unknown, max_time=300), + Callable[[C], C], + backoff.on_exception(backoff.expo, Unknown, max_time=300), ) @typed_backoff From a894e2e82d8e52f9d91fcb1fe71972d6460a12f3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 25 Apr 2022 17:04:25 +0200 Subject: [PATCH 0814/1197] chore(deps): update dependency pytest to v7.1.2 (#670) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 49d72e9e4cc0..86f54f84bd10 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ backoff==1.11.1 -pytest==7.1.1 +pytest==7.1.2 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 
08c2da973d4358b79ee023be04317c8ab000185b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 26 Apr 2022 20:03:49 +0200 Subject: [PATCH 0815/1197] chore(deps): update dependency backoff to v2 (#672) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 86f54f84bd10..e63a85c6d079 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==1.11.1 +backoff==2.0.0 pytest==7.1.2 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From 3e074193f4c8854dcf1d32133cbc38496748d568 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 27 Apr 2022 12:11:54 -0400 Subject: [PATCH 0816/1197] chore: use gapic-generator-python 0.65.2 (#673) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.65.2 PiperOrigin-RevId: 444333013 Source-Link: https://github.com/googleapis/googleapis/commit/f91b6cf82e929280f6562f6110957c654bd9e2e6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/16eb36095c294e712c74a1bf23550817b42174e5 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTZlYjM2MDk1YzI5NGU3MTJjNzRhMWJmMjM1NTA4MTdiNDIxNzRlNSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 54 ++++---- .../services/schema_service/async_client.py | 36 ++--- .../services/subscriber/async_client.py | 98 +++++++------- .../unit/gapic/pubsub_v1/test_publisher.py | 78 +++++------ .../gapic/pubsub_v1/test_schema_service.py | 50 +++---- 
.../unit/gapic/pubsub_v1/test_subscriber.py | 124 +++++++++--------- 6 files changed, 220 insertions(+), 220 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 204b534fcbba..fbe6216f0a6c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -224,9 +224,9 @@ async def create_topic( from google import pubsub_v1 - def sample_create_topic(): + async def sample_create_topic(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.Topic( @@ -234,7 +234,7 @@ def sample_create_topic(): ) # Make the request - response = client.create_topic(request=request) + response = await client.create_topic(request=request) # Handle the response print(response) @@ -332,9 +332,9 @@ async def update_topic( from google import pubsub_v1 - def sample_update_topic(): + async def sample_update_topic(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) topic = pubsub_v1.Topic() @@ -345,7 +345,7 @@ def sample_update_topic(): ) # Make the request - response = client.update_topic(request=request) + response = await client.update_topic(request=request) # Handle the response print(response) @@ -420,9 +420,9 @@ async def publish( from google import pubsub_v1 - def sample_publish(): + async def sample_publish(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.PublishRequest( @@ -430,7 +430,7 @@ def sample_publish(): ) # Make the request - response = client.publish(request=request) + response = await client.publish(request=request) # Handle the 
response print(response) @@ -536,9 +536,9 @@ async def get_topic( from google import pubsub_v1 - def sample_get_topic(): + async def sample_get_topic(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.GetTopicRequest( @@ -546,7 +546,7 @@ def sample_get_topic(): ) # Make the request - response = client.get_topic(request=request) + response = await client.get_topic(request=request) # Handle the response print(response) @@ -640,9 +640,9 @@ async def list_topics( from google import pubsub_v1 - def sample_list_topics(): + async def sample_list_topics(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.ListTopicsRequest( @@ -653,7 +653,7 @@ def sample_list_topics(): page_result = client.list_topics(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -759,9 +759,9 @@ async def list_topic_subscriptions( from google import pubsub_v1 - def sample_list_topic_subscriptions(): + async def sample_list_topic_subscriptions(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.ListTopicSubscriptionsRequest( @@ -772,7 +772,7 @@ def sample_list_topic_subscriptions(): page_result = client.list_topic_subscriptions(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -884,9 +884,9 @@ async def list_topic_snapshots( from google import pubsub_v1 - def sample_list_topic_snapshots(): + async def sample_list_topic_snapshots(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.ListTopicSnapshotsRequest( @@ -897,7 +897,7 @@ def 
sample_list_topic_snapshots(): page_result = client.list_topic_snapshots(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -1009,9 +1009,9 @@ async def delete_topic( from google import pubsub_v1 - def sample_delete_topic(): + async def sample_delete_topic(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.DeleteTopicRequest( @@ -1019,7 +1019,7 @@ def sample_delete_topic(): ) # Make the request - client.delete_topic(request=request) + await client.delete_topic(request=request) Args: request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): @@ -1105,9 +1105,9 @@ async def detach_subscription( from google import pubsub_v1 - def sample_detach_subscription(): + async def sample_detach_subscription(): # Create a client - client = pubsub_v1.PublisherClient() + client = pubsub_v1.PublisherAsyncClient() # Initialize request argument(s) request = pubsub_v1.DetachSubscriptionRequest( @@ -1115,7 +1115,7 @@ def sample_detach_subscription(): ) # Make the request - response = client.detach_subscription(request=request) + response = await client.detach_subscription(request=request) # Handle the response print(response) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 169d71fdfea5..b0831fe09fa5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -222,9 +222,9 @@ async def create_schema( from google import pubsub_v1 - def sample_create_schema(): + async def sample_create_schema(): # Create a client - client = pubsub_v1.SchemaServiceClient() + client = pubsub_v1.SchemaServiceAsyncClient() # Initialize request 
argument(s) schema = pubsub_v1.Schema() @@ -236,7 +236,7 @@ def sample_create_schema(): ) # Make the request - response = client.create_schema(request=request) + response = await client.create_schema(request=request) # Handle the response print(response) @@ -343,9 +343,9 @@ async def get_schema( from google import pubsub_v1 - def sample_get_schema(): + async def sample_get_schema(): # Create a client - client = pubsub_v1.SchemaServiceClient() + client = pubsub_v1.SchemaServiceAsyncClient() # Initialize request argument(s) request = pubsub_v1.GetSchemaRequest( @@ -353,7 +353,7 @@ def sample_get_schema(): ) # Make the request - response = client.get_schema(request=request) + response = await client.get_schema(request=request) # Handle the response print(response) @@ -435,9 +435,9 @@ async def list_schemas( from google import pubsub_v1 - def sample_list_schemas(): + async def sample_list_schemas(): # Create a client - client = pubsub_v1.SchemaServiceClient() + client = pubsub_v1.SchemaServiceAsyncClient() # Initialize request argument(s) request = pubsub_v1.ListSchemasRequest( @@ -448,7 +448,7 @@ def sample_list_schemas(): page_result = client.list_schemas(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -542,9 +542,9 @@ async def delete_schema( from google import pubsub_v1 - def sample_delete_schema(): + async def sample_delete_schema(): # Create a client - client = pubsub_v1.SchemaServiceClient() + client = pubsub_v1.SchemaServiceAsyncClient() # Initialize request argument(s) request = pubsub_v1.DeleteSchemaRequest( @@ -552,7 +552,7 @@ def sample_delete_schema(): ) # Make the request - client.delete_schema(request=request) + await client.delete_schema(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): @@ -626,9 +626,9 @@ async def validate_schema( from google import pubsub_v1 - def sample_validate_schema(): + async def sample_validate_schema(): # 
Create a client - client = pubsub_v1.SchemaServiceClient() + client = pubsub_v1.SchemaServiceAsyncClient() # Initialize request argument(s) schema = pubsub_v1.Schema() @@ -640,7 +640,7 @@ def sample_validate_schema(): ) # Make the request - response = client.validate_schema(request=request) + response = await client.validate_schema(request=request) # Handle the response print(response) @@ -733,9 +733,9 @@ async def validate_message( from google import pubsub_v1 - def sample_validate_message(): + async def sample_validate_message(): # Create a client - client = pubsub_v1.SchemaServiceClient() + client = pubsub_v1.SchemaServiceAsyncClient() # Initialize request argument(s) request = pubsub_v1.ValidateMessageRequest( @@ -744,7 +744,7 @@ def sample_validate_message(): ) # Make the request - response = client.validate_message(request=request) + response = await client.validate_message(request=request) # Handle the response print(response) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index cc67b66e5d23..acfc54a12a52 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -250,9 +250,9 @@ async def create_subscription( from google import pubsub_v1 - def sample_create_subscription(): + async def sample_create_subscription(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.Subscription( @@ -261,7 +261,7 @@ def sample_create_subscription(): ) # Make the request - response = client.create_subscription(request=request) + response = await client.create_subscription(request=request) # Handle the response print(response) @@ -415,9 +415,9 @@ async def get_subscription( from google import pubsub_v1 - def 
sample_get_subscription(): + async def sample_get_subscription(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.GetSubscriptionRequest( @@ -425,7 +425,7 @@ def sample_get_subscription(): ) # Make the request - response = client.get_subscription(request=request) + response = await client.get_subscription(request=request) # Handle the response print(response) @@ -522,9 +522,9 @@ async def update_subscription( from google import pubsub_v1 - def sample_update_subscription(): + async def sample_update_subscription(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) subscription = pubsub_v1.Subscription() @@ -536,7 +536,7 @@ def sample_update_subscription(): ) # Make the request - response = client.update_subscription(request=request) + response = await client.update_subscription(request=request) # Handle the response print(response) @@ -609,9 +609,9 @@ async def list_subscriptions( from google import pubsub_v1 - def sample_list_subscriptions(): + async def sample_list_subscriptions(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.ListSubscriptionsRequest( @@ -622,7 +622,7 @@ def sample_list_subscriptions(): page_result = client.list_subscriptions(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -732,9 +732,9 @@ async def delete_subscription( from google import pubsub_v1 - def sample_delete_subscription(): + async def sample_delete_subscription(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.DeleteSubscriptionRequest( @@ -742,7 +742,7 @@ def sample_delete_subscription(): ) # Make 
the request - client.delete_subscription(request=request) + await client.delete_subscription(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): @@ -833,9 +833,9 @@ async def modify_ack_deadline( from google import pubsub_v1 - def sample_modify_ack_deadline(): + async def sample_modify_ack_deadline(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.ModifyAckDeadlineRequest( @@ -845,7 +845,7 @@ def sample_modify_ack_deadline(): ) # Make the request - client.modify_ack_deadline(request=request) + await client.modify_ack_deadline(request=request) Args: request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): @@ -962,9 +962,9 @@ async def acknowledge( from google import pubsub_v1 - def sample_acknowledge(): + async def sample_acknowledge(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.AcknowledgeRequest( @@ -973,7 +973,7 @@ def sample_acknowledge(): ) # Make the request - client.acknowledge(request=request) + await client.acknowledge(request=request) Args: request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): @@ -1071,9 +1071,9 @@ async def pull( from google import pubsub_v1 - def sample_pull(): + async def sample_pull(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.PullRequest( @@ -1082,7 +1082,7 @@ def sample_pull(): ) # Make the request - response = client.pull(request=request) + response = await client.pull(request=request) # Handle the response print(response) @@ -1218,9 +1218,9 @@ def streaming_pull( from google import pubsub_v1 - def sample_streaming_pull(): + async def sample_streaming_pull(): # Create a client - client = pubsub_v1.SubscriberClient() + client = 
pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.StreamingPullRequest( @@ -1239,10 +1239,10 @@ def request_generator(): yield request # Make the request - stream = client.streaming_pull(requests=request_generator()) + stream = await client.streaming_pull(requests=request_generator()) # Handle the response - for response in stream: + async for response in stream: print(response) Args: @@ -1319,9 +1319,9 @@ async def modify_push_config( from google import pubsub_v1 - def sample_modify_push_config(): + async def sample_modify_push_config(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.ModifyPushConfigRequest( @@ -1329,7 +1329,7 @@ def sample_modify_push_config(): ) # Make the request - client.modify_push_config(request=request) + await client.modify_push_config(request=request) Args: request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): @@ -1433,9 +1433,9 @@ async def get_snapshot( from google import pubsub_v1 - def sample_get_snapshot(): + async def sample_get_snapshot(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.GetSnapshotRequest( @@ -1443,7 +1443,7 @@ def sample_get_snapshot(): ) # Make the request - response = client.get_snapshot(request=request) + response = await client.get_snapshot(request=request) # Handle the response print(response) @@ -1546,9 +1546,9 @@ async def list_snapshots( from google import pubsub_v1 - def sample_list_snapshots(): + async def sample_list_snapshots(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.ListSnapshotsRequest( @@ -1559,7 +1559,7 @@ def sample_list_snapshots(): page_result = client.list_snapshots(request=request) # Handle the response - for 
response in page_result: + async for response in page_result: print(response) Args: @@ -1683,9 +1683,9 @@ async def create_snapshot( from google import pubsub_v1 - def sample_create_snapshot(): + async def sample_create_snapshot(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.CreateSnapshotRequest( @@ -1694,7 +1694,7 @@ def sample_create_snapshot(): ) # Make the request - response = client.create_snapshot(request=request) + response = await client.create_snapshot(request=request) # Handle the response print(response) @@ -1820,16 +1820,16 @@ async def update_snapshot( from google import pubsub_v1 - def sample_update_snapshot(): + async def sample_update_snapshot(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.UpdateSnapshotRequest( ) # Make the request - response = client.update_snapshot(request=request) + response = await client.update_snapshot(request=request) # Handle the response print(response) @@ -1917,9 +1917,9 @@ async def delete_snapshot( from google import pubsub_v1 - def sample_delete_snapshot(): + async def sample_delete_snapshot(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.DeleteSnapshotRequest( @@ -1927,7 +1927,7 @@ def sample_delete_snapshot(): ) # Make the request - client.delete_snapshot(request=request) + await client.delete_snapshot(request=request) Args: request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): @@ -2016,9 +2016,9 @@ async def seek( from google import pubsub_v1 - def sample_seek(): + async def sample_seek(): # Create a client - client = pubsub_v1.SubscriberClient() + client = pubsub_v1.SubscriberAsyncClient() # Initialize request argument(s) request = pubsub_v1.SeekRequest( @@ -2026,7 
+2026,7 @@ def sample_seek(): ) # Make the request - response = client.seek(request=request) + response = await client.seek(request=request) # Handle the response print(response) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index de81d0c1360e..c5ab354d68fb 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -736,7 +736,7 @@ def test_create_topic_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.Topic() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: @@ -752,7 +752,7 @@ def test_create_topic_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -766,7 +766,7 @@ async def test_create_topic_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.Topic() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: @@ -782,7 +782,7 @@ async def test_create_topic_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -972,7 +972,7 @@ def test_update_topic_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.UpdateTopicRequest() - request.topic.name = "topic.name/value" + request.topic.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_topic), "__call__") as call: @@ -988,7 +988,7 @@ def test_update_topic_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic.name=topic.name/value", + "topic.name=name_value", ) in kw["metadata"] @@ -1002,7 +1002,7 @@ async def test_update_topic_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.UpdateTopicRequest() - request.topic.name = "topic.name/value" + request.topic.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_topic), "__call__") as call: @@ -1018,7 +1018,7 @@ async def test_update_topic_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic.name=topic.name/value", + "topic.name=name_value", ) in kw["metadata"] @@ -1120,7 +1120,7 @@ def test_publish_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.PublishRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: @@ -1136,7 +1136,7 @@ def test_publish_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -1150,7 +1150,7 @@ async def test_publish_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.PublishRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.publish), "__call__") as call: @@ -1168,7 +1168,7 @@ async def test_publish_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -1370,7 +1370,7 @@ def test_get_topic_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.GetTopicRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: @@ -1386,7 +1386,7 @@ def test_get_topic_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -1400,7 +1400,7 @@ async def test_get_topic_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.GetTopicRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: @@ -1416,7 +1416,7 @@ async def test_get_topic_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -1598,7 +1598,7 @@ def test_list_topics_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ListTopicsRequest() - request.project = "project/value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1614,7 +1614,7 @@ def test_list_topics_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project=project/value", + "project=project_value", ) in kw["metadata"] @@ -1628,7 +1628,7 @@ async def test_list_topics_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ListTopicsRequest() - request.project = "project/value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: @@ -1646,7 +1646,7 @@ async def test_list_topics_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project=project/value", + "project=project_value", ) in kw["metadata"] @@ -1777,7 +1777,7 @@ def test_list_topics_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, pubsub.Topic) for i in results) @@ -2028,7 +2028,7 @@ def test_list_topic_subscriptions_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ListTopicSubscriptionsRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2046,7 +2046,7 @@ def test_list_topic_subscriptions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -2060,7 +2060,7 @@ async def test_list_topic_subscriptions_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ListTopicSubscriptionsRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2080,7 +2080,7 @@ async def test_list_topic_subscriptions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -2217,7 +2217,7 @@ def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, str) for i in results) @@ -2474,7 +2474,7 @@ def test_list_topic_snapshots_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ListTopicSnapshotsRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2492,7 +2492,7 @@ def test_list_topic_snapshots_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -2506,7 +2506,7 @@ async def test_list_topic_snapshots_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ListTopicSnapshotsRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2526,7 +2526,7 @@ async def test_list_topic_snapshots_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -2663,7 +2663,7 @@ def test_list_topic_snapshots_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, str) for i in results) @@ -2902,7 +2902,7 @@ def test_delete_topic_field_headers(): # a field header. Set these to a non-empty value. 
request = pubsub.DeleteTopicRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: @@ -2918,7 +2918,7 @@ def test_delete_topic_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -2932,7 +2932,7 @@ async def test_delete_topic_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.DeleteTopicRequest() - request.topic = "topic/value" + request.topic = "topic_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: @@ -2948,7 +2948,7 @@ async def test_delete_topic_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "topic=topic/value", + "topic=topic_value", ) in kw["metadata"] @@ -3130,7 +3130,7 @@ def test_detach_subscription_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.DetachSubscriptionRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -3148,7 +3148,7 @@ def test_detach_subscription_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -3162,7 +3162,7 @@ async def test_detach_subscription_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.DetachSubscriptionRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -3182,7 +3182,7 @@ async def test_detach_subscription_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index b35750e7f8c3..852e4ef3b475 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -760,7 +760,7 @@ def test_create_schema_field_headers(): # a field header. Set these to a non-empty value. request = gp_schema.CreateSchemaRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: @@ -776,7 +776,7 @@ def test_create_schema_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -790,7 +790,7 @@ async def test_create_schema_field_headers_async(): # a field header. Set these to a non-empty value. request = gp_schema.CreateSchemaRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: @@ -806,7 +806,7 @@ async def test_create_schema_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1016,7 +1016,7 @@ def test_get_schema_field_headers(): # a field header. Set these to a non-empty value. 
request = schema.GetSchemaRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: @@ -1032,7 +1032,7 @@ def test_get_schema_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1046,7 +1046,7 @@ async def test_get_schema_field_headers_async(): # a field header. Set these to a non-empty value. request = schema.GetSchemaRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: @@ -1062,7 +1062,7 @@ async def test_get_schema_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1244,7 +1244,7 @@ def test_list_schemas_field_headers(): # a field header. Set these to a non-empty value. request = schema.ListSchemasRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1260,7 +1260,7 @@ def test_list_schemas_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1274,7 +1274,7 @@ async def test_list_schemas_field_headers_async(): # a field header. Set these to a non-empty value. request = schema.ListSchemasRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: @@ -1292,7 +1292,7 @@ async def test_list_schemas_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1423,7 +1423,7 @@ def test_list_schemas_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, schema.Schema) for i in results) @@ -1656,7 +1656,7 @@ def test_delete_schema_field_headers(): # a field header. Set these to a non-empty value. request = schema.DeleteSchemaRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: @@ -1672,7 +1672,7 @@ def test_delete_schema_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1686,7 +1686,7 @@ async def test_delete_schema_field_headers_async(): # a field header. Set these to a non-empty value. request = schema.DeleteSchemaRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: @@ -1702,7 +1702,7 @@ async def test_delete_schema_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1878,7 +1878,7 @@ def test_validate_schema_field_headers(): # a field header. Set these to a non-empty value. request = gp_schema.ValidateSchemaRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: @@ -1894,7 +1894,7 @@ def test_validate_schema_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -1908,7 +1908,7 @@ async def test_validate_schema_field_headers_async(): # a field header. Set these to a non-empty value. request = gp_schema.ValidateSchemaRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: @@ -1926,7 +1926,7 @@ async def test_validate_schema_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2114,7 +2114,7 @@ def test_validate_message_field_headers(): # a field header. Set these to a non-empty value. request = schema.ValidateMessageRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_message), "__call__") as call: @@ -2130,7 +2130,7 @@ def test_validate_message_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] @@ -2144,7 +2144,7 @@ async def test_validate_message_field_headers_async(): # a field header. Set these to a non-empty value. request = schema.ValidateMessageRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.validate_message), "__call__") as call: @@ -2162,7 +2162,7 @@ async def test_validate_message_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent/value", + "parent=parent_value", ) in kw["metadata"] diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index d9146f11c6db..cbccac58293e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -766,7 +766,7 @@ def test_create_subscription_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.Subscription() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -784,7 +784,7 @@ def test_create_subscription_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -798,7 +798,7 @@ async def test_create_subscription_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.Subscription() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -816,7 +816,7 @@ async def test_create_subscription_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -1060,7 +1060,7 @@ def test_get_subscription_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.GetSubscriptionRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: @@ -1076,7 +1076,7 @@ def test_get_subscription_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -1090,7 +1090,7 @@ async def test_get_subscription_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.GetSubscriptionRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: @@ -1106,7 +1106,7 @@ async def test_get_subscription_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -1322,7 +1322,7 @@ def test_update_subscription_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.UpdateSubscriptionRequest() - request.subscription.name = "subscription.name/value" + request.subscription.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1340,7 +1340,7 @@ def test_update_subscription_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription.name=subscription.name/value", + "subscription.name=name_value", ) in kw["metadata"] @@ -1354,7 +1354,7 @@ async def test_update_subscription_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.UpdateSubscriptionRequest() - request.subscription.name = "subscription.name/value" + request.subscription.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1372,7 +1372,7 @@ async def test_update_subscription_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription.name=subscription.name/value", + "subscription.name=name_value", ) in kw["metadata"] @@ -1480,7 +1480,7 @@ def test_list_subscriptions_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ListSubscriptionsRequest() - request.project = "project/value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1498,7 +1498,7 @@ def test_list_subscriptions_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project=project/value", + "project=project_value", ) in kw["metadata"] @@ -1512,7 +1512,7 @@ async def test_list_subscriptions_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ListSubscriptionsRequest() - request.project = "project/value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1532,7 +1532,7 @@ async def test_list_subscriptions_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project=project/value", + "project=project_value", ) in kw["metadata"] @@ -1669,7 +1669,7 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, pubsub.Subscription) for i in results) @@ -1914,7 +1914,7 @@ def test_delete_subscription_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.DeleteSubscriptionRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1932,7 +1932,7 @@ def test_delete_subscription_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -1946,7 +1946,7 @@ async def test_delete_subscription_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.DeleteSubscriptionRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1964,7 +1964,7 @@ async def test_delete_subscription_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2148,7 +2148,7 @@ def test_modify_ack_deadline_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ModifyAckDeadlineRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2166,7 +2166,7 @@ def test_modify_ack_deadline_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2180,7 +2180,7 @@ async def test_modify_ack_deadline_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ModifyAckDeadlineRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2198,7 +2198,7 @@ async def test_modify_ack_deadline_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2396,7 +2396,7 @@ def test_acknowledge_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.AcknowledgeRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: @@ -2412,7 +2412,7 @@ def test_acknowledge_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2426,7 +2426,7 @@ async def test_acknowledge_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.AcknowledgeRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: @@ -2442,7 +2442,7 @@ async def test_acknowledge_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2626,7 +2626,7 @@ def test_pull_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.PullRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.pull), "__call__") as call: @@ -2642,7 +2642,7 @@ def test_pull_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2656,7 +2656,7 @@ async def test_pull_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.PullRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: @@ -2672,7 +2672,7 @@ async def test_pull_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2948,7 +2948,7 @@ def test_modify_push_config_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ModifyPushConfigRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2966,7 +2966,7 @@ def test_modify_push_config_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -2980,7 +2980,7 @@ async def test_modify_push_config_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ModifyPushConfigRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2998,7 +2998,7 @@ async def test_modify_push_config_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -3198,7 +3198,7 @@ def test_get_snapshot_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.GetSnapshotRequest() - request.snapshot = "snapshot/value" + request.snapshot = "snapshot_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: @@ -3214,7 +3214,7 @@ def test_get_snapshot_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "snapshot=snapshot/value", + "snapshot=snapshot_value", ) in kw["metadata"] @@ -3228,7 +3228,7 @@ async def test_get_snapshot_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.GetSnapshotRequest() - request.snapshot = "snapshot/value" + request.snapshot = "snapshot_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: @@ -3244,7 +3244,7 @@ async def test_get_snapshot_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "snapshot=snapshot/value", + "snapshot=snapshot_value", ) in kw["metadata"] @@ -3426,7 +3426,7 @@ def test_list_snapshots_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.ListSnapshotsRequest() - request.project = "project/value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3442,7 +3442,7 @@ def test_list_snapshots_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project=project/value", + "project=project_value", ) in kw["metadata"] @@ -3456,7 +3456,7 @@ async def test_list_snapshots_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.ListSnapshotsRequest() - request.project = "project/value" + request.project = "project_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: @@ -3474,7 +3474,7 @@ async def test_list_snapshots_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "project=project/value", + "project=project_value", ) in kw["metadata"] @@ -3605,7 +3605,7 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, pubsub.Snapshot) for i in results) @@ -3850,7 +3850,7 @@ def test_create_snapshot_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.CreateSnapshotRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: @@ -3866,7 +3866,7 @@ def test_create_snapshot_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -3880,7 +3880,7 @@ async def test_create_snapshot_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.CreateSnapshotRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: @@ -3896,7 +3896,7 @@ async def test_create_snapshot_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name/value", + "name=name_value", ) in kw["metadata"] @@ -4092,7 +4092,7 @@ def test_update_snapshot_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.UpdateSnapshotRequest() - request.snapshot.name = "snapshot.name/value" + request.snapshot.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: @@ -4108,7 +4108,7 @@ def test_update_snapshot_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "snapshot.name=snapshot.name/value", + "snapshot.name=name_value", ) in kw["metadata"] @@ -4122,7 +4122,7 @@ async def test_update_snapshot_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.UpdateSnapshotRequest() - request.snapshot.name = "snapshot.name/value" + request.snapshot.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: @@ -4138,7 +4138,7 @@ async def test_update_snapshot_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "snapshot.name=snapshot.name/value", + "snapshot.name=name_value", ) in kw["metadata"] @@ -4232,7 +4232,7 @@ def test_delete_snapshot_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.DeleteSnapshotRequest() - request.snapshot = "snapshot/value" + request.snapshot = "snapshot_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: @@ -4248,7 +4248,7 @@ def test_delete_snapshot_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "snapshot=snapshot/value", + "snapshot=snapshot_value", ) in kw["metadata"] @@ -4262,7 +4262,7 @@ async def test_delete_snapshot_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.DeleteSnapshotRequest() - request.snapshot = "snapshot/value" + request.snapshot = "snapshot_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: @@ -4278,7 +4278,7 @@ async def test_delete_snapshot_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "snapshot=snapshot/value", + "snapshot=snapshot_value", ) in kw["metadata"] @@ -4452,7 +4452,7 @@ def test_seek_field_headers(): # a field header. Set these to a non-empty value. request = pubsub.SeekRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.seek), "__call__") as call: @@ -4468,7 +4468,7 @@ def test_seek_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] @@ -4482,7 +4482,7 @@ async def test_seek_field_headers_async(): # a field header. Set these to a non-empty value. request = pubsub.SeekRequest() - request.subscription = "subscription/value" + request.subscription = "subscription_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.seek), "__call__") as call: @@ -4498,7 +4498,7 @@ async def test_seek_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "subscription=subscription/value", + "subscription=subscription_value", ) in kw["metadata"] From aea147264b758fcbe5bb7eaf074f6f6b1e87c44d Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 27 Apr 2022 19:32:45 +0200 Subject: [PATCH 0817/1197] chore(deps): update dependency backoff to v2.0.1 (#674) Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index e63a85c6d079..3f18f3b00dcd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==2.0.0 +backoff==2.0.1 pytest==7.1.2 mock==4.0.3 flaky==3.7.0 \ No newline at end of file From fab9c00f576cd6ed99419bbd6faccec8c375c296 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 4 May 2022 15:01:15 -0400 Subject: [PATCH 0818/1197] fix: set min snooze on lease management to .01 sec (#678) --- .../cloud/pubsub_v1/subscriber/_protocol/leaser.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index de110e9920ef..dc2b14fc60d0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -22,6 +22,8 @@ import typing from typing import Dict, Iterable, Optional, Union +from 
google.cloud.pubsub_v1.subscriber._protocol.dispatcher import _MAX_BATCH_LATENCY + try: from collections.abc import KeysView @@ -200,10 +202,13 @@ def maintain_leases(self) -> None: # Now wait an appropriate period of time and do this again. # # We determine the appropriate period of time based on a random - # period between 0 seconds and 90% of the lease. This use of - # jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases + # period between: + # minimum: MAX_BATCH_LATENCY (to prevent duplicate modacks being created in one batch) + # maximum: 90% of the deadline + # This maximum time attempts to prevent ack expiration before new lease modacks arrive at the server. + # This use of jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. - snooze = random.uniform(0.0, deadline * 0.9) + snooze = random.uniform(_MAX_BATCH_LATENCY, deadline * 0.9) _LOGGER.debug("Snoozing lease management for %f seconds.", snooze) self._stop_event.wait(timeout=snooze) From e74ba1ed1181af071d6927ff1a06e77a150f1fa2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 12:39:15 -0400 Subject: [PATCH 0819/1197] chore: [autoapprove] update readme_gen.py to include autoescape True (#680) Source-Link: https://github.com/googleapis/synthtool/commit/6b4d5a6407d740beb4158b302194a62a4108a8a6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-pubsub/scripts/readme-gen/readme_gen.py | 5 ++++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 64f82d6bf4bc..b631901e99f4 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bc5eed3804aec2f05fad42aacf973821d9500c174015341f721a984a0825b6fd -# created: 2022-04-21T15:43:16.246106921Z + digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 +# created: 2022-05-05T15:17:27.599381182Z diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py index d309d6e97518..91b59676bfc7 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = jinja_env.get_template('README.tmpl.rst') From 211c05f3a76d4982b0b42fd606cf8c6dc561021b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 5 May 2022 23:10:25 +0000 Subject: [PATCH 0820/1197] chore(python): auto approve template changes (#682) Source-Link: https://github.com/googleapis/synthtool/commit/453a5d9c9a55d1969240a37d36cec626d20a9024 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.github/auto-approve.yml | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-pubsub/.github/auto-approve.yml diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index b631901e99f4..757c9dca75ad 
100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f792ee1320e03eda2d13a5281a2989f7ed8a9e50b73ef6da97fac7e1e850b149 -# created: 2022-05-05T15:17:27.599381182Z + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/packages/google-cloud-pubsub/.github/auto-approve.yml b/packages/google-cloud-pubsub/.github/auto-approve.yml new file mode 100644 index 000000000000..311ebbb853a9 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" From d409c9141390d9582505191f09fe2625147c260b Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 10 May 2022 15:45:28 -0400 Subject: [PATCH 0821/1197] fix: Add emulator support to schema service (#658) * fix: add emulator support to schema service --- .../pubsub_v1/services/schema_service/client.py | 12 ++++++++++++ packages/google-cloud-pubsub/owlbot.py | 5 +++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index f50a3a4d6bec..18b7ba632ca4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -14,6 +14,7 @@ # limitations under the License. 
# from collections import OrderedDict +import functools import os import re from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union @@ -39,6 +40,8 @@ from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema + +import grpc from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import SchemaServiceGrpcTransport from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport @@ -411,6 +414,15 @@ def __init__( ) Transport = type(self).get_transport_class(transport) + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(Transport, type(self)._transport_registry["grpc"]): + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + Transport = functools.partial(Transport, channel=channel) + self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index bb73bd7e4b77..c0176106bccf 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -85,6 +85,7 @@ clients_to_patch = [ library / f"google/pubsub_{library.name}/services/publisher/client.py", library / f"google/pubsub_{library.name}/services/subscriber/client.py", + library / f"google/pubsub_{library.name}/services/schema_service/client.py", ] err_msg = ( "Expected replacements for gRPC channel to use with the emulator not made." 
@@ -97,8 +98,8 @@ count = s.replace( clients_to_patch, - f"from google\.pubsub_{library.name}\.types import pubsub", - "\g<0>\n\nimport grpc", + f"from \.transports\.base", + "\nimport grpc\n\g<0>", ) if count < len(clients_to_patch): From 5868dbbd3d1e174b9be3cb465e17bd825cca7798 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 11 May 2022 10:20:40 -0400 Subject: [PATCH 0822/1197] Fix: Handle duplicate acks with streaming pull (#662) --- .../subscriber/_protocol/dispatcher.py | 69 ++++- .../pubsub_v1/subscriber/test_dispatcher.py | 272 ++++++++++++++++++ 2 files changed, 336 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 916161616579..c6dbf067f21c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -28,6 +28,9 @@ from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeStatus, +) if typing.TYPE_CHECKING: # pragma: NO COVER import queue @@ -128,17 +131,50 @@ def dispatch_callback(self, items: Sequence[RequestItem]) -> None: nack_requests: List[requests.NackRequest] = [] drop_requests: List[requests.DropRequest] = [] + lease_ids = set() + modack_ids = set() + ack_ids = set() + nack_ids = set() + drop_ids = set() + exactly_once_delivery_enabled = self._manager._exactly_once_delivery_enabled() + for item in items: if isinstance(item, requests.LeaseRequest): - lease_requests.append(item) + if ( + item.ack_id not in lease_ids + ): # LeaseRequests have no futures to handle. 
+ lease_ids.add(item.ack_id) + lease_requests.append(item) elif isinstance(item, requests.ModAckRequest): - modack_requests.append(item) + if item.ack_id in modack_ids: + self._handle_duplicate_request_future( + exactly_once_delivery_enabled, item + ) + else: + modack_ids.add(item.ack_id) + modack_requests.append(item) elif isinstance(item, requests.AckRequest): - ack_requests.append(item) + if item.ack_id in ack_ids: + self._handle_duplicate_request_future( + exactly_once_delivery_enabled, item + ) + else: + ack_ids.add(item.ack_id) + ack_requests.append(item) elif isinstance(item, requests.NackRequest): - nack_requests.append(item) + if item.ack_id in nack_ids: + self._handle_duplicate_request_future( + exactly_once_delivery_enabled, item + ) + else: + nack_ids.add(item.ack_id) + nack_requests.append(item) elif isinstance(item, requests.DropRequest): - drop_requests.append(item) + if ( + item.ack_id not in drop_ids + ): # DropRequests have no futures to handle. + drop_ids.add(item.ack_id) + drop_requests.append(item) else: warnings.warn( f'Skipping unknown request item of type "{type(item)}"', @@ -164,6 +200,29 @@ def dispatch_callback(self, items: Sequence[RequestItem]) -> None: if drop_requests: self.drop(drop_requests) + def _handle_duplicate_request_future( + self, + exactly_once_delivery_enabled: bool, + item: Union[requests.AckRequest, requests.ModAckRequest, requests.NackRequest], + ) -> None: + _LOGGER.debug( + "This is a duplicate %s with the same ack_id: %s.", + type(item), + item.ack_id, + ) + if item.future: + if exactly_once_delivery_enabled: + item.future.set_exception( + ValueError(f"Duplicate ack_id for {type(item)}") + ) + # Futures may be present even with exactly-once delivery + # disabled, in transition periods after the setting is changed on + # the subscription. + else: + # When exactly-once delivery is NOT enabled, acks/modacks are considered + # best-effort, so the future should succeed even though this is a duplicate. 
+ item.future.set_result(AcknowledgeStatus.SUCCESS) + def ack(self, items: Sequence[requests.AckRequest]) -> None: """Acknowledge the given messages. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index c1de19e65cdf..c6902da69621 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -24,6 +24,9 @@ import mock import pytest +from google.cloud.pubsub_v1.subscriber.exceptions import ( + AcknowledgeStatus, +) @pytest.mark.parametrize( @@ -48,6 +51,7 @@ def test_dispatch_callback_active_manager(item, method_name): dispatcher_.dispatch_callback(items) method.assert_called_once_with([item]) + manager._exactly_once_delivery_enabled.assert_called() @pytest.mark.parametrize( @@ -73,6 +77,274 @@ def test_dispatch_callback_inactive_manager(item, method_name): dispatcher_.dispatch_callback(items) method.assert_called_once_with([item]) + manager._exactly_once_delivery_enabled.assert_called() + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", None), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, None), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", None), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_no_futures( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = 
dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = False + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", futures.Future()), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, futures.Future()), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", futures.Future()), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_with_futures_no_eod( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = False + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + if method_name != "drop" and method_name != "lease": + assert items[1].future.result() == AcknowledgeStatus.SUCCESS + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", futures.Future()), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, 
""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, futures.Future()), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", futures.Future()), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_with_futures_eod( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = True + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() + + if method_name != "drop" and method_name != "lease": + with pytest.raises(ValueError) as err: + items[1].future.result() + assert err.errisinstance(ValueError) + + +def test_dispatch_duplicate_items_diff_types_callback_active_manager_with_futures_eod(): + ack_future = futures.Future() + ack_request = requests.AckRequest("0", 0, 1, "", ack_future) + drop_request = requests.DropRequest("0", 1, "") + lease_request = requests.LeaseRequest("0", 1, "") + nack_future = futures.Future() + nack_request = requests.NackRequest("0", 1, "", nack_future) + modack_future = futures.Future() + modack_request = requests.ModAckRequest("0", 1, modack_future) + + items = [ack_request, drop_request, lease_request, nack_request, modack_request] + + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = True + with mock.patch.multiple( + dispatcher_, + ack=mock.DEFAULT, + nack=mock.DEFAULT, + drop=mock.DEFAULT, + lease=mock.DEFAULT, + modify_ack_deadline=mock.DEFAULT, + ): + 
dispatcher_.dispatch_callback(items) + manager._exactly_once_delivery_enabled.assert_called() + dispatcher_.ack.assert_called_once_with([ack_request]) + dispatcher_.drop.assert_called_once_with([drop_request]) + dispatcher_.lease.assert_called_once_with([lease_request]) + dispatcher_.nack.assert_called_once_with([nack_request]) + dispatcher_.modify_ack_deadline.assert_called_once_with([modack_request]) + + +@pytest.mark.parametrize( + "items,method_name", + [ + ( + [ + requests.AckRequest("0", 0, 0, "", None), + requests.AckRequest("0", 0, 1, "", None), + ], + "ack", + ), + ( + [ + requests.DropRequest("0", 0, ""), + requests.DropRequest("0", 1, ""), + ], + "drop", + ), + ( + [ + requests.LeaseRequest("0", 0, ""), + requests.LeaseRequest("0", 1, ""), + ], + "lease", + ), + ( + [ + requests.ModAckRequest("0", 0, None), + requests.ModAckRequest("0", 1, None), + ], + "modify_ack_deadline", + ), + ( + [ + requests.NackRequest("0", 0, "", None), + requests.NackRequest("0", 1, "", None), + ], + "nack", + ), + ], +) +def test_dispatch_duplicate_items_callback_active_manager_no_futures_eod( + items, method_name +): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + manager._exactly_once_delivery_enabled.return_value = True + with mock.patch.object(dispatcher_, method_name) as method: + dispatcher_.dispatch_callback(items) + + method.assert_called_once_with([items[0]]) + manager._exactly_once_delivery_enabled.assert_called() def test_unknown_request_type(): From 585675091e421a8992c4d54323803d58d56bb32c Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 11 May 2022 16:44:18 -0400 Subject: [PATCH 0823/1197] chore(main): release 2.12.1 (#655) * chore(main): release 2.12.1 * Update CHANGELOG.md Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> 
Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 15 +++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 41f185ed6422..997583f01ce2 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,21 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +### [2.12.1](https://github.com/googleapis/python-pubsub/compare/v2.12.0...v2.12.1) (2022-05-11) + + +### Bug Fixes + +* Add emulator support to schema service ([#658](https://github.com/googleapis/python-pubsub/issues/658)) ([1a07d7c](https://github.com/googleapis/python-pubsub/commit/1a07d7ce3b3580191f74b7895dd1b8afb13baccb)) +* Handle duplicate acks with streaming pull ([#662](https://github.com/googleapis/python-pubsub/issues/662)) ([219491e](https://github.com/googleapis/python-pubsub/commit/219491ea1e615f33e1955e3afc204a0281c525db)) +* set min snooze on lease management to .01 sec ([#678](https://github.com/googleapis/python-pubsub/issues/678)) ([91c6e69](https://github.com/googleapis/python-pubsub/commit/91c6e69e96953919bc86004692edd3a52c7b9796)) + + +### Documentation + +* fix project_path typo in UPGRADING.md ([#660](https://github.com/googleapis/python-pubsub/issues/660)) ([20d661c](https://github.com/googleapis/python-pubsub/commit/20d661c8562cc1f777ac7b3f1ba03dcad7a831c0)) +* mark eod as preview ([#657](https://github.com/googleapis/python-pubsub/issues/657)) ([418e1a3](https://github.com/googleapis/python-pubsub/commit/418e1a3783441469713ca8ec8776007ff0fdb15d)) + ## [2.12.0](https://github.com/googleapis/python-pubsub/compare/v2.11.0...v2.12.0) (2022-04-06) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 2e978e11a18b..f178df851530 100644 --- 
a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.12.0" +version = "2.12.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 5468cf04a6e56868e7234514102237df6e69f631 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 12 May 2022 22:16:46 +0200 Subject: [PATCH 0824/1197] chore(deps): update dependency google-cloud-pubsub to v2.12.1 (#683) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 38d750077474..27904a8f7f6f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.12.0 +google-cloud-pubsub==2.12.1 avro==1.11.0 From 5a889948122a3de5375a816ff3c5e3c2484f6d69 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 16 May 2022 12:28:16 -0400 Subject: [PATCH 0825/1197] Tests: Remove shared subscriptions across parallel tests (#669) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In samples/snippets/noxfile.py, tests are run in parallel if pytest-parallel is installed. This resulted in tests failing in CI that did not fail locally. Because of the use of shared subscription names across tests running in parallel, some tests were deleting a subscription, while others were attempting to receive messages. 
This PR removes all subscription test fixtures that created and deleted subscriptions that were reused across tests in samples/snippets/subscriber_test.py, instead repeating the creation logic within each test, with a different subscription name per test, to allow tests to run in parallel without failures. Also includes unrelated formatting changes due to linting changes. Fixes #668🦕 --- .../samples/snippets/publisher_test.py | 5 + .../samples/snippets/schema_test.py | 5 + .../samples/snippets/subscriber_test.py | 862 ++++++++++-------- 3 files changed, 475 insertions(+), 397 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index cf00da98e285..0a63113085ff 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -28,6 +28,7 @@ import publisher +# This uuid is shared across tests which run in parallel. UUID = uuid.uuid4().hex PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC_ID = "publisher-test-topic-" + UUID @@ -35,6 +36,10 @@ # Allow 60s for tests to finish. MAX_TIME = 60 +# These tests run in parallel if pytest-parallel is installed. +# Avoid modifying resources that are shared across tests, +# as this results in test flake. + if typing.TYPE_CHECKING: from unittest.mock import AsyncMock, MagicMock diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index 2cdf4bfb6b46..7780bebc1c16 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -31,6 +31,7 @@ import schema +# This uuid is shared across tests which run in parallel. 
UUID = uuid.uuid4().hex try: PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] @@ -45,6 +46,10 @@ AVSC_FILE = "resources/us-states.avsc" PROTO_FILE = "resources/us-states.proto" +# These tests run in parallel if pytest-parallel is installed. +# Avoid modifying resources that are shared across tests, +# as this results in test flake. + @pytest.fixture(scope="module") def schema_client() -> Generator[pubsub_v1.SchemaServiceClient, None, None]: diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index d656c6ce460c..e07aba775d39 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -22,14 +22,13 @@ from _pytest.capture import CaptureFixture import backoff from flaky import flaky -from google.api_core.exceptions import InternalServerError from google.api_core.exceptions import NotFound -from google.api_core.exceptions import Unknown from google.cloud import pubsub_v1 import pytest import subscriber +# This uuid is shared across tests which run in parallel. 
UUID = uuid.uuid4().hex PY_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] @@ -37,15 +36,6 @@ DEAD_LETTER_TOPIC = f"subscription-test-dead-letter-topic-{PY_VERSION}-{UUID}" EOD_TOPIC = f"subscription-test-eod-topic-{PY_VERSION}-{UUID}" SUBSCRIPTION_ADMIN = f"subscription-test-subscription-admin-{PY_VERSION}-{UUID}" -SUBSCRIPTION_ASYNC = f"subscription-test-subscription-async-{PY_VERSION}-{UUID}" -SUBSCRIPTION_SYNC = f"subscription-test-subscription-sync-{PY_VERSION}-{UUID}" -SUBSCRIPTION_DLQ = f"subscription-test-subscription-dlq-{PY_VERSION}-{UUID}" -SUBSCRIPTION_EOD_FOR_CREATE = ( - f"subscription-test-subscription-eod-for-create-{PY_VERSION}-{UUID}" -) -SUBSCRIPTION_EOD_FOR_RECEIVE = ( - f"subscription-test-subscription-eod-for-receive-{PY_VERSION}-{UUID}" -) ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push" NEW_ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push2" REGIONAL_ENDPOINT = "us-east1-pubsub.googleapis.com:443" @@ -57,6 +47,10 @@ typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) +# These tests run in parallel if pytest-parallel is installed. +# Avoid modifying resources that are shared across tests, +# as this results in test flake. 
+ @pytest.fixture(scope="module") def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: @@ -70,6 +64,26 @@ def regional_publisher_client() -> Generator[pubsub_v1.PublisherClient, None, No yield publisher +@pytest.fixture(scope="module") +def subscription_admin( + subscriber_client: pubsub_v1.SubscriberClient, topic: str +) -> Generator[str, None, None]: + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, SUBSCRIPTION_ADMIN + ) + + try: + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + subscription = subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + + yield subscription.name + + @pytest.fixture(scope="module") def topic(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]: topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC) @@ -123,171 +137,6 @@ def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: subscriber_client.close() -@pytest.fixture(scope="module") -def subscription_admin( - subscriber_client: pubsub_v1.SubscriberClient, topic: str -) -> Generator[str, None, None]: - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN - ) - - try: - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - subscription = subscriber_client.create_subscription( - request={"name": subscription_path, "topic": topic} - ) - - yield subscription.name - - -@pytest.fixture(scope="module") -def subscription_sync( - subscriber_client: pubsub_v1.SubscriberClient, topic: str -) -> Generator[str, None, None]: - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_SYNC - ) - - try: - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - subscription = 
subscriber_client.create_subscription( - request={"name": subscription_path, "topic": topic} - ) - - yield subscription.name - - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=300), - ) - - @typed_backoff - def delete_subscription() -> None: - try: - subscriber_client.delete_subscription( - request={"subscription": subscription.name} - ) - except NotFound: - print( - "When Unknown error happens, the server might have" - " successfully deleted the subscription under the cover, so" - " we ignore NotFound" - ) - - delete_subscription() - - -@pytest.fixture(scope="module") -def subscription_async( - subscriber_client: pubsub_v1.SubscriberClient, topic: str -) -> Generator[str, None, None]: - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ASYNC - ) - - try: - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - subscription = subscriber_client.create_subscription( - request={"name": subscription_path, "topic": topic} - ) - - yield subscription.name - - subscriber_client.delete_subscription(request={"subscription": subscription.name}) - - -@pytest.fixture(scope="module") -def subscription_dlq( - subscriber_client: pubsub_v1.SubscriberClient, topic: str, dead_letter_topic: str -) -> Generator[str, None, None]: - from google.cloud.pubsub_v1.types import DeadLetterPolicy - - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_DLQ - ) - - try: - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - request = { - "name": subscription_path, - "topic": topic, - "dead_letter_policy": DeadLetterPolicy( - dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 - ), - } - subscription = subscriber_client.create_subscription(request) - - yield subscription.name - - 
subscriber_client.delete_subscription(request={"subscription": subscription.name}) - - -@pytest.fixture(scope="module") -def subscription_eod_for_receive( - subscriber_client: pubsub_v1.SubscriberClient, exactly_once_delivery_topic: str -) -> Generator[str, None, None]: - - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_EOD_FOR_RECEIVE - ) - - try: - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - subscription = subscriber_client.create_subscription( - request={ - "name": subscription_path, - "topic": exactly_once_delivery_topic, - "enable_exactly_once_delivery": True, - } - ) - - yield subscription.name - - subscriber_client.delete_subscription(request={"subscription": subscription.name}) - - -@pytest.fixture(scope="module") -def subscription_eod_for_create( - subscriber_client: pubsub_v1.SubscriberClient, exactly_once_delivery_topic: str -) -> Generator[str, None, None]: - - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_EOD_FOR_CREATE - ) - - try: - subscription = subscriber_client.get_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - subscription = subscriber_client.create_subscription( - request={ - "name": subscription_path, - "topic": exactly_once_delivery_topic, - "enable_exactly_once_delivery": True, - } - ) - - yield subscription.name - - subscriber_client.delete_subscription(request={"subscription": subscription.name}) - - def _publish_messages( publisher_client: pubsub_v1.PublisherClient, topic: str, @@ -334,11 +183,16 @@ def eventually_consistent_test() -> None: def test_create_subscription( subscriber_client: pubsub_v1.SubscriberClient, - subscription_admin: str, + topic: str, capsys: CaptureFixture[str], ) -> None: + + subscription_for_create_name = ( + f"subscription-test-subscription-for-create-{PY_VERSION}-{UUID}" + ) + subscription_path = 
subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN + PROJECT_ID, subscription_for_create_name ) try: @@ -348,116 +202,198 @@ def test_create_subscription( except NotFound: pass - subscriber.create_subscription(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN) + subscriber.create_subscription(PROJECT_ID, TOPIC, subscription_for_create_name) out, _ = capsys.readouterr() - assert f"{subscription_admin}" in out + assert f"{subscription_for_create_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_create_subscription_with_dead_letter_policy( subscriber_client: pubsub_v1.SubscriberClient, - subscription_dlq: str, dead_letter_topic: str, capsys: CaptureFixture[str], ) -> None: + + subscription_dlq_name = ( + f"subscription-test-subscription-dlq-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_name + ) + try: subscriber_client.delete_subscription( - request={"subscription": subscription_dlq} + request={"subscription": subscription_path} ) except NotFound: pass subscriber.create_subscription_with_dead_letter_topic( - PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ, DEAD_LETTER_TOPIC + PROJECT_ID, TOPIC, subscription_dlq_name, DEAD_LETTER_TOPIC ) out, _ = capsys.readouterr() - assert f"Subscription created: {subscription_dlq}" in out + assert f"Subscription created: {subscription_path}" in out assert f"It will forward dead letter messages to: {dead_letter_topic}" in out assert f"After {DEFAULT_MAX_DELIVERY_ATTEMPTS} delivery attempts." in out + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + -@typed_flaky def test_receive_with_delivery_attempts( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, dead_letter_topic: str, - subscription_dlq: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, (Unknown, NotFound), max_time=120), + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + subscription_dlq_for_receive_name = ( + f"subscription-test-subscription-dlq-for-receive-{PY_VERSION}-{UUID}" ) - # The dlq subscription raises 404 before it's ready. - # We keep retrying up to 10 minutes for mitigating the flakiness. - @typed_backoff - def run_sample() -> None: - _ = _publish_messages(publisher_client, topic) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_for_receive_name + ) - subscriber.receive_messages_with_delivery_attempts( - PROJECT_ID, SUBSCRIPTION_DLQ, 90 + try: + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} ) + except NotFound: + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) - run_sample() + subscription_dlq = subscription.name + + _ = _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_delivery_attempts( + PROJECT_ID, subscription_dlq_for_receive_name, 90 + ) out, _ = capsys.readouterr() assert f"Listening for messages on {subscription_dlq}.." in out assert "With delivery attempts: " in out + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + -@typed_flaky def test_update_dead_letter_policy( - subscription_dlq: str, dead_letter_topic: str, capsys: CaptureFixture[str] + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + dead_letter_topic: str, + capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, (Unknown, InternalServerError), max_time=60), + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + subscription_dlq_for_update_name = ( + f"subscription-test-subscription-dlq-for-update-{PY_VERSION}-{UUID}" ) - # We saw internal server error that suggests to retry. + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_for_update_name + ) - @typed_backoff - def run_sample() -> None: - subscriber.update_subscription_with_dead_letter_policy( - PROJECT_ID, - TOPIC, - SUBSCRIPTION_DLQ, - DEAD_LETTER_TOPIC, - UPDATED_MAX_DELIVERY_ATTEMPTS, + try: + subscription = subscriber_client.get_subscription( + request={"subscription": subscription_path} ) + except NotFound: + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) - run_sample() + subscription_dlq = subscription.name + + subscriber.update_subscription_with_dead_letter_policy( + PROJECT_ID, + TOPIC, + subscription_dlq_for_update_name, + DEAD_LETTER_TOPIC, + UPDATED_MAX_DELIVERY_ATTEMPTS, + ) out, _ = capsys.readouterr() assert dead_letter_topic in out assert subscription_dlq in out assert f"max_delivery_attempts: {UPDATED_MAX_DELIVERY_ATTEMPTS}" in out + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + -@typed_flaky def test_remove_dead_letter_policy( - subscription_dlq: str, capsys: CaptureFixture[str] + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + dead_letter_topic: str, + capsys: CaptureFixture[str], ) -> None: + + from google.cloud.pubsub_v1.types import DeadLetterPolicy + + subscription_dlq_for_remove_name = ( + f"subscription-test-subscription-dlq-for-remove-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_dlq_for_remove_name + ) + + request = { + "name": subscription_path, + "topic": topic, + "dead_letter_policy": DeadLetterPolicy( + dead_letter_topic=dead_letter_topic, max_delivery_attempts=10 + ), + } + subscription = subscriber_client.create_subscription(request) + + subscription_dlq = subscription.name + subscription_after_update = subscriber.remove_dead_letter_policy( - PROJECT_ID, TOPIC, SUBSCRIPTION_DLQ + PROJECT_ID, TOPIC, subscription_dlq_for_remove_name ) out, _ = capsys.readouterr() assert subscription_dlq in out assert subscription_after_update.dead_letter_policy.dead_letter_topic == "" + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + def test_create_subscription_with_ordering( subscriber_client: pubsub_v1.SubscriberClient, - subscription_admin: str, + topic: str, capsys: CaptureFixture[str], ) -> None: + subscription_with_ordering_name = ( + f"subscription-test-subscription-with-ordering-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN + PROJECT_ID, subscription_with_ordering_name ) try: subscriber_client.delete_subscription( @@ -466,21 +402,31 @@ def test_create_subscription_with_ordering( except NotFound: pass - subscriber.create_subscription_with_ordering(PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN) + subscriber.create_subscription_with_ordering( + PROJECT_ID, TOPIC, subscription_with_ordering_name + ) out, _ = capsys.readouterr() assert "Created subscription with ordering" in out - assert f"{subscription_admin}" in out + assert f"{subscription_with_ordering_name}" in out assert "enable_message_ordering: true" in out + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + def test_create_subscription_with_filtering( subscriber_client: pubsub_v1.SubscriberClient, - subscription_admin: str, + topic: str, capsys: CaptureFixture[str], ) -> None: + + subscription_with_filtering_name = ( + f"subscription-test-subscription-with-filtering-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN + PROJECT_ID, subscription_with_filtering_name ) try: subscriber_client.delete_subscription( @@ -490,23 +436,32 @@ def test_create_subscription_with_filtering( pass subscriber.create_subscription_with_filtering( - PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, FILTER + PROJECT_ID, TOPIC, subscription_with_filtering_name, FILTER ) out, _ = capsys.readouterr() assert "Created subscription with filtering enabled" in out - assert f"{subscription_admin}" in out + assert f"{subscription_with_filtering_name}" in out assert '"attributes.author=\\"unknown\\""' in out + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + def test_create_subscription_with_exactly_once_delivery( subscriber_client: pubsub_v1.SubscriberClient, - subscription_eod_for_create: str, + exactly_once_delivery_topic: str, capsys: CaptureFixture[str], ) -> None: + + subscription_eod_for_create_name = ( + f"subscription-test-subscription-eod-for-create-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_EOD_FOR_CREATE + PROJECT_ID, subscription_eod_for_create_name ) + try: subscriber_client.delete_subscription( request={"subscription": subscription_path} @@ -515,327 +470,440 @@ def test_create_subscription_with_exactly_once_delivery( pass subscriber.create_subscription_with_exactly_once_delivery( - PROJECT_ID, EOD_TOPIC, SUBSCRIPTION_EOD_FOR_CREATE + PROJECT_ID, EOD_TOPIC, subscription_eod_for_create_name ) out, _ = capsys.readouterr() assert "Created subscription with exactly once delivery enabled" in out - assert f"{subscription_eod_for_create}" in out + assert f"{subscription_eod_for_create_name}" in out assert "enable_exactly_once_delivery: true" in out + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + def test_create_push_subscription( subscriber_client: pubsub_v1.SubscriberClient, - subscription_admin: str, + topic: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=60), + + push_subscription_for_create_name = ( + f"subscription-test-subscription-push-for-create-{PY_VERSION}-{UUID}" ) - # The scope of `subscription_path` is limited to this function. 
- @typed_backoff - def eventually_consistent_test() -> None: - subscription_path = subscriber_client.subscription_path( - PROJECT_ID, SUBSCRIPTION_ADMIN - ) - try: - subscriber_client.delete_subscription( - request={"subscription": subscription_path} - ) - except NotFound: - pass - - subscriber.create_push_subscription( - PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, ENDPOINT + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, push_subscription_for_create_name + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} ) + except NotFound: + pass - out, _ = capsys.readouterr() - assert f"{subscription_admin}" in out + subscriber.create_push_subscription( + PROJECT_ID, TOPIC, push_subscription_for_create_name, ENDPOINT + ) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert f"{push_subscription_for_create_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) -def test_update_push_suscription( - subscription_admin: str, +def test_update_push_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=60), + push_subscription_for_update_name = ( + f"subscription-test-subscription-push-for-create-{PY_VERSION}-{UUID}" + ) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, push_subscription_for_update_name ) - @typed_backoff - def eventually_consistent_test() -> None: - subscriber.update_push_subscription( - PROJECT_ID, TOPIC, SUBSCRIPTION_ADMIN, NEW_ENDPOINT + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} ) - out, _ = capsys.readouterr() - assert "Subscription updated" in out - assert f"{subscription_admin}" in 
out + subscriber.update_push_subscription( + PROJECT_ID, TOPIC, push_subscription_for_update_name, NEW_ENDPOINT + ) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert "Subscription updated" in out + assert f"{push_subscription_for_update_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_delete_subscription( - subscriber_client: pubsub_v1.SubscriberClient, subscription_admin: str + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, ) -> None: - subscriber.delete_subscription(PROJECT_ID, SUBSCRIPTION_ADMIN) - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=60), + subscription_for_delete_name = ( + f"subscription-test-subscription-for-delete-{PY_VERSION}-{UUID}" ) - @typed_backoff - def eventually_consistent_test() -> None: - with pytest.raises(Exception): - subscriber_client.get_subscription( - request={"subscription": subscription_admin} - ) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_for_delete_name + ) - eventually_consistent_test() + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + + subscriber.delete_subscription(PROJECT_ID, subscription_for_delete_name) + + with pytest.raises(Exception): + subscriber_client.get_subscription( + request={"subscription": subscription_for_delete_name} + ) + + # No clean up required. 
def test_receive( - publisher_client: pubsub_v1.PublisherClient, + subscriber_client: pubsub_v1.SubscriberClient, topic: str, - subscription_async: str, + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=60), + subscription_async_for_receive_name = ( + f"subscription-test-subscription-async-for-receive-{PY_VERSION}-{UUID}" ) - @typed_backoff - def eventually_consistent_test() -> None: - _ = _publish_messages(publisher_client, topic) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_for_receive_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) - subscriber.receive_messages(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + _ = _publish_messages(publisher_client, topic) - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out - assert "message" in out + subscriber.receive_messages(PROJECT_ID, subscription_async_for_receive_name, 5) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async_for_receive_name in out + assert "message" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_receive_with_custom_attributes( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, - subscription_async: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=60), + subscription_async_receive_with_custom_name = ( + f"subscription-test-subscription-async-receive-with-custom-{PY_VERSION}-{UUID}" ) - @typed_backoff - def eventually_consistent_test() -> None: - _ = _publish_messages(publisher_client, topic, origin="python-sample") + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_receive_with_custom_name + ) - subscriber.receive_messages_with_custom_attributes( - PROJECT_ID, SUBSCRIPTION_ASYNC, 5 + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} ) - out, _ = capsys.readouterr() - assert subscription_async in out - assert "message" in out - assert "origin" in out - assert "python-sample" in out + _ = _publish_messages(publisher_client, topic, origin="python-sample") - eventually_consistent_test() + subscriber.receive_messages_with_custom_attributes( + PROJECT_ID, subscription_async_receive_with_custom_name, 5 + ) + + out, _ = capsys.readouterr() + assert subscription_async_receive_with_custom_name in out + assert "message" in out + assert "origin" in out + assert "python-sample" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_receive_with_flow_control( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, - subscription_async: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=300), + subscription_async_receive_with_flow_control_name = f"subscription-test-subscription-async-receive-with-flow-control-{PY_VERSION}-{UUID}" + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_receive_with_flow_control_name ) - @typed_backoff - def eventually_consistent_test() -> None: - _ = _publish_messages(publisher_client, topic) + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) - subscriber.receive_messages_with_flow_control(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + _ = _publish_messages(publisher_client, topic) - out, _ = capsys.readouterr() - assert "Listening" in out - assert subscription_async in out - assert "message" in out + subscriber.receive_messages_with_flow_control( + PROJECT_ID, subscription_async_receive_with_flow_control_name, 5 + ) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async_receive_with_flow_control_name in out + assert "message" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_receive_with_blocking_shutdown( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, - subscription_async: str, capsys: CaptureFixture[str], ) -> None: + subscription_async_receive_with_blocking_name = f"subscription-test-subscription-async-receive-with-blocking-shutdown-{PY_VERSION}-{UUID}" + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_receive_with_blocking_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + _received = re.compile(r".*received.*message.*", flags=re.IGNORECASE) _done = re.compile(r".*done processing.*message.*", flags=re.IGNORECASE) _canceled = re.compile(r".*streaming pull future canceled.*", flags=re.IGNORECASE) _shut_down = re.compile(r".*done waiting.*stream shutdown.*", flags=re.IGNORECASE) - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=300), + _ = _publish_messages(publisher_client, topic, message_num=3) + + subscriber.receive_messages_with_blocking_shutdown( + PROJECT_ID, subscription_async_receive_with_blocking_name, timeout=5.0 ) - @typed_backoff - def eventually_consistent_test() -> None: - _ = _publish_messages(publisher_client, topic, message_num=3) + out, _ = capsys.readouterr() + out_lines = out.splitlines() + + msg_received_lines = [ + i for i, line in enumerate(out_lines) if _received.search(line) + ] + msg_done_lines = [i for i, line in enumerate(out_lines) if _done.search(line)] + stream_canceled_lines = [ + i for i, line in enumerate(out_lines) if _canceled.search(line) + ] + shutdown_done_waiting_lines = [ + i for i, line in enumerate(out_lines) if _shut_down.search(line) + ] - 
subscriber.receive_messages_with_blocking_shutdown( - PROJECT_ID, SUBSCRIPTION_ASYNC, timeout=5.0 - ) + try: + assert "Listening" in out + assert subscription_async_receive_with_blocking_name in out - out, _ = capsys.readouterr() - out_lines = out.splitlines() - - msg_received_lines = [ - i for i, line in enumerate(out_lines) if _received.search(line) - ] - msg_done_lines = [i for i, line in enumerate(out_lines) if _done.search(line)] - stream_canceled_lines = [ - i for i, line in enumerate(out_lines) if _canceled.search(line) - ] - shutdown_done_waiting_lines = [ - i for i, line in enumerate(out_lines) if _shut_down.search(line) - ] - - try: - assert "Listening" in out - assert subscription_async in out - - assert len(stream_canceled_lines) == 1 - assert len(shutdown_done_waiting_lines) == 1 - assert len(msg_received_lines) == 3 - assert len(msg_done_lines) == 3 - - # The stream should have been canceled *after* receiving messages, but before - # message processing was done. - assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] - - # Yet, waiting on the stream shutdown should have completed *after* - # the processing of received messages has ended. - assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] - except AssertionError: # pragma: NO COVER - from pprint import pprint - - pprint(out_lines) # To make possible flakiness debugging easier. - raise + assert len(stream_canceled_lines) == 1 + assert len(shutdown_done_waiting_lines) == 1 + assert len(msg_received_lines) == 3 + assert len(msg_done_lines) == 3 - eventually_consistent_test() + # The stream should have been canceled *after* receiving messages, but before + # message processing was done. + assert msg_received_lines[-1] < stream_canceled_lines[0] < msg_done_lines[0] + + # Yet, waiting on the stream shutdown should have completed *after* + # the processing of received messages has ended. 
+ assert msg_done_lines[-1] < shutdown_done_waiting_lines[0] + except AssertionError: # pragma: NO COVER + from pprint import pprint + + pprint(out_lines) # To make possible flakiness debugging easier. + raise + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_receive_messages_with_exactly_once_delivery_enabled( + subscriber_client: pubsub_v1.SubscriberClient, regional_publisher_client: pubsub_v1.PublisherClient, exactly_once_delivery_topic: str, - subscription_eod_for_receive: str, capsys: CaptureFixture[str], ) -> None: + subscription_eod_for_receive_name = ( + f"subscription-test-subscription-eod-for-receive-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_eod_for_receive_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={ + "name": subscription_path, + "topic": exactly_once_delivery_topic, + "enable_exactly_once_delivery": True, + } + ) + message_ids = _publish_messages( regional_publisher_client, exactly_once_delivery_topic ) subscriber.receive_messages_with_exactly_once_delivery_enabled( - PROJECT_ID, SUBSCRIPTION_EOD_FOR_RECEIVE, 30 + PROJECT_ID, subscription_eod_for_receive_name, 200 ) out, _ = capsys.readouterr() - assert subscription_eod_for_receive in out + assert subscription_eod_for_receive_name in out for message_id in message_ids: assert message_id in out + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + def test_listen_for_errors( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, - subscription_async: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=60), + subscription_async_listen = ( + f"subscription-test-subscription-async-listen-{PY_VERSION}-{UUID}" ) - @typed_backoff - def eventually_consistent_test() -> None: - _ = _publish_messages(publisher_client, topic) + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_listen + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) - subscriber.listen_for_errors(PROJECT_ID, SUBSCRIPTION_ASYNC, 5) + _ = _publish_messages(publisher_client, topic) - out, _ = capsys.readouterr() - assert subscription_async in out - assert "threw an exception" in out + subscriber.listen_for_errors(PROJECT_ID, subscription_async_listen, 5) - eventually_consistent_test() + out, _ = capsys.readouterr() + assert subscription_path in out + assert "threw an exception" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) def test_receive_synchronously( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, - subscription_sync: str, capsys: CaptureFixture[str], ) -> None: + + subscription_sync_for_receive_name = ( + f"subscription-test-subscription-sync-for-receive-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_sync_for_receive_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + _ = _publish_messages(publisher_client, topic) - subscriber.synchronous_pull(PROJECT_ID, SUBSCRIPTION_SYNC) + subscriber.synchronous_pull(PROJECT_ID, subscription_sync_for_receive_name) out, _ = capsys.readouterr() assert "Received" in out - assert f"{subscription_sync}" in out + assert f"{subscription_sync_for_receive_name}" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) @typed_flaky def test_receive_synchronously_with_lease( + subscriber_client: pubsub_v1.SubscriberClient, publisher_client: pubsub_v1.PublisherClient, topic: str, - subscription_sync: str, capsys: CaptureFixture[str], ) -> None: - typed_backoff = cast( - Callable[[C], C], - backoff.on_exception(backoff.expo, Unknown, max_time=300), + subscription_sync_for_receive_with_lease_name = f"subscription-test-subscription-sync-for-receive-with-lease-{PY_VERSION}-{UUID}" + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_sync_for_receive_with_lease_name ) - @typed_backoff - def run_sample() -> None: - _ = _publish_messages(publisher_client, topic, message_num=10) - # Pausing 10s to allow the subscriber to establish the connection - # because sync pull often returns fewer messages than requested. - # The intention is to fix flaky tests reporting errors like - # `google.api_core.exceptions.Unknown: None Stream removed` as - # in https://github.com/googleapis/python-pubsub/issues/341. - time.sleep(10) - subscriber.synchronous_pull_with_lease_management(PROJECT_ID, SUBSCRIPTION_SYNC) - - run_sample() + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + + _ = _publish_messages(publisher_client, topic, message_num=10) + # Pausing 10s to allow the subscriber to establish the connection + # because sync pull often returns fewer messages than requested. + # The intention is to fix flaky tests reporting errors like + # `google.api_core.exceptions.Unknown: None Stream removed` as + # in https://github.com/googleapis/python-pubsub/issues/341. 
+ time.sleep(10) + subscriber.synchronous_pull_with_lease_management( + PROJECT_ID, subscription_sync_for_receive_with_lease_name + ) out, _ = capsys.readouterr() # Sometimes the subscriber only gets 1 or 2 messages and test fails. # I think it's ok to consider those cases as passing. assert "Received and acknowledged" in out - assert f"messages from {subscription_sync}." in out + assert f"messages from {subscription_path}." in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) From 899c86fc7e26c797a91f9e8bf282466b3ae91c80 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 19 May 2022 08:13:35 -0400 Subject: [PATCH 0826/1197] feat: add BigQuery configuration for subscriptions (#685) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add BigQuery configuration for subscriptions PiperOrigin-RevId: 449031535 Source-Link: https://github.com/googleapis/googleapis/commit/feec34dfac930eb0ab8c3e72ff5794c3f4c5924d Source-Link: https://github.com/googleapis/googleapis-gen/commit/89664e9708c19d532c63f7a16fd79cb631d87aa1 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiODk2NjRlOTcwOGMxOWQ1MzJjNjNmN2ExNmZkNzljYjYzMWQ4N2FhMSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../google/pubsub/__init__.py | 2 + .../google/pubsub_v1/__init__.py | 2 + .../services/subscriber/async_client.py | 5 +- .../pubsub_v1/services/subscriber/client.py | 5 +- .../google/pubsub_v1/types/__init__.py | 2 + .../google/pubsub_v1/types/pubsub.py | 95 ++++++++++++++++++- .../scripts/fixup_pubsub_v1_keywords.py | 2 +- .../unit/gapic/pubsub_v1/test_subscriber.py | 12 +++ 8 files changed, 117 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py 
b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 9fc4e6feb824..7c94c23082b0 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -24,6 +24,7 @@ from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient from google.pubsub_v1.types.pubsub import AcknowledgeRequest +from google.pubsub_v1.types.pubsub import BigQueryConfig from google.pubsub_v1.types.pubsub import CreateSnapshotRequest from google.pubsub_v1.types.pubsub import DeadLetterPolicy from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest @@ -88,6 +89,7 @@ "SubscriberClient", "SubscriberAsyncClient", "AcknowledgeRequest", + "BigQueryConfig", "CreateSnapshotRequest", "DeadLetterPolicy", "DeleteSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 75c41bf215cc..80fc23d59644 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -22,6 +22,7 @@ from .services.subscriber import SubscriberAsyncClient from .types.pubsub import AcknowledgeRequest +from .types.pubsub import BigQueryConfig from .types.pubsub import CreateSnapshotRequest from .types.pubsub import DeadLetterPolicy from .types.pubsub import DeleteSnapshotRequest @@ -83,6 +84,7 @@ "SchemaServiceAsyncClient", "SubscriberAsyncClient", "AcknowledgeRequest", + "BigQueryConfig", "CreateSchemaRequest", "CreateSnapshotRequest", "DeadLetterPolicy", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index acfc54a12a52..fa88cc64936b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -295,8 +295,9 @@ 
async def sample_create_subscription(): should not be set. push_config (:class:`google.pubsub_v1.types.PushConfig`): If push delivery is used with this subscription, this - field is used to configure it. An empty ``pushConfig`` - signifies that the subscriber will pull and ack messages + field is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages using API methods. This corresponds to the ``push_config`` field diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index f635b3295f2b..c0af49577c39 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -578,8 +578,9 @@ def sample_create_subscription(): should not be set. push_config (google.pubsub_v1.types.PushConfig): If push delivery is used with this subscription, this - field is used to configure it. An empty ``pushConfig`` - signifies that the subscriber will pull and ack messages + field is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages using API methods. 
This corresponds to the ``push_config`` field diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 888e2184aba7..c0d9c4619d75 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -17,6 +17,7 @@ from .pubsub import ( AcknowledgeRequest, + BigQueryConfig, CreateSnapshotRequest, DeadLetterPolicy, DeleteSnapshotRequest, @@ -87,6 +88,7 @@ __all__ = ( "TimeoutType", "AcknowledgeRequest", + "BigQueryConfig", "CreateSnapshotRequest", "DeadLetterPolicy", "DeleteSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 4c5841f5cbee..73658e9c0989 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -46,6 +46,7 @@ "DeadLetterPolicy", "ExpirationPolicy", "PushConfig", + "BigQueryConfig", "ReceivedMessage", "GetSubscriptionRequest", "UpdateSubscriptionRequest", @@ -581,9 +582,16 @@ class Subscription(proto.Message): deleted. push_config (google.pubsub_v1.types.PushConfig): If push delivery is used with this subscription, this field - is used to configure it. An empty ``pushConfig`` signifies - that the subscriber will pull and ack messages using API - methods. + is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages using + API methods. + bigquery_config (google.pubsub_v1.types.BigQueryConfig): + If delivery to BigQuery is used with this subscription, this + field is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages using + API methods. 
ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt @@ -700,8 +708,18 @@ class Subscription(proto.Message): subscribers. See the ``message_retention_duration`` field in ``Topic``. This field is set only in responses from the server; it is ignored if it is set in any requests. + state (google.pubsub_v1.types.Subscription.State): + Output only. An output-only field indicating + whether or not the subscription can receive + messages. """ + class State(proto.Enum): + r"""Possible states for a subscription.""" + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + RESOURCE_ERROR = 2 + name = proto.Field( proto.STRING, number=1, @@ -715,6 +733,11 @@ class Subscription(proto.Message): number=4, message="PushConfig", ) + bigquery_config = proto.Field( + proto.MESSAGE, + number=18, + message="BigQueryConfig", + ) ack_deadline_seconds = proto.Field( proto.INT32, number=5, @@ -769,6 +792,11 @@ class Subscription(proto.Message): number=17, message=duration_pb2.Duration, ) + state = proto.Field( + proto.ENUM, + number=19, + enum=State, + ) class RetryPolicy(proto.Message): @@ -980,6 +1008,67 @@ class OidcToken(proto.Message): ) +class BigQueryConfig(proto.Message): + r"""Configuration for a BigQuery subscription. + + Attributes: + table (str): + The name of the table to which to write data, + of the form {projectId}:{datasetId}.{tableId} + use_topic_schema (bool): + When true, use the topic's schema as the + columns to write to in BigQuery, if it exists. + write_metadata (bool): + When true, write the subscription name, message_id, + publish_time, attributes, and ordering_key to additional + columns in the table. The subscription name, message_id, and + publish_time fields are put in their own columns while all + other message properties (other than data) are written to a + JSON object in the attributes column. 
+ drop_unknown_fields (bool): + When true and use_topic_schema is true, any fields that are + a part of the topic schema that are not part of the BigQuery + table schema are dropped when writing to BigQuery. + Otherwise, the schemas must be kept in sync and any messages + with extra fields are not written and remain in the + subscription's backlog. + state (google.pubsub_v1.types.BigQueryConfig.State): + Output only. An output-only field that + indicates whether or not the subscription can + receive messages. + """ + + class State(proto.Enum): + r"""Possible states for a BigQuery subscription.""" + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PERMISSION_DENIED = 2 + NOT_FOUND = 3 + SCHEMA_MISMATCH = 4 + + table = proto.Field( + proto.STRING, + number=1, + ) + use_topic_schema = proto.Field( + proto.BOOL, + number=2, + ) + write_metadata = proto.Field( + proto.BOOL, + number=3, + ) + drop_unknown_fields = proto.Field( + proto.BOOL, + number=4, + ) + state = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + + class ReceivedMessage(proto.Message): r"""A message and its corresponding acknowledgment ID. 
diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 3b6d3d378238..d1bbcedf98af 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,7 +42,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'acknowledge': ('subscription', 'ack_ids', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', ), + 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ), 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), 'delete_schema': ('name', ), 'delete_snapshot': ('snapshot', ), diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index cbccac58293e..b4f9b3882a8c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -665,6 +665,7 @@ def test_create_subscription(request_type, transport: str = "grpc"): filter="filter_value", detached=True, enable_exactly_once_delivery=True, + 
state=pubsub.Subscription.State.ACTIVE, ) response = client.create_subscription(request) @@ -683,6 +684,7 @@ def test_create_subscription(request_type, transport: str = "grpc"): assert response.filter == "filter_value" assert response.detached is True assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE def test_create_subscription_empty_call(): @@ -731,6 +733,7 @@ async def test_create_subscription_async( filter="filter_value", detached=True, enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) ) response = await client.create_subscription(request) @@ -750,6 +753,7 @@ async def test_create_subscription_async( assert response.filter == "filter_value" assert response.detached is True assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE @pytest.mark.asyncio @@ -963,6 +967,7 @@ def test_get_subscription(request_type, transport: str = "grpc"): filter="filter_value", detached=True, enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) response = client.get_subscription(request) @@ -981,6 +986,7 @@ def test_get_subscription(request_type, transport: str = "grpc"): assert response.filter == "filter_value" assert response.detached is True assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE def test_get_subscription_empty_call(): @@ -1025,6 +1031,7 @@ async def test_get_subscription_async( filter="filter_value", detached=True, enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) ) response = await client.get_subscription(request) @@ -1044,6 +1051,7 @@ async def test_get_subscription_async( assert response.filter == "filter_value" assert response.detached is True assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE @pytest.mark.asyncio @@ -1221,6 +1229,7 @@ def 
test_update_subscription(request_type, transport: str = "grpc"): filter="filter_value", detached=True, enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) response = client.update_subscription(request) @@ -1239,6 +1248,7 @@ def test_update_subscription(request_type, transport: str = "grpc"): assert response.filter == "filter_value" assert response.detached is True assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE def test_update_subscription_empty_call(): @@ -1287,6 +1297,7 @@ async def test_update_subscription_async( filter="filter_value", detached=True, enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) ) response = await client.update_subscription(request) @@ -1306,6 +1317,7 @@ async def test_update_subscription_async( assert response.filter == "filter_value" assert response.detached is True assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE @pytest.mark.asyncio From e72d39ebceaea288e3dd10ed3ac9ec660bfcf538 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 26 May 2022 15:55:58 -0400 Subject: [PATCH 0827/1197] samples: regenerate sample proto for compatibility with protobuf >3.20.1 (#690) --- .../snippets/utilities/us_states_pb2.py | 70 +++---------------- 1 file changed, 9 insertions(+), 61 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py b/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py index 0b0c325dc728..93af674bd2ea 100644 --- a/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py +++ b/packages/google-cloud-pubsub/samples/snippets/utilities/us_states_pb2.py @@ -2,9 +2,9 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: us-states.proto """Generated protocol buffer code.""" +from google.protobuf.internal import builder as _builder from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) @@ -13,65 +13,13 @@ -DESCRIPTOR = _descriptor.FileDescriptor( - name='us-states.proto', - package='utilities', - syntax='proto3', - serialized_options=None, - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n\x0fus-states.proto\x12\tutilities\"-\n\nStateProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpost_abbr\x18\x02 \x01(\tb\x06proto3' -) - - - - -_STATEPROTO = _descriptor.Descriptor( - name='StateProto', - full_name='utilities.StateProto', - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='utilities.StateProto.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - _descriptor.FieldDescriptor( - name='post_abbr', full_name='utilities.StateProto.post_abbr', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - 
serialized_start=30, - serialized_end=75, -) - -DESCRIPTOR.message_types_by_name['StateProto'] = _STATEPROTO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -StateProto = _reflection.GeneratedProtocolMessageType('StateProto', (_message.Message,), { - 'DESCRIPTOR' : _STATEPROTO, - '__module__' : 'us_states_pb2' - # @@protoc_insertion_point(class_scope:utilities.StateProto) - }) -_sym_db.RegisterMessage(StateProto) +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0fus-states.proto\x12\tutilities\"-\n\nStateProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tpost_abbr\x18\x02 \x01(\tb\x06proto3') +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'us_states_pb2', globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + _STATEPROTO._serialized_start=30 + _STATEPROTO._serialized_end=75 # @@protoc_insertion_point(module_scope) From fafeb4286effabb15ed653a42261c5a69b0e30f9 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 27 May 2022 16:22:05 -0400 Subject: [PATCH 0828/1197] fix: add info log for bidi streaming pull ack_deadline requests (#692) * fix add info logg on bidi streaming pull ack_deadline changes * adding test coverage for logging --- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 3 +++ .../unit/pubsub_v1/subscriber/test_streaming_pull_manager.py | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index ae3635892f7b..894c41b4c8b1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -743,6 +743,9 @@ def 
heartbeat(self) -> bool: request = gapic_types.StreamingPullRequest( stream_ack_deadline_seconds=self.ack_deadline ) + _LOGGER.info( + "Sending new ack_deadline of %d seconds.", self.ack_deadline + ) else: request = gapic_types.StreamingPullRequest() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 8a14609519ff..ad1647119c04 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1035,7 +1035,8 @@ def test_heartbeat_inactive(): assert not result -def test_heartbeat_stream_ack_deadline_seconds(): +def test_heartbeat_stream_ack_deadline_seconds(caplog): + caplog.set_level(logging.INFO) manager = make_manager() manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager._rpc.is_active = True @@ -1050,6 +1051,7 @@ def test_heartbeat_stream_ack_deadline_seconds(): assert result # Set to false after a send is initiated. assert not manager._send_new_ack_deadline + assert "Sending new ack_deadline of 10 seconds." in caplog.text @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) From 95d9d8510e84335434381b427fb313d20652d4b0 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Sun, 29 May 2022 14:23:57 -0400 Subject: [PATCH 0829/1197] chore(deps): add asyncio-mode=strict (#695) * chore(dependencies): add asyncio-mode=strict * run only >3.6 * add to extras by python * add arg only after 3.6 * lint and remove comments * Revert "lint and remove comments" This reverts commit 2c12c3f4a26a6c3a88719215cad7848188abeb59. 
* fix lint and comments --- packages/google-cloud-pubsub/noxfile.py | 42 +++++++++++++++++-------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 914b76e5b8ab..7af8428afa1a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -215,19 +215,35 @@ def default(session): install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google/cloud", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) + if session.python != "3.6": + session.run( + "py.test", + "--quiet", + "--asyncio-mode=strict", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) + else: + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) From da6817f436d6c7b842e71f5b8a1142b2e3265df7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 May 2022 17:54:16 +0000 Subject: [PATCH 0830/1197] chore: use gapic-generator-python 1.0.0 (#691) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 451250442 Source-Link: https://github.com/googleapis/googleapis/commit/cca5e8181f6442b134e8d4d206fbe9e0e74684ba Source-Link: https://github.com/googleapis/googleapis-gen/commit/0b219da161a8bdcc3c6f7b2efcd82105182a30ca Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMGIyMTlkYTE2MWE4YmRjYzNjNmY3YjJlZmNkODIxMDUxODJhMzBjYSJ9 --- .../owl-bot-staging/v1/.coveragerc | 17 + .../owl-bot-staging/v1/.flake8 | 33 + .../owl-bot-staging/v1/MANIFEST.in | 2 + .../owl-bot-staging/v1/README.rst | 49 + .../owl-bot-staging/v1/docs/conf.py | 376 ++ .../owl-bot-staging/v1/docs/index.rst | 7 + .../v1/docs/pubsub_v1/publisher.rst | 10 + .../v1/docs/pubsub_v1/schema_service.rst | 10 + .../v1/docs/pubsub_v1/services.rst | 8 + .../v1/docs/pubsub_v1/subscriber.rst | 10 + .../v1/docs/pubsub_v1/types.rst | 7 + .../v1/google/pubsub/__init__.py | 145 + .../owl-bot-staging/v1/google/pubsub/py.typed | 2 + .../v1/google/pubsub_v1/__init__.py | 146 + .../v1/google/pubsub_v1/gapic_metadata.json | 361 ++ .../v1/google/pubsub_v1/py.typed | 2 + .../v1/google/pubsub_v1/services/__init__.py | 15 + .../pubsub_v1/services/publisher/__init__.py | 22 + .../services/publisher/async_client.py | 1408 +++++ .../pubsub_v1/services/publisher/client.py | 1559 +++++ .../pubsub_v1/services/publisher/pagers.py | 381 ++ .../services/publisher/transports/__init__.py | 33 + .../services/publisher/transports/base.py | 364 ++ .../services/publisher/transports/grpc.py | 572 ++ .../publisher/transports/grpc_asyncio.py | 572 ++ .../services/schema_service/__init__.py | 22 + .../services/schema_service/async_client.py | 1062 ++++ .../services/schema_service/client.py | 1262 ++++ .../services/schema_service/pagers.py | 139 + .../schema_service/transports/__init__.py | 33 + .../schema_service/transports/base.py | 255 + .../schema_service/transports/grpc.py | 475 ++ .../schema_service/transports/grpc_asyncio.py | 475 ++ .../pubsub_v1/services/subscriber/__init__.py | 22 + 
.../services/subscriber/async_client.py | 2271 +++++++ .../pubsub_v1/services/subscriber/client.py | 2373 +++++++ .../pubsub_v1/services/subscriber/pagers.py | 260 + .../subscriber/transports/__init__.py | 33 + .../services/subscriber/transports/base.py | 508 ++ .../services/subscriber/transports/grpc.py | 835 +++ .../subscriber/transports/grpc_asyncio.py | 835 +++ .../v1/google/pubsub_v1/types/__init__.py | 136 + .../v1/google/pubsub_v1/types/pubsub.py | 1866 ++++++ .../v1/google/pubsub_v1/types/schema.py | 319 + .../owl-bot-staging/v1/mypy.ini | 3 + .../owl-bot-staging/v1/noxfile.py | 180 + ..._generated_publisher_create_topic_async.py | 45 + ...1_generated_publisher_create_topic_sync.py | 45 + ..._generated_publisher_delete_topic_async.py | 43 + ...1_generated_publisher_delete_topic_sync.py | 43 + ...ted_publisher_detach_subscription_async.py | 45 + ...ated_publisher_detach_subscription_sync.py | 45 + ..._v1_generated_publisher_get_topic_async.py | 45 + ...b_v1_generated_publisher_get_topic_sync.py | 45 + ...ed_publisher_list_topic_snapshots_async.py | 46 + ...ted_publisher_list_topic_snapshots_sync.py | 46 + ...ublisher_list_topic_subscriptions_async.py | 46 + ...publisher_list_topic_subscriptions_sync.py | 46 + ...1_generated_publisher_list_topics_async.py | 46 + ...v1_generated_publisher_list_topics_sync.py | 46 + ...ub_v1_generated_publisher_publish_async.py | 45 + ...sub_v1_generated_publisher_publish_sync.py | 45 + ..._generated_publisher_update_topic_async.py | 48 + ...1_generated_publisher_update_topic_sync.py | 48 + ...ated_schema_service_create_schema_async.py | 49 + ...rated_schema_service_create_schema_sync.py | 49 + ...ated_schema_service_delete_schema_async.py | 43 + ...rated_schema_service_delete_schema_sync.py | 43 + ...nerated_schema_service_get_schema_async.py | 45 + ...enerated_schema_service_get_schema_sync.py | 45 + ...rated_schema_service_list_schemas_async.py | 46 + ...erated_schema_service_list_schemas_sync.py | 46 + 
...d_schema_service_validate_message_async.py | 46 + ...ed_schema_service_validate_message_sync.py | 46 + ...ed_schema_service_validate_schema_async.py | 49 + ...ted_schema_service_validate_schema_sync.py | 49 + ..._generated_subscriber_acknowledge_async.py | 44 + ...1_generated_subscriber_acknowledge_sync.py | 44 + ...erated_subscriber_create_snapshot_async.py | 46 + ...nerated_subscriber_create_snapshot_sync.py | 46 + ...ed_subscriber_create_subscription_async.py | 46 + ...ted_subscriber_create_subscription_sync.py | 46 + ...erated_subscriber_delete_snapshot_async.py | 43 + ...nerated_subscriber_delete_snapshot_sync.py | 43 + ...ed_subscriber_delete_subscription_async.py | 43 + ...ted_subscriber_delete_subscription_sync.py | 43 + ...generated_subscriber_get_snapshot_async.py | 45 + ..._generated_subscriber_get_snapshot_sync.py | 45 + ...rated_subscriber_get_subscription_async.py | 45 + ...erated_subscriber_get_subscription_sync.py | 45 + ...nerated_subscriber_list_snapshots_async.py | 46 + ...enerated_subscriber_list_snapshots_sync.py | 46 + ...ted_subscriber_list_subscriptions_async.py | 46 + ...ated_subscriber_list_subscriptions_sync.py | 46 + ...ed_subscriber_modify_ack_deadline_async.py | 45 + ...ted_subscriber_modify_ack_deadline_sync.py | 45 + ...ted_subscriber_modify_push_config_async.py | 43 + ...ated_subscriber_modify_push_config_sync.py | 43 + ...bsub_v1_generated_subscriber_pull_async.py | 46 + ...ubsub_v1_generated_subscriber_pull_sync.py | 46 + ...bsub_v1_generated_subscriber_seek_async.py | 45 + ...ubsub_v1_generated_subscriber_seek_sync.py | 45 + ...nerated_subscriber_streaming_pull_async.py | 57 + ...enerated_subscriber_streaming_pull_sync.py | 57 + ...erated_subscriber_update_snapshot_async.py | 44 + ...nerated_subscriber_update_snapshot_sync.py | 44 + ...ed_subscriber_update_subscription_async.py | 49 + ...ted_subscriber_update_subscription_sync.py | 49 + .../snippet_metadata_pubsub_v1.json | 5019 +++++++++++++++ 
.../v1/scripts/fixup_pubsub_v1_keywords.py | 209 + .../owl-bot-staging/v1/setup.py | 60 + .../owl-bot-staging/v1/tests/__init__.py | 16 + .../owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../v1/tests/unit/gapic/pubsub_v1/__init__.py | 16 + .../unit/gapic/pubsub_v1/test_publisher.py | 4157 +++++++++++++ .../gapic/pubsub_v1/test_schema_service.py | 3123 ++++++++++ .../unit/gapic/pubsub_v1/test_subscriber.py | 5492 +++++++++++++++++ 118 files changed, 40439 insertions(+) create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/schema.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/mypy.ini create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/noxfile.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/setup.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py create mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py create mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc b/packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc new file mode 100644 index 000000000000..bcdb1c5865b2 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/pubsub/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 b/packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 new file mode 100644 index 000000000000..29227d4cf419 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in b/packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 000000000000..4daeccbde234 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/pubsub *.py +recursive-include google/pubsub_v1 *.py diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst new file mode 100644 index 000000000000..4c7897d8f43a --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Pubsub API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Pubsub API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 000000000000..828a145fb52d --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-pubsub documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. 
They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-pubsub" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". 
+# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-pubsub-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-pubsub.tex", + u"google-cloud-pubsub Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-pubsub", + u"Google Pubsub Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-pubsub", + u"google-cloud-pubsub Documentation", + author, + "google-cloud-pubsub", + "GAPIC library for Google Pubsub API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. 
+# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 000000000000..5cb0459d080e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + pubsub_v1/services + pubsub_v1/types diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst new file mode 100644 index 000000000000..0a132656d3f9 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst @@ -0,0 +1,10 @@ +Publisher +--------------------------- + +.. 
automodule:: google.pubsub_v1.services.publisher + :members: + :inherited-members: + +.. automodule:: google.pubsub_v1.services.publisher.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst new file mode 100644 index 000000000000..4cb7a04965fe --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst @@ -0,0 +1,10 @@ +SchemaService +------------------------------- + +.. automodule:: google.pubsub_v1.services.schema_service + :members: + :inherited-members: + +.. automodule:: google.pubsub_v1.services.schema_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst new file mode 100644 index 000000000000..6ab7d9e527f7 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst @@ -0,0 +1,8 @@ +Services for Google Pubsub v1 API +================================= +.. toctree:: + :maxdepth: 2 + + publisher + schema_service + subscriber diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst new file mode 100644 index 000000000000..7f2c74a4143e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst @@ -0,0 +1,10 @@ +Subscriber +---------------------------- + +.. automodule:: google.pubsub_v1.services.subscriber + :members: + :inherited-members: + +.. 
automodule:: google.pubsub_v1.services.subscriber.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst new file mode 100644 index 000000000000..964dfe575dc1 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Pubsub v1 API +============================== + +.. automodule:: google.pubsub_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py new file mode 100644 index 000000000000..7efeb4c46c9f --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.pubsub_v1.services.publisher.client import PublisherClient +from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient +from google.pubsub_v1.services.schema_service.client import SchemaServiceClient +from google.pubsub_v1.services.schema_service.async_client import SchemaServiceAsyncClient +from google.pubsub_v1.services.subscriber.client import SubscriberClient +from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient + +from google.pubsub_v1.types.pubsub import AcknowledgeRequest +from google.pubsub_v1.types.pubsub import BigQueryConfig +from google.pubsub_v1.types.pubsub import CreateSnapshotRequest +from google.pubsub_v1.types.pubsub import DeadLetterPolicy +from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest +from google.pubsub_v1.types.pubsub import DeleteSubscriptionRequest +from google.pubsub_v1.types.pubsub import DeleteTopicRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionRequest +from google.pubsub_v1.types.pubsub import DetachSubscriptionResponse +from google.pubsub_v1.types.pubsub import ExpirationPolicy +from google.pubsub_v1.types.pubsub import GetSnapshotRequest +from google.pubsub_v1.types.pubsub import GetSubscriptionRequest +from google.pubsub_v1.types.pubsub import GetTopicRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListSubscriptionsResponse +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsRequest +from google.pubsub_v1.types.pubsub import ListTopicSnapshotsResponse +from google.pubsub_v1.types.pubsub import ListTopicsRequest +from google.pubsub_v1.types.pubsub import ListTopicsResponse +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsRequest +from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse 
+from google.pubsub_v1.types.pubsub import MessageStoragePolicy +from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest +from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest +from google.pubsub_v1.types.pubsub import PublishRequest +from google.pubsub_v1.types.pubsub import PublishResponse +from google.pubsub_v1.types.pubsub import PubsubMessage +from google.pubsub_v1.types.pubsub import PullRequest +from google.pubsub_v1.types.pubsub import PullResponse +from google.pubsub_v1.types.pubsub import PushConfig +from google.pubsub_v1.types.pubsub import ReceivedMessage +from google.pubsub_v1.types.pubsub import RetryPolicy +from google.pubsub_v1.types.pubsub import SchemaSettings +from google.pubsub_v1.types.pubsub import SeekRequest +from google.pubsub_v1.types.pubsub import SeekResponse +from google.pubsub_v1.types.pubsub import Snapshot +from google.pubsub_v1.types.pubsub import StreamingPullRequest +from google.pubsub_v1.types.pubsub import StreamingPullResponse +from google.pubsub_v1.types.pubsub import Subscription +from google.pubsub_v1.types.pubsub import Topic +from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest +from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest +from google.pubsub_v1.types.pubsub import UpdateTopicRequest +from google.pubsub_v1.types.schema import CreateSchemaRequest +from google.pubsub_v1.types.schema import DeleteSchemaRequest +from google.pubsub_v1.types.schema import GetSchemaRequest +from google.pubsub_v1.types.schema import ListSchemasRequest +from google.pubsub_v1.types.schema import ListSchemasResponse +from google.pubsub_v1.types.schema import Schema +from google.pubsub_v1.types.schema import ValidateMessageRequest +from google.pubsub_v1.types.schema import ValidateMessageResponse +from google.pubsub_v1.types.schema import ValidateSchemaRequest +from google.pubsub_v1.types.schema import ValidateSchemaResponse +from google.pubsub_v1.types.schema import Encoding +from 
google.pubsub_v1.types.schema import SchemaView + +__all__ = ('PublisherClient', + 'PublisherAsyncClient', + 'SchemaServiceClient', + 'SchemaServiceAsyncClient', + 'SubscriberClient', + 'SubscriberAsyncClient', + 'AcknowledgeRequest', + 'BigQueryConfig', + 'CreateSnapshotRequest', + 'DeadLetterPolicy', + 'DeleteSnapshotRequest', + 'DeleteSubscriptionRequest', + 'DeleteTopicRequest', + 'DetachSubscriptionRequest', + 'DetachSubscriptionResponse', + 'ExpirationPolicy', + 'GetSnapshotRequest', + 'GetSubscriptionRequest', + 'GetTopicRequest', + 'ListSnapshotsRequest', + 'ListSnapshotsResponse', + 'ListSubscriptionsRequest', + 'ListSubscriptionsResponse', + 'ListTopicSnapshotsRequest', + 'ListTopicSnapshotsResponse', + 'ListTopicsRequest', + 'ListTopicsResponse', + 'ListTopicSubscriptionsRequest', + 'ListTopicSubscriptionsResponse', + 'MessageStoragePolicy', + 'ModifyAckDeadlineRequest', + 'ModifyPushConfigRequest', + 'PublishRequest', + 'PublishResponse', + 'PubsubMessage', + 'PullRequest', + 'PullResponse', + 'PushConfig', + 'ReceivedMessage', + 'RetryPolicy', + 'SchemaSettings', + 'SeekRequest', + 'SeekResponse', + 'Snapshot', + 'StreamingPullRequest', + 'StreamingPullResponse', + 'Subscription', + 'Topic', + 'UpdateSnapshotRequest', + 'UpdateSubscriptionRequest', + 'UpdateTopicRequest', + 'CreateSchemaRequest', + 'DeleteSchemaRequest', + 'GetSchemaRequest', + 'ListSchemasRequest', + 'ListSchemasResponse', + 'Schema', + 'ValidateMessageRequest', + 'ValidateMessageResponse', + 'ValidateSchemaRequest', + 'ValidateSchemaResponse', + 'Encoding', + 'SchemaView', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed new file mode 100644 index 000000000000..1cec9a5ba1ab --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-pubsub package uses inline types. 
diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py new file mode 100644 index 000000000000..0b045251288e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py @@ -0,0 +1,146 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.publisher import PublisherClient +from .services.publisher import PublisherAsyncClient +from .services.schema_service import SchemaServiceClient +from .services.schema_service import SchemaServiceAsyncClient +from .services.subscriber import SubscriberClient +from .services.subscriber import SubscriberAsyncClient + +from .types.pubsub import AcknowledgeRequest +from .types.pubsub import BigQueryConfig +from .types.pubsub import CreateSnapshotRequest +from .types.pubsub import DeadLetterPolicy +from .types.pubsub import DeleteSnapshotRequest +from .types.pubsub import DeleteSubscriptionRequest +from .types.pubsub import DeleteTopicRequest +from .types.pubsub import DetachSubscriptionRequest +from .types.pubsub import DetachSubscriptionResponse +from .types.pubsub import ExpirationPolicy +from .types.pubsub import GetSnapshotRequest +from .types.pubsub import GetSubscriptionRequest +from .types.pubsub import GetTopicRequest +from .types.pubsub import ListSnapshotsRequest +from .types.pubsub import ListSnapshotsResponse 
+from .types.pubsub import ListSubscriptionsRequest +from .types.pubsub import ListSubscriptionsResponse +from .types.pubsub import ListTopicSnapshotsRequest +from .types.pubsub import ListTopicSnapshotsResponse +from .types.pubsub import ListTopicsRequest +from .types.pubsub import ListTopicsResponse +from .types.pubsub import ListTopicSubscriptionsRequest +from .types.pubsub import ListTopicSubscriptionsResponse +from .types.pubsub import MessageStoragePolicy +from .types.pubsub import ModifyAckDeadlineRequest +from .types.pubsub import ModifyPushConfigRequest +from .types.pubsub import PublishRequest +from .types.pubsub import PublishResponse +from .types.pubsub import PubsubMessage +from .types.pubsub import PullRequest +from .types.pubsub import PullResponse +from .types.pubsub import PushConfig +from .types.pubsub import ReceivedMessage +from .types.pubsub import RetryPolicy +from .types.pubsub import SchemaSettings +from .types.pubsub import SeekRequest +from .types.pubsub import SeekResponse +from .types.pubsub import Snapshot +from .types.pubsub import StreamingPullRequest +from .types.pubsub import StreamingPullResponse +from .types.pubsub import Subscription +from .types.pubsub import Topic +from .types.pubsub import UpdateSnapshotRequest +from .types.pubsub import UpdateSubscriptionRequest +from .types.pubsub import UpdateTopicRequest +from .types.schema import CreateSchemaRequest +from .types.schema import DeleteSchemaRequest +from .types.schema import GetSchemaRequest +from .types.schema import ListSchemasRequest +from .types.schema import ListSchemasResponse +from .types.schema import Schema +from .types.schema import ValidateMessageRequest +from .types.schema import ValidateMessageResponse +from .types.schema import ValidateSchemaRequest +from .types.schema import ValidateSchemaResponse +from .types.schema import Encoding +from .types.schema import SchemaView + +__all__ = ( + 'PublisherAsyncClient', + 'SchemaServiceAsyncClient', + 
'SubscriberAsyncClient', +'AcknowledgeRequest', +'BigQueryConfig', +'CreateSchemaRequest', +'CreateSnapshotRequest', +'DeadLetterPolicy', +'DeleteSchemaRequest', +'DeleteSnapshotRequest', +'DeleteSubscriptionRequest', +'DeleteTopicRequest', +'DetachSubscriptionRequest', +'DetachSubscriptionResponse', +'Encoding', +'ExpirationPolicy', +'GetSchemaRequest', +'GetSnapshotRequest', +'GetSubscriptionRequest', +'GetTopicRequest', +'ListSchemasRequest', +'ListSchemasResponse', +'ListSnapshotsRequest', +'ListSnapshotsResponse', +'ListSubscriptionsRequest', +'ListSubscriptionsResponse', +'ListTopicSnapshotsRequest', +'ListTopicSnapshotsResponse', +'ListTopicSubscriptionsRequest', +'ListTopicSubscriptionsResponse', +'ListTopicsRequest', +'ListTopicsResponse', +'MessageStoragePolicy', +'ModifyAckDeadlineRequest', +'ModifyPushConfigRequest', +'PublishRequest', +'PublishResponse', +'PublisherClient', +'PubsubMessage', +'PullRequest', +'PullResponse', +'PushConfig', +'ReceivedMessage', +'RetryPolicy', +'Schema', +'SchemaServiceClient', +'SchemaSettings', +'SchemaView', +'SeekRequest', +'SeekResponse', +'Snapshot', +'StreamingPullRequest', +'StreamingPullResponse', +'SubscriberClient', +'Subscription', +'Topic', +'UpdateSnapshotRequest', +'UpdateSubscriptionRequest', +'UpdateTopicRequest', +'ValidateMessageRequest', +'ValidateMessageResponse', +'ValidateSchemaRequest', +'ValidateSchemaResponse', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json new file mode 100644 index 000000000000..4c5b86bd13bc --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json @@ -0,0 +1,361 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.pubsub_v1", + "protoPackage": "google.pubsub.v1", + "schema": "1.0", + 
"services": { + "Publisher": { + "clients": { + "grpc": { + "libraryClient": "PublisherClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + }, + "grpc-async": { + "libraryClient": "PublisherAsyncClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } + } + } + }, + "SchemaService": { + "clients": { + "grpc": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SchemaServiceAsyncClient", + "rpcs": { + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + 
"methods": [ + "delete_schema" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } + } + } + }, + "Subscriber": { + "clients": { + "grpc": { + "libraryClient": "SubscriberClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + }, + "grpc-async": { + "libraryClient": "SubscriberAsyncClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + 
"ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } + } + } + } + } +} diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed new file mode 100644 index 000000000000..1cec9a5ba1ab --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-pubsub package uses inline types. diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py new file mode 100644 index 000000000000..e8e1c3845db5 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py new file mode 100644 index 000000000000..ebfdcc3789d8 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import PublisherClient +from .async_client import PublisherAsyncClient + +__all__ = ( + 'PublisherClient', + 'PublisherAsyncClient', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py new file mode 100644 index 000000000000..48eeca8d0fcc --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py @@ -0,0 +1,1408 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport +from .client import PublisherClient + + +class PublisherAsyncClient: + """The service that an application uses to manipulate topics, + and to send messages to a topic. 
+ """ + + _client: PublisherClient + + DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + + schema_path = staticmethod(PublisherClient.schema_path) + parse_schema_path = staticmethod(PublisherClient.parse_schema_path) + subscription_path = staticmethod(PublisherClient.subscription_path) + parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) + topic_path = staticmethod(PublisherClient.topic_path) + parse_topic_path = staticmethod(PublisherClient.parse_topic_path) + common_billing_account_path = staticmethod(PublisherClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(PublisherClient.parse_common_billing_account_path) + common_folder_path = staticmethod(PublisherClient.common_folder_path) + parse_common_folder_path = staticmethod(PublisherClient.parse_common_folder_path) + common_organization_path = staticmethod(PublisherClient.common_organization_path) + parse_common_organization_path = staticmethod(PublisherClient.parse_common_organization_path) + common_project_path = staticmethod(PublisherClient.common_project_path) + parse_common_project_path = staticmethod(PublisherClient.parse_common_project_path) + common_location_path = staticmethod(PublisherClient.common_location_path) + parse_common_location_path = staticmethod(PublisherClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherAsyncClient: The constructed client. 
+ """ + return PublisherClient.from_service_account_info.__func__(PublisherAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherAsyncClient: The constructed client. + """ + return PublisherClient.from_service_account_file.__func__(PublisherAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return PublisherClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> PublisherTransport: + """Returns the transport used by the client instance. + + Returns: + PublisherTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(PublisherClient).get_transport_class, type(PublisherClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, PublisherTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the publisher client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.PublisherTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = PublisherClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_topic(self, + request: Union[pubsub.Topic, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = await client.create_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.Topic, dict]): + The request object. A topic resource. + name (:class:`str`): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.Topic(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_topic, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def update_topic(self, + request: Union[pubsub.UpdateTopicRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): + The request object. Request for the UpdateTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + request = pubsub.UpdateTopicRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_topic, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic.name", request.topic.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def publish(self, + request: Union[pubsub.PublishRequest, dict] = None, + *, + topic: str = None, + messages: Sequence[pubsub.PubsubMessage] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_publish(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = await client.publish(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.PublishRequest, dict]): + The request object. Request for the Publish method. + topic (:class:`str`): + Required. The messages in the request will be published + on this topic. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (:class:`Sequence[google.pubsub_v1.types.PubsubMessage]`): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.types.PublishResponse: + Response for the Publish method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, messages]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.PublishRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if messages: + request.messages.extend(messages) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.publish, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.Cancelled, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_topic(self, + request: Union[pubsub.GetTopicRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. 
+ + .. code-block:: python + + from google import pubsub_v1 + + async def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = await client.get_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): + The request object. Request for the GetTopic method. + topic (:class:`str`): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.GetTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_topic, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topics(self, + request: Union[pubsub.ListTopicsRequest, dict] = None, + *, + project: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsAsyncPager: + r"""Lists matching topics. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): + The request object. Request for the `ListTopics` method. + project (:class:`str`): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager: + Response for the ListTopics method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ListTopicsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topics, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. 
+ response = pagers.ListTopicsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_subscriptions(self, + request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSubscriptionsAsyncPager: + r"""Lists the names of the attached subscriptions on this + topic. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): + The request object. Request for the + `ListTopicSubscriptions` method. + topic (:class:`str`): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager: + Response for the ListTopicSubscriptions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ListTopicSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topic_subscriptions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSubscriptionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_topic_snapshots(self, + request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSnapshotsAsyncPager: + r"""Lists the names of the snapshots on this topic. 
Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): + The request object. Request for the `ListTopicSnapshots` + method. + topic (:class:`str`): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager: + Response for the ListTopicSnapshots method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ListTopicSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_topic_snapshots, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListTopicSnapshotsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_topic(self, + request: Union[pubsub.DeleteTopicRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. 
After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + await client.delete_topic(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): + The request object. Request for the `DeleteTopic` + method. + topic (:class:`str`): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.DeleteTopicRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_topic, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def detach_subscription(self, + request: Union[pubsub.DetachSubscriptionRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.detach_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): + The request object. Request for the DetachSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + # Create or coerce a protobuf request object. + request = pubsub.DetachSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.detach_subscription, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. 
+ It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). 
A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. + + If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~iam_policy_pb2.PolicyTestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "PublisherAsyncClient", +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py new file mode 100644 index 000000000000..a3fd8003d853 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py @@ -0,0 +1,1559 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.types import pubsub +from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import PublisherGrpcTransport +from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport + + +class PublisherClientMeta(type): + """Metaclass for the Publisher client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] + _transport_registry["grpc"] = PublisherGrpcTransport + _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[PublisherTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class PublisherClient(metaclass=PublisherClientMeta): + """The service that an application uses to manipulate topics, + and to send messages to a topic. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "pubsub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + PublisherClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> PublisherTransport: + """Returns the transport used by the client instance. + + Returns: + PublisherTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def schema_path(project: str,schema: str,) -> str: + """Returns a fully-qualified schema string.""" + return "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) + + @staticmethod + def parse_schema_path(path: str) -> Dict[str,str]: + """Parses a schema path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path(project: str,subscription: str,) -> str: + """Returns a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str,str]: + """Parses a subscription path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def topic_path(project: str,topic: 
str,) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str,str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = 
re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. 
+ + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, PublisherTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the publisher client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Union[str, PublisherTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, PublisherTransport): + # transport is a PublisherTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def create_topic(self, + request: Union[pubsub.Topic, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Creates the given topic with the given name. 
See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). + + .. code-block:: python + + from google import pubsub_v1 + + def sample_create_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.Topic( + name="name_value", + ) + + # Make the request + response = client.create_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.Topic, dict]): + The request object. A topic resource. + name (str): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` + must start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), + plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.Topic. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.Topic): + request = pubsub.Topic(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_topic(self, + request: Union[pubsub.UpdateTopicRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): + The request object. Request for the UpdateTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateTopicRequest): + request = pubsub.UpdateTopicRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic.name", request.topic.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def publish(self, + request: Union[pubsub.PublishRequest, dict] = None, + *, + topic: str = None, + messages: Sequence[pubsub.PubsubMessage] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.PublishRequest, dict]): + The request object. Request for the Publish method. + topic (str): + Required. The messages in the request will be published + on this topic. 
Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + messages (Sequence[google.pubsub_v1.types.PubsubMessage]): + Required. The messages to publish. + This corresponds to the ``messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.PublishResponse: + Response for the Publish method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, messages]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.PublishRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.PublishRequest): + request = pubsub.PublishRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if messages is not None: + request.messages = messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.publish] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_topic(self, + request: Union[pubsub.GetTopicRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Gets the configuration of a topic. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_get_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.GetTopicRequest( + topic="topic_value", + ) + + # Make the request + response = client.get_topic(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): + The request object. Request for the GetTopic method. + topic (str): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Topic: + A topic resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetTopicRequest): + request = pubsub.GetTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topics(self, + request: Union[pubsub.ListTopicsRequest, dict] = None, + *, + project: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicsPager: + r"""Lists matching topics. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): + The request object. 
Request for the `ListTopics` method. + project (str): + Required. The name of the project in which to list + topics. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicsPager: + Response for the ListTopics method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicsRequest): + request = pubsub.ListTopicsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topics] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_topic_subscriptions(self, + request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSubscriptionsPager: + r"""Lists the names of the attached subscriptions on this + topic. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): + The request object. Request for the + `ListTopicSubscriptions` method. + topic (str): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager: + Response for the ListTopicSubscriptions method. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): + request = pubsub.ListTopicSubscriptionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSubscriptionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_topic_snapshots(self, + request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListTopicSnapshotsPager: + r"""Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): + The request object. Request for the `ListTopicSnapshots` + method. + topic (str): + Required. The name of the topic that snapshots are + attached to. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager: + Response for the ListTopicSnapshots method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListTopicSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListTopicSnapshotsRequest): + request = pubsub.ListTopicSnapshotsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_topic_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListTopicSnapshotsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_topic(self, + request: Union[pubsub.DeleteTopicRequest, dict] = None, + *, + topic: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. 
After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_delete_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteTopicRequest( + topic="topic_value", + ) + + # Make the request + client.delete_topic(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): + The request object. Request for the `DeleteTopic` + method. + topic (str): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteTopicRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, pubsub.DeleteTopicRequest): + request = pubsub.DeleteTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_topic] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("topic", request.topic), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def detach_subscription(self, + request: Union[pubsub.DetachSubscriptionRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_detach_subscription(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.detach_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): + The request object. Request for the DetachSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.pubsub_v1.types.DetachSubscriptionResponse:
+ Response for the DetachSubscription
+ method. Reserved for future use.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Minor optimization to avoid making a copy if the user passes
+ # in a pubsub.DetachSubscriptionRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, pubsub.DetachSubscriptionRequest):
+ request = pubsub.DetachSubscriptionRequest(request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.detach_subscription]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ("subscription", request.subscription),
+ )),
+ )
+
+ # Send the request.
+ response = rpc(
+ request,
+ retry=retry,
+ timeout=timeout,
+ metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ """Releases underlying transport's resources.
+
+ .. warning::
+ ONLY use as a context manager if the transport is NOT shared
+ with other clients! Exiting the with block will CLOSE the transport
+ and may cause errors in other clients!
+ """
+ self.transport.close()
+
+ def set_iam_policy(
+ self,
+ request: iam_policy_pb2.SetIamPolicyRequest = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Sets the IAM access control policy on the specified resource.
+
+ Replaces any existing policy.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`):
+ The request object.
Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < 
timestamp('2020-10-01T00:00:00.000Z')
+ For a description of IAM and its features, see the `IAM
+ developer's
+ guide `__.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.SetIamPolicyRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.set_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def get_iam_policy(
+ self,
+ request: iam_policy_pb2.GetIamPolicyRequest = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> policy_pb2.Policy:
+ r"""Gets the IAM access control policy for the specified resource.
+
+ Returns an empty policy if the resource exists and does not have a
+ policy set.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`):
+ The request object. Request message for `GetIamPolicy`
+ method.
+ retry (google.api_core.retry.Retry): Designation of what errors, if
+ any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.policy_pb2.Policy:
+ Defines an Identity and Access Management (IAM) policy.
+ It is used to specify access control policies for Cloud
+ Platform resources.
+ A ``Policy`` is a collection of ``bindings``.
A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method(
+ self._transport.get_iam_policy,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata(
+ (("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def test_iam_permissions(
+ self,
+ request: iam_policy_pb2.TestIamPermissionsRequest = None,
+ *,
+ retry: OptionalRetry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> iam_policy_pb2.TestIamPermissionsResponse:
+ r"""Tests the specified IAM permissions against the IAM access control
+ policy for the specified resource.
+
+ If the resource does not exist, this will return an empty set
+ of permissions, not a NOT_FOUND error.
+
+ Args:
+ request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+ The request object. Request message for
+ `TestIamPermissions` method.
+ retry (google.api_core.retry.Retry): Designation of what errors,
+ if any, should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ Returns:
+ ~.iam_policy_pb2.TestIamPermissionsResponse:
+ Response message for ``TestIamPermissions`` method.
+ """
+ # Create or coerce a protobuf request object.
+
+ # The request isn't a proto-plus wrapped type,
+ # so it must be constructed via keyword expansion.
+ if isinstance(request, dict):
+ request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "PublisherClient", +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py new file mode 100644 index 000000000000..0f25db27ee3a --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.pubsub_v1.types import pubsub + + +class ListTopicsPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., pubsub.ListTopicsResponse], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[pubsub.Topic]: + for page in self.pages: + yield from page.topics + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTopicsAsyncPager: + """A pager for iterating through ``list_topics`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``topics`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopics`` requests and continue to iterate + through the ``topics`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[pubsub.ListTopicsResponse]], + request: pubsub.ListTopicsRequest, + response: pubsub.ListTopicsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListTopicsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[pubsub.Topic]: + async def async_generator(): + async for page in self.pages: + for response in page.topics: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., pubsub.ListTopicSubscriptionsResponse], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTopicSubscriptionsAsyncPager: + """A pager for iterating through ``list_topic_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[pubsub.ListTopicSubscriptionsResponse]], + request: pubsub.ListTopicSubscriptionsRequest, + response: pubsub.ListTopicSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListTopicSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsPager: + """A pager for iterating through ``list_topic_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., pubsub.ListTopicSnapshotsResponse], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSnapshotsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[str]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListTopicSnapshotsAsyncPager: + """A pager for iterating through ``list_topic_snapshots`` requests. 
+ + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListTopicSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[pubsub.ListTopicSnapshotsResponse]], + request: pubsub.ListTopicSnapshotsRequest, + response: pubsub.ListTopicSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListTopicSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListTopicSnapshotsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListTopicSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListTopicSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[str]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py new file mode 100644 index 000000000000..362fe78b8e0c --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from typing import Dict, Type + +from .base import PublisherTransport +from .grpc import PublisherGrpcTransport +from .grpc_asyncio import PublisherGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] +_transport_registry['grpc'] = PublisherGrpcTransport +_transport_registry['grpc_asyncio'] = PublisherGrpcAsyncIOTransport + +__all__ = ( + 'PublisherTransport', + 'PublisherGrpcTransport', + 'PublisherGrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py new file mode 100644 index 000000000000..f995d6728d4f --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py @@ -0,0 +1,364 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.pubsub_v1.types import pubsub

try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-pubsub',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package not installed (e.g. running from source); fall back to defaults.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


def _default_retry(*error_types):
    """Build the standard Pub/Sub retry policy for the given exception types.

    Every Publisher RPC uses the same backoff parameters (0.1s initial,
    60s maximum, 1.3 multiplier, 60s deadline) and differs only in which
    exception types are considered retryable; centralizing the construction
    keeps the per-method configuration from drifting.

    Args:
        *error_types: Exception classes that should trigger a retry.

    Returns:
        google.api_core.retry.Retry: The configured retry policy.
    """
    return retries.Retry(
        initial=0.1,
        maximum=60.0,
        multiplier=1.3,
        predicate=retries.if_exception_type(*error_types),
        deadline=60.0,
    )


class PublisherTransport(abc.ABC):
    """Abstract transport class for Publisher."""

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/pubsub',
    )

    DEFAULT_HOST: str = 'pubsub.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods: each RPC gets the shared backoff
        # policy (_default_retry) with its own set of retryable error types.
        self._wrapped_methods = {
            self.create_topic: gapic_v1.method.wrap_method(
                self.create_topic,
                default_retry=_default_retry(
                    core_exceptions.ServiceUnavailable,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_topic: gapic_v1.method.wrap_method(
                self.update_topic,
                default_retry=_default_retry(
                    core_exceptions.ServiceUnavailable,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.publish: gapic_v1.method.wrap_method(
                self.publish,
                # Publish retries the widest set of errors since messages are
                # idempotent at the application layer.
                default_retry=_default_retry(
                    core_exceptions.Aborted,
                    core_exceptions.Cancelled,
                    core_exceptions.DeadlineExceeded,
                    core_exceptions.InternalServerError,
                    core_exceptions.ResourceExhausted,
                    core_exceptions.ServiceUnavailable,
                    core_exceptions.Unknown,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_topic: gapic_v1.method.wrap_method(
                self.get_topic,
                default_retry=_default_retry(
                    core_exceptions.Aborted,
                    core_exceptions.ServiceUnavailable,
                    core_exceptions.Unknown,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_topics: gapic_v1.method.wrap_method(
                self.list_topics,
                default_retry=_default_retry(
                    core_exceptions.Aborted,
                    core_exceptions.ServiceUnavailable,
                    core_exceptions.Unknown,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_topic_subscriptions: gapic_v1.method.wrap_method(
                self.list_topic_subscriptions,
                default_retry=_default_retry(
                    core_exceptions.Aborted,
                    core_exceptions.ServiceUnavailable,
                    core_exceptions.Unknown,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.list_topic_snapshots: gapic_v1.method.wrap_method(
                self.list_topic_snapshots,
                default_retry=_default_retry(
                    core_exceptions.Aborted,
                    core_exceptions.ServiceUnavailable,
                    core_exceptions.Unknown,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.delete_topic: gapic_v1.method.wrap_method(
                self.delete_topic,
                default_retry=_default_retry(
                    core_exceptions.ServiceUnavailable,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.detach_subscription: gapic_v1.method.wrap_method(
                self.detach_subscription,
                default_retry=_default_retry(
                    core_exceptions.ServiceUnavailable,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def create_topic(self) -> Callable[
            [pubsub.Topic],
            Union[
                pubsub.Topic,
                Awaitable[pubsub.Topic]
            ]]:
        """Callable implemented by concrete transports for CreateTopic."""
        raise NotImplementedError()

    @property
    def update_topic(self) -> Callable[
            [pubsub.UpdateTopicRequest],
            Union[
                pubsub.Topic,
                Awaitable[pubsub.Topic]
            ]]:
        """Callable implemented by concrete transports for UpdateTopic."""
        raise NotImplementedError()

    @property
    def publish(self) -> Callable[
            [pubsub.PublishRequest],
            Union[
                pubsub.PublishResponse,
                Awaitable[pubsub.PublishResponse]
            ]]:
        """Callable implemented by concrete transports for Publish."""
        raise NotImplementedError()

    @property
    def get_topic(self) -> Callable[
            [pubsub.GetTopicRequest],
            Union[
                pubsub.Topic,
                Awaitable[pubsub.Topic]
            ]]:
        """Callable implemented by concrete transports for GetTopic."""
        raise NotImplementedError()

    @property
    def list_topics(self) -> Callable[
            [pubsub.ListTopicsRequest],
            Union[
                pubsub.ListTopicsResponse,
                Awaitable[pubsub.ListTopicsResponse]
            ]]:
        """Callable implemented by concrete transports for ListTopics."""
        raise NotImplementedError()

    @property
    def list_topic_subscriptions(self) -> Callable[
            [pubsub.ListTopicSubscriptionsRequest],
            Union[
                pubsub.ListTopicSubscriptionsResponse,
                Awaitable[pubsub.ListTopicSubscriptionsResponse]
            ]]:
        """Callable implemented by concrete transports for ListTopicSubscriptions."""
        raise NotImplementedError()

    @property
    def list_topic_snapshots(self) -> Callable[
            [pubsub.ListTopicSnapshotsRequest],
            Union[
                pubsub.ListTopicSnapshotsResponse,
                Awaitable[pubsub.ListTopicSnapshotsResponse]
            ]]:
        """Callable implemented by concrete transports for ListTopicSnapshots."""
        raise NotImplementedError()

    @property
    def delete_topic(self) -> Callable[
            [pubsub.DeleteTopicRequest],
            Union[
                empty_pb2.Empty,
                Awaitable[empty_pb2.Empty]
            ]]:
        """Callable implemented by concrete transports for DeleteTopic."""
        raise NotImplementedError()

    @property
    def detach_subscription(self) -> Callable[
            [pubsub.DetachSubscriptionRequest],
            Union[
                pubsub.DetachSubscriptionResponse,
                Awaitable[pubsub.DetachSubscriptionResponse]
            ]]:
        """Callable implemented by concrete transports for DetachSubscription."""
        raise NotImplementedError()

    @property
    def set_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.SetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        """Callable implemented by concrete transports for SetIamPolicy."""
        raise NotImplementedError()

    @property
    def get_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.GetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        """Callable implemented by concrete transports for GetIamPolicy."""
        raise NotImplementedError()

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest],
        Union[
            iam_policy_pb2.TestIamPermissionsResponse,
            Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
        ],
    ]:
        """Callable implemented by concrete transports for TestIamPermissions."""
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short identifier for the transport type, e.g. "grpc".
        raise NotImplementedError()


__all__ = (
    'PublisherTransport',
)
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore

from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.pubsub_v1.types import pubsub
from .base import PublisherTransport, DEFAULT_CLIENT_INFO


class PublisherGrpcTransport(PublisherTransport):
    """gRPC backend transport for Publisher.

    The service that an application uses to manipulate topics,
    and to send messages to a topic.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    # Cache of lazily-created stub callables, keyed by RPC name.
    _stubs: Dict[str, Callable]

    def __init__(self, *,
            host: str = 'pubsub.googleapis.com',
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            channel: Optional[grpc.Channel] = None,
            api_mtls_endpoint: Optional[str] = None,
            client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None,
            client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
                       host: str = 'pubsub.googleapis.com',
                       credentials: Optional[ga_credentials.Credentials] = None,
                       credentials_file: Optional[str] = None,
                       scopes: Optional[Sequence[str]] = None,
                       quota_project_id: Optional[str] = None,
                       **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel

    def _get_stub(self, name: str, full_method: str,
                  request_serializer, response_deserializer) -> Callable:
        """Return the cached stub callable for *name*, creating it on first use.

        Centralizes the create-once/cache pattern previously duplicated in
        every RPC property. gRPC handles serialization and deserialization,
        so only the (de)serializer functions need to be supplied.

        Args:
            name (str): Cache key / RPC name (e.g. ``'create_topic'``).
            full_method (str): Fully-qualified gRPC method path.
            request_serializer: Function serializing the request message.
            response_deserializer: Function deserializing the response message.

        Returns:
            Callable: A function that, when called, will call the underlying
            RPC on the server.
        """
        if name not in self._stubs:
            self._stubs[name] = self.grpc_channel.unary_unary(
                full_method,
                request_serializer=request_serializer,
                response_deserializer=response_deserializer,
            )
        return self._stubs[name]

    @property
    def create_topic(self) -> Callable[
            [pubsub.Topic],
            pubsub.Topic]:
        r"""Return a callable for the create topic method over gRPC.

        Creates the given topic with the given name. See the [resource
        name rules]
        (https://cloud.google.com/pubsub/docs/admin#resource_names).

        Returns:
            Callable[[~.Topic],
                    ~.Topic]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'create_topic',
            '/google.pubsub.v1.Publisher/CreateTopic',
            pubsub.Topic.serialize,
            pubsub.Topic.deserialize,
        )

    @property
    def update_topic(self) -> Callable[
            [pubsub.UpdateTopicRequest],
            pubsub.Topic]:
        r"""Return a callable for the update topic method over gRPC.

        Updates an existing topic. Note that certain
        properties of a topic are not modifiable.

        Returns:
            Callable[[~.UpdateTopicRequest],
                    ~.Topic]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'update_topic',
            '/google.pubsub.v1.Publisher/UpdateTopic',
            pubsub.UpdateTopicRequest.serialize,
            pubsub.Topic.deserialize,
        )

    @property
    def publish(self) -> Callable[
            [pubsub.PublishRequest],
            pubsub.PublishResponse]:
        r"""Return a callable for the publish method over gRPC.

        Adds one or more messages to the topic. Returns ``NOT_FOUND`` if
        the topic does not exist.

        Returns:
            Callable[[~.PublishRequest],
                    ~.PublishResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'publish',
            '/google.pubsub.v1.Publisher/Publish',
            pubsub.PublishRequest.serialize,
            pubsub.PublishResponse.deserialize,
        )

    @property
    def get_topic(self) -> Callable[
            [pubsub.GetTopicRequest],
            pubsub.Topic]:
        r"""Return a callable for the get topic method over gRPC.

        Gets the configuration of a topic.

        Returns:
            Callable[[~.GetTopicRequest],
                    ~.Topic]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'get_topic',
            '/google.pubsub.v1.Publisher/GetTopic',
            pubsub.GetTopicRequest.serialize,
            pubsub.Topic.deserialize,
        )

    @property
    def list_topics(self) -> Callable[
            [pubsub.ListTopicsRequest],
            pubsub.ListTopicsResponse]:
        r"""Return a callable for the list topics method over gRPC.

        Lists matching topics.

        Returns:
            Callable[[~.ListTopicsRequest],
                    ~.ListTopicsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'list_topics',
            '/google.pubsub.v1.Publisher/ListTopics',
            pubsub.ListTopicsRequest.serialize,
            pubsub.ListTopicsResponse.deserialize,
        )

    @property
    def list_topic_subscriptions(self) -> Callable[
            [pubsub.ListTopicSubscriptionsRequest],
            pubsub.ListTopicSubscriptionsResponse]:
        r"""Return a callable for the list topic subscriptions method over gRPC.

        Lists the names of the attached subscriptions on this
        topic.

        Returns:
            Callable[[~.ListTopicSubscriptionsRequest],
                    ~.ListTopicSubscriptionsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'list_topic_subscriptions',
            '/google.pubsub.v1.Publisher/ListTopicSubscriptions',
            pubsub.ListTopicSubscriptionsRequest.serialize,
            pubsub.ListTopicSubscriptionsResponse.deserialize,
        )

    @property
    def list_topic_snapshots(self) -> Callable[
            [pubsub.ListTopicSnapshotsRequest],
            pubsub.ListTopicSnapshotsResponse]:
        r"""Return a callable for the list topic snapshots method over gRPC.

        Lists the names of the snapshots on this topic. Snapshots are
        used in
        `Seek `__
        operations, which allow you to manage message acknowledgments in
        bulk. That is, you can set the acknowledgment state of messages
        in an existing subscription to the state captured by a snapshot.

        Returns:
            Callable[[~.ListTopicSnapshotsRequest],
                    ~.ListTopicSnapshotsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'list_topic_snapshots',
            '/google.pubsub.v1.Publisher/ListTopicSnapshots',
            pubsub.ListTopicSnapshotsRequest.serialize,
            pubsub.ListTopicSnapshotsResponse.deserialize,
        )

    @property
    def delete_topic(self) -> Callable[
            [pubsub.DeleteTopicRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the delete topic method over gRPC.

        Deletes the topic with the given name. Returns ``NOT_FOUND`` if
        the topic does not exist. After a topic is deleted, a new topic
        may be created with the same name; this is an entirely new topic
        with none of the old configuration or subscriptions. Existing
        subscriptions to this topic are not deleted, but their ``topic``
        field is set to ``_deleted-topic_``.

        Returns:
            Callable[[~.DeleteTopicRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'delete_topic',
            '/google.pubsub.v1.Publisher/DeleteTopic',
            pubsub.DeleteTopicRequest.serialize,
            empty_pb2.Empty.FromString,
        )

    @property
    def detach_subscription(self) -> Callable[
            [pubsub.DetachSubscriptionRequest],
            pubsub.DetachSubscriptionResponse]:
        r"""Return a callable for the detach subscription method over gRPC.

        Detaches a subscription from this topic. All messages retained
        in the subscription are dropped. Subsequent ``Pull`` and
        ``StreamingPull`` requests will return FAILED_PRECONDITION. If
        the subscription is a push subscription, pushes to the endpoint
        will stop.

        Returns:
            Callable[[~.DetachSubscriptionRequest],
                    ~.DetachSubscriptionResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            'detach_subscription',
            '/google.pubsub.v1.Publisher/DetachSubscription',
            pubsub.DetachSubscriptionRequest.serialize,
            pubsub.DetachSubscriptionResponse.deserialize,
        )

    @property
    def set_iam_policy(
        self,
    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
        r"""Return a callable for the set iam policy method over gRPC.
        Sets the IAM access control policy on the specified
        function. Replaces any existing policy.
        Returns:
            Callable[[~.SetIamPolicyRequest],
                ~.Policy]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            "set_iam_policy",
            "/google.iam.v1.IAMPolicy/SetIamPolicy",
            iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
            policy_pb2.Policy.FromString,
        )

    @property
    def get_iam_policy(
        self,
    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
        r"""Return a callable for the get iam policy method over gRPC.
        Gets the IAM access control policy for a function.
        Returns an empty policy if the function exists and does
        not have a policy set.
        Returns:
            Callable[[~.GetIamPolicyRequest],
                ~.Policy]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            "get_iam_policy",
            "/google.iam.v1.IAMPolicy/GetIamPolicy",
            iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
            policy_pb2.Policy.FromString,
        )

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
    ]:
        r"""Return a callable for the test iam permissions method over gRPC.
        Tests the specified permissions against the IAM access control
        policy for a function. If the function does not exist, this will
        return an empty set of permissions, not a NOT_FOUND error.
        Returns:
            Callable[[~.TestIamPermissionsRequest],
                ~.TestIamPermissionsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        return self._get_stub(
            "test_iam_permissions",
            "/google.iam.v1.IAMPolicy/TestIamPermissions",
            iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
            iam_policy_pb2.TestIamPermissionsResponse.FromString,
        )

    def close(self):
        # Closing the channel tears down every cached stub with it.
        self.grpc_channel.close()

    @property
    def kind(self) -> str:
        # Transport discriminator used by the client factory.
        return "grpc"


__all__ = (
    'PublisherGrpcTransport',
)
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub +from .base import PublisherTransport, DEFAULT_CLIENT_INFO +from .grpc import PublisherGrpcTransport + + +class PublisherGrpcAsyncIOTransport(PublisherTransport): + """gRPC AsyncIO backend transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. 
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_topic(self) -> Callable[ + [pubsub.Topic], + Awaitable[pubsub.Topic]]: + r"""Return a callable for the create topic method over gRPC. + + Creates the given topic with the given name. See the [resource + name rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). + + Returns: + Callable[[~.Topic], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_topic' not in self._stubs: + self._stubs['create_topic'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/CreateTopic', + request_serializer=pubsub.Topic.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs['create_topic'] + + @property + def update_topic(self) -> Callable[ + [pubsub.UpdateTopicRequest], + Awaitable[pubsub.Topic]]: + r"""Return a callable for the update topic method over gRPC. + + Updates an existing topic. Note that certain + properties of a topic are not modifiable. + + Returns: + Callable[[~.UpdateTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_topic' not in self._stubs: + self._stubs['update_topic'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/UpdateTopic', + request_serializer=pubsub.UpdateTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs['update_topic'] + + @property + def publish(self) -> Callable[ + [pubsub.PublishRequest], + Awaitable[pubsub.PublishResponse]]: + r"""Return a callable for the publish method over gRPC. + + Adds one or more messages to the topic. Returns ``NOT_FOUND`` if + the topic does not exist. + + Returns: + Callable[[~.PublishRequest], + Awaitable[~.PublishResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'publish' not in self._stubs: + self._stubs['publish'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/Publish', + request_serializer=pubsub.PublishRequest.serialize, + response_deserializer=pubsub.PublishResponse.deserialize, + ) + return self._stubs['publish'] + + @property + def get_topic(self) -> Callable[ + [pubsub.GetTopicRequest], + Awaitable[pubsub.Topic]]: + r"""Return a callable for the get topic method over gRPC. + + Gets the configuration of a topic. + + Returns: + Callable[[~.GetTopicRequest], + Awaitable[~.Topic]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_topic' not in self._stubs: + self._stubs['get_topic'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/GetTopic', + request_serializer=pubsub.GetTopicRequest.serialize, + response_deserializer=pubsub.Topic.deserialize, + ) + return self._stubs['get_topic'] + + @property + def list_topics(self) -> Callable[ + [pubsub.ListTopicsRequest], + Awaitable[pubsub.ListTopicsResponse]]: + r"""Return a callable for the list topics method over gRPC. + + Lists matching topics. + + Returns: + Callable[[~.ListTopicsRequest], + Awaitable[~.ListTopicsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_topics' not in self._stubs: + self._stubs['list_topics'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopics', + request_serializer=pubsub.ListTopicsRequest.serialize, + response_deserializer=pubsub.ListTopicsResponse.deserialize, + ) + return self._stubs['list_topics'] + + @property + def list_topic_subscriptions(self) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], + Awaitable[pubsub.ListTopicSubscriptionsResponse]]: + r"""Return a callable for the list topic subscriptions method over gRPC. + + Lists the names of the attached subscriptions on this + topic. + + Returns: + Callable[[~.ListTopicSubscriptionsRequest], + Awaitable[~.ListTopicSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_topic_subscriptions' not in self._stubs: + self._stubs['list_topic_subscriptions'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSubscriptions', + request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, + ) + return self._stubs['list_topic_subscriptions'] + + @property + def list_topic_snapshots(self) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], + Awaitable[pubsub.ListTopicSnapshotsResponse]]: + r"""Return a callable for the list topic snapshots method over gRPC. + + Lists the names of the snapshots on this topic. Snapshots are + used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListTopicSnapshotsRequest], + Awaitable[~.ListTopicSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_topic_snapshots' not in self._stubs: + self._stubs['list_topic_snapshots'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/ListTopicSnapshots', + request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, + response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, + ) + return self._stubs['list_topic_snapshots'] + + @property + def delete_topic(self) -> Callable[ + [pubsub.DeleteTopicRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete topic method over gRPC. + + Deletes the topic with the given name. Returns ``NOT_FOUND`` if + the topic does not exist. 
After a topic is deleted, a new topic + may be created with the same name; this is an entirely new topic + with none of the old configuration or subscriptions. Existing + subscriptions to this topic are not deleted, but their ``topic`` + field is set to ``_deleted-topic_``. + + Returns: + Callable[[~.DeleteTopicRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_topic' not in self._stubs: + self._stubs['delete_topic'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/DeleteTopic', + request_serializer=pubsub.DeleteTopicRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_topic'] + + @property + def detach_subscription(self) -> Callable[ + [pubsub.DetachSubscriptionRequest], + Awaitable[pubsub.DetachSubscriptionResponse]]: + r"""Return a callable for the detach subscription method over gRPC. + + Detaches a subscription from this topic. All messages retained + in the subscription are dropped. Subsequent ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. If + the subscription is a push subscription, pushes to the endpoint + will stop. + + Returns: + Callable[[~.DetachSubscriptionRequest], + Awaitable[~.DetachSubscriptionResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'detach_subscription' not in self._stubs: + self._stubs['detach_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Publisher/DetachSubscription', + request_serializer=pubsub.DetachSubscriptionRequest.serialize, + response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, + ) + return self._stubs['detach_subscription'] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'PublisherGrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py new file mode 100644 index 000000000000..1883aa56de6a --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SchemaServiceClient +from .async_client import SchemaServiceAsyncClient + +__all__ = ( + 'SchemaServiceClient', + 'SchemaServiceAsyncClient', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py new file mode 100644 index 000000000000..3f01d265fd56 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py @@ -0,0 +1,1062 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .client import SchemaServiceClient + + +class SchemaServiceAsyncClient: + """Service for doing schema-related operations.""" + + _client: SchemaServiceClient + + DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + + schema_path = staticmethod(SchemaServiceClient.schema_path) + parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path) + common_billing_account_path = staticmethod(SchemaServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(SchemaServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(SchemaServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(SchemaServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(SchemaServiceClient.common_organization_path) + 
parse_common_organization_path = staticmethod(SchemaServiceClient.parse_common_organization_path) + common_project_path = staticmethod(SchemaServiceClient.common_project_path) + parse_common_project_path = staticmethod(SchemaServiceClient.parse_common_project_path) + common_location_path = staticmethod(SchemaServiceClient.common_location_path) + parse_common_location_path = staticmethod(SchemaServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceAsyncClient: The constructed client. + """ + return SchemaServiceClient.from_service_account_info.__func__(SchemaServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SchemaServiceAsyncClient: The constructed client. + """ + return SchemaServiceClient.from_service_account_file.__func__(SchemaServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SchemaServiceTransport: + """Returns the transport used by the client instance. + + Returns: + SchemaServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the schema service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SchemaServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SchemaServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_schema(self, + request: Union[gp_schema.CreateSchemaRequest, dict] = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + schema_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Creates a schema. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.create_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): + The request object. Request for the CreateSchema method. + parent (:class:`str`): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.pubsub_v1.types.Schema`): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. The schema + object returned by CreateSchema will have a ``name`` + made using the given ``parent`` and ``schema_id``. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (:class:`str`): + The ID to use for the schema, which will become the + final component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/admin#resource_names + for resource name constraints. + + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema, schema_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = gp_schema.CreateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_schema(self, + request: Union[schema.GetSchemaRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a schema. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): + The request object. Request for the GetSchema method. + name (:class:`str`): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = schema.GetSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_schemas(self, + request: Union[schema.ListSchemasRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasAsyncPager: + r"""Lists schemas in a project. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): + The request object. Request for the `ListSchemas` + method. + parent (:class:`str`): + Required. The name of the project in which to list + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager: + Response for the ListSchemas method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = schema.ListSchemasRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_schemas, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSchemasAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_schema(self, + request: Union[schema.DeleteSchemaRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a schema. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + await client.delete_schema(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): + The request object. Request for the `DeleteSchema` + method. + name (:class:`str`): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = schema.DeleteSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def validate_schema(self, + request: Union[gp_schema.ValidateSchemaRequest, dict] = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Validates a schema. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.validate_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): + The request object. Request for the `ValidateSchema` + method. + parent (:class:`str`): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.pubsub_v1.types.Schema`): + Required. The schema object to + validate. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.types.ValidateSchemaResponse: + Response for the ValidateSchema method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = gp_schema.ValidateSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.validate_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def validate_message(self, + request: Union[schema.ValidateMessageRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ValidateMessageResponse: + r"""Validates a message against a schema. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = await client.validate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): + The request object. Request for the `ValidateMessage` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.ValidateMessageResponse: + Response for the ValidateMessage method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + request = schema.ValidateMessageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.validate_message, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. 
+ + Replaces any existing policy. + + Args: + request (:class:`~.policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified permissions against the IAM access control + policy for a function. + + If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~iam_policy_pb2.PolicyTestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "SchemaServiceAsyncClient", +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py new file mode 100644 index 000000000000..e6becf99be0c --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py @@ -0,0 +1,1262 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
from collections import OrderedDict
import os
import re
from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union
import pkg_resources

from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport import mtls  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.auth.exceptions import MutualTLSChannelError  # type: ignore
from google.oauth2 import service_account  # type: ignore

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore

from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.pubsub_v1.services.schema_service import pagers
from google.pubsub_v1.types import schema
from google.pubsub_v1.types import schema as gp_schema
from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import SchemaServiceGrpcTransport
from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport


class SchemaServiceClientMeta(type):
    """Metaclass for the SchemaService client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    _transport_registry = OrderedDict()  # type: Dict[str, Type[SchemaServiceTransport]]
    _transport_registry["grpc"] = SchemaServiceGrpcTransport
    _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport

    def get_transport_class(cls,
            label: Optional[str] = None,
            ) -> Type[SchemaServiceTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))


class SchemaServiceClient(metaclass=SchemaServiceClientMeta):
    """Service for doing schema-related operations."""

    @staticmethod
    def _get_default_mtls_endpoint(api_endpoint):
        """Converts api endpoint to mTLS endpoint.

        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
        Args:
            api_endpoint (Optional[str]): the api endpoint to convert.
        Returns:
            str: converted mTLS api endpoint.
        """
        if not api_endpoint:
            return api_endpoint

        # NOTE: the named groups had been stripped down to bare "(?P"
        # (invalid regex syntax that raises sre errors); restored here so
        # the pattern compiles.
        mtls_endpoint_re = re.compile(
            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
        )

        m = mtls_endpoint_re.match(api_endpoint)
        name, mtls, sandbox, googledomain = m.groups()
        if mtls or not googledomain:
            # Already an mTLS endpoint, or not a googleapis.com host at all.
            return api_endpoint

        if sandbox:
            return api_endpoint.replace(
                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
            )

        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")

    DEFAULT_ENDPOINT = "pubsub.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SchemaServiceClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_info(info)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
        file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            SchemaServiceClient: The constructed client.
        """
        credentials = service_account.Credentials.from_service_account_file(
            filename)
        kwargs["credentials"] = credentials
        return cls(*args, **kwargs)

    from_service_account_json = from_service_account_file

    @property
    def transport(self) -> SchemaServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            SchemaServiceTransport: The transport used by the client
                instance.
        """
        return self._transport

    @staticmethod
    def schema_path(project: str, schema: str, ) -> str:
        """Returns a fully-qualified schema string."""
        return "projects/{project}/schemas/{schema}".format(project=project, schema=schema, )

    @staticmethod
    def parse_schema_path(path: str) -> Dict[str, str]:
        """Parses a schema path into its component segments."""
        # Named groups restored (they had been stripped to invalid "(?P").
        m = re.match(r"^projects/(?P<project>.+?)/schemas/(?P<schema>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_billing_account_path(billing_account: str, ) -> str:
        """Returns a fully-qualified billing_account string."""
        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )

    @staticmethod
    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
        """Parse a billing_account path into its component segments."""
        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_folder_path(folder: str, ) -> str:
        """Returns a fully-qualified folder string."""
        return "folders/{folder}".format(folder=folder, )

    @staticmethod
    def parse_common_folder_path(path: str) -> Dict[str, str]:
        """Parse a folder path into its component segments."""
        m = re.match(r"^folders/(?P<folder>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_organization_path(organization: str, ) -> str:
        """Returns a fully-qualified organization string."""
        return "organizations/{organization}".format(organization=organization, )

    @staticmethod
    def parse_common_organization_path(path: str) -> Dict[str, str]:
        """Parse a organization path into its component segments."""
        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_project_path(project: str, ) -> str:
        """Returns a fully-qualified project string."""
        return "projects/{project}".format(project=project, )

    @staticmethod
    def parse_common_project_path(path: str) -> Dict[str, str]:
        """Parse a project path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)$", path)
        return m.groupdict() if m else {}

    @staticmethod
    def common_location_path(project: str, location: str, ) -> str:
        """Returns a fully-qualified location string."""
        return "projects/{project}/locations/{location}".format(project=project, location=location, )

    @staticmethod
    def parse_common_location_path(path: str) -> Dict[str, str]:
        """Parse a location path into its component segments."""
        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
        return m.groupdict() if m else {}

    @classmethod
    def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint,
        otherwise use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        if client_options is None:
            client_options = client_options_lib.ClientOptions()
        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_client_cert not in ("true", "false"):
            raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`")
        if use_mtls_endpoint not in ("auto", "never", "always"):
            raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`")

        # Figure out the client cert source to use.
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SchemaServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the schema service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SchemaServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SchemaServiceTransport): + # transport is a SchemaServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def create_schema(self, + request: Union[gp_schema.CreateSchemaRequest, dict] = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + schema_id: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Creates a schema. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): + The request object. Request for the CreateSchema method. + parent (str): + Required. The name of the project in which to create the + schema. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.pubsub_v1.types.Schema): + Required. The schema object to create. + + This schema's ``name`` parameter is ignored. 
The schema + object returned by CreateSchema will have a ``name`` + made using the given ``parent`` and ``schema_id``. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema_id (str): + The ID to use for the schema, which will become the + final component of the schema's resource name. + + See + https://cloud.google.com/pubsub/docs/admin#resource_names + for resource name constraints. + + This corresponds to the ``schema_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema, schema_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a gp_schema.CreateSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gp_schema.CreateSchemaRequest): + request = gp_schema.CreateSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + if schema_id is not None: + request.schema_id = schema_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_schema(self, + request: Union[schema.GetSchemaRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Gets a schema. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): + The request object. Request for the GetSchema method. + name (str): + Required. The name of the schema to get. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a schema.GetSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.GetSchemaRequest): + request = schema.GetSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_schemas(self, + request: Union[schema.ListSchemasRequest, dict] = None, + *, + parent: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemasPager: + r"""Lists schemas in a project. + + .. 
code-block:: python + + from google import pubsub_v1 + + def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): + The request object. Request for the `ListSchemas` + method. + parent (str): + Required. The name of the project in which to list + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemasPager: + Response for the ListSchemas method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a schema.ListSchemasRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, schema.ListSchemasRequest): + request = schema.ListSchemasRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schemas] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemasPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_schema(self, + request: Union[schema.DeleteSchemaRequest, dict] = None, + *, + name: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a schema. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + client.delete_schema(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): + The request object. Request for the `DeleteSchema` + method. + name (str): + Required. Name of the schema to delete. Format is + ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a schema.DeleteSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.DeleteSchemaRequest): + request = schema.DeleteSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def validate_schema(self, + request: Union[gp_schema.ValidateSchemaRequest, dict] = None, + *, + parent: str = None, + schema: gp_schema.Schema = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Validates a schema. + + .. 
code-block:: python + + from google import pubsub_v1 + + def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): + The request object. Request for the `ValidateSchema` + method. + parent (str): + Required. The name of the project in which to validate + schemas. Format is ``projects/{project-id}``. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.pubsub_v1.types.Schema): + Required. The schema object to + validate. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.ValidateSchemaResponse: + Response for the ValidateSchema method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, schema]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a gp_schema.ValidateSchemaRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gp_schema.ValidateSchemaRequest): + request = gp_schema.ValidateSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def validate_message(self, + request: Union[schema.ValidateMessageRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ValidateMessageResponse: + r"""Validates a message against a schema. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): + The request object. Request for the `ValidateMessage` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.ValidateMessageResponse: + Response for the ValidateMessage method. + Empty for now. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a schema.ValidateMessageRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.ValidateMessageRequest): + request = schema.ValidateMessageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.validate_message] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its 
features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. 
Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. + **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "SchemaServiceClient", +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py new file mode 100644 index 000000000000..2c2e0e00eb76 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator
+
+from google.pubsub_v1.types import schema
+
+
+class ListSchemasPager:
+    """A pager for iterating through ``list_schemas`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.pubsub_v1.types.ListSchemasResponse` object, and
+    provides an ``__iter__`` method to iterate through its
+    ``schemas`` field.
+
+    If there are more pages, the ``__iter__`` method will make additional
+    ``ListSchemas`` requests and continue to iterate
+    through the ``schemas`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.pubsub_v1.types.ListSchemasResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., schema.ListSchemasResponse],
+            request: schema.ListSchemasRequest,
+            response: schema.ListSchemasResponse,
+            *,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiate the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.pubsub_v1.types.ListSchemasRequest):
+                The initial request object.
+            response (google.pubsub_v1.types.ListSchemasResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Re-wrap the request so mutating ``page_token`` while paging does not
+        # modify the caller's original request object.
+        self._request = schema.ListSchemasRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        # Delegate unknown attribute lookups to the most recent response.
+        return getattr(self._response, name)
+
+    @property
+    def pages(self) -> Iterator[schema.ListSchemasResponse]:
+        # Yield the current page, then keep fetching subsequent pages until
+        # the service stops returning a ``next_page_token``.
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = self._method(self._request, metadata=self._metadata)
+            yield self._response
+
+    def __iter__(self) -> Iterator[schema.Schema]:
+        # Flatten pages into a single stream of ``Schema`` items.
+        for page in self.pages:
+            yield from page.schemas
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
+
+
+class ListSchemasAsyncPager:
+    """A pager for iterating through ``list_schemas`` requests.
+
+    This class thinly wraps an initial
+    :class:`google.pubsub_v1.types.ListSchemasResponse` object, and
+    provides an ``__aiter__`` method to iterate through its
+    ``schemas`` field.
+
+    If there are more pages, the ``__aiter__`` method will make additional
+    ``ListSchemas`` requests and continue to iterate
+    through the ``schemas`` field on the
+    corresponding responses.
+
+    All the usual :class:`google.pubsub_v1.types.ListSchemasResponse`
+    attributes are available on the pager. If multiple requests are made, only
+    the most recent response is retained, and thus used for attribute lookup.
+    """
+    def __init__(self,
+            method: Callable[..., Awaitable[schema.ListSchemasResponse]],
+            request: schema.ListSchemasRequest,
+            response: schema.ListSchemasResponse,
+            *,
+            metadata: Sequence[Tuple[str, str]] = ()):
+        """Instantiates the pager.
+
+        Args:
+            method (Callable): The method that was originally called, and
+                which instantiated this pager.
+            request (google.pubsub_v1.types.ListSchemasRequest):
+                The initial request object.
+            response (google.pubsub_v1.types.ListSchemasResponse):
+                The initial response object.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        self._method = method
+        # Re-wrap the request so mutating ``page_token`` while paging does not
+        # modify the caller's original request object.
+        self._request = schema.ListSchemasRequest(request)
+        self._response = response
+        self._metadata = metadata
+
+    def __getattr__(self, name: str) -> Any:
+        # Delegate unknown attribute lookups to the most recent response.
+        return getattr(self._response, name)
+
+    @property
+    async def pages(self) -> AsyncIterator[schema.ListSchemasResponse]:
+        # Async variant of the page generator: each follow-up request is
+        # awaited on the wrapped (awaitable) method.
+        yield self._response
+        while self._response.next_page_token:
+            self._request.page_token = self._response.next_page_token
+            self._response = await self._method(self._request, metadata=self._metadata)
+            yield self._response
+    def __aiter__(self) -> AsyncIterator[schema.Schema]:
+        # Flatten pages into a single async stream of ``Schema`` items.
+        async def async_generator():
+            async for page in self.pages:
+                for response in page.schemas:
+                    yield response
+
+        return async_generator()
+
+    def __repr__(self) -> str:
+        return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py
new file mode 100644
index 000000000000..59b4fd088e07
--- /dev/null
+++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import SchemaServiceTransport
+from .grpc import SchemaServiceGrpcTransport
+from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+# Maps each transport name to its concrete transport class; both the sync
+# gRPC and async gRPC implementations are registered here.
+_transport_registry = OrderedDict()  # type: Dict[str, Type[SchemaServiceTransport]]
+_transport_registry['grpc'] = SchemaServiceGrpcTransport
+_transport_registry['grpc_asyncio'] = SchemaServiceGrpcAsyncIOTransport
+
+__all__ = (
+    'SchemaServiceTransport',
+    'SchemaServiceGrpcTransport',
+    'SchemaServiceGrpcAsyncIOTransport',
+)
diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py
new file mode 100644
index 000000000000..6e24f13cc6b3
--- /dev/null
+++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py
@@ -0,0 +1,255 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import pkg_resources
+
+import google.auth  # type: ignore
+import google.api_core
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import retry as retries
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.iam.v1 import iam_policy_pb2  # type: ignore
+from google.iam.v1 import policy_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.pubsub_v1.types import schema
+from google.pubsub_v1.types import schema as gp_schema
+
+# Resolve the installed library version for the default client info; fall
+# back to a bare ClientInfo when the distribution is not installed (e.g.
+# when running from a source checkout).
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            'google-cloud-pubsub',
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+class SchemaServiceTransport(abc.ABC):
+    """Abstract transport class for SchemaService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+        'https://www.googleapis.com/auth/pubsub',
+    )
+
+    DEFAULT_HOST: str = 'pubsub.googleapis.com'
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+                be used for service account credentials.
+        """
+
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                                credentials_file,
+                                **scopes_kwargs,
+                                quota_project_id=quota_project_id
+                            )
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        # Each RPC is wrapped once with retry/timeout/user-agent handling so
+        # subclasses can dispatch through ``self._wrapped_methods``.
+        self._wrapped_methods = {
+            self.create_schema: gapic_v1.method.wrap_method(
+                self.create_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.get_schema: gapic_v1.method.wrap_method(
+                self.get_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.list_schemas: gapic_v1.method.wrap_method(
+                self.list_schemas,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.delete_schema: gapic_v1.method.wrap_method(
+                self.delete_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.validate_schema: gapic_v1.method.wrap_method(
+                self.validate_schema,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+            self.validate_message: gapic_v1.method.wrap_method(
+                self.validate_message,
+                default_timeout=None,
+                client_info=client_info,
+            ),
+        }
+
+    def close(self):
+        """Closes resources associated with the transport.
+
+        .. warning::
+            Only call this method if the transport is NOT shared
+            with other clients - this may cause errors in other clients!
+        """
+        raise NotImplementedError()
+
+    # The properties below are abstract RPC hooks: each concrete transport
+    # (sync or async gRPC) returns a callable for the corresponding RPC.
+    @property
+    def create_schema(self) -> Callable[
+            [gp_schema.CreateSchemaRequest],
+            Union[
+                gp_schema.Schema,
+                Awaitable[gp_schema.Schema]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def get_schema(self) -> Callable[
+            [schema.GetSchemaRequest],
+            Union[
+                schema.Schema,
+                Awaitable[schema.Schema]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def list_schemas(self) -> Callable[
+            [schema.ListSchemasRequest],
+            Union[
+                schema.ListSchemasResponse,
+                Awaitable[schema.ListSchemasResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def delete_schema(self) -> Callable[
+            [schema.DeleteSchemaRequest],
+            Union[
+                empty_pb2.Empty,
+                Awaitable[empty_pb2.Empty]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def validate_schema(self) -> Callable[
+            [gp_schema.ValidateSchemaRequest],
+            Union[
+                gp_schema.ValidateSchemaResponse,
+                Awaitable[gp_schema.ValidateSchemaResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def validate_message(self) -> Callable[
+            [schema.ValidateMessageRequest],
+            Union[
+                schema.ValidateMessageResponse,
+                Awaitable[schema.ValidateMessageResponse]
+            ]]:
+        raise NotImplementedError()
+
+    @property
+    def set_iam_policy(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.SetIamPolicyRequest],
+        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def get_iam_policy(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.GetIamPolicyRequest],
+        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def test_iam_permissions(
+        self,
+    ) -> Callable[
+        [iam_policy_pb2.TestIamPermissionsRequest],
+        Union[
+            iam_policy_pb2.TestIamPermissionsResponse,
+            Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
+        ],
+    ]:
+        raise NotImplementedError()
+
+    @property
+    def kind(self) -> str:
+        raise NotImplementedError()
+
+
+__all__ = (
+    'SchemaServiceTransport',
+)
diff --git
a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py new file mode 100644 index 000000000000..b27c59f59a34 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO + + +class SchemaServiceGrpcTransport(SchemaServiceTransport): + """gRPC backend transport for SchemaService. + + Service for doing schema-related operations. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. 
+ """ + return self._grpc_channel + + @property + def create_schema(self) -> Callable[ + [gp_schema.CreateSchemaRequest], + gp_schema.Schema]: + r"""Return a callable for the create schema method over gRPC. + + Creates a schema. + + Returns: + Callable[[~.CreateSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_schema' not in self._stubs: + self._stubs['create_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/CreateSchema', + request_serializer=gp_schema.CreateSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs['create_schema'] + + @property + def get_schema(self) -> Callable[ + [schema.GetSchemaRequest], + schema.Schema]: + r"""Return a callable for the get schema method over gRPC. + + Gets a schema. + + Returns: + Callable[[~.GetSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_schema' not in self._stubs: + self._stubs['get_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/GetSchema', + request_serializer=schema.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs['get_schema'] + + @property + def list_schemas(self) -> Callable[ + [schema.ListSchemasRequest], + schema.ListSchemasResponse]: + r"""Return a callable for the list schemas method over gRPC. + + Lists schemas in a project. 
+ + Returns: + Callable[[~.ListSchemasRequest], + ~.ListSchemasResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_schemas' not in self._stubs: + self._stubs['list_schemas'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/ListSchemas', + request_serializer=schema.ListSchemasRequest.serialize, + response_deserializer=schema.ListSchemasResponse.deserialize, + ) + return self._stubs['list_schemas'] + + @property + def delete_schema(self) -> Callable[ + [schema.DeleteSchemaRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a schema. + + Returns: + Callable[[~.DeleteSchemaRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_schema' not in self._stubs: + self._stubs['delete_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/DeleteSchema', + request_serializer=schema.DeleteSchemaRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_schema'] + + @property + def validate_schema(self) -> Callable[ + [gp_schema.ValidateSchemaRequest], + gp_schema.ValidateSchemaResponse]: + r"""Return a callable for the validate schema method over gRPC. + + Validates a schema. + + Returns: + Callable[[~.ValidateSchemaRequest], + ~.ValidateSchemaResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'validate_schema' not in self._stubs: + self._stubs['validate_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/ValidateSchema', + request_serializer=gp_schema.ValidateSchemaRequest.serialize, + response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, + ) + return self._stubs['validate_schema'] + + @property + def validate_message(self) -> Callable[ + [schema.ValidateMessageRequest], + schema.ValidateMessageResponse]: + r"""Return a callable for the validate message method over gRPC. + + Validates a message against a schema. + + Returns: + Callable[[~.ValidateMessageRequest], + ~.ValidateMessageResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'validate_message' not in self._stubs: + self._stubs['validate_message'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/ValidateMessage', + request_serializer=schema.ValidateMessageRequest.serialize, + response_deserializer=schema.ValidateMessageResponse.deserialize, + ) + return self._stubs['validate_message'] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'SchemaServiceGrpcTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e7024367d521 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -0,0 +1,475 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import SchemaServiceGrpcTransport + + +class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): + """gRPC AsyncIO backend transport for SchemaService. + + Service for doing schema-related operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            aio.Channel: A gRPC AsyncIO channel object.
+        """
+
+        return grpc_helpers_async.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    def __init__(self, *,
+            host: str = 'pubsub.googleapis.com',
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            channel: aio.Channel = None,
+            api_mtls_endpoint: str = None,
+            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+            ssl_channel_credentials: grpc.ChannelCredentials = None,
+            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+            quota_project_id=None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+                This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_schema(self) -> Callable[ + [gp_schema.CreateSchemaRequest], + Awaitable[gp_schema.Schema]]: + r"""Return a callable for the create schema method over gRPC. + + Creates a schema. + + Returns: + Callable[[~.CreateSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_schema' not in self._stubs: + self._stubs['create_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/CreateSchema', + request_serializer=gp_schema.CreateSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs['create_schema'] + + @property + def get_schema(self) -> Callable[ + [schema.GetSchemaRequest], + Awaitable[schema.Schema]]: + r"""Return a callable for the get schema method over gRPC. + + Gets a schema. + + Returns: + Callable[[~.GetSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_schema' not in self._stubs: + self._stubs['get_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/GetSchema', + request_serializer=schema.GetSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs['get_schema'] + + @property + def list_schemas(self) -> Callable[ + [schema.ListSchemasRequest], + Awaitable[schema.ListSchemasResponse]]: + r"""Return a callable for the list schemas method over gRPC. + + Lists schemas in a project. + + Returns: + Callable[[~.ListSchemasRequest], + Awaitable[~.ListSchemasResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_schemas' not in self._stubs: + self._stubs['list_schemas'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/ListSchemas', + request_serializer=schema.ListSchemasRequest.serialize, + response_deserializer=schema.ListSchemasResponse.deserialize, + ) + return self._stubs['list_schemas'] + + @property + def delete_schema(self) -> Callable[ + [schema.DeleteSchemaRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete schema method over gRPC. + + Deletes a schema. + + Returns: + Callable[[~.DeleteSchemaRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'delete_schema' not in self._stubs: + self._stubs['delete_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/DeleteSchema', + request_serializer=schema.DeleteSchemaRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_schema'] + + @property + def validate_schema(self) -> Callable[ + [gp_schema.ValidateSchemaRequest], + Awaitable[gp_schema.ValidateSchemaResponse]]: + r"""Return a callable for the validate schema method over gRPC. + + Validates a schema. + + Returns: + Callable[[~.ValidateSchemaRequest], + Awaitable[~.ValidateSchemaResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'validate_schema' not in self._stubs: + self._stubs['validate_schema'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/ValidateSchema', + request_serializer=gp_schema.ValidateSchemaRequest.serialize, + response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, + ) + return self._stubs['validate_schema'] + + @property + def validate_message(self) -> Callable[ + [schema.ValidateMessageRequest], + Awaitable[schema.ValidateMessageResponse]]: + r"""Return a callable for the validate message method over gRPC. + + Validates a message against a schema. + + Returns: + Callable[[~.ValidateMessageRequest], + Awaitable[~.ValidateMessageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'validate_message' not in self._stubs: + self._stubs['validate_message'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.SchemaService/ValidateMessage', + request_serializer=schema.ValidateMessageRequest.serialize, + response_deserializer=schema.ValidateMessageResponse.deserialize, + ) + return self._stubs['validate_message'] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'SchemaServiceGrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py new file mode 100644 index 000000000000..5f20ec4319af --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import SubscriberClient +from .async_client import SubscriberAsyncClient + +__all__ = ( + 'SubscriberClient', + 'SubscriberAsyncClient', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py new file mode 100644 index 000000000000..4306ace7f500 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py @@ -0,0 +1,2271 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport +from .client import SubscriberClient + + +class SubscriberAsyncClient: + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. 
+ """ + + _client: SubscriberClient + + DEFAULT_ENDPOINT = SubscriberClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT + + snapshot_path = staticmethod(SubscriberClient.snapshot_path) + parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) + subscription_path = staticmethod(SubscriberClient.subscription_path) + parse_subscription_path = staticmethod(SubscriberClient.parse_subscription_path) + topic_path = staticmethod(SubscriberClient.topic_path) + parse_topic_path = staticmethod(SubscriberClient.parse_topic_path) + common_billing_account_path = staticmethod(SubscriberClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(SubscriberClient.parse_common_billing_account_path) + common_folder_path = staticmethod(SubscriberClient.common_folder_path) + parse_common_folder_path = staticmethod(SubscriberClient.parse_common_folder_path) + common_organization_path = staticmethod(SubscriberClient.common_organization_path) + parse_common_organization_path = staticmethod(SubscriberClient.parse_common_organization_path) + common_project_path = staticmethod(SubscriberClient.common_project_path) + parse_common_project_path = staticmethod(SubscriberClient.parse_common_project_path) + common_location_path = staticmethod(SubscriberClient.common_location_path) + parse_common_location_path = staticmethod(SubscriberClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberAsyncClient: The constructed client. 
+ """ + return SubscriberClient.from_service_account_info.__func__(SubscriberAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberAsyncClient: The constructed client. + """ + return SubscriberClient.from_service_account_file.__func__(SubscriberAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. 
+ + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return SubscriberClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> SubscriberTransport: + """Returns the transport used by the client instance. + + Returns: + SubscriberTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(SubscriberClient).get_transport_class, type(SubscriberClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, SubscriberTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the subscriber client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.SubscriberTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. 
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = SubscriberClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_subscription(self, + request: Union[pubsub.Subscription, dict] = None, + *, + name: str = None, + topic: str = None, + push_config: pubsub.PushConfig = None, + ack_deadline_seconds: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = await client.create_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.Subscription, dict]): + The request object. A subscription resource. + name (:class:`str`): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (:class:`str`): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`google.pubsub_v1.types.PushConfig`): + If push delivery is used with this subscription, this + field is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages + using API methods. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ack_deadline_seconds (:class:`int`): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it + is considered to be outstanding. During that time + period, the message will not be redelivered (on a + best-effort basis). + + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.Subscription(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_subscription, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_subscription(self, + request: Union[pubsub.GetSubscriptionRequest, dict] = None, + *, + subscription: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): + The request object. Request for the GetSubscription + method. + subscription (:class:`str`): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.GetSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_subscription, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_subscription(self, + request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = await client.update_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): + The request object. Request for the UpdateSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + request = pubsub.UpdateSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_subscription, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription.name", request.subscription.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_subscriptions(self, + request: Union[pubsub.ListSubscriptionsRequest, dict] = None, + *, + project: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsAsyncPager: + r"""Lists matching subscriptions. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): + The request object. Request for the `ListSubscriptions` + method. + project (:class:`str`): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager: + Response for the ListSubscriptions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ListSubscriptionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_subscriptions, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSubscriptionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_subscription(self, + request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, + *, + subscription: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + await client.delete_subscription(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): + The request object. Request for the DeleteSubscription + method. + subscription (:class:`str`): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.DeleteSubscriptionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_subscription, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def modify_ack_deadline(self, + request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + ack_deadline_seconds: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + await client.modify_ack_deadline(request=request) + + Args: + request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): + The request object. Request for the ModifyAckDeadline + method. + subscription (:class:`str`): + Required. 
The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (:class:`int`): + Required. The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify is 600 seconds (10 + minutes). + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ModifyAckDeadlineRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if subscription is not None: + request.subscription = subscription + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + if ack_ids: + request.ack_ids.extend(ack_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_ack_deadline, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def acknowledge(self, + request: Union[pubsub.AcknowledgeRequest, dict] = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + await client.acknowledge(request=request) + + Args: + request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): + The request object. Request for the Acknowledge method. + subscription (:class:`str`): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (:class:`Sequence[str]`): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. + + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.AcknowledgeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if subscription is not None: + request.subscription = subscription + if ack_ids: + request.ack_ids.extend(ack_ids) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.acknowledge, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def pull(self, + request: Union[pubsub.PullRequest, dict] = None, + *, + subscription: str = None, + return_immediately: bool = None, + max_messages: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = await client.pull(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.PullRequest, dict]): + The request object. Request for the `Pull` method. + subscription (:class:`str`): + Required. The subscription from which messages should be + pulled. 
Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (:class:`bool`): + Optional. If this field is set to true, the system will + respond immediately even if there are no messages + available to return in the ``Pull`` response. Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (:class:`int`): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. + + This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.PullResponse: + Response for the Pull method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([subscription, return_immediately, max_messages]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.PullRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.pull, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def streaming_pull(self, + requests: AsyncIterator[pubsub.StreamingPullRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. 
The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_pull(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): + The request object AsyncIterator. Request for the `StreamingPull` + streaming RPC method. This request is used to establish + the initial stream as well as to stream acknowledgements + and ack deadline modifications from the client to the + server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + AsyncIterable[google.pubsub_v1.types.StreamingPullResponse]: + Response for the StreamingPull method. This response is used to stream + messages from the server to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.streaming_pull, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + deadline=900.0, + ), + default_timeout=900.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def modify_push_config(self, + request: Union[pubsub.ModifyPushConfigRequest, dict] = None, + *, + subscription: str = None, + push_config: pubsub.PushConfig = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + await client.modify_push_config(request=request) + + Args: + request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): + The request object. Request for the ModifyPushConfig + method. + subscription (:class:`str`): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (:class:`google.pubsub_v1.types.PushConfig`): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, push_config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ModifyPushConfigRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.modify_push_config, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def get_snapshot(self, + request: Union[pubsub.GetSnapshotRequest, dict] = None, + *, + snapshot: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Gets the configuration details of a snapshot. + Snapshots are used in Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = await client.get_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): + The request object. Request for the GetSnapshot method. + snapshot (:class:`str`): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.GetSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_snapshot, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("snapshot", request.snapshot), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_snapshots(self, + request: Union[pubsub.ListSnapshotsRequest, dict] = None, + *, + project: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSnapshotsAsyncPager: + r"""Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): + The request object. Request for the `ListSnapshots` + method. + project (:class:`str`): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager: + Response for the ListSnapshots method. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.ListSnapshotsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_snapshots, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSnapshotsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_snapshot(self, + request: Union[pubsub.CreateSnapshotRequest, dict] = None, + *, + name: str = None, + subscription: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = await client.create_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): + The request object. Request for the `CreateSnapshot` + method. + name (:class:`str`): + Required. User-provided name for this snapshot. 
If the + name is not provided in the request, the server will + assign a random name for this snapshot on the same + project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name + rules. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subscription (:class:`str`): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is + guaranteed to retain: (a) The existing backlog on the + subscription. More precisely, this is defined as the + messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, subscription]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.CreateSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_snapshot, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_snapshot(self, + request: Union[pubsub.UpdateSnapshotRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + .. 
code-block:: python + + from google import pubsub_v1 + + async def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = await client.update_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): + The request object. Request for the UpdateSnapshot + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + request = pubsub.UpdateSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_snapshot, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("snapshot.name", request.snapshot.name), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_snapshot(self, + request: Union[pubsub.DeleteSnapshotRequest, dict] = None, + *, + snapshot: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + await client.delete_snapshot(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): + The request object. Request for the `DeleteSnapshot` + method. + snapshot (:class:`str`): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = pubsub.DeleteSnapshotRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_snapshot, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("snapshot", request.snapshot), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def seek(self, + request: Union[pubsub.SeekRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. 
That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + .. code-block:: python + + from google import pubsub_v1 + + async def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.seek(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.SeekRequest, dict]): + The request object. Request for the `Seek` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.SeekResponse: + Response for the Seek method (this response is empty). + """ + # Create or coerce a protobuf request object. + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.seek, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does + not have a policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+        return response
+
+    async def test_iam_permissions(
+        self,
+        request: iam_policy_pb2.TestIamPermissionsRequest = None,
+        *,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> iam_policy_pb2.TestIamPermissionsResponse:
+        r"""Tests the specified permissions against the IAM access control
+        policy for the specified resource.
+
+        If the resource does not exist, this will
+        return an empty set of permissions, not a NOT_FOUND error.
+
+        Args:
+            request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`):
+                The request object. Request message for
+                `TestIamPermissions` method.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        Returns:
+            ~iam_policy_pb2.TestIamPermissionsResponse:
+                Response message for ``TestIamPermissions`` method.
+        """
+        # Create or coerce a protobuf request object.
+
+        # The request isn't a proto-plus wrapped type,
+        # so it must be constructed via keyword expansion.
+        if isinstance(request, dict):
+            request = iam_policy_pb2.TestIamPermissionsRequest(**request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.test_iam_permissions,
+            default_timeout=None,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+ return response + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "SubscriberAsyncClient", +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py new file mode 100644 index 000000000000..d953aa859794 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py @@ -0,0 +1,2373 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.types import pubsub +from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import SubscriberGrpcTransport +from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport + + +class SubscriberClientMeta(type): + """Metaclass for the Subscriber client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] + _transport_registry["grpc"] = SubscriberGrpcTransport + _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[SubscriberTransport]: + """Returns an appropriate transport class. 
+ + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class SubscriberClient(metaclass=SubscriberClientMeta): + """The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "pubsub.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + SubscriberClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> SubscriberTransport: + """Returns the transport used by the client instance. + + Returns: + SubscriberTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def snapshot_path(project: str,snapshot: str,) -> str: + """Returns a fully-qualified snapshot string.""" + return "projects/{project}/snapshots/{snapshot}".format(project=project, snapshot=snapshot, ) + + @staticmethod + def parse_snapshot_path(path: str) -> Dict[str,str]: + """Parses a snapshot path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def subscription_path(project: str,subscription: str,) -> str: + """Returns a fully-qualified subscription string.""" + return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + + @staticmethod + def parse_subscription_path(path: str) -> Dict[str,str]: + """Parses a subscription path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def topic_path(project: str,topic: str,) -> str: + """Returns a fully-qualified topic string.""" + return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + + @staticmethod + def parse_topic_path(path: str) -> Dict[str,str]: + """Parses a topic path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return 
"folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, SubscriberTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the subscriber client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, SubscriberTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). 
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, SubscriberTransport): + # transport is a SubscriberTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) + + def create_subscription(self, + request: Union[pubsub.Subscription, dict] = None, + *, + name: str = None, + topic: str = None, + push_config: pubsub.PushConfig = None, + ack_deadline_seconds: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + .. 
code-block:: python + + from google import pubsub_v1 + + def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.Subscription, dict]): + The request object. A subscription resource. + name (str): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + topic (str): + Required. The name of the topic from which this + subscription is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (google.pubsub_v1.types.PushConfig): + If push delivery is used with this subscription, this + field is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages + using API methods. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ ack_deadline_seconds (int): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it + is considered to be outstanding. During that time + period, the message will not be redelivered (on a + best-effort basis). + + For pull subscriptions, this value is used as the + initial value for the ack deadline. To override this + value for a given message, call ``ModifyAckDeadline`` + with the corresponding ``ack_id`` if using non-streaming + pull or send the ``ack_id`` in a + ``StreamingModifyAckDeadlineRequest`` if using streaming + pull. The minimum custom deadline you can specify is 10 + seconds. The maximum custom deadline you can specify is + 600 seconds (10 minutes). If this parameter is 0, a + default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.Subscription. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.Subscription): + request = pubsub.Subscription(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if topic is not None: + request.topic = topic + if push_config is not None: + request.push_config = push_config + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_subscription(self, + request: Union[pubsub.GetSubscriptionRequest, dict] = None, + *, + subscription: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Gets the configuration details of a subscription. + + .. 
code-block:: python + + from google import pubsub_v1 + + def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): + The request object. Request for the GetSubscription + method. + subscription (str): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetSubscriptionRequest): + request = pubsub.GetSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_subscription(self, + request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): + The request object. Request for the UpdateSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.types.Subscription: + A subscription resource. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateSubscriptionRequest): + request = pubsub.UpdateSubscriptionRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription.name", request.subscription.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_subscriptions(self, + request: Union[pubsub.ListSubscriptionsRequest, dict] = None, + *, + project: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSubscriptionsPager: + r"""Lists matching subscriptions. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): + The request object. Request for the `ListSubscriptions` + method. + project (str): + Required. 
The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager: + Response for the ListSubscriptions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListSubscriptionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListSubscriptionsRequest): + request = pubsub.ListSubscriptionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_subscriptions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSubscriptionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_subscription(self, + request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, + *, + subscription: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + client.delete_subscription(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): + The request object. Request for the DeleteSubscription + method. + subscription (str): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteSubscriptionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteSubscriptionRequest): + request = pubsub.DeleteSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_subscription] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def modify_ack_deadline(self, + request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + ack_deadline_seconds: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ack deadline for a specific message. 
This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + client.modify_ack_deadline(request=request) + + Args: + request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): + The request object. Request for the ModifyAckDeadline + method. + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (Sequence[str]): + Required. List of acknowledgment IDs. + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_deadline_seconds (int): + Required. The new ack deadline with respect to the time + this request was sent to the Pub/Sub system. For + example, if the value is 10, the new ack deadline will + expire 10 seconds after the ``ModifyAckDeadline`` call + was made. Specifying zero might immediately make the + message available for delivery to another subscriber + client. This typically results in an increase in the + rate of message redeliveries (that is, duplicates). The + minimum deadline you can specify is 0 seconds. The + maximum deadline you can specify is 600 seconds (10 + minutes). 
+ + This corresponds to the ``ack_deadline_seconds`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ModifyAckDeadlineRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ModifyAckDeadlineRequest): + request = pubsub.ModifyAckDeadlineRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + if ack_deadline_seconds is not None: + request.ack_deadline_seconds = ack_deadline_seconds + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def acknowledge(self, + request: Union[pubsub.AcknowledgeRequest, dict] = None, + *, + subscription: str = None, + ack_ids: Sequence[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + client.acknowledge(request=request) + + Args: + request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): + The request object. Request for the Acknowledge method. + subscription (str): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + ack_ids (Sequence[str]): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in + the ``Pull`` response. Must not be empty. + + This corresponds to the ``ack_ids`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, ack_ids]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.AcknowledgeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.AcknowledgeRequest): + request = pubsub.AcknowledgeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if ack_ids is not None: + request.ack_ids = ack_ids + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.acknowledge] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def pull(self, + request: Union[pubsub.PullRequest, dict] = None, + *, + subscription: str = None, + return_immediately: bool = None, + max_messages: int = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. 
+ + .. code-block:: python + + from google import pubsub_v1 + + def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = client.pull(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.PullRequest, dict]): + The request object. Request for the `Pull` method. + subscription (str): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + return_immediately (bool): + Optional. If this field is set to true, the system will + respond immediately even if there are no messages + available to return in the ``Pull`` response. Otherwise, + the system may wait (for a bounded amount of time) until + at least one message is available, rather than returning + no messages. Warning: setting this field to ``true`` is + discouraged because it adversely impacts the performance + of ``Pull`` operations. We recommend that users do not + set this field. + + This corresponds to the ``return_immediately`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + max_messages (int): + Required. The maximum number of + messages to return for this request. + Must be a positive integer. The Pub/Sub + system may return fewer than the number + specified. + + This corresponds to the ``max_messages`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.PullResponse: + Response for the Pull method. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, return_immediately, max_messages]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.PullRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.PullRequest): + request = pubsub.PullRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if return_immediately is not None: + request.return_immediately = return_immediately + if max_messages is not None: + request.max_messages = max_messages + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.pull] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def streaming_pull(self, + requests: Iterator[pubsub.StreamingPullRequest] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> Iterable[pubsub.StreamingPullResponse]: + r"""Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): + The request object iterator. Request for the `StreamingPull` + streaming RPC method. This request is used to establish + the initial stream as well as to stream acknowledgements + and ack deadline modifications from the client to the + server. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + Iterable[google.pubsub_v1.types.StreamingPullResponse]: + Response for the StreamingPull method. This response is used to stream + messages from the server to the client. + + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.streaming_pull] + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def modify_push_config(self, + request: Union[pubsub.ModifyPushConfigRequest, dict] = None, + *, + subscription: str = None, + push_config: pubsub.PushConfig = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + client.modify_push_config(request=request) + + Args: + request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): + The request object. Request for the ModifyPushConfig + method. + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. 
+ + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + push_config (google.pubsub_v1.types.PushConfig): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub + system should stop pushing messages from the given + subscription and allow messages to be pulled and + acknowledged - effectively pausing the subscription if + ``Pull`` or ``StreamingPull`` is not called. + + This corresponds to the ``push_config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, push_config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ModifyPushConfigRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ModifyPushConfigRequest): + request = pubsub.ModifyPushConfigRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if push_config is not None: + request.push_config = push_config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.modify_push_config] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def get_snapshot(self, + request: Union[pubsub.GetSnapshotRequest, dict] = None, + *, + snapshot: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Gets the configuration details of a snapshot. + Snapshots are used in Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): + The request object. Request for the GetSnapshot method. + snapshot (str): + Required. The name of the snapshot to get. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.GetSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.GetSnapshotRequest): + request = pubsub.GetSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("snapshot", request.snapshot), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_snapshots(self, + request: Union[pubsub.ListSnapshotsRequest, dict] = None, + *, + project: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSnapshotsPager: + r"""Lists the existing snapshots. Snapshots are used in + `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): + The request object. Request for the `ListSnapshots` + method. + project (str): + Required. The name of the project in which to list + snapshots. Format is ``projects/{project-id}``. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager: + Response for the ListSnapshots method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([project]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.ListSnapshotsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.ListSnapshotsRequest): + request = pubsub.ListSnapshotsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_snapshots] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("project", request.project), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSnapshotsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_snapshot(self, + request: Union[pubsub.CreateSnapshotRequest, dict] = None, + *, + name: str = None, + subscription: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Creates a snapshot from the requested subscription. 
Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): + The request object. Request for the `CreateSnapshot` + method. + name (str): + Required. User-provided name for this snapshot. If the + name is not provided in the request, the server will + assign a random name for this snapshot on the same + project as the subscription. Note that for REST API + requests, you must specify a name. See the resource name + rules. Format is + ``projects/{project}/snapshots/{snap}``. 
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + subscription (str): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is + guaranteed to retain: (a) The existing backlog on the + subscription. More precisely, this is defined as the + messages in the subscription's backlog that are + unacknowledged upon the successful completion of the + ``CreateSnapshot`` request; as well as: (b) Any messages + published to the subscription's topic following the + successful completion of the CreateSnapshot request. + Format is ``projects/{project}/subscriptions/{sub}``. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, subscription]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.CreateSnapshotRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.CreateSnapshotRequest): + request = pubsub.CreateSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if subscription is not None: + request.subscription = subscription + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_snapshot(self, + request: Union[pubsub.UpdateSnapshotRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = client.update_snapshot(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): + The request object. Request for the UpdateSnapshot + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Snapshot: + A snapshot resource. Snapshots are used in + [Seek](https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.UpdateSnapshotRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.UpdateSnapshotRequest): + request = pubsub.UpdateSnapshotRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("snapshot.name", request.snapshot.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_snapshot(self, + request: Union[pubsub.DeleteSnapshotRequest, dict] = None, + *, + snapshot: str = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. 
That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + .. code-block:: python + + from google import pubsub_v1 + + def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + client.delete_snapshot(request=request) + + Args: + request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): + The request object. Request for the `DeleteSnapshot` + method. + snapshot (str): + Required. The name of the snapshot to delete. Format is + ``projects/{project}/snapshots/{snap}``. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.DeleteSnapshotRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.DeleteSnapshotRequest): + request = pubsub.DeleteSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("snapshot", request.snapshot), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def seek(self, + request: Union[pubsub.SeekRequest, dict] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + .. 
code-block:: python + + from google import pubsub_v1 + + def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.SeekRequest, dict]): + The request object. Request for the `Seek` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.SeekResponse: + Response for the Seek method (this response is empty). + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a pubsub.SeekRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, pubsub.SeekRequest): + request = pubsub.SeekRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.seek] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("subscription", request.subscription), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! 
Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Sets the IAM access control policy on the specified function. + + Replaces any existing policy. + + Args: + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): + The request object. Request message for `SetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.SetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.set_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + r"""Gets the IAM access control policy for a function. + + Returns an empty policy if the function exists and does not have a + policy set. + + Args: + request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): + The request object. Request message for `GetIamPolicy` + method. + retry (google.api_core.retry.Retry): Designation of what errors, if + any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. + It is used to specify access control policies for Cloud + Platform resources. + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members`` to a single + ``role``. Members can be user accounts, service + accounts, Google groups, and domains (such as G Suite). + A ``role`` is a named list of permissions (defined by + IAM or configured by users). A ``binding`` can + optionally specify a ``condition``, which is a logic + expression that further constrains the role binding + based on attributes about the request and/or target + resource. 
+ **JSON Example** + :: + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", + } + } + ] + } + **YAML Example** + :: + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM + developer's + guide `__. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.GetIamPolicyRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.get_iam_policy, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + r"""Tests the specified IAM permissions against the IAM access control + policy for a function. + + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. + + Args: + request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): + The request object. Request message for + `TestIamPermissions` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + Returns: + ~.iam_policy_pb2.TestIamPermissionsResponse: + Response message for ``TestIamPermissions`` method. + """ + # Create or coerce a protobuf request object. + + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if isinstance(request, dict): + request = iam_policy_pb2.TestIamPermissionsRequest(**request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method.wrap_method( + self._transport.test_iam_permissions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("resource", request.resource),)), + ) + + # Send the request. + response = rpc( + request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-pubsub", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "SubscriberClient", +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py new file mode 100644 index 000000000000..6023648a5d20 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py @@ -0,0 +1,260 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.pubsub_v1.types import pubsub + + +class ListSubscriptionsPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``subscriptions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. 
+ + All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., pubsub.ListSubscriptionsResponse], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[pubsub.Subscription]: + for page in self.pages: + yield from page.subscriptions + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSubscriptionsAsyncPager: + """A pager for iterating through ``list_subscriptions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``subscriptions`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListSubscriptions`` requests and continue to iterate + through the ``subscriptions`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[pubsub.ListSubscriptionsResponse]], + request: pubsub.ListSubscriptionsRequest, + response: pubsub.ListSubscriptionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSubscriptionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSubscriptionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = pubsub.ListSubscriptionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListSubscriptionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[pubsub.Subscription]: + async def async_generator(): + async for page in self.pages: + for response in page.subscriptions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSnapshotsPager: + """A pager for iterating through ``list_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., pubsub.ListSnapshotsResponse], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSnapshotsRequest): + The initial request object. 
+ response (google.pubsub_v1.types.ListSnapshotsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[pubsub.Snapshot]: + for page in self.pages: + yield from page.snapshots + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListSnapshotsAsyncPager: + """A pager for iterating through ``list_snapshots`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``snapshots`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSnapshots`` requests and continue to iterate + through the ``snapshots`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[pubsub.ListSnapshotsResponse]], + request: pubsub.ListSnapshotsRequest, + response: pubsub.ListSnapshotsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.pubsub_v1.types.ListSnapshotsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSnapshotsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = pubsub.ListSnapshotsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[pubsub.ListSnapshotsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[pubsub.Snapshot]: + async def async_generator(): + async for page in self.pages: + for response in page.snapshots: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py new file mode 100644 index 000000000000..196dd1beea65 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import SubscriberTransport +from .grpc import SubscriberGrpcTransport +from .grpc_asyncio import SubscriberGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] +_transport_registry['grpc'] = SubscriberGrpcTransport +_transport_registry['grpc_asyncio'] = SubscriberGrpcAsyncIOTransport + +__all__ = ( + 'SubscriberTransport', + 'SubscriberGrpcTransport', + 'SubscriberGrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py new file mode 100644 index 000000000000..9e22202727af --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources

import google.auth  # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.pubsub_v1.types import pubsub

try:
    # Advertise the installed client-library version in the user agent.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-pubsub',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Library not installed as a distribution (e.g. run from a source
    # checkout): fall back to the default client info.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


class SubscriberTransport(abc.ABC):
    """Abstract transport class for Subscriber.

    Concrete subclasses (gRPC, gRPC-asyncio, ...) supply the actual RPC
    callables via the abstract properties below; this base class resolves
    credentials and precomputes the per-method retry/timeout wrappers.
    """

    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/pubsub',
    )

    DEFAULT_HOST: str = 'pubsub.googleapis.com'

    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: Optional[ga_credentials.Credentials] = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            **kwargs,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for
                billing and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT
                should be used for service account credentials.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host

        scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}

        # Save the scopes.
        self._scopes = scopes

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                **scopes_kwargs,
                quota_project_id=quota_project_id
            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)

        # If the credentials are service account credentials, then always try
        # to use self signed JWT (guard the hasattr check for older
        # google-auth versions that lack the method).
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

    def _prep_wrapped_messages(self, client_info):
        """Precompute the retry/timeout wrappers for every RPC method.

        All methods share the same exponential backoff (initial=0.1s,
        maximum=60s, multiplier=1.3); they differ only in which exceptions
        are retried and in the overall deadline, so the configuration is
        expressed once and applied per method.
        """
        # Retried for methods that are safe to repeat after transient or
        # ambiguous failures.
        transient = retries.if_exception_type(
            core_exceptions.Aborted,
            core_exceptions.ServiceUnavailable,
            core_exceptions.Unknown,
        )
        # Retried only when the service is momentarily unavailable.
        unavailable = retries.if_exception_type(
            core_exceptions.ServiceUnavailable,
        )
        # Streaming pull retries a wider set of conditions with a longer
        # deadline, since the stream is expected to be re-established.
        streaming = retries.if_exception_type(
            core_exceptions.Aborted,
            core_exceptions.DeadlineExceeded,
            core_exceptions.InternalServerError,
            core_exceptions.ResourceExhausted,
            core_exceptions.ServiceUnavailable,
        )

        def _wrap(method, predicate, deadline=60.0):
            # One retry/timeout wrapper; deadline doubles as the default
            # timeout, matching the generated per-method configuration.
            return gapic_v1.method.wrap_method(
                method,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=predicate,
                    deadline=deadline,
                ),
                default_timeout=deadline,
                client_info=client_info,
            )

        self._wrapped_methods = {
            self.create_subscription: _wrap(self.create_subscription, transient),
            self.get_subscription: _wrap(self.get_subscription, transient),
            self.update_subscription: _wrap(self.update_subscription, unavailable),
            self.list_subscriptions: _wrap(self.list_subscriptions, transient),
            self.delete_subscription: _wrap(self.delete_subscription, unavailable),
            self.modify_ack_deadline: _wrap(self.modify_ack_deadline, unavailable),
            self.acknowledge: _wrap(self.acknowledge, unavailable),
            self.pull: _wrap(self.pull, transient),
            self.streaming_pull: _wrap(self.streaming_pull, streaming, deadline=900.0),
            self.modify_push_config: _wrap(self.modify_push_config, unavailable),
            self.get_snapshot: _wrap(self.get_snapshot, transient),
            self.list_snapshots: _wrap(self.list_snapshots, transient),
            self.create_snapshot: _wrap(self.create_snapshot, unavailable),
            self.update_snapshot: _wrap(self.update_snapshot, unavailable),
            self.delete_snapshot: _wrap(self.delete_snapshot, unavailable),
            self.seek: _wrap(self.seek, transient),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    # Each abstract property below returns the transport's callable for one
    # RPC; sync transports return the response directly, async transports
    # return an awaitable — hence the Union return annotations.

    @property
    def create_subscription(self) -> Callable[
            [pubsub.Subscription],
            Union[pubsub.Subscription, Awaitable[pubsub.Subscription]]]:
        raise NotImplementedError()

    @property
    def get_subscription(self) -> Callable[
            [pubsub.GetSubscriptionRequest],
            Union[pubsub.Subscription, Awaitable[pubsub.Subscription]]]:
        raise NotImplementedError()

    @property
    def update_subscription(self) -> Callable[
            [pubsub.UpdateSubscriptionRequest],
            Union[pubsub.Subscription, Awaitable[pubsub.Subscription]]]:
        raise NotImplementedError()

    @property
    def list_subscriptions(self) -> Callable[
            [pubsub.ListSubscriptionsRequest],
            Union[pubsub.ListSubscriptionsResponse, Awaitable[pubsub.ListSubscriptionsResponse]]]:
        raise NotImplementedError()

    @property
    def delete_subscription(self) -> Callable[
            [pubsub.DeleteSubscriptionRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def modify_ack_deadline(self) -> Callable[
            [pubsub.ModifyAckDeadlineRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def acknowledge(self) -> Callable[
            [pubsub.AcknowledgeRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def pull(self) -> Callable[
            [pubsub.PullRequest],
            Union[pubsub.PullResponse, Awaitable[pubsub.PullResponse]]]:
        raise NotImplementedError()

    @property
    def streaming_pull(self) -> Callable[
            [pubsub.StreamingPullRequest],
            Union[pubsub.StreamingPullResponse, Awaitable[pubsub.StreamingPullResponse]]]:
        raise NotImplementedError()

    @property
    def modify_push_config(self) -> Callable[
            [pubsub.ModifyPushConfigRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def get_snapshot(self) -> Callable[
            [pubsub.GetSnapshotRequest],
            Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]]]:
        raise NotImplementedError()

    @property
    def list_snapshots(self) -> Callable[
            [pubsub.ListSnapshotsRequest],
            Union[pubsub.ListSnapshotsResponse, Awaitable[pubsub.ListSnapshotsResponse]]]:
        raise NotImplementedError()

    @property
    def create_snapshot(self) -> Callable[
            [pubsub.CreateSnapshotRequest],
            Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]]]:
        raise NotImplementedError()

    @property
    def update_snapshot(self) -> Callable[
            [pubsub.UpdateSnapshotRequest],
            Union[pubsub.Snapshot, Awaitable[pubsub.Snapshot]]]:
        raise NotImplementedError()

    @property
    def delete_snapshot(self) -> Callable[
            [pubsub.DeleteSnapshotRequest],
            Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]]:
        raise NotImplementedError()

    @property
    def seek(self) -> Callable[
            [pubsub.SeekRequest],
            Union[pubsub.SeekResponse, Awaitable[pubsub.SeekResponse]]]:
        raise NotImplementedError()

    @property
    def set_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.SetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def get_iam_policy(
        self,
    ) -> Callable[
        [iam_policy_pb2.GetIamPolicyRequest],
        Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]],
    ]:
        raise NotImplementedError()

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest],
        Union[
            iam_policy_pb2.TestIamPermissionsResponse,
            Awaitable[iam_policy_pb2.TestIamPermissionsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short identifier for the concrete transport, e.g. "grpc".
        raise NotImplementedError()


__all__ = (
    'SubscriberTransport',
)
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import grpc_helpers
from google.api_core import gapic_v1
import google.auth  # type: ignore
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore

from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.pubsub_v1.types import pubsub
from .base import SubscriberTransport, DEFAULT_CLIENT_INFO


class SubscriberGrpcTransport(SubscriberTransport):
    """gRPC backend transport for Subscriber.

    The service that an application uses to manipulate subscriptions and
    to consume messages from a subscription via the ``Pull`` method or
    by establishing a bi-directional stream using the ``StreamingPull``
    method.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    # Cache of lazily-created gRPC stub callables, keyed by method name.
    _stubs: Dict[str, Callable]

    def __init__(self, *,
            host: str = 'pubsub.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: str = None,
            scopes: Sequence[str] = None,
            channel: grpc.Channel = None,
            api_mtls_endpoint: str = None,
            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
            ssl_channel_credentials: grpc.ChannelCredentials = None,
            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # NOTE: ``False`` (not ``None``) so the base class does not try to
            # resolve default credentials for a caller-supplied channel.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            # No channel given: work out which SSL credentials to use for the
            # channel we will create below.
            if api_mtls_endpoint:
                # Deprecated mTLS path: the endpoint overrides ``host``.
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                # Modern mTLS path; explicit ssl_channel_credentials wins.
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                # Pub/Sub messages can be large; lift the default size caps.
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
                       host: str = 'pubsub.googleapis.com',
                       credentials: ga_credentials.Credentials = None,
                       credentials_file: str = None,
                       scopes: Optional[Sequence[str]] = None,
                       quota_project_id: Optional[str] = None,
                       **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel
+ """ + return self._grpc_channel + + @property + def create_subscription(self) -> Callable[ + [pubsub.Subscription], + pubsub.Subscription]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.Subscription], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_subscription' not in self._stubs: + self._stubs['create_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs['create_subscription'] + + @property + def get_subscription(self) -> Callable[ + [pubsub.GetSubscriptionRequest], + pubsub.Subscription]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_subscription' not in self._stubs: + self._stubs['get_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs['get_subscription'] + + @property + def update_subscription(self) -> Callable[ + [pubsub.UpdateSubscriptionRequest], + pubsub.Subscription]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + ~.Subscription]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_subscription' not in self._stubs: + self._stubs['update_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs['update_subscription'] + + @property + def list_subscriptions(self) -> Callable[ + [pubsub.ListSubscriptionsRequest], + pubsub.ListSubscriptionsResponse]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + ~.ListSubscriptionsResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_subscriptions' not in self._stubs: + self._stubs['list_subscriptions'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs['list_subscriptions'] + + @property + def delete_subscription(self) -> Callable[ + [pubsub.DeleteSubscriptionRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. + + Returns: + Callable[[~.DeleteSubscriptionRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_subscription' not in self._stubs: + self._stubs['delete_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_subscription'] + + @property + def modify_ack_deadline(self) -> Callable[ + [pubsub.ModifyAckDeadlineRequest], + empty_pb2.Empty]: + r"""Return a callable for the modify ack deadline method over gRPC. 
+ + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'modify_ack_deadline' not in self._stubs: + self._stubs['modify_ack_deadline'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['modify_ack_deadline'] + + @property + def acknowledge(self) -> Callable[ + [pubsub.AcknowledgeRequest], + empty_pb2.Empty]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'acknowledge' not in self._stubs: + self._stubs['acknowledge'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['acknowledge'] + + @property + def pull(self) -> Callable[ + [pubsub.PullRequest], + pubsub.PullResponse]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Returns: + Callable[[~.PullRequest], + ~.PullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'pull' not in self._stubs: + self._stubs['pull'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs['pull'] + + @property + def streaming_pull(self) -> Callable[ + [pubsub.StreamingPullRequest], + pubsub.StreamingPullResponse]: + r"""Return a callable for the streaming pull method over gRPC. + + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. 
+ + Returns: + Callable[[~.StreamingPullRequest], + ~.StreamingPullResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'streaming_pull' not in self._stubs: + self._stubs['streaming_pull'] = self.grpc_channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs['streaming_pull'] + + @property + def modify_push_config(self) -> Callable[ + [pubsub.ModifyPushConfigRequest], + empty_pb2.Empty]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'modify_push_config' not in self._stubs: + self._stubs['modify_push_config'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['modify_push_config'] + + @property + def get_snapshot(self) -> Callable[ + [pubsub.GetSnapshotRequest], + pubsub.Snapshot]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. + Snapshots are used in Seek + operations, which allow you to manage message + acknowledgments in bulk. That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_snapshot' not in self._stubs: + self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSnapshot', + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs['get_snapshot'] + + @property + def list_snapshots(self) -> Callable[ + [pubsub.ListSnapshotsRequest], + pubsub.ListSnapshotsResponse]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ + Returns: + Callable[[~.ListSnapshotsRequest], + ~.ListSnapshotsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_snapshots' not in self._stubs: + self._stubs['list_snapshots'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs['list_snapshots'] + + @property + def create_snapshot(self) -> Callable[ + [pubsub.CreateSnapshotRequest], + pubsub.Snapshot]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. 
+ + Returns: + Callable[[~.CreateSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_snapshot' not in self._stubs: + self._stubs['create_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=pubsub.CreateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs['create_snapshot'] + + @property + def update_snapshot(self) -> Callable[ + [pubsub.UpdateSnapshotRequest], + pubsub.Snapshot]: + r"""Return a callable for the update snapshot method over gRPC. + + Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Returns: + Callable[[~.UpdateSnapshotRequest], + ~.Snapshot]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_snapshot' not in self._stubs: + self._stubs['update_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=pubsub.UpdateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs['update_snapshot'] + + @property + def delete_snapshot(self) -> Callable[ + [pubsub.DeleteSnapshotRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete snapshot method over gRPC. + + Removes an existing snapshot. 
Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Returns: + Callable[[~.DeleteSnapshotRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_snapshot' not in self._stubs: + self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_snapshot'] + + @property + def seek(self) -> Callable[ + [pubsub.SeekRequest], + pubsub.SeekResponse]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. 
+ + Returns: + Callable[[~.SeekRequest], + ~.SeekResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'seek' not in self._stubs: + self._stubs['seek'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs['seek'] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'SubscriberGrpcTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py new file mode 100644 index 000000000000..e8c4a4da6273 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -0,0 +1,835 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO +from .grpc import SubscriberGrpcTransport + + +class SubscriberGrpcAsyncIOTransport(SubscriberTransport): + """gRPC AsyncIO backend transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'pubsub.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_subscription(self) -> Callable[ + [pubsub.Subscription], + Awaitable[pubsub.Subscription]]: + r"""Return a callable for the create subscription method over gRPC. + + Creates a subscription to a given topic. See the [resource name + rules] + (https://cloud.google.com/pubsub/docs/admin#resource_names). If + the subscription already exists, returns ``ALREADY_EXISTS``. If + the corresponding topic doesn't exist, returns ``NOT_FOUND``. + + If the name is not provided in the request, the server will + assign a random name for this subscription on the same project + as the topic, conforming to the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Subscription object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.Subscription], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_subscription' not in self._stubs: + self._stubs['create_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSubscription', + request_serializer=pubsub.Subscription.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs['create_subscription'] + + @property + def get_subscription(self) -> Callable[ + [pubsub.GetSubscriptionRequest], + Awaitable[pubsub.Subscription]]: + r"""Return a callable for the get subscription method over gRPC. + + Gets the configuration details of a subscription. + + Returns: + Callable[[~.GetSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_subscription' not in self._stubs: + self._stubs['get_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSubscription', + request_serializer=pubsub.GetSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs['get_subscription'] + + @property + def update_subscription(self) -> Callable[ + [pubsub.UpdateSubscriptionRequest], + Awaitable[pubsub.Subscription]]: + r"""Return a callable for the update subscription method over gRPC. + + Updates an existing subscription. Note that certain + properties of a subscription, such as its topic, are not + modifiable. + + Returns: + Callable[[~.UpdateSubscriptionRequest], + Awaitable[~.Subscription]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_subscription' not in self._stubs: + self._stubs['update_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSubscription', + request_serializer=pubsub.UpdateSubscriptionRequest.serialize, + response_deserializer=pubsub.Subscription.deserialize, + ) + return self._stubs['update_subscription'] + + @property + def list_subscriptions(self) -> Callable[ + [pubsub.ListSubscriptionsRequest], + Awaitable[pubsub.ListSubscriptionsResponse]]: + r"""Return a callable for the list subscriptions method over gRPC. + + Lists matching subscriptions. + + Returns: + Callable[[~.ListSubscriptionsRequest], + Awaitable[~.ListSubscriptionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_subscriptions' not in self._stubs: + self._stubs['list_subscriptions'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSubscriptions', + request_serializer=pubsub.ListSubscriptionsRequest.serialize, + response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, + ) + return self._stubs['list_subscriptions'] + + @property + def delete_subscription(self) -> Callable[ + [pubsub.DeleteSubscriptionRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete subscription method over gRPC. + + Deletes an existing subscription. All messages retained in the + subscription are immediately dropped. Calls to ``Pull`` after + deletion will return ``NOT_FOUND``. After a subscription is + deleted, a new one may be created with the same name, but the + new one has no association with the old subscription or its + topic unless the same topic is specified. 
+ + Returns: + Callable[[~.DeleteSubscriptionRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_subscription' not in self._stubs: + self._stubs['delete_subscription'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSubscription', + request_serializer=pubsub.DeleteSubscriptionRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_subscription'] + + @property + def modify_ack_deadline(self) -> Callable[ + [pubsub.ModifyAckDeadlineRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the modify ack deadline method over gRPC. + + Modifies the ack deadline for a specific message. This method is + useful to indicate that more time is needed to process a message + by the subscriber, or to make the message available for + redelivery if the processing was interrupted. Note that this + does not modify the subscription-level ``ackDeadlineSeconds`` + used for subsequent messages. + + Returns: + Callable[[~.ModifyAckDeadlineRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'modify_ack_deadline' not in self._stubs: + self._stubs['modify_ack_deadline'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyAckDeadline', + request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['modify_ack_deadline'] + + @property + def acknowledge(self) -> Callable[ + [pubsub.AcknowledgeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the acknowledge method over gRPC. + + Acknowledges the messages associated with the ``ack_ids`` in the + ``AcknowledgeRequest``. The Pub/Sub system can remove the + relevant messages from the subscription. + + Acknowledging a message whose ack deadline has expired may + succeed, but such a message may be redelivered later. + Acknowledging a message more than once will not result in an + error. + + Returns: + Callable[[~.AcknowledgeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'acknowledge' not in self._stubs: + self._stubs['acknowledge'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/Acknowledge', + request_serializer=pubsub.AcknowledgeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['acknowledge'] + + @property + def pull(self) -> Callable[ + [pubsub.PullRequest], + Awaitable[pubsub.PullResponse]]: + r"""Return a callable for the pull method over gRPC. + + Pulls messages from the server. The server may return + ``UNAVAILABLE`` if there are too many concurrent pull requests + pending for the given subscription. + + Returns: + Callable[[~.PullRequest], + Awaitable[~.PullResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'pull' not in self._stubs: + self._stubs['pull'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/Pull', + request_serializer=pubsub.PullRequest.serialize, + response_deserializer=pubsub.PullResponse.deserialize, + ) + return self._stubs['pull'] + + @property + def streaming_pull(self) -> Callable[ + [pubsub.StreamingPullRequest], + Awaitable[pubsub.StreamingPullResponse]]: + r"""Return a callable for the streaming pull method over gRPC. + + Establishes a stream with the server, which sends messages down + to the client. The client streams acknowledgements and ack + deadline modifications back to the server. The server will close + the stream and return the status on any error. The server may + close the stream with status ``UNAVAILABLE`` to reassign + server-side resources, in which case, the client should + re-establish the stream. Flow control can be achieved by + configuring the underlying RPC channel. + + Returns: + Callable[[~.StreamingPullRequest], + Awaitable[~.StreamingPullResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'streaming_pull' not in self._stubs: + self._stubs['streaming_pull'] = self.grpc_channel.stream_stream( + '/google.pubsub.v1.Subscriber/StreamingPull', + request_serializer=pubsub.StreamingPullRequest.serialize, + response_deserializer=pubsub.StreamingPullResponse.deserialize, + ) + return self._stubs['streaming_pull'] + + @property + def modify_push_config(self) -> Callable[ + [pubsub.ModifyPushConfigRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the modify push config method over gRPC. + + Modifies the ``PushConfig`` for a specified subscription. + + This may be used to change a push subscription to a pull one + (signified by an empty ``PushConfig``) or vice versa, or change + the endpoint URL and other attributes of a push subscription. + Messages will accumulate for delivery continuously through the + call regardless of changes to the ``PushConfig``. + + Returns: + Callable[[~.ModifyPushConfigRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'modify_push_config' not in self._stubs: + self._stubs['modify_push_config'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ModifyPushConfig', + request_serializer=pubsub.ModifyPushConfigRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['modify_push_config'] + + @property + def get_snapshot(self) -> Callable[ + [pubsub.GetSnapshotRequest], + Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the get snapshot method over gRPC. + + Gets the configuration details of a snapshot. + Snapshots are used in Seek + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + Returns: + Callable[[~.GetSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_snapshot' not in self._stubs: + self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/GetSnapshot', + request_serializer=pubsub.GetSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs['get_snapshot'] + + @property + def list_snapshots(self) -> Callable[ + [pubsub.ListSnapshotsRequest], + Awaitable[pubsub.ListSnapshotsResponse]]: + r"""Return a callable for the list snapshots method over gRPC. + + Lists the existing snapshots. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + + Returns: + Callable[[~.ListSnapshotsRequest], + Awaitable[~.ListSnapshotsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_snapshots' not in self._stubs: + self._stubs['list_snapshots'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/ListSnapshots', + request_serializer=pubsub.ListSnapshotsRequest.serialize, + response_deserializer=pubsub.ListSnapshotsResponse.deserialize, + ) + return self._stubs['list_snapshots'] + + @property + def create_snapshot(self) -> Callable[ + [pubsub.CreateSnapshotRequest], + Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the create snapshot method over gRPC. + + Creates a snapshot from the requested subscription. Snapshots + are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + If the snapshot already exists, returns ``ALREADY_EXISTS``. If + the requested subscription doesn't exist, returns ``NOT_FOUND``. + If the backlog in the subscription is too old -- and the + resulting snapshot would expire in less than 1 hour -- then + ``FAILED_PRECONDITION`` is returned. See also the + ``Snapshot.expire_time`` field. If the name is not provided in + the request, the server will assign a random name for this + snapshot on the same project as the subscription, conforming to + the [resource name format] + (https://cloud.google.com/pubsub/docs/admin#resource_names). The + generated name is populated in the returned Snapshot object. + Note that for REST API requests, you must specify a name in the + request. + + Returns: + Callable[[~.CreateSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_snapshot' not in self._stubs: + self._stubs['create_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/CreateSnapshot', + request_serializer=pubsub.CreateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs['create_snapshot'] + + @property + def update_snapshot(self) -> Callable[ + [pubsub.UpdateSnapshotRequest], + Awaitable[pubsub.Snapshot]]: + r"""Return a callable for the update snapshot method over gRPC. + + Updates an existing snapshot. Snapshots are used in + Seek + operations, which allow + you to manage message acknowledgments in bulk. That is, + you can set the acknowledgment state of messages in an + existing subscription to the state captured by a + snapshot. + + Returns: + Callable[[~.UpdateSnapshotRequest], + Awaitable[~.Snapshot]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_snapshot' not in self._stubs: + self._stubs['update_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/UpdateSnapshot', + request_serializer=pubsub.UpdateSnapshotRequest.serialize, + response_deserializer=pubsub.Snapshot.deserialize, + ) + return self._stubs['update_snapshot'] + + @property + def delete_snapshot(self) -> Callable[ + [pubsub.DeleteSnapshotRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete snapshot method over gRPC. + + Removes an existing snapshot. Snapshots are used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. 
+ When the snapshot is deleted, all messages retained in the + snapshot are immediately dropped. After a snapshot is deleted, a + new one may be created with the same name, but the new one has + no association with the old snapshot or its subscription, unless + the same subscription is specified. + + Returns: + Callable[[~.DeleteSnapshotRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_snapshot' not in self._stubs: + self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/DeleteSnapshot', + request_serializer=pubsub.DeleteSnapshotRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_snapshot'] + + @property + def seek(self) -> Callable[ + [pubsub.SeekRequest], + Awaitable[pubsub.SeekResponse]]: + r"""Return a callable for the seek method over gRPC. + + Seeks an existing subscription to a point in time or to a given + snapshot, whichever is provided in the request. Snapshots are + used in [Seek] + (https://cloud.google.com/pubsub/docs/replay-overview) + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. + Note that both the subscription and the snapshot must be on the + same topic. + + Returns: + Callable[[~.SeekRequest], + Awaitable[~.SeekResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'seek' not in self._stubs: + self._stubs['seek'] = self.grpc_channel.unary_unary( + '/google.pubsub.v1.Subscriber/Seek', + request_serializer=pubsub.SeekRequest.serialize, + response_deserializer=pubsub.SeekResponse.deserialize, + ) + return self._stubs['seek'] + + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + Awaitable[~.Policy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + Awaitable[iam_policy_pb2.TestIamPermissionsResponse], + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + Awaitable[~.TestIamPermissionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'SubscriberGrpcAsyncIOTransport', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py new file mode 100644 index 000000000000..cb65f15ef091 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py @@ -0,0 +1,136 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .pubsub import ( + AcknowledgeRequest, + BigQueryConfig, + CreateSnapshotRequest, + DeadLetterPolicy, + DeleteSnapshotRequest, + DeleteSubscriptionRequest, + DeleteTopicRequest, + DetachSubscriptionRequest, + DetachSubscriptionResponse, + ExpirationPolicy, + GetSnapshotRequest, + GetSubscriptionRequest, + GetTopicRequest, + ListSnapshotsRequest, + ListSnapshotsResponse, + ListSubscriptionsRequest, + ListSubscriptionsResponse, + ListTopicSnapshotsRequest, + ListTopicSnapshotsResponse, + ListTopicsRequest, + ListTopicsResponse, + ListTopicSubscriptionsRequest, + ListTopicSubscriptionsResponse, + MessageStoragePolicy, + ModifyAckDeadlineRequest, + ModifyPushConfigRequest, + PublishRequest, + PublishResponse, + PubsubMessage, + PullRequest, + PullResponse, + PushConfig, + ReceivedMessage, + RetryPolicy, + SchemaSettings, + SeekRequest, + SeekResponse, + Snapshot, + StreamingPullRequest, + StreamingPullResponse, + Subscription, + Topic, + UpdateSnapshotRequest, + UpdateSubscriptionRequest, + UpdateTopicRequest, +) +from .schema import ( + CreateSchemaRequest, + DeleteSchemaRequest, + GetSchemaRequest, + ListSchemasRequest, + ListSchemasResponse, + Schema, + ValidateMessageRequest, + ValidateMessageResponse, + ValidateSchemaRequest, + ValidateSchemaResponse, + Encoding, + SchemaView, +) + +__all__ = ( + 'AcknowledgeRequest', + 'BigQueryConfig', + 'CreateSnapshotRequest', + 'DeadLetterPolicy', + 'DeleteSnapshotRequest', + 'DeleteSubscriptionRequest', + 'DeleteTopicRequest', + 'DetachSubscriptionRequest', + 'DetachSubscriptionResponse', + 'ExpirationPolicy', + 'GetSnapshotRequest', + 'GetSubscriptionRequest', + 'GetTopicRequest', + 'ListSnapshotsRequest', + 'ListSnapshotsResponse', + 'ListSubscriptionsRequest', + 'ListSubscriptionsResponse', + 'ListTopicSnapshotsRequest', + 'ListTopicSnapshotsResponse', + 'ListTopicsRequest', + 'ListTopicsResponse', + 'ListTopicSubscriptionsRequest', + 'ListTopicSubscriptionsResponse', + 'MessageStoragePolicy', + 
'ModifyAckDeadlineRequest', + 'ModifyPushConfigRequest', + 'PublishRequest', + 'PublishResponse', + 'PubsubMessage', + 'PullRequest', + 'PullResponse', + 'PushConfig', + 'ReceivedMessage', + 'RetryPolicy', + 'SchemaSettings', + 'SeekRequest', + 'SeekResponse', + 'Snapshot', + 'StreamingPullRequest', + 'StreamingPullResponse', + 'Subscription', + 'Topic', + 'UpdateSnapshotRequest', + 'UpdateSubscriptionRequest', + 'UpdateTopicRequest', + 'CreateSchemaRequest', + 'DeleteSchemaRequest', + 'GetSchemaRequest', + 'ListSchemasRequest', + 'ListSchemasResponse', + 'Schema', + 'ValidateMessageRequest', + 'ValidateMessageResponse', + 'ValidateSchemaRequest', + 'ValidateSchemaResponse', + 'Encoding', + 'SchemaView', +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py new file mode 100644 index 000000000000..48f6b24000f4 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py @@ -0,0 +1,1866 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.types import schema as gp_schema + + +__protobuf__ = proto.module( + package='google.pubsub.v1', + manifest={ + 'MessageStoragePolicy', + 'SchemaSettings', + 'Topic', + 'PubsubMessage', + 'GetTopicRequest', + 'UpdateTopicRequest', + 'PublishRequest', + 'PublishResponse', + 'ListTopicsRequest', + 'ListTopicsResponse', + 'ListTopicSubscriptionsRequest', + 'ListTopicSubscriptionsResponse', + 'ListTopicSnapshotsRequest', + 'ListTopicSnapshotsResponse', + 'DeleteTopicRequest', + 'DetachSubscriptionRequest', + 'DetachSubscriptionResponse', + 'Subscription', + 'RetryPolicy', + 'DeadLetterPolicy', + 'ExpirationPolicy', + 'PushConfig', + 'BigQueryConfig', + 'ReceivedMessage', + 'GetSubscriptionRequest', + 'UpdateSubscriptionRequest', + 'ListSubscriptionsRequest', + 'ListSubscriptionsResponse', + 'DeleteSubscriptionRequest', + 'ModifyPushConfigRequest', + 'PullRequest', + 'PullResponse', + 'ModifyAckDeadlineRequest', + 'AcknowledgeRequest', + 'StreamingPullRequest', + 'StreamingPullResponse', + 'CreateSnapshotRequest', + 'UpdateSnapshotRequest', + 'Snapshot', + 'GetSnapshotRequest', + 'ListSnapshotsRequest', + 'ListSnapshotsResponse', + 'DeleteSnapshotRequest', + 'SeekRequest', + 'SeekResponse', + }, +) + + +class MessageStoragePolicy(proto.Message): + r"""A policy constraining the storage of messages published to + the topic. + + Attributes: + allowed_persistence_regions (Sequence[str]): + A list of IDs of GCP regions where messages + that are published to the topic may be persisted + in storage. Messages published by publishers + running in non-allowed GCP regions (or running + outside of GCP altogether) will be routed for + storage in one of the allowed regions. 
An empty + list means that no regions are allowed, and is + not a valid configuration. + """ + + allowed_persistence_regions = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class SchemaSettings(proto.Message): + r"""Settings for validating messages published against a schema. + + Attributes: + schema (str): + Required. The name of the schema that messages published + should be validated against. Format is + ``projects/{project}/schemas/{schema}``. The value of this + field will be ``_deleted-schema_`` if the schema has been + deleted. + encoding (google.pubsub_v1.types.Encoding): + The encoding of messages validated against ``schema``. + """ + + schema = proto.Field( + proto.STRING, + number=1, + ) + encoding = proto.Field( + proto.ENUM, + number=2, + enum=gp_schema.Encoding, + ) + + +class Topic(proto.Message): + r"""A topic resource. + + Attributes: + name (str): + Required. The name of the topic. It must have the format + ``"projects/{project}/topics/{topic}"``. ``{topic}`` must + start with a letter, and contain only letters + (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + underscores (``_``), periods (``.``), tildes (``~``), plus + (``+``) or percent signs (``%``). It must be between 3 and + 255 characters in length, and it must not start with + ``"goog"``. + labels (Mapping[str, str]): + See [Creating and managing labels] + (https://cloud.google.com/pubsub/docs/labels). + message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy): + Policy constraining the set of Google Cloud + Platform regions where messages published to the + topic may be stored. If not present, then no + constraints are in effect. + kms_key_name (str): + The resource name of the Cloud KMS CryptoKey to be used to + protect access to messages published on this topic. + + The expected format is + ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. 
+ schema_settings (google.pubsub_v1.types.SchemaSettings): + Settings for validating messages published + against a schema. + satisfies_pzs (bool): + Reserved for future use. This field is set + only in responses from the server; it is ignored + if it is set in any requests. + message_retention_duration (google.protobuf.duration_pb2.Duration): + Indicates the minimum duration to retain a message after it + is published to the topic. If this field is set, messages + published to the topic in the last + ``message_retention_duration`` are always available to + subscribers. For instance, it allows any attached + subscription to `seek to a + timestamp `__ + that is up to ``message_retention_duration`` in the past. If + this field is not set, message retention is controlled by + settings on individual subscriptions. Cannot be more than 7 + days or less than 10 minutes. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + message_storage_policy = proto.Field( + proto.MESSAGE, + number=3, + message='MessageStoragePolicy', + ) + kms_key_name = proto.Field( + proto.STRING, + number=5, + ) + schema_settings = proto.Field( + proto.MESSAGE, + number=6, + message='SchemaSettings', + ) + satisfies_pzs = proto.Field( + proto.BOOL, + number=7, + ) + message_retention_duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + + +class PubsubMessage(proto.Message): + r"""A message that is published by publishers and consumed by + subscribers. The message must contain either a non-empty data field + or at least one attribute. Note that client libraries represent this + object differently depending on the language. See the corresponding + `client library + documentation `__ + for more information. See [quotas and limits] + (https://cloud.google.com/pubsub/quotas) for more information about + message limits. + + Attributes: + data (bytes): + The message data field. 
If this field is + empty, the message must contain at least one + attribute. + attributes (Mapping[str, str]): + Attributes for this message. If this field is + empty, the message must contain non-empty data. + This can be used to filter messages on the + subscription. + message_id (str): + ID of this message, assigned by the server when the message + is published. Guaranteed to be unique within the topic. This + value may be read by a subscriber that receives a + ``PubsubMessage`` via a ``Pull`` call or a push delivery. It + must not be populated by the publisher in a ``Publish`` + call. + publish_time (google.protobuf.timestamp_pb2.Timestamp): + The time at which the message was published, populated by + the server when it receives the ``Publish`` call. It must + not be populated by the publisher in a ``Publish`` call. + ordering_key (str): + If non-empty, identifies related messages for which publish + order should be respected. If a ``Subscription`` has + ``enable_message_ordering`` set to ``true``, messages + published with the same non-empty ``ordering_key`` value + will be delivered to subscribers in the order in which they + are received by the Pub/Sub system. All ``PubsubMessage``\ s + published in a given ``PublishRequest`` must specify the + same ``ordering_key`` value. + """ + + data = proto.Field( + proto.BYTES, + number=1, + ) + attributes = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + message_id = proto.Field( + proto.STRING, + number=3, + ) + publish_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + ordering_key = proto.Field( + proto.STRING, + number=5, + ) + + +class GetTopicRequest(proto.Message): + r"""Request for the GetTopic method. + + Attributes: + topic (str): + Required. The name of the topic to get. Format is + ``projects/{project}/topics/{topic}``. 
+ """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateTopicRequest(proto.Message): + r"""Request for the UpdateTopic method. + + Attributes: + topic (google.pubsub_v1.types.Topic): + Required. The updated topic object. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the provided topic to + update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but the + ``message_storage_policy`` is not set in the ``topic`` + provided above, then the updated value is determined by the + policy configured at the project or organization level. + """ + + topic = proto.Field( + proto.MESSAGE, + number=1, + message='Topic', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class PublishRequest(proto.Message): + r"""Request for the Publish method. + + Attributes: + topic (str): + Required. The messages in the request will be published on + this topic. Format is ``projects/{project}/topics/{topic}``. + messages (Sequence[google.pubsub_v1.types.PubsubMessage]): + Required. The messages to publish. + """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + messages = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='PubsubMessage', + ) + + +class PublishResponse(proto.Message): + r"""Response for the ``Publish`` method. + + Attributes: + message_ids (Sequence[str]): + The server-assigned ID of each published + message, in the same order as the messages in + the request. IDs are guaranteed to be unique + within the topic. + """ + + message_ids = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class ListTopicsRequest(proto.Message): + r"""Request for the ``ListTopics`` method. + + Attributes: + project (str): + Required. The name of the project in which to list topics. + Format is ``projects/{project-id}``. + page_size (int): + Maximum number of topics to return. 
+ page_token (str): + The value returned by the last ``ListTopicsResponse``; + indicates that this is a continuation of a prior + ``ListTopics`` call, and that the system should return the + next page of data. + """ + + project = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicsResponse(proto.Message): + r"""Response for the ``ListTopics`` method. + + Attributes: + topics (Sequence[google.pubsub_v1.types.Topic]): + The resulting topics. + next_page_token (str): + If not empty, indicates that there may be more topics that + match the request; this value should be passed in a new + ``ListTopicsRequest``. + """ + + @property + def raw_page(self): + return self + + topics = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Topic', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTopicSubscriptionsRequest(proto.Message): + r"""Request for the ``ListTopicSubscriptions`` method. + + Attributes: + topic (str): + Required. The name of the topic that subscriptions are + attached to. Format is + ``projects/{project}/topics/{topic}``. + page_size (int): + Maximum number of subscription names to + return. + page_token (str): + The value returned by the last + ``ListTopicSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListTopicSubscriptions`` call, and + that the system should return the next page of data. + """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicSubscriptionsResponse(proto.Message): + r"""Response for the ``ListTopicSubscriptions`` method. + + Attributes: + subscriptions (Sequence[str]): + The names of subscriptions attached to the + topic specified in the request. 
+ next_page_token (str): + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListTopicSubscriptionsRequest`` to get more subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class ListTopicSnapshotsRequest(proto.Message): + r"""Request for the ``ListTopicSnapshots`` method. + + Attributes: + topic (str): + Required. The name of the topic that snapshots are attached + to. Format is ``projects/{project}/topics/{topic}``. + page_size (int): + Maximum number of snapshot names to return. + page_token (str): + The value returned by the last + ``ListTopicSnapshotsResponse``; indicates that this is a + continuation of a prior ``ListTopicSnapshots`` call, and + that the system should return the next page of data. + """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListTopicSnapshotsResponse(proto.Message): + r"""Response for the ``ListTopicSnapshots`` method. + + Attributes: + snapshots (Sequence[str]): + The names of the snapshots that match the + request. + next_page_token (str): + If not empty, indicates that there may be more snapshots + that match the request; this value should be passed in a new + ``ListTopicSnapshotsRequest`` to get more snapshots. + """ + + @property + def raw_page(self): + return self + + snapshots = proto.RepeatedField( + proto.STRING, + number=1, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteTopicRequest(proto.Message): + r"""Request for the ``DeleteTopic`` method. + + Attributes: + topic (str): + Required. Name of the topic to delete. Format is + ``projects/{project}/topics/{topic}``. 
+ """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + + +class DetachSubscriptionRequest(proto.Message): + r"""Request for the DetachSubscription method. + + Attributes: + subscription (str): + Required. The subscription to detach. Format is + ``projects/{project}/subscriptions/{subscription}``. + """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + + +class DetachSubscriptionResponse(proto.Message): + r"""Response for the DetachSubscription method. + Reserved for future use. + + """ + + +class Subscription(proto.Message): + r"""A subscription resource. + + Attributes: + name (str): + Required. The name of the subscription. It must have the + format + ``"projects/{project}/subscriptions/{subscription}"``. + ``{subscription}`` must start with a letter, and contain + only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must be + between 3 and 255 characters in length, and it must not + start with ``"goog"``. + topic (str): + Required. The name of the topic from which this subscription + is receiving messages. Format is + ``projects/{project}/topics/{topic}``. The value of this + field will be ``_deleted-topic_`` if the topic has been + deleted. + push_config (google.pubsub_v1.types.PushConfig): + If push delivery is used with this subscription, this field + is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages using + API methods. + bigquery_config (google.pubsub_v1.types.BigQueryConfig): + If delivery to BigQuery is used with this subscription, this + field is used to configure it. Either ``pushConfig`` or + ``bigQueryConfig`` can be set, but not both. If both are + empty, then the subscriber will pull and ack messages using + API methods. 
+ ack_deadline_seconds (int): + The approximate amount of time (on a best-effort basis) + Pub/Sub waits for the subscriber to acknowledge receipt + before resending the message. In the interval after the + message is delivered and before it is acknowledged, it is + considered to be outstanding. During that time period, the + message will not be redelivered (on a best-effort basis). + + For pull subscriptions, this value is used as the initial + value for the ack deadline. To override this value for a + given message, call ``ModifyAckDeadline`` with the + corresponding ``ack_id`` if using non-streaming pull or send + the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if + using streaming pull. The minimum custom deadline you can + specify is 10 seconds. The maximum custom deadline you can + specify is 600 seconds (10 minutes). If this parameter is 0, + a default value of 10 seconds is used. + + For push delivery, this value is also used to set the + request timeout for the call to the push endpoint. + + If the subscriber never acknowledges the message, the + Pub/Sub system will eventually redeliver the message. + retain_acked_messages (bool): + Indicates whether to retain acknowledged messages. If true, + then messages are not expunged from the subscription's + backlog, even if they are acknowledged, until they fall out + of the ``message_retention_duration`` window. This must be + true if you would like to [``Seek`` to a timestamp] + (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) + in the past to replay previously-acknowledged messages. + message_retention_duration (google.protobuf.duration_pb2.Duration): + How long to retain unacknowledged messages in the + subscription's backlog, from the moment a message is + published. If ``retain_acked_messages`` is true, then this + also configures the retention of acknowledged messages, and + thus configures how far back in time a ``Seek`` can be done. + Defaults to 7 days. 
Cannot be more than 7 days or less than + 10 minutes. + labels (Mapping[str, str]): + See + Creating and managing labels. + enable_message_ordering (bool): + If true, messages published with the same ``ordering_key`` + in ``PubsubMessage`` will be delivered to the subscribers in + the order in which they are received by the Pub/Sub system. + Otherwise, they may be delivered in any order. + expiration_policy (google.pubsub_v1.types.ExpirationPolicy): + A policy that specifies the conditions for this + subscription's expiration. A subscription is considered + active as long as any connected subscriber is successfully + consuming messages from the subscription or is issuing + operations on the subscription. If ``expiration_policy`` is + not set, a *default policy* with ``ttl`` of 31 days will be + used. The minimum allowed value for + ``expiration_policy.ttl`` is 1 day. + filter (str): + An expression written in the Pub/Sub `filter + language `__. + If non-empty, then only ``PubsubMessage``\ s whose + ``attributes`` field matches the filter are delivered on + this subscription. If empty, then no messages are filtered + out. + dead_letter_policy (google.pubsub_v1.types.DeadLetterPolicy): + A policy that specifies the conditions for dead lettering + messages in this subscription. If dead_letter_policy is not + set, dead lettering is disabled. + + The Cloud Pub/Sub service account associated with this + subscriptions's parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) + must have permission to Acknowledge() messages on this + subscription. + retry_policy (google.pubsub_v1.types.RetryPolicy): + A policy that specifies how Pub/Sub retries + message delivery for this subscription. + + If not set, the default retry policy is applied. + This generally implies that messages will be + retried as soon as possible for healthy + subscribers. 
RetryPolicy will be triggered on + NACKs or acknowledgement deadline exceeded + events for a given message. + detached (bool): + Indicates whether the subscription is detached from its + topic. Detached subscriptions don't receive messages from + their topic and don't retain any backlog. ``Pull`` and + ``StreamingPull`` requests will return FAILED_PRECONDITION. + If the subscription is a push subscription, pushes to the + endpoint will not be made. + enable_exactly_once_delivery (bool): + If true, Pub/Sub provides the following guarantees for the + delivery of a message with a given value of ``message_id`` + on this subscription: + + - The message sent to a subscriber is guaranteed not to be + resent before the message's acknowledgement deadline + expires. + - An acknowledged message will not be resent to a + subscriber. + + Note that subscribers may still receive multiple copies of a + message when ``enable_exactly_once_delivery`` is true if the + message was published multiple times by a publisher client. + These copies are considered distinct by Pub/Sub and have + distinct ``message_id`` values. + topic_message_retention_duration (google.protobuf.duration_pb2.Duration): + Output only. Indicates the minimum duration for which a + message is retained after it is published to the + subscription's topic. If this field is set, messages + published to the subscription's topic in the last + ``topic_message_retention_duration`` are always available to + subscribers. See the ``message_retention_duration`` field in + ``Topic``. This field is set only in responses from the + server; it is ignored if it is set in any requests. + state (google.pubsub_v1.types.Subscription.State): + Output only. An output-only field indicating + whether or not the subscription can receive + messages. 
+ """ + class State(proto.Enum): + r"""Possible states for a subscription.""" + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + RESOURCE_ERROR = 2 + + name = proto.Field( + proto.STRING, + number=1, + ) + topic = proto.Field( + proto.STRING, + number=2, + ) + push_config = proto.Field( + proto.MESSAGE, + number=4, + message='PushConfig', + ) + bigquery_config = proto.Field( + proto.MESSAGE, + number=18, + message='BigQueryConfig', + ) + ack_deadline_seconds = proto.Field( + proto.INT32, + number=5, + ) + retain_acked_messages = proto.Field( + proto.BOOL, + number=7, + ) + message_retention_duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=9, + ) + enable_message_ordering = proto.Field( + proto.BOOL, + number=10, + ) + expiration_policy = proto.Field( + proto.MESSAGE, + number=11, + message='ExpirationPolicy', + ) + filter = proto.Field( + proto.STRING, + number=12, + ) + dead_letter_policy = proto.Field( + proto.MESSAGE, + number=13, + message='DeadLetterPolicy', + ) + retry_policy = proto.Field( + proto.MESSAGE, + number=14, + message='RetryPolicy', + ) + detached = proto.Field( + proto.BOOL, + number=15, + ) + enable_exactly_once_delivery = proto.Field( + proto.BOOL, + number=16, + ) + topic_message_retention_duration = proto.Field( + proto.MESSAGE, + number=17, + message=duration_pb2.Duration, + ) + state = proto.Field( + proto.ENUM, + number=19, + enum=State, + ) + + +class RetryPolicy(proto.Message): + r"""A policy that specifies how Cloud Pub/Sub retries message delivery. + + Retry delay will be exponential based on provided minimum and + maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. + + RetryPolicy will be triggered on NACKs or acknowledgement deadline + exceeded events for a given message. + + Retry Policy is implemented on a best effort basis. At times, the + delay between consecutive deliveries may not match the + configuration. 
That is, delay can be more or less than configured + backoff. + + Attributes: + minimum_backoff (google.protobuf.duration_pb2.Duration): + The minimum delay between consecutive + deliveries of a given message. Value should be + between 0 and 600 seconds. Defaults to 10 + seconds. + maximum_backoff (google.protobuf.duration_pb2.Duration): + The maximum delay between consecutive + deliveries of a given message. Value should be + between 0 and 600 seconds. Defaults to 600 + seconds. + """ + + minimum_backoff = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + maximum_backoff = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + +class DeadLetterPolicy(proto.Message): + r"""Dead lettering is done on a best effort basis. The same + message might be dead lettered multiple times. + + If validation on any of the fields fails at subscription + creation/updation, the create/update subscription request will + fail. + + Attributes: + dead_letter_topic (str): + The name of the topic to which dead letter messages should + be published. Format is + ``projects/{project}/topics/{topic}``.The Cloud Pub/Sub + service account associated with the enclosing subscription's + parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) + must have permission to Publish() to this topic. + + The operation will fail if the topic does not exist. Users + should ensure that there is a subscription attached to this + topic since messages published to a topic with no + subscriptions are lost. + max_delivery_attempts (int): + The maximum number of delivery attempts for any message. The + value must be between 5 and 100. + + The number of delivery attempts is defined as 1 + (the sum + of number of NACKs and number of times the acknowledgement + deadline has been exceeded for the message). + + A NACK is any call to ModifyAckDeadline with a 0 deadline. 
+ Note that client libraries may automatically extend + ack_deadlines. + + This field will be honored on a best effort basis. + + If this parameter is 0, a default value of 5 is used. + """ + + dead_letter_topic = proto.Field( + proto.STRING, + number=1, + ) + max_delivery_attempts = proto.Field( + proto.INT32, + number=2, + ) + + +class ExpirationPolicy(proto.Message): + r"""A policy that specifies the conditions for resource + expiration (i.e., automatic resource deletion). + + Attributes: + ttl (google.protobuf.duration_pb2.Duration): + Specifies the "time-to-live" duration for an associated + resource. The resource expires if it is not active for a + period of ``ttl``. The definition of "activity" depends on + the type of the associated resource. The minimum and maximum + allowed values for ``ttl`` depend on the type of the + associated resource, as well. If ``ttl`` is not set, the + associated resource never expires. + """ + + ttl = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + +class PushConfig(proto.Message): + r"""Configuration for a push delivery endpoint. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + push_endpoint (str): + A URL locating the endpoint to which messages should be + pushed. For example, a Webhook endpoint might use + ``https://example.com/push``. + attributes (Mapping[str, str]): + Endpoint configuration attributes that can be used to + control different aspects of the message delivery. + + The only currently supported attribute is + ``x-goog-version``, which you can use to change the format + of the pushed message. This attribute indicates the version + of the data expected by the endpoint. This controls the + shape of the pushed message (i.e., its fields and metadata). + + If not present during the ``CreateSubscription`` call, it + will default to the version of the Pub/Sub API used to make + such call. 
If not present in a ``ModifyPushConfig`` call, + its value will not be changed. ``GetSubscription`` calls + will always return a valid version, even if the subscription + was created without this attribute. + + The only supported values for the ``x-goog-version`` + attribute are: + + - ``v1beta1``: uses the push format defined in the v1beta1 + Pub/Sub API. + - ``v1`` or ``v1beta2``: uses the push format defined in + the v1 Pub/Sub API. + + For example: + + .. raw:: html + +
+                <pre>attributes { "x-goog-version": "v1" } </pre>
+ oidc_token (google.pubsub_v1.types.PushConfig.OidcToken): + If specified, Pub/Sub will generate and attach an OIDC JWT + token as an ``Authorization`` header in the HTTP request for + every pushed message. + + This field is a member of `oneof`_ ``authentication_method``. + """ + + class OidcToken(proto.Message): + r"""Contains information needed for generating an `OpenID Connect + token `__. + + Attributes: + service_account_email (str): + `Service account + email `__ + to be used for generating the OIDC token. The caller (for + CreateSubscription, UpdateSubscription, and ModifyPushConfig + RPCs) must have the iam.serviceAccounts.actAs permission for + the service account. + audience (str): + Audience to be used when generating OIDC + token. The audience claim identifies the + recipients that the JWT is intended for. The + audience value is a single case-sensitive + string. Having multiple values (array) for the + audience field is not supported. More info about + the OIDC JWT token audience here: + https://tools.ietf.org/html/rfc7519#section-4.1.3 + Note: if not specified, the Push endpoint URL + will be used. + """ + + service_account_email = proto.Field( + proto.STRING, + number=1, + ) + audience = proto.Field( + proto.STRING, + number=2, + ) + + push_endpoint = proto.Field( + proto.STRING, + number=1, + ) + attributes = proto.MapField( + proto.STRING, + proto.STRING, + number=2, + ) + oidc_token = proto.Field( + proto.MESSAGE, + number=3, + oneof='authentication_method', + message=OidcToken, + ) + + +class BigQueryConfig(proto.Message): + r"""Configuration for a BigQuery subscription. + + Attributes: + table (str): + The name of the table to which to write data, + of the form {projectId}:{datasetId}.{tableId} + use_topic_schema (bool): + When true, use the topic's schema as the + columns to write to in BigQuery, if it exists. 
+ write_metadata (bool): + When true, write the subscription name, message_id, + publish_time, attributes, and ordering_key to additional + columns in the table. The subscription name, message_id, and + publish_time fields are put in their own columns while all + other message properties (other than data) are written to a + JSON object in the attributes column. + drop_unknown_fields (bool): + When true and use_topic_schema is true, any fields that are + a part of the topic schema that are not part of the BigQuery + table schema are dropped when writing to BigQuery. + Otherwise, the schemas must be kept in sync and any messages + with extra fields are not written and remain in the + subscription's backlog. + state (google.pubsub_v1.types.BigQueryConfig.State): + Output only. An output-only field that + indicates whether or not the subscription can + receive messages. + """ + class State(proto.Enum): + r"""Possible states for a BigQuery subscription.""" + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PERMISSION_DENIED = 2 + NOT_FOUND = 3 + SCHEMA_MISMATCH = 4 + + table = proto.Field( + proto.STRING, + number=1, + ) + use_topic_schema = proto.Field( + proto.BOOL, + number=2, + ) + write_metadata = proto.Field( + proto.BOOL, + number=3, + ) + drop_unknown_fields = proto.Field( + proto.BOOL, + number=4, + ) + state = proto.Field( + proto.ENUM, + number=5, + enum=State, + ) + + +class ReceivedMessage(proto.Message): + r"""A message and its corresponding acknowledgment ID. + + Attributes: + ack_id (str): + This ID can be used to acknowledge the + received message. + message (google.pubsub_v1.types.PubsubMessage): + The message. + delivery_attempt (int): + The approximate number of times that Cloud Pub/Sub has + attempted to deliver the associated message to a subscriber. + + More precisely, this is 1 + (number of NACKs) + (number of + ack_deadline exceeds) for this message. + + A NACK is any call to ModifyAckDeadline with a 0 deadline. 
+ An ack_deadline exceeds event is whenever a message is not + acknowledged within ack_deadline. Note that ack_deadline is + initially Subscription.ackDeadlineSeconds, but may get + extended automatically by the client library. + + Upon the first delivery of a given message, + ``delivery_attempt`` will have a value of 1. The value is + calculated at best effort and is approximate. + + If a DeadLetterPolicy is not set on the subscription, this + will be 0. + """ + + ack_id = proto.Field( + proto.STRING, + number=1, + ) + message = proto.Field( + proto.MESSAGE, + number=2, + message='PubsubMessage', + ) + delivery_attempt = proto.Field( + proto.INT32, + number=3, + ) + + +class GetSubscriptionRequest(proto.Message): + r"""Request for the GetSubscription method. + + Attributes: + subscription (str): + Required. The name of the subscription to get. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateSubscriptionRequest(proto.Message): + r"""Request for the UpdateSubscription method. + + Attributes: + subscription (google.pubsub_v1.types.Subscription): + Required. The updated subscription object. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the + provided subscription to update. Must be + specified and non-empty. + """ + + subscription = proto.Field( + proto.MESSAGE, + number=1, + message='Subscription', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListSubscriptionsRequest(proto.Message): + r"""Request for the ``ListSubscriptions`` method. + + Attributes: + project (str): + Required. The name of the project in which to list + subscriptions. Format is ``projects/{project-id}``. + page_size (int): + Maximum number of subscriptions to return. 
+ page_token (str): + The value returned by the last + ``ListSubscriptionsResponse``; indicates that this is a + continuation of a prior ``ListSubscriptions`` call, and that + the system should return the next page of data. + """ + + project = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class ListSubscriptionsResponse(proto.Message): + r"""Response for the ``ListSubscriptions`` method. + + Attributes: + subscriptions (Sequence[google.pubsub_v1.types.Subscription]): + The subscriptions that match the request. + next_page_token (str): + If not empty, indicates that there may be more subscriptions + that match the request; this value should be passed in a new + ``ListSubscriptionsRequest`` to get more subscriptions. + """ + + @property + def raw_page(self): + return self + + subscriptions = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Subscription', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSubscriptionRequest(proto.Message): + r"""Request for the DeleteSubscription method. + + Attributes: + subscription (str): + Required. The subscription to delete. Format is + ``projects/{project}/subscriptions/{sub}``. + """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + + +class ModifyPushConfigRequest(proto.Message): + r"""Request for the ModifyPushConfig method. + + Attributes: + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + push_config (google.pubsub_v1.types.PushConfig): + Required. The push configuration for future deliveries. + + An empty ``pushConfig`` indicates that the Pub/Sub system + should stop pushing messages from the given subscription and + allow messages to be pulled and acknowledged - effectively + pausing the subscription if ``Pull`` or ``StreamingPull`` is + not called. 
+ """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + push_config = proto.Field( + proto.MESSAGE, + number=2, + message='PushConfig', + ) + + +class PullRequest(proto.Message): + r"""Request for the ``Pull`` method. + + Attributes: + subscription (str): + Required. The subscription from which messages should be + pulled. Format is + ``projects/{project}/subscriptions/{sub}``. + return_immediately (bool): + Optional. If this field set to true, the system will respond + immediately even if it there are no messages available to + return in the ``Pull`` response. Otherwise, the system may + wait (for a bounded amount of time) until at least one + message is available, rather than returning no messages. + Warning: setting this field to ``true`` is discouraged + because it adversely impacts the performance of ``Pull`` + operations. We recommend that users do not set this field. + max_messages (int): + Required. The maximum number of messages to + return for this request. Must be a positive + integer. The Pub/Sub system may return fewer + than the number specified. + """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + return_immediately = proto.Field( + proto.BOOL, + number=2, + ) + max_messages = proto.Field( + proto.INT32, + number=3, + ) + + +class PullResponse(proto.Message): + r"""Response for the ``Pull`` method. + + Attributes: + received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): + Received Pub/Sub messages. The list will be empty if there + are no more messages available in the backlog. For JSON, the + response can be entirely empty. The Pub/Sub system may + return fewer than the ``maxMessages`` requested even if + there are more messages available in the backlog. + """ + + received_messages = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ReceivedMessage', + ) + + +class ModifyAckDeadlineRequest(proto.Message): + r"""Request for the ModifyAckDeadline method. 
+ + Attributes: + subscription (str): + Required. The name of the subscription. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + Required. List of acknowledgment IDs. + ack_deadline_seconds (int): + Required. The new ack deadline with respect to the time this + request was sent to the Pub/Sub system. For example, if the + value is 10, the new ack deadline will expire 10 seconds + after the ``ModifyAckDeadline`` call was made. Specifying + zero might immediately make the message available for + delivery to another subscriber client. This typically + results in an increase in the rate of message redeliveries + (that is, duplicates). The minimum deadline you can specify + is 0 seconds. The maximum deadline you can specify is 600 + seconds (10 minutes). + """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + ack_ids = proto.RepeatedField( + proto.STRING, + number=4, + ) + ack_deadline_seconds = proto.Field( + proto.INT32, + number=3, + ) + + +class AcknowledgeRequest(proto.Message): + r"""Request for the Acknowledge method. + + Attributes: + subscription (str): + Required. The subscription whose message is being + acknowledged. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + Required. The acknowledgment ID for the messages being + acknowledged that was returned by the Pub/Sub system in the + ``Pull`` response. Must not be empty. + """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class StreamingPullRequest(proto.Message): + r"""Request for the ``StreamingPull`` streaming RPC method. This request + is used to establish the initial stream as well as to stream + acknowledgements and ack deadline modifications from the client to + the server. + + Attributes: + subscription (str): + Required. The subscription for which to initialize the new + stream. 
This must be provided in the first request on the + stream, and must not be set in subsequent requests from + client to server. Format is + ``projects/{project}/subscriptions/{sub}``. + ack_ids (Sequence[str]): + List of acknowledgement IDs for acknowledging previously + received messages (received on this stream or a different + stream). If an ack ID has expired, the corresponding message + may be redelivered later. Acknowledging a message more than + once will not result in an error. If the acknowledgement ID + is malformed, the stream will be aborted with status + ``INVALID_ARGUMENT``. + modify_deadline_seconds (Sequence[int]): + The list of new ack deadlines for the IDs listed in + ``modify_deadline_ack_ids``. The size of this list must be + the same as the size of ``modify_deadline_ack_ids``. If it + differs the stream will be aborted with + ``INVALID_ARGUMENT``. Each element in this list is applied + to the element in the same position in + ``modify_deadline_ack_ids``. The new ack deadline is with + respect to the time this request was sent to the Pub/Sub + system. Must be >= 0. For example, if the value is 10, the + new ack deadline will expire 10 seconds after this request + is received. If the value is 0, the message is immediately + made available for another streaming or non-streaming pull + request. If the value is < 0 (an error), the stream will be + aborted with status ``INVALID_ARGUMENT``. + modify_deadline_ack_ids (Sequence[str]): + List of acknowledgement IDs whose deadline will be modified + based on the corresponding element in + ``modify_deadline_seconds``. This field can be used to + indicate that more time is needed to process a message by + the subscriber, or to make the message available for + redelivery if the processing was interrupted. + stream_ack_deadline_seconds (int): + Required. The ack deadline to use for the + stream. 
This must be provided in the first + request on the stream, but it can also be + updated on subsequent requests from client to + server. The minimum deadline you can specify is + 10 seconds. The maximum deadline you can specify + is 600 seconds (10 minutes). + client_id (str): + A unique identifier that is used to distinguish client + instances from each other. Only needs to be provided on the + initial request. When a stream disconnects and reconnects + for the same stream, the client_id should be set to the same + value so that state associated with the old stream can be + transferred to the new stream. The same client_id should not + be used for different client instances. + max_outstanding_messages (int): + Flow control settings for the maximum number of outstanding + messages. When there are ``max_outstanding_messages`` or + more currently sent to the streaming pull client that have + not yet been acked or nacked, the server stops sending more + messages. The sending of messages resumes once the number of + outstanding messages is less than this value. If the value + is <= 0, there is no limit to the number of outstanding + messages. This property can only be set on the initial + StreamingPullRequest. If it is set on a subsequent request, + the stream will be aborted with status ``INVALID_ARGUMENT``. + max_outstanding_bytes (int): + Flow control settings for the maximum number of outstanding + bytes. When there are ``max_outstanding_bytes`` or more + worth of messages currently sent to the streaming pull + client that have not yet been acked or nacked, the server + will stop sending more messages. The sending of messages + resumes once the number of outstanding bytes is less than + this value. If the value is <= 0, there is no limit to the + number of outstanding bytes. This property can only be set + on the initial StreamingPullRequest. If it is set on a + subsequent request, the stream will be aborted with status + ``INVALID_ARGUMENT``. 
+ """ + + subscription = proto.Field( + proto.STRING, + number=1, + ) + ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + modify_deadline_seconds = proto.RepeatedField( + proto.INT32, + number=3, + ) + modify_deadline_ack_ids = proto.RepeatedField( + proto.STRING, + number=4, + ) + stream_ack_deadline_seconds = proto.Field( + proto.INT32, + number=5, + ) + client_id = proto.Field( + proto.STRING, + number=6, + ) + max_outstanding_messages = proto.Field( + proto.INT64, + number=7, + ) + max_outstanding_bytes = proto.Field( + proto.INT64, + number=8, + ) + + +class StreamingPullResponse(proto.Message): + r"""Response for the ``StreamingPull`` method. This response is used to + stream messages from the server to the client. + + Attributes: + received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): + Received Pub/Sub messages. This will not be + empty. + acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): + This field will only be set if + ``enable_exactly_once_delivery`` is set to ``true``. + modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): + This field will only be set if + ``enable_exactly_once_delivery`` is set to ``true``. + subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): + Properties associated with this subscription. + """ + + class AcknowledgeConfirmation(proto.Message): + r"""Acknowledgement IDs sent in one or more previous requests to + acknowledge a previously received message. + + Attributes: + ack_ids (Sequence[str]): + Successfully processed acknowledgement IDs. + invalid_ack_ids (Sequence[str]): + List of acknowledgement IDs that were + malformed or whose acknowledgement deadline has + expired. + unordered_ack_ids (Sequence[str]): + List of acknowledgement IDs that were out of + order. 
+ """ + + ack_ids = proto.RepeatedField( + proto.STRING, + number=1, + ) + invalid_ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + unordered_ack_ids = proto.RepeatedField( + proto.STRING, + number=3, + ) + + class ModifyAckDeadlineConfirmation(proto.Message): + r"""Acknowledgement IDs sent in one or more previous requests to + modify the deadline for a specific message. + + Attributes: + ack_ids (Sequence[str]): + Successfully processed acknowledgement IDs. + invalid_ack_ids (Sequence[str]): + List of acknowledgement IDs that were + malformed or whose acknowledgement deadline has + expired. + """ + + ack_ids = proto.RepeatedField( + proto.STRING, + number=1, + ) + invalid_ack_ids = proto.RepeatedField( + proto.STRING, + number=2, + ) + + class SubscriptionProperties(proto.Message): + r"""Subscription properties sent as part of the response. + + Attributes: + exactly_once_delivery_enabled (bool): + True iff exactly once delivery is enabled for + this subscription. + message_ordering_enabled (bool): + True iff message ordering is enabled for this + subscription. + """ + + exactly_once_delivery_enabled = proto.Field( + proto.BOOL, + number=1, + ) + message_ordering_enabled = proto.Field( + proto.BOOL, + number=2, + ) + + received_messages = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='ReceivedMessage', + ) + acknowledge_confirmation = proto.Field( + proto.MESSAGE, + number=5, + message=AcknowledgeConfirmation, + ) + modify_ack_deadline_confirmation = proto.Field( + proto.MESSAGE, + number=3, + message=ModifyAckDeadlineConfirmation, + ) + subscription_properties = proto.Field( + proto.MESSAGE, + number=4, + message=SubscriptionProperties, + ) + + +class CreateSnapshotRequest(proto.Message): + r"""Request for the ``CreateSnapshot`` method. + + Attributes: + name (str): + Required. User-provided name for this snapshot. 
If the name + is not provided in the request, the server will assign a + random name for this snapshot on the same project as the + subscription. Note that for REST API requests, you must + specify a name. See the resource name rules. Format is + ``projects/{project}/snapshots/{snap}``. + subscription (str): + Required. The subscription whose backlog the snapshot + retains. Specifically, the created snapshot is guaranteed to + retain: (a) The existing backlog on the subscription. More + precisely, this is defined as the messages in the + subscription's backlog that are unacknowledged upon the + successful completion of the ``CreateSnapshot`` request; as + well as: (b) Any messages published to the subscription's + topic following the successful completion of the + CreateSnapshot request. Format is + ``projects/{project}/subscriptions/{sub}``. + labels (Mapping[str, str]): + See + Creating and managing labels. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + subscription = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class UpdateSnapshotRequest(proto.Message): + r"""Request for the UpdateSnapshot method. + + Attributes: + snapshot (google.pubsub_v1.types.Snapshot): + Required. The updated snapshot object. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the + provided snapshot to update. Must be specified + and non-empty. + """ + + snapshot = proto.Field( + proto.MESSAGE, + number=1, + message='Snapshot', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class Snapshot(proto.Message): + r"""A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages in + an existing subscription to the state captured by a snapshot. 
class GetSnapshotRequest(proto.Message):
    r"""Request for the GetSnapshot method.

    Attributes:
        snapshot (str):
            Required. The name of the snapshot to get. Format is
            ``projects/{project}/snapshots/{snap}``.
    """

    snapshot = proto.Field(proto.STRING, number=1)


class ListSnapshotsRequest(proto.Message):
    r"""Request for the ``ListSnapshots`` method.

    Attributes:
        project (str):
            Required. The name of the project in which to list
            snapshots. Format is ``projects/{project-id}``.
        page_size (int):
            Maximum number of snapshots to return.
        page_token (str):
            The value returned by the last ``ListSnapshotsResponse``;
            indicates that this is a continuation of a prior
            ``ListSnapshots`` call, and that the system should return
            the next page of data.
    """

    project = proto.Field(proto.STRING, number=1)
    page_size = proto.Field(proto.INT32, number=2)
    page_token = proto.Field(proto.STRING, number=3)


class ListSnapshotsResponse(proto.Message):
    r"""Response for the ``ListSnapshots`` method.

    Attributes:
        snapshots (Sequence[google.pubsub_v1.types.Snapshot]):
            The resulting snapshots.
        next_page_token (str):
            If not empty, indicates that there may be more snapshots
            that match the request; this value should be passed in a
            new ``ListSnapshotsRequest``.
    """

    @property
    def raw_page(self):
        # Hook used by the GAPIC pagination machinery.
        return self

    snapshots = proto.RepeatedField(proto.MESSAGE, number=1, message='Snapshot')
    next_page_token = proto.Field(proto.STRING, number=2)


class DeleteSnapshotRequest(proto.Message):
    r"""Request for the ``DeleteSnapshot`` method.

    Attributes:
        snapshot (str):
            Required. The name of the snapshot to delete. Format is
            ``projects/{project}/snapshots/{snap}``.
    """

    snapshot = proto.Field(proto.STRING, number=1)


class SeekRequest(proto.Message):
    r"""Request for the ``Seek`` method.

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same time.
    Setting any member of the oneof automatically clears all other
    members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        subscription (str):
            Required. The subscription to affect.
        time (google.protobuf.timestamp_pb2.Timestamp):
            The time to seek to. Messages retained in the subscription
            that were published before this time are marked as
            acknowledged, and messages retained in the subscription
            that were published after this time are marked as
            unacknowledged. Note that this operation affects only
            those messages retained in the subscription (configured by
            the combination of ``message_retention_duration`` and
            ``retain_acked_messages``). For example, if ``time``
            corresponds to a point before the message retention window
            (or to a point before the system's notion of the
            subscription creation time), only retained messages will
            be marked as unacknowledged, and already-expunged messages
            will not be restored.

            This field is a member of `oneof`_ ``target``.
        snapshot (str):
            The snapshot to seek to. The snapshot's topic must be the
            same as that of the provided subscription. Format is
            ``projects/{project}/snapshots/{snap}``.

            This field is a member of `oneof`_ ``target``.
    """

    subscription = proto.Field(proto.STRING, number=1)
    time = proto.Field(
        proto.MESSAGE, number=2, oneof='target', message=timestamp_pb2.Timestamp)
    snapshot = proto.Field(proto.STRING, number=3, oneof='target')


class SeekResponse(proto.Message):
    r"""Response for the ``Seek`` method (this response is empty)."""


__all__ = tuple(sorted(__protobuf__.manifest))
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto  # type: ignore


__protobuf__ = proto.module(
    package='google.pubsub.v1',
    manifest={
        'SchemaView',
        'Encoding',
        'Schema',
        'CreateSchemaRequest',
        'GetSchemaRequest',
        'ListSchemasRequest',
        'ListSchemasResponse',
        'DeleteSchemaRequest',
        'ValidateSchemaRequest',
        'ValidateSchemaResponse',
        'ValidateMessageRequest',
        'ValidateMessageResponse',
    },
)


class SchemaView(proto.Enum):
    r"""View of Schema object fields to be returned by GetSchema and
    ListSchemas.
    """
    SCHEMA_VIEW_UNSPECIFIED = 0
    BASIC = 1
    FULL = 2


class Encoding(proto.Enum):
    r"""Possible encoding types for messages."""
    ENCODING_UNSPECIFIED = 0
    JSON = 1
    BINARY = 2


class Schema(proto.Message):
    r"""A schema resource.

    Attributes:
        name (str):
            Required. Name of the schema. Format is
            ``projects/{project}/schemas/{schema}``.
        type_ (google.pubsub_v1.types.Schema.Type):
            The type of the schema definition.
        definition (str):
            The definition of the schema. This should contain a string
            representing the full definition of the schema that is a
            valid schema definition of the type specified in ``type``.
    """

    class Type(proto.Enum):
        r"""Possible schema definition types."""
        TYPE_UNSPECIFIED = 0
        PROTOCOL_BUFFER = 1
        AVRO = 2

    name = proto.Field(proto.STRING, number=1)
    type_ = proto.Field(proto.ENUM, number=2, enum=Type)
    definition = proto.Field(proto.STRING, number=3)


class CreateSchemaRequest(proto.Message):
    r"""Request for the CreateSchema method.

    Attributes:
        parent (str):
            Required. The name of the project in which to create the
            schema. Format is ``projects/{project-id}``.
        schema (google.pubsub_v1.types.Schema):
            Required. The schema object to create.

            This schema's ``name`` parameter is ignored. The schema
            object returned by CreateSchema will have a ``name`` made
            using the given ``parent`` and ``schema_id``.
        schema_id (str):
            The ID to use for the schema, which will become the final
            component of the schema's resource name.

            See
            https://cloud.google.com/pubsub/docs/admin#resource_names
            for resource name constraints.
    """

    parent = proto.Field(proto.STRING, number=1)
    schema = proto.Field(proto.MESSAGE, number=2, message='Schema')
    schema_id = proto.Field(proto.STRING, number=3)


class GetSchemaRequest(proto.Message):
    r"""Request for the GetSchema method.

    Attributes:
        name (str):
            Required. The name of the schema to get. Format is
            ``projects/{project}/schemas/{schema}``.
        view (google.pubsub_v1.types.SchemaView):
            The set of fields to return in the response. If not set,
            returns a Schema with ``name`` and ``type``, but not
            ``definition``. Set to ``FULL`` to retrieve all fields.
    """

    name = proto.Field(proto.STRING, number=1)
    view = proto.Field(proto.ENUM, number=2, enum='SchemaView')


class ListSchemasRequest(proto.Message):
    r"""Request for the ``ListSchemas`` method.

    Attributes:
        parent (str):
            Required. The name of the project in which to list
            schemas. Format is ``projects/{project-id}``.
        view (google.pubsub_v1.types.SchemaView):
            The set of Schema fields to return in the response. If not
            set, returns Schemas with ``name`` and ``type``, but not
            ``definition``. Set to ``FULL`` to retrieve all fields.
        page_size (int):
            Maximum number of schemas to return.
        page_token (str):
            The value returned by the last ``ListSchemasResponse``;
            indicates that this is a continuation of a prior
            ``ListSchemas`` call, and that the system should return
            the next page of data.
    """

    parent = proto.Field(proto.STRING, number=1)
    view = proto.Field(proto.ENUM, number=2, enum='SchemaView')
    page_size = proto.Field(proto.INT32, number=3)
    page_token = proto.Field(proto.STRING, number=4)


class ListSchemasResponse(proto.Message):
    r"""Response for the ``ListSchemas`` method.

    Attributes:
        schemas (Sequence[google.pubsub_v1.types.Schema]):
            The resulting schemas.
        next_page_token (str):
            If not empty, indicates that there may be more schemas
            that match the request; this value should be passed in a
            new ``ListSchemasRequest``.
    """

    @property
    def raw_page(self):
        # Hook used by the GAPIC pagination machinery.
        return self

    schemas = proto.RepeatedField(proto.MESSAGE, number=1, message='Schema')
    next_page_token = proto.Field(proto.STRING, number=2)


class DeleteSchemaRequest(proto.Message):
    r"""Request for the ``DeleteSchema`` method.

    Attributes:
        name (str):
            Required. Name of the schema to delete. Format is
            ``projects/{project}/schemas/{schema}``.
    """

    name = proto.Field(proto.STRING, number=1)


class ValidateSchemaRequest(proto.Message):
    r"""Request for the ``ValidateSchema`` method.

    Attributes:
        parent (str):
            Required. The name of the project in which to validate
            schemas. Format is ``projects/{project-id}``.
        schema (google.pubsub_v1.types.Schema):
            Required. The schema object to validate.
    """

    parent = proto.Field(proto.STRING, number=1)
    schema = proto.Field(proto.MESSAGE, number=2, message='Schema')


class ValidateSchemaResponse(proto.Message):
    r"""Response for the ``ValidateSchema`` method. Empty for now."""


class ValidateMessageRequest(proto.Message):
    r"""Request for the ``ValidateMessage`` method.

    This message has `oneof`_ fields (mutually exclusive fields).
    For each oneof, at most one member field can be set at the same time.
    Setting any member of the oneof automatically clears all other
    members.

    .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields

    Attributes:
        parent (str):
            Required. The name of the project in which to validate
            schemas. Format is ``projects/{project-id}``.
        name (str):
            Name of the schema against which to validate.

            Format is ``projects/{project}/schemas/{schema}``.

            This field is a member of `oneof`_ ``schema_spec``.
        schema (google.pubsub_v1.types.Schema):
            Ad-hoc schema against which to validate.

            This field is a member of `oneof`_ ``schema_spec``.
        message (bytes):
            Message to validate against the provided ``schema_spec``.
        encoding (google.pubsub_v1.types.Encoding):
            The encoding expected for messages.
    """

    parent = proto.Field(proto.STRING, number=1)
    name = proto.Field(proto.STRING, number=2, oneof='schema_spec')
    schema = proto.Field(
        proto.MESSAGE, number=3, oneof='schema_spec', message='Schema')
    message = proto.Field(proto.BYTES, number=4)
    encoding = proto.Field(proto.ENUM, number=5, enum='Encoding')


class ValidateMessageResponse(proto.Message):
    r"""Response for the ``ValidateMessage`` method. Empty for now."""


__all__ = tuple(sorted(__protobuf__.manifest))
import os
import pathlib
import shutil
import subprocess
import sys


import nox  # type: ignore

# Python versions the unit tests and type checks run under.
ALL_PYTHON = [
    "3.6",
    "3.7",
    "3.8",
    "3.9",
    "3.10",
]

CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()

LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# Ask setup.py for the distribution name so the lower-bound checker
# targets the right package.
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")

BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"]
DEFAULT_PYTHON_VERSION = "3.9"

# BUG FIX: the original list omitted the comma after "check_lower_bounds",
# so Python's implicit string-literal concatenation silently merged it with
# "docs" into the single bogus entry "check_lower_boundsdocs", dropping both
# sessions from the list.
# NOTE(review): nox reads default sessions from ``nox.options.sessions``;
# assigning ``nox.sessions`` appears to be a no-op — confirm before relying
# on this list, but it is kept as-is to preserve the existing interface.
nox.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
    "blacken",
    "lint",
    "lint_setup_py",
]

@nox.session(python=ALL_PYTHON)
def unit(session):
    """Run the unit test suite."""

    session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"')
    session.install('-e', '.')

    session.run(
        'py.test',
        '--quiet',
        '--cov=google/pubsub_v1/',
        '--cov=tests/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        os.path.join('tests', 'unit', ''.join(session.posargs))
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
    """Run the final coverage report.

    This outputs the coverage report aggregating coverage from the unit
    test runs (not system test runs), and then erases coverage data.
    """
    session.install("coverage", "pytest-cov")
    session.run("coverage", "report", "--show-missing", "--fail-under=100")

    session.run("coverage", "erase")


@nox.session(python=ALL_PYTHON)
def mypy(session):
    """Run the type checker."""
    session.install('mypy', 'types-pkg_resources')
    session.install('.')
    session.run(
        'mypy',
        '--explicit-package-bases',
        'google',
    )


@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )


@nox.session
def check_lower_bounds(session):
    """Check lower bounds in setup.py are reflected in constraints file"""
    session.install('google-cloud-testutils')
    session.install('.')

    session.run(
        'lower-bound-checker',
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )

@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
    """Build the docs for this library."""

    session.install("-e", ".")
    session.install("sphinx==4.0.1", "alabaster", "recommonmark")

    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
    session.run(
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
    """Run linters.

    Returns a failure if the linters find linting errors or sufficiently
    serious code quality issues.
    """
    session.install("flake8", BLACK_VERSION)
    session.run(
        "black",
        "--check",
        *BLACK_PATHS,
    )
    session.run("flake8", "google", "tests", "samples")


@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
    """Run black. Format code to uniform standard."""
    session.install(BLACK_VERSION)
    session.run(
        "black",
        *BLACK_PATHS,
    )


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
    """Verify that setup.py is valid (including RST check)."""
    session.install("docutils", "pygments")
    session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
# Generated snippet for CreateTopic (async) — illustrative only; it may
# require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_CreateTopic_async]
from google import pubsub_v1


async def sample_create_topic():
    """Create a topic with the async Publisher client and print it."""
    client = pubsub_v1.PublisherAsyncClient()

    # Build the request inline and send it.
    response = await client.create_topic(
        request=pubsub_v1.Topic(name="name_value"),
    )

    # Handle the response
    print(response)

# [END pubsub_v1_generated_Publisher_CreateTopic_async]
# Generated snippet for CreateTopic (sync) — illustrative only; it may
# require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_CreateTopic_sync]
from google import pubsub_v1


def sample_create_topic():
    """Create a topic with the Publisher client and print it."""
    client = pubsub_v1.PublisherClient()

    # Build the request inline and send it.
    response = client.create_topic(
        request=pubsub_v1.Topic(name="name_value"),
    )

    # Handle the response
    print(response)

# [END pubsub_v1_generated_Publisher_CreateTopic_sync]
# Generated snippet for DeleteTopic (async) — illustrative only; it may
# require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_DeleteTopic_async]
from google import pubsub_v1


async def sample_delete_topic():
    """Delete a topic with the async Publisher client (no response body)."""
    client = pubsub_v1.PublisherAsyncClient()

    # Build the request inline and send it; DeleteTopic returns nothing.
    await client.delete_topic(
        request=pubsub_v1.DeleteTopicRequest(topic="topic_value"),
    )


# [END pubsub_v1_generated_Publisher_DeleteTopic_async]
# Generated snippet for DeleteTopic (sync) — illustrative only; it may
# require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_DeleteTopic_sync]
from google import pubsub_v1


def sample_delete_topic():
    """Delete a topic with the Publisher client (no response body)."""
    client = pubsub_v1.PublisherClient()

    # Build the request inline and send it; DeleteTopic returns nothing.
    client.delete_topic(
        request=pubsub_v1.DeleteTopicRequest(topic="topic_value"),
    )


# [END pubsub_v1_generated_Publisher_DeleteTopic_sync]
# Generated snippet for DetachSubscription (async) — illustrative only; it
# may require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_DetachSubscription_async]
from google import pubsub_v1


async def sample_detach_subscription():
    """Detach a subscription with the async Publisher client and print the result."""
    client = pubsub_v1.PublisherAsyncClient()

    # Build the request inline and send it.
    response = await client.detach_subscription(
        request=pubsub_v1.DetachSubscriptionRequest(subscription="subscription_value"),
    )

    # Handle the response
    print(response)

# [END pubsub_v1_generated_Publisher_DetachSubscription_async]
# Generated snippet for DetachSubscription (sync) — illustrative only; it
# may require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_DetachSubscription_sync]
from google import pubsub_v1


def sample_detach_subscription():
    """Detach a subscription with the Publisher client and print the result."""
    client = pubsub_v1.PublisherClient()

    # Build the request inline and send it.
    response = client.detach_subscription(
        request=pubsub_v1.DetachSubscriptionRequest(subscription="subscription_value"),
    )

    # Handle the response
    print(response)

# [END pubsub_v1_generated_Publisher_DetachSubscription_sync]
# Generated snippet for GetTopic (async) — illustrative only; it may
# require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_GetTopic_async]
from google import pubsub_v1


async def sample_get_topic():
    """Fetch a topic with the async Publisher client and print it."""
    client = pubsub_v1.PublisherAsyncClient()

    # Build the request inline and send it.
    response = await client.get_topic(
        request=pubsub_v1.GetTopicRequest(topic="topic_value"),
    )

    # Handle the response
    print(response)

# [END pubsub_v1_generated_Publisher_GetTopic_async]
# Generated snippet for GetTopic (sync) — illustrative only; it may
# require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_GetTopic_sync]
from google import pubsub_v1


def sample_get_topic():
    """Fetch a topic with the Publisher client and print it."""
    client = pubsub_v1.PublisherClient()

    # Build the request inline and send it.
    response = client.get_topic(
        request=pubsub_v1.GetTopicRequest(topic="topic_value"),
    )

    # Handle the response
    print(response)

# [END pubsub_v1_generated_Publisher_GetTopic_sync]
# Generated snippet for ListTopicSnapshots (async) — illustrative only; it
# may require modification to work in your environment.
# Install the latest published package with:
#   python3 -m pip install google-cloud-pubsub


# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_async]
from google import pubsub_v1


async def sample_list_topic_snapshots():
    """List a topic's snapshots with the async Publisher client, printing each page item."""
    client = pubsub_v1.PublisherAsyncClient()

    # Build the request inline; the call returns an async pager.
    page_result = client.list_topic_snapshots(
        request=pubsub_v1.ListTopicSnapshotsRequest(topic="topic_value"),
    )

    # Handle the response
    async for response in page_result:
        print(response)

# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_async]
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] +from google import pubsub_v1 + + +def sample_list_topic_snapshots(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSnapshotsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py new file mode 100644 index 000000000000..59b35194b459 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListTopicSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] +from google import pubsub_v1 + + +async def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py new file mode 100644 index 000000000000..d7dffa0e2d3b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopicSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] +from google import pubsub_v1 + + +def sample_list_topic_subscriptions(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicSubscriptionsRequest( + topic="topic_value", + ) + + # Make the request + page_result = client.list_topic_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py new file mode 100644 index 000000000000..0d0f10a9896f --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopics_async] +from google import pubsub_v1 + + +async def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopics_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py new file mode 100644 index 000000000000..cffdd77a49af --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListTopics +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_ListTopics_sync] +from google import pubsub_v1 + + +def sample_list_topics(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.ListTopicsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_topics(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Publisher_ListTopics_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py new file mode 100644 index 000000000000..98bfc618e64e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Publish +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_Publish_async] +from google import pubsub_v1 + + +async def sample_publish(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = await client.publish(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_Publish_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py new file mode 100644 index 000000000000..650440a78436 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for Publish +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_Publish_sync] +from google import pubsub_v1 + + +def sample_publish(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + request = pubsub_v1.PublishRequest( + topic="topic_value", + ) + + # Make the request + response = client.publish(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_Publish_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py new file mode 100644 index 000000000000..473144d07caa --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_UpdateTopic_async] +from google import pubsub_v1 + + +async def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherAsyncClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = await client.update_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_UpdateTopic_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py new file mode 100644 index 000000000000..5a9838c2acfd --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateTopic +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Publisher_UpdateTopic_sync] +from google import pubsub_v1 + + +def sample_update_topic(): + # Create a client + client = pubsub_v1.PublisherClient() + + # Initialize request argument(s) + topic = pubsub_v1.Topic() + topic.name = "name_value" + + request = pubsub_v1.UpdateTopicRequest( + topic=topic, + ) + + # Make the request + response = client.update_topic(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Publisher_UpdateTopic_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py new file mode 100644 index 000000000000..9f979072528b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CreateSchema_async] +from google import pubsub_v1 + + +async def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.create_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py new file mode 100644 index 000000000000..798194050d75 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CreateSchema_sync] +from google import pubsub_v1 + + +def sample_create_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CreateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.create_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py new file mode 100644 index 000000000000..6d5e8f7345ca --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchema_async] +from google import pubsub_v1 + + +async def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + await client.delete_schema(request=request) + + +# [END pubsub_v1_generated_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py new file mode 100644 index 000000000000..2e516b97aa77 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchema_sync] +from google import pubsub_v1 + + +def sample_delete_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRequest( + name="name_value", + ) + + # Make the request + client.delete_schema(request=request) + + +# [END pubsub_v1_generated_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py new file mode 100644 index 000000000000..10db352c3abf --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_GetSchema_async] +from google import pubsub_v1 + + +async def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = await client.get_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py new file mode 100644 index 000000000000..7d3cdf6d1d44 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_GetSchema_sync] +from google import pubsub_v1 + + +def sample_get_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSchemaRequest( + name="name_value", + ) + + # Make the request + response = client.get_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py new file mode 100644 index 000000000000..a1c9be6ee6e2 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ListSchemas_async] +from google import pubsub_v1 + + +async def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_SchemaService_ListSchemas_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py new file mode 100644 index 000000000000..4604da242389 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemas +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ListSchemas_sync] +from google import pubsub_v1 + + +def sample_list_schemas(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemasRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_schemas(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_SchemaService_ListSchemas_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py new file mode 100644 index 000000000000..94a699e53de0 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateMessage_async] +from google import pubsub_v1 + + +async def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = await client.validate_message(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateMessage_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py new file mode 100644 index 000000000000..26e32efa13d8 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateMessage +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateMessage_sync] +from google import pubsub_v1 + + +def sample_validate_message(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ValidateMessageRequest( + name="name_value", + parent="parent_value", + ) + + # Make the request + response = client.validate_message(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateMessage_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py new file mode 100644 index 000000000000..86647c7bd65f --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateSchema_async] +from google import pubsub_v1 + + +async def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = await client.validate_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py new file mode 100644 index 000000000000..102fb75edc03 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ValidateSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ValidateSchema_sync] +from google import pubsub_v1 + + +def sample_validate_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.ValidateSchemaRequest( + parent="parent_value", + schema=schema, + ) + + # Make the request + response = client.validate_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_ValidateSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py new file mode 100644 index 000000000000..8f87241a1f7e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Acknowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Acknowledge_async] +from google import pubsub_v1 + + +async def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + await client.acknowledge(request=request) + + +# [END pubsub_v1_generated_Subscriber_Acknowledge_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py new file mode 100644 index 000000000000..a56c55a33c73 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Acknowledge +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Acknowledge_sync] +from google import pubsub_v1 + + +def sample_acknowledge(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.AcknowledgeRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ) + + # Make the request + client.acknowledge(request=request) + + +# [END pubsub_v1_generated_Subscriber_Acknowledge_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py new file mode 100644 index 000000000000..6e2d4538771d --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSnapshot_async] +from google import pubsub_v1 + + +async def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = await client.create_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py new file mode 100644 index 000000000000..b6145acb903f --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSnapshot_sync] +from google import pubsub_v1 + + +def sample_create_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Make the request + response = client.create_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py new file mode 100644 index 000000000000..4c63c47cd594 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSubscription_async] +from google import pubsub_v1 + + +async def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = await client.create_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py new file mode 100644 index 000000000000..6e37969f1f8c --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_CreateSubscription_sync] +from google import pubsub_v1 + + +def sample_create_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.Subscription( + name="name_value", + topic="topic_value", + ) + + # Make the request + response = client.create_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_CreateSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py new file mode 100644 index 000000000000..26e2c7aa783e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_async] +from google import pubsub_v1 + + +async def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + await client.delete_snapshot(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py new file mode 100644 index 000000000000..f2538ddb0ca3 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] +from google import pubsub_v1 + + +def sample_delete_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + client.delete_snapshot(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py new file mode 100644 index 000000000000..f310d24b2869 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSubscription_async] +from google import pubsub_v1 + + +async def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + await client.delete_subscription(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py new file mode 100644 index 000000000000..c601dd6633b2 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_DeleteSubscription_sync] +from google import pubsub_v1 + + +def sample_delete_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + client.delete_subscription(request=request) + + +# [END pubsub_v1_generated_Subscriber_DeleteSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py new file mode 100644 index 000000000000..3a56e4fbbe0e --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSnapshot_async] +from google import pubsub_v1 + + +async def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = await client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py new file mode 100644 index 000000000000..3a6cd24ca08a --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSnapshot_sync] +from google import pubsub_v1 + + +def sample_get_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Make the request + response = client.get_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py new file mode 100644 index 000000000000..7ad71832664f --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSubscription_async] +from google import pubsub_v1 + + +async def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.get_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py new file mode 100644 index 000000000000..d883e085dfbe --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for GetSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_GetSubscription_sync] +from google import pubsub_v1 + + +def sample_get_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.get_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_GetSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py new file mode 100644 index 000000000000..edc7976a1293 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ListSnapshots_async] +from google import pubsub_v1 + + +async def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_Subscriber_ListSnapshots_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py new file mode 100644 index 000000000000..e67ca2a39633 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListSnapshots +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ListSnapshots_sync] +from google import pubsub_v1 + + +def sample_list_snapshots(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSnapshotsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_snapshots(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Subscriber_ListSnapshots_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py new file mode 100644 index 000000000000..01c45577a7f2 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ListSubscriptions_async] +from google import pubsub_v1 + + +async def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_Subscriber_ListSubscriptions_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py new file mode 100644 index 000000000000..272b0408d9cf --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for ListSubscriptions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ListSubscriptions_sync] +from google import pubsub_v1 + + +def sample_list_subscriptions(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSubscriptionsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_subscriptions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_Subscriber_ListSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py new file mode 100644 index 000000000000..b85c2033ff49 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for ModifyAckDeadline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] +from google import pubsub_v1 + + +async def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + await client.modify_ack_deadline(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py new file mode 100644 index 000000000000..ac0805db437c --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyAckDeadline +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] +from google import pubsub_v1 + + +def sample_modify_ack_deadline(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyAckDeadlineRequest( + subscription="subscription_value", + ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_deadline_seconds=2066, + ) + + # Make the request + client.modify_ack_deadline(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py new file mode 100644 index 000000000000..662823a1d682 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +# Generated code. DO NOT EDIT! +# +# Snippet for ModifyPushConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_async] +from google import pubsub_v1 + + +async def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + await client.modify_push_config(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py new file mode 100644 index 000000000000..a7499941c486 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for ModifyPushConfig +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] +from google import pubsub_v1 + + +def sample_modify_push_config(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Make the request + client.modify_push_config(request=request) + + +# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py new file mode 100644 index 000000000000..113f3ddfcffe --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Pull +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Pull_async] +from google import pubsub_v1 + + +async def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = await client.pull(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Pull_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py new file mode 100644 index 000000000000..abb47bfa16b2 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Pull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Pull_sync] +from google import pubsub_v1 + + +def sample_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.PullRequest( + subscription="subscription_value", + max_messages=1277, + ) + + # Make the request + response = client.pull(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Pull_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py new file mode 100644 index 000000000000..062c69409de9 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Seek +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Seek_async] +from google import pubsub_v1 + + +async def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = await client.seek(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Seek_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py new file mode 100644 index 000000000000..f28570e7c779 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Seek +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_Seek_sync] +from google import pubsub_v1 + + +def sample_seek(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.SeekRequest( + subscription="subscription_value", + ) + + # Make the request + response = client.seek(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_Seek_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py new file mode 100644 index 000000000000..64c1e37483c0 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingPull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_StreamingPull_async] +from google import pubsub_v1 + + +async def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.streaming_pull(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + +# [END pubsub_v1_generated_Subscriber_StreamingPull_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py new file mode 100644 index 000000000000..0aa02fa40cdc --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamingPull +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_StreamingPull_sync] +from google import pubsub_v1 + + +def sample_streaming_pull(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.StreamingPullRequest( + subscription="subscription_value", + stream_ack_deadline_seconds=2813, + ) + + # This method expects an iterator which contains + # 'pubsub_v1.StreamingPullRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.streaming_pull(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + +# [END pubsub_v1_generated_Subscriber_StreamingPull_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py new file mode 100644 index 000000000000..f07bca1f5d76 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_async] +from google import pubsub_v1 + + +async def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = await client.update_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py new file mode 100644 index 000000000000..7afe32ec2259 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSnapshot +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] +from google import pubsub_v1 + + +def sample_update_snapshot(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + request = pubsub_v1.UpdateSnapshotRequest( + ) + + # Make the request + response = client.update_snapshot(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py new file mode 100644 index 000000000000..5a0410ec36cb --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSubscription_async] +from google import pubsub_v1 + + +async def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberAsyncClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = await client.update_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py new file mode 100644 index 000000000000..75d6e8a95299 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateSubscription +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_Subscriber_UpdateSubscription_sync] +from google import pubsub_v1 + + +def sample_update_subscription(): + # Create a client + client = pubsub_v1.SubscriberClient() + + # Initialize request argument(s) + subscription = pubsub_v1.Subscription() + subscription.name = "name_value" + subscription.topic = "topic_value" + + request = pubsub_v1.UpdateSubscriptionRequest( + subscription=subscription, + ) + + # Make the request + response = client.update_subscription(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_Subscriber_UpdateSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json new file mode 100644 index 000000000000..0f5906e95364 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json @@ -0,0 +1,5019 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.pubsub.v1", + "version": 
"v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-pubsub" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.create_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "pubsub_v1_generated_publisher_create_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_create_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.create_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": 
"Publisher" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "pubsub_v1_generated_publisher_create_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_create_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.delete_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for 
DeleteTopic", + "file": "pubsub_v1_generated_publisher_delete_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_delete_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.delete_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_delete_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.detach_subscription", + "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" + }, + "description": "Sample for DetachSubscription", + "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.detach_subscription", + "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", + 
"service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" + }, + "description": "Sample for DetachSubscription", + "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.get_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "pubsub_v1_generated_publisher_get_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_get_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.get_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "pubsub_v1_generated_publisher_get_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_get_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_snapshots", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", + "shortName": "list_topic_snapshots" + }, + "description": "Sample for ListTopicSnapshots", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_snapshots", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", + "shortName": "list_topic_snapshots" + }, + "description": "Sample for ListTopicSnapshots", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Publisher", + 
"shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", + "shortName": "list_topic_subscriptions" + }, + "description": "Sample for ListTopicSubscriptions", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", + "shortName": "list_topic_subscriptions" + }, + "description": "Sample for ListTopicSubscriptions", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topics", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "pubsub_v1_generated_publisher_list_topics_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topics", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "pubsub_v1_generated_publisher_list_topics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topics_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.publish", + "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "Publish" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" + }, + "description": "Sample for Publish", + "file": "pubsub_v1_generated_publisher_publish_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_Publish_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_publish_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.publish", + "method": { + "fullName": 
"google.pubsub.v1.Publisher.Publish", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "Publish" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" + }, + "description": "Sample for Publish", + "file": "pubsub_v1_generated_publisher_publish_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_publish_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.update_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" 
+ }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "pubsub_v1_generated_publisher_update_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_update_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.update_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "pubsub_v1_generated_publisher_update_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", + "segments": [ + { + "end": 47, + "start": 27, + "type": "FULL" + }, + { + "end": 47, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 41, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 44, + "start": 42, + "type": "REQUEST_EXECUTION" + }, + { + "end": 48, + "start": 45, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_update_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "pubsub_v1_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": 
"google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": 
"timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + 
}, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { 
+ "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + 
"shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_message", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" + }, + "description": "Sample for ValidateMessage", + "file": "pubsub_v1_generated_schema_service_validate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_message_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_message", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" + }, + "description": "Sample for ValidateMessage", + "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + 
"start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" + }, + "description": "Sample for ValidateSchema", + "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" + }, + "description": "Sample for ValidateSchema", + "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": 
"google.pubsub_v1.SubscriberAsyncClient.acknowledge", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "acknowledge" + }, + "description": "Sample for Acknowledge", + "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.acknowledge", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": 
"retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "acknowledge" + }, + "description": "Sample for Acknowledge", + "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "pubsub_v1_generated_subscriber_create_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"pubsub_v1_generated_Subscriber_CreateSnapshot_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + 
}, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" + }, + "description": "Sample for CreateSubscription", + "file": "pubsub_v1_generated_subscriber_create_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" + }, + "description": "Sample for CreateSubscription", + "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_snapshot", + "method": { + "fullName": 
"google.pubsub.v1.Subscriber.DeleteSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } 
+ ], + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + 
"type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, 
+ "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" + }, + "description": "Sample for GetSubscription", + "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" + }, + "description": "Sample for GetSubscription", + "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_snapshots", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": 
"google.pubsub_v1.SubscriberClient.list_snapshots", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + 
"name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", + "shortName": "list_subscriptions" + }, + 
"description": "Sample for ListSubscriptions", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_ack_deadline", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_ack_deadline" + }, + "description": "Sample for ModifyAckDeadline", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": 
"FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 40, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 41, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_ack_deadline", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "Sequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_ack_deadline" + }, + "description": "Sample for ModifyAckDeadline", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 40, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 41, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_push_config", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_push_config" + }, + "description": "Sample for ModifyPushConfig", + "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_push_config", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", + 
"service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_push_config" + }, + "description": "Sample for ModifyPushConfig", + "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Pull" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + 
"type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" + }, + "description": "Sample for Pull", + "file": "pubsub_v1_generated_subscriber_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_pull_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Pull" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" + }, + "description": "Sample for Pull", + "file": "pubsub_v1_generated_subscriber_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"pubsub_v1_generated_Subscriber_Pull_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_pull_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.seek", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Seek" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" + }, + "description": "Sample for Seek", + "file": "pubsub_v1_generated_subscriber_seek_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_seek_async.py" + }, + { + "canonical": true, 
+ "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.seek", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Seek" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" + }, + "description": "Sample for Seek", + "file": "pubsub_v1_generated_subscriber_seek_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_seek_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.streaming_pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" + }, + "description": "Sample for StreamingPull", + "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.streaming_pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" + }, + "description": "Sample for StreamingPull", + "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"pubsub_v1_generated_Subscriber_StreamingPull_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" + }, + "description": "Sample for UpdateSnapshot", + "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "start": 41, + "type": 
"RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" + }, + "description": "Sample for UpdateSnapshot", + "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", + "segments": [ + { + "end": 43, + "start": 27, + "type": "FULL" + }, + { + "end": 43, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 37, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 40, + "start": 38, + "type": "REQUEST_EXECUTION" + }, + { + "end": 44, + "start": 41, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", + "service": { + "fullName": 
"google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" + }, + "description": "Sample for UpdateSubscription", + "file": "pubsub_v1_generated_subscriber_update_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" + }, + "description": "Sample for UpdateSubscription", + "file": "pubsub_v1_generated_subscriber_update_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_sync.py" + } + ] +} diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py new file mode 100644 index 000000000000..d1bbcedf98af --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py @@ -0,0 +1,209 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
#
"""Fix up user code that calls the generated pubsub client with flattened
(positional/keyword) arguments, rewriting each call into the canonical
single ``request`` dict form.

Walks every ``*.py`` file under an input directory, transforms recognized
client method calls with LibCST, and writes the transformed copies into an
output directory.  The input tree is never modified in place.
"""
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)


def partition(
    predicate: Callable[[Any], bool],
    iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
    """A stable, out-of-place partition.

    Returns ``(trues, falses)``: first the items for which *predicate* is
    truthy, then those for which it is falsy, each preserving input order.
    """
    results = ([], [])

    for i in iterator:
        results[int(predicate(i))].append(i)

    # Returns trueList, falseList
    return results[1], results[0]


class pubsubCallTransformer(cst.CSTTransformer):
    """LibCST transformer that un-flattens pubsub API method calls.

    ``METHOD_TO_PARAMS`` maps each API method name to the ordered tuple of
    its flattened parameter names; positional arguments are matched against
    that order.  ``CTRL_PARAMS`` are client-side control parameters that stay
    as keyword arguments rather than being folded into ``request``.
    """
    # Fixed annotations: these are variable-length tuples, so the correct
    # spelling is Tuple[str, ...] — Tuple[str] would mean a 1-tuple.
    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
        'acknowledge': ('subscription', 'ack_ids', ),
        'create_schema': ('parent', 'schema', 'schema_id', ),
        'create_snapshot': ('name', 'subscription', 'labels', ),
        'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ),
        'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ),
        'delete_schema': ('name', ),
        'delete_snapshot': ('snapshot', ),
        'delete_subscription': ('subscription', ),
        'delete_topic': ('topic', ),
        'detach_subscription': ('subscription', ),
        'get_schema': ('name', 'view', ),
        'get_snapshot': ('snapshot', ),
        'get_subscription': ('subscription', ),
        'get_topic': ('topic', ),
        'list_schemas': ('parent', 'view', 'page_size', 'page_token', ),
        'list_snapshots': ('project', 'page_size', 'page_token', ),
        'list_subscriptions': ('project', 'page_size', 'page_token', ),
        'list_topics': ('project', 'page_size', 'page_token', ),
        'list_topic_snapshots': ('topic', 'page_size', 'page_token', ),
        'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ),
        'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ),
        'modify_push_config': ('subscription', 'push_config', ),
        'publish': ('topic', 'messages', ),
        'pull': ('subscription', 'max_messages', 'return_immediately', ),
        'seek': ('subscription', 'time', 'snapshot', ),
        'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ),
        'update_snapshot': ('snapshot', 'update_mask', ),
        'update_subscription': ('subscription', 'update_mask', ),
        'update_topic': ('topic', 'update_mask', ),
        'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ),
        'validate_schema': ('parent', 'schema', ),
        'get_iam_policy': ('resource', 'options', ),
        'set_iam_policy': ('resource', 'policy', ),
        'test_iam_permissions': ('resource', 'permissions', ),
    }

    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
        """Rewrite a recognized client call to ``method(request={...}, ...)``.

        Calls that are not API methods, or that already pass ``request``,
        are returned unchanged.
        """
        try:
            key = original.func.attr.value
            kword_params = self.METHOD_TO_PARAMS[key]
        except (AttributeError, KeyError):
            # Either not a method from the API or too convoluted to be sure.
            return updated

        # If the existing code is valid, keyword args come after positional args.
        # Therefore, all positional args must map to the first parameters.
        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
        if any(k.keyword.value == "request" for k in kwargs):
            # We've already fixed this file, don't fix it again.
            return updated

        # Separate control parameters (retry/timeout/metadata) from request
        # fields; control params keep their keyword form.
        kwargs, ctrl_kwargs = partition(
            lambda a: a.keyword.value not in self.CTRL_PARAMS,
            kwargs
        )

        # Positional args beyond the flattened request fields must be the
        # control params, passed positionally — re-key them by position.
        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))

        request_arg = cst.Arg(
            value=cst.Dict([
                cst.DictElement(
                    cst.SimpleString("'{}'".format(name)),
                    cst.Element(value=arg.value)
                )
                # Note: the args + kwargs looks silly, but keep in mind that
                # the control parameters had to be stripped out, and that
                # those could have been passed positionally or by keyword.
                for name, arg in zip(kword_params, args + kwargs)]),
            keyword=cst.Name("request")
        )

        return updated.with_changes(
            args=[request_arg] + ctrl_kwargs
        )


def fix_files(
    in_dir: pathlib.Path,
    out_dir: pathlib.Path,
    *,
    transformer=pubsubCallTransformer(),
):
    """Duplicate the input dir to the output dir, fixing file method calls.

    Preconditions:
    * in_dir is a real directory
    * out_dir is a real, empty directory

    NOTE: the default transformer instance is created once at definition
    time and shared across calls; it holds no per-run state, so this is safe.
    """
    pyfile_gen = (
        pathlib.Path(os.path.join(root, f))
        for root, _, files in os.walk(in_dir)
        for f in files if os.path.splitext(f)[1] == ".py"
    )

    for fpath in pyfile_gen:
        with open(fpath, 'r') as f:
            src = f.read()

        # Parse the code and insert method call fixes.
        tree = cst.parse_module(src)
        updated = tree.visit(transformer)

        # Create the path and directory structure for the new file.
        updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
        updated_path.parent.mkdir(parents=True, exist_ok=True)

        # Generate the updated source file at the corresponding path.
        with open(updated_path, 'w') as f:
            f.write(updated.code)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="""Fix up source that uses the pubsub client library.

The existing sources are NOT overwritten but are copied to output_dir with changes made.

Note: This tool operates at a best-effort level at converting positional
      parameters in client method calls to keyword based parameters.
      Cases where it WILL FAIL include
      A) * or ** expansion in a method call.
      B) Calls via function or method alias (includes free function calls)
      C) Indirect or dispatched calls (e.g. the method is looked up dynamically)

      These all constitute false negatives. The tool will also detect false
      positives when an API method shares a name with another method.
""")
    parser.add_argument(
        '-d',
        '--input-directory',
        required=True,
        dest='input_dir',
        help='the input directory to walk for python files to fix up',
    )
    parser.add_argument(
        '-o',
        '--output-directory',
        required=True,
        dest='output_dir',
        help='the directory to output files fixed via un-flattening',
    )
    args = parser.parse_args()
    input_dir = pathlib.Path(args.input_dir)
    output_dir = pathlib.Path(args.output_dir)
    # Validate both directories up front so we fail before touching anything.
    if not input_dir.is_dir():
        print(
            f"input directory '{input_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if not output_dir.is_dir():
        print(
            f"output directory '{output_dir}' does not exist or is not a directory",
            file=sys.stderr,
        )
        sys.exit(-1)

    if os.listdir(output_dir):
        print(
            f"output directory '{output_dir}' is not empty",
            file=sys.stderr,
        )
        sys.exit(-1)

    fix_files(input_dir, output_dir)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-pubsub', + author="Google LLC", + author_email="googleapis-packages@google.com", + url="https://github.com/googleapis/python-google-cloud-pubsub", + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google',), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'googleapis-common-protos >= 1.55.0, <2.0.0dev', + 'proto-plus >= 1.19.7', + 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', + ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 
000000000000..231bc125017b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 000000000000..231bc125017b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 000000000000..231bc125017b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py new file mode 100644 index 000000000000..231bc125017b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py new file mode 100644 index 000000000000..e03b3e95a1ef --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -0,0 +1,4157 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf 
import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.publisher import PublisherAsyncClient +from google.pubsub_v1.services.publisher import PublisherClient +from google.pubsub_v1.services.publisher import pagers +from google.pubsub_v1.services.publisher import transports +from google.pubsub_v1.types import pubsub +from google.pubsub_v1.types import schema +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert PublisherClient._get_default_mtls_endpoint(None) is None + assert PublisherClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert PublisherClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert PublisherClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert PublisherClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PublisherClient, "grpc"), + (PublisherAsyncClient, "grpc_asyncio"), +]) +def test_publisher_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with 
mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.PublisherGrpcTransport, "grpc"), + (transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_publisher_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (PublisherClient, "grpc"), + (PublisherAsyncClient, "grpc_asyncio"), +]) +def test_publisher_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + 
assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + + +def test_publisher_client_get_transport_class(): + transport = PublisherClient.get_transport_class() + available_transports = [ + transports.PublisherGrpcTransport, + ] + assert transport in available_transports + + transport = PublisherClient.get_transport_class("grpc") + assert transport == transports.PublisherGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient)) +@mock.patch.object(PublisherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherAsyncClient)) +def test_publisher_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(PublisherClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(PublisherClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", "true"), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (PublisherClient, transports.PublisherGrpcTransport, "grpc", "false"), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient)) +@mock.patch.object(PublisherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_publisher_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. 
+ + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class", [ + PublisherClient, PublisherAsyncClient +]) +@mock.patch.object(PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient)) +@mock.patch.object(PublisherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherAsyncClient)) +def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_publisher_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_publisher_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +def test_publisher_client_client_options_from_dict(): + with mock.patch('google.pubsub_v1.services.publisher.transports.PublisherGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = PublisherClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_publisher_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.Topic, + dict, +]) +def test_create_topic(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name='name_value', + kms_key_name='kms_key_name_value', + satisfies_pzs=True, + ) + response = client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Topic() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.satisfies_pzs is True + + +def test_create_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + client.create_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Topic() + +@pytest.mark.asyncio +async def test_create_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.Topic): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic( + name='name_value', + kms_key_name='kms_key_name_value', + satisfies_pzs=True, + )) + response = await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Topic() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_create_topic_async_from_dict(): + await test_create_topic_async(request_type=dict) + + +def test_create_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Topic() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + call.return_value = pubsub.Topic() + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_topic_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.Topic() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_create_topic_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_topic( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_create_topic_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_topic( + pubsub.Topic(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_create_topic_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_topic( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_topic_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_topic( + pubsub.Topic(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.UpdateTopicRequest, + dict, +]) +def test_update_topic(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name='name_value', + kms_key_name='kms_key_name_value', + satisfies_pzs=True, + ) + response = client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateTopicRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.satisfies_pzs is True + + +def test_update_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_topic), + '__call__') as call: + client.update_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateTopicRequest() + +@pytest.mark.asyncio +async def test_update_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.UpdateTopicRequest): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic( + name='name_value', + kms_key_name='kms_key_name_value', + satisfies_pzs=True, + )) + response = await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateTopicRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_update_topic_async_from_dict(): + await test_update_topic_async(request_type=dict) + + +def test_update_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + + request.topic.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_topic), + '__call__') as call: + call.return_value = pubsub.Topic() + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_topic_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateTopicRequest() + + request.topic.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_topic), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + pubsub.PublishRequest, + dict, +]) +def test_publish(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse( + message_ids=['message_ids_value'], + ) + response = client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PublishRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ['message_ids_value'] + + +def test_publish_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + client.publish() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PublishRequest() + +@pytest.mark.asyncio +async def test_publish_async(transport: str = 'grpc_asyncio', request_type=pubsub.PublishRequest): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PublishResponse( + message_ids=['message_ids_value'], + )) + response = await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PublishRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ['message_ids_value'] + + +@pytest.mark.asyncio +async def test_publish_async_from_dict(): + await test_publish_async(request_type=dict) + + +def test_publish_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PublishRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + call.return_value = pubsub.PublishResponse() + client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_publish_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PublishRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PublishResponse()) + await client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +def test_publish_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.publish( + topic='topic_value', + messages=[pubsub.PubsubMessage(data=b'data_blob')], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + arg = args[0].messages + mock_val = [pubsub.PubsubMessage(data=b'data_blob')] + assert arg == mock_val + + +def test_publish_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.publish( + pubsub.PublishRequest(), + topic='topic_value', + messages=[pubsub.PubsubMessage(data=b'data_blob')], + ) + +@pytest.mark.asyncio +async def test_publish_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.publish), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PublishResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PublishResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.publish( + topic='topic_value', + messages=[pubsub.PubsubMessage(data=b'data_blob')], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + arg = args[0].messages + mock_val = [pubsub.PubsubMessage(data=b'data_blob')] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_publish_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.publish( + pubsub.PublishRequest(), + topic='topic_value', + messages=[pubsub.PubsubMessage(data=b'data_blob')], + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.GetTopicRequest, + dict, +]) +def test_get_topic(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic( + name='name_value', + kms_key_name='kms_key_name_value', + satisfies_pzs=True, + ) + response = client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetTopicRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.satisfies_pzs is True + + +def test_get_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + client.get_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetTopicRequest() + +@pytest.mark.asyncio +async def test_get_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.GetTopicRequest): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic( + name='name_value', + kms_key_name='kms_key_name_value', + satisfies_pzs=True, + )) + response = await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetTopicRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + assert response.name == 'name_value' + assert response.kms_key_name == 'kms_key_name_value' + assert response.satisfies_pzs is True + + +@pytest.mark.asyncio +async def test_get_topic_async_from_dict(): + await test_get_topic_async(request_type=dict) + + +def test_get_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + call.return_value = pubsub.Topic() + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_topic_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetTopicRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +def test_get_topic_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_topic( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + + +def test_get_topic_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + pubsub.GetTopicRequest(), + topic='topic_value', + ) + +@pytest.mark.asyncio +async def test_get_topic_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_topic( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_topic_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_topic( + pubsub.GetTopicRequest(), + topic='topic_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.ListTopicsRequest, + dict, +]) +def test_list_topics(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_topics_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + client.list_topics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicsRequest() + +@pytest.mark.asyncio +async def test_list_topics_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListTopicsRequest): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_topics_async_from_dict(): + await test_list_topics_async(request_type=dict) + + +def test_list_topics_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.ListTopicsRequest() + + request.project = 'project_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + call.return_value = pubsub.ListTopicsResponse() + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project=project_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_topics_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicsRequest() + + request.project = 'project_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicsResponse()) + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project=project_value', + ) in kw['metadata'] + + +def test_list_topics_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListTopicsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topics( + project='project_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = 'project_value' + assert arg == mock_val + + +def test_list_topics_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + pubsub.ListTopicsRequest(), + project='project_value', + ) + +@pytest.mark.asyncio +async def test_list_topics_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topics( + project='project_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        arg = args[0].project
+        mock_val = 'project_value'
+        assert arg == mock_val
+
+@pytest.mark.asyncio
+async def test_list_topics_flattened_error_async():
+    client = PublisherAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_topics(
+            pubsub.ListTopicsRequest(),
+            project='project_value',
+        )
+
+
+def test_list_topics_pager(transport_name: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_topics),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            pubsub.ListTopicsResponse(
+                topics=[
+                    pubsub.Topic(),
+                    pubsub.Topic(),
+                    pubsub.Topic(),
+                ],
+                next_page_token='abc',
+            ),
+            pubsub.ListTopicsResponse(
+                topics=[],
+                next_page_token='def',
+            ),
+            pubsub.ListTopicsResponse(
+                topics=[
+                    pubsub.Topic(),
+                ],
+                next_page_token='ghi',
+            ),
+            pubsub.ListTopicsResponse(
+                topics=[
+                    pubsub.Topic(),
+                    pubsub.Topic(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('project', ''),
+            )),
+        )
+        pager = client.list_topics(request={})
+
+        assert pager._metadata == metadata
+
+        results = list(pager)
+        assert len(results) == 6
+        assert all(isinstance(i, pubsub.Topic)
+                   for i in results)
+def test_list_topics_pages(transport_name: str = "grpc"):
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport_name,
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.list_topics), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token='abc', + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token='def', + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topics(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_topics_async_pager(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token='abc', + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token='def', + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topics(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Topic) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_topics_async_pages(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topics), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token='abc', + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token='def', + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topics(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + pubsub.ListTopicSubscriptionsRequest, + dict, +]) +def test_list_topic_subscriptions(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=['subscriptions_value'], + next_page_token='next_page_token_value', + ) + response = client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSubscriptionsRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, pagers.ListTopicSubscriptionsPager)
+    assert response.subscriptions == ['subscriptions_value']
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_topic_subscriptions_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_topic_subscriptions),
+            '__call__') as call:
+        client.list_topic_subscriptions()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == pubsub.ListTopicSubscriptionsRequest()
+
+@pytest.mark.asyncio
+async def test_list_topic_subscriptions_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListTopicSubscriptionsRequest):
+    client = PublisherAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_topic_subscriptions),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSubscriptionsResponse(
+            subscriptions=['subscriptions_value'],
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_topic_subscriptions(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == pubsub.ListTopicSubscriptionsRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) + assert response.subscriptions == ['subscriptions_value'] + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_from_dict(): + await test_list_topic_subscriptions_async(request_type=dict) + + +def test_list_topic_subscriptions_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSubscriptionsRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + call.return_value = pubsub.ListTopicSubscriptionsResponse() + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSubscriptionsRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSubscriptionsResponse()) + await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +def test_list_topic_subscriptions_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_subscriptions( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + + +def test_list_topic_subscriptions_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), + topic='topic_value', + ) + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSubscriptionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSubscriptionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topic_subscriptions( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), + topic='topic_value', + ) + + +def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('topic', ''), + )), + ) + pager = client.list_topic_subscriptions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topic_subscriptions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pager(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topic_subscriptions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_pages(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topic_subscriptions(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + pubsub.ListTopicSnapshotsRequest, + dict, +]) +def test_list_topic_snapshots(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=['snapshots_value'], + next_page_token='next_page_token_value', + ) + response = client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSnapshotsRequest() + + # Establish that the response is the type that we expect. 
+    assert isinstance(response, pagers.ListTopicSnapshotsPager)
+    assert response.snapshots == ['snapshots_value']
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_topic_snapshots_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_topic_snapshots),
+            '__call__') as call:
+        client.list_topic_snapshots()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == pubsub.ListTopicSnapshotsRequest()
+
+@pytest.mark.asyncio
+async def test_list_topic_snapshots_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListTopicSnapshotsRequest):
+    client = PublisherAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_topic_snapshots),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSnapshotsResponse(
+            snapshots=['snapshots_value'],
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_topic_snapshots(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == pubsub.ListTopicSnapshotsRequest()
+
+    # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) + assert response.snapshots == ['snapshots_value'] + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_from_dict(): + await test_list_topic_snapshots_async(request_type=dict) + + +def test_list_topic_snapshots_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + call.return_value = pubsub.ListTopicSnapshotsResponse() + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListTopicSnapshotsRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSnapshotsResponse()) + await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +def test_list_topic_snapshots_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListTopicSnapshotsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_topic_snapshots( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + + +def test_list_topic_snapshots_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), + topic='topic_value', + ) + +@pytest.mark.asyncio +async def test_list_topic_snapshots_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListTopicSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSnapshotsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_topic_snapshots( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_topic_snapshots_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), + topic='topic_value', + ) + + +def test_list_topic_snapshots_pager(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('topic', ''), + )), + ) + pager = client.list_topic_snapshots(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) + for i in results) +def test_list_topic_snapshots_pages(transport_name: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = list(client.list_topic_snapshots(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pager(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_topic_snapshots(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, str) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_pages(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token='abc', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token='ghi', + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_topic_snapshots(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + pubsub.DeleteTopicRequest, + dict, +]) +def test_delete_topic(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteTopicRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_topic_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + client.delete_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteTopicRequest() + +@pytest.mark.asyncio +async def test_delete_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.DeleteTopicRequest): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteTopicRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_topic_async_from_dict(): + await test_delete_topic_async(request_type=dict) + + +def test_delete_topic_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + call.return_value = None + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_topic_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteTopicRequest() + + request.topic = 'topic_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'topic=topic_value', + ) in kw['metadata'] + + +def test_delete_topic_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_topic( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + + +def test_delete_topic_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_topic( + pubsub.DeleteTopicRequest(), + topic='topic_value', + ) + +@pytest.mark.asyncio +async def test_delete_topic_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_topic), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_topic( + topic='topic_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_topic_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_topic( + pubsub.DeleteTopicRequest(), + topic='topic_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.DetachSubscriptionRequest, + dict, +]) +def test_detach_subscription(request_type, transport: str = 'grpc'): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.DetachSubscriptionResponse( + ) + response = client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DetachSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.detach_subscription),
+            '__call__') as call:
+        client.detach_subscription()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == pubsub.DetachSubscriptionRequest()
+
+@pytest.mark.asyncio
+async def test_detach_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.DetachSubscriptionRequest):
+    client = PublisherAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.detach_subscription),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.DetachSubscriptionResponse(
+        ))
+        response = await client.detach_subscription(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == pubsub.DetachSubscriptionRequest()
+
+        # Establish that the response is the type that we expect.
+        assert isinstance(response, pubsub.DetachSubscriptionResponse)
+
+
+@pytest.mark.asyncio
+async def test_detach_subscription_async_from_dict():
+    await test_detach_subscription_async(request_type=dict)
+
+
+def test_detach_subscription_field_headers():
+    client = PublisherClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = pubsub.DetachSubscriptionRequest()
+
+    request.subscription = 'subscription_value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.detach_subscription), + '__call__') as call: + call.return_value = pubsub.DetachSubscriptionResponse() + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_detach_subscription_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DetachSubscriptionRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.DetachSubscriptionResponse()) + await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PublisherClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PublisherGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = PublisherClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PublisherGrpcTransport, + ) + +def test_publisher_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PublisherTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_publisher_base_transport(): + # Instantiate the base transport. + with mock.patch('google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.PublisherTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'create_topic', + 'update_topic', + 'publish', + 'get_topic', + 'list_topics', + 'list_topic_subscriptions', + 'list_topic_snapshots', + 'delete_topic', + 'detach_subscription', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_publisher_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + quota_project_id="octopus", + ) + + +def test_publisher_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.PublisherTransport() + adc.assert_called_once() + + +def test_publisher_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + PublisherClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + ], +) +def test_publisher_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.PublisherGrpcTransport, grpc_helpers), + (transports.PublisherGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_publisher_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport]) +def test_publisher_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_publisher_host_no_port(transport_name): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_publisher_host_with_port(transport_name): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'pubsub.googleapis.com:8000' + ) + +def test_publisher_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.PublisherGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_publisher_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.PublisherGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport]) +def test_publisher_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport 
constructor. +@pytest.mark.parametrize("transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport]) +def test_publisher_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_schema_path(): + project = "squid" + schema = "clam" + expected = "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) + actual = PublisherClient.schema_path(project, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "whelk", + "schema": "octopus", + } + path = PublisherClient.schema_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_schema_path(path) + assert expected == actual + +def test_subscription_path(): + project = "oyster" + subscription = "nudibranch" + expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + actual = PublisherClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "cuttlefish", + "subscription": "mussel", + } + path = PublisherClient.subscription_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_subscription_path(path) + assert expected == actual + +def test_topic_path(): + project = "winkle" + topic = "nautilus" + expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + actual = PublisherClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "scallop", + "topic": "abalone", + } + path = PublisherClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_topic_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = PublisherClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = PublisherClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = PublisherClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = PublisherClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = PublisherClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = PublisherClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = PublisherClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = PublisherClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = PublisherClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = PublisherClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = PublisherClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.PublisherTransport, '_prep_wrapped_messages') as prep: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.PublisherTransport, '_prep_wrapped_messages') as prep: + transport_class = PublisherClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_set_iam_policy(transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (PublisherClient, transports.PublisherGrpcTransport), + (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py new file mode 100644 index 000000000000..bb9a7086f2d6 --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -0,0 +1,3123 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.pubsub_v1.services.schema_service import SchemaServiceAsyncClient +from google.pubsub_v1.services.schema_service import SchemaServiceClient +from google.pubsub_v1.services.schema_service import pagers +from google.pubsub_v1.services.schema_service import transports +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SchemaServiceClient._get_default_mtls_endpoint(None) is None + assert SchemaServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), +]) +def test_schema_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SchemaServiceGrpcTransport, "grpc"), + (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_schema_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SchemaServiceClient, "grpc"), + (SchemaServiceAsyncClient, "grpc_asyncio"), +]) +def test_schema_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + + +def test_schema_service_client_get_transport_class(): + transport = SchemaServiceClient.get_transport_class() + available_transports = [ + transports.SchemaServiceGrpcTransport, + ] + assert transport in available_transports + + transport = SchemaServiceClient.get_transport_class("grpc") + assert transport == transports.SchemaServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) 
+@mock.patch.object(SchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceClient)) +@mock.patch.object(SchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceAsyncClient)) +def test_schema_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SchemaServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SchemaServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "true"), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "false"), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(SchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceClient)) +@mock.patch.object(SchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_schema_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class", [ + SchemaServiceClient, SchemaServiceAsyncClient +]) +@mock.patch.object(SchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceClient)) +@mock.patch.object(SchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceAsyncClient)) +def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_schema_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", grpc_helpers), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_schema_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +def test_schema_service_client_client_options_from_dict(): + with mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = SchemaServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", grpc_helpers), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_schema_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + gp_schema.CreateSchemaRequest, + dict, +]) +def test_create_schema(request_type, transport: str = 'grpc'): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema( + name='name_value', + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition='definition_value', + ) + response = client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CreateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == 'name_value' + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == 'definition_value' + + +def test_create_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + client.create_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CreateSchemaRequest() + +@pytest.mark.asyncio +async def test_create_schema_async(transport: str = 'grpc_asyncio', request_type=gp_schema.CreateSchemaRequest): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema( + name='name_value', + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition='definition_value', + )) + response = await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CreateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == 'name_value' + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == 'definition_value' + + +@pytest.mark.asyncio +async def test_create_schema_async_from_dict(): + await test_create_schema_async(request_type=dict) + + +def test_create_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CreateSchemaRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + call.return_value = gp_schema.Schema() + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CreateSchemaRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_schema( + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + schema_id='schema_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name='name_value') + assert arg == mock_val + arg = args[0].schema_id + mock_val = 'schema_id_value' + assert arg == mock_val + + +def test_create_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schema( + gp_schema.CreateSchemaRequest(), + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + schema_id='schema_id_value', + ) + +@pytest.mark.asyncio +async def test_create_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_schema( + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + schema_id='schema_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name='name_value') + assert arg == mock_val + arg = args[0].schema_id + mock_val = 'schema_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_schema( + gp_schema.CreateSchemaRequest(), + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + schema_id='schema_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + schema.GetSchemaRequest, + dict, +]) +def test_get_schema(request_type, transport: str = 'grpc'): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema( + name='name_value', + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition='definition_value', + ) + response = client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.GetSchemaRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == 'name_value' + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == 'definition_value' + + +def test_get_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + client.get_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.GetSchemaRequest() + +@pytest.mark.asyncio +async def test_get_schema_async(transport: str = 'grpc_asyncio', request_type=schema.GetSchemaRequest): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema( + name='name_value', + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition='definition_value', + )) + response = await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.GetSchemaRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == 'name_value' + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == 'definition_value' + + +@pytest.mark.asyncio +async def test_get_schema_async_from_dict(): + await test_get_schema_async(request_type=dict) + + +def test_get_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.GetSchemaRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + call.return_value = schema.Schema() + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.GetSchemaRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_schema( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_schema( + schema.GetSchemaRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_schema( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_schema( + schema.GetSchemaRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + schema.ListSchemasRequest, + dict, +]) +def test_list_schemas(request_type, transport: str = 'grpc'): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemasResponse( + next_page_token='next_page_token_value', + ) + response = client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemasRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_schemas_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + client.list_schemas() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemasRequest() + +@pytest.mark.asyncio +async def test_list_schemas_async(transport: str = 'grpc_asyncio', request_type=schema.ListSchemasRequest): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schema.ListSchemasResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemasRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_schemas_async_from_dict(): + await test_list_schemas_async(request_type=dict) + + +def test_list_schemas_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = schema.ListSchemasRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + call.return_value = schema.ListSchemasResponse() + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_schemas_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemasRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.ListSchemasResponse()) + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_schemas_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = schema.ListSchemasResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_schemas( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_schemas_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema.ListSchemasRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_schemas_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemasResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.ListSchemasResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_schemas( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_schemas_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_schemas( + schema.ListSchemasRequest(), + parent='parent_value', + ) + + +def test_list_schemas_pager(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token='abc', + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token='def', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token='ghi', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_schemas(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) + for i in results) +def test_list_schemas_pages(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_schemas), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token='abc', + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token='def', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token='ghi', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = list(client.list_schemas(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_schemas_async_pager(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token='abc', + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token='def', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token='ghi', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_schemas(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schema.Schema) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_schemas_async_pages(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schemas), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token='abc', + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token='def', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token='ghi', + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_schemas(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + schema.DeleteSchemaRequest, + dict, +]) +def test_delete_schema(request_type, transport: str = 'grpc'): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + client.delete_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRequest() + +@pytest.mark.asyncio +async def test_delete_schema_async(transport: str = 'grpc_asyncio', request_type=schema.DeleteSchemaRequest): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_schema_async_from_dict(): + await test_delete_schema_async(request_type=dict) + + +def test_delete_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + call.return_value = None + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_schema( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schema( + schema.DeleteSchemaRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_schema( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_schema( + schema.DeleteSchemaRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + gp_schema.ValidateSchemaRequest, + dict, +]) +def test_validate_schema(request_type, transport: str = 'grpc'): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.ValidateSchemaResponse( + ) + response = client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.ValidateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +def test_validate_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + client.validate_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.ValidateSchemaRequest() + +@pytest.mark.asyncio +async def test_validate_schema_async(transport: str = 'grpc_asyncio', request_type=gp_schema.ValidateSchemaRequest): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.ValidateSchemaResponse( + )) + response = await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.ValidateSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +@pytest.mark.asyncio +async def test_validate_schema_async_from_dict(): + await test_validate_schema_async(request_type=dict) + + +def test_validate_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.ValidateSchemaRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + call.return_value = gp_schema.ValidateSchemaResponse() + client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_validate_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.ValidateSchemaRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.ValidateSchemaResponse()) + await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_validate_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gp_schema.ValidateSchemaResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.validate_schema( + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name='name_value') + assert arg == mock_val + + +def test_validate_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.validate_schema( + gp_schema.ValidateSchemaRequest(), + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_validate_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_schema), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.ValidateSchemaResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.ValidateSchemaResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.validate_schema( + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_validate_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.validate_schema( + gp_schema.ValidateSchemaRequest(), + parent='parent_value', + schema=gp_schema.Schema(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + schema.ValidateMessageRequest, + dict, +]) +def test_validate_message(request_type, transport: str = 'grpc'): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_message), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ValidateMessageResponse( + ) + response = client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ValidateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.ValidateMessageResponse) + + +def test_validate_message_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_message), + '__call__') as call: + client.validate_message() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ValidateMessageRequest() + +@pytest.mark.asyncio +async def test_validate_message_async(transport: str = 'grpc_asyncio', request_type=schema.ValidateMessageRequest): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_message), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schema.ValidateMessageResponse( + )) + response = await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ValidateMessageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.ValidateMessageResponse) + + +@pytest.mark.asyncio +async def test_validate_message_async_from_dict(): + await test_validate_message_async(request_type=dict) + + +def test_validate_message_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = schema.ValidateMessageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_message), + '__call__') as call: + call.return_value = schema.ValidateMessageResponse() + client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_validate_message_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ValidateMessageRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.validate_message), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.ValidateMessageResponse()) + await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SchemaServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SchemaServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = SchemaServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SchemaServiceGrpcTransport, + ) + +def test_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_schema_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + 'create_schema', + 'get_schema', + 'list_schemas', + 'delete_schema', + 'validate_schema', + 'validate_message', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_schema_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + quota_project_id="octopus", + ) + + +def test_schema_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport() + adc.assert_called_once() + + +def test_schema_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SchemaServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SchemaServiceGrpcTransport, grpc_helpers), + (transports.SchemaServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_schema_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport]) +def test_schema_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_schema_service_host_no_port(transport_name): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_schema_service_host_with_port(transport_name): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'pubsub.googleapis.com:8000' + ) + +def test_schema_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SchemaServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_schema_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SchemaServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport]) +def test_schema_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from 
grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport]) +def test_schema_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_schema_path(): + project = "squid" + schema = "clam" + expected = "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) + actual = SchemaServiceClient.schema_path(project, schema) + assert expected == actual + + +def test_parse_schema_path(): + expected = { + "project": "whelk", + "schema": "octopus", + } + path = SchemaServiceClient.schema_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_schema_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SchemaServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = SchemaServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = SchemaServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = SchemaServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SchemaServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = SchemaServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = SchemaServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = SchemaServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = SchemaServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SchemaServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = SchemaServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SchemaServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SchemaServiceTransport, '_prep_wrapped_messages') as prep: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SchemaServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = SchemaServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_set_iam_policy(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. 
+    assert len(call.mock_calls) == 1
+    _, args, _ = call.mock_calls[0]
+
+    assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+    client = SchemaServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            policy_pb2.Policy(version=774, etag=b"etag_blob",)
+        )
+        response = await client.set_iam_policy(request)
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+
+        assert args[0] == request
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, policy_pb2.Policy)
+
+    assert response.version == 774
+
+    assert response.etag == b"etag_blob"
+
+def test_set_iam_policy_field_headers():
+    client = SchemaServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = iam_policy_pb2.SetIamPolicyRequest()
+    request.resource = "resource/value"
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport), + (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py new file mode 100644 index 000000000000..f60040d7d83b --- /dev/null +++ b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -0,0 +1,5492 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import options_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.pubsub_v1.services.subscriber import SubscriberAsyncClient +from google.pubsub_v1.services.subscriber import SubscriberClient +from google.pubsub_v1.services.subscriber import pagers +from google.pubsub_v1.services.subscriber import transports +from google.pubsub_v1.types import pubsub +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert SubscriberClient._get_default_mtls_endpoint(None) is None + assert SubscriberClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert SubscriberClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert SubscriberClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert SubscriberClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SubscriberClient, "grpc"), + (SubscriberAsyncClient, "grpc_asyncio"), +]) +def test_subscriber_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.SubscriberGrpcTransport, "grpc"), + (transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_subscriber_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (SubscriberClient, "grpc"), + (SubscriberAsyncClient, "grpc_asyncio"), +]) +def test_subscriber_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + + +def test_subscriber_client_get_transport_class(): + transport = SubscriberClient.get_transport_class() + available_transports = [ + transports.SubscriberGrpcTransport, + ] + assert transport in available_transports + + transport = SubscriberClient.get_transport_class("grpc") + assert transport == transports.SubscriberGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(SubscriberClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(SubscriberClient)) +@mock.patch.object(SubscriberAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberAsyncClient)) +def test_subscriber_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(SubscriberClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(SubscriberClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "true"), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "false"), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient)) +@mock.patch.object(SubscriberAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_subscriber_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class", [ + SubscriberClient, SubscriberAsyncClient +]) +@mock.patch.object(SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient)) +@mock.patch.object(SubscriberAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberAsyncClient)) +def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_subscriber_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_subscriber_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +def test_subscriber_client_client_options_from_dict(): + with mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = SubscriberClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_subscriber_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + scopes=None, + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.Subscription, + dict, +]) +def test_create_subscription(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name='name_value', + topic='topic_value', + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter='filter_value', + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + response = client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Subscription() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == 'filter_value' + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_create_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + client.create_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Subscription() + +@pytest.mark.asyncio +async def test_create_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.Subscription): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription( + name='name_value', + topic='topic_value', + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter='filter_value', + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + )) + response = await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Subscription() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == 'filter_value' + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.asyncio +async def test_create_subscription_async_from_dict(): + await test_create_subscription_async(request_type=dict) + + +def test_create_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + call.return_value = pubsub.Subscription() + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.Subscription() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_create_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_subscription( + name='name_value', + topic='topic_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val + + +def test_create_subscription_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_subscription( + pubsub.Subscription(), + name='name_value', + topic='topic_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ack_deadline_seconds=2066, + ) + +@pytest.mark.asyncio +async def test_create_subscription_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_subscription( + name='name_value', + topic='topic_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].topic + mock_val = 'topic_value' + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_subscription_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_subscription( + pubsub.Subscription(), + name='name_value', + topic='topic_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ack_deadline_seconds=2066, + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.GetSubscriptionRequest, + dict, +]) +def test_get_subscription(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name='name_value', + topic='topic_value', + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter='filter_value', + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + response = client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSubscriptionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == 'filter_value' + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_get_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + client.get_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSubscriptionRequest() + +@pytest.mark.asyncio +async def test_get_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.GetSubscriptionRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription( + name='name_value', + topic='topic_value', + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter='filter_value', + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + )) + response = await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == 'filter_value' + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.asyncio +async def test_get_subscription_async_from_dict(): + await test_get_subscription_async(request_type=dict) + + +def test_get_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + call.return_value = pubsub.Subscription() + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSubscriptionRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_get_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_subscription( + subscription='subscription_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + + +def test_get_subscription_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_subscription( + pubsub.GetSubscriptionRequest(), + subscription='subscription_value', + ) + +@pytest.mark.asyncio +async def test_get_subscription_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_subscription( + subscription='subscription_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_subscription_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_subscription( + pubsub.GetSubscriptionRequest(), + subscription='subscription_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.UpdateSubscriptionRequest, + dict, +]) +def test_update_subscription(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription( + name='name_value', + topic='topic_value', + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter='filter_value', + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + response = client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSubscriptionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == 'filter_value' + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_update_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), + '__call__') as call: + client.update_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSubscriptionRequest() + +@pytest.mark.asyncio +async def test_update_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.UpdateSubscriptionRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription( + name='name_value', + topic='topic_value', + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter='filter_value', + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + )) + response = await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == 'filter_value' + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.asyncio +async def test_update_subscription_async_from_dict(): + await test_update_subscription_async(request_type=dict) + + +def test_update_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + + request.subscription.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), + '__call__') as call: + call.return_value = pubsub.Subscription() + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSubscriptionRequest() + + request.subscription.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + pubsub.ListSubscriptionsRequest, + dict, +]) +def test_list_subscriptions(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListSubscriptionsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_subscriptions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + client.list_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSubscriptionsRequest() + +@pytest.mark.asyncio +async def test_list_subscriptions_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListSubscriptionsRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSubscriptionsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSubscriptionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_from_dict(): + await test_list_subscriptions_async(request_type=dict) + + +def test_list_subscriptions_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSubscriptionsRequest() + + request.project = 'project_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + call.return_value = pubsub.ListSubscriptionsResponse() + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project=project_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_subscriptions_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.ListSubscriptionsRequest() + + request.project = 'project_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSubscriptionsResponse()) + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project=project_value', + ) in kw['metadata'] + + +def test_list_subscriptions_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSubscriptionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_subscriptions( + project='project_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = 'project_value' + assert arg == mock_val + + +def test_list_subscriptions_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), + project='project_value', + ) + +@pytest.mark.asyncio +async def test_list_subscriptions_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSubscriptionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSubscriptionsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_subscriptions( + project='project_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = 'project_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_subscriptions_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), + project='project_value', + ) + + +def test_list_subscriptions_pager(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token='abc', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token='ghi', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('project', ''), + )), + ) + pager = client.list_subscriptions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Subscription) + for i in results) +def test_list_subscriptions_pages(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token='abc', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token='ghi', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + pages = list(client.list_subscriptions(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pager(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token='abc', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token='ghi', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_subscriptions(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Subscription) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_subscriptions_async_pages(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token='abc', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token='def', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token='ghi', + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_subscriptions(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + pubsub.DeleteSubscriptionRequest, + dict, +]) +def test_delete_subscription(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subscription_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + client.delete_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSubscriptionRequest() + +@pytest.mark.asyncio +async def test_delete_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.DeleteSubscriptionRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSubscriptionRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_subscription_async_from_dict(): + await test_delete_subscription_async(request_type=dict) + + +def test_delete_subscription_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = pubsub.DeleteSubscriptionRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + call.return_value = None + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_subscription_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSubscriptionRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_delete_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_subscription( + subscription='subscription_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + + +def test_delete_subscription_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), + subscription='subscription_value', + ) + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_subscription( + subscription='subscription_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_subscription_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), + subscription='subscription_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.ModifyAckDeadlineRequest, + dict, +]) +def test_modify_ack_deadline(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyAckDeadlineRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_ack_deadline_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + client.modify_ack_deadline() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyAckDeadlineRequest() + +@pytest.mark.asyncio +async def test_modify_ack_deadline_async(transport: str = 'grpc_asyncio', request_type=pubsub.ModifyAckDeadlineRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyAckDeadlineRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_async_from_dict(): + await test_modify_ack_deadline_async(request_type=dict) + + +def test_modify_ack_deadline_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyAckDeadlineRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + call.return_value = None + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyAckDeadlineRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_modify_ack_deadline_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.modify_ack_deadline( + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ['ack_ids_value'] + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val + + +def test_modify_ack_deadline_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ack_deadline_seconds=2066, + ) + +@pytest.mark.asyncio +async def test_modify_ack_deadline_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_ack_deadline( + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ack_deadline_seconds=2066, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ['ack_ids_value'] + assert arg == mock_val + arg = args[0].ack_deadline_seconds + mock_val = 2066 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_modify_ack_deadline_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ack_deadline_seconds=2066, + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.AcknowledgeRequest, + dict, +]) +def test_acknowledge(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.AcknowledgeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_acknowledge_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + client.acknowledge() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.AcknowledgeRequest() + +@pytest.mark.asyncio +async def test_acknowledge_async(transport: str = 'grpc_asyncio', request_type=pubsub.AcknowledgeRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.AcknowledgeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_acknowledge_async_from_dict(): + await test_acknowledge_async(request_type=dict) + + +def test_acknowledge_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.AcknowledgeRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + call.return_value = None + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_acknowledge_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.AcknowledgeRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_acknowledge_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.acknowledge( + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ['ack_ids_value'] + assert arg == mock_val + + +def test_acknowledge_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ) + +@pytest.mark.asyncio +async def test_acknowledge_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.acknowledge), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.acknowledge( + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].ack_ids + mock_val = ['ack_ids_value'] + assert arg == mock_val + +@pytest.mark.asyncio +async def test_acknowledge_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription='subscription_value', + ack_ids=['ack_ids_value'], + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.PullRequest, + dict, +]) +def test_pull(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse( + ) + response = client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PullRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) + + +def test_pull_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + client.pull() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PullRequest() + +@pytest.mark.asyncio +async def test_pull_async(transport: str = 'grpc_asyncio', request_type=pubsub.PullRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse( + )) + response = await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PullRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) + + +@pytest.mark.asyncio +async def test_pull_async_from_dict(): + await test_pull_async(request_type=dict) + + +def test_pull_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PullRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + call.return_value = pubsub.PullResponse() + client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_pull_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.PullRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + await client.pull(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_pull_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.pull( + subscription='subscription_value', + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].return_immediately + mock_val = True + assert arg == mock_val + arg = args[0].max_messages + mock_val = 1277 + assert arg == mock_val + + +def test_pull_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.pull( + pubsub.PullRequest(), + subscription='subscription_value', + return_immediately=True, + max_messages=1277, + ) + +@pytest.mark.asyncio +async def test_pull_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.pull), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.PullResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.pull( + subscription='subscription_value', + return_immediately=True, + max_messages=1277, + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].return_immediately + mock_val = True + assert arg == mock_val + arg = args[0].max_messages + mock_val = 1277 + assert arg == mock_val + +@pytest.mark.asyncio +async def test_pull_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.pull( + pubsub.PullRequest(), + subscription='subscription_value', + return_immediately=True, + max_messages=1277, + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.StreamingPullRequest, + dict, +]) +def test_streaming_pull(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_pull), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = iter([pubsub.StreamingPullResponse()]) + response = client.streaming_pull(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ for message in response: + assert isinstance(message, pubsub.StreamingPullResponse) + + +@pytest.mark.asyncio +async def test_streaming_pull_async(transport: str = 'grpc_asyncio', request_type=pubsub.StreamingPullRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.streaming_pull), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock(side_effect=[pubsub.StreamingPullResponse()]) + response = await client.streaming_pull(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, pubsub.StreamingPullResponse) + + +@pytest.mark.asyncio +async def test_streaming_pull_async_from_dict(): + await test_streaming_pull_async(request_type=dict) + + +@pytest.mark.parametrize("request_type", [ + pubsub.ModifyPushConfigRequest, + dict, +]) +def test_modify_push_config(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyPushConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_push_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + client.modify_push_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyPushConfigRequest() + +@pytest.mark.asyncio +async def test_modify_push_config_async(transport: str = 'grpc_asyncio', request_type=pubsub.ModifyPushConfigRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyPushConfigRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_modify_push_config_async_from_dict(): + await test_modify_push_config_async(request_type=dict) + + +def test_modify_push_config_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyPushConfigRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + call.return_value = None + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_modify_push_config_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ModifyPushConfigRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_modify_push_config_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.modify_push_config( + subscription='subscription_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') + assert arg == mock_val + + +def test_modify_push_config_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription='subscription_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ) + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_push_config), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.modify_push_config( + subscription='subscription_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + arg = args[0].push_config + mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_modify_push_config_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription='subscription_value', + push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.GetSnapshotRequest, + dict, +]) +def test_get_snapshot(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot( + name='name_value', + topic='topic_value', + ) + response = client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + + +def test_get_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + client.get_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSnapshotRequest() + +@pytest.mark.asyncio +async def test_get_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.GetSnapshotRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot( + name='name_value', + topic='topic_value', + )) + response = await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + + +@pytest.mark.asyncio +async def test_get_snapshot_async_from_dict(): + await test_get_snapshot_async(request_type=dict) + + +def test_get_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + + request.snapshot = 'snapshot_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + call.return_value = pubsub.Snapshot() + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'snapshot=snapshot_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.GetSnapshotRequest() + + request.snapshot = 'snapshot_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'snapshot=snapshot_value', + ) in kw['metadata'] + + +def test_get_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_snapshot( + snapshot='snapshot_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = 'snapshot_value' + assert arg == mock_val + + +def test_get_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_snapshot( + pubsub.GetSnapshotRequest(), + snapshot='snapshot_value', + ) + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_snapshot( + snapshot='snapshot_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = 'snapshot_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_snapshot( + pubsub.GetSnapshotRequest(), + snapshot='snapshot_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.ListSnapshotsRequest, + dict, +]) +def test_list_snapshots(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSnapshotsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_snapshots_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + client.list_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSnapshotsRequest() + +@pytest.mark.asyncio +async def test_list_snapshots_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListSnapshotsRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSnapshotsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSnapshotsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSnapshotsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_snapshots_async_from_dict(): + await test_list_snapshots_async(request_type=dict) + + +def test_list_snapshots_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + + request.project = 'project_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + call.return_value = pubsub.ListSnapshotsResponse() + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project=project_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_snapshots_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.ListSnapshotsRequest() + + request.project = 'project_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSnapshotsResponse()) + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'project=project_value', + ) in kw['metadata'] + + +def test_list_snapshots_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.ListSnapshotsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_snapshots( + project='project_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = 'project_value' + assert arg == mock_val + + +def test_list_snapshots_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_snapshots( + pubsub.ListSnapshotsRequest(), + project='project_value', + ) + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.ListSnapshotsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSnapshotsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_snapshots( + project='project_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].project + mock_val = 'project_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_snapshots_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_snapshots( + pubsub.ListSnapshotsRequest(), + project='project_value', + ) + + +def test_list_snapshots_pager(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token='abc', + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token='ghi', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('project', ''), + )), + ) + pager = client.list_snapshots(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Snapshot) + for i in results) +def test_list_snapshots_pages(transport_name: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token='abc', + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token='ghi', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + pages = list(client.list_snapshots(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_snapshots_async_pager(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token='abc', + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token='ghi', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_snapshots(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, pubsub.Snapshot) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_snapshots_async_pages(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_snapshots), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token='abc', + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token='def', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token='ghi', + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_snapshots(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + pubsub.CreateSnapshotRequest, + dict, +]) +def test_create_snapshot(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot( + name='name_value', + topic='topic_value', + ) + response = client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.CreateSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + + +def test_create_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + client.create_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.CreateSnapshotRequest() + +@pytest.mark.asyncio +async def test_create_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.CreateSnapshotRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot( + name='name_value', + topic='topic_value', + )) + response = await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.CreateSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + + +@pytest.mark.asyncio +async def test_create_snapshot_async_from_dict(): + await test_create_snapshot_async(request_type=dict) + + +def test_create_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + call.return_value = pubsub.Snapshot() + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.CreateSnapshotRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_create_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.Snapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_snapshot( + name='name_value', + subscription='subscription_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + + +def test_create_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name='name_value', + subscription='subscription_value', + ) + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_snapshot( + name='name_value', + subscription='subscription_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + arg = args[0].subscription + mock_val = 'subscription_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name='name_value', + subscription='subscription_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.UpdateSnapshotRequest, + dict, +]) +def test_update_snapshot(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot( + name='name_value', + topic='topic_value', + ) + response = client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + + +def test_update_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_snapshot), + '__call__') as call: + client.update_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSnapshotRequest() + +@pytest.mark.asyncio +async def test_update_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.UpdateSnapshotRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot( + name='name_value', + topic='topic_value', + )) + response = await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSnapshotRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == 'name_value' + assert response.topic == 'topic_value' + + +@pytest.mark.asyncio +async def test_update_snapshot_async_from_dict(): + await test_update_snapshot_async(request_type=dict) + + +def test_update_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + + request.snapshot.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_snapshot), + '__call__') as call: + call.return_value = pubsub.Snapshot() + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'snapshot.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.UpdateSnapshotRequest() + + request.snapshot.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_snapshot), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'snapshot.name=name_value', + ) in kw['metadata'] + + +@pytest.mark.parametrize("request_type", [ + pubsub.DeleteSnapshotRequest, + dict, +]) +def test_delete_snapshot(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSnapshotRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_snapshot_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + client.delete_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSnapshotRequest() + +@pytest.mark.asyncio +async def test_delete_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.DeleteSnapshotRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSnapshotRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_snapshot_async_from_dict(): + await test_delete_snapshot_async(request_type=dict) + + +def test_delete_snapshot_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + + request.snapshot = 'snapshot_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + call.return_value = None + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'snapshot=snapshot_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_snapshot_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.DeleteSnapshotRequest() + + request.snapshot = 'snapshot_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'snapshot=snapshot_value', + ) in kw['metadata'] + + +def test_delete_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.delete_snapshot( + snapshot='snapshot_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = 'snapshot_value' + assert arg == mock_val + + +def test_delete_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), + snapshot='snapshot_value', + ) + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_snapshot), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_snapshot( + snapshot='snapshot_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = 'snapshot_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), + snapshot='snapshot_value', + ) + + +@pytest.mark.parametrize("request_type", [ + pubsub.SeekRequest, + dict, +]) +def test_seek(request_type, transport: str = 'grpc'): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seek), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.SeekResponse( + ) + response = client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.SeekRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.seek), + '__call__') as call: + client.seek() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.SeekRequest() + +@pytest.mark.asyncio +async def test_seek_async(transport: str = 'grpc_asyncio', request_type=pubsub.SeekRequest): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seek), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse( + )) + response = await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.SeekRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.SeekResponse) + + +@pytest.mark.asyncio +async def test_seek_async_from_dict(): + await test_seek_async(request_type=dict) + + +def test_seek_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seek), + '__call__') as call: + call.return_value = pubsub.SeekResponse() + client.seek(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_seek_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = pubsub.SeekRequest() + + request.subscription = 'subscription_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.seek), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + await client.seek(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'subscription=subscription_value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. 
+ transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SubscriberClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = SubscriberClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SubscriberGrpcTransport, + ) + +def test_subscriber_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SubscriberTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_subscriber_base_transport(): + # Instantiate the base transport. + with mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.SubscriberTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'create_subscription', + 'get_subscription', + 'update_subscription', + 'list_subscriptions', + 'delete_subscription', + 'modify_ack_deadline', + 'acknowledge', + 'pull', + 'streaming_pull', + 'modify_push_config', + 'get_snapshot', + 'list_snapshots', + 'create_snapshot', + 'update_snapshot', + 'delete_snapshot', + 'seek', + 'set_iam_policy', + 'get_iam_policy', + 'test_iam_permissions', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_subscriber_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + quota_project_id="octopus", + ) + + +def test_subscriber_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SubscriberTransport() + adc.assert_called_once() + + +def test_subscriber_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + SubscriberClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + ], +) +def test_subscriber_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.SubscriberGrpcTransport, grpc_helpers), + (transports.SubscriberGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_subscriber_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "pubsub.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', +), + scopes=["1", "2"], + default_host="pubsub.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport]) +def test_subscriber_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_subscriber_host_no_port(transport_name): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com'), + transport=transport_name, + ) + assert client.transport._host == ( + 'pubsub.googleapis.com:443' + ) + +@pytest.mark.parametrize("transport_name", [ + "grpc", + "grpc_asyncio", +]) +def test_subscriber_host_with_port(transport_name): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com:8000'), + transport=transport_name, + ) + assert client.transport._host == ( + 'pubsub.googleapis.com:8000' + ) + +def test_subscriber_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.SubscriberGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_subscriber_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.SubscriberGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport]) +def test_subscriber_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio 
transport constructor. +@pytest.mark.parametrize("transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport]) +def test_subscriber_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_snapshot_path(): + project = "squid" + snapshot = "clam" + expected = "projects/{project}/snapshots/{snapshot}".format(project=project, snapshot=snapshot, ) + actual = SubscriberClient.snapshot_path(project, snapshot) + assert expected == actual + + +def test_parse_snapshot_path(): + expected = { + "project": "whelk", + "snapshot": "octopus", + } + path = SubscriberClient.snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_snapshot_path(path) + assert expected == actual + +def test_subscription_path(): + project = "oyster" + subscription = "nudibranch" + expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) + actual = SubscriberClient.subscription_path(project, subscription) + assert expected == actual + + +def test_parse_subscription_path(): + expected = { + "project": "cuttlefish", + "subscription": "mussel", + } + path = SubscriberClient.subscription_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_subscription_path(path) + assert expected == actual + +def test_topic_path(): + project = "winkle" + topic = "nautilus" + expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) + actual = SubscriberClient.topic_path(project, topic) + assert expected == actual + + +def test_parse_topic_path(): + expected = { + "project": "scallop", + "topic": "abalone", + } + path = SubscriberClient.topic_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_topic_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = SubscriberClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = SubscriberClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = SubscriberClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = SubscriberClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = SubscriberClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = SubscriberClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = SubscriberClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = SubscriberClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = SubscriberClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = SubscriberClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = SubscriberClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.SubscriberTransport, '_prep_wrapped_messages') as prep: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.SubscriberTransport, '_prep_wrapped_messages') as prep: + transport_class = SubscriberClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_set_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty 
request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + response = client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.SetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + response = await client.set_iam_policy(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + +def test_set_iam_policy_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] +@pytest.mark.asyncio +async def test_set_iam_policy_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.SetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + +def test_set_iam_policy_from_dict(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = policy_pb2.Policy() + + response = client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_set_iam_policy_from_dict_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.set_iam_policy( + request={ + "resource": "resource_value", + "policy": policy_pb2.Policy(version=774), + } + ) + call.assert_called() + + +def test_get_iam_policy(transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) + + response = client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +@pytest.mark.asyncio +async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.GetIamPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy(version=774, etag=b"etag_blob",) + ) + + response = await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + assert response.version == 774 + + assert response.etag == b"etag_blob" + + +def test_get_iam_policy_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + call.return_value = policy_pb2.Policy() + + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_iam_policy_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.GetIamPolicyRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_iam_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) + + await client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_get_iam_policy_from_dict(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = policy_pb2.Policy() + + response = client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_get_iam_policy_from_dict_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + policy_pb2.Policy() + ) + + response = await client.get_iam_policy( + request={ + "resource": "resource_value", + "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), + } + ) + call.assert_called() + + +def test_test_iam_permissions(transport: str = "grpc"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iam_policy_pb2.TestIamPermissionsResponse( + permissions=["permissions_value"], + ) + + response = client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = iam_policy_pb2.TestIamPermissionsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) + ) + + response = await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + assert response.permissions == ["permissions_value"] + + +def test_test_iam_permissions_field_headers(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_test_iam_permissions_field_headers_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = iam_policy_pb2.TestIamPermissionsRequest() + request.resource = "resource/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + await client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] + + +def test_test_iam_permissions_from_dict(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = iam_policy_pb2.TestIamPermissionsResponse() + + response = client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + +@pytest.mark.asyncio +async def test_test_iam_permissions_from_dict_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + iam_policy_pb2.TestIamPermissionsResponse() + ) + + response = await client.test_iam_permissions( + request={ + "resource": "resource_value", + "permissions": ["permissions_value"], + } + ) + call.assert_called() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (SubscriberClient, transports.SubscriberGrpcTransport), + (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From e20abfa9221f7976b49a1aa243cc29c380c2b0bb Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 30 May 2022 14:59:42 -0400 Subject: [PATCH 0831/1197] chore(revert): use gapic-generator-python 1.0.0 (#696) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Revert "chore: use gapic-generator-python 1.0.0 (#691)" This reverts commit fb493bd4b20da58a5224a154cc906df273327181. 
* 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/noxfile.py | 42 +- .../owl-bot-staging/v1/.coveragerc | 17 - .../owl-bot-staging/v1/.flake8 | 33 - .../owl-bot-staging/v1/MANIFEST.in | 2 - .../owl-bot-staging/v1/README.rst | 49 - .../owl-bot-staging/v1/docs/conf.py | 376 -- .../owl-bot-staging/v1/docs/index.rst | 7 - .../v1/docs/pubsub_v1/publisher.rst | 10 - .../v1/docs/pubsub_v1/schema_service.rst | 10 - .../v1/docs/pubsub_v1/services.rst | 8 - .../v1/docs/pubsub_v1/subscriber.rst | 10 - .../v1/docs/pubsub_v1/types.rst | 7 - .../v1/google/pubsub/__init__.py | 145 - .../owl-bot-staging/v1/google/pubsub/py.typed | 2 - .../v1/google/pubsub_v1/__init__.py | 146 - .../v1/google/pubsub_v1/gapic_metadata.json | 361 -- .../v1/google/pubsub_v1/py.typed | 2 - .../v1/google/pubsub_v1/services/__init__.py | 15 - .../pubsub_v1/services/publisher/__init__.py | 22 - .../services/publisher/async_client.py | 1408 ----- .../pubsub_v1/services/publisher/client.py | 1559 ----- .../pubsub_v1/services/publisher/pagers.py | 381 -- .../services/publisher/transports/__init__.py | 33 - .../services/publisher/transports/base.py | 364 -- .../services/publisher/transports/grpc.py | 572 -- .../publisher/transports/grpc_asyncio.py | 572 -- .../services/schema_service/__init__.py | 22 - .../services/schema_service/async_client.py | 1062 ---- .../services/schema_service/client.py | 1262 ---- .../services/schema_service/pagers.py | 139 - .../schema_service/transports/__init__.py | 33 - .../schema_service/transports/base.py | 255 - .../schema_service/transports/grpc.py | 475 -- .../schema_service/transports/grpc_asyncio.py | 475 -- .../pubsub_v1/services/subscriber/__init__.py | 22 - .../services/subscriber/async_client.py | 2271 ------- .../pubsub_v1/services/subscriber/client.py | 2373 ------- .../pubsub_v1/services/subscriber/pagers.py | 260 - 
.../subscriber/transports/__init__.py | 33 - .../services/subscriber/transports/base.py | 508 -- .../services/subscriber/transports/grpc.py | 835 --- .../subscriber/transports/grpc_asyncio.py | 835 --- .../v1/google/pubsub_v1/types/__init__.py | 136 - .../v1/google/pubsub_v1/types/pubsub.py | 1866 ------ .../v1/google/pubsub_v1/types/schema.py | 319 - .../owl-bot-staging/v1/mypy.ini | 3 - .../owl-bot-staging/v1/noxfile.py | 180 - ..._generated_publisher_create_topic_async.py | 45 - ...1_generated_publisher_create_topic_sync.py | 45 - ..._generated_publisher_delete_topic_async.py | 43 - ...1_generated_publisher_delete_topic_sync.py | 43 - ...ted_publisher_detach_subscription_async.py | 45 - ...ated_publisher_detach_subscription_sync.py | 45 - ..._v1_generated_publisher_get_topic_async.py | 45 - ...b_v1_generated_publisher_get_topic_sync.py | 45 - ...ed_publisher_list_topic_snapshots_async.py | 46 - ...ted_publisher_list_topic_snapshots_sync.py | 46 - ...ublisher_list_topic_subscriptions_async.py | 46 - ...publisher_list_topic_subscriptions_sync.py | 46 - ...1_generated_publisher_list_topics_async.py | 46 - ...v1_generated_publisher_list_topics_sync.py | 46 - ...ub_v1_generated_publisher_publish_async.py | 45 - ...sub_v1_generated_publisher_publish_sync.py | 45 - ..._generated_publisher_update_topic_async.py | 48 - ...1_generated_publisher_update_topic_sync.py | 48 - ...ated_schema_service_create_schema_async.py | 49 - ...rated_schema_service_create_schema_sync.py | 49 - ...ated_schema_service_delete_schema_async.py | 43 - ...rated_schema_service_delete_schema_sync.py | 43 - ...nerated_schema_service_get_schema_async.py | 45 - ...enerated_schema_service_get_schema_sync.py | 45 - ...rated_schema_service_list_schemas_async.py | 46 - ...erated_schema_service_list_schemas_sync.py | 46 - ...d_schema_service_validate_message_async.py | 46 - ...ed_schema_service_validate_message_sync.py | 46 - ...ed_schema_service_validate_schema_async.py | 49 - 
...ted_schema_service_validate_schema_sync.py | 49 - ..._generated_subscriber_acknowledge_async.py | 44 - ...1_generated_subscriber_acknowledge_sync.py | 44 - ...erated_subscriber_create_snapshot_async.py | 46 - ...nerated_subscriber_create_snapshot_sync.py | 46 - ...ed_subscriber_create_subscription_async.py | 46 - ...ted_subscriber_create_subscription_sync.py | 46 - ...erated_subscriber_delete_snapshot_async.py | 43 - ...nerated_subscriber_delete_snapshot_sync.py | 43 - ...ed_subscriber_delete_subscription_async.py | 43 - ...ted_subscriber_delete_subscription_sync.py | 43 - ...generated_subscriber_get_snapshot_async.py | 45 - ..._generated_subscriber_get_snapshot_sync.py | 45 - ...rated_subscriber_get_subscription_async.py | 45 - ...erated_subscriber_get_subscription_sync.py | 45 - ...nerated_subscriber_list_snapshots_async.py | 46 - ...enerated_subscriber_list_snapshots_sync.py | 46 - ...ted_subscriber_list_subscriptions_async.py | 46 - ...ated_subscriber_list_subscriptions_sync.py | 46 - ...ed_subscriber_modify_ack_deadline_async.py | 45 - ...ted_subscriber_modify_ack_deadline_sync.py | 45 - ...ted_subscriber_modify_push_config_async.py | 43 - ...ated_subscriber_modify_push_config_sync.py | 43 - ...bsub_v1_generated_subscriber_pull_async.py | 46 - ...ubsub_v1_generated_subscriber_pull_sync.py | 46 - ...bsub_v1_generated_subscriber_seek_async.py | 45 - ...ubsub_v1_generated_subscriber_seek_sync.py | 45 - ...nerated_subscriber_streaming_pull_async.py | 57 - ...enerated_subscriber_streaming_pull_sync.py | 57 - ...erated_subscriber_update_snapshot_async.py | 44 - ...nerated_subscriber_update_snapshot_sync.py | 44 - ...ed_subscriber_update_subscription_async.py | 49 - ...ted_subscriber_update_subscription_sync.py | 49 - .../snippet_metadata_pubsub_v1.json | 5019 --------------- .../v1/scripts/fixup_pubsub_v1_keywords.py | 209 - .../owl-bot-staging/v1/setup.py | 60 - .../owl-bot-staging/v1/tests/__init__.py | 16 - .../owl-bot-staging/v1/tests/unit/__init__.py | 16 - 
.../v1/tests/unit/gapic/__init__.py | 16 - .../v1/tests/unit/gapic/pubsub_v1/__init__.py | 16 - .../unit/gapic/pubsub_v1/test_publisher.py | 4157 ------------- .../gapic/pubsub_v1/test_schema_service.py | 3123 ---------- .../unit/gapic/pubsub_v1/test_subscriber.py | 5492 ----------------- 119 files changed, 13 insertions(+), 40468 deletions(-) delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py 
delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py delete mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/schema.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/mypy.ini delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/noxfile.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py delete mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py delete mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py delete mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py delete mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py delete mode 100644 
packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/setup.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py delete mode 100644 packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py 
index 7af8428afa1a..914b76e5b8ab 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -215,35 +215,19 @@ def default(session): install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. - if session.python != "3.6": - session.run( - "py.test", - "--quiet", - "--asyncio-mode=strict", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google/cloud", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) - else: - session.run( - "py.test", - "--quiet", - f"--junitxml=unit_{session.python}_sponge_log.xml", - "--cov=google/cloud", - "--cov=tests/unit", - "--cov-append", - "--cov-config=.coveragerc", - "--cov-report=", - "--cov-fail-under=0", - os.path.join("tests", "unit"), - *session.posargs, - ) + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + ) @nox.session(python=UNIT_TEST_PYTHON_VERSIONS) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc b/packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index bcdb1c5865b2..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/pubsub/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 b/packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4cf419..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in b/packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index 4daeccbde234..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/pubsub *.py -recursive-include google/pubsub_v1 *.py diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst deleted file mode 100644 index 4c7897d8f43a..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Pubsub API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Pubsub API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index 828a145fb52d..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-pubsub documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. 
-needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-pubsub" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. 
-language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-html_theme_options = { - "description": "Google Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. 
-# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-pubsub-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. 
- # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-pubsub.tex", - u"google-cloud-pubsub Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-pubsub", - u"Google Pubsub Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-pubsub", - u"google-cloud-pubsub Documentation", - author, - "google-cloud-pubsub", - "GAPIC library for Google Pubsub API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 5cb0459d080e..000000000000 
--- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - pubsub_v1/services - pubsub_v1/types diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst deleted file mode 100644 index 0a132656d3f9..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/publisher.rst +++ /dev/null @@ -1,10 +0,0 @@ -Publisher ---------------------------- - -.. automodule:: google.pubsub_v1.services.publisher - :members: - :inherited-members: - -.. automodule:: google.pubsub_v1.services.publisher.pagers - :members: - :inherited-members: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst deleted file mode 100644 index 4cb7a04965fe..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/schema_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -SchemaService -------------------------------- - -.. automodule:: google.pubsub_v1.services.schema_service - :members: - :inherited-members: - -.. automodule:: google.pubsub_v1.services.schema_service.pagers - :members: - :inherited-members: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst deleted file mode 100644 index 6ab7d9e527f7..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/services.rst +++ /dev/null @@ -1,8 +0,0 @@ -Services for Google Pubsub v1 API -================================= -.. 
toctree:: - :maxdepth: 2 - - publisher - schema_service - subscriber diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst deleted file mode 100644 index 7f2c74a4143e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/subscriber.rst +++ /dev/null @@ -1,10 +0,0 @@ -Subscriber ----------------------------- - -.. automodule:: google.pubsub_v1.services.subscriber - :members: - :inherited-members: - -.. automodule:: google.pubsub_v1.services.subscriber.pagers - :members: - :inherited-members: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst b/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst deleted file mode 100644 index 964dfe575dc1..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/docs/pubsub_v1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Pubsub v1 API -============================== - -.. automodule:: google.pubsub_v1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py deleted file mode 100644 index 7efeb4c46c9f..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/__init__.py +++ /dev/null @@ -1,145 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# - -from google.pubsub_v1.services.publisher.client import PublisherClient -from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient -from google.pubsub_v1.services.schema_service.client import SchemaServiceClient -from google.pubsub_v1.services.schema_service.async_client import SchemaServiceAsyncClient -from google.pubsub_v1.services.subscriber.client import SubscriberClient -from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient - -from google.pubsub_v1.types.pubsub import AcknowledgeRequest -from google.pubsub_v1.types.pubsub import BigQueryConfig -from google.pubsub_v1.types.pubsub import CreateSnapshotRequest -from google.pubsub_v1.types.pubsub import DeadLetterPolicy -from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest -from google.pubsub_v1.types.pubsub import DeleteSubscriptionRequest -from google.pubsub_v1.types.pubsub import DeleteTopicRequest -from google.pubsub_v1.types.pubsub import DetachSubscriptionRequest -from google.pubsub_v1.types.pubsub import DetachSubscriptionResponse -from google.pubsub_v1.types.pubsub import ExpirationPolicy -from google.pubsub_v1.types.pubsub import GetSnapshotRequest -from google.pubsub_v1.types.pubsub import GetSubscriptionRequest -from google.pubsub_v1.types.pubsub import GetTopicRequest -from google.pubsub_v1.types.pubsub import ListSnapshotsRequest -from google.pubsub_v1.types.pubsub import ListSnapshotsResponse -from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest -from google.pubsub_v1.types.pubsub import ListSubscriptionsResponse -from google.pubsub_v1.types.pubsub import ListTopicSnapshotsRequest -from google.pubsub_v1.types.pubsub import ListTopicSnapshotsResponse -from google.pubsub_v1.types.pubsub import ListTopicsRequest -from google.pubsub_v1.types.pubsub import ListTopicsResponse -from google.pubsub_v1.types.pubsub import 
ListTopicSubscriptionsRequest -from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse -from google.pubsub_v1.types.pubsub import MessageStoragePolicy -from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest -from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest -from google.pubsub_v1.types.pubsub import PublishRequest -from google.pubsub_v1.types.pubsub import PublishResponse -from google.pubsub_v1.types.pubsub import PubsubMessage -from google.pubsub_v1.types.pubsub import PullRequest -from google.pubsub_v1.types.pubsub import PullResponse -from google.pubsub_v1.types.pubsub import PushConfig -from google.pubsub_v1.types.pubsub import ReceivedMessage -from google.pubsub_v1.types.pubsub import RetryPolicy -from google.pubsub_v1.types.pubsub import SchemaSettings -from google.pubsub_v1.types.pubsub import SeekRequest -from google.pubsub_v1.types.pubsub import SeekResponse -from google.pubsub_v1.types.pubsub import Snapshot -from google.pubsub_v1.types.pubsub import StreamingPullRequest -from google.pubsub_v1.types.pubsub import StreamingPullResponse -from google.pubsub_v1.types.pubsub import Subscription -from google.pubsub_v1.types.pubsub import Topic -from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest -from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest -from google.pubsub_v1.types.pubsub import UpdateTopicRequest -from google.pubsub_v1.types.schema import CreateSchemaRequest -from google.pubsub_v1.types.schema import DeleteSchemaRequest -from google.pubsub_v1.types.schema import GetSchemaRequest -from google.pubsub_v1.types.schema import ListSchemasRequest -from google.pubsub_v1.types.schema import ListSchemasResponse -from google.pubsub_v1.types.schema import Schema -from google.pubsub_v1.types.schema import ValidateMessageRequest -from google.pubsub_v1.types.schema import ValidateMessageResponse -from google.pubsub_v1.types.schema import ValidateSchemaRequest -from 
google.pubsub_v1.types.schema import ValidateSchemaResponse -from google.pubsub_v1.types.schema import Encoding -from google.pubsub_v1.types.schema import SchemaView - -__all__ = ('PublisherClient', - 'PublisherAsyncClient', - 'SchemaServiceClient', - 'SchemaServiceAsyncClient', - 'SubscriberClient', - 'SubscriberAsyncClient', - 'AcknowledgeRequest', - 'BigQueryConfig', - 'CreateSnapshotRequest', - 'DeadLetterPolicy', - 'DeleteSnapshotRequest', - 'DeleteSubscriptionRequest', - 'DeleteTopicRequest', - 'DetachSubscriptionRequest', - 'DetachSubscriptionResponse', - 'ExpirationPolicy', - 'GetSnapshotRequest', - 'GetSubscriptionRequest', - 'GetTopicRequest', - 'ListSnapshotsRequest', - 'ListSnapshotsResponse', - 'ListSubscriptionsRequest', - 'ListSubscriptionsResponse', - 'ListTopicSnapshotsRequest', - 'ListTopicSnapshotsResponse', - 'ListTopicsRequest', - 'ListTopicsResponse', - 'ListTopicSubscriptionsRequest', - 'ListTopicSubscriptionsResponse', - 'MessageStoragePolicy', - 'ModifyAckDeadlineRequest', - 'ModifyPushConfigRequest', - 'PublishRequest', - 'PublishResponse', - 'PubsubMessage', - 'PullRequest', - 'PullResponse', - 'PushConfig', - 'ReceivedMessage', - 'RetryPolicy', - 'SchemaSettings', - 'SeekRequest', - 'SeekResponse', - 'Snapshot', - 'StreamingPullRequest', - 'StreamingPullResponse', - 'Subscription', - 'Topic', - 'UpdateSnapshotRequest', - 'UpdateSubscriptionRequest', - 'UpdateTopicRequest', - 'CreateSchemaRequest', - 'DeleteSchemaRequest', - 'GetSchemaRequest', - 'ListSchemasRequest', - 'ListSchemasResponse', - 'Schema', - 'ValidateMessageRequest', - 'ValidateMessageResponse', - 'ValidateSchemaRequest', - 'ValidateSchemaResponse', - 'Encoding', - 'SchemaView', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed deleted file mode 100644 index 1cec9a5ba1ab..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub/py.typed +++ 
/dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-pubsub package uses inline types. diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py deleted file mode 100644 index 0b045251288e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/__init__.py +++ /dev/null @@ -1,146 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from .services.publisher import PublisherClient -from .services.publisher import PublisherAsyncClient -from .services.schema_service import SchemaServiceClient -from .services.schema_service import SchemaServiceAsyncClient -from .services.subscriber import SubscriberClient -from .services.subscriber import SubscriberAsyncClient - -from .types.pubsub import AcknowledgeRequest -from .types.pubsub import BigQueryConfig -from .types.pubsub import CreateSnapshotRequest -from .types.pubsub import DeadLetterPolicy -from .types.pubsub import DeleteSnapshotRequest -from .types.pubsub import DeleteSubscriptionRequest -from .types.pubsub import DeleteTopicRequest -from .types.pubsub import DetachSubscriptionRequest -from .types.pubsub import DetachSubscriptionResponse -from .types.pubsub import ExpirationPolicy -from .types.pubsub import GetSnapshotRequest -from .types.pubsub import GetSubscriptionRequest -from .types.pubsub import GetTopicRequest -from .types.pubsub import ListSnapshotsRequest -from .types.pubsub import ListSnapshotsResponse -from .types.pubsub import ListSubscriptionsRequest -from .types.pubsub import ListSubscriptionsResponse -from .types.pubsub import ListTopicSnapshotsRequest -from .types.pubsub import ListTopicSnapshotsResponse -from .types.pubsub import ListTopicsRequest -from .types.pubsub import ListTopicsResponse -from .types.pubsub import ListTopicSubscriptionsRequest -from .types.pubsub import ListTopicSubscriptionsResponse -from .types.pubsub import MessageStoragePolicy -from .types.pubsub import ModifyAckDeadlineRequest -from .types.pubsub import ModifyPushConfigRequest -from .types.pubsub import PublishRequest -from .types.pubsub import PublishResponse -from .types.pubsub import PubsubMessage -from .types.pubsub import PullRequest -from .types.pubsub import PullResponse -from .types.pubsub import PushConfig -from .types.pubsub import ReceivedMessage -from .types.pubsub import RetryPolicy -from .types.pubsub import SchemaSettings -from 
.types.pubsub import SeekRequest -from .types.pubsub import SeekResponse -from .types.pubsub import Snapshot -from .types.pubsub import StreamingPullRequest -from .types.pubsub import StreamingPullResponse -from .types.pubsub import Subscription -from .types.pubsub import Topic -from .types.pubsub import UpdateSnapshotRequest -from .types.pubsub import UpdateSubscriptionRequest -from .types.pubsub import UpdateTopicRequest -from .types.schema import CreateSchemaRequest -from .types.schema import DeleteSchemaRequest -from .types.schema import GetSchemaRequest -from .types.schema import ListSchemasRequest -from .types.schema import ListSchemasResponse -from .types.schema import Schema -from .types.schema import ValidateMessageRequest -from .types.schema import ValidateMessageResponse -from .types.schema import ValidateSchemaRequest -from .types.schema import ValidateSchemaResponse -from .types.schema import Encoding -from .types.schema import SchemaView - -__all__ = ( - 'PublisherAsyncClient', - 'SchemaServiceAsyncClient', - 'SubscriberAsyncClient', -'AcknowledgeRequest', -'BigQueryConfig', -'CreateSchemaRequest', -'CreateSnapshotRequest', -'DeadLetterPolicy', -'DeleteSchemaRequest', -'DeleteSnapshotRequest', -'DeleteSubscriptionRequest', -'DeleteTopicRequest', -'DetachSubscriptionRequest', -'DetachSubscriptionResponse', -'Encoding', -'ExpirationPolicy', -'GetSchemaRequest', -'GetSnapshotRequest', -'GetSubscriptionRequest', -'GetTopicRequest', -'ListSchemasRequest', -'ListSchemasResponse', -'ListSnapshotsRequest', -'ListSnapshotsResponse', -'ListSubscriptionsRequest', -'ListSubscriptionsResponse', -'ListTopicSnapshotsRequest', -'ListTopicSnapshotsResponse', -'ListTopicSubscriptionsRequest', -'ListTopicSubscriptionsResponse', -'ListTopicsRequest', -'ListTopicsResponse', -'MessageStoragePolicy', -'ModifyAckDeadlineRequest', -'ModifyPushConfigRequest', -'PublishRequest', -'PublishResponse', -'PublisherClient', -'PubsubMessage', -'PullRequest', -'PullResponse', 
-'PushConfig', -'ReceivedMessage', -'RetryPolicy', -'Schema', -'SchemaServiceClient', -'SchemaSettings', -'SchemaView', -'SeekRequest', -'SeekResponse', -'Snapshot', -'StreamingPullRequest', -'StreamingPullResponse', -'SubscriberClient', -'Subscription', -'Topic', -'UpdateSnapshotRequest', -'UpdateSubscriptionRequest', -'UpdateTopicRequest', -'ValidateMessageRequest', -'ValidateMessageResponse', -'ValidateSchemaRequest', -'ValidateSchemaResponse', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json deleted file mode 100644 index 4c5b86bd13bc..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/gapic_metadata.json +++ /dev/null @@ -1,361 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.pubsub_v1", - "protoPackage": "google.pubsub.v1", - "schema": "1.0", - "services": { - "Publisher": { - "clients": { - "grpc": { - "libraryClient": "PublisherClient", - "rpcs": { - "CreateTopic": { - "methods": [ - "create_topic" - ] - }, - "DeleteTopic": { - "methods": [ - "delete_topic" - ] - }, - "DetachSubscription": { - "methods": [ - "detach_subscription" - ] - }, - "GetTopic": { - "methods": [ - "get_topic" - ] - }, - "ListTopicSnapshots": { - "methods": [ - "list_topic_snapshots" - ] - }, - "ListTopicSubscriptions": { - "methods": [ - "list_topic_subscriptions" - ] - }, - "ListTopics": { - "methods": [ - "list_topics" - ] - }, - "Publish": { - "methods": [ - "publish" - ] - }, - "UpdateTopic": { - "methods": [ - "update_topic" - ] - } - } - }, - "grpc-async": { - "libraryClient": "PublisherAsyncClient", - "rpcs": { - "CreateTopic": { - "methods": [ - "create_topic" - ] - }, - "DeleteTopic": { - "methods": [ - "delete_topic" - ] - }, - "DetachSubscription": { - "methods": [ - "detach_subscription" - ] - 
}, - "GetTopic": { - "methods": [ - "get_topic" - ] - }, - "ListTopicSnapshots": { - "methods": [ - "list_topic_snapshots" - ] - }, - "ListTopicSubscriptions": { - "methods": [ - "list_topic_subscriptions" - ] - }, - "ListTopics": { - "methods": [ - "list_topics" - ] - }, - "Publish": { - "methods": [ - "publish" - ] - }, - "UpdateTopic": { - "methods": [ - "update_topic" - ] - } - } - } - } - }, - "SchemaService": { - "clients": { - "grpc": { - "libraryClient": "SchemaServiceClient", - "rpcs": { - "CreateSchema": { - "methods": [ - "create_schema" - ] - }, - "DeleteSchema": { - "methods": [ - "delete_schema" - ] - }, - "GetSchema": { - "methods": [ - "get_schema" - ] - }, - "ListSchemas": { - "methods": [ - "list_schemas" - ] - }, - "ValidateMessage": { - "methods": [ - "validate_message" - ] - }, - "ValidateSchema": { - "methods": [ - "validate_schema" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SchemaServiceAsyncClient", - "rpcs": { - "CreateSchema": { - "methods": [ - "create_schema" - ] - }, - "DeleteSchema": { - "methods": [ - "delete_schema" - ] - }, - "GetSchema": { - "methods": [ - "get_schema" - ] - }, - "ListSchemas": { - "methods": [ - "list_schemas" - ] - }, - "ValidateMessage": { - "methods": [ - "validate_message" - ] - }, - "ValidateSchema": { - "methods": [ - "validate_schema" - ] - } - } - } - } - }, - "Subscriber": { - "clients": { - "grpc": { - "libraryClient": "SubscriberClient", - "rpcs": { - "Acknowledge": { - "methods": [ - "acknowledge" - ] - }, - "CreateSnapshot": { - "methods": [ - "create_snapshot" - ] - }, - "CreateSubscription": { - "methods": [ - "create_subscription" - ] - }, - "DeleteSnapshot": { - "methods": [ - "delete_snapshot" - ] - }, - "DeleteSubscription": { - "methods": [ - "delete_subscription" - ] - }, - "GetSnapshot": { - "methods": [ - "get_snapshot" - ] - }, - "GetSubscription": { - "methods": [ - "get_subscription" - ] - }, - "ListSnapshots": { - "methods": [ - "list_snapshots" - ] - }, - 
"ListSubscriptions": { - "methods": [ - "list_subscriptions" - ] - }, - "ModifyAckDeadline": { - "methods": [ - "modify_ack_deadline" - ] - }, - "ModifyPushConfig": { - "methods": [ - "modify_push_config" - ] - }, - "Pull": { - "methods": [ - "pull" - ] - }, - "Seek": { - "methods": [ - "seek" - ] - }, - "StreamingPull": { - "methods": [ - "streaming_pull" - ] - }, - "UpdateSnapshot": { - "methods": [ - "update_snapshot" - ] - }, - "UpdateSubscription": { - "methods": [ - "update_subscription" - ] - } - } - }, - "grpc-async": { - "libraryClient": "SubscriberAsyncClient", - "rpcs": { - "Acknowledge": { - "methods": [ - "acknowledge" - ] - }, - "CreateSnapshot": { - "methods": [ - "create_snapshot" - ] - }, - "CreateSubscription": { - "methods": [ - "create_subscription" - ] - }, - "DeleteSnapshot": { - "methods": [ - "delete_snapshot" - ] - }, - "DeleteSubscription": { - "methods": [ - "delete_subscription" - ] - }, - "GetSnapshot": { - "methods": [ - "get_snapshot" - ] - }, - "GetSubscription": { - "methods": [ - "get_subscription" - ] - }, - "ListSnapshots": { - "methods": [ - "list_snapshots" - ] - }, - "ListSubscriptions": { - "methods": [ - "list_subscriptions" - ] - }, - "ModifyAckDeadline": { - "methods": [ - "modify_ack_deadline" - ] - }, - "ModifyPushConfig": { - "methods": [ - "modify_push_config" - ] - }, - "Pull": { - "methods": [ - "pull" - ] - }, - "Seek": { - "methods": [ - "seek" - ] - }, - "StreamingPull": { - "methods": [ - "streaming_pull" - ] - }, - "UpdateSnapshot": { - "methods": [ - "update_snapshot" - ] - }, - "UpdateSubscription": { - "methods": [ - "update_subscription" - ] - } - } - } - } - } - } -} diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed deleted file mode 100644 index 1cec9a5ba1ab..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file 
for PEP 561. -# The google-cloud-pubsub package uses inline types. diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py deleted file mode 100644 index e8e1c3845db5..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py deleted file mode 100644 index ebfdcc3789d8..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import PublisherClient -from .async_client import PublisherAsyncClient - -__all__ = ( - 'PublisherClient', - 'PublisherAsyncClient', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py deleted file mode 100644 index 48eeca8d0fcc..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/async_client.py +++ /dev/null @@ -1,1408 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.pubsub_v1.services.publisher import pagers -from google.pubsub_v1.types import pubsub -from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport -from .client import PublisherClient - - -class PublisherAsyncClient: - """The service that an application uses to manipulate topics, - and to send messages to a topic. 
- """ - - _client: PublisherClient - - DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT - - schema_path = staticmethod(PublisherClient.schema_path) - parse_schema_path = staticmethod(PublisherClient.parse_schema_path) - subscription_path = staticmethod(PublisherClient.subscription_path) - parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) - topic_path = staticmethod(PublisherClient.topic_path) - parse_topic_path = staticmethod(PublisherClient.parse_topic_path) - common_billing_account_path = staticmethod(PublisherClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(PublisherClient.parse_common_billing_account_path) - common_folder_path = staticmethod(PublisherClient.common_folder_path) - parse_common_folder_path = staticmethod(PublisherClient.parse_common_folder_path) - common_organization_path = staticmethod(PublisherClient.common_organization_path) - parse_common_organization_path = staticmethod(PublisherClient.parse_common_organization_path) - common_project_path = staticmethod(PublisherClient.common_project_path) - parse_common_project_path = staticmethod(PublisherClient.parse_common_project_path) - common_location_path = staticmethod(PublisherClient.common_location_path) - parse_common_location_path = staticmethod(PublisherClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherAsyncClient: The constructed client. 
- """ - return PublisherClient.from_service_account_info.__func__(PublisherAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherAsyncClient: The constructed client. - """ - return PublisherClient.from_service_account_file.__func__(PublisherAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return PublisherClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> PublisherTransport: - """Returns the transport used by the client instance. - - Returns: - PublisherTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(PublisherClient).get_transport_class, type(PublisherClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, PublisherTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the publisher client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.PublisherTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = PublisherClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_topic(self, - request: Union[pubsub.Topic, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Topic: - r"""Creates the given topic with the given name. See the [resource - name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_create_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.Topic( - name="name_value", - ) - - # Make the request - response = await client.create_topic(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.Topic, dict]): - The request object. A topic resource. - name (:class:`str`): - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` - must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be - between 3 and 255 characters in length, and it must not - start with ``"goog"``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Topic: - A topic resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.Topic(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def update_topic(self, - request: Union[pubsub.UpdateTopicRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Topic: - r"""Updates an existing topic. Note that certain - properties of a topic are not modifiable. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_update_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - topic = pubsub_v1.Topic() - topic.name = "name_value" - - request = pubsub_v1.UpdateTopicRequest( - topic=topic, - ) - - # Make the request - response = await client.update_topic(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): - The request object. Request for the UpdateTopic method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Topic: - A topic resource. - """ - # Create or coerce a protobuf request object. - request = pubsub.UpdateTopicRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic.name", request.topic.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def publish(self, - request: Union[pubsub.PublishRequest, dict] = None, - *, - topic: str = None, - messages: Sequence[pubsub.PubsubMessage] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.PublishResponse: - r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if - the topic does not exist. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_publish(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.PublishRequest( - topic="topic_value", - ) - - # Make the request - response = await client.publish(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.PublishRequest, dict]): - The request object. Request for the Publish method. - topic (:class:`str`): - Required. The messages in the request will be published - on this topic. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - messages (:class:`Sequence[google.pubsub_v1.types.PubsubMessage]`): - Required. The messages to publish. - This corresponds to the ``messages`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.pubsub_v1.types.PublishResponse: - Response for the Publish method. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic, messages]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.PublishRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - if messages: - request.messages.extend(messages) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.publish, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.Cancelled, - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_topic(self, - request: Union[pubsub.GetTopicRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Topic: - r"""Gets the configuration of a topic. 
- - .. code-block:: python - - from google import pubsub_v1 - - async def sample_get_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetTopicRequest( - topic="topic_value", - ) - - # Make the request - response = await client.get_topic(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): - The request object. Request for the GetTopic method. - topic (:class:`str`): - Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Topic: - A topic resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.GetTopicRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_topics(self, - request: Union[pubsub.ListTopicsRequest, dict] = None, - *, - project: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTopicsAsyncPager: - r"""Lists matching topics. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_list_topics(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_topics(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): - The request object. Request for the `ListTopics` method. - project (:class:`str`): - Required. The name of the project in which to list - topics. Format is ``projects/{project-id}``. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. 
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager: - Response for the ListTopics method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ListTopicsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project is not None: - request.project = project - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_topics, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project", request.project), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. 
- response = pagers.ListTopicsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_topic_subscriptions(self, - request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTopicSubscriptionsAsyncPager: - r"""Lists the names of the attached subscriptions on this - topic. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_list_topic_subscriptions(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSubscriptionsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): - The request object. Request for the - `ListTopicSubscriptions` method. - topic (:class:`str`): - Required. The name of the topic that subscriptions are - attached to. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager: - Response for the ListTopicSubscriptions method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ListTopicSubscriptionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_topic_subscriptions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTopicSubscriptionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_topic_snapshots(self, - request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTopicSnapshotsAsyncPager: - r"""Lists the names of the snapshots on this topic. 
Snapshots are - used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_list_topic_snapshots(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSnapshotsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_snapshots(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): - The request object. Request for the `ListTopicSnapshots` - method. - topic (:class:`str`): - Required. The name of the topic that snapshots are - attached to. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager: - Response for the ListTopicSnapshots method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ListTopicSnapshotsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_topic_snapshots, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListTopicSnapshotsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_topic(self, - request: Union[pubsub.DeleteTopicRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if - the topic does not exist. 
After a topic is deleted, a new topic - may be created with the same name; this is an entirely new topic - with none of the old configuration or subscriptions. Existing - subscriptions to this topic are not deleted, but their ``topic`` - field is set to ``_deleted-topic_``. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_delete_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteTopicRequest( - topic="topic_value", - ) - - # Make the request - await client.delete_topic(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): - The request object. Request for the `DeleteTopic` - method. - topic (:class:`str`): - Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.DeleteTopicRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def detach_subscription(self, - request: Union[pubsub.DetachSubscriptionRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.DetachSubscriptionResponse: - r"""Detaches a subscription from this topic. All messages retained - in the subscription are dropped. Subsequent ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. If - the subscription is a push subscription, pushes to the endpoint - will stop. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_detach_subscription(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DetachSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = await client.detach_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): - The request object. Request for the DetachSubscription - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.DetachSubscriptionResponse: - Response for the DetachSubscription - method. Reserved for future use. - - """ - # Create or coerce a protobuf request object. - request = pubsub.DetachSubscriptionRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.detach_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. 
- It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. 
- if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does - not have a policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). 
A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. - - If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~iam_policy_pb2.PolicyTestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "PublisherAsyncClient", -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py deleted file mode 100644 index a3fd8003d853..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/client.py +++ /dev/null @@ -1,1559 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.pubsub_v1.services.publisher import pagers -from google.pubsub_v1.types import pubsub -from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import PublisherGrpcTransport -from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport - - -class PublisherClientMeta(type): - """Metaclass for the Publisher client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] - _transport_registry["grpc"] = PublisherGrpcTransport - _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[PublisherTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class PublisherClient(metaclass=PublisherClientMeta): - """The service that an application uses to manipulate topics, - and to send messages to a topic. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "pubsub.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - PublisherClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> PublisherTransport: - """Returns the transport used by the client instance. - - Returns: - PublisherTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def schema_path(project: str,schema: str,) -> str: - """Returns a fully-qualified schema string.""" - return "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) - - @staticmethod - def parse_schema_path(path: str) -> Dict[str,str]: - """Parses a schema path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def subscription_path(project: str,subscription: str,) -> str: - """Returns a fully-qualified subscription string.""" - return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - - @staticmethod - def parse_subscription_path(path: str) -> Dict[str,str]: - """Parses a subscription path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def topic_path(project: str,topic: 
str,) -> str: - """Returns a fully-qualified topic string.""" - return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - - @staticmethod - def parse_topic_path(path: str) -> Dict[str,str]: - """Parses a topic path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = 
re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. 
- - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, PublisherTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the publisher client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- transport (Union[str, PublisherTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, PublisherTransport): - # transport is a PublisherTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - def create_topic(self, - request: Union[pubsub.Topic, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Topic: - r"""Creates the given topic with the given name. 
See the [resource - name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). - - .. code-block:: python - - from google import pubsub_v1 - - def sample_create_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.Topic( - name="name_value", - ) - - # Make the request - response = client.create_topic(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.Topic, dict]): - The request object. A topic resource. - name (str): - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` - must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be - between 3 and 255 characters in length, and it must not - start with ``"goog"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Topic: - A topic resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.Topic. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.Topic): - request = pubsub.Topic(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_topic] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_topic(self, - request: Union[pubsub.UpdateTopicRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Topic: - r"""Updates an existing topic. Note that certain - properties of a topic are not modifiable. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_update_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - topic = pubsub_v1.Topic() - topic.name = "name_value" - - request = pubsub_v1.UpdateTopicRequest( - topic=topic, - ) - - # Make the request - response = client.update_topic(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): - The request object. Request for the UpdateTopic method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.pubsub_v1.types.Topic: - A topic resource. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.UpdateTopicRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.UpdateTopicRequest): - request = pubsub.UpdateTopicRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_topic] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic.name", request.topic.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def publish(self, - request: Union[pubsub.PublishRequest, dict] = None, - *, - topic: str = None, - messages: Sequence[pubsub.PubsubMessage] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.PublishResponse: - r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if - the topic does not exist. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_publish(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.PublishRequest( - topic="topic_value", - ) - - # Make the request - response = client.publish(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.PublishRequest, dict]): - The request object. Request for the Publish method. - topic (str): - Required. The messages in the request will be published - on this topic. 
Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - messages (Sequence[google.pubsub_v1.types.PubsubMessage]): - Required. The messages to publish. - This corresponds to the ``messages`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.PublishResponse: - Response for the Publish method. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic, messages]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.PublishRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.PublishRequest): - request = pubsub.PublishRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - if messages is not None: - request.messages = messages - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.publish] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_topic(self, - request: Union[pubsub.GetTopicRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Topic: - r"""Gets the configuration of a topic. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_get_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.GetTopicRequest( - topic="topic_value", - ) - - # Make the request - response = client.get_topic(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): - The request object. Request for the GetTopic method. - topic (str): - Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Topic: - A topic resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.GetTopicRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.GetTopicRequest): - request = pubsub.GetTopicRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_topic] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_topics(self, - request: Union[pubsub.ListTopicsRequest, dict] = None, - *, - project: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTopicsPager: - r"""Lists matching topics. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_list_topics(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_topics(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): - The request object. 
Request for the `ListTopics` method. - project (str): - Required. The name of the project in which to list - topics. Format is ``projects/{project-id}``. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.publisher.pagers.ListTopicsPager: - Response for the ListTopics method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListTopicsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ListTopicsRequest): - request = pubsub.ListTopicsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project is not None: - request.project = project - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_topics] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project", request.project), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTopicsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_topic_subscriptions(self, - request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTopicSubscriptionsPager: - r"""Lists the names of the attached subscriptions on this - topic. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_list_topic_subscriptions(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSubscriptionsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): - The request object. Request for the - `ListTopicSubscriptions` method. - topic (str): - Required. The name of the topic that subscriptions are - attached to. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager: - Response for the ListTopicSubscriptions method. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListTopicSubscriptionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): - request = pubsub.ListTopicSubscriptionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_topic_subscriptions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTopicSubscriptionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_topic_snapshots(self, - request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListTopicSnapshotsPager: - r"""Lists the names of the snapshots on this topic. Snapshots are - used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_list_topic_snapshots(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSnapshotsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_snapshots(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): - The request object. Request for the `ListTopicSnapshots` - method. - topic (str): - Required. The name of the topic that snapshots are - attached to. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager: - Response for the ListTopicSnapshots method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListTopicSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ListTopicSnapshotsRequest): - request = pubsub.ListTopicSnapshotsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_topic_snapshots] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListTopicSnapshotsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_topic(self, - request: Union[pubsub.DeleteTopicRequest, dict] = None, - *, - topic: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if - the topic does not exist. 
After a topic is deleted, a new topic - may be created with the same name; this is an entirely new topic - with none of the old configuration or subscriptions. Existing - subscriptions to this topic are not deleted, but their ``topic`` - field is set to ``_deleted-topic_``. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_delete_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteTopicRequest( - topic="topic_value", - ) - - # Make the request - client.delete_topic(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): - The request object. Request for the `DeleteTopic` - method. - topic (str): - Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DeleteTopicRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, pubsub.DeleteTopicRequest): - request = pubsub.DeleteTopicRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if topic is not None: - request.topic = topic - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_topic] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("topic", request.topic), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def detach_subscription(self, - request: Union[pubsub.DetachSubscriptionRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.DetachSubscriptionResponse: - r"""Detaches a subscription from this topic. All messages retained - in the subscription are dropped. Subsequent ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. If - the subscription is a push subscription, pushes to the endpoint - will stop. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_detach_subscription(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.DetachSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = client.detach_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): - The request object. Request for the DetachSubscription - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.DetachSubscriptionResponse: - Response for the DetachSubscription - method. Reserved for future use. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DetachSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.DetachSubscriptionRequest): - request = pubsub.DetachSubscriptionRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.detach_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. 
Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < 
timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. 
A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "PublisherClient", -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py deleted file mode 100644 index 0f25db27ee3a..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/pagers.py +++ /dev/null @@ -1,381 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.pubsub_v1.types import pubsub - - -class ListTopicsPager: - """A pager for iterating through ``list_topics`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListTopicsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``topics`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTopics`` requests and continue to iterate - through the ``topics`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., pubsub.ListTopicsResponse], - request: pubsub.ListTopicsRequest, - response: pubsub.ListTopicsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListTopicsRequest): - The initial request object. - response (google.pubsub_v1.types.ListTopicsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = pubsub.ListTopicsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[pubsub.ListTopicsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[pubsub.Topic]: - for page in self.pages: - yield from page.topics - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTopicsAsyncPager: - """A pager for iterating through ``list_topics`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListTopicsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``topics`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTopics`` requests and continue to iterate - through the ``topics`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListTopicsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[pubsub.ListTopicsResponse]], - request: pubsub.ListTopicsRequest, - response: pubsub.ListTopicsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListTopicsRequest): - The initial request object. - response (google.pubsub_v1.types.ListTopicsResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListTopicsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[pubsub.ListTopicsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[pubsub.Topic]: - async def async_generator(): - async for page in self.pages: - for response in page.topics: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTopicSubscriptionsPager: - """A pager for iterating through ``list_topic_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``subscriptions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListTopicSubscriptions`` requests and continue to iterate - through the ``subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., pubsub.ListTopicSubscriptionsResponse], - request: pubsub.ListTopicSubscriptionsRequest, - response: pubsub.ListTopicSubscriptionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. 
- - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): - The initial request object. - response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListTopicSubscriptionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[pubsub.ListTopicSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[str]: - for page in self.pages: - yield from page.subscriptions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTopicSubscriptionsAsyncPager: - """A pager for iterating through ``list_topic_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``subscriptions`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTopicSubscriptions`` requests and continue to iterate - through the ``subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListTopicSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[pubsub.ListTopicSubscriptionsResponse]], - request: pubsub.ListTopicSubscriptionsRequest, - response: pubsub.ListTopicSubscriptionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListTopicSubscriptionsRequest): - The initial request object. - response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListTopicSubscriptionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[pubsub.ListTopicSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[str]: - async def async_generator(): - async for page in self.pages: - for response in page.subscriptions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTopicSnapshotsPager: - """A pager for iterating through ``list_topic_snapshots`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``snapshots`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListTopicSnapshots`` requests and continue to iterate - through the ``snapshots`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., pubsub.ListTopicSnapshotsResponse], - request: pubsub.ListTopicSnapshotsRequest, - response: pubsub.ListTopicSnapshotsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListTopicSnapshotsRequest): - The initial request object. - response (google.pubsub_v1.types.ListTopicSnapshotsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListTopicSnapshotsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[pubsub.ListTopicSnapshotsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[str]: - for page in self.pages: - yield from page.snapshots - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListTopicSnapshotsAsyncPager: - """A pager for iterating through ``list_topic_snapshots`` requests. 
- - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``snapshots`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListTopicSnapshots`` requests and continue to iterate - through the ``snapshots`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListTopicSnapshotsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[pubsub.ListTopicSnapshotsResponse]], - request: pubsub.ListTopicSnapshotsRequest, - response: pubsub.ListTopicSnapshotsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListTopicSnapshotsRequest): - The initial request object. - response (google.pubsub_v1.types.ListTopicSnapshotsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = pubsub.ListTopicSnapshotsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[pubsub.ListTopicSnapshotsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[str]: - async def async_generator(): - async for page in self.pages: - for response in page.snapshots: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py deleted file mode 100644 index 362fe78b8e0c..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import PublisherTransport -from .grpc import PublisherGrpcTransport -from .grpc_asyncio import PublisherGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] -_transport_registry['grpc'] = PublisherGrpcTransport -_transport_registry['grpc_asyncio'] = PublisherGrpcAsyncIOTransport - -__all__ = ( - 'PublisherTransport', - 'PublisherGrpcTransport', - 'PublisherGrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py deleted file mode 100644 index f995d6728d4f..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/base.py +++ /dev/null @@ -1,364 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import pubsub - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-pubsub', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class PublisherTransport(abc.ABC): - """Abstract transport class for Publisher.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', - ) - - DEFAULT_HOST: str = 'pubsub.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_topic: gapic_v1.method.wrap_method( - self.create_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_topic: gapic_v1.method.wrap_method( - self.update_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.publish: gapic_v1.method.wrap_method( - self.publish, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.Cancelled, - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_topic: gapic_v1.method.wrap_method( - self.get_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_topics: gapic_v1.method.wrap_method( - self.list_topics, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_topic_subscriptions: gapic_v1.method.wrap_method( - self.list_topic_subscriptions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, 
predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_topic_snapshots: gapic_v1.method.wrap_method( - self.list_topic_snapshots, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_topic: gapic_v1.method.wrap_method( - self.delete_topic, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.detach_subscription: gapic_v1.method.wrap_method( - self.detach_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_topic(self) -> Callable[ - [pubsub.Topic], - Union[ - pubsub.Topic, - Awaitable[pubsub.Topic] - ]]: - raise NotImplementedError() - - @property - def update_topic(self) -> Callable[ - [pubsub.UpdateTopicRequest], - Union[ - pubsub.Topic, - Awaitable[pubsub.Topic] - ]]: - raise NotImplementedError() - - @property - def publish(self) -> Callable[ - [pubsub.PublishRequest], - Union[ - pubsub.PublishResponse, - Awaitable[pubsub.PublishResponse] - ]]: - raise NotImplementedError() - - @property - def get_topic(self) -> Callable[ - [pubsub.GetTopicRequest], - Union[ - pubsub.Topic, - Awaitable[pubsub.Topic] - ]]: - raise NotImplementedError() - - @property - def list_topics(self) -> Callable[ - [pubsub.ListTopicsRequest], - Union[ - pubsub.ListTopicsResponse, - Awaitable[pubsub.ListTopicsResponse] - ]]: - raise NotImplementedError() - - @property - def list_topic_subscriptions(self) -> Callable[ - [pubsub.ListTopicSubscriptionsRequest], - Union[ - pubsub.ListTopicSubscriptionsResponse, - Awaitable[pubsub.ListTopicSubscriptionsResponse] - ]]: - raise NotImplementedError() - - @property - def list_topic_snapshots(self) -> Callable[ - [pubsub.ListTopicSnapshotsRequest], - Union[ - pubsub.ListTopicSnapshotsResponse, - Awaitable[pubsub.ListTopicSnapshotsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_topic(self) -> Callable[ - [pubsub.DeleteTopicRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def detach_subscription(self) -> Callable[ - [pubsub.DetachSubscriptionRequest], - Union[ - pubsub.DetachSubscriptionResponse, - Awaitable[pubsub.DetachSubscriptionResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def 
get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'PublisherTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc.py deleted file mode 100644 index 9a77ed68f913..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc.py +++ /dev/null @@ -1,572 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import pubsub -from .base import PublisherTransport, DEFAULT_CLIENT_INFO - - -class PublisherGrpcTransport(PublisherTransport): - """gRPC backend transport for Publisher. - - The service that an application uses to manipulate topics, - and to send messages to a topic. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_topic(self) -> Callable[ - [pubsub.Topic], - pubsub.Topic]: - r"""Return a callable for the create topic method over gRPC. - - Creates the given topic with the given name. See the [resource - name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). - - Returns: - Callable[[~.Topic], - ~.Topic]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_topic' not in self._stubs: - self._stubs['create_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/CreateTopic', - request_serializer=pubsub.Topic.serialize, - response_deserializer=pubsub.Topic.deserialize, - ) - return self._stubs['create_topic'] - - @property - def update_topic(self) -> Callable[ - [pubsub.UpdateTopicRequest], - pubsub.Topic]: - r"""Return a callable for the update topic method over gRPC. - - Updates an existing topic. Note that certain - properties of a topic are not modifiable. - - Returns: - Callable[[~.UpdateTopicRequest], - ~.Topic]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_topic' not in self._stubs: - self._stubs['update_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/UpdateTopic', - request_serializer=pubsub.UpdateTopicRequest.serialize, - response_deserializer=pubsub.Topic.deserialize, - ) - return self._stubs['update_topic'] - - @property - def publish(self) -> Callable[ - [pubsub.PublishRequest], - pubsub.PublishResponse]: - r"""Return a callable for the publish method over gRPC. - - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if - the topic does not exist. - - Returns: - Callable[[~.PublishRequest], - ~.PublishResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'publish' not in self._stubs: - self._stubs['publish'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/Publish', - request_serializer=pubsub.PublishRequest.serialize, - response_deserializer=pubsub.PublishResponse.deserialize, - ) - return self._stubs['publish'] - - @property - def get_topic(self) -> Callable[ - [pubsub.GetTopicRequest], - pubsub.Topic]: - r"""Return a callable for the get topic method over gRPC. - - Gets the configuration of a topic. - - Returns: - Callable[[~.GetTopicRequest], - ~.Topic]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_topic' not in self._stubs: - self._stubs['get_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/GetTopic', - request_serializer=pubsub.GetTopicRequest.serialize, - response_deserializer=pubsub.Topic.deserialize, - ) - return self._stubs['get_topic'] - - @property - def list_topics(self) -> Callable[ - [pubsub.ListTopicsRequest], - pubsub.ListTopicsResponse]: - r"""Return a callable for the list topics method over gRPC. - - Lists matching topics. - - Returns: - Callable[[~.ListTopicsRequest], - ~.ListTopicsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_topics' not in self._stubs: - self._stubs['list_topics'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopics', - request_serializer=pubsub.ListTopicsRequest.serialize, - response_deserializer=pubsub.ListTopicsResponse.deserialize, - ) - return self._stubs['list_topics'] - - @property - def list_topic_subscriptions(self) -> Callable[ - [pubsub.ListTopicSubscriptionsRequest], - pubsub.ListTopicSubscriptionsResponse]: - r"""Return a callable for the list topic subscriptions method over gRPC. - - Lists the names of the attached subscriptions on this - topic. - - Returns: - Callable[[~.ListTopicSubscriptionsRequest], - ~.ListTopicSubscriptionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_topic_subscriptions' not in self._stubs: - self._stubs['list_topic_subscriptions'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSubscriptions', - request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, - response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, - ) - return self._stubs['list_topic_subscriptions'] - - @property - def list_topic_snapshots(self) -> Callable[ - [pubsub.ListTopicSnapshotsRequest], - pubsub.ListTopicSnapshotsResponse]: - r"""Return a callable for the list topic snapshots method over gRPC. - - Lists the names of the snapshots on this topic. Snapshots are - used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. 
- - Returns: - Callable[[~.ListTopicSnapshotsRequest], - ~.ListTopicSnapshotsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_topic_snapshots' not in self._stubs: - self._stubs['list_topic_snapshots'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSnapshots', - request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, - response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, - ) - return self._stubs['list_topic_snapshots'] - - @property - def delete_topic(self) -> Callable[ - [pubsub.DeleteTopicRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete topic method over gRPC. - - Deletes the topic with the given name. Returns ``NOT_FOUND`` if - the topic does not exist. After a topic is deleted, a new topic - may be created with the same name; this is an entirely new topic - with none of the old configuration or subscriptions. Existing - subscriptions to this topic are not deleted, but their ``topic`` - field is set to ``_deleted-topic_``. - - Returns: - Callable[[~.DeleteTopicRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_topic' not in self._stubs: - self._stubs['delete_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/DeleteTopic', - request_serializer=pubsub.DeleteTopicRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_topic'] - - @property - def detach_subscription(self) -> Callable[ - [pubsub.DetachSubscriptionRequest], - pubsub.DetachSubscriptionResponse]: - r"""Return a callable for the detach subscription method over gRPC. - - Detaches a subscription from this topic. All messages retained - in the subscription are dropped. Subsequent ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. If - the subscription is a push subscription, pushes to the endpoint - will stop. - - Returns: - Callable[[~.DetachSubscriptionRequest], - ~.DetachSubscriptionResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'detach_subscription' not in self._stubs: - self._stubs['detach_subscription'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/DetachSubscription', - request_serializer=pubsub.DetachSubscriptionRequest.serialize, - response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, - ) - return self._stubs['detach_subscription'] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'PublisherGrpcTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py deleted file mode 100644 index 38164d7144b6..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ /dev/null @@ -1,572 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import pubsub -from .base import PublisherTransport, DEFAULT_CLIENT_INFO -from .grpc import PublisherGrpcTransport - - -class PublisherGrpcAsyncIOTransport(PublisherTransport): - """gRPC AsyncIO backend transport for Publisher. - - The service that an application uses to manipulate topics, - and to send messages to a topic. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_topic(self) -> Callable[ - [pubsub.Topic], - Awaitable[pubsub.Topic]]: - r"""Return a callable for the create topic method over gRPC. - - Creates the given topic with the given name. See the [resource - name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). - - Returns: - Callable[[~.Topic], - Awaitable[~.Topic]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_topic' not in self._stubs: - self._stubs['create_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/CreateTopic', - request_serializer=pubsub.Topic.serialize, - response_deserializer=pubsub.Topic.deserialize, - ) - return self._stubs['create_topic'] - - @property - def update_topic(self) -> Callable[ - [pubsub.UpdateTopicRequest], - Awaitable[pubsub.Topic]]: - r"""Return a callable for the update topic method over gRPC. - - Updates an existing topic. Note that certain - properties of a topic are not modifiable. - - Returns: - Callable[[~.UpdateTopicRequest], - Awaitable[~.Topic]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_topic' not in self._stubs: - self._stubs['update_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/UpdateTopic', - request_serializer=pubsub.UpdateTopicRequest.serialize, - response_deserializer=pubsub.Topic.deserialize, - ) - return self._stubs['update_topic'] - - @property - def publish(self) -> Callable[ - [pubsub.PublishRequest], - Awaitable[pubsub.PublishResponse]]: - r"""Return a callable for the publish method over gRPC. - - Adds one or more messages to the topic. Returns ``NOT_FOUND`` if - the topic does not exist. - - Returns: - Callable[[~.PublishRequest], - Awaitable[~.PublishResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'publish' not in self._stubs: - self._stubs['publish'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/Publish', - request_serializer=pubsub.PublishRequest.serialize, - response_deserializer=pubsub.PublishResponse.deserialize, - ) - return self._stubs['publish'] - - @property - def get_topic(self) -> Callable[ - [pubsub.GetTopicRequest], - Awaitable[pubsub.Topic]]: - r"""Return a callable for the get topic method over gRPC. - - Gets the configuration of a topic. - - Returns: - Callable[[~.GetTopicRequest], - Awaitable[~.Topic]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_topic' not in self._stubs: - self._stubs['get_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/GetTopic', - request_serializer=pubsub.GetTopicRequest.serialize, - response_deserializer=pubsub.Topic.deserialize, - ) - return self._stubs['get_topic'] - - @property - def list_topics(self) -> Callable[ - [pubsub.ListTopicsRequest], - Awaitable[pubsub.ListTopicsResponse]]: - r"""Return a callable for the list topics method over gRPC. - - Lists matching topics. - - Returns: - Callable[[~.ListTopicsRequest], - Awaitable[~.ListTopicsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_topics' not in self._stubs: - self._stubs['list_topics'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopics', - request_serializer=pubsub.ListTopicsRequest.serialize, - response_deserializer=pubsub.ListTopicsResponse.deserialize, - ) - return self._stubs['list_topics'] - - @property - def list_topic_subscriptions(self) -> Callable[ - [pubsub.ListTopicSubscriptionsRequest], - Awaitable[pubsub.ListTopicSubscriptionsResponse]]: - r"""Return a callable for the list topic subscriptions method over gRPC. - - Lists the names of the attached subscriptions on this - topic. - - Returns: - Callable[[~.ListTopicSubscriptionsRequest], - Awaitable[~.ListTopicSubscriptionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_topic_subscriptions' not in self._stubs: - self._stubs['list_topic_subscriptions'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSubscriptions', - request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, - response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, - ) - return self._stubs['list_topic_subscriptions'] - - @property - def list_topic_snapshots(self) -> Callable[ - [pubsub.ListTopicSnapshotsRequest], - Awaitable[pubsub.ListTopicSnapshotsResponse]]: - r"""Return a callable for the list topic snapshots method over gRPC. - - Lists the names of the snapshots on this topic. Snapshots are - used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - - Returns: - Callable[[~.ListTopicSnapshotsRequest], - Awaitable[~.ListTopicSnapshotsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_topic_snapshots' not in self._stubs: - self._stubs['list_topic_snapshots'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/ListTopicSnapshots', - request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, - response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, - ) - return self._stubs['list_topic_snapshots'] - - @property - def delete_topic(self) -> Callable[ - [pubsub.DeleteTopicRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete topic method over gRPC. - - Deletes the topic with the given name. Returns ``NOT_FOUND`` if - the topic does not exist. 
After a topic is deleted, a new topic - may be created with the same name; this is an entirely new topic - with none of the old configuration or subscriptions. Existing - subscriptions to this topic are not deleted, but their ``topic`` - field is set to ``_deleted-topic_``. - - Returns: - Callable[[~.DeleteTopicRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_topic' not in self._stubs: - self._stubs['delete_topic'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/DeleteTopic', - request_serializer=pubsub.DeleteTopicRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_topic'] - - @property - def detach_subscription(self) -> Callable[ - [pubsub.DetachSubscriptionRequest], - Awaitable[pubsub.DetachSubscriptionResponse]]: - r"""Return a callable for the detach subscription method over gRPC. - - Detaches a subscription from this topic. All messages retained - in the subscription are dropped. Subsequent ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. If - the subscription is a push subscription, pushes to the endpoint - will stop. - - Returns: - Callable[[~.DetachSubscriptionRequest], - Awaitable[~.DetachSubscriptionResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'detach_subscription' not in self._stubs: - self._stubs['detach_subscription'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Publisher/DetachSubscription', - request_serializer=pubsub.DetachSubscriptionRequest.serialize, - response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, - ) - return self._stubs['detach_subscription'] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'PublisherGrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py deleted file mode 100644 index 1883aa56de6a..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import SchemaServiceClient -from .async_client import SchemaServiceAsyncClient - -__all__ = ( - 'SchemaServiceClient', - 'SchemaServiceAsyncClient', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py deleted file mode 100644 index 3f01d265fd56..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/async_client.py +++ /dev/null @@ -1,1062 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.pubsub_v1.services.schema_service import pagers -from google.pubsub_v1.types import schema -from google.pubsub_v1.types import schema as gp_schema -from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport -from .client import SchemaServiceClient - - -class SchemaServiceAsyncClient: - """Service for doing schema-related operations.""" - - _client: SchemaServiceClient - - DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT - - schema_path = staticmethod(SchemaServiceClient.schema_path) - parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path) - common_billing_account_path = staticmethod(SchemaServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(SchemaServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(SchemaServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(SchemaServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(SchemaServiceClient.common_organization_path) - 
parse_common_organization_path = staticmethod(SchemaServiceClient.parse_common_organization_path) - common_project_path = staticmethod(SchemaServiceClient.common_project_path) - parse_common_project_path = staticmethod(SchemaServiceClient.parse_common_project_path) - common_location_path = staticmethod(SchemaServiceClient.common_location_path) - parse_common_location_path = staticmethod(SchemaServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SchemaServiceAsyncClient: The constructed client. - """ - return SchemaServiceClient.from_service_account_info.__func__(SchemaServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SchemaServiceAsyncClient: The constructed client. - """ - return SchemaServiceClient.from_service_account_file.__func__(SchemaServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return SchemaServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> SchemaServiceTransport: - """Returns the transport used by the client instance. - - Returns: - SchemaServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the schema service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.SchemaServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SchemaServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_schema(self, - request: Union[gp_schema.CreateSchemaRequest, dict] = None, - *, - parent: str = None, - schema: gp_schema.Schema = None, - schema_id: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gp_schema.Schema: - r"""Creates a schema. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_create_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.CreateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = await client.create_schema(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): - The request object. Request for the CreateSchema method. - parent (:class:`str`): - Required. The name of the project in which to create the - schema. Format is ``projects/{project-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schema (:class:`google.pubsub_v1.types.Schema`): - Required. The schema object to create. - - This schema's ``name`` parameter is ignored. The schema - object returned by CreateSchema will have a ``name`` - made using the given ``parent`` and ``schema_id``. - - This corresponds to the ``schema`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schema_id (:class:`str`): - The ID to use for the schema, which will become the - final component of the schema's resource name. - - See - https://cloud.google.com/pubsub/docs/admin#resource_names - for resource name constraints. - - This corresponds to the ``schema_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Schema: - A schema resource. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, schema, schema_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = gp_schema.CreateSchemaRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if schema is not None: - request.schema = schema - if schema_id is not None: - request.schema_id = schema_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_schema, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_schema(self, - request: Union[schema.GetSchemaRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schema.Schema: - r"""Gets a schema. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_get_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSchemaRequest( - name="name_value", - ) - - # Make the request - response = await client.get_schema(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): - The request object. Request for the GetSchema method. - name (:class:`str`): - Required. The name of the schema to get. Format is - ``projects/{project}/schemas/{schema}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Schema: - A schema resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = schema.GetSchemaRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_schema, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_schemas(self, - request: Union[schema.ListSchemasRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSchemasAsyncPager: - r"""Lists schemas in a project. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_list_schemas(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSchemasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_schemas(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): - The request object. Request for the `ListSchemas` - method. - parent (:class:`str`): - Required. The name of the project in which to list - schemas. Format is ``projects/{project-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager: - Response for the ListSchemas method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = schema.ListSchemasRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_schemas, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSchemasAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_schema(self, - request: Union[schema.DeleteSchemaRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a schema. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_delete_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSchemaRequest( - name="name_value", - ) - - # Make the request - await client.delete_schema(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): - The request object. Request for the `DeleteSchema` - method. - name (:class:`str`): - Required. Name of the schema to delete. Format is - ``projects/{project}/schemas/{schema}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = schema.DeleteSchemaRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_schema, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def validate_schema(self, - request: Union[gp_schema.ValidateSchemaRequest, dict] = None, - *, - parent: str = None, - schema: gp_schema.Schema = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gp_schema.ValidateSchemaResponse: - r"""Validates a schema. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_validate_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.ValidateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = await client.validate_schema(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): - The request object. Request for the `ValidateSchema` - method. - parent (:class:`str`): - Required. The name of the project in which to validate - schemas. Format is ``projects/{project-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schema (:class:`google.pubsub_v1.types.Schema`): - Required. The schema object to - validate. - - This corresponds to the ``schema`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.pubsub_v1.types.ValidateSchemaResponse: - Response for the ValidateSchema method. - Empty for now. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, schema]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = gp_schema.ValidateSchemaRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if schema is not None: - request.schema = schema - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.validate_schema, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def validate_message(self, - request: Union[schema.ValidateMessageRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schema.ValidateMessageResponse: - r"""Validates a message against a schema. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_validate_message(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ValidateMessageRequest( - name="name_value", - parent="parent_value", - ) - - # Make the request - response = await client.validate_message(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): - The request object. Request for the `ValidateMessage` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.ValidateMessageResponse: - Response for the ValidateMessage method. - Empty for now. - - """ - # Create or coerce a protobuf request object. - request = schema.ValidateMessageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.validate_message, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. 
- - Replaces any existing policy. - - Args: - request (:class:`~.policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does - not have a policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. - - If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "SchemaServiceAsyncClient", -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py deleted file mode 100644 index e6becf99be0c..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/client.py +++ /dev/null @@ -1,1262 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.pubsub_v1.services.schema_service import pagers -from google.pubsub_v1.types import schema -from google.pubsub_v1.types import schema as gp_schema -from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import SchemaServiceGrpcTransport -from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport - - -class SchemaServiceClientMeta(type): - """Metaclass for the SchemaService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] - _transport_registry["grpc"] = SchemaServiceGrpcTransport - _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[SchemaServiceTransport]: - """Returns an appropriate transport class. 
- - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SchemaServiceClient(metaclass=SchemaServiceClientMeta): - """Service for doing schema-related operations.""" - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "pubsub.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SchemaServiceClient: The constructed client. 
- """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SchemaServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SchemaServiceTransport: - """Returns the transport used by the client instance. - - Returns: - SchemaServiceTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def schema_path(project: str,schema: str,) -> str: - """Returns a fully-qualified schema string.""" - return "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) - - @staticmethod - def parse_schema_path(path: str) -> Dict[str,str]: - """Parses a schema path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - 
"""Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` is provided, use the provided one. - (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. 
- client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SchemaServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the schema service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, SchemaServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). 
However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, SchemaServiceTransport): - # transport is a SchemaServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - def create_schema(self, - request: Union[gp_schema.CreateSchemaRequest, dict] = None, - *, - parent: str = None, - schema: gp_schema.Schema = None, - schema_id: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gp_schema.Schema: - r"""Creates a schema. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_create_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.CreateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = client.create_schema(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): - The request object. Request for the CreateSchema method. - parent (str): - Required. The name of the project in which to create the - schema. Format is ``projects/{project-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schema (google.pubsub_v1.types.Schema): - Required. The schema object to create. - - This schema's ``name`` parameter is ignored. 
The schema - object returned by CreateSchema will have a ``name`` - made using the given ``parent`` and ``schema_id``. - - This corresponds to the ``schema`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schema_id (str): - The ID to use for the schema, which will become the - final component of the schema's resource name. - - See - https://cloud.google.com/pubsub/docs/admin#resource_names - for resource name constraints. - - This corresponds to the ``schema_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Schema: - A schema resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, schema, schema_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a gp_schema.CreateSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, gp_schema.CreateSchemaRequest): - request = gp_schema.CreateSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if schema is not None: - request.schema = schema - if schema_id is not None: - request.schema_id = schema_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_schema] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_schema(self, - request: Union[schema.GetSchemaRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schema.Schema: - r"""Gets a schema. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_get_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSchemaRequest( - name="name_value", - ) - - # Make the request - response = client.get_schema(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): - The request object. Request for the GetSchema method. - name (str): - Required. The name of the schema to get. Format is - ``projects/{project}/schemas/{schema}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.pubsub_v1.types.Schema: - A schema resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a schema.GetSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, schema.GetSchemaRequest): - request = schema.GetSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_schema] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_schemas(self, - request: Union[schema.ListSchemasRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSchemasPager: - r"""Lists schemas in a project. - - .. 
code-block:: python - - from google import pubsub_v1 - - def sample_list_schemas(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSchemasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_schemas(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): - The request object. Request for the `ListSchemas` - method. - parent (str): - Required. The name of the project in which to list - schemas. Format is ``projects/{project-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.schema_service.pagers.ListSchemasPager: - Response for the ListSchemas method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a schema.ListSchemasRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, schema.ListSchemasRequest): - request = schema.ListSchemasRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_schemas] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSchemasPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_schema(self, - request: Union[schema.DeleteSchemaRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a schema. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_delete_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSchemaRequest( - name="name_value", - ) - - # Make the request - client.delete_schema(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): - The request object. Request for the `DeleteSchema` - method. - name (str): - Required. Name of the schema to delete. Format is - ``projects/{project}/schemas/{schema}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a schema.DeleteSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, schema.DeleteSchemaRequest): - request = schema.DeleteSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_schema] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def validate_schema(self, - request: Union[gp_schema.ValidateSchemaRequest, dict] = None, - *, - parent: str = None, - schema: gp_schema.Schema = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> gp_schema.ValidateSchemaResponse: - r"""Validates a schema. - - .. 
code-block:: python - - from google import pubsub_v1 - - def sample_validate_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.ValidateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = client.validate_schema(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): - The request object. Request for the `ValidateSchema` - method. - parent (str): - Required. The name of the project in which to validate - schemas. Format is ``projects/{project-id}``. - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - schema (google.pubsub_v1.types.Schema): - Required. The schema object to - validate. - - This corresponds to the ``schema`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.ValidateSchemaResponse: - Response for the ValidateSchema method. - Empty for now. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, schema]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a gp_schema.ValidateSchemaRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, gp_schema.ValidateSchemaRequest): - request = gp_schema.ValidateSchemaRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if schema is not None: - request.schema = schema - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.validate_schema] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def validate_message(self, - request: Union[schema.ValidateMessageRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> schema.ValidateMessageResponse: - r"""Validates a message against a schema. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_validate_message(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.ValidateMessageRequest( - name="name_value", - parent="parent_value", - ) - - # Make the request - response = client.validate_message(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): - The request object. Request for the `ValidateMessage` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.ValidateMessageResponse: - Response for the ValidateMessage method. - Empty for now. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a schema.ValidateMessageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, schema.ValidateMessageRequest): - request = schema.ValidateMessageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.validate_message] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its 
features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. 
Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "SchemaServiceClient", -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py deleted file mode 100644 index 2c2e0e00eb76..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/pagers.py +++ /dev/null @@ -1,139 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.pubsub_v1.types import schema - - -class ListSchemasPager: - """A pager for iterating through ``list_schemas`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListSchemasResponse` object, and - provides an ``__iter__`` method to iterate through its - ``schemas`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSchemas`` requests and continue to iterate - through the ``schemas`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListSchemasResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., schema.ListSchemasResponse], - request: schema.ListSchemasRequest, - response: schema.ListSchemasResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListSchemasRequest): - The initial request object. - response (google.pubsub_v1.types.ListSchemasResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = schema.ListSchemasRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[schema.ListSchemasResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[schema.Schema]: - for page in self.pages: - yield from page.schemas - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSchemasAsyncPager: - """A pager for iterating through ``list_schemas`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListSchemasResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``schemas`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSchemas`` requests and continue to iterate - through the ``schemas`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListSchemasResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[schema.ListSchemasResponse]], - request: schema.ListSchemasRequest, - response: schema.ListSchemasResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListSchemasRequest): - The initial request object. - response (google.pubsub_v1.types.ListSchemasResponse): - The initial response object. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = schema.ListSchemasRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[schema.ListSchemasResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[schema.Schema]: - async def async_generator(): - async for page in self.pages: - for response in page.schemas: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py deleted file mode 100644 index 59b4fd088e07..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SchemaServiceTransport -from .grpc import SchemaServiceGrpcTransport -from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] -_transport_registry['grpc'] = SchemaServiceGrpcTransport -_transport_registry['grpc_asyncio'] = SchemaServiceGrpcAsyncIOTransport - -__all__ = ( - 'SchemaServiceTransport', - 'SchemaServiceGrpcTransport', - 'SchemaServiceGrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py deleted file mode 100644 index 6e24f13cc6b3..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/base.py +++ /dev/null @@ -1,255 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import schema -from google.pubsub_v1.types import schema as gp_schema - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-pubsub', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class SchemaServiceTransport(abc.ABC): - """Abstract transport class for SchemaService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', - ) - - DEFAULT_HOST: str = 'pubsub.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_schema: gapic_v1.method.wrap_method( - self.create_schema, - default_timeout=None, - client_info=client_info, - ), - self.get_schema: gapic_v1.method.wrap_method( - self.get_schema, - default_timeout=None, - client_info=client_info, - ), - self.list_schemas: gapic_v1.method.wrap_method( - self.list_schemas, - default_timeout=None, - client_info=client_info, - ), - self.delete_schema: gapic_v1.method.wrap_method( - self.delete_schema, - default_timeout=None, - client_info=client_info, - ), - self.validate_schema: gapic_v1.method.wrap_method( - self.validate_schema, - default_timeout=None, - client_info=client_info, - ), - self.validate_message: gapic_v1.method.wrap_method( - self.validate_message, - default_timeout=None, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_schema(self) -> Callable[ - [gp_schema.CreateSchemaRequest], - Union[ - gp_schema.Schema, - Awaitable[gp_schema.Schema] - ]]: - raise NotImplementedError() - - @property - def get_schema(self) -> Callable[ - [schema.GetSchemaRequest], - Union[ - schema.Schema, - Awaitable[schema.Schema] - ]]: - raise NotImplementedError() - - @property - def list_schemas(self) -> Callable[ - [schema.ListSchemasRequest], - Union[ - schema.ListSchemasResponse, - Awaitable[schema.ListSchemasResponse] - ]]: - raise NotImplementedError() - - @property - def delete_schema(self) -> Callable[ - [schema.DeleteSchemaRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def validate_schema(self) -> Callable[ - [gp_schema.ValidateSchemaRequest], - Union[ - gp_schema.ValidateSchemaResponse, - Awaitable[gp_schema.ValidateSchemaResponse] - ]]: - raise NotImplementedError() - - @property - def validate_message(self) -> Callable[ - [schema.ValidateMessageRequest], - Union[ - schema.ValidateMessageResponse, - Awaitable[schema.ValidateMessageResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'SchemaServiceTransport', -) diff --git 
a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py deleted file mode 100644 index b27c59f59a34..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc.py +++ /dev/null @@ -1,475 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import schema -from google.pubsub_v1.types import schema as gp_schema -from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO - - -class SchemaServiceGrpcTransport(SchemaServiceTransport): - """gRPC backend transport for SchemaService. - - Service for doing schema-related operations. 
- - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. 
This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ - return self._grpc_channel - - @property - def create_schema(self) -> Callable[ - [gp_schema.CreateSchemaRequest], - gp_schema.Schema]: - r"""Return a callable for the create schema method over gRPC. - - Creates a schema. - - Returns: - Callable[[~.CreateSchemaRequest], - ~.Schema]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_schema' not in self._stubs: - self._stubs['create_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/CreateSchema', - request_serializer=gp_schema.CreateSchemaRequest.serialize, - response_deserializer=gp_schema.Schema.deserialize, - ) - return self._stubs['create_schema'] - - @property - def get_schema(self) -> Callable[ - [schema.GetSchemaRequest], - schema.Schema]: - r"""Return a callable for the get schema method over gRPC. - - Gets a schema. - - Returns: - Callable[[~.GetSchemaRequest], - ~.Schema]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_schema' not in self._stubs: - self._stubs['get_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/GetSchema', - request_serializer=schema.GetSchemaRequest.serialize, - response_deserializer=schema.Schema.deserialize, - ) - return self._stubs['get_schema'] - - @property - def list_schemas(self) -> Callable[ - [schema.ListSchemasRequest], - schema.ListSchemasResponse]: - r"""Return a callable for the list schemas method over gRPC. - - Lists schemas in a project. 
- - Returns: - Callable[[~.ListSchemasRequest], - ~.ListSchemasResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_schemas' not in self._stubs: - self._stubs['list_schemas'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/ListSchemas', - request_serializer=schema.ListSchemasRequest.serialize, - response_deserializer=schema.ListSchemasResponse.deserialize, - ) - return self._stubs['list_schemas'] - - @property - def delete_schema(self) -> Callable[ - [schema.DeleteSchemaRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete schema method over gRPC. - - Deletes a schema. - - Returns: - Callable[[~.DeleteSchemaRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_schema' not in self._stubs: - self._stubs['delete_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/DeleteSchema', - request_serializer=schema.DeleteSchemaRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_schema'] - - @property - def validate_schema(self) -> Callable[ - [gp_schema.ValidateSchemaRequest], - gp_schema.ValidateSchemaResponse]: - r"""Return a callable for the validate schema method over gRPC. - - Validates a schema. - - Returns: - Callable[[~.ValidateSchemaRequest], - ~.ValidateSchemaResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'validate_schema' not in self._stubs: - self._stubs['validate_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/ValidateSchema', - request_serializer=gp_schema.ValidateSchemaRequest.serialize, - response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, - ) - return self._stubs['validate_schema'] - - @property - def validate_message(self) -> Callable[ - [schema.ValidateMessageRequest], - schema.ValidateMessageResponse]: - r"""Return a callable for the validate message method over gRPC. - - Validates a message against a schema. - - Returns: - Callable[[~.ValidateMessageRequest], - ~.ValidateMessageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'validate_message' not in self._stubs: - self._stubs['validate_message'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/ValidateMessage', - request_serializer=schema.ValidateMessageRequest.serialize, - response_deserializer=schema.ValidateMessageResponse.deserialize, - ) - return self._stubs['validate_message'] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - ~.Policy]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - ~.TestIamPermissionsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'SchemaServiceGrpcTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py deleted file mode 100644 index e7024367d521..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,475 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import schema -from google.pubsub_v1.types import schema as gp_schema -from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import SchemaServiceGrpcTransport - - -class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): - """gRPC AsyncIO backend transport for SchemaService. - - Service for doing schema-related operations. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'pubsub.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. 
- credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_schema(self) -> Callable[ - [gp_schema.CreateSchemaRequest], - Awaitable[gp_schema.Schema]]: - r"""Return a callable for the create schema method over gRPC. - - Creates a schema. - - Returns: - Callable[[~.CreateSchemaRequest], - Awaitable[~.Schema]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_schema' not in self._stubs: - self._stubs['create_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/CreateSchema', - request_serializer=gp_schema.CreateSchemaRequest.serialize, - response_deserializer=gp_schema.Schema.deserialize, - ) - return self._stubs['create_schema'] - - @property - def get_schema(self) -> Callable[ - [schema.GetSchemaRequest], - Awaitable[schema.Schema]]: - r"""Return a callable for the get schema method over gRPC. - - Gets a schema. - - Returns: - Callable[[~.GetSchemaRequest], - Awaitable[~.Schema]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_schema' not in self._stubs: - self._stubs['get_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/GetSchema', - request_serializer=schema.GetSchemaRequest.serialize, - response_deserializer=schema.Schema.deserialize, - ) - return self._stubs['get_schema'] - - @property - def list_schemas(self) -> Callable[ - [schema.ListSchemasRequest], - Awaitable[schema.ListSchemasResponse]]: - r"""Return a callable for the list schemas method over gRPC. - - Lists schemas in a project. - - Returns: - Callable[[~.ListSchemasRequest], - Awaitable[~.ListSchemasResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_schemas' not in self._stubs: - self._stubs['list_schemas'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/ListSchemas', - request_serializer=schema.ListSchemasRequest.serialize, - response_deserializer=schema.ListSchemasResponse.deserialize, - ) - return self._stubs['list_schemas'] - - @property - def delete_schema(self) -> Callable[ - [schema.DeleteSchemaRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete schema method over gRPC. - - Deletes a schema. - - Returns: - Callable[[~.DeleteSchemaRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_schema' not in self._stubs: - self._stubs['delete_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/DeleteSchema', - request_serializer=schema.DeleteSchemaRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_schema'] - - @property - def validate_schema(self) -> Callable[ - [gp_schema.ValidateSchemaRequest], - Awaitable[gp_schema.ValidateSchemaResponse]]: - r"""Return a callable for the validate schema method over gRPC. - - Validates a schema. - - Returns: - Callable[[~.ValidateSchemaRequest], - Awaitable[~.ValidateSchemaResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'validate_schema' not in self._stubs: - self._stubs['validate_schema'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/ValidateSchema', - request_serializer=gp_schema.ValidateSchemaRequest.serialize, - response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, - ) - return self._stubs['validate_schema'] - - @property - def validate_message(self) -> Callable[ - [schema.ValidateMessageRequest], - Awaitable[schema.ValidateMessageResponse]]: - r"""Return a callable for the validate message method over gRPC. - - Validates a message against a schema. - - Returns: - Callable[[~.ValidateMessageRequest], - Awaitable[~.ValidateMessageResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'validate_message' not in self._stubs: - self._stubs['validate_message'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.SchemaService/ValidateMessage', - request_serializer=schema.ValidateMessageRequest.serialize, - response_deserializer=schema.ValidateMessageResponse.deserialize, - ) - return self._stubs['validate_message'] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'SchemaServiceGrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py deleted file mode 100644 index 5f20ec4319af..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import SubscriberClient -from .async_client import SubscriberAsyncClient - -__all__ = ( - 'SubscriberClient', - 'SubscriberAsyncClient', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py deleted file mode 100644 index 4306ace7f500..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/async_client.py +++ /dev/null @@ -1,2271 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, Optional, AsyncIterable, Awaitable, AsyncIterator, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.pubsub_v1.services.subscriber import pagers -from google.pubsub_v1.types import pubsub -from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport -from .client import SubscriberClient - - -class SubscriberAsyncClient: - """The service that an application uses to manipulate subscriptions and - to consume messages from a subscription via the ``Pull`` method or - by establishing a bi-directional stream using the ``StreamingPull`` - method. 
- """ - - _client: SubscriberClient - - DEFAULT_ENDPOINT = SubscriberClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT - - snapshot_path = staticmethod(SubscriberClient.snapshot_path) - parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) - subscription_path = staticmethod(SubscriberClient.subscription_path) - parse_subscription_path = staticmethod(SubscriberClient.parse_subscription_path) - topic_path = staticmethod(SubscriberClient.topic_path) - parse_topic_path = staticmethod(SubscriberClient.parse_topic_path) - common_billing_account_path = staticmethod(SubscriberClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(SubscriberClient.parse_common_billing_account_path) - common_folder_path = staticmethod(SubscriberClient.common_folder_path) - parse_common_folder_path = staticmethod(SubscriberClient.parse_common_folder_path) - common_organization_path = staticmethod(SubscriberClient.common_organization_path) - parse_common_organization_path = staticmethod(SubscriberClient.parse_common_organization_path) - common_project_path = staticmethod(SubscriberClient.common_project_path) - parse_common_project_path = staticmethod(SubscriberClient.parse_common_project_path) - common_location_path = staticmethod(SubscriberClient.common_location_path) - parse_common_location_path = staticmethod(SubscriberClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SubscriberAsyncClient: The constructed client. 
- """ - return SubscriberClient.from_service_account_info.__func__(SubscriberAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SubscriberAsyncClient: The constructed client. - """ - return SubscriberClient.from_service_account_file.__func__(SubscriberAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. 
- - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return SubscriberClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> SubscriberTransport: - """Returns the transport used by the client instance. - - Returns: - SubscriberTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(SubscriberClient).get_transport_class, type(SubscriberClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, SubscriberTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the subscriber client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.SubscriberTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = SubscriberClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_subscription(self, - request: Union[pubsub.Subscription, dict] = None, - *, - name: str = None, - topic: str = None, - push_config: pubsub.PushConfig = None, - ack_deadline_seconds: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Subscription: - r"""Creates a subscription to a given topic. See the [resource name - rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). If - the subscription already exists, returns ``ALREADY_EXISTS``. If - the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will - assign a random name for this subscription on the same project - as the topic, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the - request. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_create_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.Subscription( - name="name_value", - topic="topic_value", - ) - - # Make the request - response = await client.create_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.Subscription, dict]): - The request object. A subscription resource. - name (:class:`str`): - Required. The name of the subscription. It must have the - format - ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain - only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes - (``-``), underscores (``_``), periods (``.``), tildes - (``~``), plus (``+``) or percent signs (``%``). It must - be between 3 and 255 characters in length, and it must - not start with ``"goog"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - topic (:class:`str`): - Required. The name of the topic from which this - subscription is receiving messages. Format is - ``projects/{project}/topics/{topic}``. The value of this - field will be ``_deleted-topic_`` if the topic has been - deleted. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - push_config (:class:`google.pubsub_v1.types.PushConfig`): - If push delivery is used with this subscription, this - field is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages - using API methods. - - This corresponds to the ``push_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- ack_deadline_seconds (:class:`int`): - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt - before resending the message. In the interval after the - message is delivered and before it is acknowledged, it - is considered to be outstanding. During that time - period, the message will not be redelivered (on a - best-effort basis). - - For pull subscriptions, this value is used as the - initial value for the ack deadline. To override this - value for a given message, call ``ModifyAckDeadline`` - with the corresponding ``ack_id`` if using non-streaming - pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming - pull. The minimum custom deadline you can specify is 10 - seconds. The maximum custom deadline you can specify is - 600 seconds (10 minutes). If this parameter is 0, a - default value of 10 seconds is used. - - For push delivery, this value is also used to set the - request timeout for the call to the push endpoint. - - If the subscriber never acknowledges the message, the - Pub/Sub system will eventually redeliver the message. - - This corresponds to the ``ack_deadline_seconds`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Subscription: - A subscription resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.Subscription(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if topic is not None: - request.topic = topic - if push_config is not None: - request.push_config = push_config - if ack_deadline_seconds is not None: - request.ack_deadline_seconds = ack_deadline_seconds - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_subscription(self, - request: Union[pubsub.GetSubscriptionRequest, dict] = None, - *, - subscription: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Subscription: - r"""Gets the configuration details of a subscription. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_get_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = await client.get_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): - The request object. Request for the GetSubscription - method. - subscription (:class:`str`): - Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Subscription: - A subscription resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.GetSubscriptionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_subscription(self, - request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Subscription: - r"""Updates an existing subscription. Note that certain - properties of a subscription, such as its topic, are not - modifiable. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_update_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - subscription = pubsub_v1.Subscription() - subscription.name = "name_value" - subscription.topic = "topic_value" - - request = pubsub_v1.UpdateSubscriptionRequest( - subscription=subscription, - ) - - # Make the request - response = await client.update_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): - The request object. Request for the UpdateSubscription - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Subscription: - A subscription resource. - """ - # Create or coerce a protobuf request object. - request = pubsub.UpdateSubscriptionRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription.name", request.subscription.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_subscriptions(self, - request: Union[pubsub.ListSubscriptionsRequest, dict] = None, - *, - project: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSubscriptionsAsyncPager: - r"""Lists matching subscriptions. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_list_subscriptions(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSubscriptionsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): - The request object. Request for the `ListSubscriptions` - method. - project (:class:`str`): - Required. The name of the project in which to list - subscriptions. Format is ``projects/{project-id}``. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager: - Response for the ListSubscriptions method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ListSubscriptionsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if project is not None: - request.project = project - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_subscriptions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project", request.project), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSubscriptionsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_subscription(self, - request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, - *, - subscription: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after - deletion will return ``NOT_FOUND``. After a subscription is - deleted, a new one may be created with the same name, but the - new one has no association with the old subscription or its - topic unless the same topic is specified. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_delete_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - await client.delete_subscription(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): - The request object. Request for the DeleteSubscription - method. - subscription (:class:`str`): - Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.DeleteSubscriptionRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def modify_ack_deadline(self, - request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, - *, - subscription: str = None, - ack_ids: Sequence[str] = None, - ack_deadline_seconds: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message - by the subscriber, or to make the message available for - redelivery if the processing was interrupted. Note that this - does not modify the subscription-level ``ackDeadlineSeconds`` - used for subsequent messages. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_modify_ack_deadline(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyAckDeadlineRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ack_deadline_seconds=2066, - ) - - # Make the request - await client.modify_ack_deadline(request=request) - - Args: - request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): - The request object. Request for the ModifyAckDeadline - method. - subscription (:class:`str`): - Required. 
The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ack_ids (:class:`Sequence[str]`): - Required. List of acknowledgment IDs. - This corresponds to the ``ack_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ack_deadline_seconds (:class:`int`): - Required. The new ack deadline with respect to the time - this request was sent to the Pub/Sub system. For - example, if the value is 10, the new ack deadline will - expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero might immediately make the - message available for delivery to another subscriber - client. This typically results in an increase in the - rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The - maximum deadline you can specify is 600 seconds (10 - minutes). - - This corresponds to the ``ack_deadline_seconds`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ModifyAckDeadlineRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if subscription is not None: - request.subscription = subscription - if ack_deadline_seconds is not None: - request.ack_deadline_seconds = ack_deadline_seconds - if ack_ids: - request.ack_ids.extend(ack_ids) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_ack_deadline, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def acknowledge(self, - request: Union[pubsub.AcknowledgeRequest, dict] = None, - *, - subscription: str = None, - ack_ids: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the - relevant messages from the subscription. - - Acknowledging a message whose ack deadline has expired may - succeed, but such a message may be redelivered later. - Acknowledging a message more than once will not result in an - error. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_acknowledge(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.AcknowledgeRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ) - - # Make the request - await client.acknowledge(request=request) - - Args: - request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): - The request object. Request for the Acknowledge method. - subscription (:class:`str`): - Required. The subscription whose message is being - acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ack_ids (:class:`Sequence[str]`): - Required. The acknowledgment ID for the messages being - acknowledged that was returned by the Pub/Sub system in - the ``Pull`` response. Must not be empty. - - This corresponds to the ``ack_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, ack_ids]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.AcknowledgeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if subscription is not None: - request.subscription = subscription - if ack_ids: - request.ack_ids.extend(ack_ids) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.acknowledge, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def pull(self, - request: Union[pubsub.PullRequest, dict] = None, - *, - subscription: str = None, - return_immediately: bool = None, - max_messages: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.PullResponse: - r"""Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_pull(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.PullRequest( - subscription="subscription_value", - max_messages=1277, - ) - - # Make the request - response = await client.pull(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.PullRequest, dict]): - The request object. Request for the `Pull` method. - subscription (:class:`str`): - Required. The subscription from which messages should be - pulled. 
Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - return_immediately (:class:`bool`): - Optional. If this field set to true, the system will - respond immediately even if it there are no messages - available to return in the ``Pull`` response. Otherwise, - the system may wait (for a bounded amount of time) until - at least one message is available, rather than returning - no messages. Warning: setting this field to ``true`` is - discouraged because it adversely impacts the performance - of ``Pull`` operations. We recommend that users do not - set this field. - - This corresponds to the ``return_immediately`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - max_messages (:class:`int`): - Required. The maximum number of - messages to return for this request. - Must be a positive integer. The Pub/Sub - system may return fewer than the number - specified. - - This corresponds to the ``max_messages`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.PullResponse: - Response for the Pull method. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, return_immediately, max_messages]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.PullRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - if return_immediately is not None: - request.return_immediately = return_immediately - if max_messages is not None: - request.max_messages = max_messages - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.pull, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def streaming_pull(self, - requests: AsyncIterator[pubsub.StreamingPullRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: - r"""Establishes a stream with the server, which sends messages down - to the client. The client streams acknowledgements and ack - deadline modifications back to the server. The server will close - the stream and return the status on any error. 
The server may - close the stream with status ``UNAVAILABLE`` to reassign - server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by - configuring the underlying RPC channel. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_streaming_pull(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.StreamingPullRequest( - subscription="subscription_value", - stream_ack_deadline_seconds=2813, - ) - - # This method expects an iterator which contains - # 'pubsub_v1.StreamingPullRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.streaming_pull(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - - Args: - requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): - The request object AsyncIterator. Request for the `StreamingPull` - streaming RPC method. This request is used to establish - the initial stream as well as to stream acknowledgements - and ack deadline modifications from the client to the - server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - AsyncIterable[google.pubsub_v1.types.StreamingPullResponse]: - Response for the StreamingPull method. This response is used to stream - messages from the server to the client. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.streaming_pull, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def modify_push_config(self, - request: Union[pubsub.ModifyPushConfigRequest, dict] = None, - *, - subscription: str = None, - push_config: pubsub.PushConfig = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one - (signified by an empty ``PushConfig``) or vice versa, or change - the endpoint URL and other attributes of a push subscription. - Messages will accumulate for delivery continuously through the - call regardless of changes to the ``PushConfig``. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_modify_push_config(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyPushConfigRequest( - subscription="subscription_value", - ) - - # Make the request - await client.modify_push_config(request=request) - - Args: - request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): - The request object. Request for the ModifyPushConfig - method. - subscription (:class:`str`): - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. 
- - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - push_config (:class:`google.pubsub_v1.types.PushConfig`): - Required. The push configuration for future deliveries. - - An empty ``pushConfig`` indicates that the Pub/Sub - system should stop pushing messages from the given - subscription and allow messages to be pulled and - acknowledged - effectively pausing the subscription if - ``Pull`` or ``StreamingPull`` is not called. - - This corresponds to the ``push_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, push_config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ModifyPushConfigRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - if push_config is not None: - request.push_config = push_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_push_config, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def get_snapshot(self, - request: Union[pubsub.GetSnapshotRequest, dict] = None, - *, - snapshot: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Snapshot: - r"""Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_get_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - response = await client.get_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): - The request object. Request for the GetSnapshot method. - snapshot (:class:`str`): - Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - - This corresponds to the ``snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Snapshot: - A snapshot resource. Snapshots are used in - [Seek](https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([snapshot]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.GetSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if snapshot is not None: - request.snapshot = snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("snapshot", request.snapshot), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_snapshots(self, - request: Union[pubsub.ListSnapshotsRequest, dict] = None, - *, - project: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSnapshotsAsyncPager: - r"""Lists the existing snapshots. Snapshots are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_list_snapshots(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSnapshotsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_snapshots(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): - The request object. Request for the `ListSnapshots` - method. - project (:class:`str`): - Required. The name of the project in which to list - snapshots. Format is ``projects/{project-id}``. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager: - Response for the ListSnapshots method. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.ListSnapshotsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project is not None: - request.project = project - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_snapshots, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project", request.project), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListSnapshotsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def create_snapshot(self, - request: Union[pubsub.CreateSnapshotRequest, dict] = None, - *, - name: str = None, - subscription: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Snapshot: - r"""Creates a snapshot from the requested subscription. Snapshots - are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. - If the backlog in the subscription is too old -- and the - resulting snapshot would expire in less than 1 hour -- then - ``FAILED_PRECONDITION`` is returned. See also the - ``Snapshot.expire_time`` field. If the name is not provided in - the request, the server will assign a random name for this - snapshot on the same project as the subscription, conforming to - the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the - request. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_create_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.CreateSnapshotRequest( - name="name_value", - subscription="subscription_value", - ) - - # Make the request - response = await client.create_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): - The request object. Request for the `CreateSnapshot` - method. - name (:class:`str`): - Required. User-provided name for this snapshot. 
If the - name is not provided in the request, the server will - assign a random name for this snapshot on the same - project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name - rules. Format is - ``projects/{project}/snapshots/{snap}``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - subscription (:class:`str`): - Required. The subscription whose backlog the snapshot - retains. Specifically, the created snapshot is - guaranteed to retain: (a) The existing backlog on the - subscription. More precisely, this is defined as the - messages in the subscription's backlog that are - unacknowledged upon the successful completion of the - ``CreateSnapshot`` request; as well as: (b) Any messages - published to the subscription's topic following the - successful completion of the CreateSnapshot request. - Format is ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Snapshot: - A snapshot resource. Snapshots are used in - [Seek](https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, subscription]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.CreateSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if subscription is not None: - request.subscription = subscription - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_snapshot(self, - request: Union[pubsub.UpdateSnapshotRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Snapshot: - r"""Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. - - .. 
code-block:: python - - from google import pubsub_v1 - - async def sample_update_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.UpdateSnapshotRequest( - ) - - # Make the request - response = await client.update_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): - The request object. Request for the UpdateSnapshot - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Snapshot: - A snapshot resource. Snapshots are used in - [Seek](https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - """ - # Create or coerce a protobuf request object. - request = pubsub.UpdateSnapshotRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("snapshot.name", request.snapshot.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_snapshot(self, - request: Union[pubsub.DeleteSnapshotRequest, dict] = None, - *, - snapshot: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Removes an existing snapshot. Snapshots are used in [Seek] - (https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - When the snapshot is deleted, all messages retained in the - snapshot are immediately dropped. After a snapshot is deleted, a - new one may be created with the same name, but the new one has - no association with the old snapshot or its subscription, unless - the same subscription is specified. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_delete_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - await client.delete_snapshot(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): - The request object. Request for the `DeleteSnapshot` - method. - snapshot (:class:`str`): - Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - - This corresponds to the ``snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([snapshot]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = pubsub.DeleteSnapshotRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if snapshot is not None: - request.snapshot = snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("snapshot", request.snapshot), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def seek(self, - request: Union[pubsub.SeekRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.SeekResponse: - r"""Seeks an existing subscription to a point in time or to a given - snapshot, whichever is provided in the request. Snapshots are - used in [Seek] - (https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message acknowledgments in - bulk. 
That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - Note that both the subscription and the snapshot must be on the - same topic. - - .. code-block:: python - - from google import pubsub_v1 - - async def sample_seek(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.SeekRequest( - subscription="subscription_value", - ) - - # Make the request - response = await client.seek(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.SeekRequest, dict]): - The request object. Request for the `Seek` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.SeekResponse: - Response for the Seek method (this response is empty). - """ - # Create or coerce a protobuf request object. - request = pubsub.SeekRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.seek, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does - not have a policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control - policy for a function. - - If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~iam_policy_pb2.PolicyTestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "SubscriberAsyncClient", -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py deleted file mode 100644 index d953aa859794..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/client.py +++ /dev/null @@ -1,2373 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.pubsub_v1.services.subscriber import pagers -from google.pubsub_v1.types import pubsub -from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import SubscriberGrpcTransport -from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport - - -class SubscriberClientMeta(type): - """Metaclass for the Subscriber client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] - _transport_registry["grpc"] = SubscriberGrpcTransport - _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[SubscriberTransport]: - """Returns an appropriate transport class. 
- - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class SubscriberClient(metaclass=SubscriberClientMeta): - """The service that an application uses to manipulate subscriptions and - to consume messages from a subscription via the ``Pull`` method or - by establishing a bi-directional stream using the ``StreamingPull`` - method. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "pubsub.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. 
- args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SubscriberClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - SubscriberClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> SubscriberTransport: - """Returns the transport used by the client instance. - - Returns: - SubscriberTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def snapshot_path(project: str,snapshot: str,) -> str: - """Returns a fully-qualified snapshot string.""" - return "projects/{project}/snapshots/{snapshot}".format(project=project, snapshot=snapshot, ) - - @staticmethod - def parse_snapshot_path(path: str) -> Dict[str,str]: - """Parses a snapshot path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def subscription_path(project: str,subscription: str,) -> str: - """Returns a fully-qualified subscription string.""" - return "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - - @staticmethod - def parse_subscription_path(path: str) -> Dict[str,str]: - """Parses a subscription path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/subscriptions/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def topic_path(project: str,topic: str,) -> str: - """Returns a fully-qualified topic string.""" - return "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - - @staticmethod - def parse_topic_path(path: str) -> Dict[str,str]: - """Parses a topic path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/topics/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return 
"folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. 
- (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. 
- client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SubscriberTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the subscriber client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, SubscriberTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). 
However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, SubscriberTransport): - # transport is a SubscriberTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - def create_subscription(self, - request: Union[pubsub.Subscription, dict] = None, - *, - name: str = None, - topic: str = None, - push_config: pubsub.PushConfig = None, - ack_deadline_seconds: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Subscription: - r"""Creates a subscription to a given topic. See the [resource name - rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). If - the subscription already exists, returns ``ALREADY_EXISTS``. If - the corresponding topic doesn't exist, returns ``NOT_FOUND``. - - If the name is not provided in the request, the server will - assign a random name for this subscription on the same project - as the topic, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the - request. - - .. 
code-block:: python - - from google import pubsub_v1 - - def sample_create_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.Subscription( - name="name_value", - topic="topic_value", - ) - - # Make the request - response = client.create_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.Subscription, dict]): - The request object. A subscription resource. - name (str): - Required. The name of the subscription. It must have the - format - ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain - only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes - (``-``), underscores (``_``), periods (``.``), tildes - (``~``), plus (``+``) or percent signs (``%``). It must - be between 3 and 255 characters in length, and it must - not start with ``"goog"``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - topic (str): - Required. The name of the topic from which this - subscription is receiving messages. Format is - ``projects/{project}/topics/{topic}``. The value of this - field will be ``_deleted-topic_`` if the topic has been - deleted. - - This corresponds to the ``topic`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - push_config (google.pubsub_v1.types.PushConfig): - If push delivery is used with this subscription, this - field is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages - using API methods. - - This corresponds to the ``push_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- ack_deadline_seconds (int): - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt - before resending the message. In the interval after the - message is delivered and before it is acknowledged, it - is considered to be outstanding. During that time - period, the message will not be redelivered (on a - best-effort basis). - - For pull subscriptions, this value is used as the - initial value for the ack deadline. To override this - value for a given message, call ``ModifyAckDeadline`` - with the corresponding ``ack_id`` if using non-streaming - pull or send the ``ack_id`` in a - ``StreamingModifyAckDeadlineRequest`` if using streaming - pull. The minimum custom deadline you can specify is 10 - seconds. The maximum custom deadline you can specify is - 600 seconds (10 minutes). If this parameter is 0, a - default value of 10 seconds is used. - - For push delivery, this value is also used to set the - request timeout for the call to the push endpoint. - - If the subscriber never acknowledges the message, the - Pub/Sub system will eventually redeliver the message. - - This corresponds to the ``ack_deadline_seconds`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Subscription: - A subscription resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.Subscription. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.Subscription): - request = pubsub.Subscription(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if topic is not None: - request.topic = topic - if push_config is not None: - request.push_config = push_config - if ack_deadline_seconds is not None: - request.ack_deadline_seconds = ack_deadline_seconds - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_subscription(self, - request: Union[pubsub.GetSubscriptionRequest, dict] = None, - *, - subscription: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Subscription: - r"""Gets the configuration details of a subscription. - - .. 
code-block:: python - - from google import pubsub_v1 - - def sample_get_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = client.get_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): - The request object. Request for the GetSubscription - method. - subscription (str): - Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Subscription: - A subscription resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.GetSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.GetSubscriptionRequest): - request = pubsub.GetSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if subscription is not None: - request.subscription = subscription - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_subscription(self, - request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Subscription: - r"""Updates an existing subscription. Note that certain - properties of a subscription, such as its topic, are not - modifiable. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_update_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - subscription = pubsub_v1.Subscription() - subscription.name = "name_value" - subscription.topic = "topic_value" - - request = pubsub_v1.UpdateSubscriptionRequest( - subscription=subscription, - ) - - # Make the request - response = client.update_subscription(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): - The request object. Request for the UpdateSubscription - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.pubsub_v1.types.Subscription: - A subscription resource. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.UpdateSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.UpdateSubscriptionRequest): - request = pubsub.UpdateSubscriptionRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription.name", request.subscription.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_subscriptions(self, - request: Union[pubsub.ListSubscriptionsRequest, dict] = None, - *, - project: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSubscriptionsPager: - r"""Lists matching subscriptions. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_list_subscriptions(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSubscriptionsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): - The request object. Request for the `ListSubscriptions` - method. - project (str): - Required. 
The name of the project in which to list - subscriptions. Format is ``projects/{project-id}``. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager: - Response for the ListSubscriptions method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListSubscriptionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ListSubscriptionsRequest): - request = pubsub.ListSubscriptionsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project is not None: - request.project = project - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_subscriptions] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project", request.project), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSubscriptionsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_subscription(self, - request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, - *, - subscription: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after - deletion will return ``NOT_FOUND``. After a subscription is - deleted, a new one may be created with the same name, but the - new one has no association with the old subscription or its - topic unless the same topic is specified. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_delete_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - client.delete_subscription(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): - The request object. Request for the DeleteSubscription - method. - subscription (str): - Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DeleteSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.DeleteSubscriptionRequest): - request = pubsub.DeleteSubscriptionRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_subscription] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def modify_ack_deadline(self, - request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, - *, - subscription: str = None, - ack_ids: Sequence[str] = None, - ack_deadline_seconds: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Modifies the ack deadline for a specific message. 
This method is - useful to indicate that more time is needed to process a message - by the subscriber, or to make the message available for - redelivery if the processing was interrupted. Note that this - does not modify the subscription-level ``ackDeadlineSeconds`` - used for subsequent messages. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_modify_ack_deadline(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyAckDeadlineRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ack_deadline_seconds=2066, - ) - - # Make the request - client.modify_ack_deadline(request=request) - - Args: - request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): - The request object. Request for the ModifyAckDeadline - method. - subscription (str): - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ack_ids (Sequence[str]): - Required. List of acknowledgment IDs. - This corresponds to the ``ack_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ack_deadline_seconds (int): - Required. The new ack deadline with respect to the time - this request was sent to the Pub/Sub system. For - example, if the value is 10, the new ack deadline will - expire 10 seconds after the ``ModifyAckDeadline`` call - was made. Specifying zero might immediately make the - message available for delivery to another subscriber - client. This typically results in an increase in the - rate of message redeliveries (that is, duplicates). The - minimum deadline you can specify is 0 seconds. The - maximum deadline you can specify is 600 seconds (10 - minutes). 
- - This corresponds to the ``ack_deadline_seconds`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ModifyAckDeadlineRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ModifyAckDeadlineRequest): - request = pubsub.ModifyAckDeadlineRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - if ack_ids is not None: - request.ack_ids = ack_ids - if ack_deadline_seconds is not None: - request.ack_deadline_seconds = ack_deadline_seconds - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.modify_ack_deadline] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def acknowledge(self, - request: Union[pubsub.AcknowledgeRequest, dict] = None, - *, - subscription: str = None, - ack_ids: Sequence[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the - relevant messages from the subscription. - - Acknowledging a message whose ack deadline has expired may - succeed, but such a message may be redelivered later. - Acknowledging a message more than once will not result in an - error. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_acknowledge(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.AcknowledgeRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ) - - # Make the request - client.acknowledge(request=request) - - Args: - request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): - The request object. Request for the Acknowledge method. - subscription (str): - Required. The subscription whose message is being - acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - ack_ids (Sequence[str]): - Required. The acknowledgment ID for the messages being - acknowledged that was returned by the Pub/Sub system in - the ``Pull`` response. Must not be empty. - - This corresponds to the ``ack_ids`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, ack_ids]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.AcknowledgeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.AcknowledgeRequest): - request = pubsub.AcknowledgeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - if ack_ids is not None: - request.ack_ids = ack_ids - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.acknowledge] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def pull(self, - request: Union[pubsub.PullRequest, dict] = None, - *, - subscription: str = None, - return_immediately: bool = None, - max_messages: int = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.PullResponse: - r"""Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. 
- - .. code-block:: python - - from google import pubsub_v1 - - def sample_pull(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.PullRequest( - subscription="subscription_value", - max_messages=1277, - ) - - # Make the request - response = client.pull(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.PullRequest, dict]): - The request object. Request for the `Pull` method. - subscription (str): - Required. The subscription from which messages should be - pulled. Format is - ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - return_immediately (bool): - Optional. If this field set to true, the system will - respond immediately even if it there are no messages - available to return in the ``Pull`` response. Otherwise, - the system may wait (for a bounded amount of time) until - at least one message is available, rather than returning - no messages. Warning: setting this field to ``true`` is - discouraged because it adversely impacts the performance - of ``Pull`` operations. We recommend that users do not - set this field. - - This corresponds to the ``return_immediately`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - max_messages (int): - Required. The maximum number of - messages to return for this request. - Must be a positive integer. The Pub/Sub - system may return fewer than the number - specified. - - This corresponds to the ``max_messages`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.PullResponse: - Response for the Pull method. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, return_immediately, max_messages]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.PullRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.PullRequest): - request = pubsub.PullRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - if return_immediately is not None: - request.return_immediately = return_immediately - if max_messages is not None: - request.max_messages = max_messages - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.pull] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def streaming_pull(self, - requests: Iterator[pubsub.StreamingPullRequest] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> Iterable[pubsub.StreamingPullResponse]: - r"""Establishes a stream with the server, which sends messages down - to the client. The client streams acknowledgements and ack - deadline modifications back to the server. The server will close - the stream and return the status on any error. The server may - close the stream with status ``UNAVAILABLE`` to reassign - server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by - configuring the underlying RPC channel. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_streaming_pull(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.StreamingPullRequest( - subscription="subscription_value", - stream_ack_deadline_seconds=2813, - ) - - # This method expects an iterator which contains - # 'pubsub_v1.StreamingPullRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.streaming_pull(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - - Args: - requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): - The request object iterator. Request for the `StreamingPull` - streaming RPC method. This request is used to establish - the initial stream as well as to stream acknowledgements - and ack deadline modifications from the client to the - server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - Iterable[google.pubsub_v1.types.StreamingPullResponse]: - Response for the StreamingPull method. This response is used to stream - messages from the server to the client. - - """ - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.streaming_pull] - - # Send the request. - response = rpc( - requests, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def modify_push_config(self, - request: Union[pubsub.ModifyPushConfigRequest, dict] = None, - *, - subscription: str = None, - push_config: pubsub.PushConfig = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one - (signified by an empty ``PushConfig``) or vice versa, or change - the endpoint URL and other attributes of a push subscription. - Messages will accumulate for delivery continuously through the - call regardless of changes to the ``PushConfig``. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_modify_push_config(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyPushConfigRequest( - subscription="subscription_value", - ) - - # Make the request - client.modify_push_config(request=request) - - Args: - request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): - The request object. Request for the ModifyPushConfig - method. - subscription (str): - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. 
- - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - push_config (google.pubsub_v1.types.PushConfig): - Required. The push configuration for future deliveries. - - An empty ``pushConfig`` indicates that the Pub/Sub - system should stop pushing messages from the given - subscription and allow messages to be pulled and - acknowledged - effectively pausing the subscription if - ``Pull`` or ``StreamingPull`` is not called. - - This corresponds to the ``push_config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, push_config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ModifyPushConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ModifyPushConfigRequest): - request = pubsub.ModifyPushConfigRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if subscription is not None: - request.subscription = subscription - if push_config is not None: - request.push_config = push_config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.modify_push_config] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def get_snapshot(self, - request: Union[pubsub.GetSnapshotRequest, dict] = None, - *, - snapshot: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Snapshot: - r"""Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_get_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - response = client.get_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): - The request object. Request for the GetSnapshot method. - snapshot (str): - Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - - This corresponds to the ``snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.pubsub_v1.types.Snapshot: - A snapshot resource. Snapshots are used in - [Seek](https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([snapshot]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.GetSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.GetSnapshotRequest): - request = pubsub.GetSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if snapshot is not None: - request.snapshot = snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("snapshot", request.snapshot), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_snapshots(self, - request: Union[pubsub.ListSnapshotsRequest, dict] = None, - *, - project: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListSnapshotsPager: - r"""Lists the existing snapshots. Snapshots are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_list_snapshots(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSnapshotsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_snapshots(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): - The request object. Request for the `ListSnapshots` - method. - project (str): - Required. The name of the project in which to list - snapshots. Format is ``projects/{project-id}``. - - This corresponds to the ``project`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager: - Response for the ListSnapshots method. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([project]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.ListSnapshotsRequest): - request = pubsub.ListSnapshotsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if project is not None: - request.project = project - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_snapshots] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("project", request.project), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListSnapshotsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def create_snapshot(self, - request: Union[pubsub.CreateSnapshotRequest, dict] = None, - *, - name: str = None, - subscription: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Snapshot: - r"""Creates a snapshot from the requested subscription. 
Snapshots - are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. - If the backlog in the subscription is too old -- and the - resulting snapshot would expire in less than 1 hour -- then - ``FAILED_PRECONDITION`` is returned. See also the - ``Snapshot.expire_time`` field. If the name is not provided in - the request, the server will assign a random name for this - snapshot on the same project as the subscription, conforming to - the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the - request. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_create_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.CreateSnapshotRequest( - name="name_value", - subscription="subscription_value", - ) - - # Make the request - response = client.create_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): - The request object. Request for the `CreateSnapshot` - method. - name (str): - Required. User-provided name for this snapshot. If the - name is not provided in the request, the server will - assign a random name for this snapshot on the same - project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name - rules. Format is - ``projects/{project}/snapshots/{snap}``. 
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - subscription (str): - Required. The subscription whose backlog the snapshot - retains. Specifically, the created snapshot is - guaranteed to retain: (a) The existing backlog on the - subscription. More precisely, this is defined as the - messages in the subscription's backlog that are - unacknowledged upon the successful completion of the - ``CreateSnapshot`` request; as well as: (b) Any messages - published to the subscription's topic following the - successful completion of the CreateSnapshot request. - Format is ``projects/{project}/subscriptions/{sub}``. - - This corresponds to the ``subscription`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Snapshot: - A snapshot resource. Snapshots are used in - [Seek](https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, subscription]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.CreateSnapshotRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.CreateSnapshotRequest): - request = pubsub.CreateSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if subscription is not None: - request.subscription = subscription - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_snapshot(self, - request: Union[pubsub.UpdateSnapshotRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.Snapshot: - r"""Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_update_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.UpdateSnapshotRequest( - ) - - # Make the request - response = client.update_snapshot(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): - The request object. Request for the UpdateSnapshot - method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.Snapshot: - A snapshot resource. Snapshots are used in - [Seek](https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.UpdateSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.UpdateSnapshotRequest): - request = pubsub.UpdateSnapshotRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("snapshot.name", request.snapshot.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_snapshot(self, - request: Union[pubsub.DeleteSnapshotRequest, dict] = None, - *, - snapshot: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Removes an existing snapshot. Snapshots are used in [Seek] - (https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message acknowledgments in - bulk. 
That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - When the snapshot is deleted, all messages retained in the - snapshot are immediately dropped. After a snapshot is deleted, a - new one may be created with the same name, but the new one has - no association with the old snapshot or its subscription, unless - the same subscription is specified. - - .. code-block:: python - - from google import pubsub_v1 - - def sample_delete_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - client.delete_snapshot(request=request) - - Args: - request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): - The request object. Request for the `DeleteSnapshot` - method. - snapshot (str): - Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - - This corresponds to the ``snapshot`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([snapshot]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DeleteSnapshotRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.DeleteSnapshotRequest): - request = pubsub.DeleteSnapshotRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if snapshot is not None: - request.snapshot = snapshot - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_snapshot] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("snapshot", request.snapshot), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def seek(self, - request: Union[pubsub.SeekRequest, dict] = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pubsub.SeekResponse: - r"""Seeks an existing subscription to a point in time or to a given - snapshot, whichever is provided in the request. Snapshots are - used in [Seek] - (https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - Note that both the subscription and the snapshot must be on the - same topic. - - .. 
code-block:: python - - from google import pubsub_v1 - - def sample_seek(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.SeekRequest( - subscription="subscription_value", - ) - - # Make the request - response = client.seek(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.pubsub_v1.types.SeekRequest, dict]): - The request object. Request for the `Seek` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.pubsub_v1.types.SeekResponse: - Response for the Seek method (this response is empty). - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.SeekRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, pubsub.SeekRequest): - request = pubsub.SeekRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.seek] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("subscription", request.subscription), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! 
Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - def set_iam_policy( - self, - request: iam_policy_pb2.SetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Sets the IAM access control policy on the specified function. - - Replaces any existing policy. - - Args: - request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): - The request object. Request message for `SetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.SetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def get_iam_policy( - self, - request: iam_policy_pb2.GetIamPolicyRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> policy_pb2.Policy: - r"""Gets the IAM access control policy for a function. - - Returns an empty policy if the function exists and does not have a - policy set. - - Args: - request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): - The request object. Request message for `GetIamPolicy` - method. - retry (google.api_core.retry.Retry): Designation of what errors, if - any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.policy_pb2.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- **JSON Example** - :: - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - **YAML Example** - :: - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - For a description of IAM and its features, see the `IAM - developer's - guide `__. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.GetIamPolicyRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - def test_iam_permissions( - self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, - *, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified IAM permissions against the IAM access control - policy for a function. - - If the function does not exist, this will return an empty set - of permissions, not a NOT_FOUND error. - - Args: - request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): - The request object. Request message for - `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, - if any, should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - Returns: - ~.iam_policy_pb2.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. - """ - # Create or coerce a protobuf request object. - - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. - if isinstance(request, dict): - request = iam_policy_pb2.TestIamPermissionsRequest(**request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata( - (("resource", request.resource),)), - ) - - # Send the request. - response = rpc( - request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "SubscriberClient", -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py deleted file mode 100644 index 6023648a5d20..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/pagers.py +++ /dev/null @@ -1,260 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.pubsub_v1.types import pubsub - - -class ListSubscriptionsPager: - """A pager for iterating through ``list_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``subscriptions`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSubscriptions`` requests and continue to iterate - through the ``subscriptions`` field on the - corresponding responses. 
- - All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., pubsub.ListSubscriptionsResponse], - request: pubsub.ListSubscriptionsRequest, - response: pubsub.ListSubscriptionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListSubscriptionsRequest): - The initial request object. - response (google.pubsub_v1.types.ListSubscriptionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListSubscriptionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[pubsub.ListSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[pubsub.Subscription]: - for page in self.pages: - yield from page.subscriptions - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSubscriptionsAsyncPager: - """A pager for iterating through ``list_subscriptions`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListSubscriptionsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``subscriptions`` field. 
- - If there are more pages, the ``__aiter__`` method will make additional - ``ListSubscriptions`` requests and continue to iterate - through the ``subscriptions`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListSubscriptionsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[pubsub.ListSubscriptionsResponse]], - request: pubsub.ListSubscriptionsRequest, - response: pubsub.ListSubscriptionsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListSubscriptionsRequest): - The initial request object. - response (google.pubsub_v1.types.ListSubscriptionsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = pubsub.ListSubscriptionsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[pubsub.ListSubscriptionsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[pubsub.Subscription]: - async def async_generator(): - async for page in self.pages: - for response in page.subscriptions: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSnapshotsPager: - """A pager for iterating through ``list_snapshots`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``snapshots`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListSnapshots`` requests and continue to iterate - through the ``snapshots`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., pubsub.ListSnapshotsResponse], - request: pubsub.ListSnapshotsRequest, - response: pubsub.ListSnapshotsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.pubsub_v1.types.ListSnapshotsRequest): - The initial request object. 
- response (google.pubsub_v1.types.ListSnapshotsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListSnapshotsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[pubsub.ListSnapshotsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[pubsub.Snapshot]: - for page in self.pages: - yield from page.snapshots - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListSnapshotsAsyncPager: - """A pager for iterating through ``list_snapshots`` requests. - - This class thinly wraps an initial - :class:`google.pubsub_v1.types.ListSnapshotsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``snapshots`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListSnapshots`` requests and continue to iterate - through the ``snapshots`` field on the - corresponding responses. - - All the usual :class:`google.pubsub_v1.types.ListSnapshotsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[pubsub.ListSnapshotsResponse]], - request: pubsub.ListSnapshotsRequest, - response: pubsub.ListSnapshotsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.pubsub_v1.types.ListSnapshotsRequest): - The initial request object. - response (google.pubsub_v1.types.ListSnapshotsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = pubsub.ListSnapshotsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[pubsub.ListSnapshotsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[pubsub.Snapshot]: - async def async_generator(): - async for page in self.pages: - for response in page.snapshots: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py deleted file mode 100644 index 196dd1beea65..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import SubscriberTransport -from .grpc import SubscriberGrpcTransport -from .grpc_asyncio import SubscriberGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] -_transport_registry['grpc'] = SubscriberGrpcTransport -_transport_registry['grpc_asyncio'] = SubscriberGrpcAsyncIOTransport - -__all__ = ( - 'SubscriberTransport', - 'SubscriberGrpcTransport', - 'SubscriberGrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py deleted file mode 100644 index 9e22202727af..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/base.py +++ /dev/null @@ -1,508 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import pubsub - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-pubsub', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class SubscriberTransport(abc.ABC): - """Abstract transport class for Subscriber.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', - ) - - DEFAULT_HOST: str = 'pubsub.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. 
These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_subscription: gapic_v1.method.wrap_method( - self.create_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_subscription: gapic_v1.method.wrap_method( - self.get_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_subscription: gapic_v1.method.wrap_method( - self.update_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_subscriptions: gapic_v1.method.wrap_method( - self.list_subscriptions, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_subscription: gapic_v1.method.wrap_method( - self.delete_subscription, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, 
predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.modify_ack_deadline: gapic_v1.method.wrap_method( - self.modify_ack_deadline, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.acknowledge: gapic_v1.method.wrap_method( - self.acknowledge, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.pull: gapic_v1.method.wrap_method( - self.pull, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.streaming_pull: gapic_v1.method.wrap_method( - self.streaming_pull, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=client_info, - ), - self.modify_push_config: gapic_v1.method.wrap_method( - self.modify_push_config, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.get_snapshot: gapic_v1.method.wrap_method( - self.get_snapshot, - default_retry=retries.Retry( 
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.list_snapshots: gapic_v1.method.wrap_method( - self.list_snapshots, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.create_snapshot: gapic_v1.method.wrap_method( - self.create_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.update_snapshot: gapic_v1.method.wrap_method( - self.update_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.delete_snapshot: gapic_v1.method.wrap_method( - self.delete_snapshot, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - self.seek: gapic_v1.method.wrap_method( - self.seek, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. 
warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! - """ - raise NotImplementedError() - - @property - def create_subscription(self) -> Callable[ - [pubsub.Subscription], - Union[ - pubsub.Subscription, - Awaitable[pubsub.Subscription] - ]]: - raise NotImplementedError() - - @property - def get_subscription(self) -> Callable[ - [pubsub.GetSubscriptionRequest], - Union[ - pubsub.Subscription, - Awaitable[pubsub.Subscription] - ]]: - raise NotImplementedError() - - @property - def update_subscription(self) -> Callable[ - [pubsub.UpdateSubscriptionRequest], - Union[ - pubsub.Subscription, - Awaitable[pubsub.Subscription] - ]]: - raise NotImplementedError() - - @property - def list_subscriptions(self) -> Callable[ - [pubsub.ListSubscriptionsRequest], - Union[ - pubsub.ListSubscriptionsResponse, - Awaitable[pubsub.ListSubscriptionsResponse] - ]]: - raise NotImplementedError() - - @property - def delete_subscription(self) -> Callable[ - [pubsub.DeleteSubscriptionRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def modify_ack_deadline(self) -> Callable[ - [pubsub.ModifyAckDeadlineRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def acknowledge(self) -> Callable[ - [pubsub.AcknowledgeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def pull(self) -> Callable[ - [pubsub.PullRequest], - Union[ - pubsub.PullResponse, - Awaitable[pubsub.PullResponse] - ]]: - raise NotImplementedError() - - @property - def streaming_pull(self) -> Callable[ - [pubsub.StreamingPullRequest], - Union[ - pubsub.StreamingPullResponse, - Awaitable[pubsub.StreamingPullResponse] - ]]: - raise NotImplementedError() - - @property - def modify_push_config(self) -> Callable[ - [pubsub.ModifyPushConfigRequest], - Union[ - 
empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def get_snapshot(self) -> Callable[ - [pubsub.GetSnapshotRequest], - Union[ - pubsub.Snapshot, - Awaitable[pubsub.Snapshot] - ]]: - raise NotImplementedError() - - @property - def list_snapshots(self) -> Callable[ - [pubsub.ListSnapshotsRequest], - Union[ - pubsub.ListSnapshotsResponse, - Awaitable[pubsub.ListSnapshotsResponse] - ]]: - raise NotImplementedError() - - @property - def create_snapshot(self) -> Callable[ - [pubsub.CreateSnapshotRequest], - Union[ - pubsub.Snapshot, - Awaitable[pubsub.Snapshot] - ]]: - raise NotImplementedError() - - @property - def update_snapshot(self) -> Callable[ - [pubsub.UpdateSnapshotRequest], - Union[ - pubsub.Snapshot, - Awaitable[pubsub.Snapshot] - ]]: - raise NotImplementedError() - - @property - def delete_snapshot(self) -> Callable[ - [pubsub.DeleteSnapshotRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def seek(self) -> Callable[ - [pubsub.SeekRequest], - Union[ - pubsub.SeekResponse, - Awaitable[pubsub.SeekResponse] - ]]: - raise NotImplementedError() - - @property - def set_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.SetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def get_iam_policy( - self, - ) -> Callable[ - [iam_policy_pb2.GetIamPolicyRequest], - Union[policy_pb2.Policy, Awaitable[policy_pb2.Policy]], - ]: - raise NotImplementedError() - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Union[ - iam_policy_pb2.TestIamPermissionsResponse, - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ], - ]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'SubscriberTransport', -) diff --git 
a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc.py deleted file mode 100644 index 99a882c7bf7c..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/services/subscriber/transports/grpc.py +++ /dev/null @@ -1,835 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.pubsub_v1.types import pubsub -from .base import SubscriberTransport, DEFAULT_CLIENT_INFO - - -class SubscriberGrpcTransport(SubscriberTransport): - """gRPC backend transport for Subscriber. - - The service that an application uses to manipulate subscriptions and - to consume messages from a subscription via the ``Pull`` method or - by establishing a bi-directional stream using the ``StreamingPull`` - method. 
    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """
    # Lazily-populated cache of gRPC stub callables, keyed by method name.
    # Each stub property below creates its entry on first access.
    _stubs: Dict[str, Callable]

    def __init__(self, *,
            host: str = 'pubsub.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: str = None,
            scopes: Sequence[str] = None,
            channel: grpc.Channel = None,
            api_mtls_endpoint: str = None,
            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
            ssl_channel_credentials: grpc.ChannelCredentials = None,
            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # (``False`` — not ``None`` — so the base transport does not fall
            # back to application default credentials.)
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    # Pub/Sub payloads can be large; -1 lifts gRPC's default
                    # 4 MB send/receive message-size caps.
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(cls,
            host: str = 'pubsub.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: str = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            **kwargs) -> grpc.Channel:
        """Create and return a gRPC channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                and ``credentials_file`` are passed.
        """

        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel

    @property
    def create_subscription(self) -> Callable[
            [pubsub.Subscription],
            pubsub.Subscription]:
        r"""Return a callable for the create subscription method over gRPC.

        Creates a subscription to a given topic. See the [resource name
        rules]
        (https://cloud.google.com/pubsub/docs/admin#resource_names). If
        the subscription already exists, returns ``ALREADY_EXISTS``. If
        the corresponding topic doesn't exist, returns ``NOT_FOUND``.

        If the name is not provided in the request, the server will
        assign a random name for this subscription on the same project
        as the topic, conforming to the [resource name format]
        (https://cloud.google.com/pubsub/docs/admin#resource_names). The
        generated name is populated in the returned Subscription object.
        Note that for REST API requests, you must specify a name in the
        request.

        Returns:
            Callable[[~.Subscription],
                    ~.Subscription]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_subscription' not in self._stubs:
            self._stubs['create_subscription'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/CreateSubscription',
                request_serializer=pubsub.Subscription.serialize,
                response_deserializer=pubsub.Subscription.deserialize,
            )
        return self._stubs['create_subscription']

    @property
    def get_subscription(self) -> Callable[
            [pubsub.GetSubscriptionRequest],
            pubsub.Subscription]:
        r"""Return a callable for the get subscription method over gRPC.

        Gets the configuration details of a subscription.

        Returns:
            Callable[[~.GetSubscriptionRequest],
                    ~.Subscription]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_subscription' not in self._stubs:
            self._stubs['get_subscription'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/GetSubscription',
                request_serializer=pubsub.GetSubscriptionRequest.serialize,
                response_deserializer=pubsub.Subscription.deserialize,
            )
        return self._stubs['get_subscription']

    @property
    def update_subscription(self) -> Callable[
            [pubsub.UpdateSubscriptionRequest],
            pubsub.Subscription]:
        r"""Return a callable for the update subscription method over gRPC.

        Updates an existing subscription. Note that certain
        properties of a subscription, such as its topic, are not
        modifiable.

        Returns:
            Callable[[~.UpdateSubscriptionRequest],
                    ~.Subscription]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_subscription' not in self._stubs:
            self._stubs['update_subscription'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/UpdateSubscription',
                request_serializer=pubsub.UpdateSubscriptionRequest.serialize,
                response_deserializer=pubsub.Subscription.deserialize,
            )
        return self._stubs['update_subscription']

    @property
    def list_subscriptions(self) -> Callable[
            [pubsub.ListSubscriptionsRequest],
            pubsub.ListSubscriptionsResponse]:
        r"""Return a callable for the list subscriptions method over gRPC.

        Lists matching subscriptions.

        Returns:
            Callable[[~.ListSubscriptionsRequest],
                    ~.ListSubscriptionsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_subscriptions' not in self._stubs:
            self._stubs['list_subscriptions'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/ListSubscriptions',
                request_serializer=pubsub.ListSubscriptionsRequest.serialize,
                response_deserializer=pubsub.ListSubscriptionsResponse.deserialize,
            )
        return self._stubs['list_subscriptions']

    @property
    def delete_subscription(self) -> Callable[
            [pubsub.DeleteSubscriptionRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the delete subscription method over gRPC.

        Deletes an existing subscription. All messages retained in the
        subscription are immediately dropped. Calls to ``Pull`` after
        deletion will return ``NOT_FOUND``. After a subscription is
        deleted, a new one may be created with the same name, but the
        new one has no association with the old subscription or its
        topic unless the same topic is specified.

        Returns:
            Callable[[~.DeleteSubscriptionRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_subscription' not in self._stubs:
            self._stubs['delete_subscription'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/DeleteSubscription',
                request_serializer=pubsub.DeleteSubscriptionRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['delete_subscription']

    @property
    def modify_ack_deadline(self) -> Callable[
            [pubsub.ModifyAckDeadlineRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the modify ack deadline method over gRPC.

        Modifies the ack deadline for a specific message. This method is
        useful to indicate that more time is needed to process a message
        by the subscriber, or to make the message available for
        redelivery if the processing was interrupted. Note that this
        does not modify the subscription-level ``ackDeadlineSeconds``
        used for subsequent messages.

        Returns:
            Callable[[~.ModifyAckDeadlineRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'modify_ack_deadline' not in self._stubs:
            self._stubs['modify_ack_deadline'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/ModifyAckDeadline',
                request_serializer=pubsub.ModifyAckDeadlineRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['modify_ack_deadline']

    @property
    def acknowledge(self) -> Callable[
            [pubsub.AcknowledgeRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the acknowledge method over gRPC.

        Acknowledges the messages associated with the ``ack_ids`` in the
        ``AcknowledgeRequest``. The Pub/Sub system can remove the
        relevant messages from the subscription.

        Acknowledging a message whose ack deadline has expired may
        succeed, but such a message may be redelivered later.
        Acknowledging a message more than once will not result in an
        error.

        Returns:
            Callable[[~.AcknowledgeRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'acknowledge' not in self._stubs:
            self._stubs['acknowledge'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/Acknowledge',
                request_serializer=pubsub.AcknowledgeRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['acknowledge']

    @property
    def pull(self) -> Callable[
            [pubsub.PullRequest],
            pubsub.PullResponse]:
        r"""Return a callable for the pull method over gRPC.

        Pulls messages from the server. The server may return
        ``UNAVAILABLE`` if there are too many concurrent pull requests
        pending for the given subscription.

        Returns:
            Callable[[~.PullRequest],
                    ~.PullResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'pull' not in self._stubs:
            self._stubs['pull'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/Pull',
                request_serializer=pubsub.PullRequest.serialize,
                response_deserializer=pubsub.PullResponse.deserialize,
            )
        return self._stubs['pull']

    @property
    def streaming_pull(self) -> Callable[
            [pubsub.StreamingPullRequest],
            pubsub.StreamingPullResponse]:
        r"""Return a callable for the streaming pull method over gRPC.

        Establishes a stream with the server, which sends messages down
        to the client. The client streams acknowledgements and ack
        deadline modifications back to the server. The server will close
        the stream and return the status on any error. The server may
        close the stream with status ``UNAVAILABLE`` to reassign
        server-side resources, in which case, the client should
        re-establish the stream. Flow control can be achieved by
        configuring the underlying RPC channel.

        Returns:
            Callable[[~.StreamingPullRequest],
                    ~.StreamingPullResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        # NOTE: this is the only bidirectional-streaming stub on this
        # transport, hence ``stream_stream`` rather than ``unary_unary``.
        if 'streaming_pull' not in self._stubs:
            self._stubs['streaming_pull'] = self.grpc_channel.stream_stream(
                '/google.pubsub.v1.Subscriber/StreamingPull',
                request_serializer=pubsub.StreamingPullRequest.serialize,
                response_deserializer=pubsub.StreamingPullResponse.deserialize,
            )
        return self._stubs['streaming_pull']

    @property
    def modify_push_config(self) -> Callable[
            [pubsub.ModifyPushConfigRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the modify push config method over gRPC.

        Modifies the ``PushConfig`` for a specified subscription.

        This may be used to change a push subscription to a pull one
        (signified by an empty ``PushConfig``) or vice versa, or change
        the endpoint URL and other attributes of a push subscription.
        Messages will accumulate for delivery continuously through the
        call regardless of changes to the ``PushConfig``.

        Returns:
            Callable[[~.ModifyPushConfigRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'modify_push_config' not in self._stubs:
            self._stubs['modify_push_config'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/ModifyPushConfig',
                request_serializer=pubsub.ModifyPushConfigRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['modify_push_config']

    @property
    def get_snapshot(self) -> Callable[
            [pubsub.GetSnapshotRequest],
            pubsub.Snapshot]:
        r"""Return a callable for the get snapshot method over gRPC.

        Gets the configuration details of a snapshot.
        Snapshots are used in Seek
        operations, which allow you to manage message
        acknowledgments in bulk. That is, you can set the
        acknowledgment state of messages in an existing
        subscription to the state captured by a snapshot.

        Returns:
            Callable[[~.GetSnapshotRequest],
                    ~.Snapshot]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'get_snapshot' not in self._stubs:
            self._stubs['get_snapshot'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/GetSnapshot',
                request_serializer=pubsub.GetSnapshotRequest.serialize,
                response_deserializer=pubsub.Snapshot.deserialize,
            )
        return self._stubs['get_snapshot']

    @property
    def list_snapshots(self) -> Callable[
            [pubsub.ListSnapshotsRequest],
            pubsub.ListSnapshotsResponse]:
        r"""Return a callable for the list snapshots method over gRPC.

        Lists the existing snapshots. Snapshots are used in
        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
        operations, which allow you to manage message acknowledgments in
        bulk. That is, you can set the acknowledgment state of messages
        in an existing subscription to the state captured by a snapshot.

        Returns:
            Callable[[~.ListSnapshotsRequest],
                    ~.ListSnapshotsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'list_snapshots' not in self._stubs:
            self._stubs['list_snapshots'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/ListSnapshots',
                request_serializer=pubsub.ListSnapshotsRequest.serialize,
                response_deserializer=pubsub.ListSnapshotsResponse.deserialize,
            )
        return self._stubs['list_snapshots']

    @property
    def create_snapshot(self) -> Callable[
            [pubsub.CreateSnapshotRequest],
            pubsub.Snapshot]:
        r"""Return a callable for the create snapshot method over gRPC.

        Creates a snapshot from the requested subscription. Snapshots
        are used in
        `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
        operations, which allow you to manage message acknowledgments in
        bulk. That is, you can set the acknowledgment state of messages
        in an existing subscription to the state captured by a snapshot.
        If the snapshot already exists, returns ``ALREADY_EXISTS``. If
        the requested subscription doesn't exist, returns ``NOT_FOUND``.
        If the backlog in the subscription is too old -- and the
        resulting snapshot would expire in less than 1 hour -- then
        ``FAILED_PRECONDITION`` is returned. See also the
        ``Snapshot.expire_time`` field. If the name is not provided in
        the request, the server will assign a random name for this
        snapshot on the same project as the subscription, conforming to
        the [resource name format]
        (https://cloud.google.com/pubsub/docs/admin#resource_names). The
        generated name is populated in the returned Snapshot object.
        Note that for REST API requests, you must specify a name in the
        request.

        Returns:
            Callable[[~.CreateSnapshotRequest],
                    ~.Snapshot]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'create_snapshot' not in self._stubs:
            self._stubs['create_snapshot'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/CreateSnapshot',
                request_serializer=pubsub.CreateSnapshotRequest.serialize,
                response_deserializer=pubsub.Snapshot.deserialize,
            )
        return self._stubs['create_snapshot']

    @property
    def update_snapshot(self) -> Callable[
            [pubsub.UpdateSnapshotRequest],
            pubsub.Snapshot]:
        r"""Return a callable for the update snapshot method over gRPC.

        Updates an existing snapshot. Snapshots are used in
        Seek
        operations, which allow
        you to manage message acknowledgments in bulk. That is,
        you can set the acknowledgment state of messages in an
        existing subscription to the state captured by a
        snapshot.

        Returns:
            Callable[[~.UpdateSnapshotRequest],
                    ~.Snapshot]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'update_snapshot' not in self._stubs:
            self._stubs['update_snapshot'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/UpdateSnapshot',
                request_serializer=pubsub.UpdateSnapshotRequest.serialize,
                response_deserializer=pubsub.Snapshot.deserialize,
            )
        return self._stubs['update_snapshot']

    @property
    def delete_snapshot(self) -> Callable[
            [pubsub.DeleteSnapshotRequest],
            empty_pb2.Empty]:
        r"""Return a callable for the delete snapshot method over gRPC.

        Removes an existing snapshot. Snapshots are used in [Seek]
        (https://cloud.google.com/pubsub/docs/replay-overview)
        operations, which allow you to manage message acknowledgments in
        bulk. That is, you can set the acknowledgment state of messages
        in an existing subscription to the state captured by a snapshot.
        When the snapshot is deleted, all messages retained in the
        snapshot are immediately dropped. After a snapshot is deleted, a
        new one may be created with the same name, but the new one has
        no association with the old snapshot or its subscription, unless
        the same subscription is specified.

        Returns:
            Callable[[~.DeleteSnapshotRequest],
                    ~.Empty]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'delete_snapshot' not in self._stubs:
            self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/DeleteSnapshot',
                request_serializer=pubsub.DeleteSnapshotRequest.serialize,
                response_deserializer=empty_pb2.Empty.FromString,
            )
        return self._stubs['delete_snapshot']

    @property
    def seek(self) -> Callable[
            [pubsub.SeekRequest],
            pubsub.SeekResponse]:
        r"""Return a callable for the seek method over gRPC.

        Seeks an existing subscription to a point in time or to a given
        snapshot, whichever is provided in the request. Snapshots are
        used in [Seek]
        (https://cloud.google.com/pubsub/docs/replay-overview)
        operations, which allow you to manage message acknowledgments in
        bulk. That is, you can set the acknowledgment state of messages
        in an existing subscription to the state captured by a snapshot.
        Note that both the subscription and the snapshot must be on the
        same topic.

        Returns:
            Callable[[~.SeekRequest],
                    ~.SeekResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if 'seek' not in self._stubs:
            self._stubs['seek'] = self.grpc_channel.unary_unary(
                '/google.pubsub.v1.Subscriber/Seek',
                request_serializer=pubsub.SeekRequest.serialize,
                response_deserializer=pubsub.SeekResponse.deserialize,
            )
        return self._stubs['seek']

    @property
    def set_iam_policy(
        self,
    ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]:
        r"""Return a callable for the set iam policy method over gRPC.
        Sets the IAM access control policy on the specified
        function. Replaces any existing policy.
        Returns:
            Callable[[~.SetIamPolicyRequest],
                    ~.Policy]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        # NOTE: the IAM mixin stubs use the raw protobuf
        # SerializeToString/FromString rather than proto-plus
        # serialize/deserialize, because these are plain protobuf messages.
        if "set_iam_policy" not in self._stubs:
            self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary(
                "/google.iam.v1.IAMPolicy/SetIamPolicy",
                request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString,
                response_deserializer=policy_pb2.Policy.FromString,
            )
        return self._stubs["set_iam_policy"]

    @property
    def get_iam_policy(
        self,
    ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]:
        r"""Return a callable for the get iam policy method over gRPC.
        Gets the IAM access control policy for a function.
        Returns an empty policy if the function exists and does
        not have a policy set.
        Returns:
            Callable[[~.GetIamPolicyRequest],
                    ~.Policy]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_iam_policy" not in self._stubs:
            self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary(
                "/google.iam.v1.IAMPolicy/GetIamPolicy",
                request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString,
                response_deserializer=policy_pb2.Policy.FromString,
            )
        return self._stubs["get_iam_policy"]

    @property
    def test_iam_permissions(
        self,
    ) -> Callable[
        [iam_policy_pb2.TestIamPermissionsRequest], iam_policy_pb2.TestIamPermissionsResponse
    ]:
        r"""Return a callable for the test iam permissions method over gRPC.
        Tests the specified permissions against the IAM access control
        policy for a function. If the function does not exist, this will
        return an empty set of permissions, not a NOT_FOUND error.
        Returns:
            Callable[[~.TestIamPermissionsRequest],
                    ~.TestIamPermissionsResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "test_iam_permissions" not in self._stubs:
            self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary(
                "/google.iam.v1.IAMPolicy/TestIamPermissions",
                request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString,
                response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString,
            )
        return self._stubs["test_iam_permissions"]

    def close(self):
        """Release transport resources by closing the underlying gRPC channel."""
        self.grpc_channel.close()

    @property
    def kind(self) -> str:
        """Return the transport kind identifier ("grpc")."""
        return "grpc"


__all__ = (
    'SubscriberGrpcTransport',
)

# ---------------------------------------------------------------------------
# NOTE(review): in the original patch, the deleted file
# google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py
# began here with the following header.
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union

from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore

import grpc  # type: ignore
from grpc.experimental import aio  # type: ignore

from google.iam.v1 import iam_policy_pb2  # type: ignore
from google.iam.v1 import policy_pb2  # type: ignore
from google.protobuf import empty_pb2  # type: ignore
from google.pubsub_v1.types import pubsub
from .base import SubscriberTransport, DEFAULT_CLIENT_INFO
from .grpc import SubscriberGrpcTransport


class SubscriberGrpcAsyncIOTransport(SubscriberTransport):
    """gRPC AsyncIO backend transport for Subscriber.

    The service that an application uses to manipulate subscriptions and
    to consume messages from a subscription via the ``Pull`` method or
    by establishing a bi-directional stream using the ``StreamingPull``
    method.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    _grpc_channel: aio.Channel
    # NOTE(review): mutable class-level default; generated code relies on
    # ``__init__`` always rebinding a fresh per-instance dict below.
    _stubs: Dict[str, Callable] = {}

    @classmethod
    def create_channel(cls,
            host: str = 'pubsub.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            **kwargs) -> aio.Channel:
        """Create and return a gRPC AsyncIO channel object.
        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.
        Returns:
            aio.Channel: A gRPC AsyncIO channel object.
        """

        return grpc_helpers_async.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs
        )

    def __init__(self, *,
            host: str = 'pubsub.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            channel: aio.Channel = None,
            api_mtls_endpoint: str = None,
            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
            ssl_channel_credentials: grpc.ChannelCredentials = None,
            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            # (``False`` — not ``None`` — so the base transport does not fall
            # back to application default credentials.)
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    # Pub/Sub payloads can be large; -1 lifts gRPC's default
                    # 4 MB send/receive message-size caps.
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @property
    def grpc_channel(self) -> aio.Channel:
        """Create the channel designed to connect to this service.

        This property caches on the instance; repeated calls return
        the same channel.
        """
        # Return the channel from cache.
        return self._grpc_channel

    @property
    def create_subscription(self) -> Callable[
            [pubsub.Subscription],
            Awaitable[pubsub.Subscription]]:
        r"""Return a callable for the create subscription method over gRPC.

        Creates a subscription to a given topic. See the [resource name
        rules]
        (https://cloud.google.com/pubsub/docs/admin#resource_names). If
        the subscription already exists, returns ``ALREADY_EXISTS``. If
        the corresponding topic doesn't exist, returns ``NOT_FOUND``.

        If the name is not provided in the request, the server will
        assign a random name for this subscription on the same project
        as the topic, conforming to the [resource name format]
        (https://cloud.google.com/pubsub/docs/admin#resource_names). The
        generated name is populated in the returned Subscription object.
        Note that for REST API requests, you must specify a name in the
        request.

        Returns:
            Callable[[~.Subscription],
                    Awaitable[~.Subscription]]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        # NOTE(review): the remainder of this property (the lazy stub
        # creation and ``return self._stubs['create_subscription']``) lies
        # beyond this chunk's last visible line and is not reproduced here.
- if 'create_subscription' not in self._stubs: - self._stubs['create_subscription'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/CreateSubscription', - request_serializer=pubsub.Subscription.serialize, - response_deserializer=pubsub.Subscription.deserialize, - ) - return self._stubs['create_subscription'] - - @property - def get_subscription(self) -> Callable[ - [pubsub.GetSubscriptionRequest], - Awaitable[pubsub.Subscription]]: - r"""Return a callable for the get subscription method over gRPC. - - Gets the configuration details of a subscription. - - Returns: - Callable[[~.GetSubscriptionRequest], - Awaitable[~.Subscription]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_subscription' not in self._stubs: - self._stubs['get_subscription'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/GetSubscription', - request_serializer=pubsub.GetSubscriptionRequest.serialize, - response_deserializer=pubsub.Subscription.deserialize, - ) - return self._stubs['get_subscription'] - - @property - def update_subscription(self) -> Callable[ - [pubsub.UpdateSubscriptionRequest], - Awaitable[pubsub.Subscription]]: - r"""Return a callable for the update subscription method over gRPC. - - Updates an existing subscription. Note that certain - properties of a subscription, such as its topic, are not - modifiable. - - Returns: - Callable[[~.UpdateSubscriptionRequest], - Awaitable[~.Subscription]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_subscription' not in self._stubs: - self._stubs['update_subscription'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/UpdateSubscription', - request_serializer=pubsub.UpdateSubscriptionRequest.serialize, - response_deserializer=pubsub.Subscription.deserialize, - ) - return self._stubs['update_subscription'] - - @property - def list_subscriptions(self) -> Callable[ - [pubsub.ListSubscriptionsRequest], - Awaitable[pubsub.ListSubscriptionsResponse]]: - r"""Return a callable for the list subscriptions method over gRPC. - - Lists matching subscriptions. - - Returns: - Callable[[~.ListSubscriptionsRequest], - Awaitable[~.ListSubscriptionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_subscriptions' not in self._stubs: - self._stubs['list_subscriptions'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/ListSubscriptions', - request_serializer=pubsub.ListSubscriptionsRequest.serialize, - response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, - ) - return self._stubs['list_subscriptions'] - - @property - def delete_subscription(self) -> Callable[ - [pubsub.DeleteSubscriptionRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete subscription method over gRPC. - - Deletes an existing subscription. All messages retained in the - subscription are immediately dropped. Calls to ``Pull`` after - deletion will return ``NOT_FOUND``. After a subscription is - deleted, a new one may be created with the same name, but the - new one has no association with the old subscription or its - topic unless the same topic is specified. 
- - Returns: - Callable[[~.DeleteSubscriptionRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_subscription' not in self._stubs: - self._stubs['delete_subscription'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/DeleteSubscription', - request_serializer=pubsub.DeleteSubscriptionRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_subscription'] - - @property - def modify_ack_deadline(self) -> Callable[ - [pubsub.ModifyAckDeadlineRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the modify ack deadline method over gRPC. - - Modifies the ack deadline for a specific message. This method is - useful to indicate that more time is needed to process a message - by the subscriber, or to make the message available for - redelivery if the processing was interrupted. Note that this - does not modify the subscription-level ``ackDeadlineSeconds`` - used for subsequent messages. - - Returns: - Callable[[~.ModifyAckDeadlineRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'modify_ack_deadline' not in self._stubs: - self._stubs['modify_ack_deadline'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/ModifyAckDeadline', - request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['modify_ack_deadline'] - - @property - def acknowledge(self) -> Callable[ - [pubsub.AcknowledgeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the acknowledge method over gRPC. - - Acknowledges the messages associated with the ``ack_ids`` in the - ``AcknowledgeRequest``. The Pub/Sub system can remove the - relevant messages from the subscription. - - Acknowledging a message whose ack deadline has expired may - succeed, but such a message may be redelivered later. - Acknowledging a message more than once will not result in an - error. - - Returns: - Callable[[~.AcknowledgeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'acknowledge' not in self._stubs: - self._stubs['acknowledge'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/Acknowledge', - request_serializer=pubsub.AcknowledgeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['acknowledge'] - - @property - def pull(self) -> Callable[ - [pubsub.PullRequest], - Awaitable[pubsub.PullResponse]]: - r"""Return a callable for the pull method over gRPC. - - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. - - Returns: - Callable[[~.PullRequest], - Awaitable[~.PullResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'pull' not in self._stubs: - self._stubs['pull'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/Pull', - request_serializer=pubsub.PullRequest.serialize, - response_deserializer=pubsub.PullResponse.deserialize, - ) - return self._stubs['pull'] - - @property - def streaming_pull(self) -> Callable[ - [pubsub.StreamingPullRequest], - Awaitable[pubsub.StreamingPullResponse]]: - r"""Return a callable for the streaming pull method over gRPC. - - Establishes a stream with the server, which sends messages down - to the client. The client streams acknowledgements and ack - deadline modifications back to the server. The server will close - the stream and return the status on any error. The server may - close the stream with status ``UNAVAILABLE`` to reassign - server-side resources, in which case, the client should - re-establish the stream. Flow control can be achieved by - configuring the underlying RPC channel. - - Returns: - Callable[[~.StreamingPullRequest], - Awaitable[~.StreamingPullResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'streaming_pull' not in self._stubs: - self._stubs['streaming_pull'] = self.grpc_channel.stream_stream( - '/google.pubsub.v1.Subscriber/StreamingPull', - request_serializer=pubsub.StreamingPullRequest.serialize, - response_deserializer=pubsub.StreamingPullResponse.deserialize, - ) - return self._stubs['streaming_pull'] - - @property - def modify_push_config(self) -> Callable[ - [pubsub.ModifyPushConfigRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the modify push config method over gRPC. - - Modifies the ``PushConfig`` for a specified subscription. - - This may be used to change a push subscription to a pull one - (signified by an empty ``PushConfig``) or vice versa, or change - the endpoint URL and other attributes of a push subscription. - Messages will accumulate for delivery continuously through the - call regardless of changes to the ``PushConfig``. - - Returns: - Callable[[~.ModifyPushConfigRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'modify_push_config' not in self._stubs: - self._stubs['modify_push_config'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/ModifyPushConfig', - request_serializer=pubsub.ModifyPushConfigRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['modify_push_config'] - - @property - def get_snapshot(self) -> Callable[ - [pubsub.GetSnapshotRequest], - Awaitable[pubsub.Snapshot]]: - r"""Return a callable for the get snapshot method over gRPC. - - Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. 
That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. - - Returns: - Callable[[~.GetSnapshotRequest], - Awaitable[~.Snapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_snapshot' not in self._stubs: - self._stubs['get_snapshot'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/GetSnapshot', - request_serializer=pubsub.GetSnapshotRequest.serialize, - response_deserializer=pubsub.Snapshot.deserialize, - ) - return self._stubs['get_snapshot'] - - @property - def list_snapshots(self) -> Callable[ - [pubsub.ListSnapshotsRequest], - Awaitable[pubsub.ListSnapshotsResponse]]: - r"""Return a callable for the list snapshots method over gRPC. - - Lists the existing snapshots. Snapshots are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - - Returns: - Callable[[~.ListSnapshotsRequest], - Awaitable[~.ListSnapshotsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_snapshots' not in self._stubs: - self._stubs['list_snapshots'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/ListSnapshots', - request_serializer=pubsub.ListSnapshotsRequest.serialize, - response_deserializer=pubsub.ListSnapshotsResponse.deserialize, - ) - return self._stubs['list_snapshots'] - - @property - def create_snapshot(self) -> Callable[ - [pubsub.CreateSnapshotRequest], - Awaitable[pubsub.Snapshot]]: - r"""Return a callable for the create snapshot method over gRPC. - - Creates a snapshot from the requested subscription. Snapshots - are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - If the snapshot already exists, returns ``ALREADY_EXISTS``. If - the requested subscription doesn't exist, returns ``NOT_FOUND``. - If the backlog in the subscription is too old -- and the - resulting snapshot would expire in less than 1 hour -- then - ``FAILED_PRECONDITION`` is returned. See also the - ``Snapshot.expire_time`` field. If the name is not provided in - the request, the server will assign a random name for this - snapshot on the same project as the subscription, conforming to - the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. - Note that for REST API requests, you must specify a name in the - request. - - Returns: - Callable[[~.CreateSnapshotRequest], - Awaitable[~.Snapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_snapshot' not in self._stubs: - self._stubs['create_snapshot'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/CreateSnapshot', - request_serializer=pubsub.CreateSnapshotRequest.serialize, - response_deserializer=pubsub.Snapshot.deserialize, - ) - return self._stubs['create_snapshot'] - - @property - def update_snapshot(self) -> Callable[ - [pubsub.UpdateSnapshotRequest], - Awaitable[pubsub.Snapshot]]: - r"""Return a callable for the update snapshot method over gRPC. - - Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. - - Returns: - Callable[[~.UpdateSnapshotRequest], - Awaitable[~.Snapshot]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_snapshot' not in self._stubs: - self._stubs['update_snapshot'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/UpdateSnapshot', - request_serializer=pubsub.UpdateSnapshotRequest.serialize, - response_deserializer=pubsub.Snapshot.deserialize, - ) - return self._stubs['update_snapshot'] - - @property - def delete_snapshot(self) -> Callable[ - [pubsub.DeleteSnapshotRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete snapshot method over gRPC. - - Removes an existing snapshot. Snapshots are used in [Seek] - (https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. 
- When the snapshot is deleted, all messages retained in the - snapshot are immediately dropped. After a snapshot is deleted, a - new one may be created with the same name, but the new one has - no association with the old snapshot or its subscription, unless - the same subscription is specified. - - Returns: - Callable[[~.DeleteSnapshotRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_snapshot' not in self._stubs: - self._stubs['delete_snapshot'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/DeleteSnapshot', - request_serializer=pubsub.DeleteSnapshotRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_snapshot'] - - @property - def seek(self) -> Callable[ - [pubsub.SeekRequest], - Awaitable[pubsub.SeekResponse]]: - r"""Return a callable for the seek method over gRPC. - - Seeks an existing subscription to a point in time or to a given - snapshot, whichever is provided in the request. Snapshots are - used in [Seek] - (https://cloud.google.com/pubsub/docs/replay-overview) - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages - in an existing subscription to the state captured by a snapshot. - Note that both the subscription and the snapshot must be on the - same topic. - - Returns: - Callable[[~.SeekRequest], - Awaitable[~.SeekResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'seek' not in self._stubs: - self._stubs['seek'] = self.grpc_channel.unary_unary( - '/google.pubsub.v1.Subscriber/Seek', - request_serializer=pubsub.SeekRequest.serialize, - response_deserializer=pubsub.SeekResponse.deserialize, - ) - return self._stubs['seek'] - - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'SubscriberGrpcAsyncIOTransport', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py deleted file mode 100644 index cb65f15ef091..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/__init__.py +++ /dev/null @@ -1,136 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .pubsub import ( - AcknowledgeRequest, - BigQueryConfig, - CreateSnapshotRequest, - DeadLetterPolicy, - DeleteSnapshotRequest, - DeleteSubscriptionRequest, - DeleteTopicRequest, - DetachSubscriptionRequest, - DetachSubscriptionResponse, - ExpirationPolicy, - GetSnapshotRequest, - GetSubscriptionRequest, - GetTopicRequest, - ListSnapshotsRequest, - ListSnapshotsResponse, - ListSubscriptionsRequest, - ListSubscriptionsResponse, - ListTopicSnapshotsRequest, - ListTopicSnapshotsResponse, - ListTopicsRequest, - ListTopicsResponse, - ListTopicSubscriptionsRequest, - ListTopicSubscriptionsResponse, - MessageStoragePolicy, - ModifyAckDeadlineRequest, - ModifyPushConfigRequest, - PublishRequest, - PublishResponse, - PubsubMessage, - PullRequest, - PullResponse, - PushConfig, - ReceivedMessage, - RetryPolicy, - SchemaSettings, - SeekRequest, - SeekResponse, - Snapshot, - StreamingPullRequest, - StreamingPullResponse, - Subscription, - Topic, - UpdateSnapshotRequest, - UpdateSubscriptionRequest, - UpdateTopicRequest, -) -from .schema import ( - CreateSchemaRequest, - DeleteSchemaRequest, - GetSchemaRequest, - ListSchemasRequest, - ListSchemasResponse, - Schema, - ValidateMessageRequest, - ValidateMessageResponse, - ValidateSchemaRequest, - ValidateSchemaResponse, - Encoding, - SchemaView, -) - -__all__ = ( - 'AcknowledgeRequest', - 'BigQueryConfig', - 'CreateSnapshotRequest', - 'DeadLetterPolicy', - 'DeleteSnapshotRequest', - 'DeleteSubscriptionRequest', - 'DeleteTopicRequest', - 'DetachSubscriptionRequest', - 'DetachSubscriptionResponse', - 'ExpirationPolicy', - 'GetSnapshotRequest', - 'GetSubscriptionRequest', - 'GetTopicRequest', - 'ListSnapshotsRequest', - 'ListSnapshotsResponse', - 'ListSubscriptionsRequest', - 'ListSubscriptionsResponse', - 'ListTopicSnapshotsRequest', - 'ListTopicSnapshotsResponse', - 'ListTopicsRequest', - 'ListTopicsResponse', - 'ListTopicSubscriptionsRequest', - 'ListTopicSubscriptionsResponse', - 'MessageStoragePolicy', - 
'ModifyAckDeadlineRequest', - 'ModifyPushConfigRequest', - 'PublishRequest', - 'PublishResponse', - 'PubsubMessage', - 'PullRequest', - 'PullResponse', - 'PushConfig', - 'ReceivedMessage', - 'RetryPolicy', - 'SchemaSettings', - 'SeekRequest', - 'SeekResponse', - 'Snapshot', - 'StreamingPullRequest', - 'StreamingPullResponse', - 'Subscription', - 'Topic', - 'UpdateSnapshotRequest', - 'UpdateSubscriptionRequest', - 'UpdateTopicRequest', - 'CreateSchemaRequest', - 'DeleteSchemaRequest', - 'GetSchemaRequest', - 'ListSchemasRequest', - 'ListSchemasResponse', - 'Schema', - 'ValidateMessageRequest', - 'ValidateMessageResponse', - 'ValidateSchemaRequest', - 'ValidateSchemaResponse', - 'Encoding', - 'SchemaView', -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py deleted file mode 100644 index 48f6b24000f4..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/pubsub.py +++ /dev/null @@ -1,1866 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.pubsub_v1.types import schema as gp_schema - - -__protobuf__ = proto.module( - package='google.pubsub.v1', - manifest={ - 'MessageStoragePolicy', - 'SchemaSettings', - 'Topic', - 'PubsubMessage', - 'GetTopicRequest', - 'UpdateTopicRequest', - 'PublishRequest', - 'PublishResponse', - 'ListTopicsRequest', - 'ListTopicsResponse', - 'ListTopicSubscriptionsRequest', - 'ListTopicSubscriptionsResponse', - 'ListTopicSnapshotsRequest', - 'ListTopicSnapshotsResponse', - 'DeleteTopicRequest', - 'DetachSubscriptionRequest', - 'DetachSubscriptionResponse', - 'Subscription', - 'RetryPolicy', - 'DeadLetterPolicy', - 'ExpirationPolicy', - 'PushConfig', - 'BigQueryConfig', - 'ReceivedMessage', - 'GetSubscriptionRequest', - 'UpdateSubscriptionRequest', - 'ListSubscriptionsRequest', - 'ListSubscriptionsResponse', - 'DeleteSubscriptionRequest', - 'ModifyPushConfigRequest', - 'PullRequest', - 'PullResponse', - 'ModifyAckDeadlineRequest', - 'AcknowledgeRequest', - 'StreamingPullRequest', - 'StreamingPullResponse', - 'CreateSnapshotRequest', - 'UpdateSnapshotRequest', - 'Snapshot', - 'GetSnapshotRequest', - 'ListSnapshotsRequest', - 'ListSnapshotsResponse', - 'DeleteSnapshotRequest', - 'SeekRequest', - 'SeekResponse', - }, -) - - -class MessageStoragePolicy(proto.Message): - r"""A policy constraining the storage of messages published to - the topic. - - Attributes: - allowed_persistence_regions (Sequence[str]): - A list of IDs of GCP regions where messages - that are published to the topic may be persisted - in storage. Messages published by publishers - running in non-allowed GCP regions (or running - outside of GCP altogether) will be routed for - storage in one of the allowed regions. 
An empty - list means that no regions are allowed, and is - not a valid configuration. - """ - - allowed_persistence_regions = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class SchemaSettings(proto.Message): - r"""Settings for validating messages published against a schema. - - Attributes: - schema (str): - Required. The name of the schema that messages published - should be validated against. Format is - ``projects/{project}/schemas/{schema}``. The value of this - field will be ``_deleted-schema_`` if the schema has been - deleted. - encoding (google.pubsub_v1.types.Encoding): - The encoding of messages validated against ``schema``. - """ - - schema = proto.Field( - proto.STRING, - number=1, - ) - encoding = proto.Field( - proto.ENUM, - number=2, - enum=gp_schema.Encoding, - ) - - -class Topic(proto.Message): - r"""A topic resource. - - Attributes: - name (str): - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must - start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), plus - (``+``) or percent signs (``%``). It must be between 3 and - 255 characters in length, and it must not start with - ``"goog"``. - labels (Mapping[str, str]): - See [Creating and managing labels] - (https://cloud.google.com/pubsub/docs/labels). - message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy): - Policy constraining the set of Google Cloud - Platform regions where messages published to the - topic may be stored. If not present, then no - constraints are in effect. - kms_key_name (str): - The resource name of the Cloud KMS CryptoKey to be used to - protect access to messages published on this topic. - - The expected format is - ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. 
- schema_settings (google.pubsub_v1.types.SchemaSettings): - Settings for validating messages published - against a schema. - satisfies_pzs (bool): - Reserved for future use. This field is set - only in responses from the server; it is ignored - if it is set in any requests. - message_retention_duration (google.protobuf.duration_pb2.Duration): - Indicates the minimum duration to retain a message after it - is published to the topic. If this field is set, messages - published to the topic in the last - ``message_retention_duration`` are always available to - subscribers. For instance, it allows any attached - subscription to `seek to a - timestamp `__ - that is up to ``message_retention_duration`` in the past. If - this field is not set, message retention is controlled by - settings on individual subscriptions. Cannot be more than 7 - days or less than 10 minutes. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - message_storage_policy = proto.Field( - proto.MESSAGE, - number=3, - message='MessageStoragePolicy', - ) - kms_key_name = proto.Field( - proto.STRING, - number=5, - ) - schema_settings = proto.Field( - proto.MESSAGE, - number=6, - message='SchemaSettings', - ) - satisfies_pzs = proto.Field( - proto.BOOL, - number=7, - ) - message_retention_duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - - -class PubsubMessage(proto.Message): - r"""A message that is published by publishers and consumed by - subscribers. The message must contain either a non-empty data field - or at least one attribute. Note that client libraries represent this - object differently depending on the language. See the corresponding - `client library - documentation `__ - for more information. See [quotas and limits] - (https://cloud.google.com/pubsub/quotas) for more information about - message limits. - - Attributes: - data (bytes): - The message data field. 
If this field is - empty, the message must contain at least one - attribute. - attributes (Mapping[str, str]): - Attributes for this message. If this field is - empty, the message must contain non-empty data. - This can be used to filter messages on the - subscription. - message_id (str): - ID of this message, assigned by the server when the message - is published. Guaranteed to be unique within the topic. This - value may be read by a subscriber that receives a - ``PubsubMessage`` via a ``Pull`` call or a push delivery. It - must not be populated by the publisher in a ``Publish`` - call. - publish_time (google.protobuf.timestamp_pb2.Timestamp): - The time at which the message was published, populated by - the server when it receives the ``Publish`` call. It must - not be populated by the publisher in a ``Publish`` call. - ordering_key (str): - If non-empty, identifies related messages for which publish - order should be respected. If a ``Subscription`` has - ``enable_message_ordering`` set to ``true``, messages - published with the same non-empty ``ordering_key`` value - will be delivered to subscribers in the order in which they - are received by the Pub/Sub system. All ``PubsubMessage``\ s - published in a given ``PublishRequest`` must specify the - same ``ordering_key`` value. - """ - - data = proto.Field( - proto.BYTES, - number=1, - ) - attributes = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - message_id = proto.Field( - proto.STRING, - number=3, - ) - publish_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - ordering_key = proto.Field( - proto.STRING, - number=5, - ) - - -class GetTopicRequest(proto.Message): - r"""Request for the GetTopic method. - - Attributes: - topic (str): - Required. The name of the topic to get. Format is - ``projects/{project}/topics/{topic}``. 
- """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateTopicRequest(proto.Message): - r"""Request for the UpdateTopic method. - - Attributes: - topic (google.pubsub_v1.types.Topic): - Required. The updated topic object. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Indicates which fields in the provided topic to - update. Must be specified and non-empty. Note that if - ``update_mask`` contains "message_storage_policy" but the - ``message_storage_policy`` is not set in the ``topic`` - provided above, then the updated value is determined by the - policy configured at the project or organization level. - """ - - topic = proto.Field( - proto.MESSAGE, - number=1, - message='Topic', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class PublishRequest(proto.Message): - r"""Request for the Publish method. - - Attributes: - topic (str): - Required. The messages in the request will be published on - this topic. Format is ``projects/{project}/topics/{topic}``. - messages (Sequence[google.pubsub_v1.types.PubsubMessage]): - Required. The messages to publish. - """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - messages = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='PubsubMessage', - ) - - -class PublishResponse(proto.Message): - r"""Response for the ``Publish`` method. - - Attributes: - message_ids (Sequence[str]): - The server-assigned ID of each published - message, in the same order as the messages in - the request. IDs are guaranteed to be unique - within the topic. - """ - - message_ids = proto.RepeatedField( - proto.STRING, - number=1, - ) - - -class ListTopicsRequest(proto.Message): - r"""Request for the ``ListTopics`` method. - - Attributes: - project (str): - Required. The name of the project in which to list topics. - Format is ``projects/{project-id}``. - page_size (int): - Maximum number of topics to return. 
- page_token (str): - The value returned by the last ``ListTopicsResponse``; - indicates that this is a continuation of a prior - ``ListTopics`` call, and that the system should return the - next page of data. - """ - - project = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListTopicsResponse(proto.Message): - r"""Response for the ``ListTopics`` method. - - Attributes: - topics (Sequence[google.pubsub_v1.types.Topic]): - The resulting topics. - next_page_token (str): - If not empty, indicates that there may be more topics that - match the request; this value should be passed in a new - ``ListTopicsRequest``. - """ - - @property - def raw_page(self): - return self - - topics = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Topic', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class ListTopicSubscriptionsRequest(proto.Message): - r"""Request for the ``ListTopicSubscriptions`` method. - - Attributes: - topic (str): - Required. The name of the topic that subscriptions are - attached to. Format is - ``projects/{project}/topics/{topic}``. - page_size (int): - Maximum number of subscription names to - return. - page_token (str): - The value returned by the last - ``ListTopicSubscriptionsResponse``; indicates that this is a - continuation of a prior ``ListTopicSubscriptions`` call, and - that the system should return the next page of data. - """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListTopicSubscriptionsResponse(proto.Message): - r"""Response for the ``ListTopicSubscriptions`` method. - - Attributes: - subscriptions (Sequence[str]): - The names of subscriptions attached to the - topic specified in the request. 
- next_page_token (str): - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListTopicSubscriptionsRequest`` to get more subscriptions. - """ - - @property - def raw_page(self): - return self - - subscriptions = proto.RepeatedField( - proto.STRING, - number=1, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class ListTopicSnapshotsRequest(proto.Message): - r"""Request for the ``ListTopicSnapshots`` method. - - Attributes: - topic (str): - Required. The name of the topic that snapshots are attached - to. Format is ``projects/{project}/topics/{topic}``. - page_size (int): - Maximum number of snapshot names to return. - page_token (str): - The value returned by the last - ``ListTopicSnapshotsResponse``; indicates that this is a - continuation of a prior ``ListTopicSnapshots`` call, and - that the system should return the next page of data. - """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListTopicSnapshotsResponse(proto.Message): - r"""Response for the ``ListTopicSnapshots`` method. - - Attributes: - snapshots (Sequence[str]): - The names of the snapshots that match the - request. - next_page_token (str): - If not empty, indicates that there may be more snapshots - that match the request; this value should be passed in a new - ``ListTopicSnapshotsRequest`` to get more snapshots. - """ - - @property - def raw_page(self): - return self - - snapshots = proto.RepeatedField( - proto.STRING, - number=1, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteTopicRequest(proto.Message): - r"""Request for the ``DeleteTopic`` method. - - Attributes: - topic (str): - Required. Name of the topic to delete. Format is - ``projects/{project}/topics/{topic}``. 
- """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - - -class DetachSubscriptionRequest(proto.Message): - r"""Request for the DetachSubscription method. - - Attributes: - subscription (str): - Required. The subscription to detach. Format is - ``projects/{project}/subscriptions/{subscription}``. - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - - -class DetachSubscriptionResponse(proto.Message): - r"""Response for the DetachSubscription method. - Reserved for future use. - - """ - - -class Subscription(proto.Message): - r"""A subscription resource. - - Attributes: - name (str): - Required. The name of the subscription. It must have the - format - ``"projects/{project}/subscriptions/{subscription}"``. - ``{subscription}`` must start with a letter, and contain - only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes - (``-``), underscores (``_``), periods (``.``), tildes - (``~``), plus (``+``) or percent signs (``%``). It must be - between 3 and 255 characters in length, and it must not - start with ``"goog"``. - topic (str): - Required. The name of the topic from which this subscription - is receiving messages. Format is - ``projects/{project}/topics/{topic}``. The value of this - field will be ``_deleted-topic_`` if the topic has been - deleted. - push_config (google.pubsub_v1.types.PushConfig): - If push delivery is used with this subscription, this field - is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages using - API methods. - bigquery_config (google.pubsub_v1.types.BigQueryConfig): - If delivery to BigQuery is used with this subscription, this - field is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages using - API methods. 
- ack_deadline_seconds (int): - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt - before resending the message. In the interval after the - message is delivered and before it is acknowledged, it is - considered to be outstanding. During that time period, the - message will not be redelivered (on a best-effort basis). - - For pull subscriptions, this value is used as the initial - value for the ack deadline. To override this value for a - given message, call ``ModifyAckDeadline`` with the - corresponding ``ack_id`` if using non-streaming pull or send - the ``ack_id`` in a ``StreamingModifyAckDeadlineRequest`` if - using streaming pull. The minimum custom deadline you can - specify is 10 seconds. The maximum custom deadline you can - specify is 600 seconds (10 minutes). If this parameter is 0, - a default value of 10 seconds is used. - - For push delivery, this value is also used to set the - request timeout for the call to the push endpoint. - - If the subscriber never acknowledges the message, the - Pub/Sub system will eventually redeliver the message. - retain_acked_messages (bool): - Indicates whether to retain acknowledged messages. If true, - then messages are not expunged from the subscription's - backlog, even if they are acknowledged, until they fall out - of the ``message_retention_duration`` window. This must be - true if you would like to [``Seek`` to a timestamp] - (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) - in the past to replay previously-acknowledged messages. - message_retention_duration (google.protobuf.duration_pb2.Duration): - How long to retain unacknowledged messages in the - subscription's backlog, from the moment a message is - published. If ``retain_acked_messages`` is true, then this - also configures the retention of acknowledged messages, and - thus configures how far back in time a ``Seek`` can be done. - Defaults to 7 days. 
Cannot be more than 7 days or less than - 10 minutes. - labels (Mapping[str, str]): - See - Creating and managing labels. - enable_message_ordering (bool): - If true, messages published with the same ``ordering_key`` - in ``PubsubMessage`` will be delivered to the subscribers in - the order in which they are received by the Pub/Sub system. - Otherwise, they may be delivered in any order. - expiration_policy (google.pubsub_v1.types.ExpirationPolicy): - A policy that specifies the conditions for this - subscription's expiration. A subscription is considered - active as long as any connected subscriber is successfully - consuming messages from the subscription or is issuing - operations on the subscription. If ``expiration_policy`` is - not set, a *default policy* with ``ttl`` of 31 days will be - used. The minimum allowed value for - ``expiration_policy.ttl`` is 1 day. - filter (str): - An expression written in the Pub/Sub `filter - language `__. - If non-empty, then only ``PubsubMessage``\ s whose - ``attributes`` field matches the filter are delivered on - this subscription. If empty, then no messages are filtered - out. - dead_letter_policy (google.pubsub_v1.types.DeadLetterPolicy): - A policy that specifies the conditions for dead lettering - messages in this subscription. If dead_letter_policy is not - set, dead lettering is disabled. - - The Cloud Pub/Sub service account associated with this - subscriptions's parent project (i.e., - service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) - must have permission to Acknowledge() messages on this - subscription. - retry_policy (google.pubsub_v1.types.RetryPolicy): - A policy that specifies how Pub/Sub retries - message delivery for this subscription. - - If not set, the default retry policy is applied. - This generally implies that messages will be - retried as soon as possible for healthy - subscribers. 
RetryPolicy will be triggered on - NACKs or acknowledgement deadline exceeded - events for a given message. - detached (bool): - Indicates whether the subscription is detached from its - topic. Detached subscriptions don't receive messages from - their topic and don't retain any backlog. ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. - If the subscription is a push subscription, pushes to the - endpoint will not be made. - enable_exactly_once_delivery (bool): - If true, Pub/Sub provides the following guarantees for the - delivery of a message with a given value of ``message_id`` - on this subscription: - - - The message sent to a subscriber is guaranteed not to be - resent before the message's acknowledgement deadline - expires. - - An acknowledged message will not be resent to a - subscriber. - - Note that subscribers may still receive multiple copies of a - message when ``enable_exactly_once_delivery`` is true if the - message was published multiple times by a publisher client. - These copies are considered distinct by Pub/Sub and have - distinct ``message_id`` values. - topic_message_retention_duration (google.protobuf.duration_pb2.Duration): - Output only. Indicates the minimum duration for which a - message is retained after it is published to the - subscription's topic. If this field is set, messages - published to the subscription's topic in the last - ``topic_message_retention_duration`` are always available to - subscribers. See the ``message_retention_duration`` field in - ``Topic``. This field is set only in responses from the - server; it is ignored if it is set in any requests. - state (google.pubsub_v1.types.Subscription.State): - Output only. An output-only field indicating - whether or not the subscription can receive - messages. 
- """ - class State(proto.Enum): - r"""Possible states for a subscription.""" - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - RESOURCE_ERROR = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - topic = proto.Field( - proto.STRING, - number=2, - ) - push_config = proto.Field( - proto.MESSAGE, - number=4, - message='PushConfig', - ) - bigquery_config = proto.Field( - proto.MESSAGE, - number=18, - message='BigQueryConfig', - ) - ack_deadline_seconds = proto.Field( - proto.INT32, - number=5, - ) - retain_acked_messages = proto.Field( - proto.BOOL, - number=7, - ) - message_retention_duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=9, - ) - enable_message_ordering = proto.Field( - proto.BOOL, - number=10, - ) - expiration_policy = proto.Field( - proto.MESSAGE, - number=11, - message='ExpirationPolicy', - ) - filter = proto.Field( - proto.STRING, - number=12, - ) - dead_letter_policy = proto.Field( - proto.MESSAGE, - number=13, - message='DeadLetterPolicy', - ) - retry_policy = proto.Field( - proto.MESSAGE, - number=14, - message='RetryPolicy', - ) - detached = proto.Field( - proto.BOOL, - number=15, - ) - enable_exactly_once_delivery = proto.Field( - proto.BOOL, - number=16, - ) - topic_message_retention_duration = proto.Field( - proto.MESSAGE, - number=17, - message=duration_pb2.Duration, - ) - state = proto.Field( - proto.ENUM, - number=19, - enum=State, - ) - - -class RetryPolicy(proto.Message): - r"""A policy that specifies how Cloud Pub/Sub retries message delivery. - - Retry delay will be exponential based on provided minimum and - maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. - - RetryPolicy will be triggered on NACKs or acknowledgement deadline - exceeded events for a given message. - - Retry Policy is implemented on a best effort basis. At times, the - delay between consecutive deliveries may not match the - configuration. 
That is, delay can be more or less than configured - backoff. - - Attributes: - minimum_backoff (google.protobuf.duration_pb2.Duration): - The minimum delay between consecutive - deliveries of a given message. Value should be - between 0 and 600 seconds. Defaults to 10 - seconds. - maximum_backoff (google.protobuf.duration_pb2.Duration): - The maximum delay between consecutive - deliveries of a given message. Value should be - between 0 and 600 seconds. Defaults to 600 - seconds. - """ - - minimum_backoff = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - maximum_backoff = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - -class DeadLetterPolicy(proto.Message): - r"""Dead lettering is done on a best effort basis. The same - message might be dead lettered multiple times. - - If validation on any of the fields fails at subscription - creation/updation, the create/update subscription request will - fail. - - Attributes: - dead_letter_topic (str): - The name of the topic to which dead letter messages should - be published. Format is - ``projects/{project}/topics/{topic}``.The Cloud Pub/Sub - service account associated with the enclosing subscription's - parent project (i.e., - service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) - must have permission to Publish() to this topic. - - The operation will fail if the topic does not exist. Users - should ensure that there is a subscription attached to this - topic since messages published to a topic with no - subscriptions are lost. - max_delivery_attempts (int): - The maximum number of delivery attempts for any message. The - value must be between 5 and 100. - - The number of delivery attempts is defined as 1 + (the sum - of number of NACKs and number of times the acknowledgement - deadline has been exceeded for the message). - - A NACK is any call to ModifyAckDeadline with a 0 deadline. 
- Note that client libraries may automatically extend - ack_deadlines. - - This field will be honored on a best effort basis. - - If this parameter is 0, a default value of 5 is used. - """ - - dead_letter_topic = proto.Field( - proto.STRING, - number=1, - ) - max_delivery_attempts = proto.Field( - proto.INT32, - number=2, - ) - - -class ExpirationPolicy(proto.Message): - r"""A policy that specifies the conditions for resource - expiration (i.e., automatic resource deletion). - - Attributes: - ttl (google.protobuf.duration_pb2.Duration): - Specifies the "time-to-live" duration for an associated - resource. The resource expires if it is not active for a - period of ``ttl``. The definition of "activity" depends on - the type of the associated resource. The minimum and maximum - allowed values for ``ttl`` depend on the type of the - associated resource, as well. If ``ttl`` is not set, the - associated resource never expires. - """ - - ttl = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - -class PushConfig(proto.Message): - r"""Configuration for a push delivery endpoint. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - push_endpoint (str): - A URL locating the endpoint to which messages should be - pushed. For example, a Webhook endpoint might use - ``https://example.com/push``. - attributes (Mapping[str, str]): - Endpoint configuration attributes that can be used to - control different aspects of the message delivery. - - The only currently supported attribute is - ``x-goog-version``, which you can use to change the format - of the pushed message. This attribute indicates the version - of the data expected by the endpoint. This controls the - shape of the pushed message (i.e., its fields and metadata). - - If not present during the ``CreateSubscription`` call, it - will default to the version of the Pub/Sub API used to make - such call. 
If not present in a ``ModifyPushConfig`` call, - its value will not be changed. ``GetSubscription`` calls - will always return a valid version, even if the subscription - was created without this attribute. - - The only supported values for the ``x-goog-version`` - attribute are: - - - ``v1beta1``: uses the push format defined in the v1beta1 - Pub/Sub API. - - ``v1`` or ``v1beta2``: uses the push format defined in - the v1 Pub/Sub API. - - For example: - - .. raw:: html - -
attributes { "x-goog-version": "v1" } 
- oidc_token (google.pubsub_v1.types.PushConfig.OidcToken): - If specified, Pub/Sub will generate and attach an OIDC JWT - token as an ``Authorization`` header in the HTTP request for - every pushed message. - - This field is a member of `oneof`_ ``authentication_method``. - """ - - class OidcToken(proto.Message): - r"""Contains information needed for generating an `OpenID Connect - token `__. - - Attributes: - service_account_email (str): - `Service account - email `__ - to be used for generating the OIDC token. The caller (for - CreateSubscription, UpdateSubscription, and ModifyPushConfig - RPCs) must have the iam.serviceAccounts.actAs permission for - the service account. - audience (str): - Audience to be used when generating OIDC - token. The audience claim identifies the - recipients that the JWT is intended for. The - audience value is a single case-sensitive - string. Having multiple values (array) for the - audience field is not supported. More info about - the OIDC JWT token audience here: - https://tools.ietf.org/html/rfc7519#section-4.1.3 - Note: if not specified, the Push endpoint URL - will be used. - """ - - service_account_email = proto.Field( - proto.STRING, - number=1, - ) - audience = proto.Field( - proto.STRING, - number=2, - ) - - push_endpoint = proto.Field( - proto.STRING, - number=1, - ) - attributes = proto.MapField( - proto.STRING, - proto.STRING, - number=2, - ) - oidc_token = proto.Field( - proto.MESSAGE, - number=3, - oneof='authentication_method', - message=OidcToken, - ) - - -class BigQueryConfig(proto.Message): - r"""Configuration for a BigQuery subscription. - - Attributes: - table (str): - The name of the table to which to write data, - of the form {projectId}:{datasetId}.{tableId} - use_topic_schema (bool): - When true, use the topic's schema as the - columns to write to in BigQuery, if it exists. 
- write_metadata (bool): - When true, write the subscription name, message_id, - publish_time, attributes, and ordering_key to additional - columns in the table. The subscription name, message_id, and - publish_time fields are put in their own columns while all - other message properties (other than data) are written to a - JSON object in the attributes column. - drop_unknown_fields (bool): - When true and use_topic_schema is true, any fields that are - a part of the topic schema that are not part of the BigQuery - table schema are dropped when writing to BigQuery. - Otherwise, the schemas must be kept in sync and any messages - with extra fields are not written and remain in the - subscription's backlog. - state (google.pubsub_v1.types.BigQueryConfig.State): - Output only. An output-only field that - indicates whether or not the subscription can - receive messages. - """ - class State(proto.Enum): - r"""Possible states for a BigQuery subscription.""" - STATE_UNSPECIFIED = 0 - ACTIVE = 1 - PERMISSION_DENIED = 2 - NOT_FOUND = 3 - SCHEMA_MISMATCH = 4 - - table = proto.Field( - proto.STRING, - number=1, - ) - use_topic_schema = proto.Field( - proto.BOOL, - number=2, - ) - write_metadata = proto.Field( - proto.BOOL, - number=3, - ) - drop_unknown_fields = proto.Field( - proto.BOOL, - number=4, - ) - state = proto.Field( - proto.ENUM, - number=5, - enum=State, - ) - - -class ReceivedMessage(proto.Message): - r"""A message and its corresponding acknowledgment ID. - - Attributes: - ack_id (str): - This ID can be used to acknowledge the - received message. - message (google.pubsub_v1.types.PubsubMessage): - The message. - delivery_attempt (int): - The approximate number of times that Cloud Pub/Sub has - attempted to deliver the associated message to a subscriber. - - More precisely, this is 1 + (number of NACKs) + (number of - ack_deadline exceeds) for this message. - - A NACK is any call to ModifyAckDeadline with a 0 deadline. 
- An ack_deadline exceeds event is whenever a message is not - acknowledged within ack_deadline. Note that ack_deadline is - initially Subscription.ackDeadlineSeconds, but may get - extended automatically by the client library. - - Upon the first delivery of a given message, - ``delivery_attempt`` will have a value of 1. The value is - calculated at best effort and is approximate. - - If a DeadLetterPolicy is not set on the subscription, this - will be 0. - """ - - ack_id = proto.Field( - proto.STRING, - number=1, - ) - message = proto.Field( - proto.MESSAGE, - number=2, - message='PubsubMessage', - ) - delivery_attempt = proto.Field( - proto.INT32, - number=3, - ) - - -class GetSubscriptionRequest(proto.Message): - r"""Request for the GetSubscription method. - - Attributes: - subscription (str): - Required. The name of the subscription to get. Format is - ``projects/{project}/subscriptions/{sub}``. - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateSubscriptionRequest(proto.Message): - r"""Request for the UpdateSubscription method. - - Attributes: - subscription (google.pubsub_v1.types.Subscription): - Required. The updated subscription object. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Indicates which fields in the - provided subscription to update. Must be - specified and non-empty. - """ - - subscription = proto.Field( - proto.MESSAGE, - number=1, - message='Subscription', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class ListSubscriptionsRequest(proto.Message): - r"""Request for the ``ListSubscriptions`` method. - - Attributes: - project (str): - Required. The name of the project in which to list - subscriptions. Format is ``projects/{project-id}``. - page_size (int): - Maximum number of subscriptions to return. 
- page_token (str): - The value returned by the last - ``ListSubscriptionsResponse``; indicates that this is a - continuation of a prior ``ListSubscriptions`` call, and that - the system should return the next page of data. - """ - - project = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListSubscriptionsResponse(proto.Message): - r"""Response for the ``ListSubscriptions`` method. - - Attributes: - subscriptions (Sequence[google.pubsub_v1.types.Subscription]): - The subscriptions that match the request. - next_page_token (str): - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListSubscriptionsRequest`` to get more subscriptions. - """ - - @property - def raw_page(self): - return self - - subscriptions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Subscription', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteSubscriptionRequest(proto.Message): - r"""Request for the DeleteSubscription method. - - Attributes: - subscription (str): - Required. The subscription to delete. Format is - ``projects/{project}/subscriptions/{sub}``. - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - - -class ModifyPushConfigRequest(proto.Message): - r"""Request for the ModifyPushConfig method. - - Attributes: - subscription (str): - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - push_config (google.pubsub_v1.types.PushConfig): - Required. The push configuration for future deliveries. - - An empty ``pushConfig`` indicates that the Pub/Sub system - should stop pushing messages from the given subscription and - allow messages to be pulled and acknowledged - effectively - pausing the subscription if ``Pull`` or ``StreamingPull`` is - not called. 
- """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - push_config = proto.Field( - proto.MESSAGE, - number=2, - message='PushConfig', - ) - - -class PullRequest(proto.Message): - r"""Request for the ``Pull`` method. - - Attributes: - subscription (str): - Required. The subscription from which messages should be - pulled. Format is - ``projects/{project}/subscriptions/{sub}``. - return_immediately (bool): - Optional. If this field set to true, the system will respond - immediately even if it there are no messages available to - return in the ``Pull`` response. Otherwise, the system may - wait (for a bounded amount of time) until at least one - message is available, rather than returning no messages. - Warning: setting this field to ``true`` is discouraged - because it adversely impacts the performance of ``Pull`` - operations. We recommend that users do not set this field. - max_messages (int): - Required. The maximum number of messages to - return for this request. Must be a positive - integer. The Pub/Sub system may return fewer - than the number specified. - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - return_immediately = proto.Field( - proto.BOOL, - number=2, - ) - max_messages = proto.Field( - proto.INT32, - number=3, - ) - - -class PullResponse(proto.Message): - r"""Response for the ``Pull`` method. - - Attributes: - received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): - Received Pub/Sub messages. The list will be empty if there - are no more messages available in the backlog. For JSON, the - response can be entirely empty. The Pub/Sub system may - return fewer than the ``maxMessages`` requested even if - there are more messages available in the backlog. - """ - - received_messages = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ReceivedMessage', - ) - - -class ModifyAckDeadlineRequest(proto.Message): - r"""Request for the ModifyAckDeadline method. 
- - Attributes: - subscription (str): - Required. The name of the subscription. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (Sequence[str]): - Required. List of acknowledgment IDs. - ack_deadline_seconds (int): - Required. The new ack deadline with respect to the time this - request was sent to the Pub/Sub system. For example, if the - value is 10, the new ack deadline will expire 10 seconds - after the ``ModifyAckDeadline`` call was made. Specifying - zero might immediately make the message available for - delivery to another subscriber client. This typically - results in an increase in the rate of message redeliveries - (that is, duplicates). The minimum deadline you can specify - is 0 seconds. The maximum deadline you can specify is 600 - seconds (10 minutes). - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - ack_ids = proto.RepeatedField( - proto.STRING, - number=4, - ) - ack_deadline_seconds = proto.Field( - proto.INT32, - number=3, - ) - - -class AcknowledgeRequest(proto.Message): - r"""Request for the Acknowledge method. - - Attributes: - subscription (str): - Required. The subscription whose message is being - acknowledged. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (Sequence[str]): - Required. The acknowledgment ID for the messages being - acknowledged that was returned by the Pub/Sub system in the - ``Pull`` response. Must not be empty. - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - ack_ids = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class StreamingPullRequest(proto.Message): - r"""Request for the ``StreamingPull`` streaming RPC method. This request - is used to establish the initial stream as well as to stream - acknowledgements and ack deadline modifications from the client to - the server. - - Attributes: - subscription (str): - Required. The subscription for which to initialize the new - stream. 
This must be provided in the first request on the - stream, and must not be set in subsequent requests from - client to server. Format is - ``projects/{project}/subscriptions/{sub}``. - ack_ids (Sequence[str]): - List of acknowledgement IDs for acknowledging previously - received messages (received on this stream or a different - stream). If an ack ID has expired, the corresponding message - may be redelivered later. Acknowledging a message more than - once will not result in an error. If the acknowledgement ID - is malformed, the stream will be aborted with status - ``INVALID_ARGUMENT``. - modify_deadline_seconds (Sequence[int]): - The list of new ack deadlines for the IDs listed in - ``modify_deadline_ack_ids``. The size of this list must be - the same as the size of ``modify_deadline_ack_ids``. If it - differs the stream will be aborted with - ``INVALID_ARGUMENT``. Each element in this list is applied - to the element in the same position in - ``modify_deadline_ack_ids``. The new ack deadline is with - respect to the time this request was sent to the Pub/Sub - system. Must be >= 0. For example, if the value is 10, the - new ack deadline will expire 10 seconds after this request - is received. If the value is 0, the message is immediately - made available for another streaming or non-streaming pull - request. If the value is < 0 (an error), the stream will be - aborted with status ``INVALID_ARGUMENT``. - modify_deadline_ack_ids (Sequence[str]): - List of acknowledgement IDs whose deadline will be modified - based on the corresponding element in - ``modify_deadline_seconds``. This field can be used to - indicate that more time is needed to process a message by - the subscriber, or to make the message available for - redelivery if the processing was interrupted. - stream_ack_deadline_seconds (int): - Required. The ack deadline to use for the - stream. 
This must be provided in the first - request on the stream, but it can also be - updated on subsequent requests from client to - server. The minimum deadline you can specify is - 10 seconds. The maximum deadline you can specify - is 600 seconds (10 minutes). - client_id (str): - A unique identifier that is used to distinguish client - instances from each other. Only needs to be provided on the - initial request. When a stream disconnects and reconnects - for the same stream, the client_id should be set to the same - value so that state associated with the old stream can be - transferred to the new stream. The same client_id should not - be used for different client instances. - max_outstanding_messages (int): - Flow control settings for the maximum number of outstanding - messages. When there are ``max_outstanding_messages`` or - more currently sent to the streaming pull client that have - not yet been acked or nacked, the server stops sending more - messages. The sending of messages resumes once the number of - outstanding messages is less than this value. If the value - is <= 0, there is no limit to the number of outstanding - messages. This property can only be set on the initial - StreamingPullRequest. If it is set on a subsequent request, - the stream will be aborted with status ``INVALID_ARGUMENT``. - max_outstanding_bytes (int): - Flow control settings for the maximum number of outstanding - bytes. When there are ``max_outstanding_bytes`` or more - worth of messages currently sent to the streaming pull - client that have not yet been acked or nacked, the server - will stop sending more messages. The sending of messages - resumes once the number of outstanding bytes is less than - this value. If the value is <= 0, there is no limit to the - number of outstanding bytes. This property can only be set - on the initial StreamingPullRequest. If it is set on a - subsequent request, the stream will be aborted with status - ``INVALID_ARGUMENT``. 
- """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - ack_ids = proto.RepeatedField( - proto.STRING, - number=2, - ) - modify_deadline_seconds = proto.RepeatedField( - proto.INT32, - number=3, - ) - modify_deadline_ack_ids = proto.RepeatedField( - proto.STRING, - number=4, - ) - stream_ack_deadline_seconds = proto.Field( - proto.INT32, - number=5, - ) - client_id = proto.Field( - proto.STRING, - number=6, - ) - max_outstanding_messages = proto.Field( - proto.INT64, - number=7, - ) - max_outstanding_bytes = proto.Field( - proto.INT64, - number=8, - ) - - -class StreamingPullResponse(proto.Message): - r"""Response for the ``StreamingPull`` method. This response is used to - stream messages from the server to the client. - - Attributes: - received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): - Received Pub/Sub messages. This will not be - empty. - acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): - This field will only be set if - ``enable_exactly_once_delivery`` is set to ``true``. - modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): - This field will only be set if - ``enable_exactly_once_delivery`` is set to ``true``. - subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): - Properties associated with this subscription. - """ - - class AcknowledgeConfirmation(proto.Message): - r"""Acknowledgement IDs sent in one or more previous requests to - acknowledge a previously received message. - - Attributes: - ack_ids (Sequence[str]): - Successfully processed acknowledgement IDs. - invalid_ack_ids (Sequence[str]): - List of acknowledgement IDs that were - malformed or whose acknowledgement deadline has - expired. - unordered_ack_ids (Sequence[str]): - List of acknowledgement IDs that were out of - order. 
- """ - - ack_ids = proto.RepeatedField( - proto.STRING, - number=1, - ) - invalid_ack_ids = proto.RepeatedField( - proto.STRING, - number=2, - ) - unordered_ack_ids = proto.RepeatedField( - proto.STRING, - number=3, - ) - - class ModifyAckDeadlineConfirmation(proto.Message): - r"""Acknowledgement IDs sent in one or more previous requests to - modify the deadline for a specific message. - - Attributes: - ack_ids (Sequence[str]): - Successfully processed acknowledgement IDs. - invalid_ack_ids (Sequence[str]): - List of acknowledgement IDs that were - malformed or whose acknowledgement deadline has - expired. - """ - - ack_ids = proto.RepeatedField( - proto.STRING, - number=1, - ) - invalid_ack_ids = proto.RepeatedField( - proto.STRING, - number=2, - ) - - class SubscriptionProperties(proto.Message): - r"""Subscription properties sent as part of the response. - - Attributes: - exactly_once_delivery_enabled (bool): - True iff exactly once delivery is enabled for - this subscription. - message_ordering_enabled (bool): - True iff message ordering is enabled for this - subscription. - """ - - exactly_once_delivery_enabled = proto.Field( - proto.BOOL, - number=1, - ) - message_ordering_enabled = proto.Field( - proto.BOOL, - number=2, - ) - - received_messages = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='ReceivedMessage', - ) - acknowledge_confirmation = proto.Field( - proto.MESSAGE, - number=5, - message=AcknowledgeConfirmation, - ) - modify_ack_deadline_confirmation = proto.Field( - proto.MESSAGE, - number=3, - message=ModifyAckDeadlineConfirmation, - ) - subscription_properties = proto.Field( - proto.MESSAGE, - number=4, - message=SubscriptionProperties, - ) - - -class CreateSnapshotRequest(proto.Message): - r"""Request for the ``CreateSnapshot`` method. - - Attributes: - name (str): - Required. User-provided name for this snapshot. 
If the name - is not provided in the request, the server will assign a - random name for this snapshot on the same project as the - subscription. Note that for REST API requests, you must - specify a name. See the resource name rules. Format is - ``projects/{project}/snapshots/{snap}``. - subscription (str): - Required. The subscription whose backlog the snapshot - retains. Specifically, the created snapshot is guaranteed to - retain: (a) The existing backlog on the subscription. More - precisely, this is defined as the messages in the - subscription's backlog that are unacknowledged upon the - successful completion of the ``CreateSnapshot`` request; as - well as: (b) Any messages published to the subscription's - topic following the successful completion of the - CreateSnapshot request. Format is - ``projects/{project}/subscriptions/{sub}``. - labels (Mapping[str, str]): - See - Creating and managing labels. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - subscription = proto.Field( - proto.STRING, - number=2, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class UpdateSnapshotRequest(proto.Message): - r"""Request for the UpdateSnapshot method. - - Attributes: - snapshot (google.pubsub_v1.types.Snapshot): - Required. The updated snapshot object. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Required. Indicates which fields in the - provided snapshot to update. Must be specified - and non-empty. - """ - - snapshot = proto.Field( - proto.MESSAGE, - number=1, - message='Snapshot', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=2, - message=field_mask_pb2.FieldMask, - ) - - -class Snapshot(proto.Message): - r"""A snapshot resource. Snapshots are used in - `Seek `__ - operations, which allow you to manage message acknowledgments in - bulk. That is, you can set the acknowledgment state of messages in - an existing subscription to the state captured by a snapshot. 
- - Attributes: - name (str): - The name of the snapshot. - topic (str): - The name of the topic from which this - snapshot is retaining messages. - expire_time (google.protobuf.timestamp_pb2.Timestamp): - The snapshot is guaranteed to exist up until this time. A - newly-created snapshot expires no later than 7 days from the - time of its creation. Its exact lifetime is determined at - creation by the existing backlog in the source subscription. - Specifically, the lifetime of the snapshot is - ``7 days - (age of oldest unacked message in the subscription)``. - For example, consider a subscription whose oldest unacked - message is 3 days old. If a snapshot is created from this - subscription, the snapshot -- which will always capture this - 3-day-old backlog as long as the snapshot exists -- will - expire in 4 days. The service will refuse to create a - snapshot that would expire in less than 1 hour after - creation. - labels (Mapping[str, str]): - See [Creating and managing labels] - (https://cloud.google.com/pubsub/docs/labels). - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - topic = proto.Field( - proto.STRING, - number=2, - ) - expire_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=4, - ) - - -class GetSnapshotRequest(proto.Message): - r"""Request for the GetSnapshot method. - - Attributes: - snapshot (str): - Required. The name of the snapshot to get. Format is - ``projects/{project}/snapshots/{snap}``. - """ - - snapshot = proto.Field( - proto.STRING, - number=1, - ) - - -class ListSnapshotsRequest(proto.Message): - r"""Request for the ``ListSnapshots`` method. - - Attributes: - project (str): - Required. The name of the project in which to list - snapshots. Format is ``projects/{project-id}``. - page_size (int): - Maximum number of snapshots to return. 
- page_token (str): - The value returned by the last ``ListSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListSnapshots`` call, and that the system should return - the next page of data. - """ - - project = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class ListSnapshotsResponse(proto.Message): - r"""Response for the ``ListSnapshots`` method. - - Attributes: - snapshots (Sequence[google.pubsub_v1.types.Snapshot]): - The resulting snapshots. - next_page_token (str): - If not empty, indicates that there may be more snapshot that - match the request; this value should be passed in a new - ``ListSnapshotsRequest``. - """ - - @property - def raw_page(self): - return self - - snapshots = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Snapshot', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteSnapshotRequest(proto.Message): - r"""Request for the ``DeleteSnapshot`` method. - - Attributes: - snapshot (str): - Required. The name of the snapshot to delete. Format is - ``projects/{project}/snapshots/{snap}``. - """ - - snapshot = proto.Field( - proto.STRING, - number=1, - ) - - -class SeekRequest(proto.Message): - r"""Request for the ``Seek`` method. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - subscription (str): - Required. The subscription to affect. - time (google.protobuf.timestamp_pb2.Timestamp): - The time to seek to. 
Messages retained in the subscription - that were published before this time are marked as - acknowledged, and messages retained in the subscription that - were published after this time are marked as unacknowledged. - Note that this operation affects only those messages - retained in the subscription (configured by the combination - of ``message_retention_duration`` and - ``retain_acked_messages``). For example, if ``time`` - corresponds to a point before the message retention window - (or to a point before the system's notion of the - subscription creation time), only retained messages will be - marked as unacknowledged, and already-expunged messages will - not be restored. - - This field is a member of `oneof`_ ``target``. - snapshot (str): - The snapshot to seek to. The snapshot's topic must be the - same as that of the provided subscription. Format is - ``projects/{project}/snapshots/{snap}``. - - This field is a member of `oneof`_ ``target``. - """ - - subscription = proto.Field( - proto.STRING, - number=1, - ) - time = proto.Field( - proto.MESSAGE, - number=2, - oneof='target', - message=timestamp_pb2.Timestamp, - ) - snapshot = proto.Field( - proto.STRING, - number=3, - oneof='target', - ) - - -class SeekResponse(proto.Message): - r"""Response for the ``Seek`` method (this response is empty). - """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/schema.py deleted file mode 100644 index a1a43be882b9..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/google/pubsub_v1/types/schema.py +++ /dev/null @@ -1,319 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - - -__protobuf__ = proto.module( - package='google.pubsub.v1', - manifest={ - 'SchemaView', - 'Encoding', - 'Schema', - 'CreateSchemaRequest', - 'GetSchemaRequest', - 'ListSchemasRequest', - 'ListSchemasResponse', - 'DeleteSchemaRequest', - 'ValidateSchemaRequest', - 'ValidateSchemaResponse', - 'ValidateMessageRequest', - 'ValidateMessageResponse', - }, -) - - -class SchemaView(proto.Enum): - r"""View of Schema object fields to be returned by GetSchema and - ListSchemas. - """ - SCHEMA_VIEW_UNSPECIFIED = 0 - BASIC = 1 - FULL = 2 - - -class Encoding(proto.Enum): - r"""Possible encoding types for messages.""" - ENCODING_UNSPECIFIED = 0 - JSON = 1 - BINARY = 2 - - -class Schema(proto.Message): - r"""A schema resource. - - Attributes: - name (str): - Required. Name of the schema. Format is - ``projects/{project}/schemas/{schema}``. - type_ (google.pubsub_v1.types.Schema.Type): - The type of the schema definition. - definition (str): - The definition of the schema. This should contain a string - representing the full definition of the schema that is a - valid schema definition of the type specified in ``type``. - """ - class Type(proto.Enum): - r"""Possible schema definition types.""" - TYPE_UNSPECIFIED = 0 - PROTOCOL_BUFFER = 1 - AVRO = 2 - - name = proto.Field( - proto.STRING, - number=1, - ) - type_ = proto.Field( - proto.ENUM, - number=2, - enum=Type, - ) - definition = proto.Field( - proto.STRING, - number=3, - ) - - -class CreateSchemaRequest(proto.Message): - r"""Request for the CreateSchema method. 
- - Attributes: - parent (str): - Required. The name of the project in which to create the - schema. Format is ``projects/{project-id}``. - schema (google.pubsub_v1.types.Schema): - Required. The schema object to create. - - This schema's ``name`` parameter is ignored. The schema - object returned by CreateSchema will have a ``name`` made - using the given ``parent`` and ``schema_id``. - schema_id (str): - The ID to use for the schema, which will become the final - component of the schema's resource name. - - See - https://cloud.google.com/pubsub/docs/admin#resource_names - for resource name constraints. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - schema = proto.Field( - proto.MESSAGE, - number=2, - message='Schema', - ) - schema_id = proto.Field( - proto.STRING, - number=3, - ) - - -class GetSchemaRequest(proto.Message): - r"""Request for the GetSchema method. - - Attributes: - name (str): - Required. The name of the schema to get. Format is - ``projects/{project}/schemas/{schema}``. - view (google.pubsub_v1.types.SchemaView): - The set of fields to return in the response. If not set, - returns a Schema with ``name`` and ``type``, but not - ``definition``. Set to ``FULL`` to retrieve all fields. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - view = proto.Field( - proto.ENUM, - number=2, - enum='SchemaView', - ) - - -class ListSchemasRequest(proto.Message): - r"""Request for the ``ListSchemas`` method. - - Attributes: - parent (str): - Required. The name of the project in which to list schemas. - Format is ``projects/{project-id}``. - view (google.pubsub_v1.types.SchemaView): - The set of Schema fields to return in the response. If not - set, returns Schemas with ``name`` and ``type``, but not - ``definition``. Set to ``FULL`` to retrieve all fields. - page_size (int): - Maximum number of schemas to return. 
- page_token (str): - The value returned by the last ``ListSchemasResponse``; - indicates that this is a continuation of a prior - ``ListSchemas`` call, and that the system should return the - next page of data. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - view = proto.Field( - proto.ENUM, - number=2, - enum='SchemaView', - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - page_token = proto.Field( - proto.STRING, - number=4, - ) - - -class ListSchemasResponse(proto.Message): - r"""Response for the ``ListSchemas`` method. - - Attributes: - schemas (Sequence[google.pubsub_v1.types.Schema]): - The resulting schemas. - next_page_token (str): - If not empty, indicates that there may be more schemas that - match the request; this value should be passed in a new - ``ListSchemasRequest``. - """ - - @property - def raw_page(self): - return self - - schemas = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Schema', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteSchemaRequest(proto.Message): - r"""Request for the ``DeleteSchema`` method. - - Attributes: - name (str): - Required. Name of the schema to delete. Format is - ``projects/{project}/schemas/{schema}``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ValidateSchemaRequest(proto.Message): - r"""Request for the ``ValidateSchema`` method. - - Attributes: - parent (str): - Required. The name of the project in which to validate - schemas. Format is ``projects/{project-id}``. - schema (google.pubsub_v1.types.Schema): - Required. The schema object to validate. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - schema = proto.Field( - proto.MESSAGE, - number=2, - message='Schema', - ) - - -class ValidateSchemaResponse(proto.Message): - r"""Response for the ``ValidateSchema`` method. Empty for now. - """ - - -class ValidateMessageRequest(proto.Message): - r"""Request for the ``ValidateMessage`` method. 
- - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - parent (str): - Required. The name of the project in which to validate - schemas. Format is ``projects/{project-id}``. - name (str): - Name of the schema against which to validate. - - Format is ``projects/{project}/schemas/{schema}``. - - This field is a member of `oneof`_ ``schema_spec``. - schema (google.pubsub_v1.types.Schema): - Ad-hoc schema against which to validate - - This field is a member of `oneof`_ ``schema_spec``. - message (bytes): - Message to validate against the provided ``schema_spec``. - encoding (google.pubsub_v1.types.Encoding): - The encoding expected for messages - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - name = proto.Field( - proto.STRING, - number=2, - oneof='schema_spec', - ) - schema = proto.Field( - proto.MESSAGE, - number=3, - oneof='schema_spec', - message='Schema', - ) - message = proto.Field( - proto.BYTES, - number=4, - ) - encoding = proto.Field( - proto.ENUM, - number=5, - enum='Encoding', - ) - - -class ValidateMessageResponse(proto.Message): - r"""Response for the ``ValidateMessage`` method. Empty for now. 
- """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/mypy.ini b/packages/google-cloud-pubsub/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 4505b485436b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/noxfile.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index ea844afbfe85..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,180 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.6", - "3.7", - "3.8", - "3.9", - "3.10", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==19.10b0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.9" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/pubsub_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. 
- """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py deleted file mode 100644 index e79f28c983b3..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_CreateTopic_async] -from google import pubsub_v1 - - -async def sample_create_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.Topic( - name="name_value", - ) - - # Make the request - response = await client.create_topic(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_CreateTopic_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py deleted file mode 100644 index 6a6f04a271f4..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_CreateTopic_sync] -from google import pubsub_v1 - - -def sample_create_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.Topic( - name="name_value", - ) - - # Make the request - response = client.create_topic(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_CreateTopic_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py deleted file mode 100644 index 2a0148abbad1..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_DeleteTopic_async] -from google import pubsub_v1 - - -async def sample_delete_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteTopicRequest( - topic="topic_value", - ) - - # Make the request - await client.delete_topic(request=request) - - -# [END pubsub_v1_generated_Publisher_DeleteTopic_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py deleted file mode 100644 index 376a93ba085e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_DeleteTopic_sync] -from google import pubsub_v1 - - -def sample_delete_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteTopicRequest( - topic="topic_value", - ) - - # Make the request - client.delete_topic(request=request) - - -# [END pubsub_v1_generated_Publisher_DeleteTopic_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py deleted file mode 100644 index 6fb8d4e7d3e7..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DetachSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_DetachSubscription_async] -from google import pubsub_v1 - - -async def sample_detach_subscription(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DetachSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = await client.detach_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_DetachSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py deleted file mode 100644 index 7c36e4df1291..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DetachSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_DetachSubscription_sync] -from google import pubsub_v1 - - -def sample_detach_subscription(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.DetachSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = client.detach_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_DetachSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py deleted file mode 100644 index 87904db2b378..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_GetTopic_async] -from google import pubsub_v1 - - -async def sample_get_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetTopicRequest( - topic="topic_value", - ) - - # Make the request - response = await client.get_topic(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_GetTopic_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py deleted file mode 100644 index 2f28cef0a8a2..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_GetTopic_sync] -from google import pubsub_v1 - - -def sample_get_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.GetTopicRequest( - topic="topic_value", - ) - - # Make the request - response = client.get_topic(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_GetTopic_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py deleted file mode 100644 index b6388f7f5722..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTopicSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_async] -from google import pubsub_v1 - - -async def sample_list_topic_snapshots(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSnapshotsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_snapshots(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py deleted file mode 100644 index f7f3a61ec9bf..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTopicSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. 
-# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] -from google import pubsub_v1 - - -def sample_list_topic_snapshots(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSnapshotsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_snapshots(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py deleted file mode 100644 index 59b35194b459..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ListTopicSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] -from google import pubsub_v1 - - -async def sample_list_topic_subscriptions(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSubscriptionsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py deleted file mode 100644 index d7dffa0e2d3b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTopicSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] -from google import pubsub_v1 - - -def sample_list_topic_subscriptions(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicSubscriptionsRequest( - topic="topic_value", - ) - - # Make the request - page_result = client.list_topic_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py deleted file mode 100644 index 0d0f10a9896f..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTopics -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_ListTopics_async] -from google import pubsub_v1 - - -async def sample_list_topics(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_topics(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END pubsub_v1_generated_Publisher_ListTopics_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py deleted file mode 100644 index cffdd77a49af..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListTopics -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_ListTopics_sync] -from google import pubsub_v1 - - -def sample_list_topics(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.ListTopicsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_topics(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END pubsub_v1_generated_Publisher_ListTopics_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py deleted file mode 100644 index 98bfc618e64e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Publish -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_Publish_async] -from google import pubsub_v1 - - -async def sample_publish(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.PublishRequest( - topic="topic_value", - ) - - # Make the request - response = await client.publish(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_Publish_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py deleted file mode 100644 index 650440a78436..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for Publish -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_Publish_sync] -from google import pubsub_v1 - - -def sample_publish(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - request = pubsub_v1.PublishRequest( - topic="topic_value", - ) - - # Make the request - response = client.publish(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_Publish_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py deleted file mode 100644 index 473144d07caa..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for UpdateTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_UpdateTopic_async] -from google import pubsub_v1 - - -async def sample_update_topic(): - # Create a client - client = pubsub_v1.PublisherAsyncClient() - - # Initialize request argument(s) - topic = pubsub_v1.Topic() - topic.name = "name_value" - - request = pubsub_v1.UpdateTopicRequest( - topic=topic, - ) - - # Make the request - response = await client.update_topic(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_UpdateTopic_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py deleted file mode 100644 index 5a9838c2acfd..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for UpdateTopic -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Publisher_UpdateTopic_sync] -from google import pubsub_v1 - - -def sample_update_topic(): - # Create a client - client = pubsub_v1.PublisherClient() - - # Initialize request argument(s) - topic = pubsub_v1.Topic() - topic.name = "name_value" - - request = pubsub_v1.UpdateTopicRequest( - topic=topic, - ) - - # Make the request - response = client.update_topic(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Publisher_UpdateTopic_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py deleted file mode 100644 index 9f979072528b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for CreateSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_CreateSchema_async] -from google import pubsub_v1 - - -async def sample_create_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.CreateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = await client.create_schema(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_CreateSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py deleted file mode 100644 index 798194050d75..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_CreateSchema_sync] -from google import pubsub_v1 - - -def sample_create_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.CreateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = client.create_schema(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_CreateSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py deleted file mode 100644 index 6d5e8f7345ca..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_DeleteSchema_async] -from google import pubsub_v1 - - -async def sample_delete_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSchemaRequest( - name="name_value", - ) - - # Make the request - await client.delete_schema(request=request) - - -# [END pubsub_v1_generated_SchemaService_DeleteSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py deleted file mode 100644 index 2e516b97aa77..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_DeleteSchema_sync] -from google import pubsub_v1 - - -def sample_delete_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSchemaRequest( - name="name_value", - ) - - # Make the request - client.delete_schema(request=request) - - -# [END pubsub_v1_generated_SchemaService_DeleteSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py deleted file mode 100644 index 10db352c3abf..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. 
DO NOT EDIT! -# -# Snippet for GetSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_GetSchema_async] -from google import pubsub_v1 - - -async def sample_get_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSchemaRequest( - name="name_value", - ) - - # Make the request - response = await client.get_schema(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_GetSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py deleted file mode 100644 index 7d3cdf6d1d44..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for GetSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_GetSchema_sync] -from google import pubsub_v1 - - -def sample_get_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSchemaRequest( - name="name_value", - ) - - # Make the request - response = client.get_schema(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_GetSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py deleted file mode 100644 index a1c9be6ee6e2..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ListSchemas -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_ListSchemas_async] -from google import pubsub_v1 - - -async def sample_list_schemas(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSchemasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_schemas(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END pubsub_v1_generated_SchemaService_ListSchemas_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py deleted file mode 100644 index 4604da242389..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ListSchemas -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_ListSchemas_sync] -from google import pubsub_v1 - - -def sample_list_schemas(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSchemasRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_schemas(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END pubsub_v1_generated_SchemaService_ListSchemas_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py deleted file mode 100644 index 94a699e53de0..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ValidateMessage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_ValidateMessage_async] -from google import pubsub_v1 - - -async def sample_validate_message(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ValidateMessageRequest( - name="name_value", - parent="parent_value", - ) - - # Make the request - response = await client.validate_message(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_ValidateMessage_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py deleted file mode 100644 index 26e32efa13d8..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. 
DO NOT EDIT! -# -# Snippet for ValidateMessage -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_ValidateMessage_sync] -from google import pubsub_v1 - - -def sample_validate_message(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - request = pubsub_v1.ValidateMessageRequest( - name="name_value", - parent="parent_value", - ) - - # Make the request - response = client.validate_message(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_ValidateMessage_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py deleted file mode 100644 index 86647c7bd65f..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! 
-# -# Snippet for ValidateSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_ValidateSchema_async] -from google import pubsub_v1 - - -async def sample_validate_schema(): - # Create a client - client = pubsub_v1.SchemaServiceAsyncClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.ValidateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = await client.validate_schema(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_ValidateSchema_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py deleted file mode 100644 index 102fb75edc03..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ValidateSchema -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_SchemaService_ValidateSchema_sync] -from google import pubsub_v1 - - -def sample_validate_schema(): - # Create a client - client = pubsub_v1.SchemaServiceClient() - - # Initialize request argument(s) - schema = pubsub_v1.Schema() - schema.name = "name_value" - - request = pubsub_v1.ValidateSchemaRequest( - parent="parent_value", - schema=schema, - ) - - # Make the request - response = client.validate_schema(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_SchemaService_ValidateSchema_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py deleted file mode 100644 index 8f87241a1f7e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Acknowledge -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_Acknowledge_async] -from google import pubsub_v1 - - -async def sample_acknowledge(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.AcknowledgeRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ) - - # Make the request - await client.acknowledge(request=request) - - -# [END pubsub_v1_generated_Subscriber_Acknowledge_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py deleted file mode 100644 index a56c55a33c73..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Acknowledge -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_Acknowledge_sync] -from google import pubsub_v1 - - -def sample_acknowledge(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.AcknowledgeRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ) - - # Make the request - client.acknowledge(request=request) - - -# [END pubsub_v1_generated_Subscriber_Acknowledge_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py deleted file mode 100644 index 6e2d4538771d..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_CreateSnapshot_async] -from google import pubsub_v1 - - -async def sample_create_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.CreateSnapshotRequest( - name="name_value", - subscription="subscription_value", - ) - - # Make the request - response = await client.create_snapshot(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_CreateSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py deleted file mode 100644 index b6145acb903f..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_CreateSnapshot_sync] -from google import pubsub_v1 - - -def sample_create_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.CreateSnapshotRequest( - name="name_value", - subscription="subscription_value", - ) - - # Make the request - response = client.create_snapshot(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_CreateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py deleted file mode 100644 index 4c63c47cd594..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_CreateSubscription_async] -from google import pubsub_v1 - - -async def sample_create_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.Subscription( - name="name_value", - topic="topic_value", - ) - - # Make the request - response = await client.create_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_CreateSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py deleted file mode 100644 index 6e37969f1f8c..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_CreateSubscription_sync] -from google import pubsub_v1 - - -def sample_create_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.Subscription( - name="name_value", - topic="topic_value", - ) - - # Make the request - response = client.create_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_CreateSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py deleted file mode 100644 index 26e2c7aa783e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_async] -from google import pubsub_v1 - - -async def sample_delete_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - await client.delete_snapshot(request=request) - - -# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py deleted file mode 100644 index f2538ddb0ca3..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] -from google import pubsub_v1 - - -def sample_delete_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - client.delete_snapshot(request=request) - - -# [END pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py deleted file mode 100644 index f310d24b2869..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_DeleteSubscription_async] -from google import pubsub_v1 - - -async def sample_delete_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - await client.delete_subscription(request=request) - - -# [END pubsub_v1_generated_Subscriber_DeleteSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py deleted file mode 100644 index c601dd6633b2..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_DeleteSubscription_sync] -from google import pubsub_v1 - - -def sample_delete_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.DeleteSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - client.delete_subscription(request=request) - - -# [END pubsub_v1_generated_Subscriber_DeleteSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py deleted file mode 100644 index 3a56e4fbbe0e..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_GetSnapshot_async] -from google import pubsub_v1 - - -async def sample_get_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - response = await client.get_snapshot(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_GetSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py deleted file mode 100644 index 3a6cd24ca08a..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_GetSnapshot_sync] -from google import pubsub_v1 - - -def sample_get_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSnapshotRequest( - snapshot="snapshot_value", - ) - - # Make the request - response = client.get_snapshot(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_GetSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py deleted file mode 100644 index 7ad71832664f..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_GetSubscription_async] -from google import pubsub_v1 - - -async def sample_get_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = await client.get_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_GetSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py deleted file mode 100644 index d883e085dfbe..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_GetSubscription_sync] -from google import pubsub_v1 - - -def sample_get_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.GetSubscriptionRequest( - subscription="subscription_value", - ) - - # Make the request - response = client.get_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_GetSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py deleted file mode 100644 index edc7976a1293..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ListSnapshots_async] -from google import pubsub_v1 - - -async def sample_list_snapshots(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSnapshotsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_snapshots(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END pubsub_v1_generated_Subscriber_ListSnapshots_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py deleted file mode 100644 index e67ca2a39633..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSnapshots -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ListSnapshots_sync] -from google import pubsub_v1 - - -def sample_list_snapshots(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSnapshotsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_snapshots(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END pubsub_v1_generated_Subscriber_ListSnapshots_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py deleted file mode 100644 index 01c45577a7f2..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ListSubscriptions_async] -from google import pubsub_v1 - - -async def sample_list_subscriptions(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSubscriptionsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END pubsub_v1_generated_Subscriber_ListSubscriptions_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py deleted file mode 100644 index 272b0408d9cf..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListSubscriptions -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ListSubscriptions_sync] -from google import pubsub_v1 - - -def sample_list_subscriptions(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ListSubscriptionsRequest( - project="project_value", - ) - - # Make the request - page_result = client.list_subscriptions(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END pubsub_v1_generated_Subscriber_ListSubscriptions_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py deleted file mode 100644 index b85c2033ff49..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with 
the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyAckDeadline -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] -from google import pubsub_v1 - - -async def sample_modify_ack_deadline(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyAckDeadlineRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ack_deadline_seconds=2066, - ) - - # Make the request - await client.modify_ack_deadline(request=request) - - -# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py deleted file mode 100644 index ac0805db437c..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file 
except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyAckDeadline -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] -from google import pubsub_v1 - - -def sample_modify_ack_deadline(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyAckDeadlineRequest( - subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], - ack_deadline_seconds=2066, - ) - - # Make the request - client.modify_ack_deadline(request=request) - - -# [END pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py deleted file mode 100644 index 662823a1d682..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this 
file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyPushConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_async] -from google import pubsub_v1 - - -async def sample_modify_push_config(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyPushConfigRequest( - subscription="subscription_value", - ) - - # Make the request - await client.modify_push_config(request=request) - - -# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py deleted file mode 100644 index a7499941c486..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ModifyPushConfig -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] -from google import pubsub_v1 - - -def sample_modify_push_config(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.ModifyPushConfigRequest( - subscription="subscription_value", - ) - - # Make the request - client.modify_push_config(request=request) - - -# [END pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py deleted file mode 100644 index 113f3ddfcffe..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Pull -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_Pull_async] -from google import pubsub_v1 - - -async def sample_pull(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.PullRequest( - subscription="subscription_value", - max_messages=1277, - ) - - # Make the request - response = await client.pull(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_Pull_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py deleted file mode 100644 index abb47bfa16b2..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py +++ /dev/null @@ -1,46 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Pull -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_Pull_sync] -from google import pubsub_v1 - - -def sample_pull(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.PullRequest( - subscription="subscription_value", - max_messages=1277, - ) - - # Make the request - response = client.pull(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_Pull_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py deleted file mode 100644 index 062c69409de9..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Seek -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_Seek_async] -from google import pubsub_v1 - - -async def sample_seek(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.SeekRequest( - subscription="subscription_value", - ) - - # Make the request - response = await client.seek(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_Seek_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py deleted file mode 100644 index f28570e7c779..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for Seek -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_Seek_sync] -from google import pubsub_v1 - - -def sample_seek(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.SeekRequest( - subscription="subscription_value", - ) - - # Make the request - response = client.seek(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_Seek_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py deleted file mode 100644 index 64c1e37483c0..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamingPull -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_StreamingPull_async] -from google import pubsub_v1 - - -async def sample_streaming_pull(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.StreamingPullRequest( - subscription="subscription_value", - stream_ack_deadline_seconds=2813, - ) - - # This method expects an iterator which contains - # 'pubsub_v1.StreamingPullRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. 
- requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = await client.streaming_pull(requests=request_generator()) - - # Handle the response - async for response in stream: - print(response) - -# [END pubsub_v1_generated_Subscriber_StreamingPull_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py deleted file mode 100644 index 0aa02fa40cdc..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py +++ /dev/null @@ -1,57 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for StreamingPull -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. 
- -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_StreamingPull_sync] -from google import pubsub_v1 - - -def sample_streaming_pull(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.StreamingPullRequest( - subscription="subscription_value", - stream_ack_deadline_seconds=2813, - ) - - # This method expects an iterator which contains - # 'pubsub_v1.StreamingPullRequest' objects - # Here we create a generator that yields a single `request` for - # demonstrative purposes. - requests = [request] - - def request_generator(): - for request in requests: - yield request - - # Make the request - stream = client.streaming_pull(requests=request_generator()) - - # Handle the response - for response in stream: - print(response) - -# [END pubsub_v1_generated_Subscriber_StreamingPull_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py deleted file mode 100644 index f07bca1f5d76..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_async] -from google import pubsub_v1 - - -async def sample_update_snapshot(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - request = pubsub_v1.UpdateSnapshotRequest( - ) - - # Make the request - response = await client.update_snapshot(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py deleted file mode 100644 index 7afe32ec2259..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSnapshot -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] -from google import pubsub_v1 - - -def sample_update_snapshot(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - request = pubsub_v1.UpdateSnapshotRequest( - ) - - # Make the request - response = client.update_snapshot(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py deleted file mode 100644 index 5a0410ec36cb..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_UpdateSubscription_async] -from google import pubsub_v1 - - -async def sample_update_subscription(): - # Create a client - client = pubsub_v1.SubscriberAsyncClient() - - # Initialize request argument(s) - subscription = pubsub_v1.Subscription() - subscription.name = "name_value" - subscription.topic = "topic_value" - - request = pubsub_v1.UpdateSubscriptionRequest( - subscription=subscription, - ) - - # Make the request - response = await client.update_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_UpdateSubscription_async] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py deleted file mode 100644 index 75d6e8a95299..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py +++ /dev/null @@ -1,49 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for UpdateSubscription -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-pubsub - - -# [START pubsub_v1_generated_Subscriber_UpdateSubscription_sync] -from google import pubsub_v1 - - -def sample_update_subscription(): - # Create a client - client = pubsub_v1.SubscriberClient() - - # Initialize request argument(s) - subscription = pubsub_v1.Subscription() - subscription.name = "name_value" - subscription.topic = "topic_value" - - request = pubsub_v1.UpdateSubscriptionRequest( - subscription=subscription, - ) - - # Make the request - response = client.update_subscription(request=request) - - # Handle the response - print(response) - -# [END pubsub_v1_generated_Subscriber_UpdateSubscription_sync] diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json deleted file mode 100644 index 0f5906e95364..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_pubsub_v1.json +++ /dev/null @@ -1,5019 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.pubsub.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-pubsub" - }, - "snippets": [ - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.create_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.CreateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "CreateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Topic" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "create_topic" - }, - "description": "Sample for CreateTopic", - "file": "pubsub_v1_generated_publisher_create_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_create_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.create_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.CreateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "CreateTopic" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.pubsub_v1.types.Topic" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "create_topic" - }, - "description": "Sample for CreateTopic", - "file": "pubsub_v1_generated_publisher_create_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_create_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.delete_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.DeleteTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DeleteTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_topic" - }, - "description": "Sample for DeleteTopic", - "file": "pubsub_v1_generated_publisher_delete_topic_async.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_async", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_delete_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.delete_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.DeleteTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DeleteTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_topic" - }, - "description": "Sample for DeleteTopic", - "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_delete_topic_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.detach_subscription", - "method": { - "fullName": "google.pubsub.v1.Publisher.DetachSubscription", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DetachSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DetachSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", - "shortName": "detach_subscription" - }, - "description": "Sample for DetachSubscription", - "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_detach_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.detach_subscription", - "method": { - "fullName": "google.pubsub.v1.Publisher.DetachSubscription", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": 
"DetachSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DetachSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", - "shortName": "detach_subscription" - }, - "description": "Sample for DetachSubscription", - "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_detach_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.get_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.GetTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "GetTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "get_topic" - 
}, - "description": "Sample for GetTopic", - "file": "pubsub_v1_generated_publisher_get_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_get_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.get_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.GetTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "GetTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "get_topic" - }, - "description": "Sample for GetTopic", - "file": "pubsub_v1_generated_publisher_get_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 
41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_get_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_snapshots", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", - "shortName": "list_topic_snapshots" - }, - "description": "Sample for ListTopicSnapshots", - "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": 
"PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.list_topic_snapshots", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", - "shortName": "list_topic_snapshots" - }, - "description": "Sample for ListTopicSnapshots", - "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSubscriptions" - }, - "parameters": [ - { - 
"name": "request", - "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", - "shortName": "list_topic_subscriptions" - }, - "description": "Sample for ListTopicSubscriptions", - "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.list_topic_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", - "shortName": "list_topic_subscriptions" - }, - "description": "Sample for ListTopicSubscriptions", - "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topics", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopics", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopics" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", - "shortName": "list_topics" - }, - "description": "Sample for ListTopics", - "file": "pubsub_v1_generated_publisher_list_topics_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", - 
"segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topics_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.list_topics", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopics", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopics" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", - "shortName": "list_topics" - }, - "description": "Sample for ListTopics", - "file": "pubsub_v1_generated_publisher_list_topics_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"pubsub_v1_generated_publisher_list_topics_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.publish", - "method": { - "fullName": "google.pubsub.v1.Publisher.Publish", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "Publish" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PublishRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "messages", - "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PublishResponse", - "shortName": "publish" - }, - "description": "Sample for Publish", - "file": "pubsub_v1_generated_publisher_publish_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_Publish_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_publish_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.publish", - "method": { - "fullName": "google.pubsub.v1.Publisher.Publish", - "service": { - "fullName": 
"google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "Publish" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PublishRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "messages", - "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PublishResponse", - "shortName": "publish" - }, - "description": "Sample for Publish", - "file": "pubsub_v1_generated_publisher_publish_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_publish_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.update_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.UpdateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "UpdateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateTopicRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "update_topic" - }, - "description": "Sample for UpdateTopic", - "file": "pubsub_v1_generated_publisher_update_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", - "segments": [ - { - "end": 47, - "start": 27, - "type": "FULL" - }, - { - "end": 47, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 41, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 44, - "start": 42, - "type": "REQUEST_EXECUTION" - }, - { - "end": 48, - "start": 45, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_update_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.update_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.UpdateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "UpdateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateTopicRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "update_topic" - }, - "description": "Sample for UpdateTopic", - "file": "pubsub_v1_generated_publisher_update_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", - "segments": [ - { - "end": 47, - "start": 27, - "type": "FULL" - }, - { - "end": 47, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - 
}, - { - "end": 41, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 44, - "start": 42, - "type": "REQUEST_EXECUTION" - }, - { - "end": 48, - "start": 45, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_update_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.CreateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "CreateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "schema_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "create_schema" - }, - "description": "Sample for CreateSchema", - "file": "pubsub_v1_generated_schema_service_create_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", - "segments": [ - { - "end": 48, - "start": 27, - "type": "FULL" - }, - { - "end": 48, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 42, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 45, - "start": 43, - "type": "REQUEST_EXECUTION" - }, - { - "end": 49, - "start": 46, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"pubsub_v1_generated_schema_service_create_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.CreateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "CreateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "schema_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "create_schema" - }, - "description": "Sample for CreateSchema", - "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", - "segments": [ - { - "end": 48, - "start": 27, - "type": "FULL" - }, - { - "end": 48, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 42, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 45, - "start": 43, - "type": "REQUEST_EXECUTION" - }, - { - "end": 49, - "start": 46, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": 
"google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "DeleteSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_schema" - }, - "description": "Sample for DeleteSchema", - "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "DeleteSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": 
"timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_schema" - }, - "description": "Sample for DeleteSchema", - "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.GetSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "GetSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "get_schema" - }, - "description": "Sample for GetSchema", - "file": "pubsub_v1_generated_schema_service_get_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - 
}, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_get_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.GetSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "GetSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "get_schema" - }, - "description": "Sample for GetSchema", - "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { 
- "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ListSchemas", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ListSchemas" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSchemasRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", - "shortName": "list_schemas" - }, - "description": "Sample for ListSchemas", - "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ListSchemas", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - 
"shortName": "ListSchemas" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSchemasRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", - "shortName": "list_schemas" - }, - "description": "Sample for ListSchemas", - "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_message", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateMessage" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateMessageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.pubsub_v1.types.ValidateMessageResponse", - "shortName": "validate_message" - }, - "description": "Sample for ValidateMessage", - "file": "pubsub_v1_generated_schema_service_validate_message_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_message_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.validate_message", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateMessage" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateMessageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateMessageResponse", - "shortName": "validate_message" - }, - "description": "Sample for ValidateMessage", - "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - 
"start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_message_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", - "shortName": "validate_schema" - }, - "description": "Sample for ValidateSchema", - "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", - "segments": [ - { - "end": 48, - "start": 27, - "type": "FULL" - }, - { - "end": 48, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 42, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 45, - "start": 43, - "type": "REQUEST_EXECUTION" - }, - { - "end": 49, - "start": 46, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.validate_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", - "shortName": "validate_schema" - }, - "description": "Sample for ValidateSchema", - "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", - "segments": [ - { - "end": 48, - "start": 27, - "type": "FULL" - }, - { - "end": 48, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 42, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 45, - "start": 43, - "type": "REQUEST_EXECUTION" - }, - { - "end": 49, - "start": 46, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": 
"google.pubsub_v1.SubscriberAsyncClient.acknowledge", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Acknowledge", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Acknowledge" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.AcknowledgeRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "acknowledge" - }, - "description": "Sample for Acknowledge", - "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", - "segments": [ - { - "end": 43, - "start": 27, - "type": "FULL" - }, - { - "end": 43, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 44, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_acknowledge_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.acknowledge", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Acknowledge", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Acknowledge" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.AcknowledgeRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": 
"retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "acknowledge" - }, - "description": "Sample for Acknowledge", - "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", - "segments": [ - { - "end": 43, - "start": 27, - "type": "FULL" - }, - { - "end": 43, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 44, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_acknowledge_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "create_snapshot" - }, - "description": "Sample for CreateSnapshot", - "file": "pubsub_v1_generated_subscriber_create_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"pubsub_v1_generated_Subscriber_CreateSnapshot_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.create_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "create_snapshot" - }, - "description": "Sample for CreateSnapshot", - "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - 
}, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Subscription" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "create_subscription" - }, - "description": "Sample for CreateSubscription", - "file": "pubsub_v1_generated_subscriber_create_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - 
"client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.create_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Subscription" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "create_subscription" - }, - "description": "Sample for CreateSubscription", - "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_snapshot", - "method": { - "fullName": 
"google.pubsub.v1.Subscriber.DeleteSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_snapshot" - }, - "description": "Sample for DeleteSnapshot", - "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.delete_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } 
- ], - "shortName": "delete_snapshot" - }, - "description": "Sample for DeleteSnapshot", - "file": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_subscription" - }, - "description": "Sample for DeleteSubscription", - "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - 
"type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.delete_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_subscription" - }, - "description": "Sample for DeleteSubscription", - "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, 
- "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "get_snapshot" - }, - "description": "Sample for GetSnapshot", - "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.get_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "get_snapshot" - }, - "description": "Sample for GetSnapshot", - "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "get_subscription" - }, - "description": "Sample for GetSubscription", - "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.get_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "get_subscription" - }, - "description": "Sample for GetSubscription", - "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": 
"REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_snapshots", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSnapshotsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", - "shortName": "list_snapshots" - }, - "description": "Sample for ListSnapshots", - "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_snapshots_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": 
"google.pubsub_v1.SubscriberClient.list_snapshots", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSnapshotsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", - "shortName": "list_snapshots" - }, - "description": "Sample for ListSnapshots", - "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_snapshots_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSubscriptionsRequest" - }, - { - 
"name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", - "shortName": "list_subscriptions" - }, - "description": "Sample for ListSubscriptions", - "file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_subscriptions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.list_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSubscriptionsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", - "shortName": "list_subscriptions" - }, - 
"description": "Sample for ListSubscriptions", - "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_ack_deadline", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyAckDeadline" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_ack_deadline" - }, - "description": "Sample for ModifyAckDeadline", - "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": 
"FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 40, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 41, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.modify_ack_deadline", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyAckDeadline" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_ack_deadline" - }, - "description": "Sample for ModifyAckDeadline", - "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 40, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 41, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_push_config", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyPushConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyPushConfigRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_push_config" - }, - "description": "Sample for ModifyPushConfig", - "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_push_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.modify_push_config", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", - 
"service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyPushConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyPushConfigRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_push_config" - }, - "description": "Sample for ModifyPushConfig", - "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", - "segments": [ - { - "end": 42, - "start": 27, - "type": "FULL" - }, - { - "end": 42, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_push_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Pull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Pull" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PullRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "return_immediately", - "type": "bool" - }, - { - "name": "max_messages", - "type": "int" - }, - { - "name": "retry", - 
"type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PullResponse", - "shortName": "pull" - }, - "description": "Sample for Pull", - "file": "pubsub_v1_generated_subscriber_pull_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_pull_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Pull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Pull" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PullRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "return_immediately", - "type": "bool" - }, - { - "name": "max_messages", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PullResponse", - "shortName": "pull" - }, - "description": "Sample for Pull", - "file": "pubsub_v1_generated_subscriber_pull_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"pubsub_v1_generated_Subscriber_Pull_sync", - "segments": [ - { - "end": 45, - "start": 27, - "type": "FULL" - }, - { - "end": 45, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 39, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 42, - "start": 40, - "type": "REQUEST_EXECUTION" - }, - { - "end": 46, - "start": 43, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_pull_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.seek", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Seek", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Seek" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.SeekRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.SeekResponse", - "shortName": "seek" - }, - "description": "Sample for Seek", - "file": "pubsub_v1_generated_subscriber_seek_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_seek_async.py" - }, - { - "canonical": true, 
- "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.seek", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Seek", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Seek" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.SeekRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.SeekResponse", - "shortName": "seek" - }, - "description": "Sample for Seek", - "file": "pubsub_v1_generated_subscriber_seek_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", - "segments": [ - { - "end": 44, - "start": 27, - "type": "FULL" - }, - { - "end": 44, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 38, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 41, - "start": 39, - "type": "REQUEST_EXECUTION" - }, - { - "end": 45, - "start": 42, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_seek_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.streaming_pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.StreamingPull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "StreamingPull" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", - "shortName": "streaming_pull" - }, - "description": "Sample for StreamingPull", - "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_streaming_pull_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.streaming_pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.StreamingPull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "StreamingPull" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", - "shortName": "streaming_pull" - }, - "description": "Sample for StreamingPull", - "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": 
"pubsub_v1_generated_Subscriber_StreamingPull_sync", - "segments": [ - { - "end": 56, - "start": 27, - "type": "FULL" - }, - { - "end": 56, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 57, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_streaming_pull_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "update_snapshot" - }, - "description": "Sample for UpdateSnapshot", - "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", - "segments": [ - { - "end": 43, - "start": 27, - "type": "FULL" - }, - { - "end": 43, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 37, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 40, - "start": 38, - "type": "REQUEST_EXECUTION" - }, - { - "end": 44, - "start": 41, - "type": 
"RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.update_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "update_snapshot" - }, - "description": "Sample for UpdateSnapshot", - "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", - "segments": [ - { - "end": 43, - "start": 27, - "type": "FULL" - }, - { - "end": 43, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 37, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 40, - "start": 38, - "type": "REQUEST_EXECUTION" - }, - { - "end": 44, - "start": 41, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", - "service": { - "fullName": 
"google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "update_subscription" - }, - "description": "Sample for UpdateSubscription", - "file": "pubsub_v1_generated_subscriber_update_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", - "segments": [ - { - "end": 48, - "start": 27, - "type": "FULL" - }, - { - "end": 48, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 42, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 45, - "start": 43, - "type": "REQUEST_EXECUTION" - }, - { - "end": 49, - "start": 46, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.update_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": 
"google.pubsub_v1.types.Subscription", - "shortName": "update_subscription" - }, - "description": "Sample for UpdateSubscription", - "file": "pubsub_v1_generated_subscriber_update_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", - "segments": [ - { - "end": 48, - "start": 27, - "type": "FULL" - }, - { - "end": 48, - "start": 27, - "type": "SHORT" - }, - { - "end": 33, - "start": 31, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 42, - "start": 34, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 45, - "start": 43, - "type": "REQUEST_EXECUTION" - }, - { - "end": 49, - "start": 46, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_subscription_sync.py" - } - ] -} diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py deleted file mode 100644 index d1bbcedf98af..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/scripts/fixup_pubsub_v1_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class pubsubCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge': ('subscription', 'ack_ids', ), - 'create_schema': ('parent', 'schema', 'schema_id', ), - 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), - 'delete_schema': ('name', ), - 'delete_snapshot': ('snapshot', ), - 'delete_subscription': ('subscription', ), - 'delete_topic': ('topic', ), - 'detach_subscription': ('subscription', ), - 'get_schema': ('name', 'view', ), - 'get_snapshot': ('snapshot', ), - 'get_subscription': ('subscription', ), - 'get_topic': ('topic', ), - 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), - 'list_snapshots': ('project', 'page_size', 'page_token', ), - 'list_subscriptions': ('project', 'page_size', 'page_token', ), - 'list_topics': ('project', 'page_size', 'page_token', ), - 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), - 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), 
- 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), - 'modify_push_config': ('subscription', 'push_config', ), - 'publish': ('topic', 'messages', ), - 'pull': ('subscription', 'max_messages', 'return_immediately', ), - 'seek': ('subscription', 'time', 'snapshot', ), - 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), - 'update_snapshot': ('snapshot', 'update_mask', ), - 'update_subscription': ('subscription', 'update_mask', ), - 'update_topic': ('topic', 'update_mask', ), - 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), - 'validate_schema': ('parent', 'schema', ), - 'get_iam_policy': ('resource', 'options', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=pubsubCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the pubsub client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/setup.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/setup.py deleted file mode 100644 index c69f9fbaca4d..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,60 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-pubsub', - author="Google LLC", - author_email="googleapis-packages@google.com", - url="https://github.com/googleapis/python-google-cloud-pubsub", - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google',), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.31.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'googleapis-common-protos >= 1.55.0, <2.0.0dev', - 'proto-plus >= 1.19.7', - 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', - ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 
231bc125017b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 231bc125017b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc125017b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py deleted file mode 100644 index 231bc125017b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py deleted file mode 100644 index e03b3e95a1ef..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_publisher.py +++ /dev/null @@ -1,4157 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock -except ImportError: - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.pubsub_v1.services.publisher import PublisherAsyncClient -from google.pubsub_v1.services.publisher import PublisherClient -from google.pubsub_v1.services.publisher import pagers -from google.pubsub_v1.services.publisher import transports -from google.pubsub_v1.types import pubsub -from google.pubsub_v1.types import schema -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert PublisherClient._get_default_mtls_endpoint(None) is None - assert PublisherClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert PublisherClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert PublisherClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert PublisherClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PublisherClient, "grpc"), - (PublisherAsyncClient, "grpc_asyncio"), -]) -def test_publisher_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.PublisherGrpcTransport, "grpc"), - (transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_publisher_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as 
use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (PublisherClient, "grpc"), - (PublisherAsyncClient, "grpc_asyncio"), -]) -def test_publisher_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - - -def test_publisher_client_get_transport_class(): - transport = PublisherClient.get_transport_class() - available_transports = [ - transports.PublisherGrpcTransport, - ] - assert transport in available_transports - - transport = PublisherClient.get_transport_class("grpc") - assert transport == transports.PublisherGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc"), - (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient)) 
-@mock.patch.object(PublisherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherAsyncClient)) -def test_publisher_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(PublisherClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(PublisherClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc", "true"), - (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (PublisherClient, transports.PublisherGrpcTransport, "grpc", "false"), - (PublisherAsyncClient, 
transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient)) -@mock.patch.object(PublisherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_publisher_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class", [ - PublisherClient, PublisherAsyncClient -]) -@mock.patch.object(PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient)) -@mock.patch.object(PublisherAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherAsyncClient)) -def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc"), - (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_publisher_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), - (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_publisher_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -def test_publisher_client_client_options_from_dict(): - with mock.patch('google.pubsub_v1.services.publisher.transports.PublisherGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = PublisherClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc", grpc_helpers), - (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_publisher_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "pubsub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - scopes=None, - default_host="pubsub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.Topic, - dict, -]) -def test_create_topic(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic( - name='name_value', - kms_key_name='kms_key_name_value', - satisfies_pzs=True, - ) - response = client.create_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.satisfies_pzs is True - - -def test_create_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - client.create_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() - -@pytest.mark.asyncio -async def test_create_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.Topic): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic( - name='name_value', - kms_key_name='kms_key_name_value', - satisfies_pzs=True, - )) - response = await client.create_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.satisfies_pzs is True - - -@pytest.mark.asyncio -async def test_create_topic_async_from_dict(): - await test_create_topic_async(request_type=dict) - - -def test_create_topic_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.Topic() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - call.return_value = pubsub.Topic() - client.create_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_topic_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = pubsub.Topic() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - await client.create_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_create_topic_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_topic( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_create_topic_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_topic( - pubsub.Topic(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_create_topic_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_topic( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_topic_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_topic( - pubsub.Topic(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.UpdateTopicRequest, - dict, -]) -def test_update_topic(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic( - name='name_value', - kms_key_name='kms_key_name_value', - satisfies_pzs=True, - ) - response = client.update_topic(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.satisfies_pzs is True - - -def test_update_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_topic), - '__call__') as call: - client.update_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() - -@pytest.mark.asyncio -async def test_update_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.UpdateTopicRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic( - name='name_value', - kms_key_name='kms_key_name_value', - satisfies_pzs=True, - )) - response = await client.update_topic(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.satisfies_pzs is True - - -@pytest.mark.asyncio -async def test_update_topic_async_from_dict(): - await test_update_topic_async(request_type=dict) - - -def test_update_topic_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.UpdateTopicRequest() - - request.topic.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_topic), - '__call__') as call: - call.return_value = pubsub.Topic() - client.update_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_topic_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.UpdateTopicRequest() - - request.topic.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_topic), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - await client.update_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - pubsub.PublishRequest, - dict, -]) -def test_publish(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.PublishResponse( - message_ids=['message_ids_value'], - ) - response = client.publish(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PublishResponse) - assert response.message_ids == ['message_ids_value'] - - -def test_publish_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - client.publish() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() - -@pytest.mark.asyncio -async def test_publish_async(transport: str = 'grpc_asyncio', request_type=pubsub.PublishRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PublishResponse( - message_ids=['message_ids_value'], - )) - response = await client.publish(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PublishResponse) - assert response.message_ids == ['message_ids_value'] - - -@pytest.mark.asyncio -async def test_publish_async_from_dict(): - await test_publish_async(request_type=dict) - - -def test_publish_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.PublishRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - call.return_value = pubsub.PublishResponse() - client.publish(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_publish_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.PublishRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PublishResponse()) - await client.publish(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -def test_publish_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.PublishResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.publish( - topic='topic_value', - messages=[pubsub.PubsubMessage(data=b'data_blob')], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - arg = args[0].messages - mock_val = [pubsub.PubsubMessage(data=b'data_blob')] - assert arg == mock_val - - -def test_publish_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.publish( - pubsub.PublishRequest(), - topic='topic_value', - messages=[pubsub.PubsubMessage(data=b'data_blob')], - ) - -@pytest.mark.asyncio -async def test_publish_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.publish), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.PublishResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PublishResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.publish( - topic='topic_value', - messages=[pubsub.PubsubMessage(data=b'data_blob')], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - arg = args[0].messages - mock_val = [pubsub.PubsubMessage(data=b'data_blob')] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_publish_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.publish( - pubsub.PublishRequest(), - topic='topic_value', - messages=[pubsub.PubsubMessage(data=b'data_blob')], - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.GetTopicRequest, - dict, -]) -def test_get_topic(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic( - name='name_value', - kms_key_name='kms_key_name_value', - satisfies_pzs=True, - ) - response = client.get_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Topic) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.satisfies_pzs is True - - -def test_get_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - client.get_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() - -@pytest.mark.asyncio -async def test_get_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.GetTopicRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic( - name='name_value', - kms_key_name='kms_key_name_value', - satisfies_pzs=True, - )) - response = await client.get_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Topic) - assert response.name == 'name_value' - assert response.kms_key_name == 'kms_key_name_value' - assert response.satisfies_pzs is True - - -@pytest.mark.asyncio -async def test_get_topic_async_from_dict(): - await test_get_topic_async(request_type=dict) - - -def test_get_topic_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.GetTopicRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - call.return_value = pubsub.Topic() - client.get_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_topic_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.GetTopicRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - await client.get_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -def test_get_topic_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_topic( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - - -def test_get_topic_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_topic( - pubsub.GetTopicRequest(), - topic='topic_value', - ) - -@pytest.mark.asyncio -async def test_get_topic_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Topic() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_topic( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_topic_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_topic( - pubsub.GetTopicRequest(), - topic='topic_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.ListTopicsRequest, - dict, -]) -def test_list_topics(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_topics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTopicsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_topics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - client.list_topics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() - -@pytest.mark.asyncio -async def test_list_topics_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListTopicsRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_topics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTopicsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_topics_async_from_dict(): - await test_list_topics_async(request_type=dict) - - -def test_list_topics_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = pubsub.ListTopicsRequest() - - request.project = 'project_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - call.return_value = pubsub.ListTopicsResponse() - client.list_topics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project=project_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_topics_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListTopicsRequest() - - request.project = 'project_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicsResponse()) - await client.list_topics(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project=project_value', - ) in kw['metadata'] - - -def test_list_topics_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = pubsub.ListTopicsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_topics( - project='project_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project - mock_val = 'project_value' - assert arg == mock_val - - -def test_list_topics_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_topics( - pubsub.ListTopicsRequest(), - project='project_value', - ) - -@pytest.mark.asyncio -async def test_list_topics_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_topics( - project='project_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project - mock_val = 'project_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_topics_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_topics( - pubsub.ListTopicsRequest(), - project='project_value', - ) - - -def test_list_topics_pager(transport_name: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - pubsub.Topic(), - ], - next_page_token='abc', - ), - pubsub.ListTopicsResponse( - topics=[], - next_page_token='def', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project', ''), - )), - ) - pager = client.list_topics(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, pubsub.Topic) - for i in results) -def test_list_topics_pages(transport_name: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_topics), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - pubsub.Topic(), - ], - next_page_token='abc', - ), - pubsub.ListTopicsResponse( - topics=[], - next_page_token='def', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - ], - ), - RuntimeError, - ) - pages = list(client.list_topics(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_topics_async_pager(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - pubsub.Topic(), - ], - next_page_token='abc', - ), - pubsub.ListTopicsResponse( - topics=[], - next_page_token='def', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_topics(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, pubsub.Topic) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_topics_async_pages(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topics), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - pubsub.Topic(), - ], - next_page_token='abc', - ), - pubsub.ListTopicsResponse( - topics=[], - next_page_token='def', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicsResponse( - topics=[ - pubsub.Topic(), - pubsub.Topic(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_topics(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - pubsub.ListTopicSubscriptionsRequest, - dict, -]) -def test_list_topic_subscriptions(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicSubscriptionsResponse( - subscriptions=['subscriptions_value'], - next_page_token='next_page_token_value', - ) - response = client.list_topic_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicSubscriptionsPager) - assert response.subscriptions == ['subscriptions_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_list_topic_subscriptions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - client.list_topic_subscriptions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListTopicSubscriptionsRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSubscriptionsResponse( - subscriptions=['subscriptions_value'], - next_page_token='next_page_token_value', - )) - response = await client.list_topic_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) - assert response.subscriptions == ['subscriptions_value'] - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_async_from_dict(): - await test_list_topic_subscriptions_async(request_type=dict) - - -def test_list_topic_subscriptions_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListTopicSubscriptionsRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - call.return_value = pubsub.ListTopicSubscriptionsResponse() - client.list_topic_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListTopicSubscriptionsRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSubscriptionsResponse()) - await client.list_topic_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -def test_list_topic_subscriptions_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicSubscriptionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_topic_subscriptions( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - - -def test_list_topic_subscriptions_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_topic_subscriptions( - pubsub.ListTopicSubscriptionsRequest(), - topic='topic_value', - ) - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicSubscriptionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSubscriptionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_topic_subscriptions( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_topic_subscriptions( - pubsub.ListTopicSubscriptionsRequest(), - topic='topic_value', - ) - - -def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('topic', ''), - )), - ) - pager = client.list_topic_subscriptions(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) -def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = list(client.list_topic_subscriptions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_async_pager(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_topic_subscriptions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_topic_subscriptions_async_pages(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSubscriptionsResponse( - subscriptions=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_topic_subscriptions(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - pubsub.ListTopicSnapshotsRequest, - dict, -]) -def test_list_topic_snapshots(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicSnapshotsResponse( - snapshots=['snapshots_value'], - next_page_token='next_page_token_value', - ) - response = client.list_topic_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicSnapshotsPager) - assert response.snapshots == ['snapshots_value'] - assert response.next_page_token == 'next_page_token_value' - - -def test_list_topic_snapshots_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - client.list_topic_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() - -@pytest.mark.asyncio -async def test_list_topic_snapshots_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListTopicSnapshotsRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSnapshotsResponse( - snapshots=['snapshots_value'], - next_page_token='next_page_token_value', - )) - response = await client.list_topic_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) - assert response.snapshots == ['snapshots_value'] - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_topic_snapshots_async_from_dict(): - await test_list_topic_snapshots_async(request_type=dict) - - -def test_list_topic_snapshots_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListTopicSnapshotsRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - call.return_value = pubsub.ListTopicSnapshotsResponse() - client.list_topic_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_topic_snapshots_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListTopicSnapshotsRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSnapshotsResponse()) - await client.list_topic_snapshots(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -def test_list_topic_snapshots_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListTopicSnapshotsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_topic_snapshots( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - - -def test_list_topic_snapshots_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_topic_snapshots( - pubsub.ListTopicSnapshotsRequest(), - topic='topic_value', - ) - -@pytest.mark.asyncio -async def test_list_topic_snapshots_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = pubsub.ListTopicSnapshotsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListTopicSnapshotsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_topic_snapshots( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_topic_snapshots_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_topic_snapshots( - pubsub.ListTopicSnapshotsRequest(), - topic='topic_value', - ) - - -def test_list_topic_snapshots_pager(transport_name: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('topic', ''), - )), - ) - pager = client.list_topic_snapshots(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, str) - for i in results) -def test_list_topic_snapshots_pages(transport_name: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = list(client.list_topic_snapshots(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_topic_snapshots_async_pager(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_topic_snapshots(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, str) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_topic_snapshots_async_pages(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - str(), - ], - next_page_token='abc', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - ], - next_page_token='ghi', - ), - pubsub.ListTopicSnapshotsResponse( - snapshots=[ - str(), - str(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_topic_snapshots(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - pubsub.DeleteTopicRequest, - dict, -]) -def test_delete_topic(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - client.delete_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() - -@pytest.mark.asyncio -async def test_delete_topic_async(transport: str = 'grpc_asyncio', request_type=pubsub.DeleteTopicRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_topic_async_from_dict(): - await test_delete_topic_async(request_type=dict) - - -def test_delete_topic_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DeleteTopicRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - call.return_value = None - client.delete_topic(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_topic_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DeleteTopicRequest() - - request.topic = 'topic_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_topic(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'topic=topic_value', - ) in kw['metadata'] - - -def test_delete_topic_flattened(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_topic( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - - -def test_delete_topic_flattened_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_topic( - pubsub.DeleteTopicRequest(), - topic='topic_value', - ) - -@pytest.mark.asyncio -async def test_delete_topic_flattened_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_topic), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_topic( - topic='topic_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_topic_flattened_error_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_topic( - pubsub.DeleteTopicRequest(), - topic='topic_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.DetachSubscriptionRequest, - dict, -]) -def test_detach_subscription(request_type, transport: str = 'grpc'): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.detach_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.DetachSubscriptionResponse( - ) - response = client.detach_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.DetachSubscriptionResponse) - - -def test_detach_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.detach_subscription), - '__call__') as call: - client.detach_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() - -@pytest.mark.asyncio -async def test_detach_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.DetachSubscriptionRequest): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.detach_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.DetachSubscriptionResponse( - )) - response = await client.detach_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.DetachSubscriptionResponse) - - -@pytest.mark.asyncio -async def test_detach_subscription_async_from_dict(): - await test_detach_subscription_async(request_type=dict) - - -def test_detach_subscription_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DetachSubscriptionRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.detach_subscription), - '__call__') as call: - call.return_value = pubsub.DetachSubscriptionResponse() - client.detach_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_detach_subscription_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DetachSubscriptionRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.detach_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.DetachSubscriptionResponse()) - await client.detach_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PublisherClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PublisherClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PublisherClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PublisherClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = PublisherClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.PublisherGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.PublisherGrpcTransport, - transports.PublisherGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = PublisherClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.PublisherGrpcTransport, - ) - -def test_publisher_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PublisherTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_publisher_base_transport(): - # Instantiate the base transport. - with mock.patch('google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.PublisherTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_topic', - 'update_topic', - 'publish', - 'get_topic', - 'list_topics', - 'list_topic_subscriptions', - 'list_topic_snapshots', - 'delete_topic', - 'detach_subscription', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_publisher_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PublisherTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - quota_project_id="octopus", - ) - - -def test_publisher_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.PublisherGrpcTransport, grpc_helpers),
        (transports.PublisherGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_publisher_transport_create_channel(transport_class, grpc_helpers):
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.  Both google.auth.default and the (sync or async)
    # channel factory are patched so nothing real is dialed.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )

        # The channel must target the default endpoint with ADC credentials,
        # the caller's scopes/quota project, and unlimited message sizes.
        create_channel.assert_called_with(
            "pubsub.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
                'https://www.googleapis.com/auth/pubsub',
            ),
            scopes=["1", "2"],
            default_host="pubsub.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_publisher_host_no_port(transport_name): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_publisher_host_with_port(transport_name): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'pubsub.googleapis.com:8000' - ) - -def test_publisher_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.PublisherGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_publisher_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.PublisherGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport]) -def test_publisher_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport 
constructor. -@pytest.mark.parametrize("transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport]) -def test_publisher_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_schema_path(): - project = "squid" - schema = "clam" - expected = "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) - actual = PublisherClient.schema_path(project, schema) - assert expected == actual - - -def test_parse_schema_path(): - expected = { - "project": "whelk", - "schema": "octopus", - } - path = PublisherClient.schema_path(**expected) - - # Check that the path construction is reversible. 
- actual = PublisherClient.parse_schema_path(path) - assert expected == actual - -def test_subscription_path(): - project = "oyster" - subscription = "nudibranch" - expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - actual = PublisherClient.subscription_path(project, subscription) - assert expected == actual - - -def test_parse_subscription_path(): - expected = { - "project": "cuttlefish", - "subscription": "mussel", - } - path = PublisherClient.subscription_path(**expected) - - # Check that the path construction is reversible. - actual = PublisherClient.parse_subscription_path(path) - assert expected == actual - -def test_topic_path(): - project = "winkle" - topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - actual = PublisherClient.topic_path(project, topic) - assert expected == actual - - -def test_parse_topic_path(): - expected = { - "project": "scallop", - "topic": "abalone", - } - path = PublisherClient.topic_path(**expected) - - # Check that the path construction is reversible. - actual = PublisherClient.parse_topic_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = PublisherClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = PublisherClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = PublisherClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = PublisherClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = PublisherClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = PublisherClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = PublisherClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = PublisherClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = PublisherClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = PublisherClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = PublisherClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = PublisherClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = PublisherClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = PublisherClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = PublisherClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.PublisherTransport, '_prep_wrapped_messages') as prep: - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.PublisherTransport, '_prep_wrapped_messages') as prep: - transport_class = PublisherClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_set_iam_policy(transport: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
@pytest.mark.asyncio
async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
    # Round-trip a SetIamPolicyRequest through a mocked async gRPC stub.
    client = PublisherAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = iam_policy_pb2.SetIamPolicyRequest()

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            policy_pb2.Policy(version=774, etag=b"etag_blob",)
        )
        response = await client.set_iam_policy(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]

        assert args[0] == request

    # Establish that the response is the type that we expect.
    assert isinstance(response, policy_pb2.Policy)

    assert response.version == 774

    assert response.etag == b"etag_blob"
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_get_iam_policy(transport: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_test_iam_permissions(transport: str = "grpc"): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (PublisherClient, transports.PublisherGrpcTransport), - (PublisherAsyncClient, transports.PublisherGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py deleted file mode 100644 index bb9a7086f2d6..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ /dev/null @@ -1,3123 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock -except ImportError: - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.pubsub_v1.services.schema_service import SchemaServiceAsyncClient -from google.pubsub_v1.services.schema_service import SchemaServiceClient -from google.pubsub_v1.services.schema_service import pagers -from google.pubsub_v1.services.schema_service import transports -from google.pubsub_v1.types import schema -from google.pubsub_v1.types import schema as gp_schema -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SchemaServiceClient._get_default_mtls_endpoint(None) is None - assert SchemaServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SchemaServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SchemaServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SchemaServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SchemaServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SchemaServiceClient, "grpc"), - (SchemaServiceAsyncClient, "grpc_asyncio"), -]) -def test_schema_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SchemaServiceGrpcTransport, "grpc"), - (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_schema_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 
'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SchemaServiceClient, "grpc"), - (SchemaServiceAsyncClient, "grpc_asyncio"), -]) -def test_schema_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - - -def test_schema_service_client_get_transport_class(): - transport = SchemaServiceClient.get_transport_class() - available_transports = [ - transports.SchemaServiceGrpcTransport, - ] - assert transport in available_transports - - transport = SchemaServiceClient.get_transport_class("grpc") - assert transport == transports.SchemaServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) 
-@mock.patch.object(SchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceClient)) -@mock.patch.object(SchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceAsyncClient)) -def test_schema_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(SchemaServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SchemaServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "true"), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", "false"), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(SchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceClient)) -@mock.patch.object(SchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_schema_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class", [ - SchemaServiceClient, SchemaServiceAsyncClient -]) -@mock.patch.object(SchemaServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceClient)) -@mock.patch.object(SchemaServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SchemaServiceAsyncClient)) -def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_schema_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", grpc_helpers), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_schema_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -def test_schema_service_client_client_options_from_dict(): - with mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SchemaServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc", grpc_helpers), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_schema_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "pubsub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - scopes=None, - default_host="pubsub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - gp_schema.CreateSchemaRequest, - dict, -]) -def test_create_schema(request_type, transport: str = 'grpc'): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gp_schema.Schema( - name='name_value', - type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, - definition='definition_value', - ) - response = client.create_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gp_schema.Schema) - assert response.name == 'name_value' - assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == 'definition_value' - - -def test_create_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - client.create_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() - -@pytest.mark.asyncio -async def test_create_schema_async(transport: str = 'grpc_asyncio', request_type=gp_schema.CreateSchemaRequest): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema( - name='name_value', - type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, - definition='definition_value', - )) - response = await client.create_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gp_schema.Schema) - assert response.name == 'name_value' - assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == 'definition_value' - - -@pytest.mark.asyncio -async def test_create_schema_async_from_dict(): - await test_create_schema_async(request_type=dict) - - -def test_create_schema_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gp_schema.CreateSchemaRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - call.return_value = gp_schema.Schema() - client.create_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_schema_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gp_schema.CreateSchemaRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) - await client.create_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_schema_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gp_schema.Schema() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_schema( - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - schema_id='schema_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].schema - mock_val = gp_schema.Schema(name='name_value') - assert arg == mock_val - arg = args[0].schema_id - mock_val = 'schema_id_value' - assert arg == mock_val - - -def test_create_schema_flattened_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_schema( - gp_schema.CreateSchemaRequest(), - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - schema_id='schema_id_value', - ) - -@pytest.mark.asyncio -async def test_create_schema_flattened_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gp_schema.Schema() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_schema( - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - schema_id='schema_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].schema - mock_val = gp_schema.Schema(name='name_value') - assert arg == mock_val - arg = args[0].schema_id - mock_val = 'schema_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_schema_flattened_error_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_schema( - gp_schema.CreateSchemaRequest(), - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - schema_id='schema_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - schema.GetSchemaRequest, - dict, -]) -def test_get_schema(request_type, transport: str = 'grpc'): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schema.Schema( - name='name_value', - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition='definition_value', - ) - response = client.get_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schema.Schema) - assert response.name == 'name_value' - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == 'definition_value' - - -def test_get_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - client.get_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() - -@pytest.mark.asyncio -async def test_get_schema_async(transport: str = 'grpc_asyncio', request_type=schema.GetSchemaRequest): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema( - name='name_value', - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition='definition_value', - )) - response = await client.get_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schema.Schema) - assert response.name == 'name_value' - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == 'definition_value' - - -@pytest.mark.asyncio -async def test_get_schema_async_from_dict(): - await test_get_schema_async(request_type=dict) - - -def test_get_schema_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = schema.GetSchemaRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - call.return_value = schema.Schema() - client.get_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_schema_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = schema.GetSchemaRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) - await client.get_schema(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_schema_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schema.Schema() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_schema( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_schema_flattened_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_schema( - schema.GetSchemaRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_schema_flattened_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schema.Schema() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_schema( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_schema_flattened_error_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_schema( - schema.GetSchemaRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - schema.ListSchemasRequest, - dict, -]) -def test_list_schemas(request_type, transport: str = 'grpc'): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schema.ListSchemasResponse( - next_page_token='next_page_token_value', - ) - response = client.list_schemas(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSchemasPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_schemas_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - client.list_schemas() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() - -@pytest.mark.asyncio -async def test_list_schemas_async(transport: str = 'grpc_asyncio', request_type=schema.ListSchemasRequest): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schema.ListSchemasResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_schemas(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSchemasAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_schemas_async_from_dict(): - await test_list_schemas_async(request_type=dict) - - -def test_list_schemas_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = schema.ListSchemasRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - call.return_value = schema.ListSchemasResponse() - client.list_schemas(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_schemas_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = schema.ListSchemasRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.ListSchemasResponse()) - await client.list_schemas(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_schemas_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = schema.ListSchemasResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_schemas( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_schemas_flattened_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_schemas( - schema.ListSchemasRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_schemas_flattened_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schema.ListSchemasResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.ListSchemasResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_schemas( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_schemas_flattened_error_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_schemas( - schema.ListSchemasRequest(), - parent='parent_value', - ) - - -def test_list_schemas_pager(transport_name: str = "grpc"): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - schema.Schema(), - ], - next_page_token='abc', - ), - schema.ListSchemasResponse( - schemas=[], - next_page_token='def', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - ], - next_page_token='ghi', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_schemas(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, schema.Schema) - for i in results) -def test_list_schemas_pages(transport_name: str = "grpc"): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_schemas), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - schema.Schema(), - ], - next_page_token='abc', - ), - schema.ListSchemasResponse( - schemas=[], - next_page_token='def', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - ], - next_page_token='ghi', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - ], - ), - RuntimeError, - ) - pages = list(client.list_schemas(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_schemas_async_pager(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - schema.Schema(), - ], - next_page_token='abc', - ), - schema.ListSchemasResponse( - schemas=[], - next_page_token='def', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - ], - next_page_token='ghi', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_schemas(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, schema.Schema) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_schemas_async_pages(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schemas), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - schema.Schema(), - ], - next_page_token='abc', - ), - schema.ListSchemasResponse( - schemas=[], - next_page_token='def', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - ], - next_page_token='ghi', - ), - schema.ListSchemasResponse( - schemas=[ - schema.Schema(), - schema.Schema(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_schemas(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - schema.DeleteSchemaRequest, - dict, -]) -def test_delete_schema(request_type, transport: str = 'grpc'): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - client.delete_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() - -@pytest.mark.asyncio -async def test_delete_schema_async(transport: str = 'grpc_asyncio', request_type=schema.DeleteSchemaRequest): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_schema_async_from_dict(): - await test_delete_schema_async(request_type=dict) - - -def test_delete_schema_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = schema.DeleteSchemaRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - call.return_value = None - client.delete_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_schema_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = schema.DeleteSchemaRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_schema_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_schema( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_schema_flattened_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_schema( - schema.DeleteSchemaRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_schema_flattened_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_schema( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_schema_flattened_error_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_schema( - schema.DeleteSchemaRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - gp_schema.ValidateSchemaRequest, - dict, -]) -def test_validate_schema(request_type, transport: str = 'grpc'): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gp_schema.ValidateSchemaResponse( - ) - response = client.validate_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gp_schema.ValidateSchemaResponse) - - -def test_validate_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - client.validate_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() - -@pytest.mark.asyncio -async def test_validate_schema_async(transport: str = 'grpc_asyncio', request_type=gp_schema.ValidateSchemaRequest): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.ValidateSchemaResponse( - )) - response = await client.validate_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, gp_schema.ValidateSchemaResponse) - - -@pytest.mark.asyncio -async def test_validate_schema_async_from_dict(): - await test_validate_schema_async(request_type=dict) - - -def test_validate_schema_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gp_schema.ValidateSchemaRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - call.return_value = gp_schema.ValidateSchemaResponse() - client.validate_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_validate_schema_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = gp_schema.ValidateSchemaRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.ValidateSchemaResponse()) - await client.validate_schema(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_validate_schema_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = gp_schema.ValidateSchemaResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.validate_schema( - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].schema - mock_val = gp_schema.Schema(name='name_value') - assert arg == mock_val - - -def test_validate_schema_flattened_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.validate_schema( - gp_schema.ValidateSchemaRequest(), - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_validate_schema_flattened_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_schema), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = gp_schema.ValidateSchemaResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.ValidateSchemaResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.validate_schema( - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].schema - mock_val = gp_schema.Schema(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_validate_schema_flattened_error_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.validate_schema( - gp_schema.ValidateSchemaRequest(), - parent='parent_value', - schema=gp_schema.Schema(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - schema.ValidateMessageRequest, - dict, -]) -def test_validate_message(request_type, transport: str = 'grpc'): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_message), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = schema.ValidateMessageResponse( - ) - response = client.validate_message(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, schema.ValidateMessageResponse) - - -def test_validate_message_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_message), - '__call__') as call: - client.validate_message() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() - -@pytest.mark.asyncio -async def test_validate_message_async(transport: str = 'grpc_asyncio', request_type=schema.ValidateMessageRequest): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_message), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(schema.ValidateMessageResponse( - )) - response = await client.validate_message(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, schema.ValidateMessageResponse) - - -@pytest.mark.asyncio -async def test_validate_message_async_from_dict(): - await test_validate_message_async(request_type=dict) - - -def test_validate_message_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = schema.ValidateMessageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_message), - '__call__') as call: - call.return_value = schema.ValidateMessageResponse() - client.validate_message(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_validate_message_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = schema.ValidateMessageRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.validate_message), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.ValidateMessageResponse()) - await client.validate_message(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = SchemaServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.SchemaServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.SchemaServiceGrpcTransport, - transports.SchemaServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = SchemaServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SchemaServiceGrpcTransport, - ) - -def test_schema_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SchemaServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_schema_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.SchemaServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - 'create_schema', - 'get_schema', - 'list_schemas', - 'delete_schema', - 'validate_schema', - 'validate_message', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_schema_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SchemaServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - quota_project_id="octopus", - ) - - -def test_schema_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SchemaServiceTransport() - adc.assert_called_once() - - -def test_schema_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SchemaServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SchemaServiceGrpcTransport, - transports.SchemaServiceGrpcAsyncIOTransport, - ], -) -def test_schema_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SchemaServiceGrpcTransport, grpc_helpers), - (transports.SchemaServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_schema_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "pubsub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - scopes=["1", "2"], - default_host="pubsub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport]) -def test_schema_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_schema_service_host_no_port(transport_name): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_schema_service_host_with_port(transport_name): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'pubsub.googleapis.com:8000' - ) - -def test_schema_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SchemaServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_schema_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.SchemaServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport]) -def test_schema_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from 
grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport]) -def test_schema_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_schema_path(): - project = "squid" - schema = "clam" - expected = "projects/{project}/schemas/{schema}".format(project=project, schema=schema, ) - actual = SchemaServiceClient.schema_path(project, schema) - assert expected == actual - - -def test_parse_schema_path(): - expected = { - "project": "whelk", - "schema": "octopus", - } - path = SchemaServiceClient.schema_path(**expected) - - # Check that the path construction is reversible. 
- actual = SchemaServiceClient.parse_schema_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SchemaServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = SchemaServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = SchemaServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = SchemaServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = SchemaServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = SchemaServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SchemaServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = SchemaServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. 
- actual = SchemaServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = SchemaServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = SchemaServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = SchemaServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SchemaServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = SchemaServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = SchemaServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SchemaServiceTransport, '_prep_wrapped_messages') as prep: - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SchemaServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = SchemaServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_set_iam_policy(transport: str = "grpc"): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_get_iam_policy(transport: str = "grpc"): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_test_iam_permissions(transport: str = "grpc"): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport), - (SchemaServiceAsyncClient, transports.SchemaServiceGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) diff --git a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py deleted file mode 100644 index f60040d7d83b..000000000000 --- a/packages/google-cloud-pubsub/owl-bot-staging/v1/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ /dev/null @@ -1,5492 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock -except ImportError: - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import options_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.pubsub_v1.services.subscriber import SubscriberAsyncClient -from google.pubsub_v1.services.subscriber import SubscriberClient -from google.pubsub_v1.services.subscriber import pagers -from google.pubsub_v1.services.subscriber import transports -from google.pubsub_v1.types import pubsub -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert SubscriberClient._get_default_mtls_endpoint(None) is None - assert SubscriberClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert SubscriberClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert SubscriberClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert SubscriberClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SubscriberClient, "grpc"), - (SubscriberAsyncClient, "grpc_asyncio"), -]) -def test_subscriber_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.SubscriberGrpcTransport, "grpc"), - (transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_subscriber_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', 
create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (SubscriberClient, "grpc"), - (SubscriberAsyncClient, "grpc_asyncio"), -]) -def test_subscriber_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - - -def test_subscriber_client_get_transport_class(): - transport = SubscriberClient.get_transport_class() - available_transports = [ - transports.SubscriberGrpcTransport, - ] - assert transport in available_transports - - transport = SubscriberClient.get_transport_class("grpc") - assert transport == transports.SubscriberGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(SubscriberClient, "DEFAULT_ENDPOINT", 
modify_default_endpoint(SubscriberClient)) -@mock.patch.object(SubscriberAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberAsyncClient)) -def test_subscriber_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(SubscriberClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(SubscriberClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "true"), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", "false"), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient)) -@mock.patch.object(SubscriberAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_subscriber_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class", [ - SubscriberClient, SubscriberAsyncClient -]) -@mock.patch.object(SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient)) -@mock.patch.object(SubscriberAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberAsyncClient)) -def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_subscriber_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_subscriber_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -def test_subscriber_client_client_options_from_dict(): - with mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = SubscriberClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc", grpc_helpers), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_subscriber_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "pubsub.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - scopes=None, - default_host="pubsub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.Subscription, - dict, -]) -def test_create_subscription(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription( - name='name_value', - topic='topic_value', - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter='filter_value', - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - response = client.create_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == 'filter_value' - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - -def test_create_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - client.create_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() - -@pytest.mark.asyncio -async def test_create_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.Subscription): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription( - name='name_value', - topic='topic_value', - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter='filter_value', - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - )) - response = await client.create_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Subscription) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == 'filter_value' - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - -@pytest.mark.asyncio -async def test_create_subscription_async_from_dict(): - await test_create_subscription_async(request_type=dict) - - -def test_create_subscription_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.Subscription() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - call.return_value = pubsub.Subscription() - client.create_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_subscription_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.Subscription() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - await client.create_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_create_subscription_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_subscription( - name='name_value', - topic='topic_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ack_deadline_seconds=2066, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - arg = args[0].push_config - mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') - assert arg == mock_val - arg = args[0].ack_deadline_seconds - mock_val = 2066 - assert arg == mock_val - - -def test_create_subscription_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_subscription( - pubsub.Subscription(), - name='name_value', - topic='topic_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ack_deadline_seconds=2066, - ) - -@pytest.mark.asyncio -async def test_create_subscription_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_subscription( - name='name_value', - topic='topic_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ack_deadline_seconds=2066, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].topic - mock_val = 'topic_value' - assert arg == mock_val - arg = args[0].push_config - mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') - assert arg == mock_val - arg = args[0].ack_deadline_seconds - mock_val = 2066 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_subscription_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_subscription( - pubsub.Subscription(), - name='name_value', - topic='topic_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ack_deadline_seconds=2066, - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.GetSubscriptionRequest, - dict, -]) -def test_get_subscription(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription( - name='name_value', - topic='topic_value', - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter='filter_value', - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - response = client.get_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Subscription) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == 'filter_value' - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - -def test_get_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - client.get_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() - -@pytest.mark.asyncio -async def test_get_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.GetSubscriptionRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription( - name='name_value', - topic='topic_value', - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter='filter_value', - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - )) - response = await client.get_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == 'filter_value' - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - -@pytest.mark.asyncio -async def test_get_subscription_async_from_dict(): - await test_get_subscription_async(request_type=dict) - - -def test_get_subscription_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.GetSubscriptionRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - call.return_value = pubsub.Subscription() - client.get_subscription(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_subscription_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.GetSubscriptionRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - await client.get_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_get_subscription_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_subscription( - subscription='subscription_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - - -def test_get_subscription_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_subscription( - pubsub.GetSubscriptionRequest(), - subscription='subscription_value', - ) - -@pytest.mark.asyncio -async def test_get_subscription_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_subscription( - subscription='subscription_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_subscription_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_subscription( - pubsub.GetSubscriptionRequest(), - subscription='subscription_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.UpdateSubscriptionRequest, - dict, -]) -def test_update_subscription(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Subscription( - name='name_value', - topic='topic_value', - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter='filter_value', - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - response = client.update_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Subscription) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == 'filter_value' - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - -def test_update_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), - '__call__') as call: - client.update_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() - -@pytest.mark.asyncio -async def test_update_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.UpdateSubscriptionRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription( - name='name_value', - topic='topic_value', - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter='filter_value', - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - )) - response = await client.update_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == 'filter_value' - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - -@pytest.mark.asyncio -async def test_update_subscription_async_from_dict(): - await test_update_subscription_async(request_type=dict) - - -def test_update_subscription_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.UpdateSubscriptionRequest() - - request.subscription.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), - '__call__') as call: - call.return_value = pubsub.Subscription() - client.update_subscription(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_subscription_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.UpdateSubscriptionRequest() - - request.subscription.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) - await client.update_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - pubsub.ListSubscriptionsRequest, - dict, -]) -def test_list_subscriptions(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = pubsub.ListSubscriptionsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubscriptionsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_subscriptions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - client.list_subscriptions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() - -@pytest.mark.asyncio -async def test_list_subscriptions_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListSubscriptionsRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSubscriptionsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubscriptionsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_subscriptions_async_from_dict(): - await test_list_subscriptions_async(request_type=dict) - - -def test_list_subscriptions_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListSubscriptionsRequest() - - request.project = 'project_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - call.return_value = pubsub.ListSubscriptionsResponse() - client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project=project_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_subscriptions_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = pubsub.ListSubscriptionsRequest() - - request.project = 'project_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSubscriptionsResponse()) - await client.list_subscriptions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project=project_value', - ) in kw['metadata'] - - -def test_list_subscriptions_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListSubscriptionsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_subscriptions( - project='project_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project - mock_val = 'project_value' - assert arg == mock_val - - -def test_list_subscriptions_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_subscriptions( - pubsub.ListSubscriptionsRequest(), - project='project_value', - ) - -@pytest.mark.asyncio -async def test_list_subscriptions_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListSubscriptionsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSubscriptionsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_subscriptions( - project='project_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project - mock_val = 'project_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_subscriptions_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_subscriptions( - pubsub.ListSubscriptionsRequest(), - project='project_value', - ) - - -def test_list_subscriptions_pager(transport_name: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - pubsub.Subscription(), - ], - next_page_token='abc', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - ], - next_page_token='ghi', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project', ''), - )), - ) - pager = client.list_subscriptions(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, pubsub.Subscription) - for i in results) -def test_list_subscriptions_pages(transport_name: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - pubsub.Subscription(), - ], - next_page_token='abc', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - ], - next_page_token='ghi', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - ], - ), - RuntimeError, - ) - pages = list(client.list_subscriptions(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_subscriptions_async_pager(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - pubsub.Subscription(), - ], - next_page_token='abc', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - ], - next_page_token='ghi', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_subscriptions(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, pubsub.Subscription) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_subscriptions_async_pages(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - pubsub.Subscription(), - ], - next_page_token='abc', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[], - next_page_token='def', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - ], - next_page_token='ghi', - ), - pubsub.ListSubscriptionsResponse( - subscriptions=[ - pubsub.Subscription(), - pubsub.Subscription(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_subscriptions(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - pubsub.DeleteSubscriptionRequest, - dict, -]) -def test_delete_subscription(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - client.delete_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() - -@pytest.mark.asyncio -async def test_delete_subscription_async(transport: str = 'grpc_asyncio', request_type=pubsub.DeleteSubscriptionRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_subscription_async_from_dict(): - await test_delete_subscription_async(request_type=dict) - - -def test_delete_subscription_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = pubsub.DeleteSubscriptionRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - call.return_value = None - client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_subscription_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DeleteSubscriptionRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_subscription(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_delete_subscription_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_subscription( - subscription='subscription_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - - -def test_delete_subscription_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_subscription( - pubsub.DeleteSubscriptionRequest(), - subscription='subscription_value', - ) - -@pytest.mark.asyncio -async def test_delete_subscription_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_subscription( - subscription='subscription_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_subscription_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_subscription( - pubsub.DeleteSubscriptionRequest(), - subscription='subscription_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.ModifyAckDeadlineRequest, - dict, -]) -def test_modify_ack_deadline(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.modify_ack_deadline(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_modify_ack_deadline_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - client.modify_ack_deadline() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() - -@pytest.mark.asyncio -async def test_modify_ack_deadline_async(transport: str = 'grpc_asyncio', request_type=pubsub.ModifyAckDeadlineRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.modify_ack_deadline(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_modify_ack_deadline_async_from_dict(): - await test_modify_ack_deadline_async(request_type=dict) - - -def test_modify_ack_deadline_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ModifyAckDeadlineRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - call.return_value = None - client.modify_ack_deadline(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_modify_ack_deadline_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ModifyAckDeadlineRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.modify_ack_deadline(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_modify_ack_deadline_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.modify_ack_deadline( - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ack_deadline_seconds=2066, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].ack_ids - mock_val = ['ack_ids_value'] - assert arg == mock_val - arg = args[0].ack_deadline_seconds - mock_val = 2066 - assert arg == mock_val - - -def test_modify_ack_deadline_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.modify_ack_deadline( - pubsub.ModifyAckDeadlineRequest(), - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ack_deadline_seconds=2066, - ) - -@pytest.mark.asyncio -async def test_modify_ack_deadline_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.modify_ack_deadline( - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ack_deadline_seconds=2066, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].ack_ids - mock_val = ['ack_ids_value'] - assert arg == mock_val - arg = args[0].ack_deadline_seconds - mock_val = 2066 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_modify_ack_deadline_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.modify_ack_deadline( - pubsub.ModifyAckDeadlineRequest(), - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ack_deadline_seconds=2066, - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.AcknowledgeRequest, - dict, -]) -def test_acknowledge(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.acknowledge(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_acknowledge_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - client.acknowledge() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() - -@pytest.mark.asyncio -async def test_acknowledge_async(transport: str = 'grpc_asyncio', request_type=pubsub.AcknowledgeRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.acknowledge(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_acknowledge_async_from_dict(): - await test_acknowledge_async(request_type=dict) - - -def test_acknowledge_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.AcknowledgeRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - call.return_value = None - client.acknowledge(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_acknowledge_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.AcknowledgeRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.acknowledge(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_acknowledge_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.acknowledge( - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].ack_ids - mock_val = ['ack_ids_value'] - assert arg == mock_val - - -def test_acknowledge_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.acknowledge( - pubsub.AcknowledgeRequest(), - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ) - -@pytest.mark.asyncio -async def test_acknowledge_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.acknowledge), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.acknowledge( - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].ack_ids - mock_val = ['ack_ids_value'] - assert arg == mock_val - -@pytest.mark.asyncio -async def test_acknowledge_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.acknowledge( - pubsub.AcknowledgeRequest(), - subscription='subscription_value', - ack_ids=['ack_ids_value'], - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.PullRequest, - dict, -]) -def test_pull(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.PullResponse( - ) - response = client.pull(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PullResponse) - - -def test_pull_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - client.pull() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() - -@pytest.mark.asyncio -async def test_pull_async(transport: str = 'grpc_asyncio', request_type=pubsub.PullRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse( - )) - response = await client.pull(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PullResponse) - - -@pytest.mark.asyncio -async def test_pull_async_from_dict(): - await test_pull_async(request_type=dict) - - -def test_pull_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.PullRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - call.return_value = pubsub.PullResponse() - client.pull(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_pull_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.PullRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) - await client.pull(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_pull_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.PullResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.pull( - subscription='subscription_value', - return_immediately=True, - max_messages=1277, - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].return_immediately - mock_val = True - assert arg == mock_val - arg = args[0].max_messages - mock_val = 1277 - assert arg == mock_val - - -def test_pull_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.pull( - pubsub.PullRequest(), - subscription='subscription_value', - return_immediately=True, - max_messages=1277, - ) - -@pytest.mark.asyncio -async def test_pull_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.pull), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.PullResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.pull( - subscription='subscription_value', - return_immediately=True, - max_messages=1277, - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].return_immediately - mock_val = True - assert arg == mock_val - arg = args[0].max_messages - mock_val = 1277 - assert arg == mock_val - -@pytest.mark.asyncio -async def test_pull_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.pull( - pubsub.PullRequest(), - subscription='subscription_value', - return_immediately=True, - max_messages=1277, - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.StreamingPullRequest, - dict, -]) -def test_streaming_pull(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_pull), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = iter([pubsub.StreamingPullResponse()]) - response = client.streaming_pull(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. 
- for message in response: - assert isinstance(message, pubsub.StreamingPullResponse) - - -@pytest.mark.asyncio -async def test_streaming_pull_async(transport: str = 'grpc_asyncio', request_type=pubsub.StreamingPullRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - requests = [request] - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.streaming_pull), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) - call.return_value.read = mock.AsyncMock(side_effect=[pubsub.StreamingPullResponse()]) - response = await client.streaming_pull(iter(requests)) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert next(args[0]) == request - - # Establish that the response is the type that we expect. - message = await response.read() - assert isinstance(message, pubsub.StreamingPullResponse) - - -@pytest.mark.asyncio -async def test_streaming_pull_async_from_dict(): - await test_streaming_pull_async(request_type=dict) - - -@pytest.mark.parametrize("request_type", [ - pubsub.ModifyPushConfigRequest, - dict, -]) -def test_modify_push_config(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.modify_push_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_modify_push_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - client.modify_push_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() - -@pytest.mark.asyncio -async def test_modify_push_config_async(transport: str = 'grpc_asyncio', request_type=pubsub.ModifyPushConfigRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.modify_push_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_modify_push_config_async_from_dict(): - await test_modify_push_config_async(request_type=dict) - - -def test_modify_push_config_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ModifyPushConfigRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - call.return_value = None - client.modify_push_config(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_modify_push_config_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ModifyPushConfigRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.modify_push_config(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_modify_push_config_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.modify_push_config( - subscription='subscription_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].push_config - mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') - assert arg == mock_val - - -def test_modify_push_config_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.modify_push_config( - pubsub.ModifyPushConfigRequest(), - subscription='subscription_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ) - -@pytest.mark.asyncio -async def test_modify_push_config_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.modify_push_config), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.modify_push_config( - subscription='subscription_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - arg = args[0].push_config - mock_val = pubsub.PushConfig(push_endpoint='push_endpoint_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_modify_push_config_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.modify_push_config( - pubsub.ModifyPushConfigRequest(), - subscription='subscription_value', - push_config=pubsub.PushConfig(push_endpoint='push_endpoint_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.GetSnapshotRequest, - dict, -]) -def test_get_snapshot(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot( - name='name_value', - topic='topic_value', - ) - response = client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - - -def test_get_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - client.get_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() - -@pytest.mark.asyncio -async def test_get_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.GetSnapshotRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot( - name='name_value', - topic='topic_value', - )) - response = await client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - - -@pytest.mark.asyncio -async def test_get_snapshot_async_from_dict(): - await test_get_snapshot_async(request_type=dict) - - -def test_get_snapshot_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.GetSnapshotRequest() - - request.snapshot = 'snapshot_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - call.return_value = pubsub.Snapshot() - client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'snapshot=snapshot_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_snapshot_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.GetSnapshotRequest() - - request.snapshot = 'snapshot_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - await client.get_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'snapshot=snapshot_value', - ) in kw['metadata'] - - -def test_get_snapshot_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_snapshot( - snapshot='snapshot_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].snapshot - mock_val = 'snapshot_value' - assert arg == mock_val - - -def test_get_snapshot_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_snapshot( - pubsub.GetSnapshotRequest(), - snapshot='snapshot_value', - ) - -@pytest.mark.asyncio -async def test_get_snapshot_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_snapshot( - snapshot='snapshot_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].snapshot - mock_val = 'snapshot_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_snapshot_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_snapshot( - pubsub.GetSnapshotRequest(), - snapshot='snapshot_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.ListSnapshotsRequest, - dict, -]) -def test_list_snapshots(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListSnapshotsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSnapshotsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_snapshots_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - client.list_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() - -@pytest.mark.asyncio -async def test_list_snapshots_async(transport: str = 'grpc_asyncio', request_type=pubsub.ListSnapshotsRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSnapshotsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListSnapshotsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_snapshots_async_from_dict(): - await test_list_snapshots_async(request_type=dict) - - -def test_list_snapshots_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListSnapshotsRequest() - - request.project = 'project_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - call.return_value = pubsub.ListSnapshotsResponse() - client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project=project_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_snapshots_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.ListSnapshotsRequest() - - request.project = 'project_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSnapshotsResponse()) - await client.list_snapshots(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'project=project_value', - ) in kw['metadata'] - - -def test_list_snapshots_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.ListSnapshotsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_snapshots( - project='project_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].project - mock_val = 'project_value' - assert arg == mock_val - - -def test_list_snapshots_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_snapshots( - pubsub.ListSnapshotsRequest(), - project='project_value', - ) - -@pytest.mark.asyncio -async def test_list_snapshots_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = pubsub.ListSnapshotsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.ListSnapshotsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_snapshots( - project='project_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].project - mock_val = 'project_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_snapshots_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_snapshots( - pubsub.ListSnapshotsRequest(), - project='project_value', - ) - - -def test_list_snapshots_pager(transport_name: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - next_page_token='abc', - ), - pubsub.ListSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - ], - next_page_token='ghi', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('project', ''), - )), - ) - pager = client.list_snapshots(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, pubsub.Snapshot) - for i in results) -def test_list_snapshots_pages(transport_name: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - next_page_token='abc', - ), - pubsub.ListSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - ], - next_page_token='ghi', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - ), - RuntimeError, - ) - pages = list(client.list_snapshots(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_snapshots_async_pager(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - next_page_token='abc', - ), - pubsub.ListSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - ], - next_page_token='ghi', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_snapshots(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, pubsub.Snapshot) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_snapshots_async_pages(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_snapshots), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - next_page_token='abc', - ), - pubsub.ListSnapshotsResponse( - snapshots=[], - next_page_token='def', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - ], - next_page_token='ghi', - ), - pubsub.ListSnapshotsResponse( - snapshots=[ - pubsub.Snapshot(), - pubsub.Snapshot(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_snapshots(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - pubsub.CreateSnapshotRequest, - dict, -]) -def test_create_snapshot(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot( - name='name_value', - topic='topic_value', - ) - response = client.create_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - - -def test_create_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - client.create_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() - -@pytest.mark.asyncio -async def test_create_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.CreateSnapshotRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot( - name='name_value', - topic='topic_value', - )) - response = await client.create_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - - -@pytest.mark.asyncio -async def test_create_snapshot_async_from_dict(): - await test_create_snapshot_async(request_type=dict) - - -def test_create_snapshot_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = pubsub.CreateSnapshotRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - call.return_value = pubsub.Snapshot() - client.create_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_snapshot_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.CreateSnapshotRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - await client.create_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_create_snapshot_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = pubsub.Snapshot() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_snapshot( - name='name_value', - subscription='subscription_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - - -def test_create_snapshot_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_snapshot( - pubsub.CreateSnapshotRequest(), - name='name_value', - subscription='subscription_value', - ) - -@pytest.mark.asyncio -async def test_create_snapshot_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_snapshot( - name='name_value', - subscription='subscription_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - arg = args[0].subscription - mock_val = 'subscription_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_snapshot_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_snapshot( - pubsub.CreateSnapshotRequest(), - name='name_value', - subscription='subscription_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.UpdateSnapshotRequest, - dict, -]) -def test_update_snapshot(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.Snapshot( - name='name_value', - topic='topic_value', - ) - response = client.update_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - - -def test_update_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_snapshot), - '__call__') as call: - client.update_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() - -@pytest.mark.asyncio -async def test_update_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.UpdateSnapshotRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot( - name='name_value', - topic='topic_value', - )) - response = await client.update_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == 'name_value' - assert response.topic == 'topic_value' - - -@pytest.mark.asyncio -async def test_update_snapshot_async_from_dict(): - await test_update_snapshot_async(request_type=dict) - - -def test_update_snapshot_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = pubsub.UpdateSnapshotRequest() - - request.snapshot.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_snapshot), - '__call__') as call: - call.return_value = pubsub.Snapshot() - client.update_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'snapshot.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_snapshot_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.UpdateSnapshotRequest() - - request.snapshot.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) - await client.update_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'snapshot.name=name_value', - ) in kw['metadata'] - - -@pytest.mark.parametrize("request_type", [ - pubsub.DeleteSnapshotRequest, - dict, -]) -def test_delete_snapshot(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - client.delete_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() - -@pytest.mark.asyncio -async def test_delete_snapshot_async(transport: str = 'grpc_asyncio', request_type=pubsub.DeleteSnapshotRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_snapshot_async_from_dict(): - await test_delete_snapshot_async(request_type=dict) - - -def test_delete_snapshot_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DeleteSnapshotRequest() - - request.snapshot = 'snapshot_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - call.return_value = None - client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'snapshot=snapshot_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_snapshot_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.DeleteSnapshotRequest() - - request.snapshot = 'snapshot_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_snapshot(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'snapshot=snapshot_value', - ) in kw['metadata'] - - -def test_delete_snapshot_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_snapshot( - snapshot='snapshot_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].snapshot - mock_val = 'snapshot_value' - assert arg == mock_val - - -def test_delete_snapshot_flattened_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_snapshot( - pubsub.DeleteSnapshotRequest(), - snapshot='snapshot_value', - ) - -@pytest.mark.asyncio -async def test_delete_snapshot_flattened_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_snapshot), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_snapshot( - snapshot='snapshot_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].snapshot - mock_val = 'snapshot_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_snapshot_flattened_error_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_snapshot( - pubsub.DeleteSnapshotRequest(), - snapshot='snapshot_value', - ) - - -@pytest.mark.parametrize("request_type", [ - pubsub.SeekRequest, - dict, -]) -def test_seek(request_type, transport: str = 'grpc'): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seek), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = pubsub.SeekResponse( - ) - response = client.seek(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.SeekResponse) - - -def test_seek_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.seek), - '__call__') as call: - client.seek() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() - -@pytest.mark.asyncio -async def test_seek_async(transport: str = 'grpc_asyncio', request_type=pubsub.SeekRequest): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seek), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse( - )) - response = await client.seek(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.SeekResponse) - - -@pytest.mark.asyncio -async def test_seek_async_from_dict(): - await test_seek_async(request_type=dict) - - -def test_seek_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.SeekRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seek), - '__call__') as call: - call.return_value = pubsub.SeekResponse() - client.seek(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_seek_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = pubsub.SeekRequest() - - request.subscription = 'subscription_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.seek), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) - await client.seek(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'subscription=subscription_value', - ) in kw['metadata'] - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SubscriberClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. 
- transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SubscriberClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SubscriberClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SubscriberClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = SubscriberClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.SubscriberGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.SubscriberGrpcTransport, - transports.SubscriberGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = SubscriberClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SubscriberGrpcTransport, - ) - -def test_subscriber_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SubscriberTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_subscriber_base_transport(): - # Instantiate the base transport. - with mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.SubscriberTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_subscription', - 'get_subscription', - 'update_subscription', - 'list_subscriptions', - 'delete_subscription', - 'modify_ack_deadline', - 'acknowledge', - 'pull', - 'streaming_pull', - 'modify_push_config', - 'get_snapshot', - 'list_snapshots', - 'create_snapshot', - 'update_snapshot', - 'delete_snapshot', - 'seek', - 'set_iam_policy', - 'get_iam_policy', - 'test_iam_permissions', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_subscriber_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SubscriberTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - quota_project_id="octopus", - ) - - -def test_subscriber_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.pubsub_v1.services.subscriber.transports.SubscriberTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SubscriberTransport() - adc.assert_called_once() - - -def test_subscriber_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SubscriberClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.SubscriberGrpcTransport, - transports.SubscriberGrpcAsyncIOTransport, - ], -) -def test_subscriber_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/pubsub',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.SubscriberGrpcTransport, grpc_helpers), - (transports.SubscriberGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_subscriber_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "pubsub.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - 'https://www.googleapis.com/auth/pubsub', -), - scopes=["1", "2"], - default_host="pubsub.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport]) -def test_subscriber_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_subscriber_host_no_port(transport_name): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'pubsub.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_subscriber_host_with_port(transport_name): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='pubsub.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'pubsub.googleapis.com:8000' - ) - -def test_subscriber_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.SubscriberGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_subscriber_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.SubscriberGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport]) -def test_subscriber_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio 
transport constructor. -@pytest.mark.parametrize("transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport]) -def test_subscriber_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_snapshot_path(): - project = "squid" - snapshot = "clam" - expected = "projects/{project}/snapshots/{snapshot}".format(project=project, snapshot=snapshot, ) - actual = SubscriberClient.snapshot_path(project, snapshot) - assert expected == actual - - -def test_parse_snapshot_path(): - expected = { - "project": "whelk", - "snapshot": "octopus", - } - path = SubscriberClient.snapshot_path(**expected) - - # Check that the path construction is reversible. 
- actual = SubscriberClient.parse_snapshot_path(path) - assert expected == actual - -def test_subscription_path(): - project = "oyster" - subscription = "nudibranch" - expected = "projects/{project}/subscriptions/{subscription}".format(project=project, subscription=subscription, ) - actual = SubscriberClient.subscription_path(project, subscription) - assert expected == actual - - -def test_parse_subscription_path(): - expected = { - "project": "cuttlefish", - "subscription": "mussel", - } - path = SubscriberClient.subscription_path(**expected) - - # Check that the path construction is reversible. - actual = SubscriberClient.parse_subscription_path(path) - assert expected == actual - -def test_topic_path(): - project = "winkle" - topic = "nautilus" - expected = "projects/{project}/topics/{topic}".format(project=project, topic=topic, ) - actual = SubscriberClient.topic_path(project, topic) - assert expected == actual - - -def test_parse_topic_path(): - expected = { - "project": "scallop", - "topic": "abalone", - } - path = SubscriberClient.topic_path(**expected) - - # Check that the path construction is reversible. - actual = SubscriberClient.parse_topic_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = SubscriberClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = SubscriberClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. 
- actual = SubscriberClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = SubscriberClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = SubscriberClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = SubscriberClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = SubscriberClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = SubscriberClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = SubscriberClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = SubscriberClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = SubscriberClient.common_project_path(**expected) - - # Check that the path construction is reversible. 
- actual = SubscriberClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = SubscriberClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = SubscriberClient.common_location_path(**expected) - - # Check that the path construction is reversible. - actual = SubscriberClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.SubscriberTransport, '_prep_wrapped_messages') as prep: - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.SubscriberTransport, '_prep_wrapped_messages') as prep: - transport_class = SubscriberClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_set_iam_policy(transport: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty 
request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - response = client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" -@pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.SetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - response = await client.set_iam_policy(request) - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - -def test_set_iam_policy_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -@pytest.mark.asyncio -async def test_set_iam_policy_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.SetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.set_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - -def test_set_iam_policy_from_dict(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = policy_pb2.Policy() - - response = client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -@pytest.mark.asyncio -async def test_set_iam_policy_from_dict_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.set_iam_policy( - request={ - "resource": "resource_value", - "policy": policy_pb2.Policy(version=774), - } - ) - call.assert_called() - - -def test_get_iam_policy(transport: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy(version=774, etag=b"etag_blob",) - - response = client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -@pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.GetIamPolicyRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy(version=774, etag=b"etag_blob",) - ) - - response = await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - assert response.version == 774 - - assert response.etag == b"etag_blob" - - -def test_get_iam_policy_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - call.return_value = policy_pb2.Policy() - - client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_iam_policy_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.GetIamPolicyRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_iam_policy), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy_pb2.Policy()) - - await client.get_iam_policy(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_get_iam_policy_from_dict(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = policy_pb2.Policy() - - response = client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_get_iam_policy_from_dict_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - policy_pb2.Policy() - ) - - response = await client.get_iam_policy( - request={ - "resource": "resource_value", - "options": options_pb2.GetPolicyOptions(requested_policy_version=2598), - } - ) - call.assert_called() - - -def test_test_iam_permissions(transport: str = "grpc"): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = iam_policy_pb2.TestIamPermissionsResponse( - permissions=["permissions_value"], - ) - - response = client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = iam_policy_pb2.TestIamPermissionsRequest() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse(permissions=["permissions_value"],) - ) - - response = await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - - assert args[0] == request - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - assert response.permissions == ["permissions_value"] - - -def test_test_iam_permissions_field_headers(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_test_iam_permissions_field_headers_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = iam_policy_pb2.TestIamPermissionsRequest() - request.resource = "resource/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - await client.test_iam_permissions(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] - - -def test_test_iam_permissions_from_dict(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = iam_policy_pb2.TestIamPermissionsResponse() - - response = client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - -@pytest.mark.asyncio -async def test_test_iam_permissions_from_dict_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.test_iam_permissions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - iam_policy_pb2.TestIamPermissionsResponse() - ) - - response = await client.test_iam_permissions( - request={ - "resource": "resource_value", - "permissions": ["permissions_value"], - } - ) - call.assert_called() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - -@pytest.mark.parametrize("client_class,transport_class", [ - (SubscriberClient, transports.SubscriberGrpcTransport), - (SubscriberAsyncClient, transports.SubscriberGrpcAsyncIOTransport), -]) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) From 82321f72643a28d028ffe44c0c18b3076b584fc7 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 1 Jun 2022 13:06:57 -0400 Subject: [PATCH 0832/1197] docs: fix changelog header to consistent size (#700) --- packages/google-cloud-pubsub/CHANGELOG.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 997583f01ce2..0d786fa1c2ce 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,7 +5,7 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history -### [2.12.1](https://github.com/googleapis/python-pubsub/compare/v2.12.0...v2.12.1) (2022-05-11) +## [2.12.1](https://github.com/googleapis/python-pubsub/compare/v2.12.0...v2.12.1) (2022-05-11) ### Bug Fixes @@ -112,7 +112,7 @@ * clarify the types 
of Message parameters ([#486](https://www.github.com/googleapis/python-pubsub/issues/486)) ([633e91b](https://www.github.com/googleapis/python-pubsub/commit/633e91bbfc0a8f4f484089acff6812b754f40c75)) -### [2.7.1](https://www.github.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1) (2021-08-13) +## [2.7.1](https://www.github.com/googleapis/python-pubsub/compare/v2.7.0...v2.7.1) (2021-08-13) ### Bug Fixes From 48a96693ddfbb44864f7ff94b6f0a5524ec875ec Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 1 Jun 2022 22:46:10 -0400 Subject: [PATCH 0833/1197] fix(deps): require protobuf <4.0.0dev (#699) Towards b/234444818 --- packages/google-cloud-pubsub/setup.py | 5 +++-- packages/google-cloud-pubsub/testing/constraints-3.6.txt | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index f178df851530..425b48626728 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -34,8 +34,9 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - "proto-plus >= 1.15.0", - "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", + "proto-plus >= 1.15.0, <2.0.0dev", + "protobuf >= 3.19.0, <4.0.0dev", + "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "grpcio-status >= 1.16.0", ] extras = {"libcst": "libcst >= 0.3.10"} diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt index 0ce29f32c95e..20da08a34519 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.6.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.6.txt @@ -8,4 +8,5 @@ grpcio==1.38.1 google-api-core==1.31.5 libcst==0.3.10 proto-plus==1.15.0 -grpc-google-iam-v1==0.12.3 +grpc-google-iam-v1==0.12.4 +protobuf==3.19.0 From 6e3157f8f18d48a7dad6c22d5cfed1d382670826 Mon Sep 17 00:00:00 2001 
From: Anthonios Partheniou Date: Mon, 6 Jun 2022 10:58:11 -0400 Subject: [PATCH 0834/1197] chore: test minimum dependencies in python 3.7 (#704) --- .../google-cloud-pubsub/testing/constraints-3.7.txt | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index e69de29bb2d1..20da08a34519 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -0,0 +1,12 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +grpcio==1.38.1 +google-api-core==1.31.5 +libcst==0.3.10 +proto-plus==1.15.0 +grpc-google-iam-v1==0.12.4 +protobuf==3.19.0 From f90ac8469a1cc23f41219d69ba9f0e36f98f2b78 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 6 Jun 2022 11:53:51 -0400 Subject: [PATCH 0835/1197] ci: add owlbot-post-processor to required checks (#698) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.github/sync-repo-settings.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index c50f7a03f40e..edda0b5707ce 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -15,3 +15,4 @@ branchProtectionRules: - 'Samples - Python 3.8' - 'Samples - Python 3.9' - 'Samples - Python 3.10' + - 'OwlBot Post Processor' From 8fa6be0893df050175b4f1d78fc71e291bd1dace Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Jun 2022 
21:44:29 -0400 Subject: [PATCH 0836/1197] chore(main): release 2.13.0 (#686) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 18 ++++++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0d786fa1c2ce..b458563ff6fe 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,24 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.0](https://github.com/googleapis/python-pubsub/compare/v2.12.1...v2.13.0) (2022-06-06) + + +### Features + +* add BigQuery configuration for subscriptions ([#685](https://github.com/googleapis/python-pubsub/issues/685)) ([6fa03be](https://github.com/googleapis/python-pubsub/commit/6fa03be779d6a7105bb7c029b95d4c357d2a49df)) + + +### Bug Fixes + +* add info log for bidi streaming pull ack_deadline requests ([#692](https://github.com/googleapis/python-pubsub/issues/692)) ([fcb67dd](https://github.com/googleapis/python-pubsub/commit/fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9)) +* **deps:** require protobuf <4.0.0dev ([#699](https://github.com/googleapis/python-pubsub/issues/699)) ([dcdf013](https://github.com/googleapis/python-pubsub/commit/dcdf0137905949662ce191adcb6dd588bd74f9fe)) + + +### Documentation + +* fix changelog header to consistent size ([#700](https://github.com/googleapis/python-pubsub/issues/700)) ([93f2b62](https://github.com/googleapis/python-pubsub/commit/93f2b62a18f622d8da71043a6b6d3f53295db308)) + ## [2.12.1](https://github.com/googleapis/python-pubsub/compare/v2.12.0...v2.12.1) (2022-05-11) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 425b48626728..fac5d5d2ffaa 100644 --- 
a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.12.1" +version = "2.13.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 1f9c844017f58ac4f91bf5ccfd00b637a6cb33cf Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 12 Jun 2022 10:41:59 -0400 Subject: [PATCH 0837/1197] chore: add prerelease nox session (#719) Source-Link: https://github.com/googleapis/synthtool/commit/050953d60f71b4ed4be563e032f03c192c50332f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/continuous/prerelease-deps.cfg | 7 ++ .../.kokoro/presubmit/prerelease-deps.cfg | 7 ++ packages/google-cloud-pubsub/noxfile.py | 64 +++++++++++++++++++ 4 files changed, 80 insertions(+), 2 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 757c9dca75ad..2185b591844c 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 -# created: 2022-05-05T22:08:23.383410683Z + digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 +# created: 2022-06-12T13:11:45.905884945Z diff --git a/packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg b/packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg new file mode 100644 index 000000000000..3595fb43f5c0 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Only run this nox session. 
+env_vars: { + key: "NOX_SESSION" + value: "prerelease_deps" +} diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 914b76e5b8ab..ac91400048e5 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -376,3 +376,67 @@ def docfx(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def prerelease_deps(session): + """Run all tests with prerelease versions of dependencies installed.""" + + prerel_deps = [ + "protobuf", + "googleapis-common-protos", + "google-auth", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + # dependencies of google-auth + "cryptography", + "pyasn1", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = ["requests"] + session.install(*other_deps) + + session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. + with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + # Don't overwrite prerelease packages. + deps = [dep for dep in deps if dep not in prerel_deps] + # We use --no-deps to ensure that pre-release versions aren't overwritten + # by the version ranges in setup.py. 
+ session.install(*deps) + session.install("--no-deps", "-e", ".[all]") + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + + session.run("py.test", "tests/unit") + session.run("py.test", "tests/system") + session.run("py.test", "samples/snippets") From eeca2e03972e33341a12f9eecae319a4baff9ad8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 13 Jun 2022 06:42:20 -0400 Subject: [PATCH 0838/1197] chore(python): add missing import for prerelease testing (#720) Source-Link: https://github.com/googleapis/synthtool/commit/d2871d98e1e767d4ad49a557ff979236d64361a1 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/noxfile.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 2185b591844c..50b29ffd2050 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:65e656411895bff71cffcae97246966460160028f253c2e45b7a25d805a5b142 -# created: 2022-06-12T13:11:45.905884945Z + digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 +# created: 2022-06-12T16:09:31.61859086Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index ac91400048e5..e6cd9e990d35 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -19,6 +19,7 @@ from __future__ import absolute_import import os import pathlib +import re import shutil import warnings From 9701063989216a1b4047ecf0e3ad7f9cdf3dcb48 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 24 Jun 2022 10:51:27 -0400 Subject: [PATCH 0839/1197] fix: change info logs to debug (#693) --- .../pubsub_v1/subscriber/_protocol/heartbeater.py | 2 +- .../cloud/pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../subscriber/_protocol/streaming_pull_manager.py | 12 ++++++------ .../samples/snippets/subscriber.py | 4 ++-- .../tests/unit/pubsub_v1/subscriber/test_leaser.py | 4 ++-- .../subscriber/test_streaming_pull_manager.py | 2 +- 6 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py index 7fa964ff622e..a053d5fe4e08 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -48,7 +48,7 @@ def heartbeat(self) -> None: _LOGGER.debug("Sent heartbeat.") self._stop_event.wait(timeout=self._period) - _LOGGER.info("%s exiting.", _HEARTBEAT_WORKER_NAME) + _LOGGER.debug("%s exiting.", _HEARTBEAT_WORKER_NAME) def start(self) -> None: with 
self._operational_lock: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index dc2b14fc60d0..508f4d7cefcb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -212,7 +212,7 @@ def maintain_leases(self) -> None: _LOGGER.debug("Snoozing lease management for %f seconds.", snooze) self._stop_event.wait(timeout=snooze) - _LOGGER.info("%s exiting.", _LEASE_WORKER_NAME) + _LOGGER.debug("%s exiting.", _LEASE_WORKER_NAME) def start(self) -> None: with self._operational_lock: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 894c41b4c8b1..f7e44cb7f1ce 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -743,7 +743,7 @@ def heartbeat(self) -> bool: request = gapic_types.StreamingPullRequest( stream_ack_deadline_seconds=self.ack_deadline ) - _LOGGER.info( + _LOGGER.debug( "Sending new ack_deadline of %d seconds.", self.ack_deadline ) else: @@ -1081,9 +1081,9 @@ def _should_recover(self, exception: BaseException) -> bool: # If this is in the list of idempotent exceptions, then we want to # recover. 
if isinstance(exception, _RETRYABLE_STREAM_ERRORS): - _LOGGER.info("Observed recoverable stream error %s", exception) + _LOGGER.debug("Observed recoverable stream error %s", exception) return True - _LOGGER.info("Observed non-recoverable stream error %s", exception) + _LOGGER.debug("Observed non-recoverable stream error %s", exception) return False def _should_terminate(self, exception: BaseException) -> bool: @@ -1101,9 +1101,9 @@ def _should_terminate(self, exception: BaseException) -> bool: """ exception = _wrap_as_exception(exception) if isinstance(exception, _TERMINATING_STREAM_ERRORS): - _LOGGER.info("Observed terminating stream error %s", exception) + _LOGGER.debug("Observed terminating stream error %s", exception) return True - _LOGGER.info("Observed non-terminating stream error %s", exception) + _LOGGER.debug("Observed non-terminating stream error %s", exception) return False def _on_rpc_done(self, future: Any) -> None: @@ -1117,7 +1117,7 @@ def _on_rpc_done(self, future: Any) -> None: with shutting everything down. This is to prevent blocking in the background consumer and preventing it from being ``joined()``. 
""" - _LOGGER.info("RPC termination has signaled streaming pull manager shutdown.") + _LOGGER.debug("RPC termination has signaled streaming pull manager shutdown.") error = _wrap_as_exception(future) thread = threading.Thread( name=_RPC_ERROR_THREAD_NAME, target=self._shutdown, kwargs={"reason": error} diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index a642bf03a966..2967b52541f0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -771,14 +771,14 @@ def synchronous_pull_with_lease_management( "ack_deadline_seconds": 15, } ) - logger.info(f"Reset ack deadline for {msg_data}.") + logger.debug(f"Reset ack deadline for {msg_data}.") # If the process is complete, acknowledge the message. else: subscriber.acknowledge( request={"subscription": subscription_path, "ack_ids": [ack_id]} ) - logger.info(f"Acknowledged {msg_data}.") + logger.debug(f"Acknowledged {msg_data}.") processes.pop(process) print( f"Received and acknowledged {len(response.received_messages)} messages from {subscription_path}." 
diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 890c3c947cce..ccc3ec99f8e9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -89,7 +89,7 @@ def create_manager(flow_control=types.FlowControl()): def test_maintain_leases_inactive_manager(caplog): - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) manager = create_manager() manager.is_active = False @@ -107,7 +107,7 @@ def test_maintain_leases_inactive_manager(caplog): def test_maintain_leases_stopped(caplog): - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) manager = create_manager() leaser_ = leaser.Leaser(manager) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index ad1647119c04..ab21a1597e93 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1036,7 +1036,7 @@ def test_heartbeat_inactive(): def test_heartbeat_stream_ack_deadline_seconds(caplog): - caplog.set_level(logging.INFO) + caplog.set_level(logging.DEBUG) manager = make_manager() manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager._rpc.is_active = True From e3031707c808d9dc6c224df86c18a9156974cab9 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 7 Jul 2022 13:29:03 -0400 Subject: [PATCH 0840/1197] ci: fix flake8 errors for samples/snippets (#732) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/test-samples-impl.sh | 4 +- packages/google-cloud-pubsub/README.rst | 2 +- 
packages/google-cloud-pubsub/noxfile.py | 83 ++++++++++++------- packages/google-cloud-pubsub/owlbot.py | 15 +--- .../samples/snippets/noxfile.py | 2 +- .../samples/snippets/subscriber_test.py | 2 +- .../templates/install_deps.tmpl.rst | 2 +- 8 files changed, 62 insertions(+), 52 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 50b29ffd2050..1ce608523524 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:b2dc5f80edcf5d4486c39068c9fa11f7f851d9568eea4dcba130f994ea9b5e97 -# created: 2022-06-12T16:09:31.61859086Z + digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c +# created: 2022-07-05T18:31:20.838186805Z diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh index 8a324c9c7bc6..2c6500cae0b9 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -33,7 +33,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then @@ -76,7 +76,7 @@ for file in samples/**/requirements.txt; do echo "------------------------------------------------------------" # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" + python3.9 -m nox -s "$RUN_TESTS_SESSION" EXIT=$? # If this is a periodic build, send the test log to the FlakyBot. 
diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 6432525b1cb8..e7feebdf506e 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -60,7 +60,7 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.6 +Python >= 3.7 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index e6cd9e990d35..c06c1adfde66 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -383,28 +383,15 @@ def docfx(session): def prerelease_deps(session): """Run all tests with prerelease versions of dependencies installed.""" - prerel_deps = [ - "protobuf", - "googleapis-common-protos", - "google-auth", - "grpcio", - "grpcio-status", - "google-api-core", - "proto-plus", - # dependencies of google-auth - "cryptography", - "pyasn1", - ] - - for dep in prerel_deps: - session.install("--pre", "--no-deps", "--upgrade", dep) - - # Remaining dependencies - other_deps = ["requests"] - session.install(*other_deps) - + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) - session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + + SYSTEM_TEST_EXTRAS + ) + session.install(*system_deps_all) # Because we test minimum dependency versions on the minimum Python # version, the first version we test with in the unit tests sessions has a @@ -418,19 +405,44 @@ def prerelease_deps(session): constraints_text = constraints_file.read() # Ignore leading whitespace and comment lines. - deps = [ + constraints_deps = [ match.group(1) for match in re.finditer( r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE ) ] - # Don't overwrite prerelease packages. 
- deps = [dep for dep in deps if dep not in prerel_deps] - # We use --no-deps to ensure that pre-release versions aren't overwritten - # by the version ranges in setup.py. - session.install(*deps) - session.install("--no-deps", "-e", ".[all]") + session.install(*constraints_deps) + + if os.path.exists("samples/snippets/requirements.txt"): + session.install("-r", "samples/snippets/requirements.txt") + + if os.path.exists("samples/snippets/requirements-test.txt"): + session.install("-r", "samples/snippets/requirements-test.txt") + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + "google-auth", + ] + session.install(*other_deps) # Print out prerelease package versions session.run( @@ -439,5 +451,16 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("py.test", "tests/unit") - session.run("py.test", "tests/system") - session.run("py.test", "samples/snippets") + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. + if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): + session.run("py.test", "tests/system") + + snippets_test_path = os.path.join("samples", "snippets") + + # Only run samples tests if found. 
+ if os.path.exists(snippets_test_path): + session.run("py.test", "samples/snippets") diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index c0176106bccf..e36c228054af 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -341,19 +341,6 @@ "pip install google-pubsub", "pip install google-cloud-pubsub", ) - - s.move( - library, - excludes=[ - "docs/**/*", - "nox.py", - "README.rst", - "setup.py", - f"google/cloud/pubsub_{library.name}/__init__.py", - f"google/cloud/pubsub_{library.name}/types.py", - ], - ) - s.remove_staging_dirs() # ---------------------------------------------------------------------------- @@ -367,7 +354,7 @@ system_test_python_versions=["3.10"], system_test_external_dependencies=["psutil"], ) -s.move(templated_files, excludes=[".coveragerc", ".github/CODEOWNERS"]) +s.move(templated_files, excludes=["README.rst", ".coveragerc", ".github/CODEOWNERS"]) python.configure_previous_major_version_branches() # ---------------------------------------------------------------------------- # Samples templates diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 38bb0a572b81..5fcb9d7461f2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index e07aba775d39..052073142924 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -141,7 +141,7 @@ def _publish_messages( publisher_client: pubsub_v1.PublisherClient, topic: str, message_num: int = 5, - **attrs: Any, + **attrs: Any, # noqa: ANN401 ) -> List[str]: message_ids = [] for n in range(message_num): diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst index 275d649890d7..6f069c6c87a5 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.6+. +#. Create a virtualenv. Samples are compatible with Python 3.7+. .. code-block:: bash From 44a4a324948c06d40a8f2e1394a3897c30324d32 Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Thu, 7 Jul 2022 14:31:43 -0400 Subject: [PATCH 0841/1197] samples: create BigQuery subscription (#722) * chore: Remove notes about ordering keys being experimental. * Revert "chore: Remove notes about ordering keys being experimental." This reverts commit 38b2a3e91dd4f3f3c6657f4660fa1df8c0239124. 
* feat: Add support for server-side flow control * Add unit test for flow control * samples: create BigQuery subscription * samples: update BigQuery subscription test * Fix linter error for PR 722 * samples: create BigQuery subscription fix unused variable Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../samples/snippets/requirements-test.txt | 3 +- .../samples/snippets/requirements.txt | 2 +- .../samples/snippets/subscriber.py | 51 ++++++++++++++++ .../samples/snippets/subscriber_test.py | 60 ++++++++++++++++++- 4 files changed, 113 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 3f18f3b00dcd..ce45323b7bea 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,5 @@ backoff==2.0.1 pytest==7.1.2 mock==4.0.3 -flaky==3.7.0 \ No newline at end of file +flaky==3.7.0 +google-cloud-bigquery==1.28.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 27904a8f7f6f..f684cca8df60 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.12.1 +google-cloud-pubsub==2.13.0 avro==1.11.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 2967b52541f0..fb2c98f33e57 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -273,6 +273,42 @@ def create_subscription_with_exactly_once_delivery( # [END pubsub_create_subscription_with_exactly_once_delivery] +def create_bigquery_subscription( + project_id: str, topic_id: 
str, subscription_id: str, bigquery_table_id: str +) -> None: + """Create a new BigQuery subscription on the given topic.""" + # [START pubsub_create_bigquery_subscription] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + # bigquery_table_id = "your-project.your-dataset.your-table" + + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + bigquery_config = pubsub_v1.types.BigQueryConfig(table=bigquery_table_id, write_metadata=True) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscription = subscriber.create_subscription( + request={ + "name": subscription_path, + "topic": topic_path, + "bigquery_config": bigquery_config, + } + ) + + print(f"BigQuery subscription created: {subscription}.") + print(f"Table for subscription is: {bigquery_table_id}") + # [END pubsub_create_bigquery_subscription] + + def delete_subscription(project_id: str, subscription_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] @@ -922,6 +958,14 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: "subscription_id" ) + create_bigquery_subscription_parser = subparsers.add_parser( + "create-biquery", + help=create_bigquery_subscription.__doc__, + ) + create_bigquery_subscription_parser.add_argument("topic_id") + create_bigquery_subscription_parser.add_argument("subscription_id") + create_bigquery_subscription_parser.add_argument("bigquery_table_id") + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -1050,6 +1094,13 @@ def callback(message: 
pubsub_v1.subscriber.message.Message) -> None: create_subscription_with_exactly_once_delivery( args.project_id, args.topic_id, args.subscription_id ) + elif args.command == "create-bigquery": + create_bigquery_subscription( + args.project_id, + args.topic_id, + args.subscription_id, + args.bigquery_table_id, + ) elif args.command == "delete": delete_subscription(args.project_id, args.subscription_id) elif args.command == "update-push": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 052073142924..395c50f56316 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -23,7 +23,7 @@ import backoff from flaky import flaky from google.api_core.exceptions import NotFound -from google.cloud import pubsub_v1 +from google.cloud import bigquery, pubsub_v1 import pytest import subscriber @@ -31,6 +31,7 @@ # This uuid is shared across tests which run in parallel. 
UUID = uuid.uuid4().hex PY_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" +UNDERSCORE_PY_VERSION = PY_VERSION.replace(".", "_") PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = f"subscription-test-topic-{PY_VERSION}-{UUID}" DEAD_LETTER_TOPIC = f"subscription-test-dead-letter-topic-{PY_VERSION}-{UUID}" @@ -42,6 +43,8 @@ DEFAULT_MAX_DELIVERY_ATTEMPTS = 5 UPDATED_MAX_DELIVERY_ATTEMPTS = 20 FILTER = 'attributes.author="unknown"' +BIGQUERY_DATASET_ID = f"python_samples_dataset_{UNDERSCORE_PY_VERSION}_{UUID}" +BIGQUERY_TABLE_ID = f"python_samples_table_{UNDERSCORE_PY_VERSION}_{UUID}" C = TypeVar("C", bound=Callable[..., Any]) @@ -545,6 +548,61 @@ def test_update_push_subscription( subscriber_client.delete_subscription(request={"subscription": subscription_path}) +@pytest.fixture(scope="module") +def bigquery_table() -> Generator[str, None, None]: + client = bigquery.Client() + dataset = bigquery.Dataset(f"{PROJECT_ID}.{BIGQUERY_DATASET_ID}") + dataset.location = "US" + dataset = client.create_dataset(dataset) + + table_id = f"{PROJECT_ID}.{BIGQUERY_DATASET_ID}.{BIGQUERY_TABLE_ID}" + schema = [ + bigquery.SchemaField("data", "STRING", mode="REQUIRED"), + bigquery.SchemaField("message_id", "STRING", mode="REQUIRED"), + bigquery.SchemaField("attributes", "STRING", mode="REQUIRED"), + bigquery.SchemaField("subscription_name", "STRING", mode="REQUIRED"), + bigquery.SchemaField("publish_time", "TIMESTAMP", mode="REQUIRED"), + ] + + table = bigquery.Table(table_id, schema=schema) + table = client.create_table(table) + + yield table_id + + client.delete_dataset(dataset, delete_contents=True) + + +def test_create_bigquery_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + bigquery_table: str, + capsys: CaptureFixture[str], +) -> None: + bigquery_subscription_for_create_name = ( + f"subscription-test-subscription-bigquery-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, 
bigquery_subscription_for_create_name + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_bigquery_subscription( + PROJECT_ID, TOPIC, bigquery_subscription_for_create_name, bigquery_table + ) + + out, _ = capsys.readouterr() + assert f"{bigquery_subscription_for_create_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + def test_delete_subscription( subscriber_client: pubsub_v1.SubscriberClient, topic: str, From a46849fb613e85b090c1171a11103dcac016bb2d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 7 Jul 2022 15:51:07 -0400 Subject: [PATCH 0842/1197] fix: require python 3.7+ (#730) * chore(python): drop python 3.6 Source-Link: https://github.com/googleapis/synthtool/commit/4f89b13af10d086458f9b379e56a614f9d6dab7b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c * add api_description to .repo-metadata.json * require python 3.7+ in setup.py * remove python 3.6 sample configs Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: acocuzzo Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.6/common.cfg | 40 ------------------- .../.kokoro/samples/python3.6/continuous.cfg | 7 ---- .../samples/python3.6/periodic-head.cfg | 11 ----- .../.kokoro/samples/python3.6/periodic.cfg | 6 --- .../.kokoro/samples/python3.6/presubmit.cfg | 6 --- .../google-cloud-pubsub/.repo-metadata.json | 3 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 6 +-- packages/google-cloud-pubsub/README.rst | 2 +- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/setup.py | 3 +- 12 files 
changed, 9 insertions(+), 81 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index e5be6edbd54d..5531b0141297 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg deleted file mode 100644 index 068788e9e910..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.6" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py36" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg deleted file mode 100644 index 7218af1499e5..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/continuous.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git 
a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.6/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index 21bdab122f03..8d12e4cc0b62 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -13,5 +13,6 @@ "default_version": "v1", "codeowner_team": "@googleapis/api-pubsub", "api_shortname": "pubsub", - "library_type": "GAPIC_COMBO" + "library_type": "GAPIC_COMBO", + "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications." } diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 26c1d580b5e2..447a0a4bd643 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.6, 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. 
- The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -221,13 +221,11 @@ Supported Python Versions We support: -- `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ -.. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ @@ -239,7 +237,7 @@ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-pubsub/blob/main/noxfile.py -We also explicitly decided to support Python 3 beginning with version 3.6. +We also explicitly decided to support Python 3 beginning with version 3.7. Reasons for this include: - Encouraging use of newest versions of Python 3 diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index e7feebdf506e..ed1965b8834d 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -64,7 +64,7 @@ Python >= 3.7 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. +Python <= 3.6. The last version of this library compatible with Python 2.7 is google-cloud-pubsub==1.7.0. 
diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index c06c1adfde66..728c02f9a97d 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -33,7 +33,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index e36c228054af..bc2f8fb17f86 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -350,7 +350,7 @@ microgenerator=True, samples=True, cov_level=100, - unit_test_python_versions=["3.6", "3.7", "3.8", "3.9", "3.10"], + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10"], system_test_python_versions=["3.10"], system_test_external_dependencies=["psutil"], ) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index fac5d5d2ffaa..de8a1072c1ed 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -79,7 +79,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", @@ -92,7 +91,7 @@ namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, - python_requires=">=3.6", + python_requires=">=3.7", scripts=["scripts/fixup_pubsub_v1_keywords.py"], include_package_data=True, zip_safe=False, From 853066b2198298c8bfedf7cda34807d733c37676 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 7 Jul 2022 17:07:45 -0400 Subject: [PATCH 0843/1197] chore(main): release 2.13.1 (#727) 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index b458563ff6fe..73be201e2ac1 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.1](https://github.com/googleapis/python-pubsub/compare/v2.13.0...v2.13.1) (2022-07-07) + + +### Bug Fixes + +* change info logs to debug ([#693](https://github.com/googleapis/python-pubsub/issues/693)) ([950fbce](https://github.com/googleapis/python-pubsub/commit/950fbce009fd56a55feea971f8e6083fa84d54fc)) +* require python 3.7+ ([#730](https://github.com/googleapis/python-pubsub/issues/730)) ([0d949b8](https://github.com/googleapis/python-pubsub/commit/0d949b8da096d1b0a5e26f607b1cd79fb560252a)) + ## [2.13.0](https://github.com/googleapis/python-pubsub/compare/v2.12.1...v2.13.0) (2022-06-06) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index de8a1072c1ed..739b5ba00775 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.0" +version = "2.13.1" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 553c953f188f26414dad59f49609bb9cfccf0e99 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 8 Jul 2022 09:39:38 -0400 Subject: [PATCH 0844/1197] fix(deps): require google-api-core >= 2.8.0 (#726) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add audience 
parameter PiperOrigin-RevId: 456827138 Source-Link: https://github.com/googleapis/googleapis/commit/23f1a157189581734c7a77cddfeb7c5bc1e440ae Source-Link: https://github.com/googleapis/googleapis-gen/commit/4075a8514f676691ec156688a5bbf183aa9893ce Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDA3NWE4NTE0ZjY3NjY5MWVjMTU2Njg4YTViYmYxODNhYTk4OTNjZSJ9 * chore: use gapic-generator-python 1.1.1 PiperOrigin-RevId: 459095142 Source-Link: https://github.com/googleapis/googleapis/commit/4f1be992601ed740a581a32cedc4e7b6c6a27793 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ae686d9cde4fc3e36d0ac02efb8643b15890c1ed Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYWU2ODZkOWNkZTRmYzNlMzZkMGFjMDJlZmI4NjQzYjE1ODkwYzFlZCJ9 * fix(deps): require google-api-core 2.8.0 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- packages/google-cloud-pubsub/setup.py | 5 +---- .../google-cloud-pubsub/testing/constraints-3.6.txt | 12 ------------ .../google-cloud-pubsub/testing/constraints-3.7.txt | 2 +- 3 files changed, 2 insertions(+), 17 deletions(-) delete mode 100644 packages/google-cloud-pubsub/testing/constraints-3.6.txt diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 739b5ba00775..60c0a144c00b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,10 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.31.5, 
<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", + "google-api-core[grpc] >= 2.8.0, <3.0.0dev", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.6.txt b/packages/google-cloud-pubsub/testing/constraints-3.6.txt deleted file mode 100644 index 20da08a34519..000000000000 --- a/packages/google-cloud-pubsub/testing/constraints-3.6.txt +++ /dev/null @@ -1,12 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List *all* library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -grpcio==1.38.1 -google-api-core==1.31.5 -libcst==0.3.10 -proto-plus==1.15.0 -grpc-google-iam-v1==0.12.4 -protobuf==3.19.0 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 20da08a34519..90fcc601c15d 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 grpcio==1.38.1 -google-api-core==1.31.5 +google-api-core==2.8.0 libcst==0.3.10 proto-plus==1.15.0 grpc-google-iam-v1==0.12.4 From 72c4fbaf2c74e612bf6957e17c72fe730615ce8f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 11 Jul 2022 18:16:57 -0400 Subject: [PATCH 0845/1197] chore(main): release 2.13.2 (#733) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md 
b/packages/google-cloud-pubsub/CHANGELOG.md index 73be201e2ac1..93015a4c8c99 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.2](https://github.com/googleapis/python-pubsub/compare/v2.13.1...v2.13.2) (2022-07-08) + + +### Bug Fixes + +* **deps:** require google-api-core >= 2.8.0 ([#726](https://github.com/googleapis/python-pubsub/issues/726)) ([c80ad41](https://github.com/googleapis/python-pubsub/commit/c80ad41abf36c709f8299a6fa22f3672705b1b6d)) + ## [2.13.1](https://github.com/googleapis/python-pubsub/compare/v2.13.0...v2.13.1) (2022-07-07) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 60c0a144c00b..507a5fb84ea9 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.1" +version = "2.13.2" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From e07cdcc4795895355d3ece69d14b3e89b9ba0bc0 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 12 Jul 2022 18:58:37 +0200 Subject: [PATCH 0846/1197] chore(deps): update all dependencies (#708) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt 
b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index ce45323b7bea..41002ca45f07 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==2.0.1 +backoff==2.1.2 pytest==7.1.2 mock==4.0.3 flaky==3.7.0 From 46ea293d6032ae43b0af6f44c9b6ca294a0de67b Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 13 Jul 2022 13:27:21 -0400 Subject: [PATCH 0847/1197] fix(deps): require google-api-core>=1.32.0,>=2.8.0 (#735) * fix(deps): require google-api-core>=1.32.0,>=2.8.0 * chore: update constraints --- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 507a5fb84ea9..de49ce3d6ed8 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -30,7 +30,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 - "google-api-core[grpc] >= 2.8.0, <3.0.0dev", + "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.15.0, <2.0.0dev", "protobuf >= 3.19.0, <4.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 90fcc601c15d..38e0f719db00 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -5,7 +5,7 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 grpcio==1.38.1 -google-api-core==2.8.0 +google-api-core==1.32.0 libcst==0.3.10 proto-plus==1.15.0 
grpc-google-iam-v1==0.12.4 From a5c5e837e7b7faf657b3b6bdf2f04cc109d928fa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 13 Jul 2022 13:50:52 -0400 Subject: [PATCH 0848/1197] chore(main): release 2.13.3 (#736) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 93015a4c8c99..3f84564e4df3 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.3](https://github.com/googleapis/python-pubsub/compare/v2.13.2...v2.13.3) (2022-07-13) + + +### Bug Fixes + +* **deps:** require google-api-core>=1.32.0,>=2.8.0 ([#735](https://github.com/googleapis/python-pubsub/issues/735)) ([a5624fb](https://github.com/googleapis/python-pubsub/commit/a5624fbee2951c7f0c3e413d7d399a41fa0aa4bf)) + ## [2.13.2](https://github.com/googleapis/python-pubsub/compare/v2.13.1...v2.13.2) (2022-07-08) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index de49ce3d6ed8..491f19840e54 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.2" +version = "2.13.3" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From d73d5a9671bd05cc21e666a26a3efa3ed574d404 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 15 Jul 2022 14:09:12 -0400 Subject: [PATCH 0849/1197] fix: Remove bidi modacks on StreamingPull initial request (#738) --- 
.../subscriber/_protocol/streaming_pull_manager.py | 13 ++----------- .../subscriber/test_streaming_pull_manager.py | 4 ++-- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index f7e44cb7f1ce..f909c8eec10f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -927,20 +927,11 @@ def _get_initial_request( A request suitable for being the first request on the stream (and not suitable for any other purpose). """ - # Any ack IDs that are under lease management need to have their - # deadline extended immediately. - if self._leaser is not None: - # Explicitly copy the list, as it could be modified by another - # thread. - lease_ids = list(self._leaser.ack_ids) - else: - lease_ids = [] - # Put the request together. 
request = gapic_types.StreamingPullRequest( - modify_deadline_ack_ids=list(lease_ids), - modify_deadline_seconds=[self.ack_deadline] * len(lease_ids), stream_ack_deadline_seconds=stream_ack_deadline_seconds, + modify_deadline_ack_ids=[], + modify_deadline_seconds=[], subscription=self._subscription, client_id=self._client_id, max_outstanding_messages=( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index ab21a1597e93..a8cbfbcdaa9c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1318,8 +1318,8 @@ def test__get_initial_request(): assert isinstance(initial_request, gapic_types.StreamingPullRequest) assert initial_request.subscription == "subscription-name" assert initial_request.stream_ack_deadline_seconds == 123 - assert initial_request.modify_deadline_ack_ids == ["1", "2"] - assert initial_request.modify_deadline_seconds == [10, 10] + assert initial_request.modify_deadline_ack_ids == [] + assert initial_request.modify_deadline_seconds == [] def test__get_initial_request_wo_leaser(): From 0b3e0b89116744842dc764d1b94682b167afe8b6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sat, 16 Jul 2022 13:19:13 -0400 Subject: [PATCH 0850/1197] chore(main): release 2.13.4 (#740) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 3f84564e4df3..8bad0d2d2745 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ 
b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.4](https://github.com/googleapis/python-pubsub/compare/v2.13.3...v2.13.4) (2022-07-15) + + +### Bug Fixes + +* Remove bidi modacks on StreamingPull initial request ([#738](https://github.com/googleapis/python-pubsub/issues/738)) ([1e7d469](https://github.com/googleapis/python-pubsub/commit/1e7d46901c4472a3534980621e88d81aa2e50760)) + ## [2.13.3](https://github.com/googleapis/python-pubsub/compare/v2.13.2...v2.13.3) (2022-07-13) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 491f19840e54..fbdfbb68ef2c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.3" +version = "2.13.4" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 2a7814bf1562cbf32ef805358aa4a068397e0c76 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 25 Jul 2022 22:23:27 -0400 Subject: [PATCH 0851/1197] chore(python): fix prerelease session [autoapprove] (#745) Source-Link: https://github.com/googleapis/synthtool/commit/1b9ad7694e44ddb4d9844df55ff7af77b51a4435 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- packages/google-cloud-pubsub/noxfile.py | 33 ++++++++++--------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 1ce608523524..0eb02fda4c09 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e7bb19d47c13839fe8c147e50e02e8b6cf5da8edd1af8b82208cd6f66cc2829c -# created: 2022-07-05T18:31:20.838186805Z + digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 +# created: 2022-07-25T16:02:49.174178716Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 728c02f9a97d..cd9590f8adce 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -385,7 +385,8 @@ def prerelease_deps(session): # Install all dependencies session.install("-e", ".[all, tests, tracing]") - session.install(*UNIT_TEST_STANDARD_DEPENDENCIES) + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) system_deps_all = ( SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES @@ -414,12 +415,6 @@ def prerelease_deps(session): session.install(*constraints_deps) - if os.path.exists("samples/snippets/requirements.txt"): - session.install("-r", "samples/snippets/requirements.txt") - - if os.path.exists("samples/snippets/requirements-test.txt"): - session.install("-r", "samples/snippets/requirements-test.txt") - prerel_deps = [ "protobuf", # dependency of grpc @@ -456,11 +451,19 @@ def prerelease_deps(session): system_test_folder_path = os.path.join("tests", "system") # Only run system tests if found. - if os.path.exists(system_test_path) or os.path.exists(system_test_folder_path): - session.run("py.test", "tests/system") - - snippets_test_path = os.path.join("samples", "snippets") - - # Only run samples tests if found. 
- if os.path.exists(snippets_test_path): - session.run("py.test", "samples/snippets") + if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) From 820908d1703422edd50ad72f94a2a1cd011387ec Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 26 Jul 2022 16:16:46 -0400 Subject: [PATCH 0852/1197] chore: run the code generator (#743) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../pubsub_v1/services/publisher/client.py | 1 + .../services/publisher/transports/base.py | 16 +++-- .../services/publisher/transports/grpc.py | 2 + .../publisher/transports/grpc_asyncio.py | 2 + .../services/schema_service/client.py | 1 + .../schema_service/transports/base.py | 16 +++-- .../schema_service/transports/grpc.py | 2 + .../schema_service/transports/grpc_asyncio.py | 2 + .../pubsub_v1/services/subscriber/client.py | 1 + .../services/subscriber/transports/base.py | 16 +++-- .../services/subscriber/transports/grpc.py | 2 + .../subscriber/transports/grpc_asyncio.py | 2 + packages/google-cloud-pubsub/owlbot.py | 16 ++++- .../unit/gapic/pubsub_v1/test_publisher.py | 60 ++++++++++++++++++- .../gapic/pubsub_v1/test_schema_service.py | 60 ++++++++++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 60 ++++++++++++++++++- 16 files changed, 240 insertions(+), 19 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 5dc77d2a23f6..f383bf4487a8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -488,6 +488,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_topic( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 6a14ce03c36a..11c085ffa7c0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -87,11 +88,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -112,6 +108,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -124,6 +125,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 839b78d8d528..0d41c36f2a13 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 878a9e3471f5..e236c165cae7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 18b7ba632ca4..1d95cce4239b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -432,6 +432,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_schema( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index ff1c871438fd..e2b5ad6608d4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -61,6 +61,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -88,11 +89,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -113,6 +109,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. 
+ if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -125,6 +126,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index e8c8d811e217..3fa0a5b12749 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -157,6 +158,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index c182bfe8c020..fb89c89a2173 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -107,6 +107,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -202,6 +203,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index c0af49577c39..b70d6e38938d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -500,6 +500,7 @@ def __init__( quota_project_id=client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, + api_audience=client_options.api_audience, ) def create_subscription( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index eec697f16979..0d815e06890a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -60,6 +60,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, **kwargs, ) -> None: """Instantiate the transport. @@ -87,11 +88,6 @@ def __init__( be used for service account credentials. """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. @@ -112,6 +108,11 @@ def __init__( credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) # If the credentials are service account credentials, then always try to use self signed JWT. if ( @@ -124,6 +125,11 @@ def __init__( # Save the credentials. self._credentials = credentials + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index d9ebcd3a6241..5954b6403343 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -64,6 +64,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. @@ -159,6 +160,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index ce2af7afa781..778e49c5dc8e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -109,6 +109,7 @@ def __init__( quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, ) -> None: """Instantiate the transport. 
@@ -204,6 +205,7 @@ def __init__( quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, ) if not self._grpc_channel: diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index bc2f8fb17f86..0deb2c006358 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -175,7 +175,7 @@ # Silence deprecation warnings in pull() method flattened parameter tests. s.replace( library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", - "import mock", + "import os", "\g<0>\nimport warnings", ) @@ -341,6 +341,20 @@ "pip install google-pubsub", "pip install google-cloud-pubsub", ) + + # This line is required to move the generated code from the `owl-bot-staging` folder + # to the destination folder `google/pubsub` + s.move( + library, + excludes=[ + "docs/**/*", + "nox.py", + "README.rst", + "setup.py", + f"google/cloud/pubsub_{library.name}/__init__.py", + f"google/cloud/pubsub_{library.name}/types.py", + ], + ) s.remove_staging_dirs() # ---------------------------------------------------------------------------- diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index c5ab354d68fb..a372c7e4d9ef 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -216,6 +222,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -233,6 +240,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -250,6 +258,7 @@ def test_publisher_client_client_options(client_class, transport_class, transpor quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -279,6 +288,25 @@ def test_publisher_client_client_options(client_class, transport_class, transpor quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + 
api_audience="https://language.googleapis.com", ) @@ -344,6 +372,7 @@ def test_publisher_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -378,6 +407,7 @@ def test_publisher_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -400,6 +430,7 @@ def test_publisher_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -510,6 +541,7 @@ def test_publisher_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -543,6 +575,7 @@ def test_publisher_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -561,6 +594,7 @@ def test_publisher_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -594,6 +628,7 @@ def test_publisher_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -3425,6 +3460,28 @@ def test_publisher_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + ], +) +def test_publisher_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -4444,4 +4501,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 852e4ef3b475..1d3a6ec262d2 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -14,7 +14,13 @@ # limitations under the License. 
# import os -import mock + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock import grpc from grpc.experimental import aio @@ -222,6 +228,7 @@ def test_schema_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -239,6 +246,7 @@ def test_schema_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -256,6 +264,7 @@ def test_schema_service_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -285,6 +294,25 @@ def test_schema_service_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -352,6 +380,7 @@ def test_schema_service_client_mtls_env_auto( quota_project_id=None, 
client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -386,6 +415,7 @@ def test_schema_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -408,6 +438,7 @@ def test_schema_service_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -522,6 +553,7 @@ def test_schema_service_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -560,6 +592,7 @@ def test_schema_service_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -580,6 +613,7 @@ def test_schema_service_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -618,6 +652,7 @@ def test_schema_service_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -2402,6 +2437,28 @@ def test_schema_service_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + ], +) +def test_schema_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -3384,4 +3441,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index b4f9b3882a8c..273c8edd3fc0 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -14,9 +14,15 @@ # limitations under the License. 
# import os -import mock import warnings +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock + import grpc from grpc.experimental import aio import math @@ -220,6 +226,7 @@ def test_subscriber_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -237,6 +244,7 @@ def test_subscriber_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -254,6 +262,7 @@ def test_subscriber_client_client_options( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -283,6 +292,25 @@ def test_subscriber_client_client_options( quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", ) @@ -348,6 +376,7 @@ def test_subscriber_client_mtls_env_auto( quota_project_id=None, 
client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -382,6 +411,7 @@ def test_subscriber_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -404,6 +434,7 @@ def test_subscriber_client_mtls_env_auto( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -514,6 +545,7 @@ def test_subscriber_client_client_options_scopes( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -547,6 +579,7 @@ def test_subscriber_client_client_options_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -565,6 +598,7 @@ def test_subscriber_client_client_options_from_dict(): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) @@ -598,6 +632,7 @@ def test_subscriber_client_create_channel_credentials_file( quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) # test that the credentials from file are saved and used as the credentials. 
@@ -4760,6 +4795,28 @@ def test_subscriber_transport_auth_adc(transport_class): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + ], +) +def test_subscriber_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + @pytest.mark.parametrize( "transport_class,grpc_helpers", [ @@ -5779,4 +5836,5 @@ def test_api_key_credentials(client_class, transport_class): quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, always_use_jwt_access=True, + api_audience=None, ) From 878bb2dc19b878f4ffb959194b5000fb8c1c4793 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 2 Aug 2022 14:59:01 +0200 Subject: [PATCH 0853/1197] chore(deps): update all dependencies (#749) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * revert Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-pubsub/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 41002ca45f07..6655f4cf1a31 100644 
--- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.1.2 pytest==7.1.2 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==1.28.0 +google-cloud-bigquery==3.3.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f684cca8df60..22bd8ac4836b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.0 -avro==1.11.0 +google-cloud-pubsub==2.13.4 +avro==1.11.1 From 81344e1bffce4e606c0934ecad42db8bdc707531 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 9 Aug 2022 13:30:40 +0200 Subject: [PATCH 0854/1197] chore(deps): update all dependencies (#755) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * revert * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Anthonios Partheniou Co-authored-by: Owl Bot --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 6655f4cf1a31..08ebc306263b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.1.2 pytest==7.1.2 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.3.0 +google-cloud-bigquery==3.3.1 From c0330d8612c1fb10828da12a280b366b77b1fcf7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Aug 2022 
21:02:05 -0400 Subject: [PATCH 0855/1197] chore(deps): update actions/setup-python action to v4 [autoapprove] (#756) Source-Link: https://github.com/googleapis/synthtool/commit/8e55b327bae44b6640c7ab4be91df85fc4d6fe8a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.github/workflows/docs.yml | 4 ++-- packages/google-cloud-pubsub/.github/workflows/lint.yml | 2 +- packages/google-cloud-pubsub/.github/workflows/unittest.yml | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 0eb02fda4c09..c701359fc58c 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9db98b055a7f8bd82351238ccaacfd3cda58cdf73012ab58b8da146368330021 -# created: 2022-07-25T16:02:49.174178716Z + digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 +# created: 2022-08-09T15:58:56.463048506Z diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index b46d7305d8cf..7092a139aed3 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox @@ -26,7 +26,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index f512a4960beb..d2aee5b7d8ec 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -10,7 +10,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 5531b0141297..87ade4d54362 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -13,7 +13,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ 
matrix.python }} - name: Install nox @@ -39,7 +39,7 @@ jobs: - name: Checkout uses: actions/checkout@v3 - name: Setup Python - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install coverage From b1398ff8c5245962906372523c6a05a7012b3960 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 10 Aug 2022 00:45:18 -0500 Subject: [PATCH 0856/1197] docs: reorganize sphinx structure (#751) * docs: reorganize sphinx structure * docs: update directory --- packages/google-cloud-pubsub/docs/index.rst | 8 ++++---- .../docs/{ => pubsub}/publisher/api/client.rst | 0 .../docs/{ => pubsub}/publisher/api/futures.rst | 0 .../docs/{ => pubsub}/publisher/api/pagers.rst | 0 .../docs/{ => pubsub}/publisher/index.rst | 0 .../docs/{ => pubsub}/subscriber/api/client.rst | 0 .../docs/{ => pubsub}/subscriber/api/futures.rst | 0 .../docs/{ => pubsub}/subscriber/api/message.rst | 0 .../docs/{ => pubsub}/subscriber/api/pagers.rst | 0 .../docs/{ => pubsub}/subscriber/api/scheduler.rst | 0 .../docs/{ => pubsub}/subscriber/index.rst | 0 packages/google-cloud-pubsub/docs/{ => pubsub}/types.rst | 0 12 files changed, 4 insertions(+), 4 deletions(-) rename packages/google-cloud-pubsub/docs/{ => pubsub}/publisher/api/client.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/publisher/api/futures.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/publisher/api/pagers.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/publisher/index.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/subscriber/api/client.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/subscriber/api/futures.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/subscriber/api/message.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/subscriber/api/pagers.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/subscriber/api/scheduler.rst 
(100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/subscriber/index.rst (100%) rename packages/google-cloud-pubsub/docs/{ => pubsub}/types.rst (100%) diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst index 06b09605f157..6b3e1583bd23 100644 --- a/packages/google-cloud-pubsub/docs/index.rst +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -13,11 +13,11 @@ API Documentation across the documentation. .. toctree:: - :maxdepth: 3 + :maxdepth: 4 - Publisher Client - Subscriber Client - Types + Publisher Client + Subscriber Client + Types Migration Guide diff --git a/packages/google-cloud-pubsub/docs/publisher/api/client.rst b/packages/google-cloud-pubsub/docs/pubsub/publisher/api/client.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/publisher/api/client.rst rename to packages/google-cloud-pubsub/docs/pubsub/publisher/api/client.rst diff --git a/packages/google-cloud-pubsub/docs/publisher/api/futures.rst b/packages/google-cloud-pubsub/docs/pubsub/publisher/api/futures.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/publisher/api/futures.rst rename to packages/google-cloud-pubsub/docs/pubsub/publisher/api/futures.rst diff --git a/packages/google-cloud-pubsub/docs/publisher/api/pagers.rst b/packages/google-cloud-pubsub/docs/pubsub/publisher/api/pagers.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/publisher/api/pagers.rst rename to packages/google-cloud-pubsub/docs/pubsub/publisher/api/pagers.rst diff --git a/packages/google-cloud-pubsub/docs/publisher/index.rst b/packages/google-cloud-pubsub/docs/pubsub/publisher/index.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/publisher/index.rst rename to packages/google-cloud-pubsub/docs/pubsub/publisher/index.rst diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/client.rst b/packages/google-cloud-pubsub/docs/pubsub/subscriber/api/client.rst similarity index 
100% rename from packages/google-cloud-pubsub/docs/subscriber/api/client.rst rename to packages/google-cloud-pubsub/docs/pubsub/subscriber/api/client.rst diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/futures.rst b/packages/google-cloud-pubsub/docs/pubsub/subscriber/api/futures.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/subscriber/api/futures.rst rename to packages/google-cloud-pubsub/docs/pubsub/subscriber/api/futures.rst diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/message.rst b/packages/google-cloud-pubsub/docs/pubsub/subscriber/api/message.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/subscriber/api/message.rst rename to packages/google-cloud-pubsub/docs/pubsub/subscriber/api/message.rst diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst b/packages/google-cloud-pubsub/docs/pubsub/subscriber/api/pagers.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/subscriber/api/pagers.rst rename to packages/google-cloud-pubsub/docs/pubsub/subscriber/api/pagers.rst diff --git a/packages/google-cloud-pubsub/docs/subscriber/api/scheduler.rst b/packages/google-cloud-pubsub/docs/pubsub/subscriber/api/scheduler.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/subscriber/api/scheduler.rst rename to packages/google-cloud-pubsub/docs/pubsub/subscriber/api/scheduler.rst diff --git a/packages/google-cloud-pubsub/docs/subscriber/index.rst b/packages/google-cloud-pubsub/docs/pubsub/subscriber/index.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/subscriber/index.rst rename to packages/google-cloud-pubsub/docs/pubsub/subscriber/index.rst diff --git a/packages/google-cloud-pubsub/docs/types.rst b/packages/google-cloud-pubsub/docs/pubsub/types.rst similarity index 100% rename from packages/google-cloud-pubsub/docs/types.rst rename to packages/google-cloud-pubsub/docs/pubsub/types.rst From 
12b31f34fc78a91dd72451e115c1ee06a750f29e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 10 Aug 2022 16:16:09 -0400 Subject: [PATCH 0857/1197] chore(main): release 2.13.5 (#757) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 8bad0d2d2745..36408ed81b33 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.5](https://github.com/googleapis/python-pubsub/compare/v2.13.4...v2.13.5) (2022-08-10) + + +### Documentation + +* reorganize sphinx structure ([#751](https://github.com/googleapis/python-pubsub/issues/751)) ([b6de574](https://github.com/googleapis/python-pubsub/commit/b6de57458a1976a068dd229208b9b678a9d3f866)) + ## [2.13.4](https://github.com/googleapis/python-pubsub/compare/v2.13.3...v2.13.4) (2022-07-15) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index fbdfbb68ef2c..12533438ea90 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.4" +version = "2.13.5" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 80cd9844b4f8866187b056af19be611580f86eb8 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 11 Aug 2022 11:55:03 -0400 Subject: [PATCH 0858/1197] fix: set stream_ack_deadline to max_duration_per_lease_extension or 60 s, set ack_deadline to 
min_duration_per_lease_extension or 10 s (#760) --- .../_protocol/streaming_pull_manager.py | 50 +++++++++++-- .../subscriber/test_streaming_pull_manager.py | 70 ++++++++++++++++--- 2 files changed, 106 insertions(+), 14 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index f909c8eec10f..22d3b73d54ef 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -20,7 +20,7 @@ import logging import threading import typing -from typing import Any, Dict, Callable, Iterable, List, Optional, Tuple, Union +from typing import Any, Dict, Callable, Iterable, List, Optional, Tuple import uuid import grpc # type: ignore @@ -74,6 +74,15 @@ a subscription. We do this to reduce premature ack expiration. 
""" +_DEFAULT_STREAM_ACK_DEADLINE: float = 60 +"""The default stream ack deadline in seconds.""" + +_MAX_STREAM_ACK_DEADLINE: float = 600 +"""The maximum stream ack deadline in seconds.""" + +_MIN_STREAM_ACK_DEADLINE: float = 10 +"""The minimum stream ack deadline in seconds.""" + _EXACTLY_ONCE_DELIVERY_TEMPORARY_RETRY_ERRORS = { code_pb2.DEADLINE_EXCEEDED, code_pb2.RESOURCE_EXHAUSTED, @@ -270,7 +279,36 @@ def __init__( self._await_callbacks_on_shutdown = await_callbacks_on_shutdown self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 - self._ack_deadline: Union[int, float] = histogram.MIN_ACK_DEADLINE + + # If max_duration_per_lease_extension is the default + # we set the stream_ack_deadline to the default of 60 + if self._flow_control.max_duration_per_lease_extension == 0: + self._stream_ack_deadline = _DEFAULT_STREAM_ACK_DEADLINE + # We will not be able to extend more than the default minimum + elif ( + self._flow_control.max_duration_per_lease_extension + < _MIN_STREAM_ACK_DEADLINE + ): + self._stream_ack_deadline = _MIN_STREAM_ACK_DEADLINE + # Will not be able to extend past the max + elif ( + self._flow_control.max_duration_per_lease_extension + > _MAX_STREAM_ACK_DEADLINE + ): + self._stream_ack_deadline = _MAX_STREAM_ACK_DEADLINE + else: + self._stream_ack_deadline = ( + self._flow_control.max_duration_per_lease_extension + ) + + self._ack_deadline = max( + min( + self._flow_control.min_duration_per_lease_extension, + histogram.MAX_ACK_DEADLINE, + ), + histogram.MIN_ACK_DEADLINE, + ) + self._rpc: Optional[bidi.ResumableBidiRpc] = None self._callback: Optional[functools.partial] = None self._closing = threading.Lock() @@ -741,10 +779,10 @@ def heartbeat(self) -> bool: if send_new_ack_deadline: request = gapic_types.StreamingPullRequest( - stream_ack_deadline_seconds=self.ack_deadline + stream_ack_deadline_seconds=self._stream_ack_deadline ) _LOGGER.debug( - "Sending new ack_deadline of %d seconds.", self.ack_deadline + "Sending new 
ack_deadline of %d seconds.", self._stream_ack_deadline ) else: request = gapic_types.StreamingPullRequest() @@ -796,7 +834,7 @@ def open( _LOGGER.debug( "Creating a stream, default ACK deadline set to {} seconds.".format( - stream_ack_deadline_seconds + self._stream_ack_deadline ) ) @@ -928,6 +966,8 @@ def _get_initial_request( suitable for any other purpose). """ # Put the request together. + # We need to set streaming ack deadline, but it's not useful since we'll modack to send receipt + # anyway. Set to some big-ish value in case we modack late. request = gapic_types.StreamingPullRequest( stream_ack_deadline_seconds=stream_ack_deadline_seconds, modify_deadline_ack_ids=[], diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index a8cbfbcdaa9c..f44235335824 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -107,16 +107,61 @@ def test_constructor_and_default_state(): assert manager._client_id is not None -def test_constructor_with_options(): +def test_constructor_with_default_options(): + flow_control_ = types.FlowControl() manager = streaming_pull_manager.StreamingPullManager( mock.sentinel.client, mock.sentinel.subscription, - flow_control=mock.sentinel.flow_control, + flow_control=flow_control_, scheduler=mock.sentinel.scheduler, ) - assert manager.flow_control == mock.sentinel.flow_control + assert manager.flow_control == flow_control_ assert manager._scheduler == mock.sentinel.scheduler + assert manager._ack_deadline == 10 + assert manager._stream_ack_deadline == 60 + + +def test_constructor_with_min_and_max_duration_per_lease_extension_(): + flow_control_ = types.FlowControl( + min_duration_per_lease_extension=15, max_duration_per_lease_extension=20 + ) + 
manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=flow_control_, + scheduler=mock.sentinel.scheduler, + ) + assert manager._ack_deadline == 15 + assert manager._stream_ack_deadline == 20 + + +def test_constructor_with_min_duration_per_lease_extension_too_low(): + flow_control_ = types.FlowControl( + min_duration_per_lease_extension=9, max_duration_per_lease_extension=9 + ) + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=flow_control_, + scheduler=mock.sentinel.scheduler, + ) + assert manager._ack_deadline == 10 + assert manager._stream_ack_deadline == 10 + + +def test_constructor_with_max_duration_per_lease_extension_too_high(): + flow_control_ = types.FlowControl( + max_duration_per_lease_extension=601, min_duration_per_lease_extension=601 + ) + manager = streaming_pull_manager.StreamingPullManager( + mock.sentinel.client, + mock.sentinel.subscription, + flow_control=flow_control_, + scheduler=mock.sentinel.scheduler, + ) + assert manager._ack_deadline == 600 + assert manager._stream_ack_deadline == 600 def make_manager(**kwargs): @@ -164,9 +209,13 @@ def test__obtain_ack_deadline_no_custom_flow_control_setting(): manager._flow_control = types.FlowControl( min_duration_per_lease_extension=0, max_duration_per_lease_extension=0 ) + assert manager._stream_ack_deadline == 60 + assert manager._ack_deadline == 10 + assert manager._obtain_ack_deadline(maybe_update=False) == 10 deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MIN_ACK_DEADLINE + assert manager._stream_ack_deadline == 60 # When we get some historical data, the deadline is adjusted. 
manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 2) @@ -186,11 +235,14 @@ def test__obtain_ack_deadline_with_max_duration_per_lease_extension(): manager._flow_control = types.FlowControl( max_duration_per_lease_extension=histogram.MIN_ACK_DEADLINE + 1 ) + assert manager._ack_deadline == 10 + manager.ack_histogram.add(histogram.MIN_ACK_DEADLINE * 3) # make p99 value large # The deadline configured in flow control should prevail. deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MIN_ACK_DEADLINE + 1 + assert manager._stream_ack_deadline == 60 def test__obtain_ack_deadline_with_min_duration_per_lease_extension(): @@ -292,12 +344,12 @@ def test__obtain_ack_deadline_no_value_update(): def test_client_id(): manager1 = make_manager() - request1 = manager1._get_initial_request(stream_ack_deadline_seconds=10) + request1 = manager1._get_initial_request(stream_ack_deadline_seconds=60) client_id_1 = request1.client_id assert client_id_1 manager2 = make_manager() - request2 = manager2._get_initial_request(stream_ack_deadline_seconds=10) + request2 = manager2._get_initial_request(stream_ack_deadline_seconds=60) client_id_2 = request2.client_id assert client_id_2 @@ -308,7 +360,7 @@ def test_streaming_flow_control(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) - request = manager._get_initial_request(stream_ack_deadline_seconds=10) + request = manager._get_initial_request(stream_ack_deadline_seconds=60) assert request.max_outstanding_messages == 10 assert request.max_outstanding_bytes == 1000 @@ -318,7 +370,7 @@ def test_streaming_flow_control_use_legacy_flow_control(): flow_control=types.FlowControl(max_messages=10, max_bytes=1000), use_legacy_flow_control=True, ) - request = manager._get_initial_request(stream_ack_deadline_seconds=10) + request = manager._get_initial_request(stream_ack_deadline_seconds=60) assert request.max_outstanding_messages == 0 assert request.max_outstanding_bytes == 
0 @@ -1046,12 +1098,12 @@ def test_heartbeat_stream_ack_deadline_seconds(caplog): result = manager.heartbeat() manager._rpc.send.assert_called_once_with( - gapic_types.StreamingPullRequest(stream_ack_deadline_seconds=10) + gapic_types.StreamingPullRequest(stream_ack_deadline_seconds=60) ) assert result # Set to false after a send is initiated. assert not manager._send_new_ack_deadline - assert "Sending new ack_deadline of 10 seconds." in caplog.text + assert "Sending new ack_deadline of 60 seconds." in caplog.text @mock.patch("google.api_core.bidi.ResumableBidiRpc", autospec=True) From 57d9f94cadfc1eeac3e49878cc848f67a3404afc Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 11 Aug 2022 15:26:15 -0400 Subject: [PATCH 0859/1197] fix(deps): allow protobuf < 5.0.0 (#762) fix(deps): require proto-plus >= 1.22.0 --- packages/google-cloud-pubsub/setup.py | 4 ++-- packages/google-cloud-pubsub/testing/constraints-3.7.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 12533438ea90..015ae362fd04 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -31,8 +31,8 @@ dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", - "proto-plus >= 1.15.0, <2.0.0dev", - "protobuf >= 3.19.0, <4.0.0dev", + "proto-plus >= 1.22.0, <2.0.0dev", + "protobuf >= 3.19.0, <5.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "grpcio-status >= 1.16.0", ] diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 38e0f719db00..07498af4ba48 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -7,6 +7,6 @@ grpcio==1.38.1 
google-api-core==1.32.0 libcst==0.3.10 -proto-plus==1.15.0 +proto-plus==1.22.0 grpc-google-iam-v1==0.12.4 protobuf==3.19.0 From bccd53bb5cce886df6e3ccea533507125279bd5d Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 11 Aug 2022 17:26:02 -0400 Subject: [PATCH 0860/1197] fix: Update stream_ack_deadline with ack_deadline (#763) --- .../subscriber/_protocol/streaming_pull_manager.py | 7 +++++-- .../pubsub_v1/subscriber/test_streaming_pull_manager.py | 7 ++++++- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 22d3b73d54ef..932699261b0f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -451,7 +451,10 @@ def _obtain_ack_deadline(self, maybe_update: bool) -> float: self._ack_deadline = max( self._ack_deadline, _MIN_ACK_DEADLINE_SECS_WHEN_EXACTLY_ONCE_ENABLED ) - + # If we have updated the ack_deadline and it is longer than the stream_ack_deadline + # set the stream_ack_deadline to the new ack_deadline. 
+ if self._ack_deadline > self._stream_ack_deadline: + self._stream_ack_deadline = self._ack_deadline return self._ack_deadline @property @@ -818,7 +821,7 @@ def open( ) # Create the RPC - stream_ack_deadline_seconds = self.ack_deadline + stream_ack_deadline_seconds = self._stream_ack_deadline get_initial_request = functools.partial( self._get_initial_request, stream_ack_deadline_seconds diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index f44235335824..deb476eb1a2a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -257,6 +257,7 @@ def test__obtain_ack_deadline_with_min_duration_per_lease_extension(): # The deadline configured in flow control should prevail. deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MAX_ACK_DEADLINE + assert manager._stream_ack_deadline == histogram.MAX_ACK_DEADLINE def test__obtain_ack_deadline_with_max_duration_per_lease_extension_too_low(): @@ -283,6 +284,7 @@ def test__obtain_ack_deadline_with_min_duration_per_lease_extension_too_high(): # The deadline configured in flow control should be adjusted to the maximum allowed. deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MAX_ACK_DEADLINE + assert manager._stream_ack_deadline == histogram.MAX_ACK_DEADLINE def test__obtain_ack_deadline_with_exactly_once_enabled(): @@ -299,6 +301,7 @@ def test__obtain_ack_deadline_with_exactly_once_enabled(): # Since the 60-second min ack_deadline value for exactly_once subscriptions # seconds is higher than the histogram value, the deadline should be 60 sec. 
assert deadline == 60 + assert manager._stream_ack_deadline == 60 def test__obtain_ack_deadline_with_min_duration_per_lease_extension_with_exactly_once_enabled(): @@ -316,6 +319,7 @@ def test__obtain_ack_deadline_with_min_duration_per_lease_extension_with_exactly # User-defined custom min ack_deadline value takes precedence over # exactly_once default of 60 seconds. assert deadline == histogram.MAX_ACK_DEADLINE + assert manager._stream_ack_deadline == histogram.MAX_ACK_DEADLINE def test__obtain_ack_deadline_no_value_update(): @@ -1148,7 +1152,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi ) initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] assert initial_request_arg.func == manager._get_initial_request - assert initial_request_arg.args[0] == 18 + assert initial_request_arg.args[0] == 60 assert not manager._client.get_subscription.called resumable_bidi_rpc.return_value.add_done_callback.assert_called_once_with( @@ -1833,6 +1837,7 @@ def test__on_response_disable_exactly_once(): # exactly_once minimum since exactly_once has been disabled. 
deadline = manager._obtain_ack_deadline(maybe_update=True) assert deadline == histogram.MIN_ACK_DEADLINE + assert manager._stream_ack_deadline == 60 def test__on_response_exactly_once_immediate_modacks_fail(): From 8d196b9b1f025fca13dedcbb812b28086ceee7cf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 11 Aug 2022 20:58:08 -0400 Subject: [PATCH 0861/1197] chore(main): release 2.13.6 (#761) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 10 ++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 36408ed81b33..63ac7ab9af82 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,16 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.6](https://github.com/googleapis/python-pubsub/compare/v2.13.5...v2.13.6) (2022-08-11) + + +### Bug Fixes + +* **deps:** allow protobuf < 5.0.0 ([#762](https://github.com/googleapis/python-pubsub/issues/762)) ([260bd18](https://github.com/googleapis/python-pubsub/commit/260bd183ffe19992be9a1c1d298438c1f44d3fa9)) +* **deps:** require proto-plus >= 1.22.0 ([260bd18](https://github.com/googleapis/python-pubsub/commit/260bd183ffe19992be9a1c1d298438c1f44d3fa9)) +* set stream_ack_deadline to max_duration_per_lease_extension or 60 s, set ack_deadline to min_duration_per_lease_extension or 10 s ([#760](https://github.com/googleapis/python-pubsub/issues/760)) ([4444129](https://github.com/googleapis/python-pubsub/commit/4444129b28a19296752e865b73827b78e99adea5)) +* Update stream_ack_deadline with ack_deadline ([#763](https://github.com/googleapis/python-pubsub/issues/763)) 
([e600ad8](https://github.com/googleapis/python-pubsub/commit/e600ad8228930445765ffa0c45500a7779e25817)) + ## [2.13.5](https://github.com/googleapis/python-pubsub/compare/v2.13.4...v2.13.5) (2022-08-10) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 015ae362fd04..4eb55ed31eb5 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.5" +version = "2.13.6" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From cc878db7ecfcad36fb9a8b418b89aca0c37c3a49 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 12 Aug 2022 06:32:07 +0200 Subject: [PATCH 0862/1197] chore(deps): update dependency google-cloud-pubsub to v2.13.6 (#759) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 22bd8ac4836b..6291462f2bfb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.4 +google-cloud-pubsub==2.13.6 avro==1.11.1 From fba3e259cb45abff0b8b21934d57c035ef87dfba Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 17 Aug 2022 11:42:57 -0400 Subject: [PATCH 0863/1197] chore: use gapic-generator-python 1.2.0 (#764) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 1.2.0 PiperOrigin-RevId: 467286830 Source-Link: https://github.com/googleapis/googleapis/commit/e6e875a456c046e94eeb5a76211daa046a8e72c9 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/0295ea14d9cd4d47ddb23b9ebd39a31e2035e28f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDI5NWVhMTRkOWNkNGQ0N2RkYjIzYjllYmQzOWEzMWUyMDM1ZTI4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove replacements in owlbot.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../services/publisher/async_client.py | 13 ++++++ .../pubsub_v1/services/publisher/client.py | 34 ++++++--------- .../services/schema_service/async_client.py | 13 ++++++ .../services/schema_service/client.py | 14 +++++++ .../services/subscriber/async_client.py | 13 ++++++ .../pubsub_v1/services/subscriber/client.py | 41 ++++++------------- packages/google-cloud-pubsub/owlbot.py | 25 ----------- 7 files changed, 77 insertions(+), 76 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index fbe6216f0a6c..08c490f2007b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1213,7 +1213,9 @@ async def set_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: { "bindings": [ @@ -1238,8 +1240,11 @@ async def set_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -1254,6 +1259,7 @@ async def set_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. 
@@ -1328,8 +1334,11 @@ async def get_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -1353,8 +1362,11 @@ async def get_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -1369,6 +1381,7 @@ async def get_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index f383bf4487a8..3721ce92ebb9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -67,7 +67,6 @@ def get_transport_class( ) -> Type[PublisherTransport]: """Returns an appropriate transport class. - Args: label: The name of the desired transport. If none is provided, then the first transport in the registry is used. @@ -95,7 +94,6 @@ def _get_default_mtls_endpoint(api_endpoint): Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: api_endpoint (Optional[str]): the api endpoint to convert. Returns: @@ -140,7 +138,6 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials info. - Args: info (dict): The service account private key info. args: Additional arguments to pass to the constructor. @@ -158,7 +155,6 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. - Args: filename (str): The path to the service account private key json file. 
@@ -336,7 +332,6 @@ def get_mtls_endpoint_and_cert_source( More details can be found at https://google.aip.dev/auth/4114. - Args: client_options (google.api_core.client_options.ClientOptions): Custom options for the client. Only the `api_endpoint` and `client_cert_source` properties may be used @@ -392,7 +387,6 @@ def __init__( ) -> None: """Instantiates the publisher client. - Args: credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These @@ -523,7 +517,6 @@ def sample_create_topic(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.Topic, dict]): The request object. A topic resource. @@ -626,7 +619,6 @@ def sample_update_topic(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): The request object. Request for the UpdateTopic method. @@ -704,7 +696,6 @@ def sample_publish(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.PublishRequest, dict]): The request object. Request for the Publish method. @@ -806,7 +797,6 @@ def sample_get_topic(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): The request object. Request for the GetTopic method. @@ -901,7 +891,6 @@ def sample_list_topics(): for response in page_result: print(response) - Args: request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): The request object. Request for the `ListTopics` method. @@ -1010,7 +999,6 @@ def sample_list_topic_subscriptions(): for response in page_result: print(response) - Args: request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): The request object. Request for the @@ -1125,7 +1113,6 @@ def sample_list_topic_snapshots(): for response in page_result: print(response) - Args: request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): The request object. 
Request for the `ListTopicSnapshots` @@ -1236,7 +1223,6 @@ def sample_delete_topic(): # Make the request client.delete_topic(request=request) - Args: request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): The request object. Request for the `DeleteTopic` @@ -1327,7 +1313,6 @@ def sample_detach_subscription(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): The request object. Request for the DetachSubscription @@ -1401,7 +1386,6 @@ def set_iam_policy( Replaces any existing policy. - Args: request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` @@ -1428,7 +1412,9 @@ def set_iam_policy( based on attributes about the request and/or target resource. - **JSON Example**:: + **JSON Example** + + :: { "bindings": [ @@ -1454,7 +1440,9 @@ def set_iam_policy( ] } - **YAML Example**:: + **YAML Example** + + :: bindings: - members: @@ -1520,7 +1508,6 @@ def get_iam_policy( Returns an empty policy if the function exists and does not have a policy set. - Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` @@ -1547,7 +1534,9 @@ def get_iam_policy( based on attributes about the request and/or target resource. - **JSON Example**:: + **JSON Example** + + :: { "bindings": [ @@ -1573,7 +1562,9 @@ def get_iam_policy( ] } - **YAML Example**:: + **YAML Example** + + :: bindings: - members: @@ -1640,7 +1631,6 @@ def test_iam_permissions( If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. - Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. 
Request message for diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index b0831fe09fa5..71c3ed802041 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -829,7 +829,9 @@ async def set_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: { "bindings": [ @@ -854,8 +856,11 @@ async def set_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -870,6 +875,7 @@ async def set_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -943,8 +949,11 @@ async def get_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -968,8 +977,11 @@ async def get_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -984,6 +996,7 @@ async def get_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 1d95cce4239b..8ca96447aea3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1073,8 +1073,11 @@ def set_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -1098,8 +1101,11 @@ def set_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -1114,6 +1120,7 @@ def set_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -1187,8 +1194,11 @@ def get_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -1212,8 +1222,11 @@ def get_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -1228,6 +1241,7 @@ def get_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index fa88cc64936b..9a19dad1fef6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -2122,7 +2122,9 @@ async def set_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: { "bindings": [ @@ -2147,8 +2149,11 @@ async def set_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -2163,6 +2168,7 @@ async def set_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. @@ -2236,8 +2242,11 @@ async def get_iam_policy( expression that further constrains the role binding based on attributes about the request and/or target resource. + **JSON Example** + :: + { "bindings": [ { @@ -2261,8 +2270,11 @@ async def get_iam_policy( } ] } + **YAML Example** + :: + bindings: - members: - user:mike@example.com @@ -2277,6 +2289,7 @@ async def get_iam_policy( title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + For a description of IAM and its features, see the `IAM developer's guide `__. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index b70d6e38938d..e15afabe1b86 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -77,7 +77,6 @@ def get_transport_class( ) -> Type[SubscriberTransport]: """Returns an appropriate transport class. - Args: label: The name of the desired transport. If none is provided, then the first transport in the registry is used. @@ -107,7 +106,6 @@ def _get_default_mtls_endpoint(api_endpoint): Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: api_endpoint (Optional[str]): the api endpoint to convert. Returns: @@ -152,7 +150,6 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials info. - Args: info (dict): The service account private key info. args: Additional arguments to pass to the constructor. @@ -170,7 +167,6 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. - Args: filename (str): The path to the service account private key json file. @@ -348,7 +344,6 @@ def get_mtls_endpoint_and_cert_source( More details can be found at https://google.aip.dev/auth/4114. - Args: client_options (google.api_core.client_options.ClientOptions): Custom options for the client. Only the `api_endpoint` and `client_cert_source` properties may be used @@ -404,7 +399,6 @@ def __init__( ) -> None: """Instantiates the subscriber client. - Args: credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These @@ -549,7 +543,6 @@ def sample_create_subscription(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.Subscription, dict]): The request object. A subscription resource. @@ -704,7 +697,6 @@ def sample_get_subscription(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): The request object. Request for the GetSubscription @@ -805,7 +797,6 @@ def sample_update_subscription(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): The request object. Request for the UpdateSubscription @@ -882,7 +873,6 @@ def sample_list_subscriptions(): for response in page_result: print(response) - Args: request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): The request object. Request for the `ListSubscriptions` @@ -991,7 +981,6 @@ def sample_delete_subscription(): # Make the request client.delete_subscription(request=request) - Args: request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): The request object. Request for the DeleteSubscription @@ -1086,7 +1075,6 @@ def sample_modify_ack_deadline(): # Make the request client.modify_ack_deadline(request=request) - Args: request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): The request object. Request for the ModifyAckDeadline @@ -1206,7 +1194,6 @@ def sample_acknowledge(): # Make the request client.acknowledge(request=request) - Args: request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): The request object. Request for the Acknowledge method. @@ -1310,7 +1297,6 @@ def sample_pull(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.PullRequest, dict]): The request object. Request for the `Pull` method. 
@@ -1458,7 +1444,6 @@ def request_generator(): for response in stream: print(response) - Args: requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): The request object iterator. Request for the `StreamingPull` @@ -1533,7 +1518,6 @@ def sample_modify_push_config(): # Make the request client.modify_push_config(request=request) - Args: request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): The request object. Request for the ModifyPushConfig @@ -1642,7 +1626,6 @@ def sample_get_snapshot(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): The request object. Request for the GetSnapshot method. @@ -1746,7 +1729,6 @@ def sample_list_snapshots(): for response in page_result: print(response) - Args: request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): The request object. Request for the `ListSnapshots` @@ -1873,7 +1855,6 @@ def sample_create_snapshot(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): The request object. Request for the `CreateSnapshot` @@ -2000,7 +1981,6 @@ def sample_update_snapshot(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): The request object. Request for the UpdateSnapshot @@ -2088,7 +2068,6 @@ def sample_delete_snapshot(): # Make the request client.delete_snapshot(request=request) - Args: request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): The request object. Request for the `DeleteSnapshot` @@ -2182,7 +2161,6 @@ def sample_seek(): # Handle the response print(response) - Args: request (Union[google.pubsub_v1.types.SeekRequest, dict]): The request object. Request for the `Seek` method. @@ -2252,7 +2230,6 @@ def set_iam_policy( Replaces any existing policy. - Args: request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. 
Request message for `SetIamPolicy` @@ -2278,7 +2255,9 @@ def set_iam_policy( based on attributes about the request and/or target resource. - **JSON Example**:: + **JSON Example** + + :: { "bindings": [ @@ -2304,7 +2283,9 @@ def set_iam_policy( ] } - **YAML Example**:: + **YAML Example** + + :: bindings: - members: @@ -2370,7 +2351,6 @@ def get_iam_policy( Returns an empty policy if the function exists and does not have a policy set. - Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` @@ -2396,7 +2376,9 @@ def get_iam_policy( based on attributes about the request and/or target resource. - **JSON Example**:: + **JSON Example** + + :: { "bindings": [ @@ -2422,7 +2404,9 @@ def get_iam_policy( ] } - **YAML Example**:: + **YAML Example** + + :: bindings: - members: @@ -2489,7 +2473,6 @@ def test_iam_permissions( If the function does not exist, this will return an empty set of permissions, not a NOT_FOUND error. - Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 0deb2c006358..888d156c1e90 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -238,31 +238,6 @@ "client_library_version=\g<1>'google-cloud-pubsub'", ) - # Docstrings of *_iam_policy() methods are formatted poorly and must be fixed - # in order to avoid docstring format warnings in docs. 
- s.replace( - library / f"google/pubsub_{library.name}/services/*er/client.py", - r"(\s+)Args:", - "\n\g<1>Args:", - ) - s.replace( - library / f"google/pubsub_{library.name}/services/*er/client.py", - r"(\s+)\*\*JSON Example\*\*\s+::", - "\n\g<1>**JSON Example**::\n", - ) - - s.replace( - library / f"google/pubsub_{library.name}/services/*er/client.py", - r"(\s+)\*\*YAML Example\*\*\s+::", - "\n\g<1>**YAML Example**::\n", - ) - - s.replace( - library / f"google/pubsub_{library.name}/services/*er/client.py", - r"(\s+)For a description of IAM and its features, see", - "\n\g<0>", - ) - # Allow timeout to be an instance of google.api_core.timeout.* s.replace( library / f"google/pubsub_{library.name}/types/__init__.py", From 8deadc0d09c9630163cc008c8ee6dde175ff8a0b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 19 Aug 2022 18:32:11 +0200 Subject: [PATCH 0864/1197] chore(deps): update dependency google-cloud-bigquery to v3.3.2 (#765) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 08ebc306263b..9ab4702569e8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.1.2 pytest==7.1.2 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.3.1 +google-cloud-bigquery==3.3.2 From 6e499d3affe06e8024fa24811a30426f595903bd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 24 Aug 2022 19:01:29 -0400 Subject: [PATCH 0865/1197] chore(python): exclude path in renovate.json [autoapprove] (#768) Source-Link: https://github.com/googleapis/synthtool/commit/69fabaee9eca28af7ecaa02c86895e606fbbebd6 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/publish-docs.sh | 4 +- .../google-cloud-pubsub/.kokoro/release.sh | 5 +- .../.kokoro/requirements.in | 8 + .../.kokoro/requirements.txt | 464 ++++++++++++++++++ packages/google-cloud-pubsub/renovate.json | 2 +- 6 files changed, 477 insertions(+), 10 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/requirements.in create mode 100644 packages/google-cloud-pubsub/.kokoro/requirements.txt diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index c701359fc58c..c6acdf3f90c4 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c6c965a4bf40c19011b11f87dbc801a66d3a23fbc6704102be064ef31c51f1c3 -# created: 2022-08-09T15:58:56.463048506Z + digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 +# created: 2022-08-24T17:07:22.006876712Z diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index 8acb14e802b0..1c4d62370042 100755 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -21,14 +21,12 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --user --upgrade --quiet nox +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m nox --version # build docs nox -s docs -python3 -m pip install --user gcp-docuploader - # create metadata python3 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) 
\ diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index 5c00cba3e852..40b967043b4f 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -16,12 +16,9 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install gcp-releasetool +python3 -m pip install --require-hashes -r .kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script -# Ensure that we have the latest versions of Twine, Wheel, and Setuptools. -python3 -m pip install --upgrade twine wheel setuptools - # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.in b/packages/google-cloud-pubsub/.kokoro/requirements.in new file mode 100644 index 000000000000..7718391a34d7 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/requirements.in @@ -0,0 +1,8 @@ +gcp-docuploader +gcp-releasetool +importlib-metadata +typing-extensions +twine +wheel +setuptools +nox \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt new file mode 100644 index 000000000000..c4b824f247e3 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -0,0 +1,464 @@ +# +# This file is autogenerated by pip-compile with python 3.10 +# To update, run: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==2.0.0 \ + --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ + --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e + # via nox +attrs==22.1.0 \ + --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ + --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c + # via gcp-releasetool +bleach==5.0.1 \ + 
--hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ + --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c + # via readme-renderer +cachetools==5.2.0 \ + --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ + --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db + # via google-auth +certifi==2022.6.15 \ + --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ + --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 + # via requests +cffi==1.15.1 \ + --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ + --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ + --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ + --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ + --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ + --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ + --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ + --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ + --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ + --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ + --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ + --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ + --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ + --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ + --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ + --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ + 
--hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ + --hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ + --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ + --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ + --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ + --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ + --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ + --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ + --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ + --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ + --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ + --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ + --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ + --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ + --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ + --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ + --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ + --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ + --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ + --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ + --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ + --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ + --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ + --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ + 
--hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ + --hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ + --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ + --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ + --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ + --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ + --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ + --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ + --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ + --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ + --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ + --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ + --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ + --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ + --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ + --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ + --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ + --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ + --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ + --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ + --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ + --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ + --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ + --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 + # via 
cryptography +charset-normalizer==2.1.1 \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f + # via requests +click==8.0.4 \ + --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ + --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb + # via + # gcp-docuploader + # gcp-releasetool +colorlog==6.6.0 \ + --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ + --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e + # via + # gcp-docuploader + # nox +commonmark==0.9.1 \ + --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ + --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 + # via rich +cryptography==37.0.4 \ + --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ + --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ + --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ + --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ + --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ + --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ + --hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ + --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ + --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ + --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ + --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ + --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ + --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ + 
--hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ + --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ + --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ + --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ + --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ + --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ + --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ + --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ + --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 + # via + # gcp-releasetool + # secretstorage +distlib==0.3.5 \ + --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ + --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c + # via virtualenv +docutils==0.19 \ + --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ + --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc + # via readme-renderer +filelock==3.8.0 \ + --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ + --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 + # via virtualenv +gcp-docuploader==0.6.3 \ + --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ + --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b + # via -r requirements.in +gcp-releasetool==1.8.6 \ + --hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ + --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 + # via -r requirements.in +google-api-core==2.8.2 \ + --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ + 
--hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.11.0 \ + --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ + --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb + # via + # gcp-releasetool + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.3.2 \ + --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ + --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a + # via google-cloud-storage +google-cloud-storage==2.5.0 \ + --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ + --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 + # via gcp-docuploader +google-crc32c==1.3.0 \ + --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ + --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ + --hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ + --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ + --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ + --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ + --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ + --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ + --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ + --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ + --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ + --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ + --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ + 
--hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ + --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ + --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ + --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ + --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ + --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ + --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ + --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ + --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ + --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ + --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ + --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ + --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ + --hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ + --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ + --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ + --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ + --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ + --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ + --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ + --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ + --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ + --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ + --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ + 
--hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ + --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ + --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ + --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ + --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ + --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 + # via google-resumable-media +google-resumable-media==2.3.3 \ + --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ + --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 + # via google-cloud-storage +googleapis-common-protos==1.56.4 \ + --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ + --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 + # via google-api-core +idna==3.3 \ + --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ + --hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d + # via requests +importlib-metadata==4.12.0 \ + --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ + --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 + # via + # -r requirements.in + # twine +jeepney==0.8.0 \ + --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ + --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 + # via + # keyring + # secretstorage +jinja2==3.1.2 \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 + # via gcp-releasetool +keyring==23.8.2 \ + --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ + 
--hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a + # via + # gcp-releasetool + # twine +markupsafe==2.1.1 \ + --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ + --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ + --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ + --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ + --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ + --hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ + --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ + --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ + --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ + --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ + --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ + --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ + --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ + --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ + --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ + --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ + --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ + --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ + --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ + --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ + --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ + --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ + 
--hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ + --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ + --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ + --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ + --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ + --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ + --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ + --hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ + --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ + --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ + --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ + --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ + --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ + --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ + --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ + --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ + --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ + --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 + # via jinja2 +nox==2022.8.7 \ + --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ + --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c + # via -r requirements.in +packaging==21.3 \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 + # via + # gcp-releasetool + # nox +pkginfo==1.8.3 \ + 
--hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ + --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c + # via twine +platformdirs==2.5.2 \ + --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ + --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 + # via virtualenv +protobuf==3.20.1 \ + --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ + --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ + --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ + --hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ + --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ + --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ + --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ + --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ + --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ + --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ + --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ + --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ + --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ + --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ + --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ + --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ + --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ + --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ + --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ + 
--hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ + --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ + --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ + --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ + --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 + # via + # gcp-docuploader + # gcp-releasetool + # google-api-core +py==1.11.0 \ + --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ + --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 + # via nox +pyasn1==0.4.8 \ + --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ + --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.2.8 \ + --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ + --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 + # via google-auth +pycparser==2.21 \ + --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ + --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 + # via cffi +pygments==2.13.0 \ + --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ + --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 + # via + # readme-renderer + # rich +pyjwt==2.4.0 \ + --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ + --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba + # via gcp-releasetool +pyparsing==3.0.9 \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc + # via packaging +pyperclip==1.8.2 \ + 
--hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 + # via gcp-releasetool +python-dateutil==2.8.2 \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 + # via gcp-releasetool +readme-renderer==37.0 \ + --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ + --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 + # via twine +requests==2.28.1 \ + --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ + --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 + # via + # gcp-releasetool + # google-api-core + # google-cloud-storage + # requests-toolbelt + # twine +requests-toolbelt==0.9.1 \ + --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ + --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 + # via twine +rfc3986==2.0.0 \ + --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ + --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c + # via twine +rich==12.5.1 \ + --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ + --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca + # via twine +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +secretstorage==3.3.3 \ + --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ + --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 + # via keyring +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # 
bleach + # gcp-docuploader + # google-auth + # python-dateutil +twine==4.0.1 \ + --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ + --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 + # via -r requirements.in +typing-extensions==4.3.0 \ + --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ + --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 + # via -r requirements.in +urllib3==1.26.12 \ + --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ + --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 + # via + # requests + # twine +virtualenv==20.16.3 \ + --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ + --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 + # via nox +webencodings==0.5.1 \ + --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ + --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 + # via bleach +wheel==0.37.1 \ + --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ + --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 + # via -r requirements.in +zipp==3.8.1 \ + --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ + --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +setuptools==65.2.0 \ + --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ + --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 + # via -r requirements.in diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json index c21036d385e5..566a70f3cc3c 100644 --- 
a/packages/google-cloud-pubsub/renovate.json +++ b/packages/google-cloud-pubsub/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From f340a95d6fef47df0acdca5e87b76a76df166919 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 30 Aug 2022 08:56:52 -0400 Subject: [PATCH 0866/1197] chore(python): exclude `grpcio==1.49.0rc1` in tests (#770) Source-Link: https://github.com/googleapis/synthtool/commit/c4dd5953003d13b239f872d329c3146586bb417e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-pubsub/noxfile.py | 7 +++++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index c6acdf3f90c4..23e106b65770 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:562802bfac02e012a6ac34eda282f81d06e77326b82a32d7bbb1369ff552b387 -# created: 2022-08-24T17:07:22.006876712Z + digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 +# created: 2022-08-29T17:28:30.441852797Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index c4b824f247e3..4b29ef247bed 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -136,9 +136,9 @@ cryptography==37.0.4 \ # via # gcp-releasetool # secretstorage -distlib==0.3.5 \ - --hash=sha256:a7f75737c70be3b25e2bee06288cec4e4c221de18455b2dd037fe2a795cab2fe \ - --hash=sha256:b710088c59f06338ca514800ad795a132da19fda270e3ce4affc74abf955a26c +distlib==0.3.6 \ + --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ + --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e # via virtualenv docutils==0.19 \ --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index cd9590f8adce..332e4a4877f4 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -240,7 +240,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") + # Exclude version 1.49.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/pull/30642 + session.install("--pre", "grpcio!=1.49.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -420,7 +422,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - "grpcio", + # Exclude version 1.49.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/pull/30642 + "grpcio!=1.49.0rc1", "grpcio-status", "google-api-core", "proto-plus", From e528f4244e8362df064a76804c2153f9e2b79849 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 1 Sep 2022 15:36:36 -0400 Subject: [PATCH 0867/1197] ci(python): fix path to requirements.txt in release script (#772) Source-Link: https://github.com/googleapis/synthtool/commit/fdba3ed145bdb2f4f3eff434d4284b1d03b80d34 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 3 +-- .../google-cloud-pubsub/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 24 +++++++++---------- 3 files changed, 14 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 23e106b65770..0d9eb2af9352 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ce3c1686bc81145c81dd269bd12c4025c6b275b22d14641358827334fddb1d72 -# created: 2022-08-29T17:28:30.441852797Z + digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index 40b967043b4f..a6bba127fe84 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -16,7 +16,7 @@ set -eo pipefail # Start the releasetool reporter -python3 -m pip install --require-hashes -r .kokoro/requirements.txt +python3 -m pip install --require-hashes -r github/python-pubsub/.kokoro/requirements.txt python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script # Disable buffering, so that the logs stream through. diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 4b29ef247bed..92b2f727e777 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -100,9 +100,9 @@ click==8.0.4 \ # via # gcp-docuploader # gcp-releasetool -colorlog==6.6.0 \ - --hash=sha256:344f73204009e4c83c5b6beb00b3c45dc70fcdae3c80db919e0a4171d006fde8 \ - --hash=sha256:351c51e866c86c3217f08e4b067a7974a678be78f07f85fc2d55b8babde6d94e +colorlog==6.7.0 \ + --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ + --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 # via # gcp-docuploader # nox @@ -152,9 +152,9 @@ gcp-docuploader==0.6.3 \ --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ --hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b # via -r requirements.in -gcp-releasetool==1.8.6 \ - 
--hash=sha256:42e51ab8e2e789bc8e22a03c09352962cd3452951c801a2230d564816630304a \ - --hash=sha256:a3518b79d1b243c494eac392a01c7fd65187fd6d52602dcab9b529bc934d4da1 +gcp-releasetool==1.8.7 \ + --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ + --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d # via -r requirements.in google-api-core==2.8.2 \ --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ @@ -251,9 +251,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.8.2 \ - --hash=sha256:0d9973f8891850f1ade5f26aafd06bb16865fbbae3fc56b0defb6a14a2624003 \ - --hash=sha256:10d2a8639663fe2090705a00b8c47c687cacdf97598ea9c11456679fa974473a +keyring==23.9.0 \ + --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ + --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db # via # gcp-releasetool # twine @@ -440,9 +440,9 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.3 \ - --hash=sha256:4193b7bc8a6cd23e4eb251ac64f29b4398ab2c233531e66e40b19a6b7b0d30c1 \ - --hash=sha256:d86ea0bb50e06252d79e6c241507cb904fcd66090c3271381372d6221a3970f9 +virtualenv==20.16.4 \ + --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ + --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ From 8a34955fc8d8fccd617d10fa474effaa517930e6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 10:48:58 -0400 Subject: [PATCH 0868/1197] chore(python): update .kokoro/requirements.txt (#773) * chore(python): update .kokoro/requirements.txt Source-Link: 
https://github.com/googleapis/synthtool/commit/703554a14c7479542335b62fa69279f93a9e38ec Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b * python3.6 -> python3.9 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/presubmit-against-pubsublite-samples.sh | 2 +- packages/google-cloud-pubsub/.kokoro/requirements.txt | 8 ++++++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 0d9eb2af9352..2fa0f7c4fe15 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:1f0dbd02745fb7cf255563dab5968345989308544e52b7f460deadd5e78e63b0 + digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh index 1078a5f5ea31..587e491ee365 100755 --- a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh @@ -27,7 +27,7 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.6 -m pip install --upgrade --quiet nox +python3.9 -m pip install --upgrade --quiet nox # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 92b2f727e777..385f2d4d6106 100644 --- 
a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -241,6 +241,10 @@ importlib-metadata==4.12.0 \ # via # -r requirements.in # twine +jaraco-classes==3.2.2 \ + --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ + --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 + # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 @@ -299,6 +303,10 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 # via jinja2 +more-itertools==8.14.0 \ + --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ + --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 + # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c From 5a68da54b225f609adbb9254f2fa719a991090e8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 2 Sep 2022 20:14:25 +0000 Subject: [PATCH 0869/1197] chore(python): exclude setup.py in renovate config (#776) Source-Link: https://github.com/googleapis/synthtool/commit/56da63e80c384a871356d1ea6640802017f213b4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/renovate.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 
2fa0f7c4fe15..b8dcb4a4af99 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:94961fdc5c9ca6d13530a6a414a49d2f607203168215d074cdb0a1df9ec31c0b + digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json index 566a70f3cc3c..39b2a0ec9296 100644 --- a/packages/google-cloud-pubsub/renovate.json +++ b/packages/google-cloud-pubsub/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From aa4b31cf55ab97508a0a97ce0abfda75822f3112 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 6 Sep 2022 17:40:29 +0200 Subject: [PATCH 0870/1197] chore(deps): update dependency pytest to v7.1.3 (#777) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 9ab4702569e8..290925f86683 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.1.2 -pytest==7.1.2 +pytest==7.1.3 mock==4.0.3 flaky==3.7.0 google-cloud-bigquery==3.3.2 From f792bdbd70d095182b1a2d2df0f31ddb41ebc8da Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: 
Wed, 7 Sep 2022 15:28:15 +0000 Subject: [PATCH 0871/1197] chore: Bump gapic-generator-python version to 1.3.0 (#779) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 472561635 Source-Link: https://github.com/googleapis/googleapis/commit/332ecf599f8e747d8d1213b77ae7db26eff12814 Source-Link: https://github.com/googleapis/googleapis-gen/commit/4313d682880fd9d7247291164d4e9d3d5bd9f177 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDMxM2Q2ODI4ODBmZDlkNzI0NzI5MTE2NGQ0ZTlkM2Q1YmQ5ZjE3NyJ9 --- .../services/publisher/async_client.py | 63 + .../pubsub_v1/services/publisher/client.py | 63 + .../services/schema_service/async_client.py | 42 + .../services/schema_service/client.py | 42 + .../services/subscriber/async_client.py | 116 +- .../pubsub_v1/services/subscriber/client.py | 116 +- packages/google-cloud-pubsub/mypy.ini | 2 +- ..._generated_publisher_create_topic_async.py | 7 + ...1_generated_publisher_create_topic_sync.py | 7 + ..._generated_publisher_delete_topic_async.py | 7 + ...1_generated_publisher_delete_topic_sync.py | 7 + ...ted_publisher_detach_subscription_async.py | 7 + ...ated_publisher_detach_subscription_sync.py | 7 + ..._v1_generated_publisher_get_topic_async.py | 7 + ...b_v1_generated_publisher_get_topic_sync.py | 7 + ...ed_publisher_list_topic_snapshots_async.py | 7 + ...ted_publisher_list_topic_snapshots_sync.py | 7 + ...ublisher_list_topic_subscriptions_async.py | 7 + ...publisher_list_topic_subscriptions_sync.py | 7 + ...1_generated_publisher_list_topics_async.py | 7 + ...v1_generated_publisher_list_topics_sync.py | 7 + ...ub_v1_generated_publisher_publish_async.py | 7 + ...sub_v1_generated_publisher_publish_sync.py | 7 + ..._generated_publisher_update_topic_async.py | 7 + ...1_generated_publisher_update_topic_sync.py | 7 + ...ated_schema_service_create_schema_async.py | 7 + ...rated_schema_service_create_schema_sync.py | 7 + ...ated_schema_service_delete_schema_async.py | 7 + ...rated_schema_service_delete_schema_sync.py | 7 + 
...nerated_schema_service_get_schema_async.py | 7 + ...enerated_schema_service_get_schema_sync.py | 7 + ...rated_schema_service_list_schemas_async.py | 7 + ...erated_schema_service_list_schemas_sync.py | 7 + ...d_schema_service_validate_message_async.py | 7 + ...ed_schema_service_validate_message_sync.py | 7 + ...ed_schema_service_validate_schema_async.py | 7 + ...ted_schema_service_validate_schema_sync.py | 7 + ..._generated_subscriber_acknowledge_async.py | 9 +- ...1_generated_subscriber_acknowledge_sync.py | 9 +- ...erated_subscriber_create_snapshot_async.py | 7 + ...nerated_subscriber_create_snapshot_sync.py | 7 + ...ed_subscriber_create_subscription_async.py | 7 + ...ted_subscriber_create_subscription_sync.py | 7 + ...erated_subscriber_delete_snapshot_async.py | 7 + ...nerated_subscriber_delete_snapshot_sync.py | 7 + ...ed_subscriber_delete_subscription_async.py | 7 + ...ted_subscriber_delete_subscription_sync.py | 7 + ...generated_subscriber_get_snapshot_async.py | 7 + ..._generated_subscriber_get_snapshot_sync.py | 7 + ...rated_subscriber_get_subscription_async.py | 7 + ...erated_subscriber_get_subscription_sync.py | 7 + ...nerated_subscriber_list_snapshots_async.py | 7 + ...enerated_subscriber_list_snapshots_sync.py | 7 + ...ted_subscriber_list_subscriptions_async.py | 7 + ...ated_subscriber_list_subscriptions_sync.py | 7 + ...ed_subscriber_modify_ack_deadline_async.py | 9 +- ...ted_subscriber_modify_ack_deadline_sync.py | 9 +- ...ted_subscriber_modify_push_config_async.py | 7 + ...ated_subscriber_modify_push_config_sync.py | 7 + ...bsub_v1_generated_subscriber_pull_async.py | 7 + ...ubsub_v1_generated_subscriber_pull_sync.py | 7 + ...bsub_v1_generated_subscriber_seek_async.py | 7 + ...ubsub_v1_generated_subscriber_seek_sync.py | 7 + ...nerated_subscriber_streaming_pull_async.py | 7 + ...enerated_subscriber_streaming_pull_sync.py | 7 + ...erated_subscriber_update_snapshot_async.py | 7 + ...nerated_subscriber_update_snapshot_sync.py | 7 + 
...ed_subscriber_update_subscription_async.py | 7 + ...ted_subscriber_update_subscription_sync.py | 7 + .../snippet_metadata_pubsub_v1.json | 1184 ++++++++--------- 70 files changed, 1469 insertions(+), 601 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 08c490f2007b..dbcd516b2ba9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -222,6 +222,13 @@ async def create_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_create_topic(): @@ -330,6 +337,13 @@ async def update_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_update_topic(): @@ -418,6 +432,13 @@ async def publish( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_publish(): @@ -534,6 +555,13 @@ async def get_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_get_topic(): @@ -638,6 +666,13 @@ async def list_topics( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_list_topics(): @@ -757,6 +792,13 @@ async def list_topic_subscriptions( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_list_topic_subscriptions(): @@ -882,6 +924,13 @@ async def list_topic_snapshots( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_list_topic_snapshots(): @@ -1007,6 +1056,13 @@ async def delete_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_delete_topic(): @@ -1103,6 +1159,13 @@ async def detach_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_detach_subscription(): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 3721ce92ebb9..f8a640422f8d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -500,6 +500,13 @@ def create_topic( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_create_topic(): @@ -599,6 +606,13 @@ def update_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_update_topic(): @@ -679,6 +693,13 @@ def publish( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_publish(): @@ -780,6 +801,13 @@ def get_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_get_topic(): @@ -873,6 +901,13 @@ def list_topics( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_list_topics(): @@ -981,6 +1016,13 @@ def list_topic_subscriptions( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_list_topic_subscriptions(): @@ -1095,6 +1137,13 @@ def list_topic_snapshots( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_list_topic_snapshots(): @@ -1209,6 +1258,13 @@ def delete_topic( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_delete_topic(): @@ -1296,6 +1352,13 @@ def detach_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_detach_subscription(): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 71c3ed802041..f0f158b6fac4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -220,6 +220,13 @@ async def create_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_create_schema(): @@ -341,6 +348,13 @@ async def get_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_get_schema(): @@ -433,6 +447,13 @@ async def list_schemas( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_list_schemas(): @@ -540,6 +561,13 @@ async def delete_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_delete_schema(): @@ -624,6 +652,13 @@ async def validate_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_validate_schema(): @@ -731,6 +766,13 @@ async def validate_message( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_validate_message(): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 8ca96447aea3..9ecff30f4991 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -450,6 +450,13 @@ def create_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_create_schema(): @@ -571,6 +578,13 @@ def get_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_get_schema(): @@ -663,6 +677,13 @@ def list_schemas( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_list_schemas(): @@ -770,6 +791,13 @@ def delete_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_delete_schema(): @@ -854,6 +882,13 @@ def validate_schema( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_validate_schema(): @@ -961,6 +996,13 @@ def validate_message( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_validate_message(): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 9a19dad1fef6..902d134c632c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -248,6 +248,13 @@ async def create_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_create_subscription(): @@ -414,6 +421,13 @@ async def get_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_get_subscription(): @@ -521,6 +535,13 @@ async def update_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_update_subscription(): @@ -608,6 +629,13 @@ async def list_subscriptions( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_list_subscriptions(): @@ -731,6 +759,13 @@ async def delete_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_delete_subscription(): @@ -832,6 +867,13 @@ async def modify_ack_deadline( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_modify_ack_deadline(): @@ -841,7 +883,7 @@ async def sample_modify_ack_deadline(): # Initialize request argument(s) request = pubsub_v1.ModifyAckDeadlineRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ack_deadline_seconds=2066, ) @@ -961,6 +1003,13 @@ async def acknowledge( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_acknowledge(): @@ -970,7 +1019,7 @@ async def sample_acknowledge(): # Initialize request argument(s) request = pubsub_v1.AcknowledgeRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ) # Make the request @@ -1070,6 +1119,13 @@ async def pull( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_pull(): @@ -1217,6 +1273,13 @@ def streaming_pull( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_streaming_pull(): @@ -1318,6 +1381,13 @@ async def modify_push_config( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_modify_push_config(): @@ -1432,6 +1502,13 @@ async def get_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_get_snapshot(): @@ -1545,6 +1622,13 @@ async def list_snapshots( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_list_snapshots(): @@ -1682,6 +1766,13 @@ async def create_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_create_snapshot(): @@ -1819,6 +1910,13 @@ async def update_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_update_snapshot(): @@ -1916,6 +2014,13 @@ async def delete_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_delete_snapshot(): @@ -2015,6 +2120,13 @@ async def seek( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 async def sample_seek(): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index e15afabe1b86..6f08e2792e1a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -525,6 +525,13 @@ def create_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_create_subscription(): @@ -680,6 +687,13 @@ def get_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_get_subscription(): @@ -776,6 +790,13 @@ def update_subscription( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_update_subscription(): @@ -855,6 +876,13 @@ def list_subscriptions( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_list_subscriptions(): @@ -967,6 +995,13 @@ def delete_subscription( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_delete_subscription(): @@ -1059,6 +1094,13 @@ def modify_ack_deadline( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_modify_ack_deadline(): @@ -1068,7 +1110,7 @@ def sample_modify_ack_deadline(): # Initialize request argument(s) request = pubsub_v1.ModifyAckDeadlineRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ack_deadline_seconds=2066, ) @@ -1179,6 +1221,13 @@ def acknowledge( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_acknowledge(): @@ -1188,7 +1237,7 @@ def sample_acknowledge(): # Initialize request argument(s) request = pubsub_v1.AcknowledgeRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ) # Make the request @@ -1279,6 +1328,13 @@ def pull( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_pull(): @@ -1415,6 +1471,13 @@ def streaming_pull( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_streaming_pull(): @@ -1504,6 +1567,13 @@ def modify_push_config( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_modify_push_config(): @@ -1609,6 +1679,13 @@ def get_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_get_snapshot(): @@ -1711,6 +1788,13 @@ def list_snapshots( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_list_snapshots(): @@ -1837,6 +1921,13 @@ def create_snapshot( .. 
code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_create_snapshot(): @@ -1965,6 +2056,13 @@ def update_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_update_snapshot(): @@ -2054,6 +2152,13 @@ def delete_snapshot( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_delete_snapshot(): @@ -2144,6 +2249,13 @@ def seek( .. code-block:: python + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 def sample_seek(): diff --git a/packages/google-cloud-pubsub/mypy.ini b/packages/google-cloud-pubsub/mypy.ini index 4505b485436b..574c5aed394b 100644 --- a/packages/google-cloud-pubsub/mypy.ini +++ b/packages/google-cloud-pubsub/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.6 +python_version = 3.7 namespace_packages = True diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py index e79f28c983b3..009404d86abc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_CreateTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py index 6a6f04a271f4..e697e8788aa0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_CreateTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py index 2a0148abbad1..b39ca4ccc875 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_DeleteTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py index 376a93ba085e..eddea9147911 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_DeleteTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py index 6fb8d4e7d3e7..20188a017ba4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_DetachSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py index 7c36e4df1291..9271840b7100 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_DetachSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py index 87904db2b378..659125bf27be 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_GetTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py index 2f28cef0a8a2..4351b5638f8e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_GetTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py index b6388f7f5722..85b983f5d0e8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_ListTopicSnapshots_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py index f7f3a61ec9bf..9ebd94326103 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_ListTopicSnapshots_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py index 59b35194b459..a15dba6f4052 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py index d7dffa0e2d3b..f16943066f87 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py index 0d0f10a9896f..b6cd0f682899 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_ListTopics_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py index cffdd77a49af..6913f815bd44 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_ListTopics_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py index 98bfc618e64e..51561cede9d6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_Publish_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py index 650440a78436..2985ca39bac5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_Publish_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py index 473144d07caa..19a95fadffa7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_UpdateTopic_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py index 5a9838c2acfd..4d334c680109 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Publisher_UpdateTopic_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py index 9f979072528b..13b0c86bcd38 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_CreateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py index 798194050d75..094a6c8ecfe0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_CreateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py index 6d5e8f7345ca..39b81883a6c0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_DeleteSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py index 2e516b97aa77..abd335f8cf9e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_DeleteSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py index 10db352c3abf..b13d5aa0058d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_GetSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py index 7d3cdf6d1d44..62d3360db768 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_GetSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py index a1c9be6ee6e2..c5c45753d0d9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_ListSchemas_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py index 4604da242389..fd199857b8d5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_ListSchemas_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py index 94a699e53de0..f51f8d7b693f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_ValidateMessage_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py index 26e32efa13d8..42885d3d0ec3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_ValidateMessage_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py index 86647c7bd65f..c5ac0e10f4ab 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_ValidateSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py index 102fb75edc03..6b0a38f40777 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_SchemaService_ValidateSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py index 8f87241a1f7e..f4153cc36c86 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_Acknowledge_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 @@ -34,7 +41,7 @@ async def sample_acknowledge(): # Initialize request argument(s) request = pubsub_v1.AcknowledgeRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ) # Make the request diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py index a56c55a33c73..09a34d4ccf90 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_Acknowledge_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 @@ -34,7 +41,7 @@ def sample_acknowledge(): # Initialize request argument(s) request = pubsub_v1.AcknowledgeRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ) # Make the request diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py index 6e2d4538771d..b3c2d020de9b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_CreateSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py index b6145acb903f..9a3c7d13488a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_CreateSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py index 4c63c47cd594..605fb4d39e46 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_CreateSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py index 6e37969f1f8c..672da7efa7b8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_CreateSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py index 26e2c7aa783e..efdd6928d95d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_DeleteSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py index f2538ddb0ca3..e7ed25d7ad5d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_DeleteSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py index f310d24b2869..57022485796a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_DeleteSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py index c601dd6633b2..408945b92ced 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_DeleteSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py index 3a56e4fbbe0e..a0960acd85cd 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_GetSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py index 3a6cd24ca08a..d12f58a4d8f9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_GetSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py index 7ad71832664f..34b8938840af 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_GetSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py index d883e085dfbe..cbb59781fde6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_GetSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py index edc7976a1293..13c973563019 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ListSnapshots_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py index e67ca2a39633..9b8dbfb9c5ee 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ListSnapshots_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py index 01c45577a7f2..87631e58b849 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ListSubscriptions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py index 272b0408d9cf..8c31fb126402 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ListSubscriptions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py index b85c2033ff49..04dabe0bd090 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 @@ -34,7 +41,7 @@ async def sample_modify_ack_deadline(): # Initialize request argument(s) request = pubsub_v1.ModifyAckDeadlineRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ack_deadline_seconds=2066, ) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py index ac0805db437c..b0927facef97 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 @@ -34,7 +41,7 @@ def sample_modify_ack_deadline(): # Initialize request argument(s) request = pubsub_v1.ModifyAckDeadlineRequest( subscription="subscription_value", - ack_ids=['ack_ids_value_1', 'ack_ids_value_2'], + ack_ids=['ack_ids_value1', 'ack_ids_value2'], ack_deadline_seconds=2066, ) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py index 662823a1d682..67bd51c31d8c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ModifyPushConfig_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py index a7499941c486..d7dd306dadfb 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_ModifyPushConfig_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py index 113f3ddfcffe..61eda9788c17 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_Pull_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py index abb47bfa16b2..fb61cfe9e8c3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_Pull_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py index 062c69409de9..65ab1796beda 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_Seek_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py index f28570e7c779..deb2340d0ad4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_Seek_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py index 64c1e37483c0..a32ecc37b3a1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_StreamingPull_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py index 0aa02fa40cdc..ae5549793745 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_StreamingPull_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py index f07bca1f5d76..b51ffac08fbe 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_UpdateSnapshot_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py index 7afe32ec2259..bc29f5e0b32c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_UpdateSnapshot_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py index 5a0410ec36cb..f85f7ce4a214 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_UpdateSubscription_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py index 75d6e8a95299..4e15e3e4dc13 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -24,6 +24,13 @@ # [START pubsub_v1_generated_Subscriber_UpdateSubscription_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. 
+# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html from google import pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json index 0f5906e95364..57f929ff59df 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json @@ -59,33 +59,33 @@ "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -139,33 +139,33 @@ "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -219,31 +219,31 @@ "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_async", "segments": 
[ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -296,31 +296,31 @@ "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -371,33 +371,33 @@ "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -447,33 +447,33 @@ "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + 
"end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -528,33 +528,33 @@ "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -608,33 +608,33 @@ "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -689,33 +689,33 @@ "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -769,33 +769,33 @@ "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", 
"segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -850,33 +850,33 @@ "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -930,33 +930,33 @@ "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1011,33 +1011,33 @@ "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": 
"CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1091,33 +1091,33 @@ "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1176,33 +1176,33 @@ "regionTag": "pubsub_v1_generated_Publisher_Publish_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1260,33 +1260,33 @@ "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + 
"end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1337,33 +1337,33 @@ "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -1413,33 +1413,33 @@ "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", "segments": [ { - "end": 47, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 47, + "end": 54, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 41, - "start": 34, + "end": 48, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 44, - "start": 42, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 48, - "start": 45, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], @@ -1502,33 +1502,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -1590,33 +1590,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - 
"end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -1670,31 +1670,31 @@ "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1747,31 +1747,31 @@ "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -1826,33 +1826,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - 
"end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1906,33 +1906,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1987,33 +1987,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2067,33 +2067,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -2144,33 +2144,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", "segments": [ { - "end": 45, + "end": 52, 
"start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -2220,33 +2220,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -2305,33 +2305,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -2389,33 +2389,33 @@ "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + 
"end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], @@ -2473,31 +2473,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 40, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 51, "type": "RESPONSE_HANDLING" } ], @@ -2554,31 +2554,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 40, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 44, + "end": 51, "type": "RESPONSE_HANDLING" } ], @@ -2637,33 +2637,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -2721,33 +2721,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", 
"segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -2814,33 +2814,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -2906,33 +2906,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -2986,31 +2986,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": 
"CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3063,31 +3063,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3141,31 +3141,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3218,31 +3218,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -3297,33 +3297,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", "segments": [ { - "end": 44, + "end": 51, "start": 
27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3377,33 +3377,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3458,33 +3458,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3538,33 +3538,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3619,33 +3619,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3699,33 +3699,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3780,33 +3780,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" 
} ], @@ -3860,33 +3860,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 42, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -3948,31 +3948,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 40, - "start": 34, + "end": 47, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 41, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 52, "type": "RESPONSE_HANDLING" } ], @@ -4033,31 +4033,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 40, - "start": 34, + "end": 47, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 41, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 45, + "end": 52, "type": "RESPONSE_HANDLING" } ], @@ -4115,31 +4115,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - 
"end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -4196,31 +4196,31 @@ "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", "segments": [ { - "end": 42, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 42, + "end": 49, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 39, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 43, + "end": 50, "type": "RESPONSE_HANDLING" } ], @@ -4283,33 +4283,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -4371,33 +4371,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_Pull_sync", "segments": [ { - "end": 45, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 45, + "end": 52, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 39, - "start": 34, + "end": 46, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 42, - "start": 40, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 46, - "start": 43, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], @@ -4448,33 +4448,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", 
"segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4524,33 +4524,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", "segments": [ { - "end": 44, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 44, + "end": 51, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 38, - "start": 34, + "end": 45, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 41, - "start": 39, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 45, - "start": 42, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -4601,33 +4601,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", "segments": [ { - "end": 56, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 63, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 49, - "start": 34, + "end": 56, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 59, + "start": 57, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 53, + "end": 64, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -4677,33 +4677,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_sync", "segments": [ { - "end": 56, + "end": 63, "start": 27, "type": "FULL" }, { - "end": 56, + "end": 63, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 
49, - "start": 34, + "end": 56, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 52, - "start": 50, + "end": 59, + "start": 57, "type": "REQUEST_EXECUTION" }, { - "end": 57, - "start": 53, + "end": 64, + "start": 60, "type": "RESPONSE_HANDLING" } ], @@ -4754,33 +4754,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -4830,33 +4830,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", "segments": [ { - "end": 43, + "end": 50, "start": 27, "type": "FULL" }, { - "end": 43, + "end": 50, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 37, - "start": 34, + "end": 44, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 40, - "start": 38, + "end": 47, + "start": 45, "type": "REQUEST_EXECUTION" }, { - "end": 44, - "start": 41, + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -4907,33 +4907,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + 
"start": 53, "type": "RESPONSE_HANDLING" } ], @@ -4983,33 +4983,33 @@ "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", "segments": [ { - "end": 48, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 48, + "end": 55, "start": 27, "type": "SHORT" }, { - "end": 33, - "start": 31, + "end": 40, + "start": 38, "type": "CLIENT_INITIALIZATION" }, { - "end": 42, - "start": 34, + "end": 49, + "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 45, - "start": 43, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 49, - "start": 46, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], From f443810b19720cb586984c33bc65ed94cd9ee5ac Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Sep 2022 13:58:25 +0000 Subject: [PATCH 0872/1197] chore: use gapic-generator-python 1.3.1 (#780) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 472772457 Source-Link: https://github.com/googleapis/googleapis/commit/855b74d203deeb0f7a0215f9454cdde62a1f9b86 Source-Link: https://github.com/googleapis/googleapis-gen/commit/b64b1e7da3e138f15ca361552ef0545e54891b4f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjY0YjFlN2RhM2UxMzhmMTVjYTM2MTU1MmVmMDU0NWU1NDg5MWI0ZiJ9 --- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 4 ++-- .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 4 ++-- .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index a372c7e4d9ef..ee3b286be90e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from 
unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 1d3a6ec262d2..12ee8e302a31 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -18,8 +18,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 273c8edd3fc0..0cb9e1752300 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -19,8 +19,8 @@ # try/except added for compatibility with python < 3.8 try: from unittest import mock - from unittest.mock import AsyncMock -except ImportError: + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER import mock import grpc From b4795b78d796f0f7591d9ebeea5cea6595cb5f61 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 14:40:13 +0000 Subject: [PATCH 0873/1197] chore: use gapic generator python 1.4.1 (#781) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 473833416 Source-Link: https://github.com/googleapis/googleapis/commit/565a5508869557a3228b871101e4e4ebd8f93d11 Source-Link: https://github.com/googleapis/googleapis-gen/commit/1ee1a06c6de3ca8b843572c1fde0548f84236989 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMWVlMWEwNmM2ZGUzY2E4Yjg0MzU3MmMxZmRlMDU0OGY4NDIzNjk4OSJ9 --- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index ee3b286be90e..1b86bb7358c6 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 12ee8e302a31..ce559bb9b9b0 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -27,7 +27,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py 
index 0cb9e1752300..79fd1bdbe7e2 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -28,7 +28,7 @@ import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule - +from proto.marshal.rules import wrappers from google.api_core import client_options from google.api_core import exceptions as core_exceptions From 0028a42531e22c51df08beb3509e4c3b4e124ebd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 13 Sep 2022 12:31:33 -0400 Subject: [PATCH 0874/1197] chore: detect samples tests in nested directories (#782) Source-Link: https://github.com/googleapis/synthtool/commit/50db768f450a50d7c1fd62513c113c9bb96fd434 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/samples/snippets/noxfile.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index b8dcb4a4af99..aa547962eb0a 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:993a058718e84a82fda04c3177e58f0a43281a996c7c395e0a56ccc4d6d210d7 + digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 5fcb9d7461f2..0398d72ff690 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -207,8 +207,8 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("*_test.py") + glob.glob("test_*.py") - test_list.extend(glob.glob("tests")) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob("**/test_*.py", recursive=True) + test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: print("No tests found, skipping directory.") From bb50854025a33a1e7fc1c8fb56ffc382bdc75bfd Mon Sep 17 00:00:00 2001 From: Major Hayden Date: Wed, 21 Sep 2022 15:58:30 -0500 Subject: [PATCH 0875/1197] test: import mock via unittest.mock (#702) * Import mock via unittest.mock The `mock` module is deprecated and modern versions of Python havd `mock` available in the `unittest` standard library. 
Fixes: #701 Signed-off-by: Major Hayden Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/tests/system.py | 8 +++++++- .../google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py | 9 ++++++++- .../tests/unit/pubsub_v1/publisher/batch/test_base.py | 7 ++++++- .../tests/unit/pubsub_v1/publisher/batch/test_thread.py | 8 +++++++- .../publisher/sequencer/test_ordered_sequencer.py | 9 ++++++++- .../publisher/sequencer/test_unordered_sequencer.py | 8 +++++++- .../unit/pubsub_v1/publisher/test_publisher_client.py | 8 +++++++- .../tests/unit/pubsub_v1/subscriber/test_dispatcher.py | 8 +++++++- .../unit/pubsub_v1/subscriber/test_futures_subscriber.py | 8 +++++++- .../tests/unit/pubsub_v1/subscriber/test_heartbeater.py | 8 +++++++- .../unit/pubsub_v1/subscriber/test_helper_threads.py | 9 ++++++++- .../tests/unit/pubsub_v1/subscriber/test_leaser.py | 8 +++++++- .../tests/unit/pubsub_v1/subscriber/test_message.py | 7 ++++++- .../tests/unit/pubsub_v1/subscriber/test_scheduler.py | 7 ++++++- .../pubsub_v1/subscriber/test_streaming_pull_manager.py | 7 ++++++- .../unit/pubsub_v1/subscriber/test_subscriber_client.py | 9 ++++++++- .../tests/unit/pubsub_v1/test_futures.py | 7 ++++++- 17 files changed, 118 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index aa6ca486fbc9..44444325fb37 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -20,10 +20,16 @@ import operator as op import os import psutil +import sys import threading import time -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest import google.auth diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py index 64729a6b5a76..e39832d5f5ab 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py @@ -11,9 +11,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import sys import google.auth.credentials -import mock + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index 3ded77b00d86..c35f482e7745 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -13,8 +13,13 @@ # limitations under the License. from __future__ import absolute_import +import sys -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock from google.auth import credentials from google.cloud.pubsub_v1 import publisher diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index b15128489f5d..60658b4ce353 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -13,10 +13,16 @@ # limitations under the License. 
import datetime +import sys import threading import time -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest import google.api_core.exceptions diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 7384af2a2191..e126c829f0a0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -13,7 +13,14 @@ # limitations under the License. import concurrent.futures as futures -import mock +import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest from google.auth import credentials diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index ee0cfab83e4a..8a2c486ad803 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -11,8 +11,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock import pytest from google.auth import credentials diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 372f53015ad2..6c68c3943aa0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -16,10 +16,16 @@ from __future__ import division import inspect +import sys import grpc -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest import time import warnings diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index c6902da69621..91ee2a66d0fc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -14,6 +14,7 @@ import collections import queue +import sys import threading from google.cloud.pubsub_v1.subscriber._protocol import dispatcher @@ -22,7 +23,12 @@ from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager from google.cloud.pubsub_v1.subscriber import futures -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest from google.cloud.pubsub_v1.subscriber.exceptions import ( AcknowledgeStatus, diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index 9f71109e711a..d10da6fb19c5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -13,8 +13,14 @@ # limitations under the License. from __future__ import absolute_import +import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock -import mock import pytest from google.cloud.pubsub_v1.subscriber import futures diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 1a52af231cc5..503fde2c9ade 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -13,12 +13,18 @@ # limitations under the License. import logging +import sys import threading from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index 9ebd37f4fbdc..bfbaf3e56509 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -12,7 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock +import sys + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import queue from google.cloud.pubsub_v1.subscriber._protocol import helper_threads diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index ccc3ec99f8e9..7e11e3ccbd0a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import sys import threading from google.cloud.pubsub_v1 import types @@ -22,7 +23,12 @@ from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 0debabaf3476..49b07b7fd496 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -14,9 +14,14 @@ import datetime import queue +import sys import time -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber import message diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 
0545c967c41b..ff76fa09d8d0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -14,11 +14,16 @@ import concurrent.futures import queue +import sys import threading import time import warnings -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock from google.cloud.pubsub_v1.subscriber import scheduler diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index deb476eb1a2a..459ab6d6769c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -14,11 +14,16 @@ import functools import logging +import sys import threading import time import types as stdlib_types -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock import pytest from google.api_core import bidi diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 793ceca3c16c..83ef3f06dc5f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -12,10 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import sys import warnings import grpc -import mock + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py index 2b26289c49e8..5a4dad41a9a2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py @@ -17,7 +17,12 @@ import threading import time -import mock +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + import pytest from google.cloud.pubsub_v1 import exceptions From dd7a2ecc3a22233f1d3491312d9e7bdb97fe9406 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 22 Sep 2022 12:43:11 -0400 Subject: [PATCH 0876/1197] fix: remove expired ack_ids (#787) --- .../_protocol/streaming_pull_manager.py | 47 ++++++++++++------- .../subscriber/test_streaming_pull_manager.py | 46 ++++++++++++++---- 2 files changed, 66 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 932699261b0f..21c1bab7b063 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -988,7 +988,9 @@ def _get_initial_request( # Return the initial request. 
return request - def _send_lease_modacks(self, ack_ids: Iterable[str], ack_deadline: float): + def _send_lease_modacks( + self, ack_ids: Iterable[str], ack_deadline: float + ) -> List[str]: exactly_once_enabled = False with self._exactly_once_enabled_lock: exactly_once_enabled = self._exactly_once_enabled @@ -1002,15 +1004,19 @@ def _send_lease_modacks(self, ack_ids: Iterable[str], ack_deadline: float): assert self._dispatcher is not None self._dispatcher.modify_ack_deadline(items) + expired_ack_ids = [] for req in items: try: assert req.future is not None req.future.result() - except AcknowledgeError: + except AcknowledgeError as ack_error: _LOGGER.warning( "AcknowledgeError when lease-modacking a message.", exc_info=True, ) + if ack_error.error_code == AcknowledgeStatus.INVALID_ACK_ID: + expired_ack_ids.append(req.ack_id) + return expired_ack_ids else: items = [ requests.ModAckRequest(ack_id, self.ack_deadline, None) @@ -1018,6 +1024,7 @@ def _send_lease_modacks(self, ack_ids: Iterable[str], ack_deadline: float): ] assert self._dispatcher is not None self._dispatcher.modify_ack_deadline(items) + return [] def _exactly_once_delivery_enabled(self) -> bool: """Whether exactly-once delivery is enabled for the subscription.""" @@ -1071,28 +1078,32 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: # modack the messages we received, as this tells the server that we've # received them. 
ack_id_gen = (message.ack_id for message in received_messages) - self._send_lease_modacks(ack_id_gen, self.ack_deadline) + expired_ack_ids = set(self._send_lease_modacks(ack_id_gen, self.ack_deadline)) with self._pause_resume_lock: assert self._scheduler is not None assert self._leaser is not None for received_message in received_messages: - message = google.cloud.pubsub_v1.subscriber.message.Message( - received_message.message, - received_message.ack_id, - received_message.delivery_attempt, - self._scheduler.queue, - self._exactly_once_delivery_enabled, - ) - self._messages_on_hold.put(message) - self._on_hold_bytes += message.size - req = requests.LeaseRequest( - ack_id=message.ack_id, - byte_size=message.size, - ordering_key=message.ordering_key, - ) - self._leaser.add([req]) + if ( + not self._exactly_once_delivery_enabled() + or received_message.ack_id not in expired_ack_ids + ): + message = google.cloud.pubsub_v1.subscriber.message.Message( + received_message.message, + received_message.ack_id, + received_message.delivery_attempt, + self._scheduler.queue, + self._exactly_once_delivery_enabled, + ) + self._messages_on_hold.put(message) + self._on_hold_bytes += message.size + req = requests.LeaseRequest( + ack_id=message.ack_id, + byte_size=message.size, + ordering_key=message.ordering_key, + ) + self._leaser.add([req]) self._maybe_release_messages() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 459ab6d6769c..b4f76f20b210 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1125,6 +1125,7 @@ def test_heartbeat_stream_ack_deadline_seconds(caplog): "google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True ) def 
test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): + manager = make_manager() with mock.patch.object( @@ -1852,11 +1853,18 @@ def test__on_response_exactly_once_immediate_modacks_fail(): def complete_futures_with_error(*args, **kwargs): modack_requests = args[0] for req in modack_requests: - req.future.set_exception( - subscriber_exceptions.AcknowledgeError( - subscriber_exceptions.AcknowledgeStatus.SUCCESS, None + if req.ack_id == "fack": + req.future.set_exception( + subscriber_exceptions.AcknowledgeError( + subscriber_exceptions.AcknowledgeStatus.INVALID_ACK_ID, None + ) + ) + else: + req.future.set_exception( + subscriber_exceptions.AcknowledgeError( + subscriber_exceptions.AcknowledgeStatus.SUCCESS, None + ) ) - ) dispatcher.modify_ack_deadline.side_effect = complete_futures_with_error @@ -1866,19 +1874,39 @@ def complete_futures_with_error(*args, **kwargs): gapic_types.ReceivedMessage( ack_id="fack", message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), - ) + ), + gapic_types.ReceivedMessage( + ack_id="good", + message=gapic_types.PubsubMessage(data=b"foo", message_id="2"), + ), ], subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( exactly_once_delivery_enabled=True ), ) - # adjust message bookkeeping in leaser - fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + # Actually run the method and prove that modack and schedule are called in + # the expected way. + + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10) - # exactly_once should be enabled manager._on_response(response) - # exceptions are logged, but otherwise no effect + + # The second messages should be scheduled, and not the first. 
+ + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 1 + call_args = schedule_calls[0][1] + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].message_id == "2" + + assert manager._messages_on_hold.size == 0 + # No messages available + assert manager._messages_on_hold.get() is None + + # do not add message + assert manager.load == 0.001 def test__should_recover_true(): From 0dfe81a293cce50500f65009bee628c2de916237 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 22 Sep 2022 13:28:47 -0400 Subject: [PATCH 0877/1197] chore(main): release 2.13.7 (#788) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 63ac7ab9af82..cb71fdb2713b 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.7](https://github.com/googleapis/python-pubsub/compare/v2.13.6...v2.13.7) (2022-09-22) + + +### Bug Fixes + +* Remove expired ack_ids ([#787](https://github.com/googleapis/python-pubsub/issues/787)) ([b4b809d](https://github.com/googleapis/python-pubsub/commit/b4b809d616cf93881815d6baadf2dd322ab566d1)) + ## [2.13.6](https://github.com/googleapis/python-pubsub/compare/v2.13.5...v2.13.6) (2022-08-11) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 4eb55ed31eb5..6034e201ffa4 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API 
client library" -version = "2.13.6" +version = "2.13.7" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 71af8c2b930660d025ca460c6f69855b1bcfa94a Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 23 Sep 2022 20:50:59 +0200 Subject: [PATCH 0878/1197] chore(deps): update dependency google-cloud-pubsub to v2.13.7 (#789) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 6291462f2bfb..77886b8cc68a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.6 +google-cloud-pubsub==2.13.7 avro==1.11.1 From 81e62d69142e98c4c5c1981ab006ddc46cfcd267 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Mon, 3 Oct 2022 17:35:16 +0200 Subject: [PATCH 0879/1197] chore(deps): update dependency google-cloud-bigquery to v3.3.3 (#793) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 290925f86683..7f05516b711e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.1.2 pytest==7.1.3 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.3.2 +google-cloud-bigquery==3.3.3 From 7c4b7fb50d602d93140f1c75978d8103d63991c5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 12:02:01 -0400 Subject: [PATCH 0880/1197] fix(deps): require protobuf >= 3.20.2 (#792) * chore: 
exclude requirements.txt file from renovate-bot Source-Link: https://github.com/googleapis/synthtool/commit/f58d3135a2fab20e225d98741dbc06d57459b816 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 * update constraints files * fix(deps): require protobuf 3.20.2 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/requirements.txt | 49 +++++++++---------- packages/google-cloud-pubsub/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- 4 files changed, 27 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index aa547962eb0a..3815c983cb16 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e09366bdf0fd9c8976592988390b24d53583dd9f002d476934da43725adbb978 + digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 385f2d4d6106..d15994bac93c 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -325,31 +325,30 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.1 \ - --hash=sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf \ - --hash=sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f \ - --hash=sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f \ - 
--hash=sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7 \ - --hash=sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996 \ - --hash=sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067 \ - --hash=sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c \ - --hash=sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7 \ - --hash=sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9 \ - --hash=sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c \ - --hash=sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739 \ - --hash=sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91 \ - --hash=sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c \ - --hash=sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153 \ - --hash=sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9 \ - --hash=sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388 \ - --hash=sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e \ - --hash=sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab \ - --hash=sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde \ - --hash=sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531 \ - --hash=sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8 \ - --hash=sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7 \ - --hash=sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20 \ - --hash=sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3 +protobuf==3.20.2 \ + --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ + --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ + 
--hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ + --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ + --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ + --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ + --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ + --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ + --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ + --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ + --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ + --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ + --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ + --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ + --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ + --hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ + --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ + --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ + --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ + --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ + --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ + --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ + --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 # via # gcp-docuploader # gcp-releasetool diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6034e201ffa4..dba91aa6b4cc 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py 
@@ -32,7 +32,7 @@ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.19.0, <5.0.0dev", + "protobuf >= 3.20.2, <5.0.0dev", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "grpcio-status >= 1.16.0", ] diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 07498af4ba48..22fedb9e7c31 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -9,4 +9,4 @@ google-api-core==1.32.0 libcst==0.3.10 proto-plus==1.22.0 grpc-google-iam-v1==0.12.4 -protobuf==3.19.0 +protobuf==3.20.2 From 5dac5e1abfb0393ad3570be451865748f8c39979 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 3 Oct 2022 12:26:33 -0400 Subject: [PATCH 0881/1197] chore(main): release 2.13.8 (#795) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index cb71fdb2713b..0a7db66da5ef 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.8](https://github.com/googleapis/python-pubsub/compare/v2.13.7...v2.13.8) (2022-10-03) + + +### Bug Fixes + +* **deps:** Require protobuf >= 3.20.2 ([#792](https://github.com/googleapis/python-pubsub/issues/792)) ([1a54f7c](https://github.com/googleapis/python-pubsub/commit/1a54f7cd3d997270e0a5d70f7caea32d8753be76)) + ## 
[2.13.7](https://github.com/googleapis/python-pubsub/compare/v2.13.6...v2.13.7) (2022-09-22) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index dba91aa6b4cc..19f092686a89 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.7" +version = "2.13.8" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 80a46364a39938e9a2c186d6080a9735ed2682aa Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 5 Oct 2022 21:40:12 -0400 Subject: [PATCH 0882/1197] Fix: Silence invalid_ack_id warnings for receipt modacks (#798) --- .../_protocol/streaming_pull_manager.py | 20 +++-- .../subscriber/test_streaming_pull_manager.py | 85 ++++++++++++++++++- 2 files changed, 97 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 21c1bab7b063..89dc93e74ee8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -989,7 +989,7 @@ def _get_initial_request( return request def _send_lease_modacks( - self, ack_ids: Iterable[str], ack_deadline: float + self, ack_ids: Iterable[str], ack_deadline: float, warn_on_invalid=True ) -> List[str]: exactly_once_enabled = False with self._exactly_once_enabled_lock: @@ -1010,10 +1010,14 @@ def _send_lease_modacks( assert req.future is not None req.future.result() except AcknowledgeError as ack_error: - _LOGGER.warning( - "AcknowledgeError when lease-modacking a message.", - exc_info=True, - ) + if ( + 
ack_error.error_code != AcknowledgeStatus.INVALID_ACK_ID + or warn_on_invalid + ): + _LOGGER.warning( + "AcknowledgeError when lease-modacking a message.", + exc_info=True, + ) if ack_error.error_code == AcknowledgeStatus.INVALID_ACK_ID: expired_ack_ids.append(req.ack_id) return expired_ack_ids @@ -1078,7 +1082,11 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: # modack the messages we received, as this tells the server that we've # received them. ack_id_gen = (message.ack_id for message in received_messages) - expired_ack_ids = set(self._send_lease_modacks(ack_id_gen, self.ack_deadline)) + expired_ack_ids = set( + self._send_lease_modacks( + ack_id_gen, self.ack_deadline, warn_on_invalid=False + ) + ) with self._pause_resume_lock: assert self._scheduler is not None diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index b4f76f20b210..1f28b3f40575 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1846,7 +1846,7 @@ def test__on_response_disable_exactly_once(): assert manager._stream_ack_deadline == 60 -def test__on_response_exactly_once_immediate_modacks_fail(): +def test__on_response_exactly_once_immediate_modacks_fail(caplog): manager, _, dispatcher, leaser, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback @@ -1890,7 +1890,8 @@ def complete_futures_with_error(*args, **kwargs): fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10) - manager._on_response(response) + with caplog.at_level(logging.WARNING): + manager._on_response(response) # The second messages should be scheduled, and not the first. 
@@ -1902,6 +1903,14 @@ def complete_futures_with_error(*args, **kwargs): assert call_args[1].message_id == "2" assert manager._messages_on_hold.size == 0 + + expected_warnings = [ + record.message.lower() + for record in caplog.records + if "AcknowledgeError when lease-modacking a message." in record.message + ] + assert len(expected_warnings) == 1 + # No messages available assert manager._messages_on_hold.get() is None @@ -1909,6 +1918,78 @@ def complete_futures_with_error(*args, **kwargs): assert manager.load == 0.001 +def test__on_response_exactly_once_immediate_modacks_fail_non_invalid(caplog): + manager, _, dispatcher, leaser, _, scheduler = make_running_manager() + manager._callback = mock.sentinel.callback + + def complete_futures_with_error(*args, **kwargs): + modack_requests = args[0] + for req in modack_requests: + if req.ack_id == "fack": + req.future.set_exception( + subscriber_exceptions.AcknowledgeError( + subscriber_exceptions.AcknowledgeStatus.OTHER, None + ) + ) + else: + req.future.set_exception( + subscriber_exceptions.AcknowledgeError( + subscriber_exceptions.AcknowledgeStatus.SUCCESS, None + ) + ) + + dispatcher.modify_ack_deadline.side_effect = complete_futures_with_error + + # Set up the messages. + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="fack", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="good", + message=gapic_types.PubsubMessage(data=b"foo", message_id="2"), + ), + ], + subscription_properties=gapic_types.StreamingPullResponse.SubscriptionProperties( + exactly_once_delivery_enabled=True + ), + ) + + # Actually run the method and prove that modack and schedule are called in + # the expected way. + + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=10) + + with caplog.at_level(logging.WARNING): + manager._on_response(response) + + # The second messages should be scheduled, and not the first. 
+ + schedule_calls = scheduler.schedule.mock_calls + assert len(schedule_calls) == 2 + call_args = schedule_calls[0][1] + assert call_args[0] == mock.sentinel.callback + assert isinstance(call_args[1], message.Message) + assert call_args[1].message_id == "1" + + assert manager._messages_on_hold.size == 0 + + expected_warnings = [ + record.message.lower() + for record in caplog.records + if "AcknowledgeError when lease-modacking a message." in record.message + ] + assert len(expected_warnings) == 2 + + # No messages available + assert manager._messages_on_hold.get() is None + + # do not add message + assert manager.load == 0.002 + + def test__should_recover_true(): manager = make_manager() From 98b7462a3b900582427e921d0ec8d7d48e299ae1 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 6 Oct 2022 15:50:13 +0200 Subject: [PATCH 0883/1197] chore(deps): update dependency backoff to v2.2.1 (#797) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 7f05516b711e..f98cebbb5d53 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,4 +1,4 @@ -backoff==2.1.2 +backoff==2.2.1 pytest==7.1.3 mock==4.0.3 flaky==3.7.0 From 61bcd4d0cbafa5fa9c312c04aa22b9a5c6073712 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 7 Oct 2022 23:40:05 -0400 Subject: [PATCH 0884/1197] fix(deps): allow protobuf 3.19.5 (#801) * fix(deps): allow protobuf 3.19.5 * explicitly exclude protobuf 4.21.0 --- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git 
a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 19f092686a89..9fb99c11165b 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -32,7 +32,7 @@ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", - "protobuf >= 3.20.2, <5.0.0dev", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", "grpcio-status >= 1.16.0", ] diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 22fedb9e7c31..08b242a12462 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -9,4 +9,4 @@ google-api-core==1.32.0 libcst==0.3.10 proto-plus==1.22.0 grpc-google-iam-v1==0.12.4 -protobuf==3.20.2 +protobuf==3.19.5 From fcaadfdeac388f001d9469e77ed2cf1a353a538f Mon Sep 17 00:00:00 2001 From: Jaume Marhuenda Date: Sat, 8 Oct 2022 14:26:38 -0400 Subject: [PATCH 0885/1197] fix: batch at most 1,000 ack ids per request (#802) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Owl Bot --- .../cloud/pubsub_v1/subscriber/_protocol/dispatcher.py | 10 +--------- .../tests/unit/pubsub_v1/subscriber/test_dispatcher.py | 4 ++-- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index c6dbf067f21c..ed2f5d21777a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -59,16 +59,8 @@ """The maximum amount of time in seconds to wait for additional request items before processing the next batch of requests.""" -_ACK_IDS_BATCH_SIZE = 2500 +_ACK_IDS_BATCH_SIZE = 1000 """The maximum number of ACK IDs to send in a single StreamingPullRequest. - -The backend imposes a maximum request size limit of 524288 bytes (512 KiB) per -acknowledge / modifyAckDeadline request. ACK IDs have a maximum size of 164 -bytes, thus we cannot send more than o 524288/176 ~= 2979 ACK IDs in a single -StreamingPullRequest message. - -Accounting for some overhead, we should thus only send a maximum of 2500 ACK -IDs at a time. """ _MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS = 1 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 91ee2a66d0fc..a5107fe7b0e4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -437,7 +437,7 @@ def test_ack_splitting_large_payload(): dispatcher_.ack(items) calls = manager.send_unary_ack.call_args_list - assert len(calls) == 3 + assert len(calls) == 6 all_ack_ids = {item.ack_id for item in items} sent_ack_ids = collections.Counter() @@ -689,7 +689,7 @@ def test_modify_ack_deadline_splitting_large_payload(): dispatcher_.modify_ack_deadline(items) calls = manager.send_unary_modack.call_args_list - assert len(calls) == 3 + assert len(calls) == 6 all_ack_ids = {item.ack_id for item in items} sent_ack_ids = collections.Counter() From 15bd075c46672d362e706217a9316aec279dde65 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Oct 2022 16:15:40 -0400 Subject: [PATCH 0886/1197] chore: release as 2.3.10 (#805) * chore: release 2.3.10 Release-As: 2.3.10 * mark 2.13.9 as 
taken --- packages/google-cloud-pubsub/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 9fb99c11165b..51c30a85b289 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.8" +version = "2.13.9" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From b3fb4e17d35285ae3cdea7c0458b622bc8b421e4 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 14 Oct 2022 11:15:53 -0400 Subject: [PATCH 0887/1197] chore(main): release 2.13.10 (#799) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0a7db66da5ef..76c13f4f41fa 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.10](https://github.com/googleapis/python-pubsub/compare/v2.13.8...v2.13.10) (2022-10-14) + + +### Bug Fixes + +* Batch at most 1,000 ack ids per request ([#802](https://github.com/googleapis/python-pubsub/issues/802)) ([4361e67](https://github.com/googleapis/python-pubsub/commit/4361e6735004a5600ee73979b99e6b9dd587c49b)) +* **deps:** Allow protobuf 3.19.5 ([#801](https://github.com/googleapis/python-pubsub/issues/801)) ([fa23503](https://github.com/googleapis/python-pubsub/commit/fa235033481783c2ec378b2a26b223bdff206461)) +* Silence invalid_ack_id warnings for receipt modacks 
([#798](https://github.com/googleapis/python-pubsub/issues/798)) ([17feea5](https://github.com/googleapis/python-pubsub/commit/17feea5783f3a878b4dcfb3a8570585f7637378f)) + + +### Miscellaneous Chores + +* release as 2.13.10 ([34f022b](https://github.com/googleapis/python-pubsub/commit/34f022b4ee62d53a193bc2babafad508e2f2540b)) + ## [2.13.8](https://github.com/googleapis/python-pubsub/compare/v2.13.7...v2.13.8) (2022-10-03) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 51c30a85b289..134b4d30e1c7 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.9" +version = "2.13.10" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 9018e20ad4c497b13ac1a1a5c2e9ec17faefe220 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Tue, 18 Oct 2022 15:15:05 +0200 Subject: [PATCH 0888/1197] chore(deps): update all dependencies (#806) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index f98cebbb5d53..1ce452ec15d3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.1.3 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.3.3 +google-cloud-bigquery==3.3.5 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 77886b8cc68a..18485cd5799a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt 
+++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.7 +google-cloud-pubsub==2.13.10 avro==1.11.1 From 486e5fe8ef4987acf9f8bed427f87080b40a5309 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 26 Oct 2022 12:48:25 +0200 Subject: [PATCH 0889/1197] chore(deps): update dependency pytest to v7.2.0 (#814) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 1ce452ec15d3..fc2ee318f519 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.1.3 +pytest==7.2.0 mock==4.0.3 flaky==3.7.0 google-cloud-bigquery==3.3.5 From e151a1920ef83b849b8c97f872f69c068facafdb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Nov 2022 14:02:42 -0500 Subject: [PATCH 0890/1197] chore(python): update dependencies in .kokoro/requirements.txt (#819) Source-Link: https://github.com/googleapis/synthtool/commit/e3a1277ac35fc88c09db1930533e24292b132ced Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/requirements.txt | 325 ++++++++++-------- packages/google-cloud-pubsub/noxfile.py | 11 +- 3 files changed, 187 insertions(+), 151 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 3815c983cb16..12edee77695a 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 
+13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:7a40313731a7cb1454eef6b33d3446ebb121836738dc3ab3d2d3ded5268c35b6 + digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index d15994bac93c..31425f164783 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.6.15 \ - --hash=sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d \ - --hash=sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412 +certifi==2022.9.24 \ + --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ + --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ @@ -110,29 +110,33 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==37.0.4 \ - --hash=sha256:190f82f3e87033821828f60787cfa42bff98404483577b591429ed99bed39d59 \ - --hash=sha256:2be53f9f5505673eeda5f2736bea736c40f051a739bfae2f92d18aed1eb54596 \ - --hash=sha256:30788e070800fec9bbcf9faa71ea6d8068f5136f60029759fd8c3efec3c9dcb3 \ - --hash=sha256:3d41b965b3380f10e4611dbae366f6dc3cefc7c9ac4e8842a806b9672ae9add5 \ - --hash=sha256:4c590ec31550a724ef893c50f9a97a0c14e9c851c85621c5650d699a7b88f7ab \ - --hash=sha256:549153378611c0cca1042f20fd9c5030d37a72f634c9326e225c9f666d472884 \ - 
--hash=sha256:63f9c17c0e2474ccbebc9302ce2f07b55b3b3fcb211ded18a42d5764f5c10a82 \ - --hash=sha256:6bc95ed67b6741b2607298f9ea4932ff157e570ef456ef7ff0ef4884a134cc4b \ - --hash=sha256:7099a8d55cd49b737ffc99c17de504f2257e3787e02abe6d1a6d136574873441 \ - --hash=sha256:75976c217f10d48a8b5a8de3d70c454c249e4b91851f6838a4e48b8f41eb71aa \ - --hash=sha256:7bc997818309f56c0038a33b8da5c0bfbb3f1f067f315f9abd6fc07ad359398d \ - --hash=sha256:80f49023dd13ba35f7c34072fa17f604d2f19bf0989f292cedf7ab5770b87a0b \ - --hash=sha256:91ce48d35f4e3d3f1d83e29ef4a9267246e6a3be51864a5b7d2247d5086fa99a \ - --hash=sha256:a958c52505c8adf0d3822703078580d2c0456dd1d27fabfb6f76fe63d2971cd6 \ - --hash=sha256:b62439d7cd1222f3da897e9a9fe53bbf5c104fff4d60893ad1355d4c14a24157 \ - --hash=sha256:b7f8dd0d4c1f21759695c05a5ec8536c12f31611541f8904083f3dc582604280 \ - --hash=sha256:d204833f3c8a33bbe11eda63a54b1aad7aa7456ed769a982f21ec599ba5fa282 \ - --hash=sha256:e007f052ed10cc316df59bc90fbb7ff7950d7e2919c9757fd42a2b8ecf8a5f67 \ - --hash=sha256:f2dcb0b3b63afb6df7fd94ec6fbddac81b5492513f7b0436210d390c14d46ee8 \ - --hash=sha256:f721d1885ecae9078c3f6bbe8a88bc0786b6e749bf32ccec1ef2b18929a05046 \ - --hash=sha256:f7a6de3e98771e183645181b3627e2563dcde3ce94a9e42a3f427d2255190327 \ - --hash=sha256:f8c0a6e9e1dd3eb0414ba320f85da6b0dcbd543126e30fcc546e7372a7fbf3b9 +cryptography==38.0.3 \ + --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ + --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ + --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ + --hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ + --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ + --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ + --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ + 
--hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ + --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ + --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ + --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ + --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ + --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ + --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ + --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ + --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ + --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ + --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ + --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ + --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ + --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ + --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ + --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ + --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ + --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ + --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 # via # gcp-releasetool # secretstorage @@ -148,23 +152,23 @@ filelock==3.8.0 \ --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 # via virtualenv -gcp-docuploader==0.6.3 \ - --hash=sha256:ba8c9d76b3bbac54b0311c503a373b00edc2dc02d6d54ea9507045adb8e870f7 \ - 
--hash=sha256:c0f5aaa82ce1854a386197e4e359b120ad6d4e57ae2c812fce42219a3288026b +gcp-docuploader==0.6.4 \ + --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ + --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.8.7 \ - --hash=sha256:3d2a67c9db39322194afb3b427e9cb0476ce8f2a04033695f0aeb63979fc2b37 \ - --hash=sha256:5e4d28f66e90780d77f3ecf1e9155852b0c3b13cbccb08ab07e66b2357c8da8d +gcp-releasetool==1.9.1 \ + --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ + --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b # via -r requirements.in -google-api-core==2.8.2 \ - --hash=sha256:06f7244c640322b508b125903bb5701bebabce8832f85aba9335ec00b3d02edc \ - --hash=sha256:93c6a91ccac79079ac6bbf8b74ee75db970cc899278b97d53bc012f35908cf50 +google-api-core==2.10.2 \ + --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ + --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e # via # google-cloud-core # google-cloud-storage -google-auth==2.11.0 \ - --hash=sha256:be62acaae38d0049c21ca90f27a23847245c9f161ff54ede13af2cb6afecbac9 \ - --hash=sha256:ed65ecf9f681832298e29328e1ef0a3676e3732b2e56f41532d45f70a22de0fb +google-auth==2.14.0 \ + --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ + --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d # via # gcp-releasetool # google-api-core @@ -178,72 +182,97 @@ google-cloud-storage==2.5.0 \ --hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 # via gcp-docuploader -google-crc32c==1.3.0 \ - --hash=sha256:04e7c220798a72fd0f08242bc8d7a05986b2a08a0573396187fd32c1dcdd58b3 \ - --hash=sha256:05340b60bf05b574159e9bd940152a47d38af3fb43803ffe71f11d704b7696a6 \ - 
--hash=sha256:12674a4c3b56b706153a358eaa1018c4137a5a04635b92b4652440d3d7386206 \ - --hash=sha256:127f9cc3ac41b6a859bd9dc4321097b1a4f6aa7fdf71b4f9227b9e3ebffb4422 \ - --hash=sha256:13af315c3a0eec8bb8b8d80b8b128cb3fcd17d7e4edafc39647846345a3f003a \ - --hash=sha256:1926fd8de0acb9d15ee757175ce7242e235482a783cd4ec711cc999fc103c24e \ - --hash=sha256:226f2f9b8e128a6ca6a9af9b9e8384f7b53a801907425c9a292553a3a7218ce0 \ - --hash=sha256:276de6273eb074a35bc598f8efbc00c7869c5cf2e29c90748fccc8c898c244df \ - --hash=sha256:318f73f5484b5671f0c7f5f63741ab020a599504ed81d209b5c7129ee4667407 \ - --hash=sha256:3bbce1be3687bbfebe29abdb7631b83e6b25da3f4e1856a1611eb21854b689ea \ - --hash=sha256:42ae4781333e331a1743445931b08ebdad73e188fd554259e772556fc4937c48 \ - --hash=sha256:58be56ae0529c664cc04a9c76e68bb92b091e0194d6e3c50bea7e0f266f73713 \ - --hash=sha256:5da2c81575cc3ccf05d9830f9e8d3c70954819ca9a63828210498c0774fda1a3 \ - --hash=sha256:6311853aa2bba4064d0c28ca54e7b50c4d48e3de04f6770f6c60ebda1e975267 \ - --hash=sha256:650e2917660e696041ab3dcd7abac160b4121cd9a484c08406f24c5964099829 \ - --hash=sha256:6a4db36f9721fdf391646685ecffa404eb986cbe007a3289499020daf72e88a2 \ - --hash=sha256:779cbf1ce375b96111db98fca913c1f5ec11b1d870e529b1dc7354b2681a8c3a \ - --hash=sha256:7f6fe42536d9dcd3e2ffb9d3053f5d05221ae3bbcefbe472bdf2c71c793e3183 \ - --hash=sha256:891f712ce54e0d631370e1f4997b3f182f3368179198efc30d477c75d1f44942 \ - --hash=sha256:95c68a4b9b7828ba0428f8f7e3109c5d476ca44996ed9a5f8aac6269296e2d59 \ - --hash=sha256:96a8918a78d5d64e07c8ea4ed2bc44354e3f93f46a4866a40e8db934e4c0d74b \ - --hash=sha256:9c3cf890c3c0ecfe1510a452a165431b5831e24160c5fcf2071f0f85ca5a47cd \ - --hash=sha256:9f58099ad7affc0754ae42e6d87443299f15d739b0ce03c76f515153a5cda06c \ - --hash=sha256:a0b9e622c3b2b8d0ce32f77eba617ab0d6768b82836391e4f8f9e2074582bf02 \ - --hash=sha256:a7f9cbea4245ee36190f85fe1814e2d7b1e5f2186381b082f5d59f99b7f11328 \ - --hash=sha256:bab4aebd525218bab4ee615786c4581952eadc16b1ff031813a2fd51f0cc7b08 \ - 
--hash=sha256:c124b8c8779bf2d35d9b721e52d4adb41c9bfbde45e6a3f25f0820caa9aba73f \ - --hash=sha256:c9da0a39b53d2fab3e5467329ed50e951eb91386e9d0d5b12daf593973c3b168 \ - --hash=sha256:ca60076c388728d3b6ac3846842474f4250c91efbfe5afa872d3ffd69dd4b318 \ - --hash=sha256:cb6994fff247987c66a8a4e550ef374671c2b82e3c0d2115e689d21e511a652d \ - --hash=sha256:d1c1d6236feab51200272d79b3d3e0f12cf2cbb12b208c835b175a21efdb0a73 \ - --hash=sha256:dd7760a88a8d3d705ff562aa93f8445ead54f58fd482e4f9e2bafb7e177375d4 \ - --hash=sha256:dda4d8a3bb0b50f540f6ff4b6033f3a74e8bf0bd5320b70fab2c03e512a62812 \ - --hash=sha256:e0f1ff55dde0ebcfbef027edc21f71c205845585fffe30d4ec4979416613e9b3 \ - --hash=sha256:e7a539b9be7b9c00f11ef16b55486141bc2cdb0c54762f84e3c6fc091917436d \ - --hash=sha256:eb0b14523758e37802f27b7f8cd973f5f3d33be7613952c0df904b68c4842f0e \ - --hash=sha256:ed447680ff21c14aaceb6a9f99a5f639f583ccfe4ce1a5e1d48eb41c3d6b3217 \ - --hash=sha256:f52a4ad2568314ee713715b1e2d79ab55fab11e8b304fd1462ff5cccf4264b3e \ - --hash=sha256:fbd60c6aaa07c31d7754edbc2334aef50601b7f1ada67a96eb1eb57c7c72378f \ - --hash=sha256:fc28e0db232c62ca0c3600884933178f0825c99be4474cdd645e378a10588125 \ - --hash=sha256:fe31de3002e7b08eb20823b3735b97c86c5926dd0581c7710a680b418a8709d4 \ - --hash=sha256:fec221a051150eeddfdfcff162e6db92c65ecf46cb0f7bb1bf812a1520ec026b \ - --hash=sha256:ff71073ebf0e42258a42a0b34f2c09ec384977e7f6808999102eedd5b49920e3 +google-crc32c==1.5.0 \ + --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ + --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ + --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ + --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ + --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ + --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ + 
--hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ + --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ + --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ + --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ + --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ + --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ + --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ + --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ + --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ + --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ + --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ + --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ + --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ + --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ + --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ + --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ + --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ + --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ + --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ + --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ + --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ + --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ + --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ + --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ + 
--hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ + --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ + --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ + --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ + --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ + --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ + --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ + --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ + --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ + --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ + --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ + --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ + --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ + --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ + --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ + --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ + --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ + --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ + --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ + --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ + --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ + --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ + --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ + --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ + 
--hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ + --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ + --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ + --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ + --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ + --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ + --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ + --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ + --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ + --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ + --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ + --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ + --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ + --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 # via google-resumable-media -google-resumable-media==2.3.3 \ - --hash=sha256:27c52620bd364d1c8116eaac4ea2afcbfb81ae9139fb3199652fcac1724bfb6c \ - --hash=sha256:5b52774ea7a829a8cdaa8bd2d4c3d4bc660c91b30857ab2668d0eb830f4ea8c5 +google-resumable-media==2.4.0 \ + --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ + --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage googleapis-common-protos==1.56.4 \ --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 # via google-api-core -idna==3.3 \ - --hash=sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff \ - 
--hash=sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d +idna==3.4 \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==4.12.0 \ - --hash=sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670 \ - --hash=sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23 +importlib-metadata==5.0.0 \ + --hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ + --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in # twine -jaraco-classes==3.2.2 \ - --hash=sha256:6745f113b0b588239ceb49532aa09c3ebb947433ce311ef2f8e3ad64ebb74594 \ - --hash=sha256:e6ef6fd3fcf4579a7a019d87d1e56a883f4e4c35cfe925f86731abc58804e647 +jaraco-classes==3.2.3 \ + --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ + --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -255,9 +284,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.0 \ - --hash=sha256:4c32a31174faaee48f43a7e2c7e9c3216ec5e95acf22a2bebfb4a1d05056ee44 \ - --hash=sha256:98f060ec95ada2ab910c195a2d4317be6ef87936a766b239c46aa3c7aac4f0db +keyring==23.9.3 \ + --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ + --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 # via # gcp-releasetool # twine @@ -303,9 +332,9 @@ markupsafe==2.1.1 \ --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 
# via jinja2 -more-itertools==8.14.0 \ - --hash=sha256:1bc4f91ee5b1b31ac7ceacc17c09befe6a40a503907baf9c839c229b5095cfd2 \ - --hash=sha256:c09443cd3d5438b8dafccd867a6bc1cb0894389e90cb53d227456b0b0bccb750 +more-itertools==9.0.0 \ + --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ + --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes nox==2022.8.7 \ --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ @@ -325,34 +354,34 @@ platformdirs==2.5.2 \ --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 # via virtualenv -protobuf==3.20.2 \ - --hash=sha256:03d76b7bd42ac4a6e109742a4edf81ffe26ffd87c5993126d894fe48a120396a \ - --hash=sha256:09e25909c4297d71d97612f04f41cea8fa8510096864f2835ad2f3b3df5a5559 \ - --hash=sha256:18e34a10ae10d458b027d7638a599c964b030c1739ebd035a1dfc0e22baa3bfe \ - --hash=sha256:291fb4307094bf5ccc29f424b42268640e00d5240bf0d9b86bf3079f7576474d \ - --hash=sha256:2c0b040d0b5d5d207936ca2d02f00f765906622c07d3fa19c23a16a8ca71873f \ - --hash=sha256:384164994727f274cc34b8abd41a9e7e0562801361ee77437099ff6dfedd024b \ - --hash=sha256:3cb608e5a0eb61b8e00fe641d9f0282cd0eedb603be372f91f163cbfbca0ded0 \ - --hash=sha256:5d9402bf27d11e37801d1743eada54372f986a372ec9679673bfcc5c60441151 \ - --hash=sha256:712dca319eee507a1e7df3591e639a2b112a2f4a62d40fe7832a16fd19151750 \ - --hash=sha256:7a5037af4e76c975b88c3becdf53922b5ffa3f2cddf657574a4920a3b33b80f3 \ - --hash=sha256:8228e56a865c27163d5d1d1771d94b98194aa6917bcfb6ce139cbfa8e3c27334 \ - --hash=sha256:84a1544252a933ef07bb0b5ef13afe7c36232a774affa673fc3636f7cee1db6c \ - --hash=sha256:84fe5953b18a383fd4495d375fe16e1e55e0a3afe7b4f7b4d01a3a0649fcda9d \ - --hash=sha256:9c673c8bfdf52f903081816b9e0e612186684f4eb4c17eeb729133022d6032e3 \ - --hash=sha256:9f876a69ca55aed879b43c295a328970306e8e80a263ec91cf6e9189243c613b \ - 
--hash=sha256:a9e5ae5a8e8985c67e8944c23035a0dff2c26b0f5070b2f55b217a1c33bbe8b1 \ - --hash=sha256:b4fdb29c5a7406e3f7ef176b2a7079baa68b5b854f364c21abe327bbeec01cdb \ - --hash=sha256:c184485e0dfba4dfd451c3bd348c2e685d6523543a0f91b9fd4ae90eb09e8422 \ - --hash=sha256:c9cdf251c582c16fd6a9f5e95836c90828d51b0069ad22f463761d27c6c19019 \ - --hash=sha256:e39cf61bb8582bda88cdfebc0db163b774e7e03364bbf9ce1ead13863e81e359 \ - --hash=sha256:e8fbc522303e09036c752a0afcc5c0603e917222d8bedc02813fd73b4b4ed804 \ - --hash=sha256:f34464ab1207114e73bba0794d1257c150a2b89b7a9faf504e00af7c9fd58978 \ - --hash=sha256:f52dabc96ca99ebd2169dadbe018824ebda08a795c7684a0b7d203a290f3adb0 +protobuf==3.20.3 \ + --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ + --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ + --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ + --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ + --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ + --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ + --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ + --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ + --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ + --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ + --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ + --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ + --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ + --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ + --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ + 
--hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ + --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ + --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ + --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ + --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ + --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ + --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee # via # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -377,9 +406,9 @@ pygments==2.13.0 \ # via # readme-renderer # rich -pyjwt==2.4.0 \ - --hash=sha256:72d1d253f32dbd4f5c88eaf1fdc62f3a19f676ccbadb9dbc5d07e951b2b26daf \ - --hash=sha256:d42908208c699b3b973cbeb01a969ba6a96c821eefb1c5bfe4c390c01d67abba +pyjwt==2.6.0 \ + --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ + --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 # via gcp-releasetool pyparsing==3.0.9 \ --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ @@ -392,9 +421,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.0 \ - --hash=sha256:07b7ea234e03e58f77cc222e206e6abb8f4c0435becce5104794ee591f9301c5 \ - --hash=sha256:9fa416704703e509eeb900696751c908ddeb2011319d93700d8f18baff887a69 +readme-renderer==37.3 \ + --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ + --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # 
via twine requests==2.28.1 \ --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ @@ -405,17 +434,17 @@ requests==2.28.1 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.9.1 \ - --hash=sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f \ - --hash=sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0 +requests-toolbelt==0.10.1 \ + --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ + --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.5.1 \ - --hash=sha256:2eb4e6894cde1e017976d2975ac210ef515d7548bc595ba20e195fb9628acdeb \ - --hash=sha256:63a5c5ce3673d3d5fbbf23cd87e11ab84b6b451436f1b7f19ec54b6bc36ed7ca +rich==12.6.0 \ + --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ + --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -437,9 +466,9 @@ twine==4.0.1 \ --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 # via -r requirements.in -typing-extensions==4.3.0 \ - --hash=sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02 \ - --hash=sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6 +typing-extensions==4.4.0 \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in urllib3==1.26.12 \ --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ @@ -447,9 +476,9 
@@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.4 \ - --hash=sha256:014f766e4134d0008dcaa1f95bafa0fb0f575795d07cae50b1bee514185d6782 \ - --hash=sha256:035ed57acce4ac35c82c9d8802202b0e71adac011a511ff650cbcf9635006a22 +virtualenv==20.16.6 \ + --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ + --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ @@ -459,13 +488,13 @@ wheel==0.37.1 \ --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ --hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 # via -r requirements.in -zipp==3.8.1 \ - --hash=sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2 \ - --hash=sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009 +zipp==3.10.0 \ + --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ + --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.2.0 \ - --hash=sha256:7f4bc85450898a09f76ebf28b72fa25bc7111f6c7d665d514a60bba9c75ef2a9 \ - --hash=sha256:a3ca5857c89f82f5c9410e8508cb32f4872a3bafd4aa7ae122a24ca33bccc750 +setuptools==65.5.0 \ + --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ + --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 # via -r requirements.in diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 332e4a4877f4..a090378e40e3 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -329,7 +329,11 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", 
"recommonmark") + session.install( + "sphinx==4.0.1", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -352,7 +356,10 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml" + "sphinx==4.0.1", + "alabaster", + "recommonmark", + "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From fa516b1a0f914b83b4397c3677c712269b8fada9 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 8 Nov 2022 22:06:14 +0100 Subject: [PATCH 0891/1197] chore(deps): update dependency google-cloud-bigquery to v3.3.6 (#818) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index fc2ee318f519..b52e56f395a0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.0 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.3.5 +google-cloud-bigquery==3.3.6 From 2a7d4ce3a339517df18c578d6829d8b955a08b73 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 11 Nov 2022 10:38:38 -0500 Subject: [PATCH 0892/1197] fix: remove suboptimal logic in leasing behavior (#816) * fix: subtract time spent leasing from max snooze value * Revert "fix: subtract time spent leasing from max snooze value" This reverts commit 01f7ff4319508bc570dd8a335ffe2968102157b7. 
* fix: remove suboptimal list operations in leasing * remove typing * add default_deadline as separate argument to send_unary_modack * remove unused import * fix test_streaming_pull_manager * fix test_streaming_pull_manager lint * drop expired_ack_ids from lease management * add return value to _send_lease_modacks in unit tests * remove unused import * addressing comments * fix comment * fix modify_deadline_seconds generator * fix modify_deadline_seconds generator * fix subscripting in streaming_pull_manager * fix mypy checks * fix mypy checks * fix lint --- .../subscriber/_protocol/dispatcher.py | 38 +++++++---- .../pubsub_v1/subscriber/_protocol/leaser.py | 25 ++++++- .../_protocol/streaming_pull_manager.py | 65 +++++++++++-------- .../pubsub_v1/subscriber/test_dispatcher.py | 65 +++++++++++++++---- .../unit/pubsub_v1/subscriber/test_leaser.py | 49 ++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 42 ++++++++++-- 6 files changed, 228 insertions(+), 56 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index ed2f5d21777a..15ad4abb3ab6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -319,7 +319,11 @@ def lease(self, items: Sequence[requests.LeaseRequest]) -> None: self._manager.leaser.add(items) self._manager.maybe_pause_consumer() - def modify_ack_deadline(self, items: Sequence[requests.ModAckRequest]) -> None: + def modify_ack_deadline( + self, + items: Sequence[requests.ModAckRequest], + default_deadline: Optional[float] = None, + ) -> None: """Modify the ack deadline for the given messages. 
Args: @@ -337,16 +341,28 @@ def modify_ack_deadline(self, items: Sequence[requests.ModAckRequest]) -> None: req.ack_id: req for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) } - # no further work needs to be done for `requests_to_retry` - requests_completed, requests_to_retry = self._manager.send_unary_modack( - modify_deadline_ack_ids=list( - itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE) - ), - modify_deadline_seconds=list( - itertools.islice(deadline_seconds_gen, _ACK_IDS_BATCH_SIZE) - ), - ack_reqs_dict=ack_reqs_dict, - ) + requests_to_retry: List[requests.ModAckRequest] + if default_deadline is None: + # no further work needs to be done for `requests_to_retry` + _, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=list( + itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE) + ), + modify_deadline_seconds=list( + itertools.islice(deadline_seconds_gen, _ACK_IDS_BATCH_SIZE) + ), + ack_reqs_dict=ack_reqs_dict, + default_deadline=None, + ) + else: + _, requests_to_retry = self._manager.send_unary_modack( + modify_deadline_ack_ids=itertools.islice( + ack_ids_gen, _ACK_IDS_BATCH_SIZE + ), + modify_deadline_seconds=None, + ack_reqs_dict=ack_reqs_dict, + default_deadline=default_deadline, + ) assert ( len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE ), "Too many requests to be retried." diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 508f4d7cefcb..16018e384745 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -187,6 +187,7 @@ def maintain_leases(self) -> None: # We do not actually call `modify_ack_deadline` over and over # because it is more efficient to make a single request. 
ack_ids = leased_messages.keys() + expired_ack_ids = set() if ack_ids: _LOGGER.debug("Renewing lease for %d ack IDs.", len(ack_ids)) @@ -197,8 +198,25 @@ def maintain_leases(self) -> None: # is inactive. assert self._manager.dispatcher is not None ack_id_gen = (ack_id for ack_id in ack_ids) - self._manager._send_lease_modacks(ack_id_gen, deadline) + expired_ack_ids = self._manager._send_lease_modacks( + ack_id_gen, deadline + ) + start_time = time.time() + # If exactly once delivery is enabled, we should drop all expired ack_ids from lease management. + if self._manager._exactly_once_delivery_enabled() and len(expired_ack_ids): + assert self._manager.dispatcher is not None + self._manager.dispatcher.drop( + [ + requests.DropRequest( + ack_id, + leased_messages.get(ack_id).size, # type: ignore + leased_messages.get(ack_id).ordering_key, # type: ignore + ) + for ack_id in expired_ack_ids + if ack_id in leased_messages + ] + ) # Now wait an appropriate period of time and do this again. # # We determine the appropriate period of time based on a random @@ -208,7 +226,10 @@ def maintain_leases(self) -> None: # This maximum time attempts to prevent ack expiration before new lease modacks arrive at the server. # This use of jitter (http://bit.ly/2s2ekL7) helps decrease contention in cases # where there are many clients. - snooze = random.uniform(_MAX_BATCH_LATENCY, deadline * 0.9) + # If we spent any time iterating over expired acks, we should subtract this from the deadline. 
+ snooze = random.uniform( + _MAX_BATCH_LATENCY, (deadline * 0.9 - (time.time() - start_time)) + ) _LOGGER.debug("Snoozing lease management for %f seconds.", snooze) self._stop_event.wait(timeout=snooze) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 89dc93e74ee8..13974ebe4bad 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -20,7 +20,7 @@ import logging import threading import typing -from typing import Any, Dict, Callable, Iterable, List, Optional, Tuple +from typing import Any, Dict, Callable, Iterable, List, Optional, Set, Tuple import uuid import grpc # type: ignore @@ -686,7 +686,11 @@ def send_unary_ack( return requests_completed, requests_to_retry def send_unary_modack( - self, modify_deadline_ack_ids, modify_deadline_seconds, ack_reqs_dict + self, + modify_deadline_ack_ids, + modify_deadline_seconds, + ack_reqs_dict, + default_deadline=None, ) -> Tuple[List[requests.ModAckRequest], List[requests.ModAckRequest]]: """Send a request using a separate unary request instead of over the stream. @@ -694,22 +698,32 @@ def send_unary_modack( error is re-raised. """ assert modify_deadline_ack_ids + # Either we have a generator or a single deadline. + assert modify_deadline_seconds is None or default_deadline is None error_status = None modack_errors_dict = None try: - # Send ack_ids with the same deadline seconds together. 
- deadline_to_ack_ids = collections.defaultdict(list) - - for n, ack_id in enumerate(modify_deadline_ack_ids): - deadline = modify_deadline_seconds[n] - deadline_to_ack_ids[deadline].append(ack_id) - - for deadline, ack_ids in deadline_to_ack_ids.items(): + if default_deadline is None: + # Send ack_ids with the same deadline seconds together. + deadline_to_ack_ids = collections.defaultdict(list) + + for n, ack_id in enumerate(modify_deadline_ack_ids): + deadline = modify_deadline_seconds[n] + deadline_to_ack_ids[deadline].append(ack_id) + + for deadline, ack_ids in deadline_to_ack_ids.items(): + self._client.modify_ack_deadline( + subscription=self._subscription, + ack_ids=ack_ids, + ack_deadline_seconds=deadline, + ) + else: + # We can send all requests with the default deadline. self._client.modify_ack_deadline( subscription=self._subscription, - ack_ids=ack_ids, - ack_deadline_seconds=deadline, + ack_ids=modify_deadline_ack_ids, + ack_deadline_seconds=default_deadline, ) except exceptions.GoogleAPICallError as exc: _LOGGER.debug( @@ -990,21 +1004,20 @@ def _get_initial_request( def _send_lease_modacks( self, ack_ids: Iterable[str], ack_deadline: float, warn_on_invalid=True - ) -> List[str]: + ) -> Set[str]: exactly_once_enabled = False with self._exactly_once_enabled_lock: exactly_once_enabled = self._exactly_once_enabled if exactly_once_enabled: - items = [] - for ack_id in ack_ids: - future = futures.Future() - request = requests.ModAckRequest(ack_id, ack_deadline, future) - items.append(request) + items = [ + requests.ModAckRequest(ack_id, ack_deadline, futures.Future()) + for ack_id in ack_ids + ] assert self._dispatcher is not None - self._dispatcher.modify_ack_deadline(items) + self._dispatcher.modify_ack_deadline(items, ack_deadline) - expired_ack_ids = [] + expired_ack_ids = set() for req in items: try: assert req.future is not None @@ -1019,7 +1032,7 @@ def _send_lease_modacks( exc_info=True, ) if ack_error.error_code == 
AcknowledgeStatus.INVALID_ACK_ID: - expired_ack_ids.append(req.ack_id) + expired_ack_ids.add(req.ack_id) return expired_ack_ids else: items = [ @@ -1027,8 +1040,8 @@ def _send_lease_modacks( for ack_id in ack_ids ] assert self._dispatcher is not None - self._dispatcher.modify_ack_deadline(items) - return [] + self._dispatcher.modify_ack_deadline(items, ack_deadline) + return set() def _exactly_once_delivery_enabled(self) -> bool: """Whether exactly-once delivery is enabled for the subscription.""" @@ -1082,10 +1095,8 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: # modack the messages we received, as this tells the server that we've # received them. ack_id_gen = (message.ack_id for message in received_messages) - expired_ack_ids = set( - self._send_lease_modacks( - ack_id_gen, self.ack_deadline, warn_on_invalid=False - ) + expired_ack_ids = self._send_lease_modacks( + ack_id_gen, self.ack_deadline, warn_on_invalid=False ) with self._pause_resume_lock: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index a5107fe7b0e4..d4813911c25a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -645,16 +645,20 @@ def test_nack(): ] manager.send_unary_modack.return_value = (items, []) dispatcher_.nack(items) + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 1 - manager.send_unary_modack.assert_called_once_with( - modify_deadline_ack_ids=["ack_id_string"], - modify_deadline_seconds=[0], - ack_reqs_dict={ + for call in calls: + modify_deadline_ack_ids = call[1]["modify_deadline_ack_ids"] + assert list(modify_deadline_ack_ids) == ["ack_id_string"] + modify_deadline_seconds = call[1]["modify_deadline_seconds"] + assert list(modify_deadline_seconds) == [0] + ack_reqs_dict = 
call[1]["ack_reqs_dict"] + assert ack_reqs_dict == { "ack_id_string": requests.ModAckRequest( ack_id="ack_id_string", seconds=0, future=None ) - }, - ) + } def test_modify_ack_deadline(): @@ -666,12 +670,16 @@ def test_modify_ack_deadline(): items = [requests.ModAckRequest(ack_id="ack_id_string", seconds=60, future=None)] manager.send_unary_modack.return_value = (items, []) dispatcher_.modify_ack_deadline(items) + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 1 - manager.send_unary_modack.assert_called_once_with( - modify_deadline_ack_ids=["ack_id_string"], - modify_deadline_seconds=[60], - ack_reqs_dict={"ack_id_string": items[0]}, - ) + for call in calls: + modify_deadline_ack_ids = call[1]["modify_deadline_ack_ids"] + assert list(modify_deadline_ack_ids) == ["ack_id_string"] + modify_deadline_seconds = call[1]["modify_deadline_seconds"] + assert list(modify_deadline_seconds) == [60] + ack_reqs_dict = call[1]["ack_reqs_dict"] + assert ack_reqs_dict == {"ack_id_string": items[0]} def test_modify_ack_deadline_splitting_large_payload(): @@ -695,7 +703,7 @@ def test_modify_ack_deadline_splitting_large_payload(): sent_ack_ids = collections.Counter() for call in calls: - modack_ackids = call[1]["modify_deadline_ack_ids"] + modack_ackids = list(call[1]["modify_deadline_ack_ids"]) assert len(modack_ackids) <= dispatcher._ACK_IDS_BATCH_SIZE sent_ack_ids.update(modack_ackids) @@ -703,6 +711,39 @@ def test_modify_ack_deadline_splitting_large_payload(): assert sent_ack_ids.most_common(1)[0][1] == 1 # each message MODACK-ed exactly once +def test_modify_ack_deadline_splitting_large_payload_with_default_deadline(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + items = [ + # use realistic lengths for ACK IDs (max 176 bytes) + requests.ModAckRequest(ack_id=str(i).zfill(176), seconds=60, future=None) + for i in range(5001) + ] + 
manager.send_unary_modack.return_value = (items, []) + dispatcher_.modify_ack_deadline(items, 60) + + calls = manager.send_unary_modack.call_args_list + assert len(calls) == 6 + + all_ack_ids = {item.ack_id for item in items} + sent_ack_ids = collections.Counter() + + for call in calls: + modack_ackids = list(call[1]["modify_deadline_ack_ids"]) + modack_deadline_seconds = call[1]["modify_deadline_seconds"] + default_deadline = call[1]["default_deadline"] + assert len(list(modack_ackids)) <= dispatcher._ACK_IDS_BATCH_SIZE + assert modack_deadline_seconds is None + assert default_deadline == 60 + sent_ack_ids.update(modack_ackids) + + assert set(sent_ack_ids) == all_ack_ids # all messages should have been MODACK-ed + assert sent_ack_ids.most_common(1)[0][1] == 1 # each message MODACK-ed exactly once + + @mock.patch("threading.Thread", autospec=True) def test_start(thread): manager = mock.create_autospec( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 7e11e3ccbd0a..f38717c6f378 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -105,6 +105,7 @@ def test_maintain_leases_inactive_manager(caplog): [requests.LeaseRequest(ack_id="my_ack_ID", byte_size=42, ordering_key="")] ) + manager._send_lease_modacks.return_value = set() leaser_.maintain_leases() # Leases should still be maintained even if the manager is inactive. 
@@ -119,6 +120,7 @@ def test_maintain_leases_stopped(caplog): leaser_ = leaser.Leaser(manager) leaser_.stop() + manager._send_lease_modacks.return_value = set() leaser_.maintain_leases() assert "exiting" in caplog.text @@ -142,6 +144,7 @@ def test_maintain_leases_ack_ids(): [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] ) + manager._send_lease_modacks.return_value = set() leaser_.maintain_leases() assert len(manager._send_lease_modacks.mock_calls) == 1 @@ -151,6 +154,51 @@ def test_maintain_leases_ack_ids(): assert call.args[1] == 10 +def test_maintain_leases_expired_ack_ids_ignored(): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + leaser_.add( + [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] + ) + manager._exactly_once_delivery_enabled.return_value = False + manager._send_lease_modacks.return_value = set(["my ack id"]) + leaser_.maintain_leases() + + assert len(manager._send_lease_modacks.mock_calls) == 1 + + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["my ack id"] + assert call.args[1] == 10 + + +def test_maintain_leases_expired_ack_ids_exactly_once(): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + leaser_.add( + [requests.LeaseRequest(ack_id="my ack id", byte_size=50, ordering_key="")] + ) + manager._exactly_once_delivery_enabled.return_value = True + manager._send_lease_modacks.return_value = set(["my ack id"]) + leaser_.maintain_leases() + + assert len(manager._send_lease_modacks.mock_calls) == 1 + + call = manager._send_lease_modacks.mock_calls[0] + ack_ids = list(call.args[0]) + assert ack_ids == ["my ack id"] + assert call.args[1] == 10 + + assert len(manager.dispatcher.drop.mock_calls) == 1 + call = manager.dispatcher.drop.mock_calls[0] + drop_requests = list(call.args[0]) + assert drop_requests[0].ack_id == "my ack id" + 
assert drop_requests[0].byte_size == 50 + assert drop_requests[0].ordering_key == "" + + def test_maintain_leases_no_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) @@ -187,6 +235,7 @@ def test_maintain_leases_outdated_items(time): # Now make sure time reports that we are past the end of our timeline. time.return_value = manager.flow_control.max_lease_duration + 1 + manager._send_lease_modacks.return_value = set() leaser_.maintain_leases() # ack2, ack3, and ack4 should be renewed. ack1 should've been dropped diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 1f28b3f40575..e01299ef9227 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -674,6 +674,33 @@ def test_send_unary_modack(): ) +def test_send_unary_modack_default_deadline(): + manager = make_manager() + + ack_reqs_dict = { + "ack_id3": requests.ModAckRequest(ack_id="ack_id3", seconds=60, future=None), + "ack_id4": requests.ModAckRequest(ack_id="ack_id4", seconds=60, future=None), + "ack_id5": requests.ModAckRequest(ack_id="ack_id5", seconds=60, future=None), + } + manager.send_unary_modack( + modify_deadline_ack_ids=["ack_id3", "ack_id4", "ack_id5"], + modify_deadline_seconds=None, + ack_reqs_dict=ack_reqs_dict, + default_deadline=10, + ) + + manager._client.modify_ack_deadline.assert_has_calls( + [ + mock.call( + subscription=manager._subscription, + ack_ids=["ack_id3", "ack_id4", "ack_id5"], + ack_deadline_seconds=10, + ), + ], + any_order=True, + ) + + def test_send_unary_modack_exactly_once_enabled_with_futures(): manager = make_manager() manager._exactly_once_enabled = True @@ -1460,7 +1487,8 @@ def test__on_response_modifies_ack_deadline(): [ requests.ModAckRequest("ack_1", 18, 
None), requests.ModAckRequest("ack_2", 18, None), - ] + ], + 18, ) @@ -1521,6 +1549,7 @@ def test__on_response_modifies_ack_deadline_with_exactly_once_min_lease(): requests.ModAckRequest("ack_1", 10, None), requests.ModAckRequest("ack_2", 10, None), ] + assert call.args[1] == 10 # exactly_once should be enabled after this request b/c subscription_properties says so manager._on_response(response2) @@ -1534,6 +1563,8 @@ def test__on_response_modifies_ack_deadline_with_exactly_once_min_lease(): assert modack_reqs[0].seconds == 60 assert modack_reqs[1].ack_id == "ack_4" assert modack_reqs[1].seconds == 60 + modack_deadline = call.args[1] + assert modack_deadline == 60 def test__on_response_send_ack_deadline_after_enabling_exactly_once(): @@ -1610,7 +1641,8 @@ def test__on_response_no_leaser_overload(): [ requests.ModAckRequest("fack", 10, None), requests.ModAckRequest("back", 10, None), - ] + ], + 10, ) schedule_calls = scheduler.schedule.mock_calls @@ -1660,7 +1692,8 @@ def test__on_response_with_leaser_overload(): requests.ModAckRequest("fack", 10, None), requests.ModAckRequest("back", 10, None), requests.ModAckRequest("zack", 10, None), - ] + ], + 10, ) # one message should be scheduled, the flow control limits allow for it @@ -1740,7 +1773,8 @@ def test__on_response_with_ordering_keys(): requests.ModAckRequest("fack", 10, None), requests.ModAckRequest("back", 10, None), requests.ModAckRequest("zack", 10, None), - ] + ], + 10, ) # The first two messages should be scheduled, The third should be put on From ab5e103eedb462a5bcea5ad4f9ba6811c0f5a25e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 11 Nov 2022 11:04:19 -0500 Subject: [PATCH 0893/1197] chore(main): release 2.13.11 (#820) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ packages/google-cloud-pubsub/setup.py | 2 +- 2 files changed, 8 
insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 76c13f4f41fa..19b330fb1c2f 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.11](https://github.com/googleapis/python-pubsub/compare/v2.13.10...v2.13.11) (2022-11-11) + + +### Bug Fixes + +* Remove suboptimal logic in leasing behavior ([#816](https://github.com/googleapis/python-pubsub/issues/816)) ([f067af3](https://github.com/googleapis/python-pubsub/commit/f067af348b8d3deb72981c58d942e887c0efb5ff)) + ## [2.13.10](https://github.com/googleapis/python-pubsub/compare/v2.13.8...v2.13.10) (2022-10-14) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 134b4d30e1c7..883b499f77d0 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-pubsub" description = "Google Cloud Pub/Sub API client library" -version = "2.13.10" +version = "2.13.11" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 73e49cda200d962b298c9b23b01c16058aadef84 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 12 Nov 2022 13:13:29 -0500 Subject: [PATCH 0894/1197] chore: Update gapic-generator-python to v1.6.1 (#817) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update to gapic-generator-python 1.5.0 feat: add support for `google.cloud..__version__` PiperOrigin-RevId: 484665853 Source-Link: https://github.com/googleapis/googleapis/commit/8eb249a19db926c2fbc4ecf1dc09c0e521a88b22 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c8aa327b5f478865fc3fd91e3c2768e54e26ad44 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYzhhYTMyN2I1ZjQ3ODg2NWZjM2ZkOTFlM2MyNzY4ZTU0ZTI2YWQ0NCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * update version in gapic_version.py * chore: Update to gapic-generator-python 1.6.0 feat(python): Add typing to proto.Message based class attributes feat(python): Snippetgen handling of repeated enum field PiperOrigin-RevId: 487326846 Source-Link: https://github.com/googleapis/googleapis/commit/da380c77bb87ba0f752baf07605dd1db30e1f7e1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/61ef5762ee6731a0cbbfea22fd0eecee51ab1c8e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjFlZjU3NjJlZTY3MzFhMGNiYmZlYTIyZmQwZWVjZWU1MWFiMWM4ZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.6.1 PiperOrigin-RevId: 488036204 Source-Link: https://github.com/googleapis/googleapis/commit/08f275f5c1c0d99056e1cb68376323414459ee19 Source-Link: https://github.com/googleapis/googleapis-gen/commit/555c0945e60649e38739ae64bc45719cdf72178f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU1YzA5NDVlNjA2NDllMzg3MzlhZTY0YmM0NTcxOWNkZjcyMTc4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google/pubsub/__init__.py | 4 + .../google/pubsub/gapic_version.py | 16 + .../google/pubsub_v1/__init__.py | 4 + 
.../services/publisher/async_client.py | 100 +- .../pubsub_v1/services/publisher/client.py | 88 +- .../services/publisher/transports/base.py | 2 +- .../services/publisher/transports/grpc.py | 20 +- .../publisher/transports/grpc_asyncio.py | 16 +- .../services/schema_service/async_client.py | 80 +- .../services/schema_service/client.py | 74 +- .../schema_service/transports/base.py | 2 +- .../schema_service/transports/grpc.py | 20 +- .../schema_service/transports/grpc_asyncio.py | 16 +- .../services/subscriber/async_client.py | 160 +- .../pubsub_v1/services/subscriber/client.py | 136 +- .../services/subscriber/transports/base.py | 2 +- .../services/subscriber/transports/grpc.py | 20 +- .../subscriber/transports/grpc_asyncio.py | 16 +- .../google/pubsub_v1/types/pubsub.py | 316 +- .../google/pubsub_v1/types/schema.py | 48 +- .../snippet_metadata_google.pubsub.v1.json | 5020 +++++++++++++++++ .../testing/constraints-3.10.txt | 7 + .../testing/constraints-3.11.txt | 7 + .../testing/constraints-3.7.txt | 12 +- .../testing/constraints-3.8.txt | 7 + .../testing/constraints-3.9.txt | 7 + 26 files changed, 5662 insertions(+), 538 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/pubsub/gapic_version.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 7c94c23082b0..97953fcc072b 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.pubsub import gapic_version as package_version + +__version__ = package_version.__version__ + from google.pubsub_v1.services.publisher.client import PublisherClient from google.pubsub_v1.services.publisher.async_client import PublisherAsyncClient diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py new file mode 100644 index 000000000000..35859c3f7fc1 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 80fc23d59644..4a5351757005 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from google.pubsub import gapic_version as package_version + +__version__ = package_version.__version__ + from .services.publisher import PublisherClient from .services.publisher import PublisherAsyncClient diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index dbcd516b2ba9..269b273c9360 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core.client_options import ClientOptions @@ -163,9 +173,9 @@ def transport(self) -> PublisherTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, PublisherTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the publisher client. @@ -209,11 +219,11 @@ def __init__( async def create_topic( self, - request: Union[pubsub.Topic, dict] = None, + request: Optional[Union[pubsub.Topic, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. 
See the [resource @@ -247,7 +257,7 @@ async def sample_create_topic(): print(response) Args: - request (Union[google.pubsub_v1.types.Topic, dict]): + request (Optional[Union[google.pubsub_v1.types.Topic, dict]]): The request object. A topic resource. name (:class:`str`): Required. The name of the topic. It must have the format @@ -326,10 +336,10 @@ async def sample_create_topic(): async def update_topic( self, - request: Union[pubsub.UpdateTopicRequest, dict] = None, + request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Updates an existing topic. Note that certain @@ -365,7 +375,7 @@ async def sample_update_topic(): print(response) Args: - request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.UpdateTopicRequest, dict]]): The request object. Request for the UpdateTopic method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. @@ -419,12 +429,12 @@ async def sample_update_topic(): async def publish( self, - request: Union[pubsub.PublishRequest, dict] = None, + request: Optional[Union[pubsub.PublishRequest, dict]] = None, *, - topic: str = None, - messages: Sequence[pubsub.PubsubMessage] = None, + topic: Optional[str] = None, + messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. 
Returns ``NOT_FOUND`` if @@ -457,7 +467,7 @@ async def sample_publish(): print(response) Args: - request (Union[google.pubsub_v1.types.PublishRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.PublishRequest, dict]]): The request object. Request for the Publish method. topic (:class:`str`): Required. The messages in the request will be published @@ -467,7 +477,7 @@ async def sample_publish(): This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - messages (:class:`Sequence[google.pubsub_v1.types.PubsubMessage]`): + messages (:class:`MutableSequence[google.pubsub_v1.types.PubsubMessage]`): Required. The messages to publish. This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this @@ -544,11 +554,11 @@ async def sample_publish(): async def get_topic( self, - request: Union[pubsub.GetTopicRequest, dict] = None, + request: Optional[Union[pubsub.GetTopicRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. @@ -580,7 +590,7 @@ async def sample_get_topic(): print(response) Args: - request (Union[google.pubsub_v1.types.GetTopicRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.GetTopicRequest, dict]]): The request object. Request for the GetTopic method. topic (:class:`str`): Required. The name of the topic to get. 
Format is @@ -655,11 +665,11 @@ async def sample_get_topic(): async def list_topics( self, - request: Union[pubsub.ListTopicsRequest, dict] = None, + request: Optional[Union[pubsub.ListTopicsRequest, dict]] = None, *, - project: str = None, + project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. @@ -692,7 +702,7 @@ async def sample_list_topics(): print(response) Args: - request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ListTopicsRequest, dict]]): The request object. Request for the `ListTopics` method. project (:class:`str`): Required. The name of the project in which to list @@ -780,11 +790,11 @@ async def sample_list_topics(): async def list_topic_subscriptions( self, - request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, + request: Optional[Union[pubsub.ListTopicSubscriptionsRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsAsyncPager: r"""Lists the names of the attached subscriptions on this @@ -818,7 +828,7 @@ async def sample_list_topic_subscriptions(): print(response) Args: - request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]]): The request object. Request for the `ListTopicSubscriptions` method. 
topic (:class:`str`): @@ -908,11 +918,11 @@ async def sample_list_topic_subscriptions(): async def list_topic_snapshots( self, - request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, + request: Optional[Union[pubsub.ListTopicSnapshotsRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsAsyncPager: r"""Lists the names of the snapshots on this topic. Snapshots are @@ -950,7 +960,7 @@ async def sample_list_topic_snapshots(): print(response) Args: - request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]]): The request object. Request for the `ListTopicSnapshots` method. topic (:class:`str`): @@ -1040,11 +1050,11 @@ async def sample_list_topic_snapshots(): async def delete_topic( self, - request: Union[pubsub.DeleteTopicRequest, dict] = None, + request: Optional[Union[pubsub.DeleteTopicRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if @@ -1078,7 +1088,7 @@ async def sample_delete_topic(): await client.delete_topic(request=request) Args: - request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.DeleteTopicRequest, dict]]): The request object. Request for the `DeleteTopic` method. 
topic (:class:`str`): @@ -1145,10 +1155,10 @@ async def sample_delete_topic(): async def detach_subscription( self, - request: Union[pubsub.DetachSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.DetachSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. All messages retained @@ -1184,7 +1194,7 @@ async def sample_detach_subscription(): print(response) Args: - request (Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]]): The request object. Request for the DetachSubscription method. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1241,10 +1251,10 @@ async def sample_detach_subscription(): async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1361,10 +1371,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -1483,10 +1493,10 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index f8a640422f8d..d052293f85d0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -17,7 +17,18 @@ import functools import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import pkg_resources from google.api_core import client_options as client_options_lib @@ -63,7 +74,7 @@ class PublisherClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[PublisherTransport]: """Returns an appropriate transport class. @@ -381,8 +392,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, PublisherTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, PublisherTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the publisher client. 
@@ -396,7 +407,7 @@ def __init__( transport (Union[str, PublisherTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -426,6 +437,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -487,11 +499,11 @@ def __init__( def create_topic( self, - request: Union[pubsub.Topic, dict] = None, + request: Optional[Union[pubsub.Topic, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource @@ -595,10 +607,10 @@ def sample_create_topic(): def update_topic( self, - request: Union[pubsub.UpdateTopicRequest, dict] = None, + request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Updates an existing topic. 
Note that certain @@ -680,12 +692,12 @@ def sample_update_topic(): def publish( self, - request: Union[pubsub.PublishRequest, dict] = None, + request: Optional[Union[pubsub.PublishRequest, dict]] = None, *, - topic: str = None, - messages: Sequence[pubsub.PubsubMessage] = None, + topic: Optional[str] = None, + messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if @@ -728,7 +740,7 @@ def sample_publish(): This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - messages (Sequence[google.pubsub_v1.types.PubsubMessage]): + messages (MutableSequence[google.pubsub_v1.types.PubsubMessage]): Required. The messages to publish. This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this @@ -790,11 +802,11 @@ def sample_publish(): def get_topic( self, - request: Union[pubsub.GetTopicRequest, dict] = None, + request: Optional[Union[pubsub.GetTopicRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. 
@@ -890,11 +902,11 @@ def sample_get_topic(): def list_topics( self, - request: Union[pubsub.ListTopicsRequest, dict] = None, + request: Optional[Union[pubsub.ListTopicsRequest, dict]] = None, *, - project: str = None, + project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsPager: r"""Lists matching topics. @@ -1004,11 +1016,11 @@ def sample_list_topics(): def list_topic_subscriptions( self, - request: Union[pubsub.ListTopicSubscriptionsRequest, dict] = None, + request: Optional[Union[pubsub.ListTopicSubscriptionsRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsPager: r"""Lists the names of the attached subscriptions on this @@ -1121,11 +1133,11 @@ def sample_list_topic_subscriptions(): def list_topic_snapshots( self, - request: Union[pubsub.ListTopicSnapshotsRequest, dict] = None, + request: Optional[Union[pubsub.ListTopicSnapshotsRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsPager: r"""Lists the names of the snapshots on this topic. 
Snapshots are @@ -1242,11 +1254,11 @@ def sample_list_topic_snapshots(): def delete_topic( self, - request: Union[pubsub.DeleteTopicRequest, dict] = None, + request: Optional[Union[pubsub.DeleteTopicRequest, dict]] = None, *, - topic: str = None, + topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if @@ -1338,10 +1350,10 @@ def sample_delete_topic(): def detach_subscription( self, - request: Union[pubsub.DetachSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.DetachSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. All messages retained @@ -1439,10 +1451,10 @@ def __exit__(self, type, value, traceback): def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1560,10 +1572,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -1682,10 +1694,10 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: TimeoutType = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 11c085ffa7c0..d441f8113632 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -54,7 +54,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 0d41c36f2a13..ebb3f25e9be2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -51,14 +51,14 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - 
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -187,8 +187,8 @@ def __init__( def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index e236c165cae7..fb73f78a3f1a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -53,7 +53,7 @@ class PublisherGrpcAsyncIOTransport(PublisherTransport): def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -96,15 +96,15 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: 
Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index f0f158b6fac4..1996a1e4fa95 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -16,7 +16,17 @@ from collections import OrderedDict import functools import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) import pkg_resources from google.api_core.client_options import ClientOptions @@ -161,9 +171,9 @@ def transport(self) -> SchemaServiceTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + 
client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the schema service client. @@ -207,13 +217,13 @@ def __init__( async def create_schema( self, - request: Union[gp_schema.CreateSchemaRequest, dict] = None, + request: Optional[Union[gp_schema.CreateSchemaRequest, dict]] = None, *, - parent: str = None, - schema: gp_schema.Schema = None, - schema_id: str = None, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + schema_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.Schema: r"""Creates a schema. @@ -249,7 +259,7 @@ async def sample_create_schema(): print(response) Args: - request (Union[google.pubsub_v1.types.CreateSchemaRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.CreateSchemaRequest, dict]]): The request object. Request for the CreateSchema method. parent (:class:`str`): Required. The name of the project in which to create the @@ -337,11 +347,11 @@ async def sample_create_schema(): async def get_schema( self, - request: Union[schema.GetSchemaRequest, dict] = None, + request: Optional[Union[schema.GetSchemaRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.Schema: r"""Gets a schema. @@ -373,7 +383,7 @@ async def sample_get_schema(): print(response) Args: - request (Union[google.pubsub_v1.types.GetSchemaRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.GetSchemaRequest, dict]]): The request object. Request for the GetSchema method. name (:class:`str`): Required. The name of the schema to get. 
Format is @@ -436,11 +446,11 @@ async def sample_get_schema(): async def list_schemas( self, - request: Union[schema.ListSchemasRequest, dict] = None, + request: Optional[Union[schema.ListSchemasRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSchemasAsyncPager: r"""Lists schemas in a project. @@ -473,7 +483,7 @@ async def sample_list_schemas(): print(response) Args: - request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ListSchemasRequest, dict]]): The request object. Request for the `ListSchemas` method. parent (:class:`str`): @@ -550,11 +560,11 @@ async def sample_list_schemas(): async def delete_schema( self, - request: Union[schema.DeleteSchemaRequest, dict] = None, + request: Optional[Union[schema.DeleteSchemaRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a schema. @@ -583,7 +593,7 @@ async def sample_delete_schema(): await client.delete_schema(request=request) Args: - request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]]): The request object. Request for the `DeleteSchema` method. 
name (:class:`str`): @@ -640,12 +650,12 @@ async def sample_delete_schema(): async def validate_schema( self, - request: Union[gp_schema.ValidateSchemaRequest, dict] = None, + request: Optional[Union[gp_schema.ValidateSchemaRequest, dict]] = None, *, - parent: str = None, - schema: gp_schema.Schema = None, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. @@ -681,7 +691,7 @@ async def sample_validate_schema(): print(response) Args: - request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]]): The request object. Request for the `ValidateSchema` method. parent (:class:`str`): @@ -756,10 +766,10 @@ async def sample_validate_schema(): async def validate_message( self, - request: Union[schema.ValidateMessageRequest, dict] = None, + request: Optional[Union[schema.ValidateMessageRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. @@ -792,7 +802,7 @@ async def sample_validate_message(): print(response) Args: - request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ValidateMessageRequest, dict]]): The request object. Request for the `ValidateMessage` method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -837,10 +847,10 @@ async def sample_validate_message(): async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -956,10 +966,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1077,10 +1087,10 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 9ecff30f4991..45f013e91208 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -17,7 +17,18 @@ import functools import os import re -from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union +from typing import ( + Dict, + Mapping, 
+ MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) import pkg_resources from google.api_core import client_options as client_options_lib @@ -61,7 +72,7 @@ class SchemaServiceClientMeta(type): def get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[SchemaServiceTransport]: """Returns an appropriate transport class. @@ -331,8 +342,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SchemaServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, SchemaServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the schema service client. @@ -346,7 +357,7 @@ def __init__( transport (Union[str, SchemaServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -376,6 +387,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -437,13 +449,13 @@ def __init__( def create_schema( self, - request: Union[gp_schema.CreateSchemaRequest, dict] = None, + request: Optional[Union[gp_schema.CreateSchemaRequest, dict]] = None, *, - parent: str = None, - schema: gp_schema.Schema = None, - schema_id: str = None, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + schema_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.Schema: r"""Creates a schema. @@ -567,11 +579,11 @@ def sample_create_schema(): def get_schema( self, - request: Union[schema.GetSchemaRequest, dict] = None, + request: Optional[Union[schema.GetSchemaRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.Schema: r"""Gets a schema. @@ -666,11 +678,11 @@ def sample_get_schema(): def list_schemas( self, - request: Union[schema.ListSchemasRequest, dict] = None, + request: Optional[Union[schema.ListSchemasRequest, dict]] = None, *, - parent: str = None, + parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSchemasPager: r"""Lists schemas in a project. 
@@ -780,11 +792,11 @@ def sample_list_schemas(): def delete_schema( self, - request: Union[schema.DeleteSchemaRequest, dict] = None, + request: Optional[Union[schema.DeleteSchemaRequest, dict]] = None, *, - name: str = None, + name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a schema. @@ -870,12 +882,12 @@ def sample_delete_schema(): def validate_schema( self, - request: Union[gp_schema.ValidateSchemaRequest, dict] = None, + request: Optional[Union[gp_schema.ValidateSchemaRequest, dict]] = None, *, - parent: str = None, - schema: gp_schema.Schema = None, + parent: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. @@ -986,10 +998,10 @@ def sample_validate_schema(): def validate_message( self, - request: Union[schema.ValidateMessageRequest, dict] = None, + request: Optional[Union[schema.ValidateMessageRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. @@ -1081,10 +1093,10 @@ def __exit__(self, type, value, traceback): def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. 
@@ -1201,10 +1213,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1322,10 +1334,10 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index e2b5ad6608d4..8035f7fd278d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -55,7 +55,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 3fa0a5b12749..feace1e27201 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -51,14 +51,14 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -187,8 +187,8 @@ def __init__( def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index fb89c89a2173..83bed7ecde50 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -53,7 +53,7 @@ 
class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -96,15 +96,15 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 902d134c632c..fb860596d637 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -19,6 +19,8 @@ from typing import ( Dict, Mapping, + MutableMapping, + MutableSequence, Optional, AsyncIterable, Awaitable, @@ -176,9 +178,9 @@ def transport(self) -> 
SubscriberTransport: def __init__( self, *, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, transport: Union[str, SubscriberTransport] = "grpc_asyncio", - client_options: ClientOptions = None, + client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the subscriber client. @@ -222,14 +224,14 @@ def __init__( async def create_subscription( self, - request: Union[pubsub.Subscription, dict] = None, + request: Optional[Union[pubsub.Subscription, dict]] = None, *, - name: str = None, - topic: str = None, - push_config: pubsub.PushConfig = None, - ack_deadline_seconds: int = None, + name: Optional[str] = None, + topic: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, + ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. See the [resource name @@ -274,7 +276,7 @@ async def sample_create_subscription(): print(response) Args: - request (Union[google.pubsub_v1.types.Subscription, dict]): + request (Optional[Union[google.pubsub_v1.types.Subscription, dict]]): The request object. A subscription resource. name (:class:`str`): Required. The name of the subscription. 
It must have the @@ -410,11 +412,11 @@ async def sample_create_subscription(): async def get_subscription( self, - request: Union[pubsub.GetSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.GetSubscriptionRequest, dict]] = None, *, - subscription: str = None, + subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. @@ -446,7 +448,7 @@ async def sample_get_subscription(): print(response) Args: - request (Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.GetSubscriptionRequest, dict]]): The request object. Request for the GetSubscription method. subscription (:class:`str`): @@ -523,10 +525,10 @@ async def sample_get_subscription(): async def update_subscription( self, - request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Updates an existing subscription. Note that certain @@ -564,7 +566,7 @@ async def sample_update_subscription(): print(response) Args: - request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]]): The request object. Request for the UpdateSubscription method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -618,11 +620,11 @@ async def sample_update_subscription(): async def list_subscriptions( self, - request: Union[pubsub.ListSubscriptionsRequest, dict] = None, + request: Optional[Union[pubsub.ListSubscriptionsRequest, dict]] = None, *, - project: str = None, + project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSubscriptionsAsyncPager: r"""Lists matching subscriptions. @@ -655,7 +657,7 @@ async def sample_list_subscriptions(): print(response) Args: - request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]]): The request object. Request for the `ListSubscriptions` method. project (:class:`str`): @@ -743,11 +745,11 @@ async def sample_list_subscriptions(): async def delete_subscription( self, - request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.DeleteSubscriptionRequest, dict]] = None, *, - subscription: str = None, + subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an existing subscription. All messages retained in the @@ -781,7 +783,7 @@ async def sample_delete_subscription(): await client.delete_subscription(request=request) Args: - request (Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.DeleteSubscriptionRequest, dict]]): The request object. Request for the DeleteSubscription method. 
subscription (:class:`str`): @@ -849,13 +851,13 @@ async def sample_delete_subscription(): async def modify_ack_deadline( self, - request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, + request: Optional[Union[pubsub.ModifyAckDeadlineRequest, dict]] = None, *, - subscription: str = None, - ack_ids: Sequence[str] = None, - ack_deadline_seconds: int = None, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, + ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ack deadline for a specific message. This method is @@ -891,7 +893,7 @@ async def sample_modify_ack_deadline(): await client.modify_ack_deadline(request=request) Args: - request (Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ModifyAckDeadlineRequest, dict]]): The request object. Request for the ModifyAckDeadline method. subscription (:class:`str`): @@ -901,7 +903,7 @@ async def sample_modify_ack_deadline(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_ids (:class:`Sequence[str]`): + ack_ids (:class:`MutableSequence[str]`): Required. List of acknowledgment IDs. 
This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this @@ -984,12 +986,12 @@ async def sample_modify_ack_deadline(): async def acknowledge( self, - request: Union[pubsub.AcknowledgeRequest, dict] = None, + request: Optional[Union[pubsub.AcknowledgeRequest, dict]] = None, *, - subscription: str = None, - ack_ids: Sequence[str] = None, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Acknowledges the messages associated with the ``ack_ids`` in the @@ -1026,7 +1028,7 @@ async def sample_acknowledge(): await client.acknowledge(request=request) Args: - request (Union[google.pubsub_v1.types.AcknowledgeRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.AcknowledgeRequest, dict]]): The request object. Request for the Acknowledge method. subscription (:class:`str`): Required. The subscription whose message is being @@ -1036,7 +1038,7 @@ async def sample_acknowledge(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_ids (:class:`Sequence[str]`): + ack_ids (:class:`MutableSequence[str]`): Required. The acknowledgment ID for the messages being acknowledged that was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. 
@@ -1104,13 +1106,13 @@ async def sample_acknowledge(): async def pull( self, - request: Union[pubsub.PullRequest, dict] = None, + request: Optional[Union[pubsub.PullRequest, dict]] = None, *, - subscription: str = None, - return_immediately: bool = None, - max_messages: int = None, + subscription: Optional[str] = None, + return_immediately: Optional[bool] = None, + max_messages: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: r"""Pulls messages from the server. The server may return @@ -1145,7 +1147,7 @@ async def sample_pull(): print(response) Args: - request (Union[google.pubsub_v1.types.PullRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.PullRequest, dict]]): The request object. Request for the `Pull` method. subscription (:class:`str`): Required. The subscription from which messages should be @@ -1256,10 +1258,10 @@ async def sample_pull(): def streaming_pull( self, - requests: AsyncIterator[pubsub.StreamingPullRequest] = None, + requests: Optional[AsyncIterator[pubsub.StreamingPullRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: r"""Establishes a stream with the server, which sends messages down @@ -1363,12 +1365,12 @@ def request_generator(): async def modify_push_config( self, - request: Union[pubsub.ModifyPushConfigRequest, dict] = None, + request: Optional[Union[pubsub.ModifyPushConfigRequest, dict]] = None, *, - subscription: str = None, - push_config: pubsub.PushConfig = None, + subscription: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = 
(), ) -> None: r"""Modifies the ``PushConfig`` for a specified subscription. @@ -1403,7 +1405,7 @@ async def sample_modify_push_config(): await client.modify_push_config(request=request) Args: - request (Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ModifyPushConfigRequest, dict]]): The request object. Request for the ModifyPushConfig method. subscription (:class:`str`): @@ -1485,11 +1487,11 @@ async def sample_modify_push_config(): async def get_snapshot( self, - request: Union[pubsub.GetSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.GetSnapshotRequest, dict]] = None, *, - snapshot: str = None, + snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. @@ -1527,7 +1529,7 @@ async def sample_get_snapshot(): print(response) Args: - request (Union[google.pubsub_v1.types.GetSnapshotRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.GetSnapshotRequest, dict]]): The request object. Request for the GetSnapshot method. snapshot (:class:`str`): Required. The name of the snapshot to get. Format is @@ -1607,11 +1609,11 @@ async def sample_get_snapshot(): async def list_snapshots( self, - request: Union[pubsub.ListSnapshotsRequest, dict] = None, + request: Optional[Union[pubsub.ListSnapshotsRequest, dict]] = None, *, - project: str = None, + project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSnapshotsAsyncPager: r"""Lists the existing snapshots. 
Snapshots are used in @@ -1648,7 +1650,7 @@ async def sample_list_snapshots(): print(response) Args: - request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]]): The request object. Request for the `ListSnapshots` method. project (:class:`str`): @@ -1736,12 +1738,12 @@ async def sample_list_snapshots(): async def create_snapshot( self, - request: Union[pubsub.CreateSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.CreateSnapshotRequest, dict]] = None, *, - name: str = None, - subscription: str = None, + name: Optional[str] = None, + subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Creates a snapshot from the requested subscription. Snapshots @@ -1792,7 +1794,7 @@ async def sample_create_snapshot(): print(response) Args: - request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]]): The request object. Request for the `CreateSnapshot` method. name (:class:`str`): @@ -1893,10 +1895,10 @@ async def sample_create_snapshot(): async def update_snapshot( self, - request: Union[pubsub.UpdateSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. Snapshots are used in @@ -1934,7 +1936,7 @@ async def sample_update_snapshot(): print(response) Args: - request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]]): The request object. Request for the UpdateSnapshot method. 
retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -1994,11 +1996,11 @@ async def sample_update_snapshot(): async def delete_snapshot( self, - request: Union[pubsub.DeleteSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.DeleteSnapshotRequest, dict]] = None, *, - snapshot: str = None, + snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Removes an existing snapshot. Snapshots are used in [Seek] @@ -2036,7 +2038,7 @@ async def sample_delete_snapshot(): await client.delete_snapshot(request=request) Args: - request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]]): The request object. Request for the `DeleteSnapshot` method. snapshot (:class:`str`): @@ -2102,10 +2104,10 @@ async def sample_delete_snapshot(): async def seek( self, - request: Union[pubsub.SeekRequest, dict] = None, + request: Optional[Union[pubsub.SeekRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given @@ -2145,7 +2147,7 @@ async def sample_seek(): print(response) Args: - request (Union[google.pubsub_v1.types.SeekRequest, dict]): + request (Optional[Union[google.pubsub_v1.types.SeekRequest, dict]]): The request object. Request for the `Seek` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
@@ -2200,10 +2202,10 @@ async def sample_seek(): async def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2319,10 +2321,10 @@ async def set_iam_policy( async def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -2440,10 +2442,10 @@ async def get_iam_policy( async def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 6f08e2792e1a..35d96b5b5c7e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -20,6 +20,8 @@ from typing import ( Dict, Mapping, + MutableMapping, + MutableSequence, Optional, Iterable, Iterator, @@ -27,6 +29,7 @@ Tuple, Type, Union, + cast, ) import warnings import pkg_resources @@ -73,7 +76,7 @@ class SubscriberClientMeta(type): def 
get_transport_class( cls, - label: str = None, + label: Optional[str] = None, ) -> Type[SubscriberTransport]: """Returns an appropriate transport class. @@ -393,8 +396,8 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SubscriberTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, + transport: Optional[Union[str, SubscriberTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the subscriber client. @@ -408,7 +411,7 @@ def __init__( transport (Union[str, SubscriberTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -438,6 +441,7 @@ def __init__( client_options = client_options_lib.from_dict(client_options) if client_options is None: client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( client_options @@ -499,14 +503,14 @@ def __init__( def create_subscription( self, - request: Union[pubsub.Subscription, dict] = None, + request: Optional[Union[pubsub.Subscription, dict]] = None, *, - name: str = None, - topic: str = None, - push_config: pubsub.PushConfig = None, - ack_deadline_seconds: int = None, + name: Optional[str] = None, + topic: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, + ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. See the [resource name @@ -676,11 +680,11 @@ def sample_create_subscription(): def get_subscription( self, - request: Union[pubsub.GetSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.GetSubscriptionRequest, dict]] = None, *, - subscription: str = None, + subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. 
@@ -778,10 +782,10 @@ def sample_get_subscription(): def update_subscription( self, - request: Union[pubsub.UpdateSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Updates an existing subscription. Note that certain @@ -865,11 +869,11 @@ def sample_update_subscription(): def list_subscriptions( self, - request: Union[pubsub.ListSubscriptionsRequest, dict] = None, + request: Optional[Union[pubsub.ListSubscriptionsRequest, dict]] = None, *, - project: str = None, + project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSubscriptionsPager: r"""Lists matching subscriptions. @@ -979,11 +983,11 @@ def sample_list_subscriptions(): def delete_subscription( self, - request: Union[pubsub.DeleteSubscriptionRequest, dict] = None, + request: Optional[Union[pubsub.DeleteSubscriptionRequest, dict]] = None, *, - subscription: str = None, + subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an existing subscription. 
All messages retained in the @@ -1076,13 +1080,13 @@ def sample_delete_subscription(): def modify_ack_deadline( self, - request: Union[pubsub.ModifyAckDeadlineRequest, dict] = None, + request: Optional[Union[pubsub.ModifyAckDeadlineRequest, dict]] = None, *, - subscription: str = None, - ack_ids: Sequence[str] = None, - ack_deadline_seconds: int = None, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, + ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ack deadline for a specific message. This method is @@ -1128,7 +1132,7 @@ def sample_modify_ack_deadline(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): Required. List of acknowledgment IDs. This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this @@ -1202,12 +1206,12 @@ def sample_modify_ack_deadline(): def acknowledge( self, - request: Union[pubsub.AcknowledgeRequest, dict] = None, + request: Optional[Union[pubsub.AcknowledgeRequest, dict]] = None, *, - subscription: str = None, - ack_ids: Sequence[str] = None, + subscription: Optional[str] = None, + ack_ids: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Acknowledges the messages associated with the ``ack_ids`` in the @@ -1254,7 +1258,7 @@ def sample_acknowledge(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): Required. 
The acknowledgment ID for the messages being acknowledged that was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. @@ -1313,13 +1317,13 @@ def sample_acknowledge(): def pull( self, - request: Union[pubsub.PullRequest, dict] = None, + request: Optional[Union[pubsub.PullRequest, dict]] = None, *, - subscription: str = None, - return_immediately: bool = None, - max_messages: int = None, + subscription: Optional[str] = None, + return_immediately: Optional[bool] = None, + max_messages: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: r"""Pulls messages from the server. The server may return @@ -1454,10 +1458,10 @@ def sample_pull(): def streaming_pull( self, - requests: Iterator[pubsub.StreamingPullRequest] = None, + requests: Optional[Iterator[pubsub.StreamingPullRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[pubsub.StreamingPullResponse]: r"""Establishes a stream with the server, which sends messages down @@ -1549,12 +1553,12 @@ def request_generator(): def modify_push_config( self, - request: Union[pubsub.ModifyPushConfigRequest, dict] = None, + request: Optional[Union[pubsub.ModifyPushConfigRequest, dict]] = None, *, - subscription: str = None, - push_config: pubsub.PushConfig = None, + subscription: Optional[str] = None, + push_config: Optional[pubsub.PushConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ``PushConfig`` for a specified subscription. 
@@ -1662,11 +1666,11 @@ def sample_modify_push_config(): def get_snapshot( self, - request: Union[pubsub.GetSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.GetSnapshotRequest, dict]] = None, *, - snapshot: str = None, + snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. @@ -1773,11 +1777,11 @@ def sample_get_snapshot(): def list_snapshots( self, - request: Union[pubsub.ListSnapshotsRequest, dict] = None, + request: Optional[Union[pubsub.ListSnapshotsRequest, dict]] = None, *, - project: str = None, + project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSnapshotsPager: r"""Lists the existing snapshots. Snapshots are used in @@ -1891,12 +1895,12 @@ def sample_list_snapshots(): def create_snapshot( self, - request: Union[pubsub.CreateSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.CreateSnapshotRequest, dict]] = None, *, - name: str = None, - subscription: str = None, + name: Optional[str] = None, + subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Creates a snapshot from the requested subscription. Snapshots @@ -2039,10 +2043,10 @@ def sample_create_snapshot(): def update_snapshot( self, - request: Union[pubsub.UpdateSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. 
Snapshots are used in @@ -2132,11 +2136,11 @@ def sample_update_snapshot(): def delete_snapshot( self, - request: Union[pubsub.DeleteSnapshotRequest, dict] = None, + request: Optional[Union[pubsub.DeleteSnapshotRequest, dict]] = None, *, - snapshot: str = None, + snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Removes an existing snapshot. Snapshots are used in [Seek] @@ -2231,10 +2235,10 @@ def sample_delete_snapshot(): def seek( self, - request: Union[pubsub.SeekRequest, dict] = None, + request: Optional[Union[pubsub.SeekRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given @@ -2332,10 +2336,10 @@ def __exit__(self, type, value, traceback): def set_iam_policy( self, - request: iam_policy_pb2.SetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2452,10 +2456,10 @@ def set_iam_policy( def get_iam_policy( self, - request: iam_policy_pb2.GetIamPolicyRequest = None, + request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -2573,10 +2577,10 @@ def get_iam_policy( def test_iam_permissions( self, - request: iam_policy_pb2.TestIamPermissionsRequest = None, + request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, + timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 0d815e06890a..c1d2d8aff3b8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -54,7 +54,7 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 5954b6403343..5a0cf0d8e5eb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -53,14 +53,14 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - 
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, @@ -189,8 +189,8 @@ def __init__( def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, **kwargs, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 778e49c5dc8e..edcaf4911142 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -55,7 +55,7 @@ class SubscriberGrpcAsyncIOTransport(SubscriberTransport): def create_channel( cls, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, @@ -98,15 +98,15 @@ def __init__( self, *, host: str = "pubsub.googleapis.com", - credentials: ga_credentials.Credentials = None, + 
credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, api_audience: Optional[str] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 73658e9c0989..a58226a152fb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -78,7 +80,7 @@ class MessageStoragePolicy(proto.Message): the topic. Attributes: - allowed_persistence_regions (Sequence[str]): + allowed_persistence_regions (MutableSequence[str]): A list of IDs of GCP regions where messages that are published to the topic may be persisted in storage. Messages published by publishers @@ -89,7 +91,7 @@ class MessageStoragePolicy(proto.Message): not a valid configuration. 
""" - allowed_persistence_regions = proto.RepeatedField( + allowed_persistence_regions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) @@ -109,11 +111,11 @@ class SchemaSettings(proto.Message): The encoding of messages validated against ``schema``. """ - schema = proto.Field( + schema: str = proto.Field( proto.STRING, number=1, ) - encoding = proto.Field( + encoding: gp_schema.Encoding = proto.Field( proto.ENUM, number=2, enum=gp_schema.Encoding, @@ -133,7 +135,7 @@ class Topic(proto.Message): (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with ``"goog"``. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy): @@ -168,34 +170,34 @@ class Topic(proto.Message): days or less than 10 minutes. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - message_storage_policy = proto.Field( + message_storage_policy: "MessageStoragePolicy" = proto.Field( proto.MESSAGE, number=3, message="MessageStoragePolicy", ) - kms_key_name = proto.Field( + kms_key_name: str = proto.Field( proto.STRING, number=5, ) - schema_settings = proto.Field( + schema_settings: "SchemaSettings" = proto.Field( proto.MESSAGE, number=6, message="SchemaSettings", ) - satisfies_pzs = proto.Field( + satisfies_pzs: bool = proto.Field( proto.BOOL, number=7, ) - message_retention_duration = proto.Field( + message_retention_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=8, message=duration_pb2.Duration, @@ -218,7 +220,7 @@ class PubsubMessage(proto.Message): The message data field. If this field is empty, the message must contain at least one attribute. 
- attributes (Mapping[str, str]): + attributes (MutableMapping[str, str]): Attributes for this message. If this field is empty, the message must contain non-empty data. This can be used to filter messages on the @@ -245,25 +247,25 @@ class PubsubMessage(proto.Message): same ``ordering_key`` value. """ - data = proto.Field( + data: bytes = proto.Field( proto.BYTES, number=1, ) - attributes = proto.MapField( + attributes: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - message_id = proto.Field( + message_id: str = proto.Field( proto.STRING, number=3, ) - publish_time = proto.Field( + publish_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=4, message=timestamp_pb2.Timestamp, ) - ordering_key = proto.Field( + ordering_key: str = proto.Field( proto.STRING, number=5, ) @@ -278,7 +280,7 @@ class GetTopicRequest(proto.Message): ``projects/{project}/topics/{topic}``. """ - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) @@ -299,12 +301,12 @@ class UpdateTopicRequest(proto.Message): policy configured at the project or organization level. """ - topic = proto.Field( + topic: "Topic" = proto.Field( proto.MESSAGE, number=1, message="Topic", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, @@ -318,15 +320,15 @@ class PublishRequest(proto.Message): topic (str): Required. The messages in the request will be published on this topic. Format is ``projects/{project}/topics/{topic}``. - messages (Sequence[google.pubsub_v1.types.PubsubMessage]): + messages (MutableSequence[google.pubsub_v1.types.PubsubMessage]): Required. The messages to publish. 
""" - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) - messages = proto.RepeatedField( + messages: MutableSequence["PubsubMessage"] = proto.RepeatedField( proto.MESSAGE, number=2, message="PubsubMessage", @@ -337,14 +339,14 @@ class PublishResponse(proto.Message): r"""Response for the ``Publish`` method. Attributes: - message_ids (Sequence[str]): + message_ids (MutableSequence[str]): The server-assigned ID of each published message, in the same order as the messages in the request. IDs are guaranteed to be unique within the topic. """ - message_ids = proto.RepeatedField( + message_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) @@ -366,15 +368,15 @@ class ListTopicsRequest(proto.Message): next page of data. """ - project = proto.Field( + project: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -384,7 +386,7 @@ class ListTopicsResponse(proto.Message): r"""Response for the ``ListTopics`` method. Attributes: - topics (Sequence[google.pubsub_v1.types.Topic]): + topics (MutableSequence[google.pubsub_v1.types.Topic]): The resulting topics. next_page_token (str): If not empty, indicates that there may be more topics that @@ -396,12 +398,12 @@ class ListTopicsResponse(proto.Message): def raw_page(self): return self - topics = proto.RepeatedField( + topics: MutableSequence["Topic"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Topic", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -425,15 +427,15 @@ class ListTopicSubscriptionsRequest(proto.Message): that the system should return the next page of data. 
""" - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -443,7 +445,7 @@ class ListTopicSubscriptionsResponse(proto.Message): r"""Response for the ``ListTopicSubscriptions`` method. Attributes: - subscriptions (Sequence[str]): + subscriptions (MutableSequence[str]): The names of subscriptions attached to the topic specified in the request. next_page_token (str): @@ -456,11 +458,11 @@ class ListTopicSubscriptionsResponse(proto.Message): def raw_page(self): return self - subscriptions = proto.RepeatedField( + subscriptions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -482,15 +484,15 @@ class ListTopicSnapshotsRequest(proto.Message): that the system should return the next page of data. """ - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -500,7 +502,7 @@ class ListTopicSnapshotsResponse(proto.Message): r"""Response for the ``ListTopicSnapshots`` method. Attributes: - snapshots (Sequence[str]): + snapshots (MutableSequence[str]): The names of the snapshots that match the request. next_page_token (str): @@ -513,11 +515,11 @@ class ListTopicSnapshotsResponse(proto.Message): def raw_page(self): return self - snapshots = proto.RepeatedField( + snapshots: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -532,7 +534,7 @@ class DeleteTopicRequest(proto.Message): ``projects/{project}/topics/{topic}``. 
""" - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=1, ) @@ -547,7 +549,7 @@ class DetachSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{subscription}``. """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) @@ -631,7 +633,7 @@ class Subscription(proto.Message): thus configures how far back in time a ``Seek`` can be done. Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): See Creating and managing labels. @@ -720,79 +722,79 @@ class State(proto.Enum): ACTIVE = 1 RESOURCE_ERROR = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=2, ) - push_config = proto.Field( + push_config: "PushConfig" = proto.Field( proto.MESSAGE, number=4, message="PushConfig", ) - bigquery_config = proto.Field( + bigquery_config: "BigQueryConfig" = proto.Field( proto.MESSAGE, number=18, message="BigQueryConfig", ) - ack_deadline_seconds = proto.Field( + ack_deadline_seconds: int = proto.Field( proto.INT32, number=5, ) - retain_acked_messages = proto.Field( + retain_acked_messages: bool = proto.Field( proto.BOOL, number=7, ) - message_retention_duration = proto.Field( + message_retention_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=8, message=duration_pb2.Duration, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=9, ) - enable_message_ordering = proto.Field( + enable_message_ordering: bool = proto.Field( proto.BOOL, number=10, ) - expiration_policy = proto.Field( + expiration_policy: "ExpirationPolicy" = proto.Field( proto.MESSAGE, number=11, message="ExpirationPolicy", ) - filter = proto.Field( + filter: str = proto.Field( proto.STRING, number=12, ) - dead_letter_policy = proto.Field( + dead_letter_policy: "DeadLetterPolicy" = 
proto.Field( proto.MESSAGE, number=13, message="DeadLetterPolicy", ) - retry_policy = proto.Field( + retry_policy: "RetryPolicy" = proto.Field( proto.MESSAGE, number=14, message="RetryPolicy", ) - detached = proto.Field( + detached: bool = proto.Field( proto.BOOL, number=15, ) - enable_exactly_once_delivery = proto.Field( + enable_exactly_once_delivery: bool = proto.Field( proto.BOOL, number=16, ) - topic_message_retention_duration = proto.Field( + topic_message_retention_duration: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=17, message=duration_pb2.Duration, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=19, enum=State, @@ -826,12 +828,12 @@ class RetryPolicy(proto.Message): seconds. """ - minimum_backoff = proto.Field( + minimum_backoff: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=1, message=duration_pb2.Duration, ) - maximum_backoff = proto.Field( + maximum_backoff: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=2, message=duration_pb2.Duration, @@ -877,11 +879,11 @@ class DeadLetterPolicy(proto.Message): If this parameter is 0, a default value of 5 is used. """ - dead_letter_topic = proto.Field( + dead_letter_topic: str = proto.Field( proto.STRING, number=1, ) - max_delivery_attempts = proto.Field( + max_delivery_attempts: int = proto.Field( proto.INT32, number=2, ) @@ -902,7 +904,7 @@ class ExpirationPolicy(proto.Message): associated resource never expires. """ - ttl = proto.Field( + ttl: duration_pb2.Duration = proto.Field( proto.MESSAGE, number=1, message=duration_pb2.Duration, @@ -919,7 +921,7 @@ class PushConfig(proto.Message): A URL locating the endpoint to which messages should be pushed. For example, a Webhook endpoint might use ``https://example.com/push``. - attributes (Mapping[str, str]): + attributes (MutableMapping[str, str]): Endpoint configuration attributes that can be used to control different aspects of the message delivery. 
@@ -982,25 +984,25 @@ class OidcToken(proto.Message): will be used. """ - service_account_email = proto.Field( + service_account_email: str = proto.Field( proto.STRING, number=1, ) - audience = proto.Field( + audience: str = proto.Field( proto.STRING, number=2, ) - push_endpoint = proto.Field( + push_endpoint: str = proto.Field( proto.STRING, number=1, ) - attributes = proto.MapField( + attributes: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=2, ) - oidc_token = proto.Field( + oidc_token: OidcToken = proto.Field( proto.MESSAGE, number=3, oneof="authentication_method", @@ -1046,23 +1048,23 @@ class State(proto.Enum): NOT_FOUND = 3 SCHEMA_MISMATCH = 4 - table = proto.Field( + table: str = proto.Field( proto.STRING, number=1, ) - use_topic_schema = proto.Field( + use_topic_schema: bool = proto.Field( proto.BOOL, number=2, ) - write_metadata = proto.Field( + write_metadata: bool = proto.Field( proto.BOOL, number=3, ) - drop_unknown_fields = proto.Field( + drop_unknown_fields: bool = proto.Field( proto.BOOL, number=4, ) - state = proto.Field( + state: State = proto.Field( proto.ENUM, number=5, enum=State, @@ -1099,16 +1101,16 @@ class ReceivedMessage(proto.Message): will be 0. """ - ack_id = proto.Field( + ack_id: str = proto.Field( proto.STRING, number=1, ) - message = proto.Field( + message: "PubsubMessage" = proto.Field( proto.MESSAGE, number=2, message="PubsubMessage", ) - delivery_attempt = proto.Field( + delivery_attempt: int = proto.Field( proto.INT32, number=3, ) @@ -1123,7 +1125,7 @@ class GetSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{sub}``. """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) @@ -1141,12 +1143,12 @@ class UpdateSubscriptionRequest(proto.Message): specified and non-empty. 
""" - subscription = proto.Field( + subscription: "Subscription" = proto.Field( proto.MESSAGE, number=1, message="Subscription", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, @@ -1169,15 +1171,15 @@ class ListSubscriptionsRequest(proto.Message): the system should return the next page of data. """ - project = proto.Field( + project: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -1187,7 +1189,7 @@ class ListSubscriptionsResponse(proto.Message): r"""Response for the ``ListSubscriptions`` method. Attributes: - subscriptions (Sequence[google.pubsub_v1.types.Subscription]): + subscriptions (MutableSequence[google.pubsub_v1.types.Subscription]): The subscriptions that match the request. next_page_token (str): If not empty, indicates that there may be more subscriptions @@ -1199,12 +1201,12 @@ class ListSubscriptionsResponse(proto.Message): def raw_page(self): return self - subscriptions = proto.RepeatedField( + subscriptions: MutableSequence["Subscription"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Subscription", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -1219,7 +1221,7 @@ class DeleteSubscriptionRequest(proto.Message): ``projects/{project}/subscriptions/{sub}``. """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) @@ -1242,11 +1244,11 @@ class ModifyPushConfigRequest(proto.Message): not called. """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) - push_config = proto.Field( + push_config: "PushConfig" = proto.Field( proto.MESSAGE, number=2, message="PushConfig", @@ -1277,15 +1279,15 @@ class PullRequest(proto.Message): than the number specified. 
""" - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) - return_immediately = proto.Field( + return_immediately: bool = proto.Field( proto.BOOL, number=2, ) - max_messages = proto.Field( + max_messages: int = proto.Field( proto.INT32, number=3, ) @@ -1295,7 +1297,7 @@ class PullResponse(proto.Message): r"""Response for the ``Pull`` method. Attributes: - received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): + received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. The list will be empty if there are no more messages available in the backlog. For JSON, the response can be entirely empty. The Pub/Sub system may @@ -1303,7 +1305,7 @@ class PullResponse(proto.Message): there are more messages available in the backlog. """ - received_messages = proto.RepeatedField( + received_messages: MutableSequence["ReceivedMessage"] = proto.RepeatedField( proto.MESSAGE, number=1, message="ReceivedMessage", @@ -1317,7 +1319,7 @@ class ModifyAckDeadlineRequest(proto.Message): subscription (str): Required. The name of the subscription. Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): Required. List of acknowledgment IDs. ack_deadline_seconds (int): Required. The new ack deadline with respect to the time this @@ -1332,15 +1334,15 @@ class ModifyAckDeadlineRequest(proto.Message): seconds (10 minutes). """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) - ack_ids = proto.RepeatedField( + ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=4, ) - ack_deadline_seconds = proto.Field( + ack_deadline_seconds: int = proto.Field( proto.INT32, number=3, ) @@ -1354,17 +1356,17 @@ class AcknowledgeRequest(proto.Message): Required. The subscription whose message is being acknowledged. Format is ``projects/{project}/subscriptions/{sub}``. 
- ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): Required. The acknowledgment ID for the messages being acknowledged that was returned by the Pub/Sub system in the ``Pull`` response. Must not be empty. """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) - ack_ids = proto.RepeatedField( + ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) @@ -1383,7 +1385,7 @@ class StreamingPullRequest(proto.Message): stream, and must not be set in subsequent requests from client to server. Format is ``projects/{project}/subscriptions/{sub}``. - ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): List of acknowledgement IDs for acknowledging previously received messages (received on this stream or a different stream). If an ack ID has expired, the corresponding message @@ -1391,7 +1393,7 @@ class StreamingPullRequest(proto.Message): once will not result in an error. If the acknowledgement ID is malformed, the stream will be aborted with status ``INVALID_ARGUMENT``. - modify_deadline_seconds (Sequence[int]): + modify_deadline_seconds (MutableSequence[int]): The list of new ack deadlines for the IDs listed in ``modify_deadline_ack_ids``. The size of this list must be the same as the size of ``modify_deadline_ack_ids``. If it @@ -1406,7 +1408,7 @@ class StreamingPullRequest(proto.Message): made available for another streaming or non-streaming pull request. If the value is < 0 (an error), the stream will be aborted with status ``INVALID_ARGUMENT``. - modify_deadline_ack_ids (Sequence[str]): + modify_deadline_ack_ids (MutableSequence[str]): List of acknowledgement IDs whose deadline will be modified based on the corresponding element in ``modify_deadline_seconds``. This field can be used to @@ -1454,35 +1456,35 @@ class StreamingPullRequest(proto.Message): ``INVALID_ARGUMENT``. 
""" - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) - ack_ids = proto.RepeatedField( + ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - modify_deadline_seconds = proto.RepeatedField( + modify_deadline_seconds: MutableSequence[int] = proto.RepeatedField( proto.INT32, number=3, ) - modify_deadline_ack_ids = proto.RepeatedField( + modify_deadline_ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=4, ) - stream_ack_deadline_seconds = proto.Field( + stream_ack_deadline_seconds: int = proto.Field( proto.INT32, number=5, ) - client_id = proto.Field( + client_id: str = proto.Field( proto.STRING, number=6, ) - max_outstanding_messages = proto.Field( + max_outstanding_messages: int = proto.Field( proto.INT64, number=7, ) - max_outstanding_bytes = proto.Field( + max_outstanding_bytes: int = proto.Field( proto.INT64, number=8, ) @@ -1493,7 +1495,7 @@ class StreamingPullResponse(proto.Message): stream messages from the server to the client. Attributes: - received_messages (Sequence[google.pubsub_v1.types.ReceivedMessage]): + received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. This will not be empty. acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): @@ -1511,26 +1513,26 @@ class AcknowledgeConfirmation(proto.Message): acknowledge a previously received message. Attributes: - ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): Successfully processed acknowledgement IDs. - invalid_ack_ids (Sequence[str]): + invalid_ack_ids (MutableSequence[str]): List of acknowledgement IDs that were malformed or whose acknowledgement deadline has expired. - unordered_ack_ids (Sequence[str]): + unordered_ack_ids (MutableSequence[str]): List of acknowledgement IDs that were out of order. 
""" - ack_ids = proto.RepeatedField( + ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - invalid_ack_ids = proto.RepeatedField( + invalid_ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) - unordered_ack_ids = proto.RepeatedField( + unordered_ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=3, ) @@ -1540,19 +1542,19 @@ class ModifyAckDeadlineConfirmation(proto.Message): modify the deadline for a specific message. Attributes: - ack_ids (Sequence[str]): + ack_ids (MutableSequence[str]): Successfully processed acknowledgement IDs. - invalid_ack_ids (Sequence[str]): + invalid_ack_ids (MutableSequence[str]): List of acknowledgement IDs that were malformed or whose acknowledgement deadline has expired. """ - ack_ids = proto.RepeatedField( + ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) - invalid_ack_ids = proto.RepeatedField( + invalid_ack_ids: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, ) @@ -1569,31 +1571,31 @@ class SubscriptionProperties(proto.Message): subscription. 
""" - exactly_once_delivery_enabled = proto.Field( + exactly_once_delivery_enabled: bool = proto.Field( proto.BOOL, number=1, ) - message_ordering_enabled = proto.Field( + message_ordering_enabled: bool = proto.Field( proto.BOOL, number=2, ) - received_messages = proto.RepeatedField( + received_messages: MutableSequence["ReceivedMessage"] = proto.RepeatedField( proto.MESSAGE, number=1, message="ReceivedMessage", ) - acknowledge_confirmation = proto.Field( + acknowledge_confirmation: AcknowledgeConfirmation = proto.Field( proto.MESSAGE, number=5, message=AcknowledgeConfirmation, ) - modify_ack_deadline_confirmation = proto.Field( + modify_ack_deadline_confirmation: ModifyAckDeadlineConfirmation = proto.Field( proto.MESSAGE, number=3, message=ModifyAckDeadlineConfirmation, ) - subscription_properties = proto.Field( + subscription_properties: SubscriptionProperties = proto.Field( proto.MESSAGE, number=4, message=SubscriptionProperties, @@ -1622,21 +1624,21 @@ class CreateSnapshotRequest(proto.Message): topic following the successful completion of the CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): See Creating and managing labels. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=2, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=3, @@ -1655,12 +1657,12 @@ class UpdateSnapshotRequest(proto.Message): and non-empty. """ - snapshot = proto.Field( + snapshot: "Snapshot" = proto.Field( proto.MESSAGE, number=1, message="Snapshot", ) - update_mask = proto.Field( + update_mask: field_mask_pb2.FieldMask = proto.Field( proto.MESSAGE, number=2, message=field_mask_pb2.FieldMask, @@ -1694,25 +1696,25 @@ class Snapshot(proto.Message): expire in 4 days. 
The service will refuse to create a snapshot that would expire in less than 1 hour after creation. - labels (Mapping[str, str]): + labels (MutableMapping[str, str]): See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - topic = proto.Field( + topic: str = proto.Field( proto.STRING, number=2, ) - expire_time = proto.Field( + expire_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=3, message=timestamp_pb2.Timestamp, ) - labels = proto.MapField( + labels: MutableMapping[str, str] = proto.MapField( proto.STRING, proto.STRING, number=4, @@ -1728,7 +1730,7 @@ class GetSnapshotRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. """ - snapshot = proto.Field( + snapshot: str = proto.Field( proto.STRING, number=1, ) @@ -1750,15 +1752,15 @@ class ListSnapshotsRequest(proto.Message): the next page of data. """ - project = proto.Field( + project: str = proto.Field( proto.STRING, number=1, ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=2, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=3, ) @@ -1768,7 +1770,7 @@ class ListSnapshotsResponse(proto.Message): r"""Response for the ``ListSnapshots`` method. Attributes: - snapshots (Sequence[google.pubsub_v1.types.Snapshot]): + snapshots (MutableSequence[google.pubsub_v1.types.Snapshot]): The resulting snapshots. 
next_page_token (str): If not empty, indicates that there may be more snapshot that @@ -1780,12 +1782,12 @@ class ListSnapshotsResponse(proto.Message): def raw_page(self): return self - snapshots = proto.RepeatedField( + snapshots: MutableSequence["Snapshot"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Snapshot", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -1800,7 +1802,7 @@ class DeleteSnapshotRequest(proto.Message): ``projects/{project}/snapshots/{snap}``. """ - snapshot = proto.Field( + snapshot: str = proto.Field( proto.STRING, number=1, ) @@ -1843,17 +1845,17 @@ class SeekRequest(proto.Message): This field is a member of `oneof`_ ``target``. """ - subscription = proto.Field( + subscription: str = proto.Field( proto.STRING, number=1, ) - time = proto.Field( + time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=2, oneof="target", message=timestamp_pb2.Timestamp, ) - snapshot = proto.Field( + snapshot: str = proto.Field( proto.STRING, number=3, oneof="target", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 59fe3aa3e5b8..affb4343ac40 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from typing import MutableMapping, MutableSequence + import proto # type: ignore @@ -72,16 +74,16 @@ class Type(proto.Enum): PROTOCOL_BUFFER = 1 AVRO = 2 - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - type_ = proto.Field( + type_: Type = proto.Field( proto.ENUM, number=2, enum=Type, ) - definition = proto.Field( + definition: str = proto.Field( proto.STRING, number=3, ) @@ -109,16 +111,16 @@ class CreateSchemaRequest(proto.Message): for resource name constraints. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - schema = proto.Field( + schema: "Schema" = proto.Field( proto.MESSAGE, number=2, message="Schema", ) - schema_id = proto.Field( + schema_id: str = proto.Field( proto.STRING, number=3, ) @@ -137,11 +139,11 @@ class GetSchemaRequest(proto.Message): ``definition``. Set to ``FULL`` to retrieve all fields. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) - view = proto.Field( + view: "SchemaView" = proto.Field( proto.ENUM, number=2, enum="SchemaView", @@ -168,20 +170,20 @@ class ListSchemasRequest(proto.Message): next page of data. """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - view = proto.Field( + view: "SchemaView" = proto.Field( proto.ENUM, number=2, enum="SchemaView", ) - page_size = proto.Field( + page_size: int = proto.Field( proto.INT32, number=3, ) - page_token = proto.Field( + page_token: str = proto.Field( proto.STRING, number=4, ) @@ -191,7 +193,7 @@ class ListSchemasResponse(proto.Message): r"""Response for the ``ListSchemas`` method. Attributes: - schemas (Sequence[google.pubsub_v1.types.Schema]): + schemas (MutableSequence[google.pubsub_v1.types.Schema]): The resulting schemas. next_page_token (str): If not empty, indicates that there may be more schemas that @@ -203,12 +205,12 @@ class ListSchemasResponse(proto.Message): def raw_page(self): return self - schemas = proto.RepeatedField( + schemas: MutableSequence["Schema"] = proto.RepeatedField( proto.MESSAGE, number=1, message="Schema", ) - next_page_token = proto.Field( + next_page_token: str = proto.Field( proto.STRING, number=2, ) @@ -223,7 +225,7 @@ class DeleteSchemaRequest(proto.Message): ``projects/{project}/schemas/{schema}``. """ - name = proto.Field( + name: str = proto.Field( proto.STRING, number=1, ) @@ -240,11 +242,11 @@ class ValidateSchemaRequest(proto.Message): Required. The schema object to validate. 
""" - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - schema = proto.Field( + schema: "Schema" = proto.Field( proto.MESSAGE, number=2, message="Schema", @@ -285,26 +287,26 @@ class ValidateMessageRequest(proto.Message): The encoding expected for messages """ - parent = proto.Field( + parent: str = proto.Field( proto.STRING, number=1, ) - name = proto.Field( + name: str = proto.Field( proto.STRING, number=2, oneof="schema_spec", ) - schema = proto.Field( + schema: "Schema" = proto.Field( proto.MESSAGE, number=3, oneof="schema_spec", message="Schema", ) - message = proto.Field( + message: bytes = proto.Field( proto.BYTES, number=4, ) - encoding = proto.Field( + encoding: "Encoding" = proto.Field( proto.ENUM, number=5, enum="Encoding", diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json new file mode 100644 index 000000000000..581ec0e7c514 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -0,0 +1,5020 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.pubsub.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-pubsub", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.create_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { 
+ "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "pubsub_v1_generated_publisher_create_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_create_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.create_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.CreateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "CreateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "create_topic" + }, + "description": "Sample for CreateTopic", + "file": "pubsub_v1_generated_publisher_create_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": 
"FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_create_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.delete_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "pubsub_v1_generated_publisher_delete_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_delete_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.delete_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.DeleteTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DeleteTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_topic" + }, + "description": "Sample for DeleteTopic", + "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_delete_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.detach_subscription", + "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" + }, + "description": "Sample for DetachSubscription", + "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.detach_subscription", + "method": { + "fullName": "google.pubsub.v1.Publisher.DetachSubscription", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "DetachSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DetachSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", + "shortName": "detach_subscription" + }, + "description": "Sample for DetachSubscription", + "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", + "language": "PYTHON", + "origin": 
"API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_detach_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.get_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "pubsub_v1_generated_publisher_get_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + 
"type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_get_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.get_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.GetTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "GetTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetTopicRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "get_topic" + }, + "description": "Sample for GetTopic", + "file": "pubsub_v1_generated_publisher_get_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_get_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_snapshots", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", + "service": { + "fullName": "google.pubsub.v1.Publisher", + 
"shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", + "shortName": "list_topic_snapshots" + }, + "description": "Sample for ListTopicSnapshots", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_snapshots", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", + "shortName": "list_topic_snapshots" + }, + "description": "Sample for ListTopicSnapshots", + "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", + "shortName": "list_topic_subscriptions" + }, + "description": "Sample for ListTopicSubscriptions", + "file": 
"pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topic_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopicSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", + "shortName": "list_topic_subscriptions" + }, + "description": "Sample for ListTopicSubscriptions", + "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + 
"end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topics", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "pubsub_v1_generated_publisher_list_topics_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topics_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": 
{ + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.list_topics", + "method": { + "fullName": "google.pubsub.v1.Publisher.ListTopics", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "ListTopics" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListTopicsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", + "shortName": "list_topics" + }, + "description": "Sample for ListTopics", + "file": "pubsub_v1_generated_publisher_list_topics_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_list_topics_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.publish", + "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "Publish" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "MutableSequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PublishResponse", + "shortName": "publish" + }, + "description": "Sample for Publish", + "file": "pubsub_v1_generated_publisher_publish_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_Publish_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_publish_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.publish", + "method": { + "fullName": "google.pubsub.v1.Publisher.Publish", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "Publish" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PublishRequest" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "messages", + "type": "MutableSequence[google.pubsub_v1.types.PubsubMessage]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": 
"google.pubsub_v1.types.PublishResponse", + "shortName": "publish" + }, + "description": "Sample for Publish", + "file": "pubsub_v1_generated_publisher_publish_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_publish_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.PublisherAsyncClient", + "shortName": "PublisherAsyncClient" + }, + "fullName": "google.pubsub_v1.PublisherAsyncClient.update_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "pubsub_v1_generated_publisher_update_topic_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + 
"end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_update_topic_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.PublisherClient", + "shortName": "PublisherClient" + }, + "fullName": "google.pubsub_v1.PublisherClient.update_topic", + "method": { + "fullName": "google.pubsub.v1.Publisher.UpdateTopic", + "service": { + "fullName": "google.pubsub.v1.Publisher", + "shortName": "Publisher" + }, + "shortName": "UpdateTopic" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateTopicRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Topic", + "shortName": "update_topic" + }, + "description": "Sample for UpdateTopic", + "file": "pubsub_v1_generated_publisher_update_topic_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_publisher_update_topic_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": 
"google.pubsub_v1.SchemaServiceAsyncClient.create_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_create_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "CreateSchema" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.pubsub_v1.types.CreateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" + }, + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "DeleteSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_schema" + }, + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + 
}, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + 
"fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": 
"pubsub_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_message", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" + }, + "description": "Sample for ValidateMessage", + "file": "pubsub_v1_generated_schema_service_validate_message_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_message_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_message", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateMessage" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateMessageRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateMessageResponse", + "shortName": "validate_message" + }, + "description": "Sample for ValidateMessage", + "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_message_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": 
"SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" + }, + "description": "Sample for ValidateSchema", + "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.validate_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ValidateSchema" + }, 
+ "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ValidateSchemaRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", + "shortName": "validate_schema" + }, + "description": "Sample for ValidateSchema", + "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_validate_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.acknowledge", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": 
"float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "acknowledge" + }, + "description": "Sample for Acknowledge", + "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.acknowledge", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Acknowledge", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Acknowledge" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.AcknowledgeRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "acknowledge" + }, + "description": "Sample for Acknowledge", + "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { 
+ "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_acknowledge_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "pubsub_v1_generated_subscriber_create_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_async.py" + }, + { + "canonical": true, + 
"clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.CreateSnapshotRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "create_snapshot" + }, + "description": "Sample for CreateSnapshot", + "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": 
"Subscriber" + }, + "shortName": "CreateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" + }, + "description": "Sample for CreateSubscription", + "file": "pubsub_v1_generated_subscriber_create_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.create_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "CreateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "topic", + 
"type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "create_subscription" + }, + "description": "Sample for CreateSubscription", + "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_create_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_snapshot" 
+ }, + "description": "Sample for DeleteSnapshot", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_snapshot" + }, + "description": "Sample for DeleteSnapshot", + "file": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.delete_subscription", + "method": { + "fullName": 
"google.pubsub.v1.Subscriber.DeleteSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "DeleteSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_subscription" + }, + "description": "Sample for DeleteSubscription", + "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_delete_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": 
"metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSnapshotRequest" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "get_snapshot" + }, + "description": "Sample for GetSnapshot", + "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, 
+ "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" + }, + "description": "Sample for GetSubscription", + "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"pubsub_v1_generated_subscriber_get_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.get_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.GetSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "GetSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSubscriptionRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "get_subscription" + }, + "description": "Sample for GetSubscription", + "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_get_subscription_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_snapshots", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", + "service": { + "fullName": 
"google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_snapshots", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSnapshots" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSnapshotsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", + "shortName": "list_snapshots" + }, + "description": "Sample for ListSnapshots", + "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_snapshots_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"pubsub_v1_generated_Subscriber_ListSubscriptions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.list_subscriptions", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ListSubscriptions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSubscriptionsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", + "shortName": "list_subscriptions" + }, + "description": "Sample for ListSubscriptions", + "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": 
"REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_ack_deadline", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_ack_deadline" + }, + "description": "Sample for ModifyAckDeadline", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, 
+ "fullName": "google.pubsub_v1.SubscriberClient.modify_ack_deadline", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyAckDeadline" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "ack_ids", + "type": "MutableSequence[str]" + }, + { + "name": "ack_deadline_seconds", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_ack_deadline" + }, + "description": "Sample for ModifyAckDeadline", + "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_push_config", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "modify_push_config" + }, + "description": "Sample for ModifyPushConfig", + "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.modify_push_config", + "method": { + "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "ModifyPushConfig" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ModifyPushConfigRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "push_config", + "type": "google.pubsub_v1.types.PushConfig" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": 
"modify_push_config" + }, + "description": "Sample for ModifyPushConfig", + "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_modify_push_config_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Pull" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" + }, + "description": "Sample for Pull", + "file": "pubsub_v1_generated_subscriber_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + 
}, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_pull_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Pull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Pull" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.PullRequest" + }, + { + "name": "subscription", + "type": "str" + }, + { + "name": "return_immediately", + "type": "bool" + }, + { + "name": "max_messages", + "type": "int" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.PullResponse", + "shortName": "pull" + }, + "description": "Sample for Pull", + "file": "pubsub_v1_generated_subscriber_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Pull_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_pull_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + 
"client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.seek", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Seek" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" + }, + "description": "Sample for Seek", + "file": "pubsub_v1_generated_subscriber_seek_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_seek_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.seek", + "method": { + "fullName": "google.pubsub.v1.Subscriber.Seek", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "Seek" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.SeekRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, 
+ { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.SeekResponse", + "shortName": "seek" + }, + "description": "Sample for Seek", + "file": "pubsub_v1_generated_subscriber_seek_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_seek_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.streaming_pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" + }, + "description": "Sample for StreamingPull", + "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + 
{ + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 56, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 57, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.streaming_pull", + "method": { + "fullName": "google.pubsub.v1.Subscriber.StreamingPull", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "StreamingPull" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", + "shortName": "streaming_pull" + }, + "description": "Sample for StreamingPull", + "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_sync", + "segments": [ + { + "end": 63, + "start": 27, + "type": "FULL" + }, + { + "end": 63, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 56, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 59, + "start": 57, + "type": "REQUEST_EXECUTION" + }, + { + "end": 64, + "start": 60, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_streaming_pull_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" + }, + "description": "Sample for UpdateSnapshot", + "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_snapshot", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSnapshot" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.pubsub_v1.types.UpdateSnapshotRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Snapshot", + "shortName": "update_snapshot" + }, + "description": "Sample for UpdateSnapshot", + "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_snapshot_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SubscriberAsyncClient", + "shortName": "SubscriberAsyncClient" + }, + "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" + }, + "description": "Sample for UpdateSubscription", + "file": 
"pubsub_v1_generated_subscriber_update_subscription_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SubscriberClient", + "shortName": "SubscriberClient" + }, + "fullName": "google.pubsub_v1.SubscriberClient.update_subscription", + "method": { + "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", + "service": { + "fullName": "google.pubsub.v1.Subscriber", + "shortName": "Subscriber" + }, + "shortName": "UpdateSubscription" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Subscription", + "shortName": "update_subscription" + }, + "description": "Sample for UpdateSubscription", + "file": "pubsub_v1_generated_subscriber_update_subscription_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": 
"REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_subscriber_update_subscription_sync.py" + } + ] +} diff --git a/packages/google-cloud-pubsub/testing/constraints-3.10.txt b/packages/google-cloud-pubsub/testing/constraints-3.10.txt index e69de29bb2d1..ad3f0fa58e2d 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.10.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.10.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.11.txt b/packages/google-cloud-pubsub/testing/constraints-3.11.txt index e69de29bb2d1..ad3f0fa58e2d 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.11.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.11.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 08b242a12462..883a87aefd06 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -1,12 +1,10 @@ # This constraints file is used to check that lower bounds # are correct in setup.py -# List *all* library dependencies and extras in this file. +# List all library dependencies and extras in this file. # Pin the version to the lower bound. 
-# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", -# Then this file should have foo==1.14.0 -grpcio==1.38.1 -google-api-core==1.32.0 -libcst==0.3.10 +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.33.2 proto-plus==1.22.0 -grpc-google-iam-v1==0.12.4 protobuf==3.19.5 +grpc-google-iam-v1==0.12.4 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.8.txt b/packages/google-cloud-pubsub/testing/constraints-3.8.txt index e69de29bb2d1..ad3f0fa58e2d 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.8.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.8.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.9.txt b/packages/google-cloud-pubsub/testing/constraints-3.9.txt index e69de29bb2d1..ad3f0fa58e2d 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.9.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.9.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 From 886c6d0b09118f99e951cb27ac096175e9b7e33e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 16 Nov 2022 21:05:55 +0100 Subject: [PATCH 0895/1197] chore(deps): update dependency google-cloud-pubsub to v2.13.11 (#821) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 18485cd5799a..48db3432afc6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.10 +google-cloud-pubsub==2.13.11 avro==1.11.1 From 34219121ad7e97d7f3fe5671a515f94de13c980a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 21 Nov 2022 10:55:07 -0500 Subject: [PATCH 0896/1197] chore(python): update release script dependencies (#824) Source-Link: https://github.com/googleapis/synthtool/commit/25083af347468dd5f90f69627420f7d452b6c50e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 2 +- .../.github/workflows/docs.yml | 4 +- .../.github/workflows/lint.yml | 2 +- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/docker/docs/Dockerfile | 12 ++-- .../.kokoro/requirements.in | 4 +- .../.kokoro/requirements.txt | 61 ++++++++++--------- packages/google-cloud-pubsub/noxfile.py | 4 +- 8 files changed, 48 insertions(+), 43 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 
12edee77695a..3f1ccc085ef7 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:452901c74a22f9b9a3bd02bce780b8e8805c97270d424684bff809ce5be8c2a2 + digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index 7092a139aed3..e97d89e484c9 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.9" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index d2aee5b7d8ec..16d5a9e90f6d 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 87ade4d54362..23000c05d9d8 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -41,7 +41,7 @@ jobs: - name: 
Setup Python uses: actions/setup-python@v4 with: - python-version: "3.10" + python-version: "3.8" - name: Install coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index 238b87b9d1c9..f8137d0ae497 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -60,16 +60,16 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.8.11 +###################### Install python 3.9.13 -# Download python 3.8.11 -RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz +# Download python 3.9.13 +RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz # Extract files -RUN tar -xvf Python-3.8.11.tgz +RUN tar -xvf Python-3.9.13.tgz -# Install python 3.8.11 -RUN ./Python-3.8.11/configure --enable-optimizations +# Install python 3.9.13 +RUN ./Python-3.9.13/configure --enable-optimizations RUN make altinstall ###################### Install pip diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.in b/packages/google-cloud-pubsub/.kokoro/requirements.in index 7718391a34d7..cbd7e77f44db 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.in +++ b/packages/google-cloud-pubsub/.kokoro/requirements.in @@ -5,4 +5,6 @@ typing-extensions twine wheel setuptools -nox \ No newline at end of file +nox +charset-normalizer<3 +click<8.1.0 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 31425f164783..9c1b9be34e6b 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -93,11 +93,14 @@ cffi==1.15.1 \ charset-normalizer==2.1.1 \ 
--hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via requests + # via + # -r requirements.in + # requests click==8.0.4 \ --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb # via + # -r requirements.in # gcp-docuploader # gcp-releasetool colorlog==6.7.0 \ @@ -156,9 +159,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.9.1 \ - --hash=sha256:952f4055d5d986b070ae2a71c4410b250000f9cc5a1e26398fcd55a5bbc5a15f \ - --hash=sha256:d0d3c814a97c1a237517e837d8cfa668ced8df4b882452578ecef4a4e79c583b +gcp-releasetool==1.10.0 \ + --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ + --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ @@ -166,9 +169,9 @@ google-api-core==2.10.2 \ # via # google-cloud-core # google-cloud-storage -google-auth==2.14.0 \ - --hash=sha256:1ad5b0e6eba5f69645971abb3d2c197537d5914070a8c6d30299dfdb07c5c700 \ - --hash=sha256:cf24817855d874ede2efd071aa22125445f555de1685b739a9782fcf408c2a3d +google-auth==2.14.1 \ + --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ + --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 # via # gcp-releasetool # google-api-core @@ -178,9 +181,9 @@ google-cloud-core==2.3.2 \ --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a # via google-cloud-storage -google-cloud-storage==2.5.0 \ - 
--hash=sha256:19a26c66c317ce542cea0830b7e787e8dac2588b6bfa4d3fd3b871ba16305ab0 \ - --hash=sha256:382f34b91de2212e3c2e7b40ec079d27ee2e3dbbae99b75b1bcd8c63063ce235 +google-cloud-storage==2.6.0 \ + --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ + --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -256,9 +259,9 @@ google-resumable-media==2.4.0 \ --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f # via google-cloud-storage -googleapis-common-protos==1.56.4 \ - --hash=sha256:8eb2cbc91b69feaf23e32452a7ae60e791e09967d81d4fcc7fc388182d1bd394 \ - --hash=sha256:c25873c47279387cfdcbdafa36149887901d36202cb645a0e4f29686bf6e4417 +googleapis-common-protos==1.57.0 \ + --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ + --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ @@ -269,6 +272,7 @@ importlib-metadata==5.0.0 \ --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 # via # -r requirements.in + # keyring # twine jaraco-classes==3.2.3 \ --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ @@ -284,9 +288,9 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.9.3 \ - --hash=sha256:69732a15cb1433bdfbc3b980a8a36a04878a6cfd7cb99f497b573f31618001c0 \ - --hash=sha256:69b01dd83c42f590250fe7a1f503fc229b14de83857314b1933a3ddbf595c4a5 +keyring==23.11.0 \ + 
--hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ + --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 # via # gcp-releasetool # twine @@ -350,9 +354,9 @@ pkginfo==1.8.3 \ --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c # via twine -platformdirs==2.5.2 \ - --hash=sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788 \ - --hash=sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19 +platformdirs==2.5.4 \ + --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ + --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -381,7 +385,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core - # googleapis-common-protos py==1.11.0 \ --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 @@ -476,17 +479,17 @@ urllib3==1.26.12 \ # via # requests # twine -virtualenv==20.16.6 \ - --hash=sha256:186ca84254abcbde98180fd17092f9628c5fe742273c02724972a1d8a2035108 \ - --hash=sha256:530b850b523c6449406dfba859d6345e48ef19b8439606c5d74d7d3c9e14d76e +virtualenv==20.16.7 \ + --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ + --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 # via nox webencodings==0.5.1 \ --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 # via bleach -wheel==0.37.1 \ - --hash=sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a \ - 
--hash=sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4 +wheel==0.38.4 \ + --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ + --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 # via -r requirements.in zipp==3.10.0 \ --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ @@ -494,7 +497,7 @@ zipp==3.10.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.0 \ - --hash=sha256:512e5536220e38146176efb833d4a62aa726b7bbff82cfbc8ba9eaa3996e0b17 \ - --hash=sha256:f62ea9da9ed6289bfe868cd6845968a2c854d1427f8548d52cae02a42b4f0356 +setuptools==65.5.1 \ + --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ + --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f # via -r requirements.in diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index a090378e40e3..cc70873a31b6 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -324,7 +324,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docs(session): """Build the docs for this library.""" @@ -350,7 +350,7 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +@nox.session(python="3.9") def docfx(session): """Build the docfx yaml files for this library.""" From 2b79175af4a9f039eec912b3ca489b15f2e4ddc7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 26 Nov 2022 18:56:52 -0500 Subject: [PATCH 0897/1197] chore(python): drop flake8-import-order in samples noxfile (#829) Source-Link: https://github.com/googleapis/synthtool/commit/6ed3a831cb9ff69ef8a504c353e098ec0192ad93 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 2 +- .../samples/snippets/noxfile.py | 26 +++---------------- 2 files changed, 4 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 3f1ccc085ef7..bb21147e4c23 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e6cbd61f1838d9ff6a31436dfc13717f372a7482a82fc1863ca954ec47bff8c8 + digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 0398d72ff690..f5c32b22789b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -18,7 +18,7 @@ import os from pathlib import Path import sys -from typing import Callable, Dict, List, Optional +from typing import Callable, Dict, Optional import nox @@ -109,22 +109,6 @@ def get_pytest_env_vars() -> Dict[str, str]: # -def _determine_local_import_names(start_dir: str) -> List[str]: - """Determines all import names that should be considered "local". - - This is used when running the linter to insure that import order is - properly checked. - """ - file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] - return [ - basename - for basename, extension in file_ext_pairs - if extension == ".py" - or os.path.isdir(os.path.join(start_dir, basename)) - and basename not in ("__pycache__") - ] - - # Linting with flake8. 
# # We ignore the following rules: @@ -139,7 +123,6 @@ def _determine_local_import_names(start_dir: str) -> List[str]: "--show-source", "--builtin=gettext", "--max-complexity=20", - "--import-order-style=google", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", @@ -149,14 +132,11 @@ def _determine_local_import_names(start_dir: str) -> List[str]: @nox.session def lint(session: nox.sessions.Session) -> None: if not TEST_CONFIG["enforce_type_hints"]: - session.install("flake8", "flake8-import-order") + session.install("flake8") else: - session.install("flake8", "flake8-import-order", "flake8-annotations") + session.install("flake8", "flake8-annotations") - local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ - "--application-import-names", - ",".join(local_names), ".", ] session.run("flake8", *args) From 34c05401f6aa397de654c20fa89076341101b272 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 2 Dec 2022 16:39:14 +0100 Subject: [PATCH 0898/1197] chore(deps): update dependency google-cloud-bigquery to v3.4.0 (#826) Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index b52e56f395a0..0c49adc041ec 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.0 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.3.6 +google-cloud-bigquery==3.4.0 From 830d6009441d91d4ee22b7b395f791ed7fac9f9a Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Fri, 2 Dec 2022 17:34:25 -0500 Subject: [PATCH 0899/1197] chore: move version to 
gapic_version.py (#830) --- .../google-cloud-pubsub/.github/CODEOWNERS | 10 +- .../.github/release-please.yml | 1 + .../.release-please-manifest.json | 4 + .../google/pubsub/gapic_version.py | 2 +- .../services/publisher/async_client.py | 24 +- .../pubsub_v1/services/publisher/client.py | 24 +- packages/google-cloud-pubsub/owlbot.py | 111 +- .../release-please-config.json | 24 + .../snippet_metadata_pubsub_v1.json | 5019 ----------------- .../samples/snippets/iam.py | 6 +- .../samples/snippets/iam_test.py | 9 +- .../samples/snippets/noxfile.py | 15 +- .../samples/snippets/publisher.py | 12 +- .../samples/snippets/schema.py | 3 +- .../samples/snippets/subscriber.py | 20 +- packages/google-cloud-pubsub/setup.py | 45 +- .../testing/constraints-3.7.txt | 2 + 17 files changed, 179 insertions(+), 5152 deletions(-) create mode 100644 packages/google-cloud-pubsub/.release-please-manifest.json create mode 100644 packages/google-cloud-pubsub/release-please-config.json delete mode 100644 packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS index dfe671a9382e..f1b33465e33d 100644 --- a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -3,10 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
+# @googleapis/yoshi-python @googleapis/api-pubsub are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-pubsub -# The @googleapis/api-pubsub and yoshi-python are the default owners for changes in this repo -* @googleapis/api-pubsub @googleapis/yoshi-python - -# Additionally, the python-samples-reviewers team is also among the default owners for samples changes -/samples/ @googleapis/api-pubsub @googleapis/python-samples-owners @googleapis/yoshi-python +# @googleapis/python-samples-reviewers @googleapis/api-pubsub are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-pubsub diff --git a/packages/google-cloud-pubsub/.github/release-please.yml b/packages/google-cloud-pubsub/.github/release-please.yml index 29601ad4692c..fe749ff6b15d 100644 --- a/packages/google-cloud-pubsub/.github/release-please.yml +++ b/packages/google-cloud-pubsub/.github/release-please.yml @@ -1,5 +1,6 @@ releaseType: python handleGHRelease: true +manifest: true # NOTE: this section is generated by synthtool.languages.python # See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py branches: diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json new file mode 100644 index 000000000000..2a1de97295f7 --- /dev/null +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -0,0 +1,4 @@ +{ + ".": "2.13.11" +} + \ No newline at end of file diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 35859c3f7fc1..51b84e9a69d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "0.1.0" +__version__ = "2.13.11" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 269b273c9360..d12eeb1da20e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -223,7 +223,7 @@ async def create_topic( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource @@ -339,7 +339,7 @@ async def update_topic( request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Updates an existing topic. Note that certain @@ -434,7 +434,7 @@ async def publish( topic: Optional[str] = None, messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if @@ -558,7 +558,7 @@ async def get_topic( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. 
@@ -669,7 +669,7 @@ async def list_topics( *, project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. @@ -794,7 +794,7 @@ async def list_topic_subscriptions( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsAsyncPager: r"""Lists the names of the attached subscriptions on this @@ -922,7 +922,7 @@ async def list_topic_snapshots( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsAsyncPager: r"""Lists the names of the snapshots on this topic. Snapshots are @@ -1054,7 +1054,7 @@ async def delete_topic( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if @@ -1158,7 +1158,7 @@ async def detach_subscription( request: Optional[Union[pubsub.DetachSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. 
All messages retained @@ -1254,7 +1254,7 @@ async def set_iam_policy( request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1374,7 +1374,7 @@ async def get_iam_policy( request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1496,7 +1496,7 @@ async def test_iam_permissions( request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index d052293f85d0..32b1def3dbc2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -503,7 +503,7 @@ def create_topic( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. 
See the [resource @@ -610,7 +610,7 @@ def update_topic( request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Updates an existing topic. Note that certain @@ -697,7 +697,7 @@ def publish( topic: Optional[str] = None, messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if @@ -806,7 +806,7 @@ def get_topic( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. @@ -906,7 +906,7 @@ def list_topics( *, project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicsPager: r"""Lists matching topics. 
@@ -1020,7 +1020,7 @@ def list_topic_subscriptions( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSubscriptionsPager: r"""Lists the names of the attached subscriptions on this @@ -1137,7 +1137,7 @@ def list_topic_snapshots( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListTopicSnapshotsPager: r"""Lists the names of the snapshots on this topic. Snapshots are @@ -1258,7 +1258,7 @@ def delete_topic( *, topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if @@ -1353,7 +1353,7 @@ def detach_subscription( request: Optional[Union[pubsub.DetachSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. All messages retained @@ -1454,7 +1454,7 @@ def set_iam_policy( request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. 
@@ -1575,7 +1575,7 @@ def get_iam_policy( request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1697,7 +1697,7 @@ def test_iam_permissions( request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 888d156c1e90..81ec371a2959 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -1,4 +1,4 @@ -# Copyright 2018 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,37 +12,31 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-"""This script is used to synthesize generated parts of this library.""" - +import json +from pathlib import Path import re +import shutil import textwrap import synthtool as s -from synthtool import gcp +import synthtool.gcp as gcp from synthtool.languages import python -common = gcp.CommonTemplates() +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- -default_version = "v1" +clean_up_generated_samples = True -for library in s.get_staging_dirs(default_version): - # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace( - library / f"google/pubsub_{library.name}/types/*.py", - r""". - Attributes:""", - r""".\n - Attributes:""", - ) +# Load the default version defined in .repo-metadata.json. +default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) - # Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/902 - s.replace( - library / f"google/pubsub_{library.name}/types/*.py", - r""". - Attributes:""", - r""".\n - Attributes:""", - ) +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False # DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. 
s.replace( @@ -239,13 +233,16 @@ ) # Allow timeout to be an instance of google.api_core.timeout.* - s.replace( + count = s.replace( library / f"google/pubsub_{library.name}/types/__init__.py", r"from \.pubsub import \(", "from typing import Union\n\n\g<0>", ) - s.replace( + if count < 1: + raise Exception("Catch timeout replacement 1 failed.") + + count = s.replace( library / f"google/pubsub_{library.name}/types/__init__.py", r"__all__ = \(\n", textwrap.dedent( @@ -262,30 +259,45 @@ ), ) - s.replace( + if count < 1: + raise Exception("Catch timeout replacement 2 failed.") + + count = s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", r"from google.api_core import retry as retries.*\n", "\g<0>from google.api_core import timeout as timeouts # type: ignore\n", ) - s.replace( + if count < 1: + raise Exception("Catch timeout replacement 3 failed.") + + count = s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", f"from google\.pubsub_{library.name}\.types import pubsub", f"\g<0>\nfrom google.pubsub_{library.name}.types import TimeoutType", ) - s.replace( + if count < 1: + raise Exception("Catch timeout replacement 4 failed.") + + count = s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", - r"(\s+)timeout: float = None.*\n", + r"(\s+)timeout: Optional\[float\] = None.*\n", f"\g<1>timeout: TimeoutType = gapic_{library.name}.method.DEFAULT,", ) - s.replace( + if count < 1: + raise Exception("Catch timeout replacement 5 failed.") + + count = s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", r"([^\S\r\n]+)timeout \(float\): (.*)\n", ("\g<1>timeout (TimeoutType):\n" "\g<1> \g<2>\n"), ) + if count < 1: + raise Exception("Catch timeout replacement 6 failed.") + # Override the default max retry deadline for publisher methods. 
count = s.replace( library / f"google/pubsub_{library.name}/services/publisher/transports/base.py", @@ -309,46 +321,23 @@ if count < 1: raise Exception(".coveragerc replacement failed.") - # fix the package name in samples/generated_samples to reflect - # the package on pypi. https://pypi.org/project/google-cloud-pubsub/ - s.replace( - library / "samples/generated_samples/**/*.py", - "pip install google-pubsub", - "pip install google-cloud-pubsub", - ) - - # This line is required to move the generated code from the `owl-bot-staging` folder - # to the destination folder `google/pubsub` - s.move( - library, - excludes=[ - "docs/**/*", - "nox.py", - "README.rst", - "setup.py", - f"google/cloud/pubsub_{library.name}/__init__.py", - f"google/cloud/pubsub_{library.name}/types.py", - ], - ) + s.move([library], excludes=["**/gapic_version.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- + templated_files = gcp.CommonTemplates().py_library( microgenerator=True, samples=True, cov_level=100, + versions=gcp.common.detect_versions(path="./google", default_first=True), unit_test_python_versions=["3.7", "3.8", "3.9", "3.10"], system_test_python_versions=["3.10"], system_test_external_dependencies=["psutil"], ) -s.move(templated_files, excludes=["README.rst", ".coveragerc", ".github/CODEOWNERS"]) -python.configure_previous_major_version_branches() -# ---------------------------------------------------------------------------- -# Samples templates -# ---------------------------------------------------------------------------- -python.py_samples() +s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml", "README.rst", "docs/index.rst"]) # ---------------------------------------------------------------------------- # Add mypy nox 
session. @@ -438,5 +427,9 @@ def mypy_samples(session): "noxfile.py", "--cov=google", "--cov=google/cloud", ) -# Final code style adjustments. -s.shell.run(["nox", "-s", "blacken"], hide_output=False) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) diff --git a/packages/google-cloud-pubsub/release-please-config.json b/packages/google-cloud-pubsub/release-please-config.json new file mode 100644 index 000000000000..939d477d160a --- /dev/null +++ b/packages/google-cloud-pubsub/release-please-config.json @@ -0,0 +1,24 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "release-type": "python", + "extra-files": [ + "google/pubsub/gapic_version.py", + { + "type": "json", + "path": "samples/generated_samples/snippet_metadata_google.pubsub.v1.json", + "jsonpath": "$.clientLibrary.version" + } + ] + } + }, + "release-type": "python", + "plugins": [ + { + "type": "sentence-case" + } + ], + "initial-version": "0.1.0" +} + \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json deleted file mode 100644 index 57f929ff59df..000000000000 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_pubsub_v1.json +++ /dev/null @@ -1,5019 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.pubsub.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-pubsub" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": 
"google.pubsub_v1.PublisherAsyncClient.create_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.CreateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "CreateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Topic" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "create_topic" - }, - "description": "Sample for CreateTopic", - "file": "pubsub_v1_generated_publisher_create_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_create_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.create_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.CreateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "CreateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Topic" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": 
"float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "create_topic" - }, - "description": "Sample for CreateTopic", - "file": "pubsub_v1_generated_publisher_create_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_CreateTopic_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_create_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.delete_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.DeleteTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DeleteTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_topic" - }, - "description": "Sample for DeleteTopic", - "file": "pubsub_v1_generated_publisher_delete_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - 
"start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_delete_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.delete_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.DeleteTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DeleteTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_topic" - }, - "description": "Sample for DeleteTopic", - "file": "pubsub_v1_generated_publisher_delete_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DeleteTopic_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_delete_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": 
"google.pubsub_v1.PublisherAsyncClient.detach_subscription", - "method": { - "fullName": "google.pubsub.v1.Publisher.DetachSubscription", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DetachSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DetachSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", - "shortName": "detach_subscription" - }, - "description": "Sample for DetachSubscription", - "file": "pubsub_v1_generated_publisher_detach_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_detach_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.detach_subscription", - "method": { - "fullName": "google.pubsub.v1.Publisher.DetachSubscription", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "DetachSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DetachSubscriptionRequest" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", - "shortName": "detach_subscription" - }, - "description": "Sample for DetachSubscription", - "file": "pubsub_v1_generated_publisher_detach_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_DetachSubscription_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_detach_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.get_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.GetTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "GetTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "get_topic" - }, - "description": "Sample for GetTopic", - "file": "pubsub_v1_generated_publisher_get_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - 
"regionTag": "pubsub_v1_generated_Publisher_GetTopic_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_get_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.get_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.GetTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "GetTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetTopicRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "get_topic" - }, - "description": "Sample for GetTopic", - "file": "pubsub_v1_generated_publisher_get_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_GetTopic_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"pubsub_v1_generated_publisher_get_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_snapshots", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSnapshots", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", - "shortName": "list_topic_snapshots" - }, - "description": "Sample for ListTopicSnapshots", - "file": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_snapshots_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.list_topic_snapshots", - "method": { - "fullName": 
"google.pubsub.v1.Publisher.ListTopicSnapshots", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSnapshotsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", - "shortName": "list_topic_snapshots" - }, - "description": "Sample for ListTopicSnapshots", - "file": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSnapshots_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_snapshots_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topic_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" - }, - { - "name": "topic", - "type": 
"str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", - "shortName": "list_topic_subscriptions" - }, - "description": "Sample for ListTopicSubscriptions", - "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.list_topic_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopicSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopicSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicSubscriptionsRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", - "shortName": 
"list_topic_subscriptions" - }, - "description": "Sample for ListTopicSubscriptions", - "file": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopicSubscriptions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.list_topics", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopics", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopics" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", - "shortName": "list_topics" - }, - "description": "Sample for ListTopics", - "file": "pubsub_v1_generated_publisher_list_topics_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopics_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - 
"type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topics_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.list_topics", - "method": { - "fullName": "google.pubsub.v1.Publisher.ListTopics", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "ListTopics" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListTopicsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", - "shortName": "list_topics" - }, - "description": "Sample for ListTopics", - "file": "pubsub_v1_generated_publisher_list_topics_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_ListTopics_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_list_topics_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - 
"fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.publish", - "method": { - "fullName": "google.pubsub.v1.Publisher.Publish", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "Publish" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PublishRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "messages", - "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PublishResponse", - "shortName": "publish" - }, - "description": "Sample for Publish", - "file": "pubsub_v1_generated_publisher_publish_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_Publish_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_publish_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.publish", - "method": { - "fullName": "google.pubsub.v1.Publisher.Publish", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "Publish" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.pubsub_v1.types.PublishRequest" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "messages", - "type": "Sequence[google.pubsub_v1.types.PubsubMessage]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PublishResponse", - "shortName": "publish" - }, - "description": "Sample for Publish", - "file": "pubsub_v1_generated_publisher_publish_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_Publish_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_publish_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.PublisherAsyncClient", - "shortName": "PublisherAsyncClient" - }, - "fullName": "google.pubsub_v1.PublisherAsyncClient.update_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.UpdateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "UpdateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateTopicRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "update_topic" - }, - "description": "Sample for UpdateTopic", - 
"file": "pubsub_v1_generated_publisher_update_topic_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_async", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_update_topic_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.PublisherClient", - "shortName": "PublisherClient" - }, - "fullName": "google.pubsub_v1.PublisherClient.update_topic", - "method": { - "fullName": "google.pubsub.v1.Publisher.UpdateTopic", - "service": { - "fullName": "google.pubsub.v1.Publisher", - "shortName": "Publisher" - }, - "shortName": "UpdateTopic" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateTopicRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Topic", - "shortName": "update_topic" - }, - "description": "Sample for UpdateTopic", - "file": "pubsub_v1_generated_publisher_update_topic_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Publisher_UpdateTopic_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - 
{ - "end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_publisher_update_topic_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.CreateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "CreateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "schema_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "create_schema" - }, - "description": "Sample for CreateSchema", - "file": "pubsub_v1_generated_schema_service_create_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_create_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": 
"SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.CreateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "CreateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "schema_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "create_schema" - }, - "description": "Sample for CreateSchema", - "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - 
"shortName": "DeleteSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_schema" - }, - "description": "Sample for DeleteSchema", - "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "DeleteSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_schema" - }, - "description": "Sample for DeleteSchema", - "file": 
"pubsub_v1_generated_schema_service_delete_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.GetSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "GetSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "get_schema" - }, - "description": "Sample for GetSchema", - "file": "pubsub_v1_generated_schema_service_get_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" 
- }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_get_schema_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.GetSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "GetSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSchemaRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Schema", - "shortName": "get_schema" - }, - "description": "Sample for GetSchema", - "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": 
"google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ListSchemas", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ListSchemas" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSchemasRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", - "shortName": "list_schemas" - }, - "description": "Sample for ListSchemas", - "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ListSchemas", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ListSchemas" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSchemasRequest" - }, - { - "name": "parent", - 
"type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", - "shortName": "list_schemas" - }, - "description": "Sample for ListSchemas", - "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_message", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateMessage" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateMessageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateMessageResponse", - "shortName": "validate_message" - }, - "description": "Sample for ValidateMessage", - "file": 
"pubsub_v1_generated_schema_service_validate_message_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_message_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.validate_message", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateMessage", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateMessage" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateMessageRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateMessageResponse", - "shortName": "validate_message" - }, - "description": "Sample for ValidateMessage", - "file": "pubsub_v1_generated_schema_service_validate_message_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateMessage_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - 
"type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_message_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", - "shortName": "SchemaServiceAsyncClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.validate_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", - "shortName": "validate_schema" - }, - "description": "Sample for ValidateSchema", - "file": "pubsub_v1_generated_schema_service_validate_schema_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_schema_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SchemaServiceClient", - "shortName": "SchemaServiceClient" - }, - "fullName": "google.pubsub_v1.SchemaServiceClient.validate_schema", - "method": { - "fullName": "google.pubsub.v1.SchemaService.ValidateSchema", - "service": { - "fullName": "google.pubsub.v1.SchemaService", - "shortName": "SchemaService" - }, - "shortName": "ValidateSchema" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ValidateSchemaRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "schema", - "type": "google.pubsub_v1.types.Schema" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", - "shortName": "validate_schema" - }, - "description": "Sample for ValidateSchema", - "file": "pubsub_v1_generated_schema_service_validate_schema_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ValidateSchema_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_schema_service_validate_schema_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.acknowledge", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Acknowledge", - "service": { - "fullName": 
"google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Acknowledge" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.AcknowledgeRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "acknowledge" - }, - "description": "Sample for Acknowledge", - "file": "pubsub_v1_generated_subscriber_acknowledge_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_acknowledge_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.acknowledge", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Acknowledge", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Acknowledge" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.AcknowledgeRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, 
str]" - } - ], - "shortName": "acknowledge" - }, - "description": "Sample for Acknowledge", - "file": "pubsub_v1_generated_subscriber_acknowledge_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Acknowledge_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_acknowledge_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "create_snapshot" - }, - "description": "Sample for CreateSnapshot", - "file": "pubsub_v1_generated_subscriber_create_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - 
"type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.create_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.CreateSnapshotRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "create_snapshot" - }, - "description": "Sample for CreateSnapshot", - "file": "pubsub_v1_generated_subscriber_create_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSnapshot_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_snapshot_sync.py" - }, - { - 
"canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.create_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Subscription" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "create_subscription" - }, - "description": "Sample for CreateSubscription", - "file": "pubsub_v1_generated_subscriber_create_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": 
"google.pubsub_v1.SubscriberClient.create_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.CreateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "CreateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.Subscription" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "topic", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "create_subscription" - }, - "description": "Sample for CreateSubscription", - "file": "pubsub_v1_generated_subscriber_create_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_CreateSubscription_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_create_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - 
"shortName": "Subscriber" - }, - "shortName": "DeleteSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_snapshot" - }, - "description": "Sample for DeleteSnapshot", - "file": "pubsub_v1_generated_subscriber_delete_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.delete_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_snapshot" - }, - "description": "Sample for DeleteSnapshot", - "file": 
"pubsub_v1_generated_subscriber_delete_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSnapshot_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.delete_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_subscription" - }, - "description": "Sample for DeleteSubscription", - "file": "pubsub_v1_generated_subscriber_delete_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" 
- }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.delete_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.DeleteSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "DeleteSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.DeleteSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_subscription" - }, - "description": "Sample for DeleteSubscription", - "file": "pubsub_v1_generated_subscriber_delete_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_DeleteSubscription_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_delete_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_snapshot", - "method": { - "fullName": 
"google.pubsub.v1.Subscriber.GetSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "get_snapshot" - }, - "description": "Sample for GetSnapshot", - "file": "pubsub_v1_generated_subscriber_get_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_snapshot_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.get_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSnapshotRequest" - }, - { - "name": "snapshot", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "get_snapshot" - }, - "description": "Sample for GetSnapshot", - "file": "pubsub_v1_generated_subscriber_get_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSnapshot_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.get_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "get_subscription" - }, - "description": "Sample for GetSubscription", - "file": "pubsub_v1_generated_subscriber_get_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_async", - "segments": [ 
- { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_get_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.get_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.GetSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "GetSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.GetSubscriptionRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "get_subscription" - }, - "description": "Sample for GetSubscription", - "file": "pubsub_v1_generated_subscriber_get_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_GetSubscription_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": 
"pubsub_v1_generated_subscriber_get_subscription_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_snapshots", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSnapshotsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", - "shortName": "list_snapshots" - }, - "description": "Sample for ListSnapshots", - "file": "pubsub_v1_generated_subscriber_list_snapshots_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_snapshots_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.list_snapshots", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSnapshots", - "service": { - 
"fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSnapshots" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSnapshotsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", - "shortName": "list_snapshots" - }, - "description": "Sample for ListSnapshots", - "file": "pubsub_v1_generated_subscriber_list_snapshots_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSnapshots_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_snapshots_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.list_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSubscriptionsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": 
"float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", - "shortName": "list_subscriptions" - }, - "description": "Sample for ListSubscriptions", - "file": "pubsub_v1_generated_subscriber_list_subscriptions_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_subscriptions_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.list_subscriptions", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ListSubscriptions", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ListSubscriptions" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ListSubscriptionsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", - "shortName": "list_subscriptions" - }, - "description": "Sample for ListSubscriptions", - "file": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py", - "language": "PYTHON", - 
"origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ListSubscriptions_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_list_subscriptions_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_ack_deadline", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyAckDeadline" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_ack_deadline" - }, - "description": "Sample for ModifyAckDeadline", - "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - 
"end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.modify_ack_deadline", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyAckDeadline", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyAckDeadline" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyAckDeadlineRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "ack_ids", - "type": "Sequence[str]" - }, - { - "name": "ack_deadline_seconds", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_ack_deadline" - }, - "description": "Sample for ModifyAckDeadline", - "file": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyAckDeadline_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": 
"google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.modify_push_config", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyPushConfig" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.ModifyPushConfigRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_push_config" - }, - "description": "Sample for ModifyPushConfig", - "file": "pubsub_v1_generated_subscriber_modify_push_config_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_push_config_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.modify_push_config", - "method": { - "fullName": "google.pubsub.v1.Subscriber.ModifyPushConfig", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "ModifyPushConfig" - }, - "parameters": [ - { - "name": 
"request", - "type": "google.pubsub_v1.types.ModifyPushConfigRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "push_config", - "type": "google.pubsub_v1.types.PushConfig" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "modify_push_config" - }, - "description": "Sample for ModifyPushConfig", - "file": "pubsub_v1_generated_subscriber_modify_push_config_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_ModifyPushConfig_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_modify_push_config_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Pull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Pull" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PullRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "return_immediately", - "type": "bool" - }, - { - "name": "max_messages", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.pubsub_v1.types.PullResponse", - "shortName": "pull" - }, - "description": "Sample for Pull", - "file": "pubsub_v1_generated_subscriber_pull_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Pull_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_pull_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Pull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Pull" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.PullRequest" - }, - { - "name": "subscription", - "type": "str" - }, - { - "name": "return_immediately", - "type": "bool" - }, - { - "name": "max_messages", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.PullResponse", - "shortName": "pull" - }, - "description": "Sample for Pull", - "file": "pubsub_v1_generated_subscriber_pull_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Pull_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - 
}, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_pull_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.seek", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Seek", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Seek" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.SeekRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.SeekResponse", - "shortName": "seek" - }, - "description": "Sample for Seek", - "file": "pubsub_v1_generated_subscriber_seek_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Seek_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_seek_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": 
"google.pubsub_v1.SubscriberClient.seek", - "method": { - "fullName": "google.pubsub.v1.Subscriber.Seek", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "Seek" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.SeekRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.SeekResponse", - "shortName": "seek" - }, - "description": "Sample for Seek", - "file": "pubsub_v1_generated_subscriber_seek_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_Seek_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_seek_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.streaming_pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.StreamingPull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "StreamingPull" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", - "shortName": "streaming_pull" - }, - "description": "Sample for StreamingPull", - "file": "pubsub_v1_generated_subscriber_streaming_pull_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_async", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - { - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 56, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 57, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_streaming_pull_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.streaming_pull", - "method": { - "fullName": "google.pubsub.v1.Subscriber.StreamingPull", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "StreamingPull" - }, - "parameters": [ - { - "name": "requests", - "type": "Iterator[google.pubsub_v1.types.StreamingPullRequest]" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", - "shortName": "streaming_pull" - }, - "description": "Sample for StreamingPull", - "file": "pubsub_v1_generated_subscriber_streaming_pull_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_StreamingPull_sync", - "segments": [ - { - "end": 63, - "start": 27, - "type": "FULL" - }, - 
{ - "end": 63, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 56, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 59, - "start": 57, - "type": "REQUEST_EXECUTION" - }, - { - "end": 64, - "start": 60, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_streaming_pull_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "update_snapshot" - }, - "description": "Sample for UpdateSnapshot", - "file": "pubsub_v1_generated_subscriber_update_snapshot_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_async", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_snapshot_async.py" - }, - { - "canonical": true, - 
"clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.update_snapshot", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSnapshot", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSnapshot" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSnapshotRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Snapshot", - "shortName": "update_snapshot" - }, - "description": "Sample for UpdateSnapshot", - "file": "pubsub_v1_generated_subscriber_update_snapshot_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSnapshot_sync", - "segments": [ - { - "end": 50, - "start": 27, - "type": "FULL" - }, - { - "end": 50, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 44, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 47, - "start": 45, - "type": "REQUEST_EXECUTION" - }, - { - "end": 51, - "start": 48, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_snapshot_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.pubsub_v1.SubscriberAsyncClient", - "shortName": "SubscriberAsyncClient" - }, - "fullName": "google.pubsub_v1.SubscriberAsyncClient.update_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSubscription" - }, - "parameters": [ - { - "name": "request", - 
"type": "google.pubsub_v1.types.UpdateSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "update_subscription" - }, - "description": "Sample for UpdateSubscription", - "file": "pubsub_v1_generated_subscriber_update_subscription_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_subscription_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.pubsub_v1.SubscriberClient", - "shortName": "SubscriberClient" - }, - "fullName": "google.pubsub_v1.SubscriberClient.update_subscription", - "method": { - "fullName": "google.pubsub.v1.Subscriber.UpdateSubscription", - "service": { - "fullName": "google.pubsub.v1.Subscriber", - "shortName": "Subscriber" - }, - "shortName": "UpdateSubscription" - }, - "parameters": [ - { - "name": "request", - "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.pubsub_v1.types.Subscription", - "shortName": "update_subscription" - }, - "description": "Sample for UpdateSubscription", - "file": 
"pubsub_v1_generated_subscriber_update_subscription_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_Subscriber_UpdateSubscription_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "pubsub_v1_generated_subscriber_update_subscription_sync.py" - } - ] -} diff --git a/packages/google-cloud-pubsub/samples/snippets/iam.py b/packages/google-cloud-pubsub/samples/snippets/iam.py index b638a53449d7..aaf024864e90 100644 --- a/packages/google-cloud-pubsub/samples/snippets/iam.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam.py @@ -183,7 +183,8 @@ def check_subscription_permissions(project_id: str, subscription_id: str) -> Non if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") @@ -215,7 +216,8 @@ def check_subscription_permissions(project_id: str, subscription_id: str) -> Non check_topic_permissions_parser.add_argument("topic_id") check_subscription_permissions_parser = subparsers.add_parser( - "check-subscription-permissions", help=check_subscription_permissions.__doc__, + "check-subscription-permissions", + help=check_subscription_permissions.__doc__, ) check_subscription_permissions_parser.add_argument("subscription_id") diff --git a/packages/google-cloud-pubsub/samples/snippets/iam_test.py b/packages/google-cloud-pubsub/samples/snippets/iam_test.py index 655e43e3689f..c1289ad39f98 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/iam_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/iam_test.py @@ -62,7 +62,8 @@ def subscriber_client() -> Generator[pubsub_v1.SubscriberClient, None, None]: @pytest.fixture(scope="module") def subscription_path( - subscriber_client: pubsub_v1.SubscriberClient, topic_path: str, + subscriber_client: pubsub_v1.SubscriberClient, + topic_path: str, ) -> Generator[str, None, None]: subscription_path = subscriber_client.subscription_path(PROJECT_ID, SUBSCRIPTION_ID) subscription = subscriber_client.create_subscription( @@ -102,7 +103,8 @@ def test_set_topic_policy( def test_set_subscription_policy( - subscriber_client: pubsub_v1.SubscriberClient, subscription_path: str, + subscriber_client: pubsub_v1.SubscriberClient, + subscription_path: str, ) -> None: iam.set_subscription_policy(PROJECT_ID, SUBSCRIPTION_ID) policy = subscriber_client.get_iam_policy(request={"resource": subscription_path}) @@ -118,7 +120,8 @@ def test_check_topic_permissions(topic_path: str, capsys: CaptureFixture[str]) - def test_check_subscription_permissions( - subscription_path: str, capsys: CaptureFixture[str], + subscription_path: str, + capsys: CaptureFixture[str], ) -> None: iam.check_subscription_permissions(PROJECT_ID, SUBSCRIPTION_ID) out, _ = capsys.readouterr() diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index f5c32b22789b..e8283c38d4a0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -160,6 +160,7 @@ def blacken(session: nox.sessions.Session) -> None: # format = isort + black # + @nox.session def format(session: nox.sessions.Session) -> None: """ @@ -187,7 +188,9 @@ def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: # check for presence of tests - test_list = glob.glob("**/*_test.py", recursive=True) + 
glob.glob("**/test_*.py", recursive=True) + test_list = glob.glob("**/*_test.py", recursive=True) + glob.glob( + "**/test_*.py", recursive=True + ) test_list.extend(glob.glob("**/tests", recursive=True)) if len(test_list) == 0: @@ -209,9 +212,7 @@ def _session_tests( if os.path.exists("requirements-test.txt"): if os.path.exists("constraints-test.txt"): - session.install( - "-r", "requirements-test.txt", "-c", "constraints-test.txt" - ) + session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") else: session.install("-r", "requirements-test.txt") with open("requirements-test.txt") as rtfile: @@ -224,9 +225,9 @@ def _session_tests( post_install(session) if "pytest-parallel" in packages: - concurrent_args.extend(['--workers', 'auto', '--tests-per-worker', 'auto']) + concurrent_args.extend(["--workers", "auto", "--tests-per-worker", "auto"]) elif "pytest-xdist" in packages: - concurrent_args.extend(['-n', 'auto']) + concurrent_args.extend(["-n", "auto"]) session.run( "pytest", @@ -256,7 +257,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
p = Path(os.getcwd()) for i in range(10): diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d6e52077213d..e154cf574c98 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -419,7 +419,8 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") @@ -442,7 +443,8 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: publish_with_custom_attributes_parser.add_argument("topic_id") publish_with_error_handler_parser = subparsers.add_parser( - "publish-with-error-handler", help=publish_messages_with_error_handler.__doc__, + "publish-with-error-handler", + help=publish_messages_with_error_handler.__doc__, ) publish_with_error_handler_parser.add_argument("topic_id") @@ -465,7 +467,8 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: publish_with_retry_settings_parser.add_argument("topic_id") publish_with_ordering_keys_parser = subparsers.add_parser( - "publish-with-ordering-keys", help=publish_with_ordering_keys.__doc__, + "publish-with-ordering-keys", + help=publish_with_ordering_keys.__doc__, ) publish_with_ordering_keys_parser.add_argument("topic_id") @@ -476,7 +479,8 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: resume_publish_with_ordering_keys_parser.add_argument("topic_id") detach_subscription_parser = subparsers.add_parser( - "detach-subscription", help=detach_subscription.__doc__, + "detach-subscription", + help=detach_subscription.__doc__, ) detach_subscription_parser.add_argument("subscription_id") diff --git 
a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 977e4c0c432d..e2a171d1dbf3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -414,7 +414,8 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: if __name__ == "__main__": parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index fb2c98f33e57..b1b9ad0ea85d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -217,7 +217,10 @@ def create_subscription_with_ordering( def create_subscription_with_filtering( - project_id: str, topic_id: str, subscription_id: str, filter: str, + project_id: str, + topic_id: str, + subscription_id: str, + filter: str, ) -> None: """Create a subscription with filtering enabled.""" # [START pubsub_create_subscription_with_filter] @@ -291,7 +294,9 @@ def create_bigquery_subscription( topic_path = publisher.topic_path(project_id, topic_id) subscription_path = subscriber.subscription_path(project_id, subscription_id) - bigquery_config = pubsub_v1.types.BigQueryConfig(table=bigquery_table_id, write_metadata=True) + bigquery_config = pubsub_v1.types.BigQueryConfig( + table=bigquery_table_id, write_metadata=True + ) # Wrap the subscriber in a 'with' block to automatically call close() to # close the underlying gRPC channel when done. @@ -421,7 +426,9 @@ def update_subscription_with_dead_letter_policy( # after the update. 
Here, values in the required fields (name, topic) help # identify the subscription. subscription = pubsub_v1.types.Subscription( - name=subscription_path, topic=topic_path, dead_letter_policy=dead_letter_policy, + name=subscription_path, + topic=topic_path, + dead_letter_policy=dead_letter_policy, ) with subscriber: @@ -626,7 +633,9 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"Done processing the message {message.data!r}.") streaming_pull_future = subscriber.subscribe( - subscription_path, callback=callback, await_callbacks_on_shutdown=True, + subscription_path, + callback=callback, + await_callbacks_on_shutdown=True, ) print(f"Listening for messages on {subscription_path}..\n") @@ -900,7 +909,8 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: if __name__ == "__main__": # noqa parser = argparse.ArgumentParser( - description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument("project_id", help="Your Google Cloud project ID") diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 883b499f77d0..1fa80c2cadea 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -1,4 +1,5 @@ -# Copyright 2018 Google LLC +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,35 +12,39 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - +# import io import os -import setuptools - +import setuptools # type: ignore -# Package metadata. 
+package_root = os.path.abspath(os.path.dirname(__file__)) name = "google-cloud-pubsub" + + description = "Google Cloud Pub/Sub API client library" -version = "2.13.11" -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" + +version = {} +with open(os.path.join(package_root, "google/pubsub/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 - "google-api-core[grpc] >= 1.32.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >=0.12.4, <1.0.0dev", - "grpcio-status >= 1.16.0", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", + "grpcio-status >= 1.33.2", ] extras = {"libcst": "libcst >= 0.3.10"} - - -# Setup boilerplate below this line. +url = "https://github.com/googleapis/python-pubsub" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -47,20 +52,16 @@ with io.open(readme_filename, encoding="utf-8") as readme_file: readme = readme_file.read() -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. packages = [ package for package in setuptools.PEP420PackageFinder.find() if package.startswith("google") ] -# Determine which namespaces are needed. 
namespaces = ["google"] if "google.cloud" in packages: namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -69,7 +70,7 @@ author="Google LLC", author_email="googleapis-packages@google.com", license="Apache 2.0", - url="https://github.com/googleapis/python-pubsub", + url=url, classifiers=[ release_status, "Intended Audience :: Developers", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 883a87aefd06..b6b0ed782d3c 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -8,3 +8,5 @@ google-api-core==1.33.2 proto-plus==1.22.0 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 +grpcio==1.38.1 +grpcio-status==1.33.2 From 962f68302404ba0348a1c6dd99e263871f2c1cc0 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 13:31:40 -0500 Subject: [PATCH 0900/1197] fix: Drop usage of pkg_resources (#832) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix(deps): Require google-api-core >=1.34.0, >=2.11.0 fix: Drop usage of pkg_resources fix: Fix timeout default values docs(samples): Snippetgen should call await on the operation coroutine before calling result PiperOrigin-RevId: 493260409 Source-Link: https://github.com/googleapis/googleapis/commit/fea43879f83a8d0dacc9353b3f75f8f46d37162f Source-Link: https://github.com/googleapis/googleapis-gen/commit/387b7344c7529ee44be84e613b19a820508c612b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzg3YjczNDRjNzUyOWVlNDRiZTg0ZTYxM2IxOWE4MjA1MDhjNjEyYiJ9 * update replacements in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * drop pkg_resources * update replacement in owlbot.py * fix(deps): require google-api-core>=1.34.0,>=2.11.0 * chore: 
update release-please-config.json * update replacement in owlbot.py * move replacement in owlbot.py Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.coveragerc | 5 -- .../cloud/pubsub_v1/publisher/client.py | 9 +--- .../cloud/pubsub_v1/subscriber/client.py | 10 +--- .../google/pubsub_v1/gapic_version.py | 16 ++++++ .../services/publisher/async_client.py | 14 ++--- .../pubsub_v1/services/publisher/client.py | 14 ++--- .../services/publisher/transports/base.py | 14 ++--- .../services/schema_service/async_client.py | 32 +++++------- .../services/schema_service/client.py | 32 +++++------- .../schema_service/transports/base.py | 14 ++--- .../services/subscriber/async_client.py | 52 +++++++++---------- .../pubsub_v1/services/subscriber/client.py | 52 +++++++++---------- .../services/subscriber/transports/base.py | 14 ++--- packages/google-cloud-pubsub/owlbot.py | 13 +++-- .../release-please-config.json | 1 + packages/google-cloud-pubsub/setup.py | 2 +- .../testing/constraints-3.7.txt | 2 +- 17 files changed, 132 insertions(+), 164 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 96190b454e0a..02d211c564d6 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -11,8 +11,3 @@ exclude_lines = pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index e3266e57f854..3e668533de65 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -17,7 +17,6 @@ import copy import logging import os -import pkg_resources import threading import time import typing @@ -35,15 +34,11 @@ from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer from google.cloud.pubsub_v1.publisher.flow_controller import FlowController +from google.pubsub_v1 import gapic_version as package_version from google.pubsub_v1 import types as gapic_types from google.pubsub_v1.services.publisher import client as publisher_client -try: - __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version -except pkg_resources.DistributionNotFound: - # Distribution might not be available if we are not running from within a - # PIP package. 
- __version__ = "0.0" +__version__ = package_version.__version__ if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud import pubsub_v1 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 9c12a0bfba3f..0d0d36a0c618 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -15,7 +15,6 @@ from __future__ import absolute_import import os -import pkg_resources import typing from typing import cast, Any, Callable, Optional, Sequence, Union import warnings @@ -27,6 +26,7 @@ from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager from google.pubsub_v1.services.subscriber import client as subscriber_client +from google.pubsub_v1 import gapic_version as package_version if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import subscriber @@ -34,13 +34,7 @@ SubscriberGrpcTransport, ) - -try: - __version__ = pkg_resources.get_distribution("google-cloud-pubsub").version -except pkg_resources.DistributionNotFound: - # Distribution might not be available if we are not running from within - # a PIP package. - __version__ = "0.0" +__version__ = package_version.__version__ class Client(subscriber_client.SubscriberClient): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py new file mode 100644 index 000000000000..51b84e9a69d6 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "2.13.11" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index d12eeb1da20e..6a282d7dd28e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -27,7 +27,8 @@ Type, Union, ) -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -1558,14 +1559,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) __all__ = ("PublisherAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 32b1def3dbc2..436c25a2dafa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -29,7 +29,8 @@ 
Union, cast, ) -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -1753,14 +1754,9 @@ def test_iam_permissions( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) __all__ = ("PublisherClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index d441f8113632..79ec2d4ff295 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) class PublisherTransport(abc.ABC): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 1996a1e4fa95..262190ea3375 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -27,7 +27,8 @@ Type, Union, ) -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -223,7 +224,7 @@ async def create_schema( schema: Optional[gp_schema.Schema] = None, schema_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.Schema: r"""Creates a schema. @@ -351,7 +352,7 @@ async def get_schema( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.Schema: r"""Gets a schema. @@ -450,7 +451,7 @@ async def list_schemas( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSchemasAsyncPager: r"""Lists schemas in a project. @@ -564,7 +565,7 @@ async def delete_schema( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a schema. 
@@ -655,7 +656,7 @@ async def validate_schema( parent: Optional[str] = None, schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. @@ -769,7 +770,7 @@ async def validate_message( request: Optional[Union[schema.ValidateMessageRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. @@ -850,7 +851,7 @@ async def set_iam_policy( request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -969,7 +970,7 @@ async def get_iam_policy( request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -1090,7 +1091,7 @@ async def test_iam_permissions( request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control @@ -1151,14 +1152,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) __all__ = ("SchemaServiceAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 45f013e91208..b02ee14e6882 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -29,7 +29,8 @@ Union, cast, ) -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -455,7 +456,7 @@ def create_schema( schema: Optional[gp_schema.Schema] = None, schema_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.Schema: r"""Creates a schema. 
@@ -583,7 +584,7 @@ def get_schema( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.Schema: r"""Gets a schema. @@ -682,7 +683,7 @@ def list_schemas( *, parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSchemasPager: r"""Lists schemas in a project. @@ -796,7 +797,7 @@ def delete_schema( *, name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes a schema. @@ -887,7 +888,7 @@ def validate_schema( parent: Optional[str] = None, schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. @@ -1001,7 +1002,7 @@ def validate_message( request: Optional[Union[schema.ValidateMessageRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. 
@@ -1096,7 +1097,7 @@ def set_iam_policy( request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1216,7 +1217,7 @@ def get_iam_policy( request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1337,7 +1338,7 @@ def test_iam_permissions( request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -1392,14 +1393,9 @@ def test_iam_permissions( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) __all__ = ("SchemaServiceClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 8035f7fd278d..0365627965fb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -31,14 +32,9 @@ from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) class SchemaServiceTransport(abc.ABC): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index fb860596d637..f0c38b04386e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -30,8 +30,9 @@ Type, Union, ) + import warnings -import pkg_resources +from google.pubsub_v1 import gapic_version as package_version from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions @@ -231,7 +232,7 @@ async def create_subscription( push_config: Optional[pubsub.PushConfig] = None, ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. 
See the [resource name @@ -416,7 +417,7 @@ async def get_subscription( *, subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. @@ -528,7 +529,7 @@ async def update_subscription( request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Updates an existing subscription. Note that certain @@ -624,7 +625,7 @@ async def list_subscriptions( *, project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSubscriptionsAsyncPager: r"""Lists matching subscriptions. @@ -749,7 +750,7 @@ async def delete_subscription( *, subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an existing subscription. All messages retained in the @@ -857,7 +858,7 @@ async def modify_ack_deadline( ack_ids: Optional[MutableSequence[str]] = None, ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ack deadline for a specific message. 
This method is @@ -991,7 +992,7 @@ async def acknowledge( subscription: Optional[str] = None, ack_ids: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Acknowledges the messages associated with the ``ack_ids`` in the @@ -1112,7 +1113,7 @@ async def pull( return_immediately: Optional[bool] = None, max_messages: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: r"""Pulls messages from the server. The server may return @@ -1261,7 +1262,7 @@ def streaming_pull( requests: Optional[AsyncIterator[pubsub.StreamingPullRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: r"""Establishes a stream with the server, which sends messages down @@ -1370,7 +1371,7 @@ async def modify_push_config( subscription: Optional[str] = None, push_config: Optional[pubsub.PushConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ``PushConfig`` for a specified subscription. @@ -1491,7 +1492,7 @@ async def get_snapshot( *, snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. 
@@ -1613,7 +1614,7 @@ async def list_snapshots( *, project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSnapshotsAsyncPager: r"""Lists the existing snapshots. Snapshots are used in @@ -1743,7 +1744,7 @@ async def create_snapshot( name: Optional[str] = None, subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Creates a snapshot from the requested subscription. Snapshots @@ -1898,7 +1899,7 @@ async def update_snapshot( request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. Snapshots are used in @@ -2000,7 +2001,7 @@ async def delete_snapshot( *, snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Removes an existing snapshot. 
Snapshots are used in [Seek] @@ -2107,7 +2108,7 @@ async def seek( request: Optional[Union[pubsub.SeekRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given @@ -2205,7 +2206,7 @@ async def set_iam_policy( request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2324,7 +2325,7 @@ async def get_iam_policy( request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -2445,7 +2446,7 @@ async def test_iam_permissions( request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified permissions against the IAM access control @@ -2506,14 +2507,9 @@ async def __aexit__(self, exc_type, exc, tb): await self.transport.close() -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) __all__ = ("SubscriberAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 35d96b5b5c7e..888af9883b06 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -31,8 +31,9 @@ Union, cast, ) + import warnings -import pkg_resources +from google.pubsub_v1 import gapic_version as package_version from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions @@ -510,7 +511,7 @@ def create_subscription( push_config: Optional[pubsub.PushConfig] = None, ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. 
See the [resource name @@ -684,7 +685,7 @@ def get_subscription( *, subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. @@ -785,7 +786,7 @@ def update_subscription( request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: r"""Updates an existing subscription. Note that certain @@ -873,7 +874,7 @@ def list_subscriptions( *, project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSubscriptionsPager: r"""Lists matching subscriptions. @@ -987,7 +988,7 @@ def delete_subscription( *, subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes an existing subscription. All messages retained in the @@ -1086,7 +1087,7 @@ def modify_ack_deadline( ack_ids: Optional[MutableSequence[str]] = None, ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ack deadline for a specific message. 
This method is @@ -1211,7 +1212,7 @@ def acknowledge( subscription: Optional[str] = None, ack_ids: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Acknowledges the messages associated with the ``ack_ids`` in the @@ -1323,7 +1324,7 @@ def pull( return_immediately: Optional[bool] = None, max_messages: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: r"""Pulls messages from the server. The server may return @@ -1461,7 +1462,7 @@ def streaming_pull( requests: Optional[Iterator[pubsub.StreamingPullRequest]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> Iterable[pubsub.StreamingPullResponse]: r"""Establishes a stream with the server, which sends messages down @@ -1558,7 +1559,7 @@ def modify_push_config( subscription: Optional[str] = None, push_config: Optional[pubsub.PushConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Modifies the ``PushConfig`` for a specified subscription. @@ -1670,7 +1671,7 @@ def get_snapshot( *, snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. 
@@ -1781,7 +1782,7 @@ def list_snapshots( *, project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListSnapshotsPager: r"""Lists the existing snapshots. Snapshots are used in @@ -1900,7 +1901,7 @@ def create_snapshot( name: Optional[str] = None, subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Creates a snapshot from the requested subscription. Snapshots @@ -2046,7 +2047,7 @@ def update_snapshot( request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. Snapshots are used in @@ -2140,7 +2141,7 @@ def delete_snapshot( *, snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Removes an existing snapshot. 
Snapshots are used in [Seek] @@ -2238,7 +2239,7 @@ def seek( request: Optional[Union[pubsub.SeekRequest, dict]] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given @@ -2339,7 +2340,7 @@ def set_iam_policy( request: Optional[iam_policy_pb2.SetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2459,7 +2460,7 @@ def get_iam_policy( request: Optional[iam_policy_pb2.GetIamPolicyRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. 
@@ -2580,7 +2581,7 @@ def test_iam_permissions( request: Optional[iam_policy_pb2.TestIamPermissionsRequest] = None, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Optional[float] = None, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control @@ -2635,14 +2636,9 @@ def test_iam_permissions( return response -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + client_library_version=package_version.__version__ +) __all__ = ("SubscriberClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index c1d2d8aff3b8..2cf93a7262ba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -15,7 +15,8 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources + +from google.pubsub_v1 import gapic_version as package_version import google.auth # type: ignore import google.api_core @@ -30,14 +31,9 @@ from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - client_library_version=pkg_resources.get_distribution( - "google-cloud-pubsub", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + 
client_library_version=package_version.__version__ +) class SubscriberTransport(abc.ABC): diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 81ec371a2959..0af937c4d6af 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -139,7 +139,7 @@ # Emit deprecation warning if return_immediately flag is set with synchronous pull. s.replace( library / f"google/pubsub_{library.name}/services/subscriber/*client.py", - r"import pkg_resources", + r"from google.pubsub_v1 import gapic_version as package_version", "import warnings\n\g<0>", ) @@ -210,7 +210,7 @@ raise Exception("Catch warnings replacement failed.") # Make sure that client library version is present in user agent header. - s.replace( + count = s.replace( [ library / f"google/pubsub_{library.name}/services/publisher/async_client.py", @@ -228,10 +228,13 @@ library / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", ], - r"""gapic_version=(pkg_resources\.get_distribution\(\s+)['"]google-cloud-pubsub['"]""", - "client_library_version=\g<1>'google-cloud-pubsub'", + r"""gapic_version=package_version.__version__""", + "client_library_version=package_version.__version__", ) + if count < 1: + raise Exception("client_library_version replacement failed.") + # Allow timeout to be an instance of google.api_core.timeout.* count = s.replace( library / f"google/pubsub_{library.name}/types/__init__.py", @@ -282,7 +285,7 @@ count = s.replace( library / f"google/pubsub_{library.name}/services/publisher/*client.py", - r"(\s+)timeout: Optional\[float\] = None.*\n", + r"(\s+)timeout: Union\[float, object\] = gapic_v1.method.DEFAULT.*\n", f"\g<1>timeout: TimeoutType = gapic_{library.name}.method.DEFAULT,", ) diff --git a/packages/google-cloud-pubsub/release-please-config.json b/packages/google-cloud-pubsub/release-please-config.json index 939d477d160a..9093524152e4 100644 --- 
a/packages/google-cloud-pubsub/release-please-config.json +++ b/packages/google-cloud-pubsub/release-please-config.json @@ -5,6 +5,7 @@ "release-type": "python", "extra-files": [ "google/pubsub/gapic_version.py", + "google/pubsub_v1/gapic_version.py", { "type": "json", "path": "samples/generated_samples/snippet_metadata_google.pubsub.v1.json", diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 1fa80c2cadea..b38f1e04f028 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -37,7 +37,7 @@ dependencies = [ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 - "google-api-core[grpc] >= 1.33.2, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*", + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index b6b0ed782d3c..54e27914f575 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -4,7 +4,7 @@ # Pin the version to the lower bound. 
# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.33.2 +google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 From 677bdbf22f0932167057b2bd95c0da0007fde428 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 8 Dec 2022 14:31:51 -0500 Subject: [PATCH 0901/1197] build(deps): bump certifi from 2022.9.24 to 2022.12.7 in /synthtool/gcp/templates/python_library/.kokoro (#835) Source-Link: https://github.com/googleapis/synthtool/commit/b4fe62efb5114b6738ad4b13d6f654f2bf4b7cc0 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- packages/google-cloud-pubsub/.pre-commit-config.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index bb21147e4c23..fccaa8e84449 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3abfa0f1886adaf0b83f07cb117b24a639ea1cb9cffe56d43280b977033563eb + digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 9c1b9be34e6b..05dc4672edaa 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.9.24 \ - --hash=sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14 \ - --hash=sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382 +certifi==2022.12.7 \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 46d237160f6d..5405cc8ff1f3 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -25,7 +25,7 @@ repos: rev: 22.3.0 hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: 3.9.2 hooks: - id: flake8 From 55f2a6529bf633a540ff5ed6550cc2d632718b24 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 14 Dec 2022 21:17:13 +0100 Subject: [PATCH 0902/1197] chore(deps): update dependency google-cloud-bigquery to v3.4.1 (#836) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 0c49adc041ec..0096aaac319c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.0 mock==4.0.3 flaky==3.7.0 -google-cloud-bigquery==3.4.0 +google-cloud-bigquery==3.4.1 From 7f1aea0a63584e5a815dcd29a1a6b60f1bdd77f2 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 4 Jan 2023 21:03:07 +0100 Subject: [PATCH 0903/1197] chore(deps): update dependency mock to v5 (#837) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 0096aaac319c..9cf3ccff1f37 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 pytest==7.2.0 -mock==4.0.3 +mock==5.0.0 flaky==3.7.0 google-cloud-bigquery==3.4.1 From 4637d229d79070547bc70a29d29f29f386221508 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jan 2023 08:40:38 -0500 Subject: [PATCH 0904/1197] chore(python): add support for python 3.11 [autoapprove] (#838) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): add support for python 3.11 Source-Link: https://github.com/googleapis/synthtool/commit/7197a001ffb6d8ce7b0b9b11c280f0c536c1033a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 * update unit_test_python_versions to include 3.11 * 🦉 Updates from OwlBot 
post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.11/common.cfg | 40 +++++++++++++++++++ .../.kokoro/samples/python3.11/continuous.cfg | 6 +++ .../samples/python3.11/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.11/periodic.cfg | 6 +++ .../.kokoro/samples/python3.11/presubmit.cfg | 6 +++ packages/google-cloud-pubsub/CONTRIBUTING.rst | 6 ++- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 2 +- .../samples/snippets/noxfile.py | 2 +- 11 files changed, 79 insertions(+), 8 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index fccaa8e84449..889f77dfa25d 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3bf87e47c2173d7eed42714589dc4da2c07c3268610f1e47f8e1a30decbfc7f1 + digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 23000c05d9d8..8057a7691b12 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg new file mode 100644 index 000000000000..f337a0d54a67 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.11" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-311" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" 
+} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 447a0a4bd643..59283006ed85 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9 and 3.10 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.10 -- -k + $ nox -s unit-3.11 -- -k .. note:: @@ -225,11 +225,13 @@ We support: - `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ +- `Python 3.11`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ +.. _Python 3.11: https://docs.python.org/3.11/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index cc70873a31b6..9bb6c98d7eb4 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -33,7 +33,7 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 0af937c4d6af..1ac55a8a9526 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -336,7 +336,7 @@ samples=True, cov_level=100, versions=gcp.common.detect_versions(path="./google", default_first=True), - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10"], + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11"], system_test_python_versions=["3.10"], system_test_external_dependencies=["psutil"], ) diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index e8283c38d4a0..1224cbe212e4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From f8e9cb56d1c9c999b74ec549954ad4d7cb2849fa Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 12:36:56 -0500 Subject: [PATCH 0905/1197] chore(main): release 2.13.12 (#833) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 18 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 2a1de97295f7..8bfebc3a0e45 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.13.11" + ".": "2.13.12" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 19b330fb1c2f..ee2dd4ecf6ce 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.13.12](https://github.com/googleapis/python-pubsub/compare/v2.13.11...v2.13.12) (2023-01-06) + + +### Bug Fixes + +* **deps:** Require google-api-core >=1.34.0, >=2.11.0 ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) +* Drop usage of pkg_resources ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) +* Fix timeout default values ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) + + +### Documentation + +* **samples:** Snippetgen 
should call await on the operation coroutine before calling result ([060f00b](https://github.com/googleapis/python-pubsub/commit/060f00bcea5cd129be3a2d37078535cc97b4f5e8)) + ## [2.13.11](https://github.com/googleapis/python-pubsub/compare/v2.13.10...v2.13.11) (2022-11-11) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 51b84e9a69d6..08273790f73c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.11" # {x-release-please-version} +__version__ = "2.13.12" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 51b84e9a69d6..08273790f73c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.13.11" # {x-release-please-version} +__version__ = "2.13.12" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 581ec0e7c514..4ed26b3ffdc9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.13.12" }, "snippets": [ { From 071fe342ec130b31ad2a7ec9e1954dce83035134 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 10 Jan 2023 13:46:09 -0500 Subject: [PATCH 0906/1197] test: pin mock version to 5.0.0 in noxfile for unit tests (#844) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: pin mock version to 5.0.0 in noxfile for unit tests * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * replace mock in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * replace mock in owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/noxfile.py | 4 ++-- packages/google-cloud-pubsub/owlbot.py | 3 +++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 
9bb6c98d7eb4..f0c9d01af16e 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -35,7 +35,7 @@ UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock", + "mock==5.0.0", "asyncmock", "pytest", "pytest-cov", @@ -49,7 +49,7 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] SYSTEM_TEST_STANDARD_DEPENDENCIES = [ - "mock", + "mock==5.0.0", "pytest", "google-cloud-testutils", ] diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 1ac55a8a9526..8787d90c6a4b 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -353,6 +353,9 @@ s.replace( "noxfile.py", r'"blacken",', '\g<0>\n "mypy",', ) +s.replace( + "noxfile.py", r'"mock"', '"mock==5.0.0"', +) s.replace( "noxfile.py", r"nox\.options\.error_on_missing_interpreters = True", From 9f0367296843510768cc6ac3b60a9e65c6c02274 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 10 Jan 2023 14:27:39 -0500 Subject: [PATCH 0907/1197] feat: add schema evolution methods and fields (#842) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * feat: add schema evolution methods and fields PiperOrigin-RevId: 500819578 Source-Link: 
https://github.com/googleapis/googleapis/commit/05a375f20b0e76e0106990aa9fadab98498dbea0 Source-Link: https://github.com/googleapis/googleapis-gen/commit/303c9592c498dc02432daa29acb46d67decfb0c2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMzAzYzk1OTJjNDk4ZGMwMjQzMmRhYTI5YWNiNDZkNjdkZWNmYjBjMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * require proto-plus 1.22.2 for python 3.11 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou Co-authored-by: Kamal Aboul-Hosn --- .../google/pubsub/__init__.py | 10 + .../google/pubsub_v1/__init__.py | 10 + .../google/pubsub_v1/gapic_metadata.json | 40 + .../services/publisher/async_client.py | 2 +- .../pubsub_v1/services/publisher/client.py | 2 +- .../services/schema_service/async_client.py | 460 ++++++- .../services/schema_service/client.py | 460 ++++++- .../services/schema_service/pagers.py | 128 ++ .../schema_service/transports/base.py | 58 + .../schema_service/transports/grpc.py | 107 ++ .../schema_service/transports/grpc_asyncio.py | 108 ++ .../services/subscriber/async_client.py | 2 +- .../pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/types/__init__.py | 10 + .../google/pubsub_v1/types/pubsub.py | 18 + .../google/pubsub_v1/types/schema.py | 164 ++- ...ated_schema_service_commit_schema_async.py | 56 + ...rated_schema_service_commit_schema_sync.py | 56 + ...ma_service_delete_schema_revision_async.py | 53 + ...ema_service_delete_schema_revision_sync.py | 53 + ...ema_service_list_schema_revisions_async.py | 53 + ...hema_service_list_schema_revisions_sync.py | 53 + ...ed_schema_service_rollback_schema_async.py | 53 + ...ted_schema_service_rollback_schema_sync.py | 53 + .../snippet_metadata_google.pubsub.v1.json | 912 ++++++++++-- .../scripts/fixup_pubsub_v1_keywords.py | 4 + packages/google-cloud-pubsub/setup.py | 2 + .../testing/constraints-3.12.txt | 7 + .../gapic/pubsub_v1/test_schema_service.py | 
1219 +++++++++++++++++ 29 files changed, 4025 insertions(+), 130 deletions(-) create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py create mode 100644 packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.12.txt diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 97953fcc072b..06df1da17b62 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -72,11 +72,16 @@ from google.pubsub_v1.types.pubsub import UpdateSnapshotRequest from google.pubsub_v1.types.pubsub import UpdateSubscriptionRequest from google.pubsub_v1.types.pubsub import UpdateTopicRequest +from google.pubsub_v1.types.schema import CommitSchemaRequest from google.pubsub_v1.types.schema import CreateSchemaRequest from google.pubsub_v1.types.schema import DeleteSchemaRequest +from google.pubsub_v1.types.schema import 
DeleteSchemaRevisionRequest from google.pubsub_v1.types.schema import GetSchemaRequest +from google.pubsub_v1.types.schema import ListSchemaRevisionsRequest +from google.pubsub_v1.types.schema import ListSchemaRevisionsResponse from google.pubsub_v1.types.schema import ListSchemasRequest from google.pubsub_v1.types.schema import ListSchemasResponse +from google.pubsub_v1.types.schema import RollbackSchemaRequest from google.pubsub_v1.types.schema import Schema from google.pubsub_v1.types.schema import ValidateMessageRequest from google.pubsub_v1.types.schema import ValidateMessageResponse @@ -137,11 +142,16 @@ "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", + "CommitSchemaRequest", "CreateSchemaRequest", "DeleteSchemaRequest", + "DeleteSchemaRevisionRequest", "GetSchemaRequest", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", "ListSchemasRequest", "ListSchemasResponse", + "RollbackSchemaRequest", "Schema", "ValidateMessageRequest", "ValidateMessageResponse", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 4a5351757005..4c762606e0cb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -70,11 +70,16 @@ from .types.pubsub import UpdateSnapshotRequest from .types.pubsub import UpdateSubscriptionRequest from .types.pubsub import UpdateTopicRequest +from .types.schema import CommitSchemaRequest from .types.schema import CreateSchemaRequest from .types.schema import DeleteSchemaRequest +from .types.schema import DeleteSchemaRevisionRequest from .types.schema import GetSchemaRequest +from .types.schema import ListSchemaRevisionsRequest +from .types.schema import ListSchemaRevisionsResponse from .types.schema import ListSchemasRequest from .types.schema import ListSchemasResponse +from .types.schema import RollbackSchemaRequest from .types.schema import 
Schema from .types.schema import ValidateMessageRequest from .types.schema import ValidateMessageResponse @@ -89,10 +94,12 @@ "SubscriberAsyncClient", "AcknowledgeRequest", "BigQueryConfig", + "CommitSchemaRequest", "CreateSchemaRequest", "CreateSnapshotRequest", "DeadLetterPolicy", "DeleteSchemaRequest", + "DeleteSchemaRevisionRequest", "DeleteSnapshotRequest", "DeleteSubscriptionRequest", "DeleteTopicRequest", @@ -104,6 +111,8 @@ "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", "ListSchemasRequest", "ListSchemasResponse", "ListSnapshotsRequest", @@ -128,6 +137,7 @@ "PushConfig", "ReceivedMessage", "RetryPolicy", + "RollbackSchemaRequest", "Schema", "SchemaServiceClient", "SchemaSettings", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json index 4c5b86bd13bc..ac814d06543f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json @@ -114,6 +114,11 @@ "grpc": { "libraryClient": "SchemaServiceClient", "rpcs": { + "CommitSchema": { + "methods": [ + "commit_schema" + ] + }, "CreateSchema": { "methods": [ "create_schema" @@ -124,16 +129,31 @@ "delete_schema" ] }, + "DeleteSchemaRevision": { + "methods": [ + "delete_schema_revision" + ] + }, "GetSchema": { "methods": [ "get_schema" ] }, + "ListSchemaRevisions": { + "methods": [ + "list_schema_revisions" + ] + }, "ListSchemas": { "methods": [ "list_schemas" ] }, + "RollbackSchema": { + "methods": [ + "rollback_schema" + ] + }, "ValidateMessage": { "methods": [ "validate_message" @@ -149,6 +169,11 @@ "grpc-async": { "libraryClient": "SchemaServiceAsyncClient", "rpcs": { + "CommitSchema": { + "methods": [ + "commit_schema" + ] + }, "CreateSchema": { "methods": [ "create_schema" @@ -159,16 +184,31 @@ "delete_schema" ] }, + "DeleteSchemaRevision": { + 
"methods": [ + "delete_schema_revision" + ] + }, "GetSchema": { "methods": [ "get_schema" ] }, + "ListSchemaRevisions": { + "methods": [ + "list_schema_revisions" + ] + }, "ListSchemas": { "methods": [ "list_schemas" ] }, + "RollbackSchema": { + "methods": [ + "rollback_schema" + ] + }, "ValidateMessage": { "methods": [ "validate_message" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 6a282d7dd28e..b272df768683 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -138,7 +138,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 436c25a2dafa..2f192fd06be2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -338,7 +338,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. 
(2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 262190ea3375..251b59e75ea2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -44,6 +44,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema @@ -136,7 +137,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. @@ -559,6 +560,463 @@ async def sample_list_schemas(): # Done; return the response. 
return response + async def list_schema_revisions( + self, + request: Optional[Union[schema.ListSchemaRevisionsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemaRevisionsAsyncPager: + r"""Lists all schema revisions for the named schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_list_schema_revisions(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemaRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_schema_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.ListSchemaRevisionsRequest, dict]]): + The request object. Request for the + `ListSchemaRevisions` method. + name (:class:`str`): + Required. The name of the schema to + list revisions for. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsAsyncPager: + Response for the ListSchemaRevisions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema.ListSchemaRevisionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_schema_revisions, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListSchemaRevisionsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def commit_schema( + self, + request: Optional[Union[gp_schema.CommitSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Commits a new schema revision to an existing schema. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = await client.commit_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.CommitSchemaRequest, dict]]): + The request object. Request for CommitSchema method. + name (:class:`str`): + Required. The name of the schema we are revising. Format + is ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (:class:`google.pubsub_v1.types.Schema`): + Required. The schema revision to + commit. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, schema]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = gp_schema.CommitSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.commit_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def rollback_schema( + self, + request: Optional[Union[schema.RollbackSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Creates a new schema revision that is a copy of the provided + revision_id. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + async def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = await client.rollback_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.pubsub_v1.types.RollbackSchemaRequest, dict]]): + The request object. Request for the `RollbackSchema` + method. + name (:class:`str`): + Required. The schema being rolled + back with revision id. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + revision_id (:class:`str`): + Required. The revision ID to roll + back to. It must be a revision of the + same schema. + Example: c7cfa2a8 + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, revision_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema.RollbackSchemaRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.rollback_schema, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    async def delete_schema_revision(
+        self,
+        request: Optional[Union[schema.DeleteSchemaRevisionRequest, dict]] = None,
+        *,
+        name: Optional[str] = None,
+        revision_id: Optional[str] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> schema.Schema:
+        r"""Deletes a specific schema revision.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            async def sample_delete_schema_revision():
+                # Create a client
+                client = pubsub_v1.SchemaServiceAsyncClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.DeleteSchemaRevisionRequest(
+                    name="name_value",
+                    revision_id="revision_id_value",
+                )
+
+                # Make the request
+                response = await client.delete_schema_revision(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Optional[Union[google.pubsub_v1.types.DeleteSchemaRevisionRequest, dict]]):
+                The request object. Request for the
+                `DeleteSchemaRevision` method.
+            name (:class:`str`):
+                Required. The name of the schema
+                revision to be deleted, with a revision
+                ID explicitly included.
+                Example:
+                projects/123/schemas/my-schema@c7cfa2a8
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            revision_id (:class:`str`):
+                Required. The revision ID to
+                delete. It must be a revision of the
+                same schema.
+ Example: c7cfa2a8 + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, revision_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = schema.DeleteSchemaRevisionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_schema_revision, + default_timeout=None, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def delete_schema( self, request: Optional[Union[schema.DeleteSchemaRequest, dict]] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index b02ee14e6882..d8c520312bdd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -49,6 +49,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.schema_service import pagers from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema @@ -288,7 +289,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. @@ -791,6 +792,463 @@ def sample_list_schemas(): # Done; return the response. return response + def list_schema_revisions( + self, + request: Optional[Union[schema.ListSchemaRevisionsRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListSchemaRevisionsPager: + r"""Lists all schema revisions for the named schema. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_list_schema_revisions(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemaRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_schema_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.pubsub_v1.types.ListSchemaRevisionsRequest, dict]): + The request object. Request for the + `ListSchemaRevisions` method. + name (str): + Required. The name of the schema to + list revisions for. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsPager: + Response for the ListSchemaRevisions method. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema.ListSchemaRevisionsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.ListSchemaRevisionsRequest): + request = schema.ListSchemaRevisionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_schema_revisions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListSchemaRevisionsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def commit_schema( + self, + request: Optional[Union[gp_schema.CommitSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + schema: Optional[gp_schema.Schema] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Commits a new schema revision to an existing schema. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = client.commit_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.CommitSchemaRequest, dict]): + The request object. Request for CommitSchema method. + name (str): + Required. The name of the schema we are revising. Format + is ``projects/{project}/schemas/{schema}``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + schema (google.pubsub_v1.types.Schema): + Required. The schema revision to + commit. + + This corresponds to the ``schema`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, schema]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a gp_schema.CommitSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, gp_schema.CommitSchemaRequest): + request = gp_schema.CommitSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if schema is not None: + request.schema = schema + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.commit_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def rollback_schema( + self, + request: Optional[Union[schema.RollbackSchemaRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Creates a new schema revision that is a copy of the provided + revision_id. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google import pubsub_v1 + + def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = client.rollback_schema(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.pubsub_v1.types.RollbackSchemaRequest, dict]): + The request object. Request for the `RollbackSchema` + method. + name (str): + Required. The schema being rolled + back with revision id. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + revision_id (str): + Required. The revision ID to roll + back to. It must be a revision of the + same schema. + Example: c7cfa2a8 + + This corresponds to the ``revision_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.pubsub_v1.types.Schema: + A schema resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, revision_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema.RollbackSchemaRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.RollbackSchemaRequest): + request = schema.RollbackSchemaRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.rollback_schema] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_schema_revision( + self, + request: Optional[Union[schema.DeleteSchemaRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + revision_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Deletes a specific schema revision. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+            # - It may require specifying regional endpoints when creating the service
+            #   client as shown in:
+            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google import pubsub_v1
+
+            def sample_delete_schema_revision():
+                # Create a client
+                client = pubsub_v1.SchemaServiceClient()
+
+                # Initialize request argument(s)
+                request = pubsub_v1.DeleteSchemaRevisionRequest(
+                    name="name_value",
+                    revision_id="revision_id_value",
+                )
+
+                # Make the request
+                response = client.delete_schema_revision(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.pubsub_v1.types.DeleteSchemaRevisionRequest, dict]):
+                The request object. Request for the
+                `DeleteSchemaRevision` method.
+            name (str):
+                Required. The name of the schema
+                revision to be deleted, with a revision
+                ID explicitly included.
+                Example:
+                projects/123/schemas/my-schema@c7cfa2a8
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            revision_id (str):
+                Required. The revision ID to
+                delete. It must be a revision of the
+                same schema.
+                Example: c7cfa2a8
+
+                This corresponds to the ``revision_id`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.pubsub_v1.types.Schema:
+                A schema resource.
+        """
+        # Create or coerce a protobuf request object.
+        # Quick check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name, revision_id]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a schema.DeleteSchemaRevisionRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, schema.DeleteSchemaRevisionRequest): + request = schema.DeleteSchemaRevisionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if revision_id is not None: + request.revision_id = revision_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_schema_revision] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def delete_schema( self, request: Optional[Union[schema.DeleteSchemaRequest, dict]] = None, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index 965778d459af..840428a158c3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -153,3 +153,131 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemaRevisionsPager: + """A pager for iterating through ``list_schema_revisions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListSchemaRevisions`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., schema.ListSchemaRevisionsResponse], + request: schema.ListSchemaRevisionsRequest, + response: schema.ListSchemaRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.pubsub_v1.types.ListSchemaRevisionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSchemaRevisionsResponse): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = schema.ListSchemaRevisionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[schema.ListSchemaRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[schema.Schema]: + for page in self.pages: + yield from page.schemas + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListSchemaRevisionsAsyncPager: + """A pager for iterating through ``list_schema_revisions`` requests. + + This class thinly wraps an initial + :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``schemas`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListSchemaRevisions`` requests and continue to iterate + through the ``schemas`` field on the + corresponding responses. + + All the usual :class:`google.pubsub_v1.types.ListSchemaRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[schema.ListSchemaRevisionsResponse]], + request: schema.ListSchemaRevisionsRequest, + response: schema.ListSchemaRevisionsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.pubsub_v1.types.ListSchemaRevisionsRequest): + The initial request object. + response (google.pubsub_v1.types.ListSchemaRevisionsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = schema.ListSchemaRevisionsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[schema.ListSchemaRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[schema.Schema]: + async def async_generator(): + async for page in self.pages: + for response in page.schemas: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 0365627965fb..40b89d61a9c8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -145,6 +145,26 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.list_schema_revisions: gapic_v1.method.wrap_method( + self.list_schema_revisions, + default_timeout=None, + client_info=client_info, + ), + self.commit_schema: gapic_v1.method.wrap_method( + self.commit_schema, + default_timeout=None, + client_info=client_info, + ), + self.rollback_schema: 
gapic_v1.method.wrap_method( + self.rollback_schema, + default_timeout=None, + client_info=client_info, + ), + self.delete_schema_revision: gapic_v1.method.wrap_method( + self.delete_schema_revision, + default_timeout=None, + client_info=client_info, + ), self.delete_schema: gapic_v1.method.wrap_method( self.delete_schema, default_timeout=None, @@ -197,6 +217,44 @@ def list_schemas( ]: raise NotImplementedError() + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], + Union[ + schema.ListSchemaRevisionsResponse, + Awaitable[schema.ListSchemaRevisionsResponse], + ], + ]: + raise NotImplementedError() + + @property + def commit_schema( + self, + ) -> Callable[ + [gp_schema.CommitSchemaRequest], + Union[gp_schema.Schema, Awaitable[gp_schema.Schema]], + ]: + raise NotImplementedError() + + @property + def rollback_schema( + self, + ) -> Callable[ + [schema.RollbackSchemaRequest], Union[schema.Schema, Awaitable[schema.Schema]] + ]: + raise NotImplementedError() + + @property + def delete_schema_revision( + self, + ) -> Callable[ + [schema.DeleteSchemaRevisionRequest], + Union[schema.Schema, Awaitable[schema.Schema]], + ]: + raise NotImplementedError() + @property def delete_schema( self, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index feace1e27201..4a99c8b29b78 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -311,6 +311,113 @@ def list_schemas( ) return self._stubs["list_schemas"] + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], schema.ListSchemaRevisionsResponse + ]: + r"""Return a callable for the list schema revisions method over gRPC. 
+ + Lists all schema revisions for the named schema. + + Returns: + Callable[[~.ListSchemaRevisionsRequest], + ~.ListSchemaRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schema_revisions" not in self._stubs: + self._stubs["list_schema_revisions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemaRevisions", + request_serializer=schema.ListSchemaRevisionsRequest.serialize, + response_deserializer=schema.ListSchemaRevisionsResponse.deserialize, + ) + return self._stubs["list_schema_revisions"] + + @property + def commit_schema( + self, + ) -> Callable[[gp_schema.CommitSchemaRequest], gp_schema.Schema]: + r"""Return a callable for the commit schema method over gRPC. + + Commits a new schema revision to an existing schema. + + Returns: + Callable[[~.CommitSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit_schema" not in self._stubs: + self._stubs["commit_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CommitSchema", + request_serializer=gp_schema.CommitSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["commit_schema"] + + @property + def rollback_schema( + self, + ) -> Callable[[schema.RollbackSchemaRequest], schema.Schema]: + r"""Return a callable for the rollback schema method over gRPC. + + Creates a new schema revision that is a copy of the provided + revision_id. 
+ + Returns: + Callable[[~.RollbackSchemaRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_schema" not in self._stubs: + self._stubs["rollback_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/RollbackSchema", + request_serializer=schema.RollbackSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["rollback_schema"] + + @property + def delete_schema_revision( + self, + ) -> Callable[[schema.DeleteSchemaRevisionRequest], schema.Schema]: + r"""Return a callable for the delete schema revision method over gRPC. + + Deletes a specific schema revision. + + Returns: + Callable[[~.DeleteSchemaRevisionRequest], + ~.Schema]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema_revision" not in self._stubs: + self._stubs["delete_schema_revision"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchemaRevision", + request_serializer=schema.DeleteSchemaRevisionRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["delete_schema_revision"] + @property def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty_pb2.Empty]: r"""Return a callable for the delete schema method over gRPC. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 83bed7ecde50..840d19007627 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -316,6 +316,114 @@ def list_schemas( ) return self._stubs["list_schemas"] + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], + Awaitable[schema.ListSchemaRevisionsResponse], + ]: + r"""Return a callable for the list schema revisions method over gRPC. + + Lists all schema revisions for the named schema. + + Returns: + Callable[[~.ListSchemaRevisionsRequest], + Awaitable[~.ListSchemaRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_schema_revisions" not in self._stubs: + self._stubs["list_schema_revisions"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/ListSchemaRevisions", + request_serializer=schema.ListSchemaRevisionsRequest.serialize, + response_deserializer=schema.ListSchemaRevisionsResponse.deserialize, + ) + return self._stubs["list_schema_revisions"] + + @property + def commit_schema( + self, + ) -> Callable[[gp_schema.CommitSchemaRequest], Awaitable[gp_schema.Schema]]: + r"""Return a callable for the commit schema method over gRPC. + + Commits a new schema revision to an existing schema. + + Returns: + Callable[[~.CommitSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "commit_schema" not in self._stubs: + self._stubs["commit_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/CommitSchema", + request_serializer=gp_schema.CommitSchemaRequest.serialize, + response_deserializer=gp_schema.Schema.deserialize, + ) + return self._stubs["commit_schema"] + + @property + def rollback_schema( + self, + ) -> Callable[[schema.RollbackSchemaRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the rollback schema method over gRPC. + + Creates a new schema revision that is a copy of the provided + revision_id. + + Returns: + Callable[[~.RollbackSchemaRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "rollback_schema" not in self._stubs: + self._stubs["rollback_schema"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/RollbackSchema", + request_serializer=schema.RollbackSchemaRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["rollback_schema"] + + @property + def delete_schema_revision( + self, + ) -> Callable[[schema.DeleteSchemaRevisionRequest], Awaitable[schema.Schema]]: + r"""Return a callable for the delete schema revision method over gRPC. + + Deletes a specific schema revision. + + Returns: + Callable[[~.DeleteSchemaRevisionRequest], + Awaitable[~.Schema]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_schema_revision" not in self._stubs: + self._stubs["delete_schema_revision"] = self.grpc_channel.unary_unary( + "/google.pubsub.v1.SchemaService/DeleteSchemaRevision", + request_serializer=schema.DeleteSchemaRevisionRequest.serialize, + response_deserializer=schema.Schema.deserialize, + ) + return self._stubs["delete_schema_revision"] + @property def delete_schema( self, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index f0c38b04386e..e832b0eba0ea 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -143,7 +143,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 888af9883b06..fe3bc17d1f09 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -342,7 +342,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index c0d9c4619d75..f24034c3b63a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -63,11 +63,16 @@ UpdateTopicRequest, ) from .schema import ( + CommitSchemaRequest, CreateSchemaRequest, DeleteSchemaRequest, + DeleteSchemaRevisionRequest, GetSchemaRequest, + ListSchemaRevisionsRequest, + ListSchemaRevisionsResponse, ListSchemasRequest, ListSchemasResponse, + RollbackSchemaRequest, Schema, ValidateMessageRequest, ValidateMessageResponse, @@ -132,11 +137,16 @@ "UpdateSnapshotRequest", "UpdateSubscriptionRequest", "UpdateTopicRequest", + "CommitSchemaRequest", "CreateSchemaRequest", "DeleteSchemaRequest", + "DeleteSchemaRevisionRequest", "GetSchemaRequest", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", "ListSchemasRequest", "ListSchemasResponse", + "RollbackSchemaRequest", "Schema", 
"ValidateMessageRequest", "ValidateMessageResponse", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index a58226a152fb..4a4ea635a77d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -109,6 +109,16 @@ class SchemaSettings(proto.Message): deleted. encoding (google.pubsub_v1.types.Encoding): The encoding of messages validated against ``schema``. + first_revision_id (str): + The minimum (inclusive) revision allowed for validating + messages. If empty or not present, allow any revision to be + validated against last_revision or any revision created + before. + last_revision_id (str): + The maximum (inclusive) revision allowed for validating + messages. If empty or not present, allow any revision to be + validated against first_revision or any revision created + after. """ schema: str = proto.Field( @@ -120,6 +130,14 @@ class SchemaSettings(proto.Message): number=2, enum=gp_schema.Encoding, ) + first_revision_id: str = proto.Field( + proto.STRING, + number=3, + ) + last_revision_id: str = proto.Field( + proto.STRING, + number=4, + ) class Topic(proto.Message): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index affb4343ac40..a5ea2f06d52d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -17,6 +17,8 @@ import proto # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + __protobuf__ = proto.module( package="google.pubsub.v1", @@ -28,6 +30,11 @@ "GetSchemaRequest", "ListSchemasRequest", "ListSchemasResponse", + "ListSchemaRevisionsRequest", + "ListSchemaRevisionsResponse", + "CommitSchemaRequest", + "RollbackSchemaRequest", + "DeleteSchemaRevisionRequest", "DeleteSchemaRequest", 
"ValidateSchemaRequest", "ValidateSchemaResponse", @@ -66,6 +73,12 @@ class Schema(proto.Message): The definition of the schema. This should contain a string representing the full definition of the schema that is a valid schema definition of the type specified in ``type``. + revision_id (str): + Output only. Immutable. The revision ID of + the schema. + revision_create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The timestamp that the revision + was created. """ class Type(proto.Enum): @@ -87,6 +100,15 @@ class Type(proto.Enum): proto.STRING, number=3, ) + revision_id: str = proto.Field( + proto.STRING, + number=4, + ) + revision_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) class CreateSchemaRequest(proto.Message): @@ -135,8 +157,8 @@ class GetSchemaRequest(proto.Message): ``projects/{project}/schemas/{schema}``. view (google.pubsub_v1.types.SchemaView): The set of fields to return in the response. If not set, - returns a Schema with ``name`` and ``type``, but not - ``definition``. Set to ``FULL`` to retrieve all fields. + returns a Schema with all fields filled out. Set to + ``BASIC`` to omit the ``definition``. """ name: str = proto.Field( @@ -216,6 +238,144 @@ def raw_page(self): ) +class ListSchemaRevisionsRequest(proto.Message): + r"""Request for the ``ListSchemaRevisions`` method. + + Attributes: + name (str): + Required. The name of the schema to list + revisions for. + view (google.pubsub_v1.types.SchemaView): + The set of Schema fields to return in the response. If not + set, returns Schemas with ``name`` and ``type``, but not + ``definition``. Set to ``FULL`` to retrieve all fields. + page_size (int): + The maximum number of revisions to return per + page. + page_token (str): + The page token, received from a previous + ListSchemaRevisions call. Provide this to + retrieve the subsequent page. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + view: "SchemaView" = proto.Field( + proto.ENUM, + number=2, + enum="SchemaView", + ) + page_size: int = proto.Field( + proto.INT32, + number=3, + ) + page_token: str = proto.Field( + proto.STRING, + number=4, + ) + + +class ListSchemaRevisionsResponse(proto.Message): + r"""Response for the ``ListSchemaRevisions`` method. + + Attributes: + schemas (MutableSequence[google.pubsub_v1.types.Schema]): + The revisions of the schema. + next_page_token (str): + A token that can be sent as ``page_token`` to retrieve the + next page. If this field is empty, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + schemas: MutableSequence["Schema"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Schema", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CommitSchemaRequest(proto.Message): + r"""Request for CommitSchema method. + + Attributes: + name (str): + Required. The name of the schema we are revising. Format is + ``projects/{project}/schemas/{schema}``. + schema (google.pubsub_v1.types.Schema): + Required. The schema revision to commit. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + schema: "Schema" = proto.Field( + proto.MESSAGE, + number=2, + message="Schema", + ) + + +class RollbackSchemaRequest(proto.Message): + r"""Request for the ``RollbackSchema`` method. + + Attributes: + name (str): + Required. The schema being rolled back with + revision id. + revision_id (str): + Required. The revision ID to roll back to. + It must be a revision of the same schema. + + Example: c7cfa2a8 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteSchemaRevisionRequest(proto.Message): + r"""Request for the ``DeleteSchemaRevision`` method. + + Attributes: + name (str): + Required. 
The name of the schema revision to + be deleted, with a revision ID explicitly + included. + Example: projects/123/schemas/my-schema@c7cfa2a8 + revision_id (str): + Required. The revision ID to be deleted. + It must be a revision of the same schema. + + Example: c7cfa2a8 + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + revision_id: str = proto.Field( + proto.STRING, + number=2, + ) + + class DeleteSchemaRequest(proto.Message): + r"""Request for the ``DeleteSchema`` method. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py new file mode 100644 index 000000000000..eb69ca3e9e88 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CommitSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = await client.commit_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CommitSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py new file mode 100644 index 000000000000..ac6da483c041 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CommitSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_CommitSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_commit_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + schema = pubsub_v1.Schema() + schema.name = "name_value" + + request = pubsub_v1.CommitSchemaRequest( + name="name_value", + schema=schema, + ) + + # Make the request + response = client.commit_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_CommitSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py new file mode 100644 index 000000000000..3e3d178a01f9 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchemaRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_delete_schema_revision(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRevisionRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = await client.delete_schema_revision(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py new file mode 100644 index 000000000000..3aad86c9510b --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteSchemaRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_delete_schema_revision(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.DeleteSchemaRevisionRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = client.delete_schema_revision(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py new file mode 100644 index 000000000000..5ad8bee4212e --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemaRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ListSchemaRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_list_schema_revisions(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemaRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_schema_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END pubsub_v1_generated_SchemaService_ListSchemaRevisions_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py new file mode 100644 index 000000000000..54ea87778c89 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListSchemaRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_ListSchemaRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_list_schema_revisions(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.ListSchemaRevisionsRequest( + name="name_value", + ) + + # Make the request + page_result = client.list_schema_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END pubsub_v1_generated_SchemaService_ListSchemaRevisions_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py new file mode 100644 index 000000000000..8c7d467377a7 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_RollbackSchema_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +async def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceAsyncClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = await client.rollback_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_RollbackSchema_async] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py new file mode 100644 index 000000000000..96c79deea0c3 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RollbackSchema +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-pubsub + + +# [START pubsub_v1_generated_SchemaService_RollbackSchema_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google import pubsub_v1 + + +def sample_rollback_schema(): + # Create a client + client = pubsub_v1.SchemaServiceClient() + + # Initialize request argument(s) + request = pubsub_v1.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Make the request + response = client.rollback_schema(request=request) + + # Handle the response + print(response) + +# [END pubsub_v1_generated_SchemaService_RollbackSchema_sync] diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 4ed26b3ffdc9..7783003749fc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -1454,32 +1454,28 @@ "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", "shortName": "SchemaServiceAsyncClient" }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.commit_schema", "method": { - "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "fullName": "google.pubsub.v1.SchemaService.CommitSchema", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "CreateSchema" + "shortName": "CommitSchema" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.CreateSchemaRequest" + "type": "google.pubsub_v1.types.CommitSchemaRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { "name": "schema", "type": "google.pubsub_v1.types.Schema" }, - { - "name": "schema_id", - "type": "str" - }, { "name": "retry", "type": 
"google.api_core.retry.Retry" @@ -1494,13 +1490,13 @@ } ], "resultType": "google.pubsub_v1.types.Schema", - "shortName": "create_schema" + "shortName": "commit_schema" }, - "description": "Sample for CreateSchema", - "file": "pubsub_v1_generated_schema_service_create_schema_async.py", + "description": "Sample for CommitSchema", + "file": "pubsub_v1_generated_schema_service_commit_schema_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", + "regionTag": "pubsub_v1_generated_SchemaService_CommitSchema_async", "segments": [ { "end": 55, @@ -1533,7 +1529,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_create_schema_async.py" + "title": "pubsub_v1_generated_schema_service_commit_schema_async.py" }, { "canonical": true, @@ -1542,32 +1538,28 @@ "fullName": "google.pubsub_v1.SchemaServiceClient", "shortName": "SchemaServiceClient" }, - "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", + "fullName": "google.pubsub_v1.SchemaServiceClient.commit_schema", "method": { - "fullName": "google.pubsub.v1.SchemaService.CreateSchema", + "fullName": "google.pubsub.v1.SchemaService.CommitSchema", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "CreateSchema" + "shortName": "CommitSchema" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.CreateSchemaRequest" + "type": "google.pubsub_v1.types.CommitSchemaRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { "name": "schema", "type": "google.pubsub_v1.types.Schema" }, - { - "name": "schema_id", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1582,13 +1574,13 @@ } ], "resultType": "google.pubsub_v1.types.Schema", - "shortName": "create_schema" + "shortName": "commit_schema" }, - "description": "Sample for CreateSchema", - "file": 
"pubsub_v1_generated_schema_service_create_schema_sync.py", + "description": "Sample for CommitSchema", + "file": "pubsub_v1_generated_schema_service_commit_schema_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", + "regionTag": "pubsub_v1_generated_SchemaService_CommitSchema_sync", "segments": [ { "end": 55, @@ -1621,7 +1613,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" + "title": "pubsub_v1_generated_schema_service_commit_schema_sync.py" }, { "canonical": true, @@ -1631,22 +1623,30 @@ "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", "shortName": "SchemaServiceAsyncClient" }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.create_schema", "method": { - "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "DeleteSchema" + "shortName": "CreateSchema" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.DeleteSchemaRequest" + "type": "google.pubsub_v1.types.CreateSchemaRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", "type": "str" }, { @@ -1662,21 +1662,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_schema" + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" }, - "description": "Sample for DeleteSchema", - "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"pubsub_v1_generated_SchemaService_DeleteSchema_async", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_async", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1686,20 +1687,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" + "title": "pubsub_v1_generated_schema_service_create_schema_async.py" }, { "canonical": true, @@ -1708,22 +1711,30 @@ "fullName": "google.pubsub_v1.SchemaServiceClient", "shortName": "SchemaServiceClient" }, - "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", + "fullName": "google.pubsub_v1.SchemaServiceClient.create_schema", "method": { - "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", + "fullName": "google.pubsub.v1.SchemaService.CreateSchema", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "DeleteSchema" + "shortName": "CreateSchema" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.DeleteSchemaRequest" + "type": "google.pubsub_v1.types.CreateSchemaRequest" }, { - "name": "name", + "name": "parent", + "type": "str" + }, + { + "name": "schema", + "type": "google.pubsub_v1.types.Schema" + }, + { + "name": "schema_id", "type": "str" }, { @@ -1739,21 +1750,22 @@ "type": "Sequence[Tuple[str, str]" } ], - "shortName": "delete_schema" + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "create_schema" }, - "description": "Sample for DeleteSchema", - "file": "pubsub_v1_generated_schema_service_delete_schema_sync.py", + "description": "Sample for CreateSchema", + "file": "pubsub_v1_generated_schema_service_create_schema_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", + "regionTag": "pubsub_v1_generated_SchemaService_CreateSchema_sync", "segments": [ { - "end": 49, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 49, + "end": 55, "start": 27, "type": "SHORT" }, @@ -1763,20 +1775,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 49, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 46, + "end": 52, + "start": 50, "type": "REQUEST_EXECUTION" }, { - "end": 50, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" + "title": "pubsub_v1_generated_schema_service_create_schema_sync.py" }, { "canonical": true, @@ -1786,24 +1800,28 @@ "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", "shortName": "SchemaServiceAsyncClient" }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema_revision", "method": { - "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "fullName": "google.pubsub.v1.SchemaService.DeleteSchemaRevision", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "GetSchema" + "shortName": "DeleteSchemaRevision" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.GetSchemaRequest" + "type": "google.pubsub_v1.types.DeleteSchemaRevisionRequest" }, { "name": "name", "type": "str" }, + { + "name": "revision_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1818,21 +1836,21 @@ } ], "resultType": "google.pubsub_v1.types.Schema", - "shortName": "get_schema" + "shortName": "delete_schema_revision" }, - "description": "Sample for GetSchema", - "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "description": "Sample for DeleteSchemaRevision", + "file": 
"pubsub_v1_generated_schema_service_delete_schema_revision_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1842,22 +1860,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_get_schema_async.py" + "title": "pubsub_v1_generated_schema_service_delete_schema_revision_async.py" }, { "canonical": true, @@ -1866,24 +1884,28 @@ "fullName": "google.pubsub_v1.SchemaServiceClient", "shortName": "SchemaServiceClient" }, - "fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema_revision", "method": { - "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "fullName": "google.pubsub.v1.SchemaService.DeleteSchemaRevision", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "GetSchema" + "shortName": "DeleteSchemaRevision" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.GetSchemaRequest" + "type": "google.pubsub_v1.types.DeleteSchemaRevisionRequest" }, { "name": "name", "type": "str" }, + { + "name": "revision_id", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1898,21 +1920,21 @@ } ], "resultType": "google.pubsub_v1.types.Schema", - "shortName": "get_schema" + "shortName": "delete_schema_revision" }, - "description": "Sample for GetSchema", - "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + 
"description": "Sample for DeleteSchemaRevision", + "file": "pubsub_v1_generated_schema_service_delete_schema_revision_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -1922,22 +1944,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" + "title": "pubsub_v1_generated_schema_service_delete_schema_revision_sync.py" }, { "canonical": true, @@ -1947,22 +1969,22 @@ "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", "shortName": "SchemaServiceAsyncClient" }, - "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.delete_schema", "method": { - "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "ListSchemas" + "shortName": "DeleteSchema" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.ListSchemasRequest" + "type": "google.pubsub_v1.types.DeleteSchemaRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -1978,22 +2000,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", - "shortName": "list_schemas" + "shortName": "delete_schema" }, - "description": "Sample for ListSchemas", - "file": 
"pubsub_v1_generated_schema_service_list_schemas_async.py", + "description": "Sample for DeleteSchema", + "file": "pubsub_v1_generated_schema_service_delete_schema_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_async", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2008,17 +2029,15 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" + "title": "pubsub_v1_generated_schema_service_delete_schema_async.py" }, { "canonical": true, @@ -2027,22 +2046,22 @@ "fullName": "google.pubsub_v1.SchemaServiceClient", "shortName": "SchemaServiceClient" }, - "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", + "fullName": "google.pubsub_v1.SchemaServiceClient.delete_schema", "method": { - "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "fullName": "google.pubsub.v1.SchemaService.DeleteSchema", "service": { "fullName": "google.pubsub.v1.SchemaService", "shortName": "SchemaService" }, - "shortName": "ListSchemas" + "shortName": "DeleteSchema" }, "parameters": [ { "name": "request", - "type": "google.pubsub_v1.types.ListSchemasRequest" + "type": "google.pubsub_v1.types.DeleteSchemaRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -2058,22 +2077,21 @@ "type": "Sequence[Tuple[str, str]" } ], - "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", - "shortName": "list_schemas" + "shortName": "delete_schema" }, - "description": "Sample for ListSchemas", - "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", + "description": "Sample for DeleteSchema", + "file": 
"pubsub_v1_generated_schema_service_delete_schema_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_sync", + "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchema_sync", "segments": [ { - "end": 52, + "end": 49, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 49, "start": 27, "type": "SHORT" }, @@ -2088,17 +2106,667 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 50, "type": "RESPONSE_HANDLING" } ], - "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" + "title": "pubsub_v1_generated_schema_service_delete_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.get_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.GetSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "GetSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.GetSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "get_schema" + }, + "description": "Sample for GetSchema", + "file": "pubsub_v1_generated_schema_service_get_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_GetSchema_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_get_schema_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + 
"shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schema_revisions", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemaRevisions", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemaRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemaRevisionsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsAsyncPager", + "shortName": "list_schema_revisions" + }, + "description": "Sample for ListSchemaRevisions", + "file": "pubsub_v1_generated_schema_service_list_schema_revisions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemaRevisions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schema_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schema_revisions", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemaRevisions", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + 
"shortName": "ListSchemaRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemaRevisionsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsPager", + "shortName": "list_schema_revisions" + }, + "description": "Sample for ListSchemaRevisions", + "file": "pubsub_v1_generated_schema_service_list_schema_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemaRevisions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schema_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + 
"name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_ListSchemas_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.list_schemas", + "method": { + "fullName": "google.pubsub.v1.SchemaService.ListSchemas", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "ListSchemas" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.ListSchemasRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", + "shortName": "list_schemas" + }, + "description": "Sample for ListSchemas", + "file": "pubsub_v1_generated_schema_service_list_schemas_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"pubsub_v1_generated_SchemaService_ListSchemas_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_list_schemas_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient", + "shortName": "SchemaServiceAsyncClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceAsyncClient.rollback_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.RollbackSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "RollbackSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.RollbackSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "revision_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "rollback_schema" + }, + "description": "Sample for RollbackSchema", + "file": "pubsub_v1_generated_schema_service_rollback_schema_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_RollbackSchema_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + 
"end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_rollback_schema_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.pubsub_v1.SchemaServiceClient", + "shortName": "SchemaServiceClient" + }, + "fullName": "google.pubsub_v1.SchemaServiceClient.rollback_schema", + "method": { + "fullName": "google.pubsub.v1.SchemaService.RollbackSchema", + "service": { + "fullName": "google.pubsub.v1.SchemaService", + "shortName": "SchemaService" + }, + "shortName": "RollbackSchema" + }, + "parameters": [ + { + "name": "request", + "type": "google.pubsub_v1.types.RollbackSchemaRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "revision_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.pubsub_v1.types.Schema", + "shortName": "rollback_schema" + }, + "description": "Sample for RollbackSchema", + "file": "pubsub_v1_generated_schema_service_rollback_schema_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "pubsub_v1_generated_SchemaService_RollbackSchema_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "pubsub_v1_generated_schema_service_rollback_schema_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py 
b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index d1bbcedf98af..d288e6ebb383 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -40,11 +40,13 @@ class pubsubCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { 'acknowledge': ('subscription', 'ack_ids', ), + 'commit_schema': ('name', 'schema', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ), 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), 'delete_schema': ('name', ), + 'delete_schema_revision': ('name', 'revision_id', ), 'delete_snapshot': ('snapshot', ), 'delete_subscription': ('subscription', ), 'delete_topic': ('topic', ), @@ -53,6 +55,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'get_snapshot': ('snapshot', ), 'get_subscription': ('subscription', ), 'get_topic': ('topic', ), + 'list_schema_revisions': ('name', 'view', 'page_size', 'page_token', ), 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), 'list_snapshots': ('project', 'page_size', 'page_token', ), 'list_subscriptions': ('project', 'page_size', 'page_token', ), @@ -63,6 +66,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'modify_push_config': ('subscription', 'push_config', ), 'publish': ('topic', 'messages', ), 'pull': ('subscription', 'max_messages', 'return_immediately', ), + 'rollback_schema': ('name', 
'revision_id', ), 'seek': ('subscription', 'time', 'snapshot', ), 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', ), 'update_snapshot': ('snapshot', 'update_mask', ), diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index b38f1e04f028..88645f77aeea 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -39,6 +39,7 @@ "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", "grpcio-status >= 1.33.2", @@ -81,6 +82,7 @@ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/google-cloud-pubsub/testing/constraints-3.12.txt b/packages/google-cloud-pubsub/testing/constraints-3.12.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-pubsub/testing/constraints-3.12.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index ce559bb9b9b0..4c7ffd5ed934 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -41,6 +41,7 @@ from google.iam.v1 import options_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.oauth2 import service_account +from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.schema_service import SchemaServiceAsyncClient from google.pubsub_v1.services.schema_service import SchemaServiceClient from google.pubsub_v1.services.schema_service import pagers @@ -713,6 +714,7 @@ def test_create_schema(request_type, transport: str = "grpc"): name="name_value", type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, definition="definition_value", + revision_id="revision_id_value", ) response = client.create_schema(request) @@ -726,6 +728,7 @@ def test_create_schema(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" def test_create_schema_empty_call(): @@ -765,6 +768,7 @@ async def test_create_schema_async( name="name_value", type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, definition="definition_value", + revision_id="revision_id_value", ) ) response = await client.create_schema(request) @@ -779,6 +783,7 @@ async def test_create_schema_async( assert response.name == "name_value" assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" @pytest.mark.asyncio @@ -969,6 +974,7 @@ def 
test_get_schema(request_type, transport: str = "grpc"): name="name_value", type_=schema.Schema.Type.PROTOCOL_BUFFER, definition="definition_value", + revision_id="revision_id_value", ) response = client.get_schema(request) @@ -982,6 +988,7 @@ def test_get_schema(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" def test_get_schema_empty_call(): @@ -1021,6 +1028,7 @@ async def test_get_schema_async( name="name_value", type_=schema.Schema.Type.PROTOCOL_BUFFER, definition="definition_value", + revision_id="revision_id_value", ) ) response = await client.get_schema(request) @@ -1035,6 +1043,7 @@ async def test_get_schema_async( assert response.name == "name_value" assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" @pytest.mark.asyncio @@ -1601,6 +1610,1212 @@ async def test_list_schemas_async_pages(): assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemaRevisionsRequest, + dict, + ], +) +def test_list_schema_revisions(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemaRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemaRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schema_revisions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + client.list_schema_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemaRevisionsRequest() + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async( + transport: str = "grpc_asyncio", request_type=schema.ListSchemaRevisionsRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemaRevisionsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemaRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_from_dict(): + await test_list_schema_revisions_async(request_type=dict) + + +def test_list_schema_revisions_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.ListSchemaRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value = schema.ListSchemaRevisionsResponse() + client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_schema_revisions_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = schema.ListSchemaRevisionsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse() + ) + await client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_list_schema_revisions_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemaRevisionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_schema_revisions( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_list_schema_revisions_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_schema_revisions( + schema.ListSchemaRevisionsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_list_schema_revisions_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.ListSchemaRevisionsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_schema_revisions( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_schema_revisions_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_schema_revisions( + schema.ListSchemaRevisionsRequest(), + name="name_value", + ) + + +def test_list_schema_revisions_pager(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), + ) + pager = client.list_schema_revisions(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + +def test_list_schema_revisions_pages(transport_name: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = list(client.list_schema_revisions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_pager(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_schema_revisions( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, schema.Schema) for i in responses) + + +@pytest.mark.asyncio +async def test_list_schema_revisions_async_pages(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in ( + await client.list_schema_revisions(request={}) + ).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CommitSchemaRequest, + dict, + ], +) +def test_commit_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + response = client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CommitSchemaRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_commit_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + client.commit_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CommitSchemaRequest() + + +@pytest.mark.asyncio +async def test_commit_schema_async( + transport: str = "grpc_asyncio", request_type=gp_schema.CommitSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CommitSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_commit_schema_async_from_dict(): + await test_commit_schema_async(request_type=dict) + + +def test_commit_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CommitSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_commit_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gp_schema.CommitSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + await client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_commit_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.commit_schema( + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name="name_value") + assert arg == mock_val + + +def test_commit_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.commit_schema( + gp_schema.CommitSchemaRequest(), + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_commit_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = gp_schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(gp_schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.commit_schema( + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].schema + mock_val = gp_schema.Schema(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_commit_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.commit_schema( + gp_schema.CommitSchemaRequest(), + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.RollbackSchemaRequest, + dict, + ], +) +def test_rollback_schema(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + response = client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.RollbackSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_rollback_schema_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + client.rollback_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.RollbackSchemaRequest() + + +@pytest.mark.asyncio +async def test_rollback_schema_async( + transport: str = "grpc_asyncio", request_type=schema.RollbackSchemaRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.RollbackSchemaRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_rollback_schema_async_from_dict(): + await test_rollback_schema_async(request_type=dict) + + +def test_rollback_schema_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = schema.RollbackSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + call.return_value = schema.Schema() + client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_rollback_schema_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.RollbackSchemaRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_rollback_schema_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.rollback_schema( + name="name_value", + revision_id="revision_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].revision_id + mock_val = "revision_id_value" + assert arg == mock_val + + +def test_rollback_schema_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.rollback_schema( + schema.RollbackSchemaRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +@pytest.mark.asyncio +async def test_rollback_schema_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.rollback_schema( + name="name_value", + revision_id="revision_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].revision_id + mock_val = "revision_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_rollback_schema_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.rollback_schema( + schema.RollbackSchemaRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRevisionRequest, + dict, + ], +) +def test_delete_schema_revision(request_type, transport: str = "grpc"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + response = client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRevisionRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_delete_schema_revision_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + client.delete_schema_revision() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRevisionRequest() + + +@pytest.mark.asyncio +async def test_delete_schema_revision_async( + transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRevisionRequest +): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRevisionRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.asyncio +async def test_delete_schema_revision_async_from_dict(): + await test_delete_schema_revision_async(request_type=dict) + + +def test_delete_schema_revision_field_headers(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + call.return_value = schema.Schema() + client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_schema_revision_field_headers_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = schema.DeleteSchemaRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + await client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_schema_revision_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_schema_revision( + name="name_value", + revision_id="revision_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].revision_id + mock_val = "revision_id_value" + assert arg == mock_val + + +def test_delete_schema_revision_flattened_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_schema_revision( + schema.DeleteSchemaRevisionRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +@pytest.mark.asyncio +async def test_delete_schema_revision_flattened_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = schema.Schema() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(schema.Schema()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_schema_revision( + name="name_value", + revision_id="revision_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].revision_id + mock_val = "revision_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_schema_revision_flattened_error_async(): + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_schema_revision( + schema.DeleteSchemaRevisionRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + @pytest.mark.parametrize( "request_type", [ @@ -2341,6 +3556,10 @@ def test_schema_service_base_transport(): "create_schema", "get_schema", "list_schemas", + "list_schema_revisions", + "commit_schema", + "rollback_schema", + "delete_schema_revision", "delete_schema", "validate_schema", "validate_message", From 143adf5d595ee6808a2f4dbc7a5d2be04072ba82 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 11 Jan 2023 18:39:12 +0000 Subject: [PATCH 0908/1197] chore(deps): update dependency mock to v5.0.1 (#839) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 9cf3ccff1f37..c44e54716b2a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 pytest==7.2.0 -mock==5.0.0 +mock==5.0.1 flaky==3.7.0 google-cloud-bigquery==3.4.1 From f0a5ed6e95771322b3c4c5300fef684a542d8943 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 12 Jan 2023 00:18:21 +0000 Subject: [PATCH 0909/1197] chore(deps): update dependency google-cloud-pubsub to v2.13.12 (#846) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 48db3432afc6..302c47e4fa1d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ 
b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.11 +google-cloud-pubsub==2.13.12 avro==1.11.1 From bec116b0e661782c8e48758913225303aeef2cc6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 14 Jan 2023 18:08:50 +0000 Subject: [PATCH 0910/1197] chore(deps): update dependency pytest to v7.2.1 (#847) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index c44e54716b2a..7b626ba30005 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.2.0 +pytest==7.2.1 mock==5.0.1 flaky==3.7.0 google-cloud-bigquery==3.4.1 From 7a2373e5c7153eb2b6ebe91df9c9991a6eee57c5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 18 Jan 2023 17:00:07 +0000 Subject: [PATCH 0911/1197] chore(deps): update dependency google-cloud-bigquery to v3.4.2 (#848) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 7b626ba30005..784310636792 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.1 mock==5.0.1 flaky==3.7.0 -google-cloud-bigquery==3.4.1 +google-cloud-bigquery==3.4.2 From 8425a726cc564d4bb2f631ffcadea6d0f40a475d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 19 Jan 2023 13:43:39 -0500 Subject: [PATCH 0912/1197] chore(main): release 
2.14.0 (#845) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 8bfebc3a0e45..c80b1d287d4d 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.13.12" + ".": "2.14.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index ee2dd4ecf6ce..e253e49c1dfe 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.14.0](https://github.com/googleapis/python-pubsub/compare/v2.13.12...v2.14.0) (2023-01-18) + + +### Features + +* Add schema evolution methods and fields ([9479356](https://github.com/googleapis/python-pubsub/commit/9479356029f28c565a06ab759330c6e430a47c51)) +* Add support for python 3.11 ([9479356](https://github.com/googleapis/python-pubsub/commit/9479356029f28c565a06ab759330c6e430a47c51)) + ## [2.13.12](https://github.com/googleapis/python-pubsub/compare/v2.13.11...v2.13.12) (2023-01-06) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 08273790f73c..8be002907dd0 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for 
the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.12" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 08273790f73c..8be002907dd0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.13.12" # {x-release-please-version} +__version__ = "2.14.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 7783003749fc..fab818afab32 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.13.12" + "version": "2.14.0" }, "snippets": [ { From 94853bbac11d299fc5da3da147e129eb0f0d7380 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 23 Jan 2023 10:08:23 -0500 Subject: [PATCH 0913/1197] docs: Add documentation for enums (#850) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Add documentation for enums fix: Add context manager return types chore: Update gapic-generator-python to v1.8.1 PiperOrigin-RevId: 503210727 Source-Link: https://github.com/googleapis/googleapis/commit/a391fd1dac18dfdfa00c18c8404f2c3a6ff8e98e Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/0080f830dec37c3384157082bce279e37079ea58 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDA4MGY4MzBkZWMzN2MzMzg0MTU3MDgyYmNlMjc5ZTM3MDc5ZWE1OCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../pubsub_v1/services/publisher/client.py | 2 +- .../services/schema_service/client.py | 2 +- .../pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/types/pubsub.py | 34 +++++++++++++++++-- .../google/pubsub_v1/types/schema.py | 34 +++++++++++++++++-- .../snippet_metadata_google.pubsub.v1.json | 2 +- 6 files changed, 68 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 2f192fd06be2..df459dafd1dc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1437,7 +1437,7 @@ def sample_detach_subscription(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "PublisherClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index d8c520312bdd..da1be8d2523d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1537,7 +1537,7 @@ def sample_validate_message(): # Done; return the response. 
return response - def __enter__(self): + def __enter__(self) -> "SchemaServiceClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index fe3bc17d1f09..eebfb873695f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -2322,7 +2322,7 @@ def sample_seek(): # Done; return the response. return response - def __enter__(self): + def __enter__(self) -> "SubscriberClient": return self def __exit__(self, type, value, traceback): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 4a4ea635a77d..878b1f381724 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -735,7 +735,20 @@ class Subscription(proto.Message): """ class State(proto.Enum): - r"""Possible states for a subscription.""" + r"""Possible states for a subscription. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The subscription can actively receive + messages + RESOURCE_ERROR (2): + The subscription cannot receive messages + because of an error with the resource to which + it pushes messages. See the more detailed error + state in the corresponding configuration. + """ STATE_UNSPECIFIED = 0 ACTIVE = 1 RESOURCE_ERROR = 2 @@ -1059,7 +1072,24 @@ class BigQueryConfig(proto.Message): """ class State(proto.Enum): - r"""Possible states for a BigQuery subscription.""" + r"""Possible states for a BigQuery subscription. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. 
+ ACTIVE (1): + The subscription can actively send messages + to BigQuery + PERMISSION_DENIED (2): + Cannot write to the BigQuery table because of + permission denied errors. + NOT_FOUND (3): + Cannot write to the BigQuery table because it + does not exist. + SCHEMA_MISMATCH (4): + Cannot write to the BigQuery table due to a + schema mismatch. + """ STATE_UNSPECIFIED = 0 ACTIVE = 1 PERMISSION_DENIED = 2 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index a5ea2f06d52d..8f778a285ca4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -47,6 +47,16 @@ class SchemaView(proto.Enum): r"""View of Schema object fields to be returned by GetSchema and ListSchemas. + + Values: + SCHEMA_VIEW_UNSPECIFIED (0): + The default / unset value. + The API will default to the BASIC view. + BASIC (1): + Include the name and type of the schema, but + not the definition. + FULL (2): + Include all Schema object fields. """ SCHEMA_VIEW_UNSPECIFIED = 0 BASIC = 1 @@ -54,7 +64,18 @@ class SchemaView(proto.Enum): class Encoding(proto.Enum): - r"""Possible encoding types for messages.""" + r"""Possible encoding types for messages. + + Values: + ENCODING_UNSPECIFIED (0): + Unspecified + JSON (1): + JSON encoding + BINARY (2): + Binary encoding, as defined by the schema + type. For some schema types, binary encoding may + not be available. + """ ENCODING_UNSPECIFIED = 0 JSON = 1 BINARY = 2 @@ -82,7 +103,16 @@ class Schema(proto.Message): """ class Type(proto.Enum): - r"""Possible schema definition types.""" + r"""Possible schema definition types. + + Values: + TYPE_UNSPECIFIED (0): + Default value. This value is unused. + PROTOCOL_BUFFER (1): + A Protocol Buffer schema definition. + AVRO (2): + An Avro schema definition. 
+ """ TYPE_UNSPECIFIED = 0 PROTOCOL_BUFFER = 1 AVRO = 2 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index fab818afab32..144fda2f6d90 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.14.0" + "version": "0.1.0" }, "snippets": [ { From 3657570a704af6125a92d099226500b22770e488 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 24 Jan 2023 15:11:52 +0000 Subject: [PATCH 0914/1197] chore(deps): update dependency google-cloud-pubsub to v2.14.0 (#849) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 302c47e4fa1d..1202f091ddba 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.13.12 +google-cloud-pubsub==2.14.0 avro==1.11.1 From 95a624191a902e383076ab8a3cd638d2c113383f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Jan 2023 11:39:40 -0500 Subject: [PATCH 0915/1197] chore: Update gapic-generator-python to v1.8.2 (#855) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.2 PiperOrigin-RevId: 504289125 Source-Link: https://github.com/googleapis/googleapis/commit/38a48a44a44279e9cf9f2f864b588958a2d87491 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/b2dc22663dbe47a972c8d8c2f8a4df013dafdcbc Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJkYzIyNjYzZGJlNDdhOTcyYzhkOGMyZjhhNGRmMDEzZGFmZGNiYyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.coveragerc | 1 + packages/google-cloud-pubsub/google/pubsub_v1/__init__.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.coveragerc b/packages/google-cloud-pubsub/.coveragerc index 02d211c564d6..1be6bc67fc47 100644 --- a/packages/google-cloud-pubsub/.coveragerc +++ b/packages/google-cloud-pubsub/.coveragerc @@ -6,6 +6,7 @@ show_missing = True omit = google/cloud/__init__.py google/pubsub/__init__.py + google/pubsub/gapic_version.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 4c762606e0cb..0150658c0842 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -from google.pubsub import gapic_version as package_version +from google.pubsub_v1 import gapic_version as package_version __version__ = package_version.__version__ From 05e44575a8ecdbc77ecb7102c87b5d6b201e7712 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 30 Jan 2023 16:40:38 +0000 Subject: [PATCH 0916/1197] chore: fix prerelease_deps nox session [autoapprove] (#856) Source-Link: https://togithub.com/googleapis/synthtool/commit/26c7505b2f76981ec1707b851e1595c8c06e90fc Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 --- .../google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/noxfile.py | 14 ++++++-------- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 889f77dfa25d..f0f3b24b20cd 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:c43f1d918bcf817d337aa29ff833439494a158a0831508fda4ec75dc4c0d0320 + digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index f0c9d01af16e..574fbd64467e 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -240,9 +240,9 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. - # Exclude version 1.49.0rc1 which has a known issue. 
- # See https://github.com/grpc/grpc/pull/30642 - session.install("--pre", "grpcio!=1.49.0rc1") + # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) @@ -397,9 +397,7 @@ def prerelease_deps(session): unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES session.install(*unit_deps_all) system_deps_all = ( - SYSTEM_TEST_STANDARD_DEPENDENCIES - + SYSTEM_TEST_EXTERNAL_DEPENDENCIES - + SYSTEM_TEST_EXTRAS + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES ) session.install(*system_deps_all) @@ -429,8 +427,8 @@ def prerelease_deps(session): # dependency of grpc "six", "googleapis-common-protos", - # Exclude version 1.49.0rc1 which has a known issue. See https://github.com/grpc/grpc/pull/30642 - "grpcio!=1.49.0rc1", + # Exclude version 1.52.0rc1 which has a known issue. See https://github.com/grpc/grpc/issues/32163 + "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", "proto-plus", From 03243d466d2df8416efdae68605ebc5c42c3cc80 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 2 Feb 2023 10:44:22 +0000 Subject: [PATCH 0917/1197] chore(deps): update dependency google-cloud-bigquery to v3.5.0 (#857) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 784310636792..ba89d85dcc15 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.1 mock==5.0.1 flaky==3.7.0 -google-cloud-bigquery==3.4.2 +google-cloud-bigquery==3.5.0 From 96e57fed28c04215b25be9f8fade4444ca1e6455 Mon Sep 17 00:00:00 2001 From: 
"gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 15:12:12 +0000 Subject: [PATCH 0918/1197] build(deps): bump cryptography from 38.0.3 to 39.0.1 in /synthtool/gcp/templates/python_library/.kokoro (#860) Source-Link: https://togithub.com/googleapis/synthtool/commit/bb171351c3946d3c3c32e60f5f18cee8c464ec51 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf --- .../.github/.OwlBot.lock.yaml | 2 +- .../.kokoro/requirements.txt | 49 +++++++++---------- 2 files changed, 23 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index f0f3b24b20cd..894fb6bc9b47 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f946c75373c2b0040e8e318c5e85d0cf46bc6e61d0a01f3ef94d8de974ac6790 + digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 05dc4672edaa..096e4800a9ac 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -113,33 +113,28 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==38.0.3 \ - --hash=sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d \ - --hash=sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd \ - --hash=sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146 \ - 
--hash=sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7 \ - --hash=sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436 \ - --hash=sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0 \ - --hash=sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828 \ - --hash=sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b \ - --hash=sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55 \ - --hash=sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36 \ - --hash=sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50 \ - --hash=sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2 \ - --hash=sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a \ - --hash=sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8 \ - --hash=sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0 \ - --hash=sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548 \ - --hash=sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320 \ - --hash=sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748 \ - --hash=sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249 \ - --hash=sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959 \ - --hash=sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f \ - --hash=sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0 \ - --hash=sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd \ - --hash=sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220 \ - --hash=sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c \ - --hash=sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722 +cryptography==39.0.1 \ + 
--hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ + --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ + --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ + --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ + --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ + --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ + --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ + --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ + --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ + --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ + --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ + --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ + --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ + --hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ + --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ + --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ + --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ + --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ + --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ + --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ + --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 # via # gcp-releasetool # secretstorage From 5510af25ef83774fcc7c34221cb23b9fc5f5259a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 13:21:53 -0500 Subject: [PATCH 0919/1197] docs: 
Mark revision_id in CommitSchemaRevisionRequest as deprecated (#861) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Mark revision_id in CommitSchemaRevisionRequest deprecated PiperOrigin-RevId: 508076213 Source-Link: https://github.com/googleapis/googleapis/commit/3b9ae88062e8f0f6603cc8bcba945197cc60d314 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d5e26492de9100eb2cf686ea7bccf2498b6600d4 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDVlMjY0OTJkZTkxMDBlYjJjZjY4NmVhN2JjY2YyNDk4YjY2MDBkNCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/schema_service/async_client.py | 17 +++++------ .../services/schema_service/client.py | 17 +++++------ .../google/pubsub_v1/types/schema.py | 15 +++++----- ...ma_service_delete_schema_revision_async.py | 1 - ...ema_service_delete_schema_revision_sync.py | 1 - .../snippet_metadata_google.pubsub.v1.json | 28 +++++++++---------- 6 files changed, 35 insertions(+), 44 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 251b59e75ea2..68b896e4802c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -931,7 +931,6 @@ async def sample_delete_schema_revision(): # Initialize request argument(s) request = pubsub_v1.DeleteSchemaRevisionRequest( name="name_value", - revision_id="revision_id_value", ) # Make the request @@ -945,20 +944,18 @@ async def sample_delete_schema_revision(): The request object. Request for the `DeleteSchemaRevision` method. name (:class:`str`): - Required. 
The name of the schema - revision to be deleted, with a revision - ID explicitly included. - Example: - projects/123/schemas/my-schema@c7cfa2a8 + Required. The name of the schema revision to be deleted, + with a revision ID explicitly included. + + Example: ``projects/123/schemas/my-schema@c7cfa2a8`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. revision_id (:class:`str`): - Required. The revision ID to roll - back to. It must be a revision of the - same schema. - Example: c7cfa2a8 + Optional. This field is deprecated and should not be + used for specifying the revision ID. The revision ID + should be specified via the ``name`` parameter. This corresponds to the ``revision_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index da1be8d2523d..d217c49f0670 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1163,7 +1163,6 @@ def sample_delete_schema_revision(): # Initialize request argument(s) request = pubsub_v1.DeleteSchemaRevisionRequest( name="name_value", - revision_id="revision_id_value", ) # Make the request @@ -1177,20 +1176,18 @@ def sample_delete_schema_revision(): The request object. Request for the `DeleteSchemaRevision` method. name (str): - Required. The name of the schema - revision to be deleted, with a revision - ID explicitly included. - Example: - projects/123/schemas/my-schema@c7cfa2a8 + Required. The name of the schema revision to be deleted, + with a revision ID explicitly included. 
+ + Example: ``projects/123/schemas/my-schema@c7cfa2a8`` This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. revision_id (str): - Required. The revision ID to roll - back to. It must be a revision of the - same schema. - Example: c7cfa2a8 + Optional. This field is deprecated and should not be + used for specifying the revision ID. The revision ID + should be specified via the ``name`` parameter. This corresponds to the ``revision_id`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 8f778a285ca4..27a6efbbdd19 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -385,15 +385,14 @@ class DeleteSchemaRevisionRequest(proto.Message): Attributes: name (str): - Required. The name of the schema revision to - be deleted, with a revision ID explicitly - included. - Example: projects/123/schemas/my-schema@c7cfa2a8 - revision_id (str): - Required. The revision ID to roll back to. - It must be a revision of the same schema. + Required. The name of the schema revision to be deleted, + with a revision ID explicitly included. - Example: c7cfa2a8 + Example: ``projects/123/schemas/my-schema@c7cfa2a8`` + revision_id (str): + Optional. This field is deprecated and should not be used + for specifying the revision ID. The revision ID should be + specified via the ``name`` parameter. 
""" name: str = proto.Field( diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py index 3e3d178a01f9..67d0ce7a5b13 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py @@ -41,7 +41,6 @@ async def sample_delete_schema_revision(): # Initialize request argument(s) request = pubsub_v1.DeleteSchemaRevisionRequest( name="name_value", - revision_id="revision_id_value", ) # Make the request diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py index 3aad86c9510b..45d173a19448 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py @@ -41,7 +41,6 @@ def sample_delete_schema_revision(): # Initialize request argument(s) request = pubsub_v1.DeleteSchemaRevisionRequest( name="name_value", - revision_id="revision_id_value", ) # Make the request diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 144fda2f6d90..181a30ef9f40 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ 
b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -1845,12 +1845,12 @@ "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchemaRevision_async", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1860,18 +1860,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], @@ -1929,12 +1929,12 @@ "regionTag": "pubsub_v1_generated_SchemaService_DeleteSchemaRevision_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -1944,18 +1944,18 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], From 6b11e6dc691687fb69904eb7aebb70a7e92f5c1b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 8 Feb 2023 14:10:08 -0500 Subject: [PATCH 0920/1197] chore(main): release 2.14.1 (#853) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 13 +++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 17 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index c80b1d287d4d..06613a33b430 
100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.14.0" + ".": "2.14.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index e253e49c1dfe..cb33891da8aa 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,19 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.14.1](https://github.com/googleapis/python-pubsub/compare/v2.14.0...v2.14.1) (2023-02-08) + + +### Bug Fixes + +* Add context manager return types ([4f690b9](https://github.com/googleapis/python-pubsub/commit/4f690b9287beefbca6505cf88637f4a8c5077152)) + + +### Documentation + +* Add documentation for enums ([4f690b9](https://github.com/googleapis/python-pubsub/commit/4f690b9287beefbca6505cf88637f4a8c5077152)) +* Mark revision_id in CommitSchemaRevisionRequest as deprecated ([#861](https://github.com/googleapis/python-pubsub/issues/861)) ([09b846d](https://github.com/googleapis/python-pubsub/commit/09b846ddd066519c0570522b8525ec5705714b0a)) + ## [2.14.0](https://github.com/googleapis/python-pubsub/compare/v2.13.12...v2.14.0) (2023-01-18) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 8be002907dd0..ef7c50064e79 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 8be002907dd0..ef7c50064e79 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.0" # {x-release-please-version} +__version__ = "2.14.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 181a30ef9f40..e0f3623a999c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.14.1" }, "snippets": [ { From f0368ab323aae622021d6fc8bdbe34d8a7daacd1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 14 Feb 2023 16:59:21 +0000 Subject: [PATCH 0921/1197] chore(deps): update dependency google-cloud-pubsub to v2.14.1 (#862) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 1202f091ddba..af598f2ff102 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.14.0 +google-cloud-pubsub==2.14.1 avro==1.11.1 From 
e6a1756615749f049fbcb1f33c04445da11961aa Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 14 Feb 2023 13:29:46 -0500 Subject: [PATCH 0922/1197] fix: move global import in publisher sample (#866) --- packages/google-cloud-pubsub/samples/snippets/publisher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index e154cf574c98..e2c63556c794 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -22,7 +22,6 @@ """ import argparse -from typing import Callable def list_topics(project_id: str) -> None: @@ -139,6 +138,7 @@ def publish_messages_with_error_handler(project_id: str, topic_id: str) -> None: """Publishes multiple messages to a Pub/Sub topic with an error handler.""" from concurrent import futures from google.cloud import pubsub_v1 + from typing import Callable # TODO(developer) # project_id = "your-project-id" From 5f5742f5b58fd5bbcfa41b63ad7fac46e2eb239f Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 17 Feb 2023 15:23:44 -0500 Subject: [PATCH 0923/1197] ci: Refactor system tests with transport (#870) * ci: Refactor system tests * add rest param * fix param name * remove rest --- packages/google-cloud-pubsub/tests/system.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 44444325fb37..d7f7c5bea6b8 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -50,14 +50,14 @@ def project(): yield default_project -@pytest.fixture() -def publisher(): - yield pubsub_v1.PublisherClient() +@pytest.fixture(params=["grpc"]) +def publisher(request): + yield 
pubsub_v1.PublisherClient(transport=request.param) -@pytest.fixture() -def subscriber(): - yield pubsub_v1.SubscriberClient() +@pytest.fixture(params=["grpc"]) +def subscriber(request): + yield pubsub_v1.SubscriberClient(transport=request.param) @pytest.fixture @@ -419,8 +419,8 @@ def test_subscriber_not_leaking_open_sockets( # Also, since the client will get closed, we need another subscriber client # to clean up the subscription. We also need to make sure that auxiliary # subscriber releases the sockets, too. - subscriber = pubsub_v1.SubscriberClient() - subscriber_2 = pubsub_v1.SubscriberClient() + subscriber = pubsub_v1.SubscriberClient(transport="grpc") + subscriber_2 = pubsub_v1.SubscriberClient(transport="grpc") cleanup.append( (subscriber_2.delete_subscription, (), {"subscription": subscription_path}) ) From 268371c1c71362c9f06b46cc49793f497e9de384 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 22 Feb 2023 17:14:44 -0500 Subject: [PATCH 0924/1197] Fix: Port proto changes (#871) feat: Add temporary_failed_ack_ids to ModifyAckDeadlineConfirmation fix: Add service_yaml_parameters to py_gapic_library BUILD.bazel targets docs: Clarify BigQueryConfig PERMISSION_DENIED state docs: Clarify subscription description docs: Replacing HTML code with Markdown docs: Fix PullResponse description docs: Fix Pull description feat: Add google.api.method.signature to update methods docs: Update Pub/Sub topic retention limit from 7 days to 31 days --- .../services/publisher/async_client.py | 36 ++++ .../pubsub_v1/services/publisher/client.py | 35 ++++ .../services/subscriber/async_client.py | 136 ++++++++++--- .../pubsub_v1/services/subscriber/client.py | 133 +++++++++--- .../services/subscriber/transports/base.py | 1 + .../services/subscriber/transports/grpc.py | 28 +-- .../subscriber/transports/grpc_asyncio.py | 28 +-- .../google/pubsub_v1/types/pubsub.py | 79 +++++--- .../snippet_metadata_google.pubsub.v1.json | 50 ++++- 
.../unit/gapic/pubsub_v1/test_publisher.py | 97 +++++++++ .../gapic/pubsub_v1/test_schema_service.py | 7 + .../unit/gapic/pubsub_v1/test_subscriber.py | 191 ++++++++++++++++++ 12 files changed, 694 insertions(+), 127 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index b272df768683..e749892f5648 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -46,6 +46,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType @@ -339,6 +340,8 @@ async def update_topic( self, request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, *, + topic: Optional[pubsub.Topic] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), @@ -378,6 +381,23 @@ async def sample_update_topic(): Args: request (Optional[Union[google.pubsub_v1.types.UpdateTopicRequest, dict]]): The request object. Request for the UpdateTopic method. + topic (:class:`google.pubsub_v1.types.Topic`): + Required. The updated topic object. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Indicates which fields in the provided topic + to update. Must be specified and non-empty. 
Note that if + ``update_mask`` contains "message_storage_policy" but + the ``message_storage_policy`` is not set in the + ``topic`` provided above, then the updated value is + determined by the policy configured at the project or + organization level. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -390,8 +410,24 @@ async def sample_update_topic(): A topic resource. """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + request = pubsub.UpdateTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if update_mask is not None: + request.update_mask = update_mask + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index df459dafd1dc..a9684144f4b0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -51,6 +51,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.pubsub_v1.services.publisher import pagers from google.pubsub_v1.types import pubsub from google.pubsub_v1.types import TimeoutType @@ -610,6 +611,8 @@ def update_topic( self, request: Optional[Union[pubsub.UpdateTopicRequest, dict]] = None, *, + topic: Optional[pubsub.Topic] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), @@ -649,6 +652,23 @@ def sample_update_topic(): Args: request (Union[google.pubsub_v1.types.UpdateTopicRequest, dict]): The request object. Request for the UpdateTopic method. + topic (google.pubsub_v1.types.Topic): + Required. The updated topic object. + This corresponds to the ``topic`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in the provided topic + to update. Must be specified and non-empty. Note that if + ``update_mask`` contains "message_storage_policy" but + the ``message_storage_policy`` is not set in the + ``topic`` provided above, then the updated value is + determined by the policy configured at the project or + organization level. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (TimeoutType): @@ -661,12 +681,27 @@ def sample_update_topic(): A topic resource. """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([topic, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # Minor optimization to avoid making a copy if the user passes # in a pubsub.UpdateTopicRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, pubsub.UpdateTopicRequest): request = pubsub.UpdateTopicRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if topic is not None: + request.topic = topic + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index e832b0eba0ea..dbe4fd0e7094 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -49,6 +49,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.types import pubsub @@ -278,7 +279,10 @@ async def sample_create_subscription(): Args: request (Optional[Union[google.pubsub_v1.types.Subscription, dict]]): - The request object. A subscription resource. + The request object. A subscription resource. If none of + `push_config` or `bigquery_config` is set, then the + subscriber will pull and ack messages using API methods. + At most one of these fields may be set. name (:class:`str`): Required. The name of the subscription. It must have the format @@ -304,11 +308,9 @@ async def sample_create_subscription(): on the ``request`` instance; if ``request`` is provided, this should not be set. push_config (:class:`google.pubsub_v1.types.PushConfig`): - If push delivery is used with this subscription, this - field is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages - using API methods. + If push delivery is used with this + subscription, this field is used to + configure it. 
This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this @@ -318,7 +320,7 @@ async def sample_create_subscription(): Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it - is considered to be outstanding. During that time + is considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). @@ -350,7 +352,11 @@ async def sample_create_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. + A subscription resource. If none of push_config or bigquery_config is + set, then the subscriber will pull and ack messages + using API methods. At most one of these fields may be + set. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -467,7 +473,11 @@ async def sample_get_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. + A subscription resource. If none of push_config or bigquery_config is + set, then the subscriber will pull and ack messages + using API methods. At most one of these fields may be + set. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -528,6 +538,8 @@ async def update_subscription( self, request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, *, + subscription: Optional[pubsub.Subscription] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), @@ -570,6 +582,21 @@ async def sample_update_subscription(): request (Optional[Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]]): The request object. Request for the UpdateSubscription method. 
+ subscription (:class:`google.pubsub_v1.types.Subscription`): + Required. The updated subscription + object. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Indicates which fields in + the provided subscription to update. + Must be specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -578,11 +605,31 @@ async def sample_update_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. + A subscription resource. If none of push_config or bigquery_config is + set, then the subscriber will pull and ack messages + using API methods. At most one of these fields may be + set. + """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([subscription, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + request = pubsub.UpdateSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if update_mask is not None: + request.update_mask = update_mask + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( @@ -1116,9 +1163,7 @@ async def pull( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: - r"""Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. + r"""Pulls messages from the server. .. code-block:: python @@ -1229,6 +1274,7 @@ async def sample_pull(): multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.Aborted, + core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), @@ -1495,13 +1541,12 @@ async def get_snapshot( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: - r"""Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + r"""Gets the configuration details of a snapshot. Snapshots are used + in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. .. code-block:: python @@ -1803,9 +1848,10 @@ async def sample_create_snapshot(): name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name - rules. Format is - ``projects/{project}/snapshots/{snap}``. + requests, you must specify a name. See the `resource + name + rules `__. + Format is ``projects/{project}/snapshots/{snap}``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -1898,18 +1944,17 @@ async def update_snapshot( self, request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None, *, + snapshot: Optional[pubsub.Snapshot] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. .. code-block:: python @@ -1940,6 +1985,21 @@ async def sample_update_snapshot(): request (Optional[Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]]): The request object. Request for the UpdateSnapshot method. + snapshot (:class:`google.pubsub_v1.types.Snapshot`): + Required. The updated snapshot + object. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Required. Indicates which fields in + the provided snapshot to update. Must be + specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1957,8 +2017,24 @@ async def sample_update_snapshot(): """ # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + request = pubsub.UpdateSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + if update_mask is not None: + request.update_mask = update_mask + # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index eebfb873695f..816275ef7a63 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -53,6 +53,7 @@ from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.subscriber import pagers from google.pubsub_v1.types import pubsub @@ -557,7 +558,10 @@ def sample_create_subscription(): Args: request (Union[google.pubsub_v1.types.Subscription, dict]): - The request object. A subscription resource. + The request object. A subscription resource. If none of + `push_config` or `bigquery_config` is set, then the + subscriber will pull and ack messages using API methods. + At most one of these fields may be set. name (str): Required. The name of the subscription. 
It must have the format @@ -583,11 +587,9 @@ def sample_create_subscription(): on the ``request`` instance; if ``request`` is provided, this should not be set. push_config (google.pubsub_v1.types.PushConfig): - If push delivery is used with this subscription, this - field is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages - using API methods. + If push delivery is used with this + subscription, this field is used to + configure it. This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this @@ -597,7 +599,7 @@ def sample_create_subscription(): Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it - is considered to be outstanding. During that time + is considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). @@ -629,7 +631,11 @@ def sample_create_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. + A subscription resource. If none of push_config or bigquery_config is + set, then the subscriber will pull and ack messages + using API methods. At most one of these fields may be + set. + """ # Create or coerce a protobuf request object. # Quick check: If we got a request object, we should *not* have @@ -735,7 +741,11 @@ def sample_get_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. + A subscription resource. If none of push_config or bigquery_config is + set, then the subscriber will pull and ack messages + using API methods. At most one of these fields may be + set. + """ # Create or coerce a protobuf request object. 
# Quick check: If we got a request object, we should *not* have @@ -785,6 +795,8 @@ def update_subscription( self, request: Optional[Union[pubsub.UpdateSubscriptionRequest, dict]] = None, *, + subscription: Optional[pubsub.Subscription] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), @@ -827,6 +839,21 @@ def sample_update_subscription(): request (Union[google.pubsub_v1.types.UpdateSubscriptionRequest, dict]): The request object. Request for the UpdateSubscription method. + subscription (google.pubsub_v1.types.Subscription): + Required. The updated subscription + object. + + This corresponds to the ``subscription`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. Indicates which fields in + the provided subscription to update. + Must be specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -835,15 +862,34 @@ def sample_update_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. + A subscription resource. If none of push_config or bigquery_config is + set, then the subscriber will pull and ack messages + using API methods. At most one of these fields may be + set. + """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([subscription, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # Minor optimization to avoid making a copy if the user passes # in a pubsub.UpdateSubscriptionRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, pubsub.UpdateSubscriptionRequest): request = pubsub.UpdateSubscriptionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if subscription is not None: + request.subscription = subscription + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1327,9 +1373,7 @@ def pull( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.PullResponse: - r"""Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. + r"""Pulls messages from the server. .. code-block:: python @@ -1674,13 +1718,12 @@ def get_snapshot( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: - r"""Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + r"""Gets the configuration details of a snapshot. Snapshots are used + in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. .. 
code-block:: python @@ -1960,9 +2003,10 @@ def sample_create_snapshot(): name is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. Note that for REST API - requests, you must specify a name. See the resource name - rules. Format is - ``projects/{project}/snapshots/{snap}``. + requests, you must specify a name. See the `resource + name + rules `__. + Format is ``projects/{project}/snapshots/{snap}``. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this @@ -2046,18 +2090,17 @@ def update_snapshot( self, request: Optional[Union[pubsub.UpdateSnapshotRequest, dict]] = None, *, + snapshot: Optional[pubsub.Snapshot] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. .. code-block:: python @@ -2088,6 +2131,21 @@ def sample_update_snapshot(): request (Union[google.pubsub_v1.types.UpdateSnapshotRequest, dict]): The request object. Request for the UpdateSnapshot method. + snapshot (google.pubsub_v1.types.Snapshot): + Required. The updated snapshot + object. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Required. 
Indicates which fields in + the provided snapshot to update. Must be + specified and non-empty. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2105,12 +2163,27 @@ def sample_update_snapshot(): """ # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([snapshot, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + # Minor optimization to avoid making a copy if the user passes # in a pubsub.UpdateSnapshotRequest. # There's no risk of modifying the input as we've already verified # there are no flattened fields. if not isinstance(request, pubsub.UpdateSnapshotRequest): request = pubsub.UpdateSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if snapshot is not None: + request.snapshot = snapshot + if update_mask is not None: + request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 2cf93a7262ba..ea2991f390bf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -241,6 +241,7 @@ def _prep_wrapped_messages(self, client_info): multiplier=1.3, predicate=retries.if_exception_type( core_exceptions.Aborted, + core_exceptions.InternalServerError, core_exceptions.ServiceUnavailable, core_exceptions.Unknown, ), diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 5a0cf0d8e5eb..4667bf7079eb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -452,9 +452,7 @@ def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty_pb2.Empty]: def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: r"""Return a callable for the pull method over gRPC. - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. + Pulls messages from the server. Returns: Callable[[~.PullRequest], @@ -543,13 +541,12 @@ def modify_push_config( def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot]: r"""Return a callable for the get snapshot method over gRPC. - Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. 
+ Gets the configuration details of a snapshot. Snapshots are used + in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. Returns: Callable[[~.GetSnapshotRequest], @@ -650,13 +647,10 @@ def update_snapshot( r"""Return a callable for the update snapshot method over gRPC. Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. Returns: Callable[[~.UpdateSnapshotRequest], diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index edcaf4911142..9a266b4288c9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -459,9 +459,7 @@ def acknowledge( def pull(self) -> Callable[[pubsub.PullRequest], Awaitable[pubsub.PullResponse]]: r"""Return a callable for the pull method over gRPC. - Pulls messages from the server. The server may return - ``UNAVAILABLE`` if there are too many concurrent pull requests - pending for the given subscription. + Pulls messages from the server. Returns: Callable[[~.PullRequest], @@ -554,13 +552,12 @@ def get_snapshot( ) -> Callable[[pubsub.GetSnapshotRequest], Awaitable[pubsub.Snapshot]]: r"""Return a callable for the get snapshot method over gRPC. 
- Gets the configuration details of a snapshot. - Snapshots are used in Seek - operations, which allow you to manage message - acknowledgments in bulk. That is, you can set the - acknowledgment state of messages in an existing - subscription to the state captured by a snapshot. + Gets the configuration details of a snapshot. Snapshots are used + in + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. Returns: Callable[[~.GetSnapshotRequest], @@ -663,13 +660,10 @@ def update_snapshot( r"""Return a callable for the update snapshot method over gRPC. Updates an existing snapshot. Snapshots are used in - Seek - operations, which allow - you to manage message acknowledgments in bulk. That is, - you can set the acknowledgment state of messages in an - existing subscription to the state captured by a - snapshot. + `Seek `__ + operations, which allow you to manage message acknowledgments in + bulk. That is, you can set the acknowledgment state of messages + in an existing subscription to the state captured by a snapshot. Returns: Callable[[~.UpdateSnapshotRequest], diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 878b1f381724..3e2f225ad908 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -184,7 +184,7 @@ class Topic(proto.Message): timestamp `__ that is up to ``message_retention_duration`` in the past. If this field is not set, message retention is controlled by - settings on individual subscriptions. Cannot be more than 7 + settings on individual subscriptions. Cannot be more than 31 days or less than 10 minutes. 
""" @@ -581,7 +581,9 @@ class DetachSubscriptionResponse(proto.Message): class Subscription(proto.Message): - r"""A subscription resource. + r"""A subscription resource. If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will pull and ack + messages using API methods. At most one of these fields may be set. Attributes: name (str): @@ -601,23 +603,19 @@ class Subscription(proto.Message): field will be ``_deleted-topic_`` if the topic has been deleted. push_config (google.pubsub_v1.types.PushConfig): - If push delivery is used with this subscription, this field - is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages using - API methods. + If push delivery is used with this + subscription, this field is used to configure + it. bigquery_config (google.pubsub_v1.types.BigQueryConfig): - If delivery to BigQuery is used with this subscription, this - field is used to configure it. Either ``pushConfig`` or - ``bigQueryConfig`` can be set, but not both. If both are - empty, then the subscriber will pull and ack messages using - API methods. + If delivery to BigQuery is used with this + subscription, this field is used to configure + it. ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt before resending the message. In the interval after the message is delivered and before it is acknowledged, it is - considered to be outstanding. During that time period, the + considered to be *outstanding*. During that time period, the message will not be redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial @@ -652,9 +650,8 @@ class Subscription(proto.Message): Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes. labels (MutableMapping[str, str]): - See - Creating and managing labels. 
+ See `Creating and managing + labels `__. enable_message_ordering (bool): If true, messages published with the same ``ordering_key`` in ``PubsubMessage`` will be delivered to the subscribers in @@ -977,11 +974,7 @@ class PushConfig(proto.Message): - ``v1`` or ``v1beta2``: uses the push format defined in the v1 Pub/Sub API. - For example: - - .. raw:: html - -
attributes { "x-goog-version": "v1" } 
+ For example: ``attributes { "x-goog-version": "v1" }`` oidc_token (google.pubsub_v1.types.PushConfig.OidcToken): If specified, Pub/Sub will generate and attach an OIDC JWT token as an ``Authorization`` header in the HTTP request for @@ -1047,7 +1040,7 @@ class BigQueryConfig(proto.Message): Attributes: table (str): The name of the table to which to write data, - of the form {projectId}:{datasetId}.{tableId} + of the form {projectId}.{datasetId}.{tableId} use_topic_schema (bool): When true, use the topic's schema as the columns to write to in BigQuery, if it exists. @@ -1081,8 +1074,15 @@ class State(proto.Enum): The subscription can actively send messages to BigQuery PERMISSION_DENIED (2): - Cannot write to the BigQuery table because of - permission denied errors. + Cannot write to the BigQuery table because of permission + denied errors. This can happen if + + - Pub/Sub SA has not been granted the `appropriate BigQuery + IAM + permissions `__ + - bigquery.googleapis.com API is not enabled for the + project + (`instructions `__) NOT_FOUND (3): Cannot write to the BigQuery table because it does not exist. @@ -1347,9 +1347,10 @@ class PullResponse(proto.Message): Attributes: received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): Received Pub/Sub messages. The list will be empty if there - are no more messages available in the backlog. For JSON, the - response can be entirely empty. The Pub/Sub system may - return fewer than the ``maxMessages`` requested even if + are no more messages available in the backlog, or if no + messages could be returned before the request timeout. For + JSON, the response can be entirely empty. The Pub/Sub system + may return fewer than the ``maxMessages`` requested even if there are more messages available in the backlog. """ @@ -1570,6 +1571,9 @@ class AcknowledgeConfirmation(proto.Message): unordered_ack_ids (MutableSequence[str]): List of acknowledgement IDs that were out of order. 
+ temporary_failed_ack_ids (MutableSequence[str]): + List of acknowledgement IDs that failed + processing with temporary issues. """ ack_ids: MutableSequence[str] = proto.RepeatedField( @@ -1584,6 +1588,10 @@ class AcknowledgeConfirmation(proto.Message): proto.STRING, number=3, ) + temporary_failed_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class ModifyAckDeadlineConfirmation(proto.Message): r"""Acknowledgement IDs sent in one or more previous requests to @@ -1596,6 +1604,9 @@ class ModifyAckDeadlineConfirmation(proto.Message): List of acknowledgement IDs that were malformed or whose acknowledgement deadline has expired. + temporary_failed_ack_ids (MutableSequence[str]): + List of acknowledgement IDs that failed + processing with temporary issues. """ ack_ids: MutableSequence[str] = proto.RepeatedField( @@ -1606,6 +1617,10 @@ class ModifyAckDeadlineConfirmation(proto.Message): proto.STRING, number=2, ) + temporary_failed_ack_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) class SubscriptionProperties(proto.Message): r"""Subscription properties sent as part of the response. @@ -1659,8 +1674,9 @@ class CreateSnapshotRequest(proto.Message): is not provided in the request, the server will assign a random name for this snapshot on the same project as the subscription. Note that for REST API requests, you must - specify a name. See the resource name rules. Format is - ``projects/{project}/snapshots/{snap}``. + specify a name. See the `resource name + rules `__. + Format is ``projects/{project}/snapshots/{snap}``. subscription (str): Required. The subscription whose backlog the snapshot retains. Specifically, the created snapshot is guaranteed to @@ -1673,9 +1689,8 @@ class CreateSnapshotRequest(proto.Message): CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. labels (MutableMapping[str, str]): - See - Creating and managing labels. 
+ See `Creating and managing + labels `__. """ name: str = proto.Field( diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index e0f3623a999c..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.14.1" + "version": "0.1.0" }, "snippets": [ { @@ -1315,6 +1315,14 @@ "name": "request", "type": "google.pubsub_v1.types.UpdateTopicRequest" }, + { + "name": "topic", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -1391,6 +1399,14 @@ "name": "request", "type": "google.pubsub_v1.types.UpdateTopicRequest" }, + { + "name": "topic", + "type": "google.pubsub_v1.types.Topic" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5400,6 +5416,14 @@ "name": "request", "type": "google.pubsub_v1.types.UpdateSnapshotRequest" }, + { + "name": "snapshot", + "type": "google.pubsub_v1.types.Snapshot" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5476,6 +5500,14 @@ "name": "request", "type": "google.pubsub_v1.types.UpdateSnapshotRequest" }, + { + "name": "snapshot", + "type": "google.pubsub_v1.types.Snapshot" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5553,6 +5585,14 @@ "name": "request", "type": 
"google.pubsub_v1.types.UpdateSubscriptionRequest" }, + { + "name": "subscription", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -5629,6 +5669,14 @@ "name": "request", "type": "google.pubsub_v1.types.UpdateSubscriptionRequest" }, + { + "name": "subscription", + "type": "google.pubsub_v1.types.Subscription" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, { "name": "retry", "type": "google.api_core.retry.Retry" diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 1b86bb7358c6..6badf82d6544 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -1057,6 +1064,96 @@ async def test_update_topic_field_headers_async(): ) in kw["metadata"] +def test_update_topic_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = pubsub.Topic() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_topic( + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = pubsub.Topic(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_topic_flattened_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_topic( + pubsub.UpdateTopicRequest(), + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_topic_flattened_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Topic() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Topic()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_topic( + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].topic + mock_val = pubsub.Topic(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_topic_flattened_error_async(): + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_topic( + pubsub.UpdateTopicRequest(), + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + @pytest.mark.parametrize( "request_type", [ diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 4c7ffd5ed934..54b8d8ac3d6a 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -24,10 +24,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 79fd1bdbe7e2..3be5857f3175 100644 --- 
a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -25,10 +25,17 @@ import grpc from grpc.experimental import aio +from collections.abc import Iterable +from google.protobuf import json_format +import json import math import pytest from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers +from requests import Response +from requests import Request, PreparedRequest +from requests.sessions import Session +from google.protobuf import json_format from google.api_core import client_options from google.api_core import exceptions as core_exceptions @@ -1423,6 +1430,100 @@ async def test_update_subscription_field_headers_async(): ) in kw["metadata"] +def test_update_subscription_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_subscription( + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = pubsub.Subscription(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_subscription_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_subscription( + pubsub.UpdateSubscriptionRequest(), + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_subscription_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Subscription() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Subscription()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_subscription( + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].subscription + mock_val = pubsub.Subscription(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_subscription_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_subscription( + pubsub.UpdateSubscriptionRequest(), + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + @pytest.mark.parametrize( "request_type", [ @@ -4189,6 +4290,96 @@ async def test_update_snapshot_field_headers_async(): ) in kw["metadata"] +def test_update_snapshot_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_snapshot( + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = pubsub.Snapshot(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_snapshot_flattened_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_snapshot( + pubsub.UpdateSnapshotRequest(), + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_snapshot_flattened_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = pubsub.Snapshot() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.Snapshot()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_snapshot( + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].snapshot + mock_val = pubsub.Snapshot(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_snapshot_flattened_error_async(): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_snapshot( + pubsub.UpdateSnapshotRequest(), + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + @pytest.mark.parametrize( "request_type", [ From 168913244e042a8733160de6f068abfec44f6c3e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 23 Feb 2023 11:00:52 -0500 Subject: [PATCH 0925/1197] chore(main): release 2.15.0 (#867) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 25 +++++++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 29 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 06613a33b430..4fa518a27750 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.14.1" + ".": "2.15.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index cb33891da8aa..201301eb4d42 100644 --- 
a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,31 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.15.0](https://github.com/googleapis/python-pubsub/compare/v2.14.1...v2.15.0) (2023-02-22) + + +### Features + +* Add google.api.method.signature to update methods ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Add temporary_failed_ack_ids to ModifyAckDeadlineConfirmation ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) + + +### Bug Fixes + +* Add service_yaml_parameters to py_gapic_library BUILD.bazel targets ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Move global import in publisher sample ([#866](https://github.com/googleapis/python-pubsub/issues/866)) ([271a46d](https://github.com/googleapis/python-pubsub/commit/271a46d4da0c668674a36c0f58bbe0fe70985b75)) +* Port proto changes ([#871](https://github.com/googleapis/python-pubsub/issues/871)) ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) + + +### Documentation + +* Clarify BigQueryConfig PERMISSION_DENIED state ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Clarify subscription description ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Fix Pull description ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Fix PullResponse description ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Replacing HTML code with Markdown ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) +* Update Pub/Sub topic retention limit from 7 days to 
31 days ([3dd43d6](https://github.com/googleapis/python-pubsub/commit/3dd43d6c9facc59c7c4913cac605aa95176cc857)) + ## [2.14.1](https://github.com/googleapis/python-pubsub/compare/v2.14.0...v2.14.1) (2023-02-08) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index ef7c50064e79..2788e5e55993 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.14.1" # {x-release-please-version} +__version__ = "2.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index ef7c50064e79..2788e5e55993 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.14.1" # {x-release-please-version} +__version__ = "2.15.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..3e40ad50d6ce 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.15.0" }, "snippets": [ { From 88efec2c3eb815ac60c3d196eb744ba1711a665b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 27 Feb 2023 12:11:49 -0500 Subject: [PATCH 0926/1197] chore(python): upgrade gcp-releasetool in .kokoro [autoapprove] (#876) Source-Link: https://github.com/googleapis/synthtool/commit/5f2a6089f73abf06238fe4310f6a14d6f6d1eed3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- packages/google-cloud-pubsub/.kokoro/requirements.in | 2 +- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 894fb6bc9b47..5fc5daa31783 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f62c53736eccb0c4934a3ea9316e0d57696bb49c1a7c86c726e9bb8a2f87dadf + digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.in b/packages/google-cloud-pubsub/.kokoro/requirements.in index cbd7e77f44db..882178ce6001 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.in +++ b/packages/google-cloud-pubsub/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x importlib-metadata typing-extensions twine diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 096e4800a9ac..fa99c12908f0 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -154,9 +154,9 @@ gcp-docuploader==0.6.4 \ --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ --hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf # via -r requirements.in -gcp-releasetool==1.10.0 \ - --hash=sha256:72a38ca91b59c24f7e699e9227c90cbe4dd71b789383cb0164b088abae294c83 \ - --hash=sha256:8c7c99320208383d4bb2b808c6880eb7a81424afe7cdba3c8d84b25f4f0e097d +gcp-releasetool==1.10.5 \ + --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ + --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 # via -r requirements.in google-api-core==2.10.2 \ --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ From 1a6638c0c9f610aee12432fe546c88eb7ec64e6d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Mar 2023 10:41:27 +0000 Subject: [PATCH 0927/1197] chore(deps): update dependency google-cloud-bigquery to v3.6.0 (#873) Co-authored-by: Anna Cocuzzo 
<63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index ba89d85dcc15..3c743a349f91 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.1 mock==5.0.1 flaky==3.7.0 -google-cloud-bigquery==3.5.0 +google-cloud-bigquery==3.6.0 From 9156ade32a4b68c166b55350c2be92f14a974cac Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 1 Mar 2023 13:49:02 +0000 Subject: [PATCH 0928/1197] chore(deps): update dependency google-cloud-pubsub to v2.15.0 (#878) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index af598f2ff102..a69515e44cff 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.14.1 +google-cloud-pubsub==2.15.0 avro==1.11.1 From ce78bd964474fa4be6a36b30351720c0698b061e Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Fri, 10 Mar 2023 21:59:59 -0500 Subject: [PATCH 0929/1197] samples: Schema evolution (#881) * samples: schema evolution * Add command-line commands * Fix tag for rollback * Make formatting fixes * Formatting fixes * Fix exceptions --- .../snippets/resources/us-states-plus.avsc | 24 ++ .../snippets/resources/us-states-plus.proto | 9 + .../samples/snippets/schema.py | 366 +++++++++++++++++- .../samples/snippets/schema_test.py | 132 ++++++- 4 files changed, 523 insertions(+), 
8 deletions(-) create mode 100644 packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.avsc create mode 100644 packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.proto diff --git a/packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.avsc b/packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.avsc new file mode 100644 index 000000000000..74225ae7e2e6 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.avsc @@ -0,0 +1,24 @@ +{ + "type":"record", + "name":"State", + "namespace":"utilities", + "doc":"A list of states in the United States of America.", + "fields":[ + { + "name":"name", + "type":"string", + "doc":"The common name of the state." + }, + { + "name":"post_abbr", + "type":"string", + "doc":"The postal code abbreviation of the state." + }, + { + "name":"population", + "type":"long", + "default":0, + "doc":"The population of the state." + } + ] +} diff --git a/packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.proto b/packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.proto new file mode 100644 index 000000000000..9f845d9f4421 --- /dev/null +++ b/packages/google-cloud-pubsub/samples/snippets/resources/us-states-plus.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package utilities; + +message StateProto { + string name = 1; + string post_abbr = 2; + int64 population = 3; +} diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index e2a171d1dbf3..9c0dd656ffd5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2021 Google LLC. All Rights Reserved. +# Copyright 2023 Google LLC. All Rights Reserved. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -54,6 +54,7 @@ def create_avro_schema(project_id: str, schema_id: str, avsc_file: str) -> None: request={"parent": project_path, "schema": schema, "schema_id": schema_id} ) print(f"Created a schema using an Avro schema file:\n{result}") + return result except AlreadyExists: print(f"{schema_id} already exists.") # [END pubsub_create_avro_schema] @@ -88,11 +89,76 @@ def create_proto_schema(project_id: str, schema_id: str, proto_file: str) -> Non request={"parent": project_path, "schema": schema, "schema_id": schema_id} ) print(f"Created a schema using a protobuf schema file:\n{result}") + return result except AlreadyExists: print(f"{schema_id} already exists.") # [END pubsub_create_proto_schema] +def commit_avro_schema(project_id: str, schema_id: str, avsc_file: str) -> None: + """Commit a schema resource from a JSON-formatted Avro schema file.""" + # [START pubsub_commit_avro_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # avsc_file = "path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + + # Read a JSON-formatted Avro schema file as a string. 
+ with open(avsc_file, "rb") as f: + avsc_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema(name=schema_path, type_=Schema.Type.AVRO, definition=avsc_source) + + try: + result = schema_client.commit_schema( + request={"schema": schema, "name": schema_path} + ) + print(f"Committed a schema revision using an Avro schema file:\n{result}") + return result + except NotFound: + print(f"{schema_id} does not exist.") + # [END pubsub_commit_avro_schema] + + +def commit_proto_schema(project_id: str, schema_id: str, proto_file: str) -> None: + """Commit a schema revision from a protobuf schema file.""" + # [START pubsub_commit_proto_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + from google.pubsub_v1.types import Schema + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + # proto_file = "path/to/a/proto/file/(.proto)/formatted/in/protocol/buffers" + + # Read a protobuf schema file as a string. 
+ with open(proto_file, "rb") as f: + proto_source = f.read().decode("utf-8") + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + schema = Schema( + name=schema_path, type_=Schema.Type.PROTOCOL_BUFFER, definition=proto_source + ) + + try: + result = schema_client.commit_schema( + request={"schema": schema, "name": schema_path} + ) + print(f"Committed a schema revision using a protobuf schema file:\n{result}") + return result + except NotFound: + print(f"{schema_id} does not exist.") + # [END pubsub_commit_proto_schema] + + def get_schema(project_id: str, schema_id: str) -> None: """Get a schema resource.""" # [START pubsub_get_schema] @@ -114,6 +180,32 @@ def get_schema(project_id: str, schema_id: str) -> None: # [END pubsub_get_schema] +def get_schema_revision( + project_id: str, schema_id: str, schema_revision_id: str +) -> None: + """Get a schema revision.""" + # [START pubsub_get_schema_revision] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. 
+ # project_id = "your-project-id" + # schema_id = "your-schema-id" + # schema_revision_id = "your-schema-revision-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path( + project_id, schema_id + "@" + schema_revision_id + ) + + try: + result = schema_client.get_schema(request={"name": schema_path}) + print(f"Got a schema revision:\n{result}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_get_schema_revision] + + def list_schemas(project_id: str) -> None: """List schema resources.""" # [START pubsub_list_schemas] @@ -132,6 +224,51 @@ def list_schemas(project_id: str) -> None: # [END pubsub_list_schemas] +def list_schema_revisions(project_id: str, schema_id: str) -> None: + """List schema revisions for a schema resource.""" + # [START pubsub_list_schema_revisions] + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # schema_id = "your-schema-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + for schema in schema_client.list_schema_revisions(request={"name": schema_path}): + print(schema) + + print("Listed schema revisions.") + # [END pubsub_list_schema_revisions] + + +def rollback_schema_revision( + project_id: str, schema_id: str, schema_revision_id: str +) -> None: + """Roll back a schema revision.""" + # [START pubsub_rollback_schema] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. 
+ # project_id = "your-project-id" + # schema_id = "your-schema-id" + # schema_revision_id = "your-schema-revision-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + try: + result = schema_client.rollback_schema( + request={"name": schema_path, "revision_id": schema_revision_id} + ) + print(f"Rolled back a schema revision:\n{result}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_rollback_schema] + + def delete_schema(project_id: str, schema_id: str) -> None: """Delete a schema resource.""" # [START pubsub_delete_schema] @@ -153,6 +290,28 @@ def delete_schema(project_id: str, schema_id: str) -> None: # [END pubsub_delete_schema] +def delete_schema_revision(project_id: str, schema_id: str, revision_id: str) -> None: + """Delete a schema revision.""" + # [START pubsub_delete_schema_revision] + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient + + # TODO(developer): Replace these variables before running the sample. 
+ # project_id = "your-project-id" + # schema_id = "your-schema-id" + # revision_id = "your-revision-id" + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id + "@" + revision_id) + + try: + schema_client.delete_schema_revision(request={"name": schema_path}) + print(f"Deleted a schema revision:\n{schema_path}") + except NotFound: + print(f"{schema_id} not found.") + # [END pubsub_delete_schema_revision] + + def create_topic_with_schema( project_id: str, topic_id: str, schema_id: str, message_encoding: str ) -> None: @@ -194,10 +353,106 @@ def create_topic_with_schema( except AlreadyExists: print(f"{topic_id} already exists.") except InvalidArgument: - print("Please choose either BINARY or JSON as a valid message encoding type.") + print("Schema settings are not valid.") # [END pubsub_create_topic_with_schema] +def update_topic_schema( + project_id: str, topic_id: str, first_revision_id: str, last_revision_id: str +) -> None: + """Update a topic resource's first schema revision.""" + # [START pubsub_update_topic_schema] + from google.api_core.exceptions import InvalidArgument, NotFound + from google.cloud.pubsub import PublisherClient + + # TODO(developer): Replace these variables before running the sample. 
+ # project_id = "your-project-id" + # topic_id = "your-topic-id" + # first_revision_id = "your-revision-id" + # last_revision_id = "your-revision-id" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + try: + response = publisher_client.update_topic( + request={ + "topic": { + "name": topic_path, + "schema_settings": { + "first_revision_id": first_revision_id, + "last_revision_id": last_revision_id, + }, + }, + "update_mask": "schemaSettings.firstRevisionId,schemaSettings.lastRevisionId", + } + ) + print(f"Updated a topic schema:\n{response}") + + except NotFound: + print(f"{topic_id} not found.") + except InvalidArgument: + print("Schema settings are not valid.") + # [END pubsub_update_topic_schema] + + +def create_topic_with_schema_revisions( + project_id: str, + topic_id: str, + schema_id: str, + first_revision_id: str, + last_revision_id: str, + message_encoding: str, +) -> None: + """Create a topic resource with a schema.""" + # [START pubsub_create_topic_with_schema_revisions] + from google.api_core.exceptions import AlreadyExists, InvalidArgument + from google.cloud.pubsub import PublisherClient, SchemaServiceClient + from google.pubsub_v1.types import Encoding + + # TODO(developer): Replace these variables before running the sample. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # schema_id = "your-schema-id" + # first_revision_id = "your-revision-id" + # last_revision_id = "your-revision-id" + # Choose either BINARY or JSON as valid message encoding in this topic. 
+ # message_encoding = "BINARY" + + publisher_client = PublisherClient() + topic_path = publisher_client.topic_path(project_id, topic_id) + + schema_client = SchemaServiceClient() + schema_path = schema_client.schema_path(project_id, schema_id) + + if message_encoding == "BINARY": + encoding = Encoding.BINARY + elif message_encoding == "JSON": + encoding = Encoding.JSON + else: + encoding = Encoding.ENCODING_UNSPECIFIED + + try: + response = publisher_client.create_topic( + request={ + "name": topic_path, + "schema_settings": { + "schema": schema_path, + "encoding": encoding, + "first_revision_id": first_revision_id, + "last_revision_id": last_revision_id, + }, + } + ) + print(f"Created a topic:\n{response}") + + except AlreadyExists: + print(f"{topic_id} already exists.") + except InvalidArgument: + print("Please choose either BINARY or JSON as a valid message encoding type.") + # [END pubsub_create_topic_with_schema_revisions] + + def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> None: """Pulbish a BINARY or JSON encoded message to a topic configured with an Avro schema.""" # [START pubsub_publish_avro_records] @@ -359,6 +614,90 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: # [END pubsub_subscribe_avro_records] +def subscribe_with_avro_schema_with_revisions( + project_id: str, + subscription_id: str, + avsc_file: str, + timeout: Optional[float] = None, +) -> None: + """Receive and decode messages sent to a topic with an Avro schema.""" + # [START pubsub_subscribe_avro_records_with_revisions] + import avro + from avro.io import BinaryDecoder, DatumReader + from concurrent.futures import TimeoutError + import io + import json + from google.api_core.exceptions import NotFound + from google.cloud.pubsub import SchemaServiceClient, SubscriberClient + + schema_client = SchemaServiceClient() + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # avsc_file = 
"path/to/an/avro/schema/file/(.avsc)/formatted/in/json" + # Number of seconds the subscriber listens for messages + # timeout = 5.0 + + subscriber = SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + writer_avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) + # Dict to keep readers for different schema revisions. + revisions_to_readers = {} + + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + # Get the message serialization type. + schema_name = message.attributes.get("googclient_schemaname") + schema_revision_id = message.attributes.get("googclient_schemarevisionid") + encoding = message.attributes.get("googclient_schemaencoding") + + if schema_revision_id not in revisions_to_readers: + schema_path = schema_name + "@" + schema_revision_id + try: + received_avro_schema = schema_client.get_schema( + request={"name": schema_path} + ) + except NotFound: + print(f"{schema_path} not found.") + message.nack() + return + reader_avro_schema = avro.schema.parse(received_avro_schema.definition) + revisions_to_readers[schema_revision_id] = DatumReader( + writer_avro_schema, reader_avro_schema + ) + reader = revisions_to_readers[schema_revision_id] + + # Deserialize the message data accordingly. + if encoding == "BINARY": + bout = io.BytesIO(message.data) + decoder = BinaryDecoder(bout) + message_data = reader.read(decoder) + print(f"Received a binary-encoded message:\n{message_data}") + elif encoding == "JSON": + message_data = json.loads(message.data) + print(f"Received a JSON-encoded message:\n{message_data}") + else: + print(f"Received a message with no encoding:\n{message}") + message.nack() + + message.ack() + + streaming_pull_future = subscriber.subscribe(subscription_path, callback=callback) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. 
+ with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception occurs first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + # [END pubsub_subscribe_avro_records_with_revisions] + + def subscribe_with_proto_schema( project_id: str, subscription_id: str, timeout: float ) -> None: @@ -484,16 +823,35 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_avro_schema(args.project_id, args.schema_id, args.avsc_file) if args.command == "create-proto": create_proto_schema(args.project_id, args.schema_id, args.proto_file) + if args.command == "commit-avro": + commit_avro_schema(args.project_id, args.schema_id, args.avsc_file) + if args.command == "commit-proto": + commit_proto_schema(args.project_id, args.schema_id, args.proto_file) if args.command == "get": get_schema(args.project_id, args.schema_id) + if args.command == "get-revision": + get_schema_revision(args.project_id, args.schema_id, args.revision_id) if args.command == "list": list_schemas(args.project_id) + if args.command == "list-revisions": + list_schema_revisions(args.project_id, args.schema_id) if args.command == "delete": delete_schema(args.project_id, args.schema_id) + if args.command == "delete-revision": + delete_schema_revision(args.project_id, args.schema_id, args.revision_id) if args.command == "create-topic": create_topic_with_schema( args.project_id, args.topic_id, args.schema_id, args.message_encoding ) + if args.command == "create-topic-with-revisions": + create_topic_with_schema_revisions( + args.project_id, + args.topic_id, + args.schema_id, + args.first_revision_id, + args.last_revision_id, + args.message_encoding, + ) if args.command == "publish-avro": publish_avro_records(args.project_id, args.topic_id, args.avsc_file) if args.command == "publish-proto": @@ -502,5 
+860,9 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: subscribe_with_avro_schema( args.project_id, args.subscription_id, args.avsc_file, args.timeout ) + if args.command == "receive-avro-with-revisions": + subscribe_with_avro_schema_with_revisions( + args.project_id, args.subscription_id, args.avsc_file, args.timeout + ) if args.command == "receive-proto": subscribe_with_proto_schema(args.project_id, args.subscription_id, args.timeout) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index 7780bebc1c16..f62449e88db8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -15,17 +15,14 @@ # limitations under the License. import os -from typing import Any, Callable, cast, Generator, TypeVar +from typing import Any, Callable, Generator, TypeVar, cast import uuid from _pytest.capture import CaptureFixture from flaky import flaky -from google.api_core.exceptions import InternalServerError -from google.api_core.exceptions import NotFound +from google.api_core.exceptions import InternalServerError, NotFound from google.cloud import pubsub_v1 -from google.cloud.pubsub import PublisherClient -from google.cloud.pubsub import SchemaServiceClient -from google.cloud.pubsub import SubscriberClient +from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient from google.pubsub_v1.types import Encoding import pytest @@ -39,12 +36,15 @@ raise KeyError("Need to set GOOGLE_CLOUD_PROJECT as an environment variable.") AVRO_TOPIC_ID = f"schema-test-avro-topic-{UUID}" PROTO_TOPIC_ID = f"schema-test-proto-topic-{UUID}" +PROTO_WITH_REVISIONS_TOPIC_ID = f"schema-test-proto-with-revisions-topic-{UUID}" AVRO_SUBSCRIPTION_ID = f"schema-test-avro-subscription-{UUID}" PROTO_SUBSCRIPTION_ID = f"schema-test-proto-subscription-{UUID}" AVRO_SCHEMA_ID = 
f"schema-test-avro-schema-{UUID}" PROTO_SCHEMA_ID = f"schema-test-proto-schema-{UUID}" AVSC_FILE = "resources/us-states.avsc" +AVSC_REVISION_FILE = "resources/us-states.avsc" PROTO_FILE = "resources/us-states.proto" +PROTO_REVISION_FILE = "resources/us-states.proto" # These tests run in parallel if pytest-parallel is installed. # Avoid modifying resources that are shared across tests, @@ -229,6 +229,30 @@ def test_create_proto_schema( assert f"{proto_schema}" in out +def test_commit_avro_schema( + schema_client: pubsub_v1.SchemaServiceClient, + avro_schema: str, + capsys: CaptureFixture[str], +) -> None: + schema.commit_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE) + + out, _ = capsys.readouterr() + assert "Committed a schema revision using an Avro schema file:" in out + # assert f"{avro_schema}" in out + + +def test_commit_proto_schema( + schema_client: pubsub_v1.SchemaServiceClient, + proto_schema: str, + capsys: CaptureFixture[str], +) -> None: + schema.commit_proto_schema(PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE) + + out, _ = capsys.readouterr() + assert "Committed a schema revision using a protobuf schema file:" in out + # assert f"{proto_schema}" in out + + def test_get_schema(avro_schema: str, capsys: CaptureFixture[str]) -> None: schema.get_schema(PROJECT_ID, AVRO_SCHEMA_ID) out, _ = capsys.readouterr() @@ -236,12 +260,54 @@ def test_get_schema(avro_schema: str, capsys: CaptureFixture[str]) -> None: assert f"{avro_schema}" in out +def test_get_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> None: + committed_schema = schema.commit_avro_schema( + PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE + ) + schema.get_schema_revision(PROJECT_ID, AVRO_SCHEMA_ID, committed_schema.revision_id) + out, _ = capsys.readouterr() + assert "Got a schema revision" in out + assert f"{avro_schema}" in out + + +def test_rollback_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> None: + committed_schema = 
schema.commit_avro_schema( + PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE + ) + schema.commit_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE) + schema.rollback_schema_revision( + PROJECT_ID, AVRO_SCHEMA_ID, committed_schema.revision_id + ) + out, _ = capsys.readouterr() + assert "Rolled back a schema revision" in out + # assert f"{avro_schema}" in out + + +def test_delete_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> None: + committed_schema = schema.commit_avro_schema( + PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE + ) + schema.commit_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE) + schema.delete_schema_revision( + PROJECT_ID, AVRO_SCHEMA_ID, committed_schema.revision_id + ) + out, _ = capsys.readouterr() + assert "Deleted a schema revision" in out + # assert f"{avro_schema}" in out + + def test_list_schemas(capsys: CaptureFixture[str]) -> None: schema.list_schemas(PROJECT_ID) out, _ = capsys.readouterr() assert "Listed schemas." in out +def test_list_schema_revisions(capsys: CaptureFixture[str]) -> None: + schema.list_schema_revisions(PROJECT_ID, AVRO_SCHEMA_ID) + out, _ = capsys.readouterr() + assert "Listed schema revisions." 
in out + + def test_create_topic_with_schema( avro_schema: str, capsys: CaptureFixture[str] ) -> None: @@ -253,6 +319,45 @@ def test_create_topic_with_schema( assert "BINARY" in out or "2" in out +def test_create_topic_with_schema_revisions( + proto_schema: str, capsys: CaptureFixture[str] +) -> None: + committed_schema = schema.commit_proto_schema( + PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE + ) + schema.create_topic_with_schema_revisions( + PROJECT_ID, + PROTO_WITH_REVISIONS_TOPIC_ID, + PROTO_SCHEMA_ID, + committed_schema.revision_id, + committed_schema.revision_id, + "BINARY", + ) + out, _ = capsys.readouterr() + assert "Created a topic" in out + assert f"{PROTO_WITH_REVISIONS_TOPIC_ID}" in out + assert f"{proto_schema}" in out + assert "BINARY" in out or "2" in out + + +def test_update_topic_schema( + proto_schema: str, proto_topic: str, capsys: CaptureFixture[str] +) -> None: + committed_schema = schema.commit_proto_schema( + PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE + ) + schema.update_topic_schema( + PROJECT_ID, + PROTO_WITH_REVISIONS_TOPIC_ID, + committed_schema.revision_id, + committed_schema.revision_id, + ) + out, _ = capsys.readouterr() + assert "Updated a topic schema" in out + assert f"{PROTO_WITH_REVISIONS_TOPIC_ID}" in out + assert f"{proto_schema}" in out + + def test_publish_avro_records( avro_schema: str, avro_topic: str, capsys: CaptureFixture[str] ) -> None: @@ -275,6 +380,21 @@ def test_subscribe_with_avro_schema( assert "Received a binary-encoded message:" in out +def test_subscribe_with_avro_schema_revisions( + avro_schema: str, + avro_topic: str, + avro_subscription: str, + capsys: CaptureFixture[str], +) -> None: + schema.publish_avro_records(PROJECT_ID, AVRO_TOPIC_ID, AVSC_FILE) + + schema.subscribe_with_avro_schema_with_revisions( + PROJECT_ID, AVRO_SUBSCRIPTION_ID, AVSC_FILE, 9 + ) + out, _ = capsys.readouterr() + assert "Received a binary-encoded message:" in out + + def test_publish_proto_records(proto_topic: str, 
capsys: CaptureFixture[str]) -> None: schema.publish_proto_messages(PROJECT_ID, PROTO_TOPIC_ID) out, _ = capsys.readouterr() From 16f1d3972156fb706759967091f89398dab741e5 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 11 Mar 2023 03:33:14 +0000 Subject: [PATCH 0930/1197] chore(deps): update dependency pytest to v7.2.2 (#882) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 3c743a349f91..97457d9413dd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.2.1 +pytest==7.2.2 mock==5.0.1 flaky==3.7.0 google-cloud-bigquery==3.6.0 From 975eeb99a397234214b45da3c77768346e9104fe Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 13 Mar 2023 21:25:36 +0000 Subject: [PATCH 0931/1197] chore(deps): update dependency google-cloud-bigquery to v3.7.0 (#883) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 97457d9413dd..9dca1d1d5d91 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.2 mock==5.0.1 flaky==3.7.0 -google-cloud-bigquery==3.6.0 +google-cloud-bigquery==3.7.0 From 4383808b34f03a1e13793cb680093d319683a231 Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Tue, 14 Mar 2023 11:02:53 -0400 Subject: [PATCH 0932/1197] fix: set x-goog-request-params for streaming pull request (#884) * samples: schema evolution * Add command-line 
commands * Fix tag for rollback * Make formatting fixes * Formatting fixes * Fix exceptions * fix: Set x-goog-request-params for streaming pull request --- .../subscriber/_protocol/streaming_pull_manager.py | 4 ++++ .../pubsub_v1/subscriber/test_streaming_pull_manager.py | 6 ++++++ 2 files changed, 10 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 13974ebe4bad..2f5a31e496e7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -279,6 +279,9 @@ def __init__( self._await_callbacks_on_shutdown = await_callbacks_on_shutdown self._ack_histogram = histogram.Histogram() self._last_histogram_size = 0 + self._stream_metadata = [ + ["x-goog-request-params", "subscription=" + subscription] + ] # If max_duration_per_lease_extension is the default # we set the stream_ack_deadline to the default of 60 @@ -845,6 +848,7 @@ def open( initial_request=get_initial_request, should_recover=self._should_recover, should_terminate=self._should_terminate, + metadata=self._stream_metadata, throttle_reopen=True, ) self._rpc.add_done_callback(self._on_rpc_done) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index e01299ef9227..199aea25611e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -91,6 +91,7 @@ def test__wrap_callback_errors_error(): def test_constructor_and_default_state(): + mock.sentinel.subscription = str() manager = 
streaming_pull_manager.StreamingPullManager( mock.sentinel.client, mock.sentinel.subscription ) @@ -113,6 +114,7 @@ def test_constructor_and_default_state(): def test_constructor_with_default_options(): + mock.sentinel.subscription = str() flow_control_ = types.FlowControl() manager = streaming_pull_manager.StreamingPullManager( mock.sentinel.client, @@ -128,6 +130,7 @@ def test_constructor_with_default_options(): def test_constructor_with_min_and_max_duration_per_lease_extension_(): + mock.sentinel.subscription = str() flow_control_ = types.FlowControl( min_duration_per_lease_extension=15, max_duration_per_lease_extension=20 ) @@ -142,6 +145,7 @@ def test_constructor_with_min_and_max_duration_per_lease_extension_(): def test_constructor_with_min_duration_per_lease_extension_too_low(): + mock.sentinel.subscription = str() flow_control_ = types.FlowControl( min_duration_per_lease_extension=9, max_duration_per_lease_extension=9 ) @@ -156,6 +160,7 @@ def test_constructor_with_min_duration_per_lease_extension_too_low(): def test_constructor_with_max_duration_per_lease_extension_too_high(): + mock.sentinel.subscription = str() flow_control_ = types.FlowControl( max_duration_per_lease_extension=601, min_duration_per_lease_extension=601 ) @@ -1181,6 +1186,7 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi initial_request=mock.ANY, should_recover=manager._should_recover, should_terminate=manager._should_terminate, + metadata=manager._stream_metadata, throttle_reopen=True, ) initial_request_arg = resumable_bidi_rpc.call_args.kwargs["initial_request"] From dabbd1166a0069838a811689aa8b1b85627b5904 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 14 Mar 2023 20:33:04 -0400 Subject: [PATCH 0933/1197] chore(main): release 2.15.1 (#885) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- 
packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 4fa518a27750..6abca04f48e0 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.15.0" + ".": "2.15.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 201301eb4d42..0108ba02ee59 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.15.1](https://github.com/googleapis/python-pubsub/compare/v2.15.0...v2.15.1) (2023-03-14) + + +### Bug Fixes + +* Set x-goog-request-params for streaming pull request ([#884](https://github.com/googleapis/python-pubsub/issues/884)) ([0d247e6](https://github.com/googleapis/python-pubsub/commit/0d247e6b189409b4d57c95dbbbf3df3e0fac0fa2)) + ## [2.15.0](https://github.com/googleapis/python-pubsub/compare/v2.14.1...v2.15.0) (2023-02-22) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 2788e5e55993..505a42f155da 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 2788e5e55993..505a42f155da 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.0" # {x-release-please-version} +__version__ = "2.15.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 3e40ad50d6ce..40e8727530a8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.15.0" + "version": "2.15.1" }, "snippets": [ { From 657c3813450bb5e2d46c3455c4f5bae53b9c3d7b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 16 Mar 2023 08:25:50 -0400 Subject: [PATCH 0934/1197] chore(deps): Update nox in .kokoro/requirements.in [autoapprove] (#887) Source-Link: https://github.com/googleapis/synthtool/commit/92006bb3cdc84677aa93c7f5235424ec2b157146 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 Co-authored-by: Owl Bot --- .../google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- .../google-cloud-pubsub/.kokoro/requirements.in | 2 +- .../google-cloud-pubsub/.kokoro/requirements.txt | 14 +++++--------- 3 files changed, 7 
insertions(+), 11 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 5fc5daa31783..b8edda51cf46 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,4 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8555f0e37e6261408f792bfd6635102d2da5ad73f8f09bcb24f25e6afb5fac97 + digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.in b/packages/google-cloud-pubsub/.kokoro/requirements.in index 882178ce6001..ec867d9fd65a 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.in +++ b/packages/google-cloud-pubsub/.kokoro/requirements.in @@ -5,6 +5,6 @@ typing-extensions twine wheel setuptools -nox +nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index fa99c12908f0..66a2172a76a8 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in # @@ -335,9 +335,9 @@ more-itertools==9.0.0 \ --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab # via jaraco-classes -nox==2022.8.7 \ - --hash=sha256:1b894940551dc5c389f9271d197ca5d655d40bdc6ccf93ed6880e4042760a34b \ - --hash=sha256:96cca88779e08282a699d672258ec01eb7c792d35bbbf538c723172bce23212c +nox==2022.11.21 \ 
+ --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ + --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 # via -r requirements.in packaging==21.3 \ --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ @@ -380,10 +380,6 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core -py==1.11.0 \ - --hash=sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719 \ - --hash=sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378 - # via nox pyasn1==0.4.8 \ --hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 870bf6ada121d44780bd33ad7a62fa1b02c4b073 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 20 Mar 2023 13:21:34 -0400 Subject: [PATCH 0935/1197] Samples: Fix avro.schema.Parse in snippets (#888) * fix avro.schema.parse in snippets * fix all * change avro.schema to schema * change revert Parse --- .../google-cloud-pubsub/samples/snippets/schema.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 9c0dd656ffd5..57186b3252ce 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -457,7 +457,7 @@ def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> None """Pulbish a BINARY or JSON encoded message to a topic configured with an Avro schema.""" # [START pubsub_publish_avro_records] from avro.io import BinaryEncoder, DatumWriter - import avro + import avro.schema as schema import io import json from google.api_core.exceptions import NotFound @@ -473,7 +473,7 @@ def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> 
None topic_path = publisher_client.topic_path(project_id, topic_id) # Prepare to write Avro records to the binary output stream. - avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) + avro_schema = schema.parse(open(avsc_file, "rb").read()) writer = DatumWriter(avro_schema) bout = io.BytesIO() @@ -562,7 +562,7 @@ def subscribe_with_avro_schema( ) -> None: """Receive and decode messages sent to a topic with an Avro schema.""" # [START pubsub_subscribe_avro_records] - import avro + import avro.schema as schema from avro.io import BinaryDecoder, DatumReader from concurrent.futures import TimeoutError import io @@ -579,7 +579,7 @@ def subscribe_with_avro_schema( subscriber = SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) + avro_schema = schema.parse(open(avsc_file, "rb").read()) def callback(message: pubsub_v1.subscriber.message.Message) -> None: # Get the message serialization type. @@ -622,7 +622,7 @@ def subscribe_with_avro_schema_with_revisions( ) -> None: """Receive and decode messages sent to a topic with an Avro schema.""" # [START pubsub_subscribe_avro_records_with_revisions] - import avro + import avro.schema as schema from avro.io import BinaryDecoder, DatumReader from concurrent.futures import TimeoutError import io @@ -642,7 +642,7 @@ def subscribe_with_avro_schema_with_revisions( subscriber = SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - writer_avro_schema = avro.schema.parse(open(avsc_file, "rb").read()) + writer_avro_schema = schema.parse(open(avsc_file, "rb").read()) # Dict to keep readers for different schema revisions. 
revisions_to_readers = {} @@ -662,7 +662,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"{schema_path} not found.") message.nack() return - reader_avro_schema = avro.schema.parse(received_avro_schema.definition) + reader_avro_schema = schema.parse(received_avro_schema.definition) revisions_to_readers[schema_revision_id] = DatumReader( writer_avro_schema, reader_avro_schema ) From 172ec655f4265fb66e78705890942bcc9b00d535 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Mon, 20 Mar 2023 15:01:54 -0500 Subject: [PATCH 0936/1197] docs: update missing docstrings (#890) --- .../cloud/pubsub_v1/subscriber/message.py | 44 ++++++++++--------- 1 file changed, 24 insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index c0a2e70ea965..f744966a257c 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -75,15 +75,15 @@ class Message(object): :class:`~.pubsub_v1.subscriber._consumer.Consumer`.) Attributes: - message_id: + message_id (str): The message ID. In general, you should not need to use this directly. - data: + data (bytes): The data in the message. Note that this will be a :class:`bytes`, not a text string. - attributes: + attributes (MutableMapping[str, str]): The attributes sent along with the message. See :attr:`attributes` for more information on this type. - publish_time: + publish_time (google.protobuf.timestamp_pb2.Timestamp): The time that this message was originally published. """ @@ -103,21 +103,21 @@ def __init__( responsibility of :class:`BasePolicy` subclasses to do so. Args: - message: + message (types.PubsubMessage._meta._pb): The message received from Pub/Sub. 
For performance reasons it should be the raw protobuf message normally wrapped by :class:`~pubsub_v1.types.PubsubMessage`. A raw message can be obtained from a :class:`~pubsub_v1.types.PubsubMessage` instance through the latter's ``._pb`` attribute. - ack_id: + ack_id (str): The ack_id received from Pub/Sub. - delivery_attempt: + delivery_attempt (int): The delivery attempt counter received from Pub/Sub if a DeadLetterPolicy is set on the subscription, and zero otherwise. - request_queue: + request_queue (queue.Queue): A queue provided by the policy that can accept requests; the policy is responsible for handling those requests. - exactly_once_delivery_enabled_func: + exactly_once_delivery_enabled_func (Callable[[], bool]): A Callable that returns whether exactly-once delivery is currently-enabled. Defaults to a lambda that always returns False. """ self._message = message @@ -172,8 +172,8 @@ def attributes(self) -> "containers.ScalarMap": to just cast the map to a ``dict`` or to one use ``map.get``. Returns: - The message's attributes. This is a ``dict``-like object provided by - ``google.protobuf``. + containers.ScalarMap: The message's attributes. This is a + ``dict``-like object provided by ``google.protobuf``. """ return self._attributes @@ -182,8 +182,8 @@ def data(self) -> bytes: """Return the data for the underlying Pub/Sub Message. Returns: - The message data. This is always a bytestring; if you want a text string, - call :meth:`bytes.decode`. + bytes: The message data. This is always a bytestring; if you want + a text string, call :meth:`bytes.decode`. """ return self._data @@ -192,7 +192,8 @@ def publish_time(self) -> "datetime.datetime": """Return the time that the message was originally published. Returns: - The date and time that the message was published. + datetime.datetime: The date and time that the message was + published. 
""" return self._publish_time @@ -227,7 +228,7 @@ def delivery_attempt(self) -> Optional[int]: is calculated at best effort and is approximate. Returns: - The delivery attempt counter or ``None``. + Optional[int]: The delivery attempt counter or ``None``. """ return self._delivery_attempt @@ -290,7 +291,8 @@ def ack_with_response(self) -> "futures.Future": see https://cloud.google.com/pubsub/docs/exactly-once-delivery." Returns: - A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` + futures.Future: A + :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` instance that conforms to Python Standard library's :class:`~concurrent.futures.Future` interface (but not an instance of that class). Call `result()` to get the result @@ -349,7 +351,7 @@ def modify_ack_deadline(self, seconds: int) -> None: :class:`~.pubsub_v1.subcriber._consumer.Consumer`. Args: - seconds: + seconds (int): The number of seconds to set the lease deadline to. This should be between 0 and 600. Due to network latency, values below 10 are advised against. @@ -387,12 +389,13 @@ def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": see https://cloud.google.com/pubsub/docs/exactly-once-delivery." Args: - seconds: + seconds (int): The number of seconds to set the lease deadline to. This should be between 0 and 600. Due to network latency, values below 10 are advised against. Returns: - A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` + futures.Future: A + :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` instance that conforms to Python Standard library's :class:`~concurrent.futures.Future` interface (but not an instance of that class). Call `result()` to get the result @@ -457,7 +460,8 @@ def nack_with_response(self) -> "futures.Future": see https://cloud.google.com/pubsub/docs/exactly-once-delivery." 
Returns: - A :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` + futures.Future: A + :class:`~google.cloud.pubsub_v1.subscriber.futures.Future` instance that conforms to Python Standard library's :class:`~concurrent.futures.Future` interface (but not an instance of that class). Call `result()` to get the result From 0438fb2290713ac487675504241d71f5481011ce Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 22 Mar 2023 22:36:02 -0400 Subject: [PATCH 0937/1197] chore(main): release 2.15.2 (#891) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 6abca04f48e0..d83aa2448a4c 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.15.1" + ".": "2.15.2" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0108ba02ee59..6ca26dc376b4 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.15.2](https://github.com/googleapis/python-pubsub/compare/v2.15.1...v2.15.2) (2023-03-20) + + +### Documentation + +* Update missing docstrings ([#890](https://github.com/googleapis/python-pubsub/issues/890)) 
([5849e04](https://github.com/googleapis/python-pubsub/commit/5849e048f48074e3a8ecddbe3bfbcfc9da094a28)) + ## [2.15.1](https://github.com/googleapis/python-pubsub/compare/v2.15.0...v2.15.1) (2023-03-14) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 505a42f155da..db31fdc2ac14 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.1" # {x-release-please-version} +__version__ = "2.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 505a42f155da..db31fdc2ac14 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.1" # {x-release-please-version} +__version__ = "2.15.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 40e8727530a8..256ef8a9439e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.15.1" + "version": "2.15.2" }, "snippets": [ { From 68b2d388ab7717391647088bc36e3d557f2d1dfd Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Sat, 1 Apr 2023 09:38:02 -0400 Subject: [PATCH 0938/1197] samples: cleanup test resources (#893) --- .../samples/snippets/schema_test.py | 248 ++++++++++++++---- .../samples/snippets/subscriber_test.py | 2 + 2 files changed, 194 insertions(+), 56 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema_test.py b/packages/google-cloud-pubsub/samples/snippets/schema_test.py index f62449e88db8..ccf65034f802 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema_test.py @@ -23,7 +23,7 @@ from google.api_core.exceptions import InternalServerError, NotFound from google.cloud import pubsub_v1 from google.cloud.pubsub import PublisherClient, SchemaServiceClient, SubscriberClient -from google.pubsub_v1.types import Encoding +from google.pubsub_v1.types import Encoding, Schema, Topic import pytest import schema @@ -35,18 +35,26 @@ except KeyError: raise KeyError("Need to set GOOGLE_CLOUD_PROJECT as an environment variable.") AVRO_TOPIC_ID = f"schema-test-avro-topic-{UUID}" +AVRO_TOPIC_ID_TO_CREATE = f"schema-test-avro-topic-to-create-{UUID}" PROTO_TOPIC_ID = 
f"schema-test-proto-topic-{UUID}" PROTO_WITH_REVISIONS_TOPIC_ID = f"schema-test-proto-with-revisions-topic-{UUID}" +PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE = ( + f"schema-test-proto-with-revisions-topic-to-create-{UUID}" +) AVRO_SUBSCRIPTION_ID = f"schema-test-avro-subscription-{UUID}" PROTO_SUBSCRIPTION_ID = f"schema-test-proto-subscription-{UUID}" AVRO_SCHEMA_ID = f"schema-test-avro-schema-{UUID}" +AVRO_SCHEMA_ID_TO_CREATE = f"schema-test-avro-schema-to-create-{UUID}" PROTO_SCHEMA_ID = f"schema-test-proto-schema-{UUID}" +PROTO_SCHEMA_ID_TO_CREATE = f"schema-test-proto-schema-to-create-{UUID}" +PROTO_SCHEMA_ID_TO_DELETE = f"schema-test-proto-schema-to-delete-{UUID}" AVSC_FILE = "resources/us-states.avsc" AVSC_REVISION_FILE = "resources/us-states.avsc" PROTO_FILE = "resources/us-states.proto" PROTO_REVISION_FILE = "resources/us-states.proto" -# These tests run in parallel if pytest-parallel is installed. +# These tests run in parallel in continuous integration, +# with the same UUID. # Avoid modifying resources that are shared across tests, # as this results in test flake. 
@@ -57,11 +65,42 @@ def schema_client() -> Generator[pubsub_v1.SchemaServiceClient, None, None]: yield schema_client +def ensure_schema_exists( + name: str, type: Schema.Type, schema_client: pubsub_v1.SchemaServiceClient +) -> Schema: + schema_path = schema_client.schema_path(PROJECT_ID, name) + + try: + return schema_client.get_schema(request={"name": schema_path}) + except NotFound: + project_path = f"projects/{PROJECT_ID}" + with open(AVSC_FILE if type == Schema.Type.AVRO else PROTO_FILE, "rb") as f: + definition_text = f.read().decode("utf-8") + schema = Schema(name=schema_path, type_=type, definition=definition_text) + return schema_client.create_schema( + request={"parent": project_path, "schema": schema, "schema_id": name} + ) + + @pytest.fixture(scope="module") def avro_schema( schema_client: pubsub_v1.SchemaServiceClient, ) -> Generator[str, None, None]: - avro_schema_path = schema_client.schema_path(PROJECT_ID, AVRO_SCHEMA_ID) + avro_schema = ensure_schema_exists(AVRO_SCHEMA_ID, Schema.Type.AVRO, schema_client) + + yield avro_schema.name + + try: + schema_client.delete_schema(request={"name": avro_schema.name}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def avro_schema_to_create( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + avro_schema_path = schema_client.schema_path(PROJECT_ID, AVRO_SCHEMA_ID_TO_CREATE) yield avro_schema_path @@ -75,7 +114,39 @@ def avro_schema( def proto_schema( schema_client: pubsub_v1.SchemaServiceClient, ) -> Generator[str, None, None]: - proto_schema_path = schema_client.schema_path(PROJECT_ID, PROTO_SCHEMA_ID) + proto_schema = ensure_schema_exists( + PROTO_SCHEMA_ID, Schema.Type.PROTOCOL_BUFFER, schema_client + ) + + yield proto_schema.name + + try: + schema_client.delete_schema(request={"name": proto_schema.name}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def proto_schema_to_delete( + schema_client: 
pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + proto_schema = ensure_schema_exists( + PROTO_SCHEMA_ID_TO_DELETE, Schema.Type.PROTOCOL_BUFFER, schema_client + ) + + yield proto_schema.name + + try: + schema_client.delete_schema(request={"name": proto_schema.name}) + except (NotFound, InternalServerError): + pass + + +@pytest.fixture(scope="module") +def proto_schema_to_create( + schema_client: pubsub_v1.SchemaServiceClient, +) -> Generator[str, None, None]: + proto_schema_path = schema_client.schema_path(PROJECT_ID, PROTO_SCHEMA_ID_TO_CREATE) yield proto_schema_path @@ -90,54 +161,99 @@ def publisher_client() -> Generator[pubsub_v1.PublisherClient, None, None]: yield PublisherClient() -@pytest.fixture(scope="module") -def avro_topic( - publisher_client: pubsub_v1.PublisherClient, avro_schema: str -) -> Generator[str, None, None]: - from google.pubsub_v1.types import Encoding - - avro_topic_path = publisher_client.topic_path(PROJECT_ID, AVRO_TOPIC_ID) +def ensure_topic_exists( + name: str, + schema_path: str, + encoding: Encoding, + publisher_client: pubsub_v1.PublisherClient, +) -> Topic: + topic_path = publisher_client.topic_path(PROJECT_ID, name) try: - avro_topic = publisher_client.get_topic(request={"topic": avro_topic_path}) + return publisher_client.get_topic(request={"topic": topic_path}) except NotFound: - avro_topic = publisher_client.create_topic( + return publisher_client.create_topic( request={ - "name": avro_topic_path, + "name": topic_path, "schema_settings": { - "schema": avro_schema, - "encoding": Encoding.BINARY, + "schema": schema_path, + "encoding": encoding, }, } ) + +@pytest.fixture(scope="module") +def avro_topic( + publisher_client: pubsub_v1.PublisherClient, avro_schema: str +) -> Generator[str, None, None]: + avro_topic = ensure_topic_exists( + AVRO_TOPIC_ID, avro_schema, Encoding.BINARY, publisher_client + ) + yield avro_topic.name + try: + publisher_client.delete_topic(request={"topic": avro_topic.name}) + except 
NotFound: + pass - publisher_client.delete_topic(request={"topic": avro_topic.name}) + +@pytest.fixture(scope="module") +def avro_topic_to_create( + publisher_client: pubsub_v1.PublisherClient, avro_schema: str +) -> Generator[str, None, None]: + avro_topic_path = publisher_client.topic_path(PROJECT_ID, AVRO_TOPIC_ID_TO_CREATE) + + yield avro_topic_path + try: + publisher_client.delete_topic(request={"topic": avro_topic_path}) + except NotFound: + pass @pytest.fixture(scope="module") def proto_topic( publisher_client: pubsub_v1.PublisherClient, proto_schema: str ) -> Generator[str, None, None]: - proto_topic_path = publisher_client.topic_path(PROJECT_ID, PROTO_TOPIC_ID) + proto_topic = ensure_topic_exists( + PROTO_TOPIC_ID, proto_schema, Encoding.BINARY, publisher_client + ) + yield proto_topic.name try: - proto_topic = publisher_client.get_topic(request={"topic": proto_topic_path}) + publisher_client.delete_topic(request={"topic": proto_topic.name}) except NotFound: - proto_topic = publisher_client.create_topic( - request={ - "name": proto_topic_path, - "schema_settings": { - "schema": proto_schema, - "encoding": Encoding.BINARY, - }, - } - ) + pass + + +@pytest.fixture(scope="module") +def proto_with_revisions_topic( + publisher_client: pubsub_v1.PublisherClient, proto_schema: str +) -> Generator[str, None, None]: + proto_topic = ensure_topic_exists( + PROTO_WITH_REVISIONS_TOPIC_ID, proto_schema, Encoding.BINARY, publisher_client + ) yield proto_topic.name + try: + publisher_client.delete_topic(request={"topic": proto_topic.name}) + except NotFound: + pass + + +@pytest.fixture(scope="module") +def proto_with_revisions_topic_to_create( + publisher_client: pubsub_v1.PublisherClient, proto_schema: str +) -> Generator[str, None, None]: + topic_path = publisher_client.topic_path( + PROJECT_ID, PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE + ) - publisher_client.delete_topic(request={"topic": proto_topic.name}) + yield topic_path + try: + 
publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass @pytest.fixture(scope="module") @@ -166,9 +282,12 @@ def avro_subscription( yield avro_subscription.name - subscriber_client.delete_subscription( - request={"subscription": avro_subscription.name} - ) + try: + subscriber_client.delete_subscription( + request={"subscription": avro_subscription.name} + ) + except NotFound: + pass @pytest.fixture(scope="module") @@ -190,43 +309,46 @@ def proto_subscription( yield proto_subscription.name - subscriber_client.delete_subscription( - request={"subscription": proto_subscription.name} - ) + try: + subscriber_client.delete_subscription( + request={"subscription": proto_subscription.name} + ) + except NotFound: + pass def test_create_avro_schema( schema_client: pubsub_v1.SchemaServiceClient, - avro_schema: str, + avro_schema_to_create: str, capsys: CaptureFixture[str], ) -> None: try: - schema_client.delete_schema(request={"name": avro_schema}) + schema_client.delete_schema(request={"name": avro_schema_to_create}) except NotFound: pass - schema.create_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID, AVSC_FILE) + schema.create_avro_schema(PROJECT_ID, AVRO_SCHEMA_ID_TO_CREATE, AVSC_FILE) out, _ = capsys.readouterr() assert "Created a schema using an Avro schema file:" in out - assert f"{avro_schema}" in out + assert f"{avro_schema_to_create}" in out def test_create_proto_schema( schema_client: pubsub_v1.SchemaServiceClient, - proto_schema: str, + proto_schema_to_create: str, capsys: CaptureFixture[str], ) -> None: try: - schema_client.delete_schema(request={"name": proto_schema}) + schema_client.delete_schema(request={"name": proto_schema_to_create}) except NotFound: pass - schema.create_proto_schema(PROJECT_ID, PROTO_SCHEMA_ID, PROTO_FILE) + schema.create_proto_schema(PROJECT_ID, PROTO_SCHEMA_ID_TO_CREATE, PROTO_FILE) out, _ = capsys.readouterr() assert "Created a schema using a protobuf schema file:" in out - assert f"{proto_schema}" in out + assert 
f"{proto_schema_to_create}" in out def test_commit_avro_schema( @@ -260,7 +382,7 @@ def test_get_schema(avro_schema: str, capsys: CaptureFixture[str]) -> None: assert f"{avro_schema}" in out -def test_get_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> None: +def test_get_schema_revision(avro_schema: str, capsys: CaptureFixture[str]) -> None: committed_schema = schema.commit_avro_schema( PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE ) @@ -270,7 +392,9 @@ def test_get_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> No assert f"{avro_schema}" in out -def test_rollback_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> None: +def test_rollback_schema_revision( + avro_schema: str, capsys: CaptureFixture[str] +) -> None: committed_schema = schema.commit_avro_schema( PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE ) @@ -283,7 +407,7 @@ def test_rollback_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) # assert f"{avro_schema}" in out -def test_delete_schema_revsion(avro_schema: str, capsys: CaptureFixture[str]) -> None: +def test_delete_schema_revision(avro_schema: str, capsys: CaptureFixture[str]) -> None: committed_schema = schema.commit_avro_schema( PROJECT_ID, AVRO_SCHEMA_ID, AVSC_REVISION_FILE ) @@ -309,25 +433,34 @@ def test_list_schema_revisions(capsys: CaptureFixture[str]) -> None: def test_create_topic_with_schema( - avro_schema: str, capsys: CaptureFixture[str] + avro_schema: str, + avro_topic_to_create: str, + publisher_client: pubsub_v1.PublisherClient, + capsys: CaptureFixture[str], ) -> None: - schema.create_topic_with_schema(PROJECT_ID, AVRO_TOPIC_ID, AVRO_SCHEMA_ID, "BINARY") + schema.create_topic_with_schema( + PROJECT_ID, AVRO_TOPIC_ID_TO_CREATE, AVRO_SCHEMA_ID, "BINARY" + ) out, _ = capsys.readouterr() assert "Created a topic" in out - assert f"{AVRO_TOPIC_ID}" in out + assert f"{AVRO_TOPIC_ID_TO_CREATE}" in out assert f"{avro_schema}" in out assert "BINARY" in out or "2" in out def 
test_create_topic_with_schema_revisions( - proto_schema: str, capsys: CaptureFixture[str] + proto_schema: str, + proto_with_revisions_topic_to_create: str, + publisher_client: pubsub_v1.PublisherClient, + capsys: CaptureFixture[str], ) -> None: committed_schema = schema.commit_proto_schema( PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE ) + schema.create_topic_with_schema_revisions( PROJECT_ID, - PROTO_WITH_REVISIONS_TOPIC_ID, + PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE, PROTO_SCHEMA_ID, committed_schema.revision_id, committed_schema.revision_id, @@ -335,17 +468,18 @@ def test_create_topic_with_schema_revisions( ) out, _ = capsys.readouterr() assert "Created a topic" in out - assert f"{PROTO_WITH_REVISIONS_TOPIC_ID}" in out + assert f"{PROTO_WITH_REVISIONS_TOPIC_ID_TO_CREATE}" in out assert f"{proto_schema}" in out assert "BINARY" in out or "2" in out def test_update_topic_schema( - proto_schema: str, proto_topic: str, capsys: CaptureFixture[str] + proto_schema: str, proto_with_revisions_topic: str, capsys: CaptureFixture[str] ) -> None: committed_schema = schema.commit_proto_schema( PROJECT_ID, PROTO_SCHEMA_ID, PROTO_REVISION_FILE ) + schema.update_topic_schema( PROJECT_ID, PROTO_WITH_REVISIONS_TOPIC_ID, @@ -420,8 +554,10 @@ def test_subscribe_with_proto_schema( @typed_flaky -def test_delete_schema(proto_schema: str, capsys: CaptureFixture[str]) -> None: - schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID) +def test_delete_schema( + proto_schema_to_delete: str, capsys: CaptureFixture[str] +) -> None: + schema.delete_schema(PROJECT_ID, PROTO_SCHEMA_ID_TO_DELETE) out, _ = capsys.readouterr() assert "Deleted a schema" in out - assert f"{proto_schema}" in out + assert f"{proto_schema_to_delete}" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 395c50f56316..435724782c1b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -86,6 +86,8 @@ def subscription_admin( yield subscription.name + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + @pytest.fixture(scope="module") def topic(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, None]: From f5f451d8089072e874d25328b8753e4422393ee8 Mon Sep 17 00:00:00 2001 From: meredithslota Date: Sat, 1 Apr 2023 15:39:00 +0000 Subject: [PATCH 0939/1197] ci: do not run 3.6 tests by default (#880) Copied from https://github.com/GoogleCloudPlatform/python-docs-samples/pull/7409 Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../samples/snippets/noxfile_config.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py index 32f8b4351c77..545546d21cb6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile_config.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,15 +14,15 @@ # Default TEST_CONFIG_OVERRIDE for python repos. -# You can copy this file into your directory, then it will be inported from +# You can copy this file into your directory, then it will be imported from # the noxfile.py. # The source of truth: -# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/noxfile_config.py +# https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/noxfile_config.py TEST_CONFIG_OVERRIDE = { # You can opt out from the test for specific Python versions. 
- "ignored_versions": ["2.7"], + "ignored_versions": ["2.7", "3.6"], # Old samples are opted out of enforcing Python type hints # All new samples should feature them "enforce_type_hints": True, @@ -32,6 +32,10 @@ # to use your own Cloud project. "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. "envs": {}, From f0d55ff7f97371c61d2c36b0baecabcbc0da6107 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 3 Apr 2023 14:41:07 +0100 Subject: [PATCH 0940/1197] chore(deps): update all dependencies (#886) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 9dca1d1d5d91..c704f8e3dd1d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.2.2 mock==5.0.1 flaky==3.7.0 -google-cloud-bigquery==3.7.0 +google-cloud-bigquery==3.9.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index a69515e44cff..f5ab0c1629bb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.15.0 
+google-cloud-pubsub==2.15.2 avro==1.11.1 From 3b58e01847897ce273f53ddfe78866220a02d2b8 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 4 Apr 2023 16:45:33 -0400 Subject: [PATCH 0941/1197] docs: Fix formatting of request arg in docstring (#894) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.8.5 PiperOrigin-RevId: 511892190 Source-Link: https://github.com/googleapis/googleapis/commit/a45d9c09c1287ffdf938f4e8083e791046c0b23b Source-Link: https://github.com/googleapis/googleapis-gen/commit/1907294b1d8365ea24f8c5f2e059a64124c4ed3b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTkwNzI5NGIxZDgzNjVlYTI0ZjhjNWYyZTA1OWE2NDEyNGM0ZWQzYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * remove rest.py * chore: Update gapic-generator-python to v1.9.0 PiperOrigin-RevId: 517425588 Source-Link: https://github.com/googleapis/googleapis/commit/33c93eb8b4d3aaf88e44a1be197811052be62282 Source-Link: https://github.com/googleapis/googleapis-gen/commit/d5f59789d19fc43270ff2124967d4ec8992b8e8f Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZDVmNTk3ODlkMTlmYzQzMjcwZmYyMTI0OTY3ZDRlYzg5OTJiOGU4ZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: Fix formatting of request arg in docstring chore: Update gapic-generator-python to v1.9.1 PiperOrigin-RevId: 518604533 Source-Link: https://github.com/googleapis/googleapis/commit/8a085aeddfa010af5bcef090827aac5255383d7e Source-Link: https://github.com/googleapis/googleapis-gen/commit/b2ab4b0a0ae2907e812c209198a74e0898afcb04 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjJhYjRiMGEwYWUyOTA3ZTgxMmMyMDkxOThhNzRlMDg5OGFmY2IwNCJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 11 +++---- .../pubsub_v1/services/publisher/client.py | 11 +++---- .../services/schema_service/async_client.py | 21 ++++-------- .../services/schema_service/client.py | 21 ++++-------- .../services/subscriber/async_client.py | 33 ++++++++----------- .../pubsub_v1/services/subscriber/client.py | 33 ++++++++----------- .../google/pubsub_v1/types/pubsub.py | 2 ++ .../google/pubsub_v1/types/schema.py | 2 ++ .../snippet_metadata_google.pubsub.v1.json | 2 +- 9 files changed, 55 insertions(+), 81 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index e749892f5648..92a10c38e206 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -740,7 +740,7 @@ async def sample_list_topics(): Args: request (Optional[Union[google.pubsub_v1.types.ListTopicsRequest, dict]]): - The request object. Request for the `ListTopics` method. + The request object. Request for the ``ListTopics`` method. project (:class:`str`): Required. The name of the project in which to list topics. Format is ``projects/{project-id}``. @@ -866,8 +866,7 @@ async def sample_list_topic_subscriptions(): Args: request (Optional[Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]]): - The request object. Request for the - `ListTopicSubscriptions` method. + The request object. Request for the ``ListTopicSubscriptions`` method. topic (:class:`str`): Required. The name of the topic that subscriptions are attached to. 
Format is @@ -998,8 +997,7 @@ async def sample_list_topic_snapshots(): Args: request (Optional[Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]]): - The request object. Request for the `ListTopicSnapshots` - method. + The request object. Request for the ``ListTopicSnapshots`` method. topic (:class:`str`): Required. The name of the topic that snapshots are attached to. Format is @@ -1126,8 +1124,7 @@ async def sample_delete_topic(): Args: request (Optional[Union[google.pubsub_v1.types.DeleteTopicRequest, dict]]): - The request object. Request for the `DeleteTopic` - method. + The request object. Request for the ``DeleteTopic`` method. topic (:class:`str`): Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index a9684144f4b0..32549cff78f8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -976,7 +976,7 @@ def sample_list_topics(): Args: request (Union[google.pubsub_v1.types.ListTopicsRequest, dict]): - The request object. Request for the `ListTopics` method. + The request object. Request for the ``ListTopics`` method. project (str): Required. The name of the project in which to list topics. Format is ``projects/{project-id}``. @@ -1091,8 +1091,7 @@ def sample_list_topic_subscriptions(): Args: request (Union[google.pubsub_v1.types.ListTopicSubscriptionsRequest, dict]): - The request object. Request for the - `ListTopicSubscriptions` method. + The request object. Request for the ``ListTopicSubscriptions`` method. topic (str): Required. The name of the topic that subscriptions are attached to. 
Format is @@ -1212,8 +1211,7 @@ def sample_list_topic_snapshots(): Args: request (Union[google.pubsub_v1.types.ListTopicSnapshotsRequest, dict]): - The request object. Request for the `ListTopicSnapshots` - method. + The request object. Request for the ``ListTopicSnapshots`` method. topic (str): Required. The name of the topic that snapshots are attached to. Format is @@ -1329,8 +1327,7 @@ def sample_delete_topic(): Args: request (Union[google.pubsub_v1.types.DeleteTopicRequest, dict]): - The request object. Request for the `DeleteTopic` - method. + The request object. Request for the ``DeleteTopic`` method. topic (str): Required. Name of the topic to delete. Format is ``projects/{project}/topics/{topic}``. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 68b896e4802c..cfb566e8404e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -486,8 +486,7 @@ async def sample_list_schemas(): Args: request (Optional[Union[google.pubsub_v1.types.ListSchemasRequest, dict]]): - The request object. Request for the `ListSchemas` - method. + The request object. Request for the ``ListSchemas`` method. parent (:class:`str`): Required. The name of the project in which to list schemas. Format is ``projects/{project-id}``. @@ -600,8 +599,7 @@ async def sample_list_schema_revisions(): Args: request (Optional[Union[google.pubsub_v1.types.ListSchemaRevisionsRequest, dict]]): - The request object. Request for the - `ListSchemaRevisions` method. + The request object. Request for the ``ListSchemaRevisions`` method. name (:class:`str`): Required. The name of the schema to list revisions for. 
@@ -829,8 +827,7 @@ async def sample_rollback_schema(): Args: request (Optional[Union[google.pubsub_v1.types.RollbackSchemaRequest, dict]]): - The request object. Request for the `RollbackSchema` - method. + The request object. Request for the ``RollbackSchema`` method. name (:class:`str`): Required. The schema being rolled back with revision id. @@ -941,8 +938,7 @@ async def sample_delete_schema_revision(): Args: request (Optional[Union[google.pubsub_v1.types.DeleteSchemaRevisionRequest, dict]]): - The request object. Request for the - `DeleteSchemaRevision` method. + The request object. Request for the ``DeleteSchemaRevision`` method. name (:class:`str`): Required. The name of the schema revision to be deleted, with a revision ID explicitly included. @@ -1050,8 +1046,7 @@ async def sample_delete_schema(): Args: request (Optional[Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]]): - The request object. Request for the `DeleteSchema` - method. + The request object. Request for the ``DeleteSchema`` method. name (:class:`str`): Required. Name of the schema to delete. Format is ``projects/{project}/schemas/{schema}``. @@ -1148,8 +1143,7 @@ async def sample_validate_schema(): Args: request (Optional[Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]]): - The request object. Request for the `ValidateSchema` - method. + The request object. Request for the ``ValidateSchema`` method. parent (:class:`str`): Required. The name of the project in which to validate schemas. Format is ``projects/{project-id}``. @@ -1259,8 +1253,7 @@ async def sample_validate_message(): Args: request (Optional[Union[google.pubsub_v1.types.ValidateMessageRequest, dict]]): - The request object. Request for the `ValidateMessage` - method. + The request object. Request for the ``ValidateMessage`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index d217c49f0670..2944caed91d8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -718,8 +718,7 @@ def sample_list_schemas(): Args: request (Union[google.pubsub_v1.types.ListSchemasRequest, dict]): - The request object. Request for the `ListSchemas` - method. + The request object. Request for the ``ListSchemas`` method. parent (str): Required. The name of the project in which to list schemas. Format is ``projects/{project-id}``. @@ -832,8 +831,7 @@ def sample_list_schema_revisions(): Args: request (Union[google.pubsub_v1.types.ListSchemaRevisionsRequest, dict]): - The request object. Request for the - `ListSchemaRevisions` method. + The request object. Request for the ``ListSchemaRevisions`` method. name (str): Required. The name of the schema to list revisions for. @@ -1061,8 +1059,7 @@ def sample_rollback_schema(): Args: request (Union[google.pubsub_v1.types.RollbackSchemaRequest, dict]): - The request object. Request for the `RollbackSchema` - method. + The request object. Request for the ``RollbackSchema`` method. name (str): Required. The schema being rolled back with revision id. @@ -1173,8 +1170,7 @@ def sample_delete_schema_revision(): Args: request (Union[google.pubsub_v1.types.DeleteSchemaRevisionRequest, dict]): - The request object. Request for the - `DeleteSchemaRevision` method. + The request object. Request for the ``DeleteSchemaRevision`` method. name (str): Required. The name of the schema revision to be deleted, with a revision ID explicitly included. @@ -1282,8 +1278,7 @@ def sample_delete_schema(): Args: request (Union[google.pubsub_v1.types.DeleteSchemaRequest, dict]): - The request object. Request for the `DeleteSchema` - method. 
+ The request object. Request for the ``DeleteSchema`` method. name (str): Required. Name of the schema to delete. Format is ``projects/{project}/schemas/{schema}``. @@ -1380,8 +1375,7 @@ def sample_validate_schema(): Args: request (Union[google.pubsub_v1.types.ValidateSchemaRequest, dict]): - The request object. Request for the `ValidateSchema` - method. + The request object. Request for the ``ValidateSchema`` method. parent (str): Required. The name of the project in which to validate schemas. Format is ``projects/{project-id}``. @@ -1491,8 +1485,7 @@ def sample_validate_message(): Args: request (Union[google.pubsub_v1.types.ValidateMessageRequest, dict]): - The request object. Request for the `ValidateMessage` - method. + The request object. Request for the ``ValidateMessage`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index dbe4fd0e7094..694b166d9624 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -279,10 +279,10 @@ async def sample_create_subscription(): Args: request (Optional[Union[google.pubsub_v1.types.Subscription, dict]]): - The request object. A subscription resource. If none of - `push_config` or `bigquery_config` is set, then the - subscriber will pull and ack messages using API methods. - At most one of these fields may be set. + The request object. A subscription resource. If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will + pull and ack messages using API methods. At most one of + these fields may be set. name (:class:`str`): Required. The name of the subscription. 
It must have the format @@ -706,8 +706,7 @@ async def sample_list_subscriptions(): Args: request (Optional[Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]]): - The request object. Request for the `ListSubscriptions` - method. + The request object. Request for the ``ListSubscriptions`` method. project (:class:`str`): Required. The name of the project in which to list subscriptions. Format is ``projects/{project-id}``. @@ -1194,7 +1193,7 @@ async def sample_pull(): Args: request (Optional[Union[google.pubsub_v1.types.PullRequest, dict]]): - The request object. Request for the `Pull` method. + The request object. Request for the ``Pull`` method. subscription (:class:`str`): Required. The subscription from which messages should be pulled. Format is @@ -1360,11 +1359,10 @@ def request_generator(): Args: requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): - The request object AsyncIterator. Request for the `StreamingPull` - streaming RPC method. This request is used to establish - the initial stream as well as to stream acknowledgements - and ack deadline modifications from the client to the - server. + The request object AsyncIterator. Request for the ``StreamingPull`` streaming RPC method. + This request is used to establish the initial stream as + well as to stream acknowledgements and ack deadline + modifications from the client to the server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1697,8 +1695,7 @@ async def sample_list_snapshots(): Args: request (Optional[Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]]): - The request object. Request for the `ListSnapshots` - method. + The request object. Request for the ``ListSnapshots`` method. project (:class:`str`): Required. The name of the project in which to list snapshots. Format is ``projects/{project-id}``. 
@@ -1841,8 +1838,7 @@ async def sample_create_snapshot(): Args: request (Optional[Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]]): - The request object. Request for the `CreateSnapshot` - method. + The request object. Request for the ``CreateSnapshot`` method. name (:class:`str`): Required. User-provided name for this snapshot. If the name is not provided in the request, the server will @@ -2116,8 +2112,7 @@ async def sample_delete_snapshot(): Args: request (Optional[Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]]): - The request object. Request for the `DeleteSnapshot` - method. + The request object. Request for the ``DeleteSnapshot`` method. snapshot (:class:`str`): Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. @@ -2225,7 +2220,7 @@ async def sample_seek(): Args: request (Optional[Union[google.pubsub_v1.types.SeekRequest, dict]]): - The request object. Request for the `Seek` method. + The request object. Request for the ``Seek`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 816275ef7a63..a3237d122e87 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -558,10 +558,10 @@ def sample_create_subscription(): Args: request (Union[google.pubsub_v1.types.Subscription, dict]): - The request object. A subscription resource. If none of - `push_config` or `bigquery_config` is set, then the - subscriber will pull and ack messages using API methods. - At most one of these fields may be set. + The request object. A subscription resource. 
If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will + pull and ack messages using API methods. At most one of + these fields may be set. name (str): Required. The name of the subscription. It must have the format @@ -954,8 +954,7 @@ def sample_list_subscriptions(): Args: request (Union[google.pubsub_v1.types.ListSubscriptionsRequest, dict]): - The request object. Request for the `ListSubscriptions` - method. + The request object. Request for the ``ListSubscriptions`` method. project (str): Required. The name of the project in which to list subscriptions. Format is ``projects/{project-id}``. @@ -1404,7 +1403,7 @@ def sample_pull(): Args: request (Union[google.pubsub_v1.types.PullRequest, dict]): - The request object. Request for the `Pull` method. + The request object. Request for the ``Pull`` method. subscription (str): Required. The subscription from which messages should be pulled. Format is @@ -1558,11 +1557,10 @@ def request_generator(): Args: requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): - The request object iterator. Request for the `StreamingPull` - streaming RPC method. This request is used to establish - the initial stream as well as to stream acknowledgements - and ack deadline modifications from the client to the - server. + The request object iterator. Request for the ``StreamingPull`` streaming RPC method. + This request is used to establish the initial stream as + well as to stream acknowledgements and ack deadline + modifications from the client to the server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -1863,8 +1861,7 @@ def sample_list_snapshots(): Args: request (Union[google.pubsub_v1.types.ListSnapshotsRequest, dict]): - The request object. Request for the `ListSnapshots` - method. + The request object. Request for the ``ListSnapshots`` method. project (str): Required. 
The name of the project in which to list snapshots. Format is ``projects/{project-id}``. @@ -1996,8 +1993,7 @@ def sample_create_snapshot(): Args: request (Union[google.pubsub_v1.types.CreateSnapshotRequest, dict]): - The request object. Request for the `CreateSnapshot` - method. + The request object. Request for the ``CreateSnapshot`` method. name (str): Required. User-provided name for this snapshot. If the name is not provided in the request, the server will @@ -2253,8 +2249,7 @@ def sample_delete_snapshot(): Args: request (Union[google.pubsub_v1.types.DeleteSnapshotRequest, dict]): - The request object. Request for the `DeleteSnapshot` - method. + The request object. Request for the ``DeleteSnapshot`` method. snapshot (str): Required. The name of the snapshot to delete. Format is ``projects/{project}/snapshots/{snap}``. @@ -2353,7 +2348,7 @@ def sample_seek(): Args: request (Union[google.pubsub_v1.types.SeekRequest, dict]): - The request object. Request for the `Seek` method. + The request object. Request for the ``Seek`` method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 3e2f225ad908..cee8cee4cd7e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 27a6efbbdd19..7b432906b18a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -13,6 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from __future__ import annotations + from typing import MutableMapping, MutableSequence import proto # type: ignore diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 256ef8a9439e..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.15.2" + "version": "0.1.0" }, "snippets": [ { From 682a7b4974971be8c46b22d94845f3333aa12e54 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Wed, 5 Apr 2023 09:44:12 -0400 Subject: [PATCH 0942/1197] CI: Add typed_flaky to streaming_pull system tests (#895) --- packages/google-cloud-pubsub/noxfile.py | 1 + packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/tests/system.py | 119 +++++++++++++++---- 3 files changed, 98 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 574fbd64467e..35e5916a9b9f 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -55,6 +55,7 @@ ] 
SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ "psutil", + "flaky", ] SYSTEM_TEST_LOCAL_DEPENDENCIES = [] SYSTEM_TEST_DEPENDENCIES = [] diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 8787d90c6a4b..7539adfdb4ca 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -338,7 +338,7 @@ versions=gcp.common.detect_versions(path="./google", default_first=True), unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11"], system_test_python_versions=["3.10"], - system_test_external_dependencies=["psutil"], + system_test_external_dependencies=["psutil","flaky"], ) s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml", "README.rst", "docs/index.rst"]) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index d7f7c5bea6b8..bb1265453268 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -23,6 +23,7 @@ import sys import threading import time +from typing import Any, Callable, cast, TypeVar # special case python < 3.8 if sys.version_info.major == 3 and sys.version_info.minor < 8: @@ -30,6 +31,7 @@ else: from unittest import mock +from flaky import flaky import pytest import google.auth @@ -43,6 +45,9 @@ from test_utils.system import unique_resource_id +C = TypeVar("C", bound=Callable[..., Any]) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) + @pytest.fixture(scope="module") def project(): @@ -61,13 +66,13 @@ def subscriber(request): @pytest.fixture -def topic_path(project, publisher): +def topic_path_base(project, publisher): topic_name = "t" + unique_resource_id("-") yield publisher.topic_path(project, topic_name) @pytest.fixture -def subscription_path(project, subscriber): +def subscription_path_base(project, subscriber): sub_name = "s" + unique_resource_id("-") yield subscriber.subscription_path(project, sub_name) @@ -82,7 
+87,9 @@ def cleanup(): to_call(*args, **kwargs) -def test_publish_messages(publisher, topic_path, cleanup): +def test_publish_messages(publisher, topic_path_base, cleanup): + # Customize topic path to test. + topic_path = topic_path_base + "-publish-messages" # Make sure the topic gets deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) @@ -100,7 +107,9 @@ def test_publish_messages(publisher, topic_path, cleanup): assert isinstance(result, str) -def test_publish_large_messages(publisher, topic_path, cleanup): +def test_publish_large_messages(publisher, topic_path_base, cleanup): + # Customize topic path to test. + topic_path = topic_path_base + "-publish-large-messages" # Make sure the topic gets deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) @@ -130,8 +139,11 @@ def test_publish_large_messages(publisher, topic_path, cleanup): def test_subscribe_to_messages( - publisher, topic_path, subscriber, subscription_path, cleanup + publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + # Customize topic path to test. + topic_path = topic_path_base + "-subscribe-to-messages" + subscription_path = subscription_path_base + "-subscribe-to-messages" # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -175,8 +187,12 @@ def test_subscribe_to_messages( def test_subscribe_to_messages_async_callbacks( - publisher, topic_path, subscriber, subscription_path, cleanup + publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + # Customize topic path to test. + custom_str = "-subscribe-to-messages-async-callback" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. 
cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -227,8 +243,12 @@ def test_subscribe_to_messages_async_callbacks( def test_creating_subscriptions_with_non_default_settings( - publisher, subscriber, project, topic_path, subscription_path, cleanup + publisher, subscriber, project, topic_path_base, subscription_path_base, cleanup ): + # Customize topic path to test. + custom_str = "-creating-subscriptions-with-non-default-settings" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -346,7 +366,8 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): assert subscriptions == {subscription_paths[0], subscription_paths[2]} -def test_managing_topic_iam_policy(publisher, topic_path, cleanup): +def test_managing_topic_iam_policy(publisher, topic_path_base, cleanup): + topic_path = topic_path_base + "-managing-topic-iam-policy" cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) # create a topic and customize its policy @@ -375,8 +396,11 @@ def test_managing_topic_iam_policy(publisher, topic_path, cleanup): def test_managing_subscription_iam_policy( - publisher, subscriber, topic_path, subscription_path, cleanup + publisher, subscriber, topic_path_base, subscription_path_base, cleanup ): + custom_str = "-managing-subscription-iam-policy" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. 
cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -410,7 +434,7 @@ def test_managing_subscription_iam_policy( def test_subscriber_not_leaking_open_sockets( - publisher, topic_path, subscription_path, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): # Make sure the topic and the supscription get deleted. # NOTE: Since subscriber client will be closed in the test, we should not @@ -419,8 +443,12 @@ def test_subscriber_not_leaking_open_sockets( # Also, since the client will get closed, we need another subscriber client # to clean up the subscription. We also need to make sure that auxiliary # subscriber releases the sockets, too. + custom_str = "-not-leaking-open-sockets" + subscription_path = subscription_path_base + custom_str + topic_path = topic_path_base + custom_str subscriber = pubsub_v1.SubscriberClient(transport="grpc") subscriber_2 = pubsub_v1.SubscriberClient(transport="grpc") + cleanup.append( (subscriber_2.delete_subscription, (), {"subscription": subscription_path}) ) @@ -460,8 +488,11 @@ def test_subscriber_not_leaking_open_sockets( def test_synchronous_pull_no_deadline_error_if_no_messages( - publisher, topic_path, subscriber, subscription_path, cleanup + publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + custom_str = "-synchronous-pull-deadline-error-if-no-messages" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. 
cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -485,8 +516,11 @@ def test_synchronous_pull_no_deadline_error_if_no_messages( class TestStreamingPull(object): def test_streaming_pull_callback_error_propagation( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + custom_str = "-streaming-pull-callback-error-propagation" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -512,9 +546,19 @@ class CallbackError(Exception): with pytest.raises(CallbackError): future.result(timeout=30) + @typed_flaky def test_streaming_pull_ack_deadline( - self, publisher, subscriber, project, topic_path, subscription_path, cleanup + self, + publisher, + subscriber, + project, + topic_path_base, + subscription_path_base, + cleanup, ): + custom_str = "-streaming-pull-ack-deadline" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -564,8 +608,11 @@ def test_streaming_pull_ack_deadline( subscription_future.cancel() def test_streaming_pull_max_messages( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + custom_str = "-streaming-pull-max-messages" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. 
cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -619,9 +666,13 @@ def test_streaming_pull_max_messages( finally: subscription_future.cancel() # trigger clean shutdown + @typed_flaky def test_streaming_pull_blocking_shutdown( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): + custom_str = "-streaming-pull-blocking-shutdown" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -702,9 +753,11 @@ def callback2(message): ) class TestBasicRBAC(object): def test_streaming_pull_subscriber_permissions_sufficient( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): - + custom_str = "-streaming-pull-subscriber-permissions-sufficient" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -739,9 +792,11 @@ def test_streaming_pull_subscriber_permissions_sufficient( future.cancel() def test_publisher_role_can_publish_messages( - self, publisher, topic_path, subscriber, subscription_path, cleanup + self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup ): - + custom_str = "-publisher-role-can-publish-messages" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str # Make sure the topic and subscription get deleted. 
cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) cleanup.append( @@ -767,8 +822,17 @@ def test_publisher_role_can_publish_messages( "Snapshot creation is not instant on the backend, causing test falkiness." ) def test_snapshot_seek_subscriber_permissions_sufficient( - self, project, publisher, topic_path, subscriber, subscription_path, cleanup + self, + project, + publisher, + topic_path_base, + subscriber, + subscription_path_base, + cleanup, ): + custom_str = "-snapshot-seek-subscriber-permissions-sufficient" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str snapshot_name = "snap" + unique_resource_id("-") snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) @@ -813,10 +877,10 @@ def test_snapshot_seek_subscriber_permissions_sufficient( assert len(response.received_messages) == 1 def test_viewer_role_can_list_resources( - self, project, publisher, topic_path, subscriber, cleanup + self, project, publisher, topic_path_base, subscriber, cleanup ): project_path = "projects/" + project - + topic_path = topic_path_base + "-viewer-role-can-list-resources" # Make sure the created topic gets deleted. 
cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) @@ -844,8 +908,17 @@ def test_viewer_role_can_list_resources( next(iter(viewer_only_subscriber.list_snapshots(project=project_path)), None) def test_editor_role_can_create_resources( - self, project, publisher, topic_path, subscriber, subscription_path, cleanup + self, + project, + publisher, + topic_path_base, + subscriber, + subscription_path_base, + cleanup, ): + custom_str = "-editor-role-can-create-resources" + topic_path = topic_path_base + custom_str + subscription_path = subscription_path_base + custom_str snapshot_name = "snap" + unique_resource_id("-") snapshot_path = "projects/{}/snapshots/{}".format(project, snapshot_name) From 3dd488f5d23f005eab3cabf8f25b68da18dec2ab Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Apr 2023 10:04:29 -0400 Subject: [PATCH 0943/1197] feat: enable "rest" transport in Python for services supporting numeric enums (#863) --- .../google/pubsub_v1/gapic_metadata.json | 190 + .../pubsub_v1/services/publisher/client.py | 2 + .../services/publisher/transports/__init__.py | 5 + .../services/publisher/transports/rest.py | 1612 ++++++ .../services/schema_service/client.py | 2 + .../schema_service/transports/__init__.py | 5 + .../schema_service/transports/rest.py | 1763 +++++++ .../pubsub_v1/services/subscriber/client.py | 2 + .../subscriber/transports/__init__.py | 5 + .../services/subscriber/transports/rest.py | 2368 +++++++++ .../google/pubsub_v1/types/pubsub.py | 8 +- packages/google-cloud-pubsub/tests/system.py | 37 +- .../unit/gapic/pubsub_v1/test_publisher.py | 3040 ++++++++++- .../gapic/pubsub_v1/test_schema_service.py | 3372 +++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 4694 ++++++++++++++++- 15 files changed, 16681 insertions(+), 424 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py create mode 100644 
packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json index ac814d06543f..4a8f51a516ed 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_metadata.json @@ -106,6 +106,56 @@ ] } } + }, + "rest": { + "libraryClient": "PublisherClient", + "rpcs": { + "CreateTopic": { + "methods": [ + "create_topic" + ] + }, + "DeleteTopic": { + "methods": [ + "delete_topic" + ] + }, + "DetachSubscription": { + "methods": [ + "detach_subscription" + ] + }, + "GetTopic": { + "methods": [ + "get_topic" + ] + }, + "ListTopicSnapshots": { + "methods": [ + "list_topic_snapshots" + ] + }, + "ListTopicSubscriptions": { + "methods": [ + "list_topic_subscriptions" + ] + }, + "ListTopics": { + "methods": [ + "list_topics" + ] + }, + "Publish": { + "methods": [ + "publish" + ] + }, + "UpdateTopic": { + "methods": [ + "update_topic" + ] + } + } } } }, @@ -220,6 +270,61 @@ ] } } + }, + "rest": { + "libraryClient": "SchemaServiceClient", + "rpcs": { + "CommitSchema": { + "methods": [ + "commit_schema" + ] + }, + "CreateSchema": { + "methods": [ + "create_schema" + ] + }, + "DeleteSchema": { + "methods": [ + "delete_schema" + ] + }, + "DeleteSchemaRevision": { + "methods": [ + "delete_schema_revision" + ] + }, + "GetSchema": { + "methods": [ + "get_schema" + ] + }, + "ListSchemaRevisions": { + "methods": [ + "list_schema_revisions" + ] + }, + "ListSchemas": { + "methods": [ + "list_schemas" + ] + }, + "RollbackSchema": { + "methods": [ + "rollback_schema" + ] + }, + "ValidateMessage": { + "methods": [ + "validate_message" + ] + }, + "ValidateSchema": { + "methods": [ + "validate_schema" + ] + } + } } } }, @@ -394,6 +499,91 @@ ] } } + 
}, + "rest": { + "libraryClient": "SubscriberClient", + "rpcs": { + "Acknowledge": { + "methods": [ + "acknowledge" + ] + }, + "CreateSnapshot": { + "methods": [ + "create_snapshot" + ] + }, + "CreateSubscription": { + "methods": [ + "create_subscription" + ] + }, + "DeleteSnapshot": { + "methods": [ + "delete_snapshot" + ] + }, + "DeleteSubscription": { + "methods": [ + "delete_subscription" + ] + }, + "GetSnapshot": { + "methods": [ + "get_snapshot" + ] + }, + "GetSubscription": { + "methods": [ + "get_subscription" + ] + }, + "ListSnapshots": { + "methods": [ + "list_snapshots" + ] + }, + "ListSubscriptions": { + "methods": [ + "list_subscriptions" + ] + }, + "ModifyAckDeadline": { + "methods": [ + "modify_ack_deadline" + ] + }, + "ModifyPushConfig": { + "methods": [ + "modify_push_config" + ] + }, + "Pull": { + "methods": [ + "pull" + ] + }, + "Seek": { + "methods": [ + "seek" + ] + }, + "StreamingPull": { + "methods": [ + "streaming_pull" + ] + }, + "UpdateSnapshot": { + "methods": [ + "update_snapshot" + ] + }, + "UpdateSubscription": { + "methods": [ + "update_subscription" + ] + } + } } } } diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 32549cff78f8..98caee0516b1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -60,6 +60,7 @@ from .transports.base import PublisherTransport, DEFAULT_CLIENT_INFO from .transports.grpc import PublisherGrpcTransport from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport +from .transports.rest import PublisherRestTransport class PublisherClientMeta(type): @@ -73,6 +74,7 @@ class PublisherClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] _transport_registry["grpc"] = PublisherGrpcTransport _transport_registry["grpc_asyncio"] = 
PublisherGrpcAsyncIOTransport + _transport_registry["rest"] = PublisherRestTransport def get_transport_class( cls, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index e73fe8901f80..8a2b06839029 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -19,15 +19,20 @@ from .base import PublisherTransport from .grpc import PublisherGrpcTransport from .grpc_asyncio import PublisherGrpcAsyncIOTransport +from .rest import PublisherRestTransport +from .rest import PublisherRestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[PublisherTransport]] _transport_registry["grpc"] = PublisherGrpcTransport _transport_registry["grpc_asyncio"] = PublisherGrpcAsyncIOTransport +_transport_registry["rest"] = PublisherRestTransport __all__ = ( "PublisherTransport", "PublisherGrpcTransport", "PublisherGrpcAsyncIOTransport", + "PublisherRestTransport", + "PublisherRestInterceptor", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py new file mode 100644 index 000000000000..fc31ce68127a --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -0,0 +1,1612 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import PublisherTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class PublisherRestInterceptor: + 
"""Interceptor for Publisher. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the PublisherRestTransport. + + .. code-block:: python + class MyCustomPublisherInterceptor(PublisherRestInterceptor): + def pre_create_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_topic(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_detach_subscription(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_detach_subscription(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_topic(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_topics(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_topics(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_topic_snapshots(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_topic_snapshots(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_topic_subscriptions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def 
post_list_topic_subscriptions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_publish(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_publish(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_topic(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_topic(self, response): + logging.log(f"Received response: {response}") + return response + + transport = PublisherRestTransport(interceptor=MyCustomPublisherInterceptor()) + client = PublisherClient(transport=transport) + + + """ + + def pre_create_topic( + self, request: pubsub.Topic, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_create_topic(self, response: pubsub.Topic) -> pubsub.Topic: + """Post-rpc interceptor for create_topic + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_delete_topic( + self, request: pubsub.DeleteTopicRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.DeleteTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. 
+ """ + return request, metadata + + def pre_detach_subscription( + self, + request: pubsub.DetachSubscriptionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.DetachSubscriptionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for detach_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_detach_subscription( + self, response: pubsub.DetachSubscriptionResponse + ) -> pubsub.DetachSubscriptionResponse: + """Post-rpc interceptor for detach_subscription + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_get_topic( + self, request: pubsub.GetTopicRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.GetTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_get_topic(self, response: pubsub.Topic) -> pubsub.Topic: + """Post-rpc interceptor for get_topic + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_list_topics( + self, request: pubsub.ListTopicsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.ListTopicsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_topics + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. 
+ """ + return request, metadata + + def post_list_topics( + self, response: pubsub.ListTopicsResponse + ) -> pubsub.ListTopicsResponse: + """Post-rpc interceptor for list_topics + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_list_topic_snapshots( + self, + request: pubsub.ListTopicSnapshotsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.ListTopicSnapshotsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_topic_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_list_topic_snapshots( + self, response: pubsub.ListTopicSnapshotsResponse + ) -> pubsub.ListTopicSnapshotsResponse: + """Post-rpc interceptor for list_topic_snapshots + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_list_topic_subscriptions( + self, + request: pubsub.ListTopicSubscriptionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.ListTopicSubscriptionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_topic_subscriptions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_list_topic_subscriptions( + self, response: pubsub.ListTopicSubscriptionsResponse + ) -> pubsub.ListTopicSubscriptionsResponse: + """Post-rpc interceptor for list_topic_subscriptions + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. 
+ """ + return response + + def pre_publish( + self, request: pubsub.PublishRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.PublishRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for publish + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_publish(self, response: pubsub.PublishResponse) -> pubsub.PublishResponse: + """Post-rpc interceptor for publish + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_update_topic( + self, request: pubsub.UpdateTopicRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.UpdateTopicRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_topic + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_update_topic(self, response: pubsub.Topic) -> pubsub.Topic: + """Post-rpc interceptor for update_topic + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Publisher server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Publisher server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class PublisherRestStub: + _session: AuthorizedSession + _host: str + _interceptor: PublisherRestInterceptor + + +class PublisherRestTransport(PublisherTransport): + """REST backend transport for Publisher. + + The service that an application uses to manipulate topics, + and to send messages to a topic. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[PublisherRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
+ url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. + # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or PublisherRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _CreateTopic(PublisherRestStub): + def __hash__(self): + return hash("CreateTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.Topic, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Call the create topic method over HTTP. + + Args: + request (~.pubsub.Topic): + The request object. A topic resource. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/topics/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_topic(request, metadata) + pb_request = pubsub.Topic.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Topic() + pb_resp = pubsub.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_topic(resp) + return resp + + class _DeleteTopic(PublisherRestStub): + def __hash__(self): + return hash("DeleteTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.DeleteTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete topic method over HTTP. + + Args: + request (~.pubsub.DeleteTopicRequest): + The request object. Request for the ``DeleteTopic`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{topic=projects/*/topics/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_topic(request, metadata) + pb_request = pubsub.DeleteTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DetachSubscription(PublisherRestStub): + def __hash__(self): + return hash("DetachSubscription") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.DetachSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.DetachSubscriptionResponse: + r"""Call the detach subscription method over HTTP. + + Args: + request (~.pubsub.DetachSubscriptionRequest): + The request object. Request for the DetachSubscription + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.DetachSubscriptionResponse: + Response for the DetachSubscription + method. Reserved for future use. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:detach", + }, + ] + request, metadata = self._interceptor.pre_detach_subscription( + request, metadata + ) + pb_request = pubsub.DetachSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.DetachSubscriptionResponse() + pb_resp = pubsub.DetachSubscriptionResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detach_subscription(resp) + return resp + + class _GetTopic(PublisherRestStub): + def __hash__(self): + return hash("GetTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.GetTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Call the get topic method over HTTP. + + Args: + request (~.pubsub.GetTopicRequest): + The request object. Request for the GetTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}", + }, + ] + request, metadata = self._interceptor.pre_get_topic(request, metadata) + pb_request = pubsub.GetTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Topic() + pb_resp = pubsub.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_topic(resp) + return resp + + class _ListTopics(PublisherRestStub): + def __hash__(self): + return hash("ListTopics") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ListTopicsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.ListTopicsResponse: + r"""Call the list topics method over HTTP. + + Args: + request (~.pubsub.ListTopicsRequest): + The request object. Request for the ``ListTopics`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.ListTopicsResponse: + Response for the ``ListTopics`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/topics", + }, + ] + request, metadata = self._interceptor.pre_list_topics(request, metadata) + pb_request = pubsub.ListTopicsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListTopicsResponse() + pb_resp = pubsub.ListTopicsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topics(resp) + return resp + + class _ListTopicSnapshots(PublisherRestStub): + def __hash__(self): + return hash("ListTopicSnapshots") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ListTopicSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.ListTopicSnapshotsResponse: + r"""Call the list topic snapshots method over HTTP. + + Args: + request (~.pubsub.ListTopicSnapshotsRequest): + The request object. Request for the ``ListTopicSnapshots`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.ListTopicSnapshotsResponse: + Response for the ``ListTopicSnapshots`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_list_topic_snapshots( + request, metadata + ) + pb_request = pubsub.ListTopicSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListTopicSnapshotsResponse() + pb_resp = pubsub.ListTopicSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topic_snapshots(resp) + return resp + + class _ListTopicSubscriptions(PublisherRestStub): + def __hash__(self): + return hash("ListTopicSubscriptions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ListTopicSubscriptionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.ListTopicSubscriptionsResponse: + r"""Call the list topic subscriptions method over HTTP. + + Args: + request (~.pubsub.ListTopicSubscriptionsRequest): + The request object. Request for the ``ListTopicSubscriptions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.ListTopicSubscriptionsResponse: + Response for the ``ListTopicSubscriptions`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}/subscriptions", + }, + ] + request, metadata = self._interceptor.pre_list_topic_subscriptions( + request, metadata + ) + pb_request = pubsub.ListTopicSubscriptionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListTopicSubscriptionsResponse() + pb_resp = pubsub.ListTopicSubscriptionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topic_subscriptions(resp) + return resp + + class _Publish(PublisherRestStub): + def __hash__(self): + return hash("Publish") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.PublishRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PublishResponse: + r"""Call the publish method over HTTP. + + Args: + request (~.pubsub.PublishRequest): + The request object. Request for the Publish method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PublishResponse: + Response for the ``Publish`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{topic=projects/*/topics/*}:publish", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_publish(request, metadata) + pb_request = pubsub.PublishRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.PublishResponse() + pb_resp = pubsub.PublishResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_publish(resp) + return resp + + class _UpdateTopic(PublisherRestStub): + def __hash__(self): + return hash("UpdateTopic") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.UpdateTopicRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Topic: + r"""Call the update topic method over HTTP. + + Args: + request (~.pubsub.UpdateTopicRequest): + The request object. Request for the UpdateTopic method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Topic: + A topic resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{topic.name=projects/*/topics/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_topic(request, metadata) + pb_request = pubsub.UpdateTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Topic() + pb_resp = pubsub.Topic.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_topic(resp) + return resp + + @property + def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def detach_subscription( + self, + ) -> Callable[ + [pubsub.DetachSubscriptionRequest], pubsub.DetachSubscriptionResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DetachSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_topic(self) -> Callable[[pubsub.GetTopicRequest], pubsub.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topics( + self, + ) -> Callable[[pubsub.ListTopicsRequest], pubsub.ListTopicsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTopics(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topic_snapshots( + self, + ) -> Callable[ + [pubsub.ListTopicSnapshotsRequest], pubsub.ListTopicSnapshotsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListTopicSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_topic_subscriptions( + self, + ) -> Callable[ + [pubsub.ListTopicSubscriptionsRequest], pubsub.ListTopicSubscriptionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListTopicSubscriptions(self._session, self._host, self._interceptor) # type: ignore + + @property + def publish(self) -> Callable[[pubsub.PublishRequest], pubsub.PublishResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Publish(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateTopic(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(PublisherRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(PublisherRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. + + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(PublisherRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("PublisherRestTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 2944caed91d8..fdec65ae1eb4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -58,6 +58,7 @@ from .transports.base import SchemaServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc import SchemaServiceGrpcTransport from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .transports.rest import SchemaServiceRestTransport class SchemaServiceClientMeta(type): @@ -71,6 +72,7 @@ class SchemaServiceClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] _transport_registry["grpc"] = SchemaServiceGrpcTransport _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport + _transport_registry["rest"] = SchemaServiceRestTransport def get_transport_class( cls, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py index 6c2d9460a073..fb62a346f701 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -19,15 +19,20 @@ from .base import SchemaServiceTransport from .grpc import 
SchemaServiceGrpcTransport from .grpc_asyncio import SchemaServiceGrpcAsyncIOTransport +from .rest import SchemaServiceRestTransport +from .rest import SchemaServiceRestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[SchemaServiceTransport]] _transport_registry["grpc"] = SchemaServiceGrpcTransport _transport_registry["grpc_asyncio"] = SchemaServiceGrpcAsyncIOTransport +_transport_registry["rest"] = SchemaServiceRestTransport __all__ = ( "SchemaServiceTransport", "SchemaServiceGrpcTransport", "SchemaServiceGrpcAsyncIOTransport", + "SchemaServiceRestTransport", + "SchemaServiceRestInterceptor", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py new file mode 100644 index 000000000000..afa08f8eda70 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -0,0 +1,1763 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + +from .base import ( + SchemaServiceTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SchemaServiceRestInterceptor: + """Interceptor for SchemaService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SchemaServiceRestTransport. + + .. code-block:: python + class MyCustomSchemaServiceInterceptor(SchemaServiceRestInterceptor): + def pre_commit_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_commit_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_schema_revision(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_schema_revision(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_schema_revisions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_schema_revisions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_schemas(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_schemas(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_rollback_schema(self, request, 
metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_rollback_schema(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_validate_message(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_validate_message(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_validate_schema(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_validate_schema(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SchemaServiceRestTransport(interceptor=MyCustomSchemaServiceInterceptor()) + client = SchemaServiceClient(transport=transport) + + + """ + + def pre_commit_schema( + self, + request: gp_schema.CommitSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gp_schema.CommitSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for commit_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_commit_schema(self, response: gp_schema.Schema) -> gp_schema.Schema: + """Post-rpc interceptor for commit_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_create_schema( + self, + request: gp_schema.CreateSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gp_schema.CreateSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. 
+ """ + return request, metadata + + def post_create_schema(self, response: gp_schema.Schema) -> gp_schema.Schema: + """Post-rpc interceptor for create_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_delete_schema( + self, request: schema.DeleteSchemaRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[schema.DeleteSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def pre_delete_schema_revision( + self, + request: schema.DeleteSchemaRevisionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema.DeleteSchemaRevisionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_schema_revision + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_delete_schema_revision(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for delete_schema_revision + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_schema( + self, request: schema.GetSchemaRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[schema.GetSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. 
+ """ + return request, metadata + + def post_get_schema(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for get_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_list_schema_revisions( + self, + request: schema.ListSchemaRevisionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema.ListSchemaRevisionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_schema_revisions + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_schema_revisions( + self, response: schema.ListSchemaRevisionsResponse + ) -> schema.ListSchemaRevisionsResponse: + """Post-rpc interceptor for list_schema_revisions + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_list_schemas( + self, request: schema.ListSchemasRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[schema.ListSchemasRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_schemas + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_list_schemas( + self, response: schema.ListSchemasResponse + ) -> schema.ListSchemasResponse: + """Post-rpc interceptor for list_schemas + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. 
+ """ + return response + + def pre_rollback_schema( + self, request: schema.RollbackSchemaRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[schema.RollbackSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for rollback_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_rollback_schema(self, response: schema.Schema) -> schema.Schema: + """Post-rpc interceptor for rollback_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_validate_message( + self, + request: schema.ValidateMessageRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[schema.ValidateMessageRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for validate_message + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_validate_message( + self, response: schema.ValidateMessageResponse + ) -> schema.ValidateMessageResponse: + """Post-rpc interceptor for validate_message + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_validate_schema( + self, + request: gp_schema.ValidateSchemaRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[gp_schema.ValidateSchemaRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for validate_schema + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. 
+ """ + return request, metadata + + def post_validate_schema( + self, response: gp_schema.ValidateSchemaResponse + ) -> gp_schema.ValidateSchemaResponse: + """Post-rpc interceptor for validate_schema + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. 
+ """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the SchemaService server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the SchemaService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SchemaServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SchemaServiceRestInterceptor + + +class SchemaServiceRestTransport(SchemaServiceTransport): + """REST backend transport for SchemaService. + + Service for doing schema-related operations. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SchemaServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. 
+ + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SchemaServiceRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _CommitSchema(SchemaServiceRestStub):
+        def __hash__(self):
+            return hash("CommitSchema")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: gp_schema.CommitSchemaRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> gp_schema.Schema:
+            r"""Call the commit schema method over HTTP.
+
+            Args:
+                request (~.gp_schema.CommitSchemaRequest):
+                    The request object. Request for CommitSchema method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.gp_schema.Schema:
+                    A schema resource.
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/schemas/*}:commit", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_commit_schema(request, metadata) + pb_request = gp_schema.CommitSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gp_schema.Schema() + pb_resp = gp_schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit_schema(resp) + return resp + + class _CreateSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("CreateSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gp_schema.CreateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.Schema: + r"""Call the create schema method over HTTP. + + Args: + request (~.gp_schema.CreateSchemaRequest): + The request object. Request for the CreateSchema method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gp_schema.Schema: + A schema resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas", + "body": "schema", + }, + ] + request, metadata = self._interceptor.pre_create_schema(request, metadata) + pb_request = gp_schema.CreateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gp_schema.Schema() + pb_resp = gp_schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_schema(resp) + return resp + + class _DeleteSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("DeleteSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.DeleteSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete schema method over HTTP. + + Args: + request (~.schema.DeleteSchemaRequest): + The request object. Request for the ``DeleteSchema`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/schemas/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_schema(request, metadata) + pb_request = schema.DeleteSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSchemaRevision(SchemaServiceRestStub): + def __hash__(self): + return hash("DeleteSchemaRevision") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.DeleteSchemaRevisionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Call the delete schema revision method over HTTP. + + Args: + request (~.schema.DeleteSchemaRevisionRequest): + The request object. Request for the ``DeleteSchemaRevision`` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + A schema resource. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/schemas/*}:deleteRevision", + }, + ] + request, metadata = self._interceptor.pre_delete_schema_revision( + request, metadata + ) + pb_request = schema.DeleteSchemaRevisionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_schema_revision(resp) + return resp + + class _GetSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("GetSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.GetSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Call the get schema method over HTTP. + + Args: + request (~.schema.GetSchemaRequest): + The request object. Request for the GetSchema method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + A schema resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/schemas/*}", + }, + ] + request, metadata = self._interceptor.pre_get_schema(request, metadata) + pb_request = schema.GetSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_schema(resp) + return resp + + class _ListSchemaRevisions(SchemaServiceRestStub): + def __hash__(self): + return hash("ListSchemaRevisions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.ListSchemaRevisionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ListSchemaRevisionsResponse: + r"""Call the list schema revisions method over HTTP. + + Args: + request (~.schema.ListSchemaRevisionsRequest): + The request object. Request for the ``ListSchemaRevisions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.ListSchemaRevisionsResponse: + Response for the ``ListSchemaRevisions`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/schemas/*}:listRevisions", + }, + ] + request, metadata = self._interceptor.pre_list_schema_revisions( + request, metadata + ) + pb_request = schema.ListSchemaRevisionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.ListSchemaRevisionsResponse() + pb_resp = schema.ListSchemaRevisionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_schema_revisions(resp) + return resp + + class _ListSchemas(SchemaServiceRestStub): + def __hash__(self): + return hash("ListSchemas") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.ListSchemasRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ListSchemasResponse: + r"""Call the list schemas method over HTTP. + + Args: + request (~.schema.ListSchemasRequest): + The request object. Request for the ``ListSchemas`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.ListSchemasResponse: + Response for the ``ListSchemas`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/schemas", + }, + ] + request, metadata = self._interceptor.pre_list_schemas(request, metadata) + pb_request = schema.ListSchemasRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.ListSchemasResponse() + pb_resp = schema.ListSchemasResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_schemas(resp) + return resp + + class _RollbackSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("RollbackSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.RollbackSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.Schema: + r"""Call the rollback schema method over HTTP. + + Args: + request (~.schema.RollbackSchemaRequest): + The request object. Request for the ``RollbackSchema`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.Schema: + A schema resource. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/schemas/*}:rollback", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_rollback_schema(request, metadata) + pb_request = schema.RollbackSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.Schema() + pb_resp = schema.Schema.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback_schema(resp) + return resp + + class _ValidateMessage(SchemaServiceRestStub): + def __hash__(self): + return hash("ValidateMessage") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: schema.ValidateMessageRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> schema.ValidateMessageResponse: + r"""Call the validate message method over HTTP. + + Args: + request (~.schema.ValidateMessageRequest): + The request object. Request for the ``ValidateMessage`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.schema.ValidateMessageResponse: + Response for the ``ValidateMessage`` method. Empty for + now. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas:validateMessage", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_validate_message( + request, metadata + ) + pb_request = schema.ValidateMessageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = schema.ValidateMessageResponse() + pb_resp = schema.ValidateMessageResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_message(resp) + return resp + + class _ValidateSchema(SchemaServiceRestStub): + def __hash__(self): + return hash("ValidateSchema") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: gp_schema.ValidateSchemaRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> gp_schema.ValidateSchemaResponse: + r"""Call the validate schema method over HTTP. + + Args: + request (~.gp_schema.ValidateSchemaRequest): + The request object. Request for the ``ValidateSchema`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.gp_schema.ValidateSchemaResponse: + Response for the ``ValidateSchema`` method. Empty for + now. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas:validate", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_validate_schema(request, metadata) + pb_request = gp_schema.ValidateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gp_schema.ValidateSchemaResponse() + pb_resp = gp_schema.ValidateSchemaResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_schema(resp) + return resp + + @property + def commit_schema( + self, + ) -> Callable[[gp_schema.CommitSchemaRequest], gp_schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CommitSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_schema( + self, + ) -> Callable[[gp_schema.CreateSchemaRequest], gp_schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_schema_revision( + self, + ) -> Callable[[schema.DeleteSchemaRevisionRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSchemaRevision(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_schema(self) -> Callable[[schema.GetSchemaRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_schema_revisions( + self, + ) -> Callable[ + [schema.ListSchemaRevisionsRequest], schema.ListSchemaRevisionsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListSchemaRevisions(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_schemas( + self, + ) -> Callable[[schema.ListSchemasRequest], schema.ListSchemasResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSchemas(self._session, self._host, self._interceptor) # type: ignore + + @property + def rollback_schema( + self, + ) -> Callable[[schema.RollbackSchemaRequest], schema.Schema]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._RollbackSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_message( + self, + ) -> Callable[[schema.ValidateMessageRequest], schema.ValidateMessageResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ValidateMessage(self._session, self._host, self._interceptor) # type: ignore + + @property + def validate_schema( + self, + ) -> Callable[[gp_schema.ValidateSchemaRequest], gp_schema.ValidateSchemaResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ValidateSchema(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(SchemaServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(SchemaServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. 
+ + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(SchemaServiceRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SchemaServiceRestTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index a3237d122e87..a6518fff84ac 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -62,6 +62,7 @@ from .transports.base import SubscriberTransport, DEFAULT_CLIENT_INFO from .transports.grpc import SubscriberGrpcTransport from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport +from .transports.rest import SubscriberRestTransport class SubscriberClientMeta(type): @@ -75,6 +76,7 @@ class SubscriberClientMeta(type): _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] _transport_registry["grpc"] = SubscriberGrpcTransport _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport + _transport_registry["rest"] = SubscriberRestTransport def get_transport_class( cls, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index f71cdecd4a4c..bb13ec634981 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -19,15 +19,20 @@ from .base import SubscriberTransport from .grpc import SubscriberGrpcTransport from .grpc_asyncio import 
SubscriberGrpcAsyncIOTransport +from .rest import SubscriberRestTransport +from .rest import SubscriberRestInterceptor # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] _transport_registry["grpc"] = SubscriberGrpcTransport _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport +_transport_registry["rest"] = SubscriberRestTransport __all__ = ( "SubscriberTransport", "SubscriberGrpcTransport", "SubscriberGrpcAsyncIOTransport", + "SubscriberRestTransport", + "SubscriberRestInterceptor", ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py new file mode 100644 index 000000000000..c78fd7297ed2 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -0,0 +1,2368 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import rest_streaming +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from requests import __version__ as requests_version +import dataclasses +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) + + +class SubscriberRestInterceptor: + """Interceptor for Subscriber. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the SubscriberRestTransport. + + .. code-block:: python + class MyCustomSubscriberInterceptor(SubscriberRestInterceptor): + def pre_acknowledge(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_create_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_subscription(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_subscription(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_subscription(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_subscription(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_subscription(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_snapshots(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_snapshots(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_list_subscriptions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_subscriptions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_modify_ack_deadline(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_modify_push_config(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_pull(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_pull(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_seek(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_seek(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_snapshot(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_snapshot(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_subscription(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_subscription(self, response): + logging.log(f"Received response: {response}") + return response + + transport = SubscriberRestTransport(interceptor=MyCustomSubscriberInterceptor()) + client = SubscriberClient(transport=transport) + + + """ + + def pre_acknowledge( + self, request: pubsub.AcknowledgeRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.AcknowledgeRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for acknowledge + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. 
+ """ + return request, metadata + + def pre_create_snapshot( + self, request: pubsub.CreateSnapshotRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.CreateSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_create_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_create_subscription( + self, request: pubsub.Subscription, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for create_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_create_subscription( + self, response: pubsub.Subscription + ) -> pubsub.Subscription: + """Post-rpc interceptor for create_subscription + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_delete_snapshot( + self, request: pubsub.DeleteSnapshotRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. 
+ """ + return request, metadata + + def pre_delete_subscription( + self, + request: pubsub.DeleteSubscriptionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.DeleteSubscriptionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for delete_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_get_snapshot( + self, request: pubsub.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.GetSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_get_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_get_subscription( + self, + request: pubsub.GetSubscriptionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.GetSubscriptionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_get_subscription( + self, response: pubsub.Subscription + ) -> pubsub.Subscription: + """Post-rpc interceptor for get_subscription + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. 
+ """ + return response + + def pre_list_snapshots( + self, request: pubsub.ListSnapshotsRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_list_snapshots( + self, response: pubsub.ListSnapshotsResponse + ) -> pubsub.ListSnapshotsResponse: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_list_subscriptions( + self, + request: pubsub.ListSubscriptionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.ListSubscriptionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for list_subscriptions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_list_subscriptions( + self, response: pubsub.ListSubscriptionsResponse + ) -> pubsub.ListSubscriptionsResponse: + """Post-rpc interceptor for list_subscriptions + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_modify_ack_deadline( + self, + request: pubsub.ModifyAckDeadlineRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.ModifyAckDeadlineRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for modify_ack_deadline + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. 
+ """ + return request, metadata + + def pre_modify_push_config( + self, + request: pubsub.ModifyPushConfigRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.ModifyPushConfigRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for modify_push_config + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def pre_pull( + self, request: pubsub.PullRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.PullRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for pull + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_pull(self, response: pubsub.PullResponse) -> pubsub.PullResponse: + """Post-rpc interceptor for pull + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_seek( + self, request: pubsub.SeekRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.SeekRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for seek + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_seek(self, response: pubsub.SeekResponse) -> pubsub.SeekResponse: + """Post-rpc interceptor for seek + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_update_snapshot( + self, request: pubsub.UpdateSnapshotRequest, metadata: Sequence[Tuple[str, str]] + ) -> Tuple[pubsub.UpdateSnapshotRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_snapshot + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. 
+ """ + return request, metadata + + def post_update_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: + """Post-rpc interceptor for update_snapshot + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_update_subscription( + self, + request: pubsub.UpdateSubscriptionRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[pubsub.UpdateSubscriptionRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for update_subscription + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_update_subscription( + self, response: pubsub.Subscription + ) -> pubsub.Subscription: + """Post-rpc interceptor for update_subscription + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_get_iam_policy( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. 
+ """ + return response + + def pre_set_iam_policy( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + def pre_test_iam_permissions( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + metadata: Sequence[Tuple[str, str]], + ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the Subscriber server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: iam_policy_pb2.TestIamPermissionsResponse + ) -> iam_policy_pb2.TestIamPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the response + after it is returned by the Subscriber server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class SubscriberRestStub: + _session: AuthorizedSession + _host: str + _interceptor: SubscriberRestInterceptor + + +class SubscriberRestTransport(SubscriberTransport): + """REST backend transport for Subscriber. + + The service that an application uses to manipulate subscriptions and + to consume messages from a subscription via the ``Pull`` method or + by establishing a bi-directional stream using the ``StreamingPull`` + method. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[SubscriberRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+            Generally, you only need to set this if you are developing
+            your own client library.
+        always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+            be used for service account credentials.
+        url_scheme: the protocol scheme for the API endpoint.  Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+        """
+        # Run the base constructor
+        # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc.
+        # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the
+        # credentials object
+        maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
+        if maybe_url_match is None:
+            raise ValueError(
+                f"Unexpected hostname structure: {host}"
+            )  # pragma: NO COVER
+
+        url_match_items = maybe_url_match.groupdict()
+
+        host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host
+
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+            api_audience=api_audience,
+        )
+        self._session = AuthorizedSession(
+            self._credentials, default_host=self.DEFAULT_HOST
+        )
+        if client_cert_source_for_mtls:
+            self._session.configure_mtls_channel(client_cert_source_for_mtls)
+        self._interceptor = interceptor or SubscriberRestInterceptor()
+        self._prep_wrapped_messages(client_info)
+
+    class _Acknowledge(SubscriberRestStub):
+        def __hash__(self):
+            return hash("Acknowledge")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: pubsub.AcknowledgeRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ):
+            r"""Call the acknowledge method over HTTP.
+
+            Args:
+                request (~.pubsub.AcknowledgeRequest):
+                    The request object.
Request for the Acknowledge method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:acknowledge", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_acknowledge(request, metadata) + pb_request = pubsub.AcknowledgeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+    class _CreateSnapshot(SubscriberRestStub):
+        def __hash__(self):
+            return hash("CreateSnapshot")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: pubsub.CreateSnapshotRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> pubsub.Snapshot:
+            r"""Call the create snapshot method over HTTP.
+
+            Args:
+                request (~.pubsub.CreateSnapshotRequest):
+                    The request object. Request for the ``CreateSnapshot`` method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.pubsub.Snapshot:
+                    A snapshot resource. Snapshots are used in
+                    `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+                    operations, which allow you to manage message
+                    acknowledgments in bulk. That is, you can set the
+                    acknowledgment state of messages in an existing
+                    subscription to the state captured by a snapshot.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/snapshots/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_snapshot(request, metadata) + pb_request = pubsub.CreateSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Snapshot() + pb_resp = pubsub.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_snapshot(resp) + return resp + + class _CreateSubscription(SubscriberRestStub): + def __hash__(self): + return hash("CreateSubscription") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.Subscription, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Call the create subscription method over HTTP. + + Args: + request (~.pubsub.Subscription): + The request object. A subscription resource. If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will + pull and ack messages using API methods. At most one of + these fields may be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will + pull and ack messages using API methods. At most one of + these fields may be set. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/subscriptions/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_create_subscription( + request, metadata + ) + pb_request = pubsub.Subscription.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Subscription() + pb_resp = pubsub.Subscription.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_subscription(resp) + return resp + + class _DeleteSnapshot(SubscriberRestStub): + def __hash__(self): + return hash("DeleteSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.DeleteSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete snapshot method over HTTP. + + Args: + request (~.pubsub.DeleteSnapshotRequest): + The request object. Request for the ``DeleteSnapshot`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{snapshot=projects/*/snapshots/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) + pb_request = pubsub.DeleteSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteSubscription(SubscriberRestStub): + def __hash__(self): + return hash("DeleteSubscription") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.DeleteSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the delete subscription method over HTTP. + + Args: + request (~.pubsub.DeleteSubscriptionRequest): + The request object. Request for the DeleteSubscription + method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{subscription=projects/*/subscriptions/*}", + }, + ] + request, metadata = self._interceptor.pre_delete_subscription( + request, metadata + ) + pb_request = pubsub.DeleteSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+            if response.status_code >= 400:
+                raise core_exceptions.from_http_response(response)
+
+    class _GetSnapshot(SubscriberRestStub):
+        def __hash__(self):
+            return hash("GetSnapshot")
+
+        __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {}
+
+        @classmethod
+        def _get_unset_required_fields(cls, message_dict):
+            return {
+                k: v
+                for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items()
+                if k not in message_dict
+            }
+
+        def __call__(
+            self,
+            request: pubsub.GetSnapshotRequest,
+            *,
+            retry: OptionalRetry = gapic_v1.method.DEFAULT,
+            timeout: Optional[float] = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+        ) -> pubsub.Snapshot:
+            r"""Call the get snapshot method over HTTP.
+
+            Args:
+                request (~.pubsub.GetSnapshotRequest):
+                    The request object. Request for the GetSnapshot method.
+                retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                    should be retried.
+                timeout (float): The timeout for this request.
+                metadata (Sequence[Tuple[str, str]]): Strings which should be
+                    sent along with the request as metadata.
+
+            Returns:
+                ~.pubsub.Snapshot:
+                    A snapshot resource. Snapshots are used in
+                    `Seek <https://cloud.google.com/pubsub/docs/replay-overview>`__
+                    operations, which allow you to manage message
+                    acknowledgments in bulk. That is, you can set the
+                    acknowledgment state of messages in an existing
+                    subscription to the state captured by a snapshot.
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{snapshot=projects/*/snapshots/*}", + }, + ] + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) + pb_request = pubsub.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Snapshot() + pb_resp = pubsub.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_snapshot(resp) + return resp + + class _GetSubscription(SubscriberRestStub): + def __hash__(self): + return hash("GetSubscription") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.GetSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Call the get subscription method over HTTP. + + Args: + request (~.pubsub.GetSubscriptionRequest): + The request object. Request for the GetSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will + pull and ack messages using API methods. At most one of + these fields may be set. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{subscription=projects/*/subscriptions/*}", + }, + ] + request, metadata = self._interceptor.pre_get_subscription( + request, metadata + ) + pb_request = pubsub.GetSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Subscription() + pb_resp = pubsub.Subscription.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_subscription(resp) + return resp + + class _ListSnapshots(SubscriberRestStub): + def __hash__(self): + return hash("ListSnapshots") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ListSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.ListSnapshotsResponse: + r"""Call the list snapshots method over HTTP. + + Args: + request (~.pubsub.ListSnapshotsRequest): + The request object. Request for the ``ListSnapshots`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.ListSnapshotsResponse: + Response for the ``ListSnapshots`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/snapshots", + }, + ] + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) + pb_request = pubsub.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListSnapshotsResponse() + pb_resp = pubsub.ListSnapshotsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_snapshots(resp) + return resp + + class _ListSubscriptions(SubscriberRestStub): + def __hash__(self): + return hash("ListSubscriptions") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ListSubscriptionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.ListSubscriptionsResponse: + r"""Call the list subscriptions method over HTTP. + + Args: + request (~.pubsub.ListSubscriptionsRequest): + The request object. Request for the ``ListSubscriptions`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.ListSubscriptionsResponse: + Response for the ``ListSubscriptions`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/subscriptions", + }, + ] + request, metadata = self._interceptor.pre_list_subscriptions( + request, metadata + ) + pb_request = pubsub.ListSubscriptionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.ListSubscriptionsResponse() + pb_resp = pubsub.ListSubscriptionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_subscriptions(resp) + return resp + + class _ModifyAckDeadline(SubscriberRestStub): + def __hash__(self): + return hash("ModifyAckDeadline") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ModifyAckDeadlineRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the modify ack deadline method over HTTP. + + Args: + request (~.pubsub.ModifyAckDeadlineRequest): + The request object. Request for the ModifyAckDeadline + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_modify_ack_deadline( + request, metadata + ) + pb_request = pubsub.ModifyAckDeadlineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _ModifyPushConfig(SubscriberRestStub): + def __hash__(self): + return hash("ModifyPushConfig") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.ModifyPushConfigRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ): + r"""Call the modify push config method over HTTP. + + Args: + request (~.pubsub.ModifyPushConfigRequest): + The request object. Request for the ModifyPushConfig + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_modify_push_config( + request, metadata + ) + pb_request = pubsub.ModifyPushConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _Pull(SubscriberRestStub): + def __hash__(self): + return hash("Pull") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.PullRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.PullResponse: + r"""Call the pull method over HTTP. + + Args: + request (~.pubsub.PullRequest): + The request object. Request for the ``Pull`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.PullResponse: + Response for the ``Pull`` method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:pull", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_pull(request, metadata) + pb_request = pubsub.PullRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.PullResponse() + pb_resp = pubsub.PullResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pull(resp) + return resp + + class _Seek(SubscriberRestStub): + def __hash__(self): + return hash("Seek") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.SeekRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.SeekResponse: + r"""Call the seek method over HTTP. + + Args: + request (~.pubsub.SeekRequest): + The request object. Request for the ``Seek`` method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.SeekResponse: + Response for the ``Seek`` method (this response is + empty). 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:seek", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_seek(request, metadata) + pb_request = pubsub.SeekRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.SeekResponse() + pb_resp = pubsub.SeekResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_seek(resp) + return resp + + class _StreamingPull(SubscriberRestStub): + def __hash__(self): + return hash("StreamingPull") + + def __call__( + self, + request: pubsub.StreamingPullRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamingPull is not available over REST transport" + ) + + class _UpdateSnapshot(SubscriberRestStub): + def __hash__(self): + return hash("UpdateSnapshot") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.UpdateSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Snapshot: + r"""Call the update snapshot method over HTTP. + + Args: + request (~.pubsub.UpdateSnapshotRequest): + The request object. Request for the UpdateSnapshot + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Snapshot: + A snapshot resource. Snapshots are used in + `Seek `__ + operations, which allow you to manage message + acknowledgments in bulk. 
That is, you can set the + acknowledgment state of messages in an existing + subscription to the state captured by a snapshot. + + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{snapshot.name=projects/*/snapshots/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_snapshot(request, metadata) + pb_request = pubsub.UpdateSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Snapshot() + pb_resp = pubsub.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_snapshot(resp) + return resp + + class _UpdateSubscription(SubscriberRestStub): + def __hash__(self): + return hash("UpdateSubscription") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + def __call__( + self, + request: pubsub.UpdateSubscriptionRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pubsub.Subscription: + r"""Call the update subscription method over HTTP. + + Args: + request (~.pubsub.UpdateSubscriptionRequest): + The request object. Request for the UpdateSubscription + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pubsub.Subscription: + A subscription resource. If none of ``push_config`` or + ``bigquery_config`` is set, then the subscriber will + pull and ack messages using API methods. At most one of + these fields may be set. 
+ + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{subscription.name=projects/*/subscriptions/*}", + "body": "*", + }, + ] + request, metadata = self._interceptor.pre_update_subscription( + request, metadata + ) + pb_request = pubsub.UpdateSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + including_default_value_fields=False, + use_integers_for_enums=True, + ) + ) + query_params.update(self._get_unset_required_fields(query_params)) + + query_params["$alt"] = "json;enum-encoding=int" + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = pubsub.Subscription() + pb_resp = pubsub.Subscription.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_subscription(resp) + return resp + + @property + def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Acknowledge(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_snapshot( + self, + ) -> Callable[[pubsub.CreateSnapshotRequest], pubsub.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_subscription( + self, + ) -> Callable[[pubsub.Subscription], pubsub.Subscription]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_snapshot( + self, + ) -> Callable[[pubsub.DeleteSnapshotRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_subscription( + self, + ) -> Callable[[pubsub.DeleteSubscriptionRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_subscription( + self, + ) -> Callable[[pubsub.GetSubscriptionRequest], pubsub.Subscription]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_snapshots( + self, + ) -> Callable[[pubsub.ListSnapshotsRequest], pubsub.ListSnapshotsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSnapshots(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_subscriptions( + self, + ) -> Callable[[pubsub.ListSubscriptionsRequest], pubsub.ListSubscriptionsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListSubscriptions(self._session, self._host, self._interceptor) # type: ignore + + @property + def modify_ack_deadline( + self, + ) -> Callable[[pubsub.ModifyAckDeadlineRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ModifyAckDeadline(self._session, self._host, self._interceptor) # type: ignore + + @property + def modify_push_config( + self, + ) -> Callable[[pubsub.ModifyPushConfigRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ModifyPushConfig(self._session, self._host, self._interceptor) # type: ignore + + @property + def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Pull(self._session, self._host, self._interceptor) # type: ignore + + @property + def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Seek(self._session, self._host, self._interceptor) # type: ignore + + @property + def streaming_pull( + self, + ) -> Callable[[pubsub.StreamingPullRequest], pubsub.StreamingPullResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamingPull(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_snapshot( + self, + ) -> Callable[[pubsub.UpdateSnapshotRequest], pubsub.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateSnapshot(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_subscription( + self, + ) -> Callable[[pubsub.UpdateSubscriptionRequest], pubsub.Subscription]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateSubscription(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy(self): + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _GetIamPolicy(SubscriberRestStub): + def __call__( + self, + request: iam_policy_pb2.GetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the get iam policy method over HTTP. + + Args: + request (iam_policy_pb2.GetIamPolicyRequest): + The request object for GetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from GetIamPolicy method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_get_iam_policy(resp) + return resp + + @property + def set_iam_policy(self): + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + class _SetIamPolicy(SubscriberRestStub): + def __call__( + self, + request: iam_policy_pb2.SetIamPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> policy_pb2.Policy: + + r"""Call the set iam policy method over HTTP. 
+ + Args: + request (iam_policy_pb2.SetIamPolicyRequest): + The request object for SetIamPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + policy_pb2.Policy: Response from SetIamPolicy method. + """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = policy_pb2.Policy() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_set_iam_policy(resp) + return resp + + @property + def test_iam_permissions(self): + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + class _TestIamPermissions(SubscriberRestStub): + def __call__( + self, + request: iam_policy_pb2.TestIamPermissionsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> iam_policy_pb2.TestIamPermissionsResponse: + + r"""Call the test iam permissions method over HTTP. + + Args: + request (iam_policy_pb2.TestIamPermissionsRequest): + The request object for TestIamPermissions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
+ """ + + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + body = json.loads(json.dumps(transcoded_request["body"])) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + + response = getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + resp = iam_policy_pb2.TestIamPermissionsResponse() + resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = self._interceptor.post_test_iam_permissions(resp) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("SubscriberRestTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index cee8cee4cd7e..df299cc8bf02 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -264,7 +264,9 @@ class PubsubMessage(proto.Message): will be delivered to subscribers in the order in which they are received by the Pub/Sub system. All ``PubsubMessage``\ s published in a given ``PublishRequest`` must specify the - same ``ordering_key`` value. + same ``ordering_key`` value. For more information, see + `ordering + messages `__. """ data: bytes = proto.Field( @@ -667,7 +669,9 @@ class Subscription(proto.Message): operations on the subscription. If ``expiration_policy`` is not set, a *default policy* with ``ttl`` of 31 days will be used. The minimum allowed value for - ``expiration_policy.ttl`` is 1 day. + ``expiration_policy.ttl`` is 1 day. If ``expiration_policy`` + is set, but ``expiration_policy.ttl`` is not set, the + subscription never expires. filter (str): An expression written in the Pub/Sub `filter language `__. 
diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index bb1265453268..9a216c48fcd9 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -55,12 +55,12 @@ def project(): yield default_project -@pytest.fixture(params=["grpc"]) +@pytest.fixture(params=["grpc", "rest"]) def publisher(request): yield pubsub_v1.PublisherClient(transport=request.param) -@pytest.fixture(params=["grpc"]) +@pytest.fixture(params=["grpc", "rest"]) def subscriber(request): yield pubsub_v1.SubscriberClient(transport=request.param) @@ -107,7 +107,8 @@ def test_publish_messages(publisher, topic_path_base, cleanup): assert isinstance(result, str) -def test_publish_large_messages(publisher, topic_path_base, cleanup): +def test_publish_large_messages(topic_path_base, cleanup): + publisher = pubsub_v1.PublisherClient(transport="grpc") # Customize topic path to test. topic_path = topic_path_base + "-publish-large-messages" # Make sure the topic gets deleted. @@ -139,8 +140,9 @@ def test_publish_large_messages(publisher, topic_path_base, cleanup): def test_subscribe_to_messages( - publisher, topic_path_base, subscriber, subscription_path_base, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") # Customize topic path to test. topic_path = topic_path_base + "-subscribe-to-messages" subscription_path = subscription_path_base + "-subscribe-to-messages" @@ -187,8 +189,9 @@ def test_subscribe_to_messages( def test_subscribe_to_messages_async_callbacks( - publisher, topic_path_base, subscriber, subscription_path_base, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") # Customize topic path to test. 
custom_str = "-subscribe-to-messages-async-callback" topic_path = topic_path_base + custom_str @@ -366,10 +369,10 @@ def test_listing_topic_subscriptions(publisher, subscriber, project, cleanup): assert subscriptions == {subscription_paths[0], subscription_paths[2]} -def test_managing_topic_iam_policy(publisher, topic_path_base, cleanup): +def test_managing_topic_iam_policy(topic_path_base, cleanup): + publisher = pubsub_v1.PublisherClient(transport="grpc") topic_path = topic_path_base + "-managing-topic-iam-policy" cleanup.append((publisher.delete_topic, (), {"topic": topic_path})) - # create a topic and customize its policy publisher.create_topic(name=topic_path) topic_policy = publisher.get_iam_policy(request={"resource": topic_path}) @@ -396,8 +399,9 @@ def test_managing_topic_iam_policy(publisher, topic_path_base, cleanup): def test_managing_subscription_iam_policy( - publisher, subscriber, topic_path_base, subscription_path_base, cleanup + publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") custom_str = "-managing-subscription-iam-policy" topic_path = topic_path_base + custom_str subscription_path = subscription_path_base + custom_str @@ -433,8 +437,9 @@ def test_managing_subscription_iam_policy( assert bindings[1].members == ["group:cloud-logs@google.com"] +@pytest.mark.parametrize("transport", ["grpc", "rest"]) def test_subscriber_not_leaking_open_sockets( - publisher, topic_path_base, subscription_path_base, cleanup + publisher, topic_path_base, subscription_path_base, cleanup, transport ): # Make sure the topic and the supscription get deleted. 
# NOTE: Since subscriber client will be closed in the test, we should not @@ -516,8 +521,9 @@ def test_synchronous_pull_no_deadline_error_if_no_messages( class TestStreamingPull(object): def test_streaming_pull_callback_error_propagation( - self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") custom_str = "-streaming-pull-callback-error-propagation" topic_path = topic_path_base + custom_str subscription_path = subscription_path_base + custom_str @@ -550,12 +556,12 @@ class CallbackError(Exception): def test_streaming_pull_ack_deadline( self, publisher, - subscriber, project, topic_path_base, subscription_path_base, cleanup, ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") custom_str = "-streaming-pull-ack-deadline" topic_path = topic_path_base + custom_str subscription_path = subscription_path_base + custom_str @@ -608,8 +614,9 @@ def test_streaming_pull_ack_deadline( subscription_future.cancel() def test_streaming_pull_max_messages( - self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") custom_str = "-streaming-pull-max-messages" topic_path = topic_path_base + custom_str subscription_path = subscription_path_base + custom_str @@ -668,8 +675,9 @@ def test_streaming_pull_max_messages( @typed_flaky def test_streaming_pull_blocking_shutdown( - self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") custom_str = "-streaming-pull-blocking-shutdown" topic_path = topic_path_base + custom_str subscription_path = subscription_path_base + custom_str @@ -753,8 +761,9 @@ def callback2(message): ) class 
TestBasicRBAC(object): def test_streaming_pull_subscriber_permissions_sufficient( - self, publisher, topic_path_base, subscriber, subscription_path_base, cleanup + self, publisher, topic_path_base, subscription_path_base, cleanup ): + subscriber = pubsub_v1.SubscriberClient(transport="grpc") custom_str = "-streaming-pull-subscriber-permissions-sufficient" topic_path = topic_path_base + custom_str subscription_path = subscription_path_base + custom_str diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 6badf82d6544..12d584e4944f 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -104,6 +104,7 @@ def test__get_default_mtls_endpoint(): [ (PublisherClient, "grpc"), (PublisherAsyncClient, "grpc_asyncio"), + (PublisherClient, "rest"), ], ) def test_publisher_client_from_service_account_info(client_class, transport_name): @@ -117,7 +118,11 @@ def test_publisher_client_from_service_account_info(client_class, transport_name assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) @pytest.mark.parametrize( @@ -125,6 +130,7 @@ def test_publisher_client_from_service_account_info(client_class, transport_name [ (transports.PublisherGrpcTransport, "grpc"), (transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.PublisherRestTransport, "rest"), ], ) def test_publisher_client_service_account_always_use_jwt( @@ -150,6 +156,7 @@ def test_publisher_client_service_account_always_use_jwt( [ (PublisherClient, "grpc"), (PublisherAsyncClient, "grpc_asyncio"), + (PublisherClient, "rest"), ], ) 
def test_publisher_client_from_service_account_file(client_class, transport_name): @@ -170,13 +177,18 @@ def test_publisher_client_from_service_account_file(client_class, transport_name assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) def test_publisher_client_get_transport_class(): transport = PublisherClient.get_transport_class() available_transports = [ transports.PublisherGrpcTransport, + transports.PublisherRestTransport, ] assert transport in available_transports @@ -193,6 +205,7 @@ def test_publisher_client_get_transport_class(): transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", ), + (PublisherClient, transports.PublisherRestTransport, "rest"), ], ) @mock.patch.object( @@ -334,6 +347,8 @@ def test_publisher_client_client_options(client_class, transport_class, transpor "grpc_asyncio", "false", ), + (PublisherClient, transports.PublisherRestTransport, "rest", "true"), + (PublisherClient, transports.PublisherRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -527,6 +542,7 @@ def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): transports.PublisherGrpcAsyncIOTransport, "grpc_asyncio", ), + (PublisherClient, transports.PublisherRestTransport, "rest"), ], ) def test_publisher_client_client_options_scopes( @@ -562,6 +578,7 @@ def test_publisher_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (PublisherClient, transports.PublisherRestTransport, "rest", None), ], ) def test_publisher_client_client_options_credentials_file( @@ -3318,180 +3335,2705 @@ async def test_detach_subscription_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.PublisherGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Topic, + dict, + ], +) +def test_create_topic_rest(request_type): + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.PublisherGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = PublisherClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + + +def test_create_topic_rest_required_fields(request_type=pubsub.Topic): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.PublisherGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_topic_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PublisherClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = PublisherClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + unset_fields = transport.create_topic._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # It is an error to provide scopes and a transport instance. 
- transport = transports.PublisherGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), ) - with pytest.raises(ValueError): - client = PublisherClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_create_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_create_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.Topic.pb(pubsub.Topic()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Topic.to_json(pubsub.Topic()) + + request = pubsub.Topic() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + + client.create_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.PublisherGrpcTransport( + +def test_create_topic_rest_bad_request( + transport: str = "rest", request_type=pubsub.Topic +): + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = PublisherClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/topics/sample2"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.PublisherGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_topic(request) + + +def test_create_topic_rest_flattened(): + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.PublisherGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_create_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_topic( + pubsub.Topic(), + name="name_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.PublisherGrpcTransport, - transports.PublisherGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +def test_create_topic_rest_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + pubsub.UpdateTopicRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = PublisherClient.get_transport_class(transport_name)( +def test_update_topic_rest(request_type): + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + # send a request that will satisfy transcoding + request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + + +def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicRequest): + transport_class = transports.PublisherRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_topic_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - assert isinstance( - client.transport, - transports.PublisherGrpcTransport, + + unset_fields = transport.update_topic._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "topic", + "updateMask", + ) + ) ) -def test_publisher_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.PublisherTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, 
"post_update_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_update_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.UpdateTopicRequest.pb(pubsub.UpdateTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Topic.to_json(pubsub.Topic()) + + request = pubsub.UpdateTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + + client.update_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_publisher_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.PublisherTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - "create_topic", - "update_topic", - "publish", - "get_topic", - "list_topics", - "list_topic_subscriptions", - "list_topic_snapshots", - "delete_topic", - "detach_subscription", - "set_iam_policy", - "get_iam_policy", - "test_iam_permissions", +def test_update_topic_rest_bad_request( + transport: str = "rest", request_type=pubsub.UpdateTopicRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # send a request that will satisfy transcoding + request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} + request = request_type(**request_init) - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_topic(request) -def test_publisher_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None +def test_update_topic_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": {"name": "projects/sample1/topics/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic.name=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_update_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_topic( + pubsub.UpdateTopicRequest(), + topic=pubsub.Topic(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_topic_rest_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PublishRequest, + dict, + ], +) +def test_publish_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.publish(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ["message_ids_value"] + + +def test_publish_rest_required_fields(request_type=pubsub.PublishRequest): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).publish._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).publish._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.PublishResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.publish(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_publish_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.publish._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "topic", + "messages", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_publish_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_publish" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.PublishRequest.pb(pubsub.PublishRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.PublishResponse.to_json( + pubsub.PublishResponse() + ) + + request = pubsub.PublishRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.PublishResponse() + + client.publish( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_publish_rest_bad_request( + transport: str = "rest", request_type=pubsub.PublishRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.publish(request) + + +def test_publish_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.PublishResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.publish(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}:publish" % client.transport._host, + args[1], + ) + + +def test_publish_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.publish( + pubsub.PublishRequest(), + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], + ) + + +def test_publish_rest_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetTopicRequest, + dict, + ], +) +def test_get_topic_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_topic(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + + +def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Topic() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_topic_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_topic._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("topic",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_get_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_get_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.GetTopicRequest.pb(pubsub.GetTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", 
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Topic.to_json(pubsub.Topic()) + + request = pubsub.GetTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + + client.get_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_topic_rest_bad_request( + transport: str = "rest", request_type=pubsub.GetTopicRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_topic(request) + + +def test_get_topic_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Topic() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_get_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_topic( + pubsub.GetTopicRequest(), + topic="topic_value", + ) + + +def test_get_topic_rest_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicsRequest, + dict, + ], +) +def test_list_topics_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topics(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_topics_rest_required_fields(request_type=pubsub.ListTopicsRequest): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topics._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topics._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_topics(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_topics_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_topics._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("project",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topics_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topics" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListTopicsRequest.pb(pubsub.ListTopicsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.ListTopicsResponse.to_json( + pubsub.ListTopicsResponse() + ) + + request = pubsub.ListTopicsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicsResponse() + + client.list_topics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topics_rest_bad_request( + transport: str = "rest", request_type=pubsub.ListTopicsRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_topics(request) + + +def test_list_topics_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_topics(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project=projects/*}/topics" % client.transport._host, args[1] + ) + + +def test_list_topics_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topics( + pubsub.ListTopicsRequest(), + project="project_value", + ) + + +def test_list_topics_rest_pager(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + pubsub.Topic(), + ], + next_page_token="abc", + ), + pubsub.ListTopicsResponse( + topics=[], + next_page_token="def", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicsResponse( + topics=[ + pubsub.Topic(), + pubsub.Topic(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListTopicsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "projects/sample1"} + + pager = client.list_topics(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Topic) for i in results) + + pages = list(client.list_topics(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSubscriptionsRequest, + dict, + ], +) +def test_list_topic_subscriptions_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topic_subscriptions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsPager) + assert response.subscriptions == ["subscriptions_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_subscriptions_rest_required_fields( + request_type=pubsub.ListTopicSubscriptionsRequest, +): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_subscriptions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_subscriptions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSubscriptionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_topic_subscriptions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_topic_subscriptions_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_topic_subscriptions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("topic",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topic_subscriptions_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_subscriptions" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topic_subscriptions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListTopicSubscriptionsRequest.pb( + pubsub.ListTopicSubscriptionsRequest() + ) + transcode.return_value = { + "method": "post", + 
"uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.ListTopicSubscriptionsResponse.to_json( + pubsub.ListTopicSubscriptionsResponse() + ) + + request = pubsub.ListTopicSubscriptionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicSubscriptionsResponse() + + client.list_topic_subscriptions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topic_subscriptions_rest_bad_request( + transport: str = "rest", request_type=pubsub.ListTopicSubscriptionsRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_topic_subscriptions(request) + + +def test_list_topic_subscriptions_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicSubscriptionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_topic_subscriptions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}/subscriptions" % client.transport._host, + args[1], + ) + + +def test_list_topic_subscriptions_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_subscriptions( + pubsub.ListTopicSubscriptionsRequest(), + topic="topic_value", + ) + + +def test_list_topic_subscriptions_rest_pager(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSubscriptionsResponse( + subscriptions=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + pubsub.ListTopicSubscriptionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"topic": "projects/sample1/topics/sample2"} + + pager = client.list_topic_subscriptions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + pages = list(client.list_topic_subscriptions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSnapshotsRequest, + dict, + ], +) +def test_list_topic_snapshots_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topic_snapshots(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsPager) + assert response.snapshots == ["snapshots_value"] + assert response.next_page_token == "next_page_token_value" + + +def test_list_topic_snapshots_rest_required_fields( + request_type=pubsub.ListTopicSnapshotsRequest, +): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_snapshots._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_topic_snapshots._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListTopicSnapshotsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_topic_snapshots(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_topic_snapshots_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_topic_snapshots._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("topic",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topic_snapshots_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_snapshots" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topic_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListTopicSnapshotsRequest.pb( + pubsub.ListTopicSnapshotsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, 
+ "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.ListTopicSnapshotsResponse.to_json( + pubsub.ListTopicSnapshotsResponse() + ) + + request = pubsub.ListTopicSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicSnapshotsResponse() + + client.list_topic_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topic_snapshots_rest_bad_request( + transport: str = "rest", request_type=pubsub.ListTopicSnapshotsRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_topic_snapshots(request) + + +def test_list_topic_snapshots_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicSnapshotsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_topic_snapshots(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}/snapshots" % client.transport._host, + args[1], + ) + + +def test_list_topic_snapshots_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_topic_snapshots( + pubsub.ListTopicSnapshotsRequest(), + topic="topic_value", + ) + + +def test_list_topic_snapshots_rest_pager(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + str(), + ], + next_page_token="abc", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + ], + next_page_token="ghi", + ), + pubsub.ListTopicSnapshotsResponse( + snapshots=[ + str(), + str(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListTopicSnapshotsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"topic": "projects/sample1/topics/sample2"} + + pager = client.list_topic_snapshots(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, str) for i in results) + + pages = list(client.list_topic_snapshots(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteTopicRequest, + dict, + ], +) +def test_delete_topic_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_topic(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_topic_rest_required_fields(request_type=pubsub.DeleteTopicRequest): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_topic._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_topic(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_topic_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_topic._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("topic",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "pre_delete_topic" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteTopicRequest.pb(pubsub.DeleteTopicRequest()) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = pubsub.DeleteTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_topic_rest_bad_request( + transport: str = "rest", request_type=pubsub.DeleteTopicRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_topic(request) + + +def test_delete_topic_rest_flattened(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"topic": "projects/sample1/topics/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + topic="topic_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_topic(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{topic=projects/*/topics/*}" % client.transport._host, args[1] + ) + + +def test_delete_topic_rest_flattened_error(transport: str = "rest"): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_topic( + pubsub.DeleteTopicRequest(), + topic="topic_value", + ) + + +def test_delete_topic_rest_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DetachSubscriptionRequest, + dict, + ], +) +def test_detach_subscription_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.DetachSubscriptionResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.DetachSubscriptionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detach_subscription(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +def test_detach_subscription_rest_required_fields( + request_type=pubsub.DetachSubscriptionRequest, +): + transport_class = transports.PublisherRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).detach_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned 
response. + return_value = pubsub.DetachSubscriptionResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.DetachSubscriptionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.detach_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_detach_subscription_rest_unset_required_fields(): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.detach_subscription._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_subscription_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as 
req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_detach_subscription" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_detach_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.DetachSubscriptionRequest.pb( + pubsub.DetachSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.DetachSubscriptionResponse.to_json( + pubsub.DetachSubscriptionResponse() + ) + + request = pubsub.DetachSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.DetachSubscriptionResponse() + + client.detach_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_detach_subscription_rest_bad_request( + transport: str = "rest", request_type=pubsub.DetachSubscriptionRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_subscription(request) + + +def test_detach_subscription_rest_error(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = PublisherClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = PublisherClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = PublisherClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.PublisherGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.PublisherGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + transports.PublisherRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = PublisherClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. 
+ client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.PublisherGrpcTransport, + ) + + +def test_publisher_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.PublisherTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_publisher_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.PublisherTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_topic", + "update_topic", + "publish", + "get_topic", + "list_topics", + "list_topic_subscriptions", + "list_topic_snapshots", + "delete_topic", + "detach_subscription", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_publisher_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.publisher.transports.PublisherTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) 
transport = transports.PublisherTransport( credentials_file="credentials.json", @@ -3562,6 +6104,7 @@ def test_publisher_transport_auth_adc(transport_class): [ transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport, + transports.PublisherRestTransport, ], ) def test_publisher_transport_auth_gdch_credentials(transport_class): @@ -3663,11 +6206,23 @@ def test_publisher_grpc_transport_client_cert_source_for_mtls(transport_class): ) +def test_publisher_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.PublisherRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_publisher_host_no_port(transport_name): @@ -3678,7 +6233,11 @@ def test_publisher_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) @pytest.mark.parametrize( @@ -3686,6 +6245,7 @@ def test_publisher_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_publisher_host_with_port(transport_name): @@ -3696,7 +6256,57 @@ def test_publisher_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("pubsub.googleapis.com:8000") + assert client.transport._host == ( + "pubsub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_publisher_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = PublisherClient( + credentials=creds1, + transport=transport_name, + ) + client2 = PublisherClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_topic._session + session2 = client2.transport.create_topic._session + assert session1 != session2 + session1 = client1.transport.update_topic._session + session2 = client2.transport.update_topic._session + assert session1 != session2 + session1 = client1.transport.publish._session + session2 = client2.transport.publish._session + assert session1 != session2 + session1 = client1.transport.get_topic._session + session2 = client2.transport.get_topic._session + assert session1 != session2 + session1 = client1.transport.list_topics._session + session2 = client2.transport.list_topics._session + assert session1 != session2 + session1 = client1.transport.list_topic_subscriptions._session + session2 = client2.transport.list_topic_subscriptions._session + assert session1 != session2 + session1 = client1.transport.list_topic_snapshots._session + session2 = client2.transport.list_topic_snapshots._session + assert session1 != session2 + session1 = client1.transport.delete_topic._session + session2 = client2.transport.delete_topic._session + assert session1 != session2 + session1 = client1.transport.detach_subscription._session + session2 = client2.transport.detach_subscription._session + assert session1 != session2 def test_publisher_grpc_transport_channel(): @@ -4030,6 +6640,180 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = 
json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + def test_set_iam_policy(transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4540,6 +7324,7 @@ async def test_test_iam_permissions_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -4557,6 +7342,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 54b8d8ac3d6a..132136a38a13 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -107,6 +107,7 @@ def test__get_default_mtls_endpoint(): [ (SchemaServiceClient, "grpc"), (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), ], ) def test_schema_service_client_from_service_account_info(client_class, transport_name): @@ -120,7 +121,11 @@ def test_schema_service_client_from_service_account_info(client_class, transport assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) @pytest.mark.parametrize( @@ -128,6 +133,7 @@ def test_schema_service_client_from_service_account_info(client_class, transport [ (transports.SchemaServiceGrpcTransport, "grpc"), (transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SchemaServiceRestTransport, "rest"), ], ) def test_schema_service_client_service_account_always_use_jwt( @@ -153,6 +159,7 @@ def 
test_schema_service_client_service_account_always_use_jwt( [ (SchemaServiceClient, "grpc"), (SchemaServiceAsyncClient, "grpc_asyncio"), + (SchemaServiceClient, "rest"), ], ) def test_schema_service_client_from_service_account_file(client_class, transport_name): @@ -173,13 +180,18 @@ def test_schema_service_client_from_service_account_file(client_class, transport assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) def test_schema_service_client_get_transport_class(): transport = SchemaServiceClient.get_transport_class() available_transports = [ transports.SchemaServiceGrpcTransport, + transports.SchemaServiceRestTransport, ] assert transport in available_transports @@ -196,6 +208,7 @@ def test_schema_service_client_get_transport_class(): transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), ], ) @mock.patch.object( @@ -341,6 +354,8 @@ def test_schema_service_client_client_options( "grpc_asyncio", "false", ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "true"), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -540,6 +555,7 @@ def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): transports.SchemaServiceGrpcAsyncIOTransport, "grpc_asyncio", ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), ], ) def test_schema_service_client_client_options_scopes( @@ -580,6 +596,7 @@ def test_schema_service_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest", None), ], ) def 
test_schema_service_client_client_options_credentials_file( @@ -3423,202 +3440,3008 @@ async def test_validate_message_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SchemaServiceGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CreateSchemaRequest, + dict, + ], +) +def test_create_schema_rest(request_type): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["schema"] = { + "name": "name_value", + "type_": 1, + "definition": "definition_value", + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. 
- transport = transports.SchemaServiceGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("schema_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + unset_fields = transport.create_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("schemaId",)) + & set( + ( + "parent", + "schema", + ) ) + ) - # It is an error to provide scopes and a transport instance. 
- transport = transports.SchemaServiceGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_create_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gp_schema.CreateSchemaRequest.pb(gp_schema.CreateSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gp_schema.Schema.to_json(gp_schema.Schema()) + + request = gp_schema.CreateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.Schema() + + client.create_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.SchemaServiceGrpcTransport( + +def test_create_schema_rest_bad_request( + transport: str = "rest", request_type=gp_schema.CreateSchemaRequest +): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = SchemaServiceClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["schema"] = { + "name": "name_value", + "type_": 1, + "definition": "definition_value", + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + } + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SchemaServiceGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_schema(request) + + +def test_create_schema_rest_flattened(): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.SchemaServiceGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gp_schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/schemas" % client.transport._host, args[1] + ) + + +def test_create_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_schema( + gp_schema.CreateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + schema_id="schema_id_value", + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.SchemaServiceGrpcTransport, - transports.SchemaServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_create_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + schema.GetSchemaRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = SchemaServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_get_schema_rest(request_type): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SchemaServiceGrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) -def test_schema_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SchemaServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_schema_service_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.SchemaServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_schema(request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_schema", - "get_schema", - "list_schemas", - "list_schema_revisions", - "commit_schema", - "rollback_schema", - "delete_schema_revision", - "delete_schema", - "validate_schema", - "validate_message", - "set_iam_policy", - "get_iam_policy", - "test_iam_permissions", + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_get_schema_rest_required_fields(request_type=schema.GetSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # verify fields with default values are dropped - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_schema_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SchemaServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - 
default_scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/pubsub", - ), - quota_project_id="octopus", - ) + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_schema._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("view",)) + jsonified_request.update(unset_fields) -def test_schema_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" - ) as Transport: + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(("view",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_get_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.GetSchemaRequest.pb(schema.GetSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + 
req.return_value.request = PreparedRequest() + req.return_value._content = schema.Schema.to_json(schema.Schema()) + + request = schema.GetSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.get_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_schema_rest_bad_request( + transport: str = "rest", request_type=schema.GetSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_schema(request) + + +def test_get_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}" % client.transport._host, args[1] + ) + + +def test_get_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_schema( + schema.GetSchemaRequest(), + name="name_value", + ) + + +def test_get_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemasRequest, + dict, + ], +) +def test_list_schemas_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_schemas(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schemas_rest_required_fields(request_type=schema.ListSchemasRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schemas._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemasResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_schemas(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_schemas_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_schemas._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "view", + ) + ) + & set(("parent",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schemas_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schemas" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.ListSchemasRequest.pb(schema.ListSchemasRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + 
+ req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema.ListSchemasResponse.to_json( + schema.ListSchemasResponse() + ) + + request = schema.ListSchemasRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ListSchemasResponse() + + client.list_schemas( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_schemas_rest_bad_request( + transport: str = "rest", request_type=schema.ListSchemasRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_schemas(request) + + +def test_list_schemas_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schema.ListSchemasResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_schemas(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/schemas" % client.transport._host, args[1] + ) + + +def test_list_schemas_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schemas( + schema.ListSchemasRequest(), + parent="parent_value", + ) + + +def test_list_schemas_rest_pager(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemasResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemasResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(schema.ListSchemasResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1"} + + pager = client.list_schemas(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + pages = list(client.list_schemas(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemaRevisionsRequest, + dict, + ], +) +def test_list_schema_revisions_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_schema_revisions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSchemaRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_schema_revisions_rest_required_fields( + request_type=schema.ListSchemaRevisionsRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schema_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_schema_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + "view", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemaRevisionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_schema_revisions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_schema_revisions_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_schema_revisions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + "view", + ) + ) + & set(("name",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schema_revisions_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schema_revisions" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schema_revisions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.ListSchemaRevisionsRequest.pb( + schema.ListSchemaRevisionsRequest() + ) + transcode.return_value = { + "method": 
"post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema.ListSchemaRevisionsResponse.to_json( + schema.ListSchemaRevisionsResponse() + ) + + request = schema.ListSchemaRevisionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ListSchemaRevisionsResponse() + + client.list_schema_revisions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_schema_revisions_rest_bad_request( + transport: str = "rest", request_type=schema.ListSchemaRevisionsRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_schema_revisions(request) + + +def test_list_schema_revisions_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schema.ListSchemaRevisionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_schema_revisions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:listRevisions" % client.transport._host, + args[1], + ) + + +def test_list_schema_revisions_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_schema_revisions( + schema.ListSchemaRevisionsRequest(), + name="name_value", + ) + + +def test_list_schema_revisions_rest_pager(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + schema.Schema(), + ], + next_page_token="abc", + ), + schema.ListSchemaRevisionsResponse( + schemas=[], + next_page_token="def", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + ], + next_page_token="ghi", + ), + schema.ListSchemaRevisionsResponse( + schemas=[ + schema.Schema(), + schema.Schema(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + schema.ListSchemaRevisionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"name": "projects/sample1/schemas/sample2"} + + pager = client.list_schema_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, schema.Schema) for i in results) + + pages = list(client.list_schema_revisions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CommitSchemaRequest, + dict, + ], +) +def test_commit_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.commit_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_commit_schema_rest_required_fields(request_type=gp_schema.CommitSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).commit_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).commit_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + 
# verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.commit_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_commit_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.commit_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "schema", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def 
test_commit_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_commit_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_commit_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gp_schema.CommitSchemaRequest.pb(gp_schema.CommitSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gp_schema.Schema.to_json(gp_schema.Schema()) + + request = gp_schema.CommitSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.Schema() + + client.commit_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_commit_schema_rest_bad_request( + transport: str = "rest", request_type=gp_schema.CommitSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.commit_schema(request) + + +def test_commit_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.commit_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:commit" % client.transport._host, args[1] + ) + + +def test_commit_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.commit_schema( + gp_schema.CommitSchemaRequest(), + name="name_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +def test_commit_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.RollbackSchemaRequest, + dict, + ], +) +def test_rollback_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rollback_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_rollback_schema_rest_required_fields( + request_type=schema.RollbackSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request_init["revision_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["revisionId"] = "revision_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).rollback_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "revisionId" in jsonified_request + assert jsonified_request["revisionId"] == "revision_id_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.rollback_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_rollback_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.rollback_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "revisionId", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, 
mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_rollback_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_rollback_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.RollbackSchemaRequest.pb(schema.RollbackSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema.Schema.to_json(schema.Schema()) + + request = schema.RollbackSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.rollback_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_schema_rest_bad_request( + transport: str = "rest", request_type=schema.RollbackSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.rollback_schema(request) + + +def test_rollback_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + revision_id="revision_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.rollback_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:rollback" % client.transport._host, + args[1], + ) + + +def test_rollback_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.rollback_schema( + schema.RollbackSchemaRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +def test_rollback_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRevisionRequest, + dict, + ], +) +def test_delete_schema_revision_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_schema_revision(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +def test_delete_schema_revision_rest_required_fields( + request_type=schema.DeleteSchemaRevisionRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema_revision._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("revision_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_schema_revision(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_schema_revision_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_schema_revision._get_unset_required_fields({}) + assert set(unset_fields) == (set(("revisionId",)) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_revision_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + 
transports.SchemaServiceRestInterceptor, "post_delete_schema_revision" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema_revision" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.DeleteSchemaRevisionRequest.pb( + schema.DeleteSchemaRevisionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema.Schema.to_json(schema.Schema()) + + request = schema.DeleteSchemaRevisionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.delete_schema_revision( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_schema_revision_rest_bad_request( + transport: str = "rest", request_type=schema.DeleteSchemaRevisionRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_schema_revision(request) + + +def test_delete_schema_revision_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + revision_id="revision_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_schema_revision(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}:deleteRevision" % client.transport._host, + args[1], + ) + + +def test_delete_schema_revision_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_schema_revision( + schema.DeleteSchemaRevisionRequest(), + name="name_value", + revision_id="revision_id_value", + ) + + +def test_delete_schema_revision_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_schema(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_schema_rest_required_fields(request_type=schema.DeleteSchemaRequest): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_schema._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema" + ) as pre: + pre.assert_not_called() + pb_message = schema.DeleteSchemaRequest.pb(schema.DeleteSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = schema.DeleteSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_schema_rest_bad_request( + transport: str = "rest", request_type=schema.DeleteSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_schema(request) + + +def test_delete_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/schemas/*}" % client.transport._host, args[1] + ) + + +def test_delete_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_schema( + schema.DeleteSchemaRequest(), + name="name_value", + ) + + +def test_delete_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.ValidateSchemaRequest, + dict, + ], +) +def test_validate_schema_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.ValidateSchemaResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.validate_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +def test_validate_schema_rest_required_fields( + request_type=gp_schema.ValidateSchemaRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_schema._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gp_schema.ValidateSchemaResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.validate_schema(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_validate_schema_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.validate_schema._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "schema", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_validate_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gp_schema.ValidateSchemaRequest.pb( + gp_schema.ValidateSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = gp_schema.ValidateSchemaResponse.to_json( + gp_schema.ValidateSchemaResponse() + ) + + request = gp_schema.ValidateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.ValidateSchemaResponse() + + client.validate_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_validate_schema_rest_bad_request( + transport: str = "rest", request_type=gp_schema.ValidateSchemaRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate_schema(request) + + +def test_validate_schema_rest_flattened(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gp_schema.ValidateSchemaResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.validate_schema(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*}/schemas:validate" % client.transport._host, + args[1], + ) + + +def test_validate_schema_rest_flattened_error(transport: str = "rest"): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.validate_schema( + gp_schema.ValidateSchemaRequest(), + parent="parent_value", + schema=gp_schema.Schema(name="name_value"), + ) + + +def test_validate_schema_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ValidateMessageRequest, + dict, + ], +) +def test_validate_message_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ValidateMessageResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.validate_message(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.ValidateMessageResponse) + + +def test_validate_message_rest_required_fields( + request_type=schema.ValidateMessageRequest, +): + transport_class = transports.SchemaServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).validate_message._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = schema.ValidateMessageResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.validate_message(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_validate_message_rest_unset_required_fields(): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.validate_message._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_message_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_message" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_validate_message" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.ValidateMessageRequest.pb(schema.ValidateMessageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = schema.ValidateMessageResponse.to_json( + schema.ValidateMessageResponse() + ) + + request = schema.ValidateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ValidateMessageResponse() + + client.validate_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_validate_message_rest_bad_request( + transport: str = "rest", request_type=schema.ValidateMessageRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate_message(request) + + +def test_validate_message_rest_error(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SchemaServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SchemaServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SchemaServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SchemaServiceGrpcTransport, + ) + + +def test_schema_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_schema_service_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SchemaServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_schema", + "get_schema", + "list_schemas", + "list_schema_revisions", + "commit_schema", + "rollback_schema", + "delete_schema_revision", + "delete_schema", + "validate_schema", + "validate_message", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_schema_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.SchemaServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/pubsub", + ), + quota_project_id="octopus", + ) + + +def test_schema_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( + "google.pubsub_v1.services.schema_service.transports.SchemaServiceTransport._prep_wrapped_messages" + ) as Transport: Transport.return_value = None adc.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.SchemaServiceTransport() @@ -3668,6 +6491,7 @@ def test_schema_service_transport_auth_adc(transport_class): [ transports.SchemaServiceGrpcTransport, transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, ], ) def test_schema_service_transport_auth_gdch_credentials(transport_class): @@ -3772,11 +6596,23 @@ def test_schema_service_grpc_transport_client_cert_source_for_mtls(transport_cla ) +def test_schema_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SchemaServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + @pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_schema_service_host_no_port(transport_name): @@ -3787,7 +6623,11 @@ def test_schema_service_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) @pytest.mark.parametrize( @@ -3795,6 +6635,7 @@ def test_schema_service_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_schema_service_host_with_port(transport_name): @@ -3805,7 +6646,60 @@ def test_schema_service_host_with_port(transport_name): ), transport=transport_name, ) - assert 
client.transport._host == ("pubsub.googleapis.com:8000") + assert client.transport._host == ( + "pubsub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_schema_service_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SchemaServiceClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SchemaServiceClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_schema._session + session2 = client2.transport.create_schema._session + assert session1 != session2 + session1 = client1.transport.get_schema._session + session2 = client2.transport.get_schema._session + assert session1 != session2 + session1 = client1.transport.list_schemas._session + session2 = client2.transport.list_schemas._session + assert session1 != session2 + session1 = client1.transport.list_schema_revisions._session + session2 = client2.transport.list_schema_revisions._session + assert session1 != session2 + session1 = client1.transport.commit_schema._session + session2 = client2.transport.commit_schema._session + assert session1 != session2 + session1 = client1.transport.rollback_schema._session + session2 = client2.transport.rollback_schema._session + assert session1 != session2 + session1 = client1.transport.delete_schema_revision._session + session2 = client2.transport.delete_schema_revision._session + assert session1 != session2 + session1 = client1.transport.delete_schema._session + session2 = client2.transport.delete_schema._session + assert session1 != session2 + session1 = client1.transport.validate_schema._session + session2 = client2.transport.validate_schema._session + assert session1 != session2 + session1 = client1.transport.validate_message._session + session2 = 
client2.transport.validate_message._session + assert session1 != session2 def test_schema_service_grpc_transport_channel(): @@ -4099,6 +6993,180 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + def test_set_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -4609,6 +7677,7 @@ async def test_test_iam_permissions_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -4626,6 +7695,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 3be5857f3175..0a8ebfc9944d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -106,6 +106,7 @@ def test__get_default_mtls_endpoint(): [ (SubscriberClient, "grpc"), (SubscriberAsyncClient, "grpc_asyncio"), + (SubscriberClient, "rest"), ], ) def test_subscriber_client_from_service_account_info(client_class, transport_name): @@ -119,7 +120,11 @@ def test_subscriber_client_from_service_account_info(client_class, transport_nam assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) @pytest.mark.parametrize( @@ -127,6 +132,7 @@ def test_subscriber_client_from_service_account_info(client_class, transport_nam [ (transports.SubscriberGrpcTransport, "grpc"), (transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.SubscriberRestTransport, "rest"), ], ) def test_subscriber_client_service_account_always_use_jwt( @@ -152,6 +158,7 @@ def test_subscriber_client_service_account_always_use_jwt( [ (SubscriberClient, "grpc"), 
(SubscriberAsyncClient, "grpc_asyncio"), + (SubscriberClient, "rest"), ], ) def test_subscriber_client_from_service_account_file(client_class, transport_name): @@ -172,13 +179,18 @@ def test_subscriber_client_from_service_account_file(client_class, transport_nam assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) def test_subscriber_client_get_transport_class(): transport = SubscriberClient.get_transport_class() available_transports = [ transports.SubscriberGrpcTransport, + transports.SubscriberRestTransport, ] assert transport in available_transports @@ -195,6 +207,7 @@ def test_subscriber_client_get_transport_class(): transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", ), + (SubscriberClient, transports.SubscriberRestTransport, "rest"), ], ) @mock.patch.object( @@ -338,6 +351,8 @@ def test_subscriber_client_client_options( "grpc_asyncio", "false", ), + (SubscriberClient, transports.SubscriberRestTransport, "rest", "true"), + (SubscriberClient, transports.SubscriberRestTransport, "rest", "false"), ], ) @mock.patch.object( @@ -531,6 +546,7 @@ def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): transports.SubscriberGrpcAsyncIOTransport, "grpc_asyncio", ), + (SubscriberClient, transports.SubscriberRestTransport, "rest"), ], ) def test_subscriber_client_client_options_scopes( @@ -566,6 +582,7 @@ def test_subscriber_client_client_options_scopes( "grpc_asyncio", grpc_helpers_async, ), + (SubscriberClient, transports.SubscriberRestTransport, "rest", None), ], ) def test_subscriber_client_client_options_credentials_file( @@ -4740,177 +4757,4329 @@ async def test_seek_field_headers_async(): ) in kw["metadata"] -def test_credentials_transport_error(): - # It is an error to provide 
credentials and a transport instance. - transport = transports.SubscriberGrpcTransport( +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Subscription, + dict, + ], +) +def test_create_subscription_rest(request_type): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - with pytest.raises(ValueError): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SubscriberClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_create_subscription_rest_required_fields(request_type=pubsub.Subscription): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["name"] = "" + request_init["topic"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, ) + ) - # It is an error to provide an api_key and a transport instance. - transport = transports.SubscriberGrpcTransport( + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["topic"] = "topic_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + assert "topic" in jsonified_request + assert jsonified_request["topic"] == "topic_value" + + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + 
) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SubscriberClient( - client_options=options, - transport=transport, - ) - # It is an error to provide an api_key and a credential. 
- options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = SubscriberClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + unset_fields = transport.create_subscription._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "topic", + ) ) + ) - # It is an error to provide scopes and a transport instance. - transport = transports.SubscriberGrpcTransport( + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), ) - with pytest.raises(ValueError): - client = SubscriberClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_create_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.Subscription.pb(pubsub.Subscription()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Subscription.to_json(pubsub.Subscription()) + + request = pubsub.Subscription() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + + client.create_subscription( + request, + metadata=[ + ("key", 
"val"), + ("cephalopod", "squid"), + ], ) + pre.assert_called_once() + post.assert_called_once() -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.SubscriberGrpcTransport( + +def test_create_subscription_rest_bad_request( + transport: str = "rest", request_type=pubsub.Subscription +): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - client = SubscriberClient(transport=transport) - assert client.transport is transport + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.SubscriberGrpcTransport( + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_subscription(request) + + +def test_create_subscription_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - channel = transport.grpc_channel - assert channel - transport = transports.SubscriberGrpcAsyncIOTransport( + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Subscription() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/subscriptions/*}" % client.transport._host, args[1] + ) + + +def test_create_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - channel = transport.grpc_channel - assert channel + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_subscription( + pubsub.Subscription(), + name="name_value", + topic="topic_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ack_deadline_seconds=2066, + ) -@pytest.mark.parametrize( - "transport_class", - [ - transports.SubscriberGrpcTransport, - transports.SubscriberGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + +def test_create_subscription_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) @pytest.mark.parametrize( - "transport_name", + "request_type", [ - "grpc", + pubsub.GetSubscriptionRequest, + dict, ], ) -def test_transport_kind(transport_name): - transport = SubscriberClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. +def test_get_subscription_rest(request_type): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.SubscriberGrpcTransport, + transport="rest", ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) -def test_subscriber_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.SubscriberTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, ) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) -def test_subscriber_base_transport(): - # Instantiate the base transport. - with mock.patch( - "google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.SubscriberTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_subscription(request) - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_subscription", - "get_subscription", - "update_subscription", - "list_subscriptions", - "delete_subscription", - "modify_ack_deadline", - "acknowledge", - "pull", - "streaming_pull", - "modify_push_config", - "get_snapshot", - "list_snapshots", - "create_snapshot", - "update_snapshot", - "delete_snapshot", - "seek", - "set_iam_policy", - "get_iam_policy", - "test_iam_permissions", + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_get_subscription_rest_required_fields( + request_type=pubsub.GetSubscriptionRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - with pytest.raises(NotImplementedError): - transport.close() + # verify fields with default values are dropped - # Catch all for all remaining methods and properties - remainder = [ - "kind", - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == 
"subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_subscription._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_get_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.GetSubscriptionRequest.pb(pubsub.GetSubscriptionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Subscription.to_json(pubsub.Subscription()) + + request = pubsub.GetSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + + client.get_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_subscription_rest_bad_request( + transport: str = "rest", request_type=pubsub.GetSubscriptionRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_subscription(request) + + +def test_get_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}" % client.transport._host, + args[1], + ) + + +def test_get_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_subscription( + pubsub.GetSubscriptionRequest(), + subscription="subscription_value", + ) + + +def test_get_subscription_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSubscriptionRequest, + dict, + ], +) +def test_update_subscription_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +def test_update_subscription_rest_required_fields( + request_type=pubsub.UpdateSubscriptionRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_subscription._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as 
transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_update_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.UpdateSubscriptionRequest.pb( + pubsub.UpdateSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Subscription.to_json(pubsub.Subscription()) + + request = pubsub.UpdateSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + + client.update_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_subscription_rest_bad_request( + transport: str = "rest", request_type=pubsub.UpdateSubscriptionRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_subscription(request) + + +def test_update_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() + + # get arguments that satisfy an http rule for this method + sample_request = { + "subscription": {"name": "projects/sample1/subscriptions/sample2"} + } + + # get truthy value for each flattened field + mock_args = dict( + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription.name=projects/*/subscriptions/*}" + % client.transport._host, + args[1], + ) + + +def test_update_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_subscription( + pubsub.UpdateSubscriptionRequest(), + subscription=pubsub.Subscription(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_subscription_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSubscriptionsRequest, + dict, + ], +) +def test_list_subscriptions_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_subscriptions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_subscriptions_rest_required_fields( + request_type=pubsub.ListSubscriptionsRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_subscriptions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_subscriptions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSubscriptionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_subscriptions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_subscriptions_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_subscriptions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("project",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_subscriptions_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_subscriptions" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_list_subscriptions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListSubscriptionsRequest.pb( + pubsub.ListSubscriptionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.ListSubscriptionsResponse.to_json( + pubsub.ListSubscriptionsResponse() + ) + + request = pubsub.ListSubscriptionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListSubscriptionsResponse() + + client.list_subscriptions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_subscriptions_rest_bad_request( + transport: str = "rest", request_type=pubsub.ListSubscriptionsRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_subscriptions(request) + + +def test_list_subscriptions_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListSubscriptionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_subscriptions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project=projects/*}/subscriptions" % client.transport._host, args[1] + ) + + +def test_list_subscriptions_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_subscriptions( + pubsub.ListSubscriptionsRequest(), + project="project_value", + ) + + +def test_list_subscriptions_rest_pager(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + pubsub.Subscription(), + ], + next_page_token="abc", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[], + next_page_token="def", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + ], + next_page_token="ghi", + ), + pubsub.ListSubscriptionsResponse( + subscriptions=[ + pubsub.Subscription(), + pubsub.Subscription(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListSubscriptionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "projects/sample1"} + + pager = client.list_subscriptions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Subscription) for i in results) + + pages = list(client.list_subscriptions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSubscriptionRequest, + dict, + ], +) +def test_delete_subscription_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_subscription(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_subscription_rest_required_fields( + request_type=pubsub.DeleteSubscriptionRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_subscription._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_subscription(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_subscription_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_subscription._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, 
"pre_delete_subscription" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteSubscriptionRequest.pb( + pubsub.DeleteSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = pubsub.DeleteSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_subscription_rest_bad_request( + transport: str = "rest", request_type=pubsub.DeleteSubscriptionRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_subscription(request) + + +def test_delete_subscription_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_subscription(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}" % client.transport._host, + args[1], + ) + + +def test_delete_subscription_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_subscription( + pubsub.DeleteSubscriptionRequest(), + subscription="subscription_value", + ) + + +def test_delete_subscription_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyAckDeadlineRequest, + dict, + ], +) +def test_modify_ack_deadline_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.modify_ack_deadline(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_modify_ack_deadline_rest_required_fields( + request_type=pubsub.ModifyAckDeadlineRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request_init["ack_ids"] = "" + request_init["ack_deadline_seconds"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_ack_deadline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + jsonified_request["ackIds"] = "ack_ids_value" + jsonified_request["ackDeadlineSeconds"] = 2066 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_ack_deadline._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + assert "ackIds" in jsonified_request + assert jsonified_request["ackIds"] 
== "ack_ids_value" + assert "ackDeadlineSeconds" in jsonified_request + assert jsonified_request["ackDeadlineSeconds"] == 2066 + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.modify_ack_deadline(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_modify_ack_deadline_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.modify_ack_deadline._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "ackIds", + "ackDeadlineSeconds", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_modify_ack_deadline_rest_interceptors(null_interceptor): + transport = 
transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_modify_ack_deadline" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.ModifyAckDeadlineRequest.pb( + pubsub.ModifyAckDeadlineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = pubsub.ModifyAckDeadlineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.modify_ack_deadline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_modify_ack_deadline_rest_bad_request( + transport: str = "rest", request_type=pubsub.ModifyAckDeadlineRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.modify_ack_deadline(request) + + +def test_modify_ack_deadline_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.modify_ack_deadline(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline" + % client.transport._host, + args[1], + ) + + +def test_modify_ack_deadline_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.modify_ack_deadline( + pubsub.ModifyAckDeadlineRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ack_deadline_seconds=2066, + ) + + +def test_modify_ack_deadline_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.AcknowledgeRequest, + dict, + ], +) +def test_acknowledge_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.acknowledge(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_acknowledge_rest_required_fields(request_type=pubsub.AcknowledgeRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request_init["ack_ids"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).acknowledge._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + jsonified_request["ackIds"] = "ack_ids_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).acknowledge._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + assert "ackIds" in jsonified_request + assert jsonified_request["ackIds"] == "ack_ids_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.acknowledge(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_acknowledge_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.acknowledge._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "ackIds", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_acknowledge_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_acknowledge" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.AcknowledgeRequest.pb(pubsub.AcknowledgeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + 
req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = pubsub.AcknowledgeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.acknowledge( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_acknowledge_rest_bad_request( + transport: str = "rest", request_type=pubsub.AcknowledgeRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.acknowledge(request) + + +def test_acknowledge_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.acknowledge(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:acknowledge" + % client.transport._host, + args[1], + ) + + +def test_acknowledge_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.acknowledge( + pubsub.AcknowledgeRequest(), + subscription="subscription_value", + ack_ids=["ack_ids_value"], + ) + + +def test_acknowledge_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PullRequest, + dict, + ], +) +def test_pull_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pull(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PullResponse) + + +def test_pull_rest_required_fields(request_type=pubsub.PullRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request_init["max_messages"] = 0 + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pull._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + jsonified_request["maxMessages"] = 1277 + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).pull._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + assert "maxMessages" in jsonified_request + assert jsonified_request["maxMessages"] == 1277 + + client = 
SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.pull(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_pull_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.pull._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "maxMessages", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pull_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if 
null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_pull" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_pull" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.PullRequest.pb(pubsub.PullRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.PullResponse.to_json(pubsub.PullResponse()) + + request = pubsub.PullRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.PullResponse() + + client.pull( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_pull_rest_bad_request( + transport: str = "rest", request_type=pubsub.PullRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.pull(request) + + +def test_pull_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.pull(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:pull" + % client.transport._host, + args[1], + ) + + +def test_pull_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.pull( + pubsub.PullRequest(), + subscription="subscription_value", + return_immediately=True, + max_messages=1277, + ) + + +def test_pull_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_streaming_pull_rest_no_http_options(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = pubsub.StreamingPullRequest() + requests = [request] + with pytest.raises(RuntimeError): + client.streaming_pull(requests) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyPushConfigRequest, + dict, + ], +) +def test_modify_push_config_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.modify_push_config(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_modify_push_config_rest_required_fields( + request_type=pubsub.ModifyPushConfigRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_push_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).modify_push_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.modify_push_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_modify_push_config_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.modify_push_config._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "subscription", + "pushConfig", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_modify_push_config_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_modify_push_config" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.ModifyPushConfigRequest.pb(pubsub.ModifyPushConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = pubsub.ModifyPushConfigRequest() + metadata = 
[ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.modify_push_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_modify_push_config_rest_bad_request( + transport: str = "rest", request_type=pubsub.ModifyPushConfigRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.modify_push_config(request) + + +def test_modify_push_config_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.modify_push_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig" + % client.transport._host, + args[1], + ) + + +def test_modify_push_config_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.modify_push_config( + pubsub.ModifyPushConfigRequest(), + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), + ) + + +def test_modify_push_config_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +def test_get_snapshot_rest_required_fields(request_type=pubsub.GetSnapshotRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("snapshot",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_get_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.GetSnapshotRequest.pb(pubsub.GetSnapshotRequest()) + transcode.return_value = { 
+ "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Snapshot.to_json(pubsub.Snapshot()) + + request = pubsub.GetSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + + client.get_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_snapshot_rest_bad_request( + transport: str = "rest", request_type=pubsub.GetSnapshotRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_snapshot(request) + + +def test_get_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": "projects/sample1/snapshots/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.get_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot=projects/*/snapshots/*}" % client.transport._host, args[1] + ) + + +def test_get_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_snapshot( + pubsub.GetSnapshotRequest(), + snapshot="snapshot_value", + ) + + +def test_get_snapshot_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_snapshots_rest_required_fields(request_type=pubsub.ListSnapshotsRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_snapshots._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_snapshots._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSnapshotsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.list_snapshots(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_snapshots_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_snapshots._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("project",)) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_snapshots_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_snapshots" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_list_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListSnapshotsRequest.pb(pubsub.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.ListSnapshotsResponse.to_json( + pubsub.ListSnapshotsResponse() + ) + + request = pubsub.ListSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListSnapshotsResponse() + + client.list_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_snapshots_rest_bad_request( + transport: str = "rest", request_type=pubsub.ListSnapshotsRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_snapshots(request) + + +def test_list_snapshots_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListSnapshotsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "projects/sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.list_snapshots(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{project=projects/*}/snapshots" % client.transport._host, args[1] + ) + + +def test_list_snapshots_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_snapshots( + pubsub.ListSnapshotsRequest(), + project="project_value", + ) + + +def test_list_snapshots_rest_pager(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + next_page_token="abc", + ), + pubsub.ListSnapshotsResponse( + snapshots=[], + next_page_token="def", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + ], + next_page_token="ghi", + ), + pubsub.ListSnapshotsResponse( + snapshots=[ + pubsub.Snapshot(), + pubsub.Snapshot(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(pubsub.ListSnapshotsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "projects/sample1"} + + pager = client.list_snapshots(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, pubsub.Snapshot) for i in results) + + pages = list(client.list_snapshots(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.CreateSnapshotRequest, + dict, + ], +) +def test_create_snapshot_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_snapshot(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +def test_create_snapshot_rest_required_fields( + request_type=pubsub.CreateSnapshotRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["name"] = "" + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert 
jsonified_request["name"] == "name_value" + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "put", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.create_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "subscription", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) 
+def test_create_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_create_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.CreateSnapshotRequest.pb(pubsub.CreateSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Snapshot.to_json(pubsub.Snapshot()) + + request = pubsub.CreateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + + client.create_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_snapshot_rest_bad_request( + transport: str = "rest", request_type=pubsub.CreateSnapshotRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot(request) + + +def test_create_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/snapshots/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + subscription="subscription_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.create_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/snapshots/*}" % client.transport._host, args[1] + ) + + +def test_create_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_snapshot( + pubsub.CreateSnapshotRequest(), + name="name_value", + subscription="subscription_value", + ) + + +def test_create_snapshot_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSnapshotRequest, + dict, + ], +) +def test_update_snapshot_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +def test_update_snapshot_rest_required_fields( + request_type=pubsub.UpdateSnapshotRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.update_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "snapshot", + "updateMask", + ) + ) + ) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_update_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.UpdateSnapshotRequest.pb(pubsub.UpdateSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + 
"query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + req.return_value._content = pubsub.Snapshot.to_json(pubsub.Snapshot()) + + request = pubsub.UpdateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + + client.update_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_snapshot_rest_bad_request( + transport: str = "rest", request_type=pubsub.UpdateSnapshotRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_snapshot(request) + + +def test_update_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + + # get truthy value for each flattened field + mock_args = dict( + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.update_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot.name=projects/*/snapshots/*}" % client.transport._host, + args[1], + ) + + +def test_update_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_snapshot( + pubsub.UpdateSnapshotRequest(), + snapshot=pubsub.Snapshot(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_update_snapshot_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_snapshot_rest_required_fields( + request_type=pubsub.DeleteSnapshotRequest, +): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_snapshot._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.delete_snapshot(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_snapshot_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_snapshot._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("snapshot",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_delete_snapshot" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteSnapshotRequest.pb(pubsub.DeleteSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request = PreparedRequest() + + request = pubsub.DeleteSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + 
client.delete_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_delete_snapshot_rest_bad_request( + transport: str = "rest", request_type=pubsub.DeleteSnapshotRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_snapshot(request) + + +def test_delete_snapshot_rest_flattened(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": "projects/sample1/snapshots/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + client.delete_snapshot(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{snapshot=projects/*/snapshots/*}" % client.transport._host, args[1] + ) + + +def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_snapshot( + pubsub.DeleteSnapshotRequest(), + snapshot="snapshot_value", + ) + + +def test_delete_snapshot_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.SeekRequest, + dict, + ], +) +def test_seek_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.SeekResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + pb_return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.seek(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.SeekResponse) + + +def test_seek_rest_required_fields(request_type=pubsub.SeekRequest): + transport_class = transports.SubscriberRestTransport + + request_init = {} + request_init["subscription"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson( + pb_request, + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).seek._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["subscription"] = "subscription_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).seek._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "subscription" in jsonified_request + assert jsonified_request["subscription"] == "subscription_value" + + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = pubsub.SeekResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + pb_return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(pb_return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.seek(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_seek_rest_unset_required_fields(): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.seek._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("subscription",))) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_seek_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_seek" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_seek" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.SeekRequest.pb(pubsub.SeekRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = Response() + req.return_value.status_code = 200 + req.return_value.request 
= PreparedRequest() + req.return_value._content = pubsub.SeekResponse.to_json(pubsub.SeekResponse()) + + request = pubsub.SeekRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.SeekResponse() + + client.seek( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_seek_rest_bad_request( + transport: str = "rest", request_type=pubsub.SeekRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.seek(request) + + +def test_seek_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + +def test_streaming_pull_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_pull({}) + assert "Method StreamingPull is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = SubscriberClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = SubscriberClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.SubscriberGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + transports.SubscriberRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "rest", + ], +) +def test_transport_kind(transport_name): + transport = SubscriberClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.SubscriberGrpcTransport, + ) + + +def test_subscriber_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.SubscriberTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_subscriber_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.pubsub_v1.services.subscriber.transports.SubscriberTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.SubscriberTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "create_subscription", + "get_subscription", + "update_subscription", + "list_subscriptions", + "delete_subscription", + "modify_ack_deadline", + "acknowledge", + "pull", + "streaming_pull", + "modify_push_config", + "get_snapshot", + "list_snapshots", + "create_snapshot", + "update_snapshot", + "delete_snapshot", + "seek", + "set_iam_policy", + "get_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() def test_subscriber_base_transport_with_credentials_file(): @@ -4991,6 +9160,7 @@ def test_subscriber_transport_auth_adc(transport_class): [ transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport, + transports.SubscriberRestTransport, ], ) def test_subscriber_transport_auth_gdch_credentials(transport_class): @@ -5092,11 +9262,23 @@ def test_subscriber_grpc_transport_client_cert_source_for_mtls(transport_class): ) +def test_subscriber_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.SubscriberRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + 
@pytest.mark.parametrize( "transport_name", [ "grpc", "grpc_asyncio", + "rest", ], ) def test_subscriber_host_no_port(transport_name): @@ -5107,7 +9289,11 @@ def test_subscriber_host_no_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("pubsub.googleapis.com:443") + assert client.transport._host == ( + "pubsub.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com" + ) @pytest.mark.parametrize( @@ -5115,6 +9301,7 @@ def test_subscriber_host_no_port(transport_name): [ "grpc", "grpc_asyncio", + "rest", ], ) def test_subscriber_host_with_port(transport_name): @@ -5125,7 +9312,78 @@ def test_subscriber_host_with_port(transport_name): ), transport=transport_name, ) - assert client.transport._host == ("pubsub.googleapis.com:8000") + assert client.transport._host == ( + "pubsub.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://pubsub.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_subscriber_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = SubscriberClient( + credentials=creds1, + transport=transport_name, + ) + client2 = SubscriberClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_subscription._session + session2 = client2.transport.create_subscription._session + assert session1 != session2 + session1 = client1.transport.get_subscription._session + session2 = client2.transport.get_subscription._session + assert session1 != session2 + session1 = client1.transport.update_subscription._session + session2 = client2.transport.update_subscription._session + assert session1 != session2 + session1 = client1.transport.list_subscriptions._session + session2 = client2.transport.list_subscriptions._session + assert session1 != session2 + 
session1 = client1.transport.delete_subscription._session + session2 = client2.transport.delete_subscription._session + assert session1 != session2 + session1 = client1.transport.modify_ack_deadline._session + session2 = client2.transport.modify_ack_deadline._session + assert session1 != session2 + session1 = client1.transport.acknowledge._session + session2 = client2.transport.acknowledge._session + assert session1 != session2 + session1 = client1.transport.pull._session + session2 = client2.transport.pull._session + assert session1 != session2 + session1 = client1.transport.streaming_pull._session + session2 = client2.transport.streaming_pull._session + assert session1 != session2 + session1 = client1.transport.modify_push_config._session + session2 = client2.transport.modify_push_config._session + assert session1 != session2 + session1 = client1.transport.get_snapshot._session + session2 = client2.transport.get_snapshot._session + assert session1 != session2 + session1 = client1.transport.list_snapshots._session + session2 = client2.transport.list_snapshots._session + assert session1 != session2 + session1 = client1.transport.create_snapshot._session + session2 = client2.transport.create_snapshot._session + assert session1 != session2 + session1 = client1.transport.update_snapshot._session + session2 = client2.transport.update_snapshot._session + assert session1 != session2 + session1 = client1.transport.delete_snapshot._session + session2 = client2.transport.delete_snapshot._session + assert session1 != session2 + session1 = client1.transport.seek._session + session2 = client2.transport.seek._session + assert session1 != session2 def test_subscriber_grpc_transport_channel(): @@ -5459,6 +9717,180 @@ async def test_transport_close_async(): close.assert_called_once() +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest +): + client = SubscriberClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + def test_set_iam_policy(transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5969,6 +10401,7 @@ async def test_test_iam_permissions_from_dict_async(): def test_transport_close(): transports = { + "rest": "_session", "grpc": "_grpc_channel", } @@ -5986,6 +10419,7 @@ def test_transport_close(): def test_client_ctx(): transports = [ + "rest", "grpc", ] for transport in transports: From f5a68a3cd05460fe819c474485e55b795fa3e472 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 6 Apr 2023 14:03:53 -0400 Subject: [PATCH 0944/1197] chore(main): release 2.16.0 (#896) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index d83aa2448a4c..2f416052a786 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.15.2" + ".": "2.16.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 6ca26dc376b4..f4f1f925ea89 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.16.0](https://github.com/googleapis/python-pubsub/compare/v2.15.2...v2.16.0) (2023-04-06) + + +### Features + +* Enable "rest" transport in Python 
for services supporting numeric enums ([#863](https://github.com/googleapis/python-pubsub/issues/863)) ([a80c1d1](https://github.com/googleapis/python-pubsub/commit/a80c1d1f6f880cd13c247231bdc86c824edab8cb)) + + +### Documentation + +* Fix formatting of request arg in docstring ([#894](https://github.com/googleapis/python-pubsub/issues/894)) ([ee2ea73](https://github.com/googleapis/python-pubsub/commit/ee2ea7341268fd5428d98208b8af2fc96efe8d03)) + ## [2.15.2](https://github.com/googleapis/python-pubsub/compare/v2.15.1...v2.15.2) (2023-03-20) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index db31fdc2ac14..a2303530d547 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index db31fdc2ac14..a2303530d547 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.15.2" # {x-release-please-version} +__version__ = "2.16.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..b80eb543a77e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.16.0" }, "snippets": [ { From 09dbe5006321bb1c8192139f98246cc54869c4f5 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 11 Apr 2023 17:04:47 -0400 Subject: [PATCH 0945/1197] ci: use python 3.8 for pubsublite samples testing (#899) --- .../.kokoro/presubmit-against-pubsublite-samples.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh index 587e491ee365..639cbb8d3181 100755 --- a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh @@ -75,9 +75,9 @@ for file in python-pubsublite/samples/**/requirements.txt; do echo "- testing $file" echo "------------------------------------------------------------" - # Use pytest to execute tests for py-3.7 - python3.7 -m venv py-3.7 - source py-3.7/bin/activate + # Use pytest to execute tests for py-3.8 + python3.8 -m venv py-3.8 + source py-3.8/bin/activate # Install python-pubsublite samples tests requirements. 
python -m pip install --upgrade pip python -m pip install -r requirements.txt -q @@ -87,8 +87,8 @@ for file in python-pubsublite/samples/**/requirements.txt; do python -m pytest quickstart_test.py EXIT=$? - deactivate py-3.7 - rm -rf py-3.7/ + deactivate py-3.8 + rm -rf py-3.8/ if [[ $EXIT -ne 0 ]]; then RTN=1 From 0f3ba1afb5b3760c3580e169893907dce48acd93 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 12 Apr 2023 05:03:27 +0100 Subject: [PATCH 0946/1197] chore(deps): update all dependencies (#897) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index c704f8e3dd1d..78ec295ff267 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.2.2 +pytest==7.3.0 mock==5.0.1 flaky==3.7.0 google-cloud-bigquery==3.9.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f5ab0c1629bb..c1e50df3a7e4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.15.2 +google-cloud-pubsub==2.16.0 avro==1.11.1 From 016e6642a82ad1680bbe766d5da6edbc2b335558 Mon Sep 17 00:00:00 2001 From: Nathan Martins Date: Wed, 12 Apr 2023 22:25:06 +0200 Subject: [PATCH 0947/1197] chore: Bump grpcio version to 1.51.3 (#900) Bumps version for compatibility with mac M1, check https://github.com/grpc/grpc/releases/tag/v1.51.3 Co-authored-by: Anna Cocuzzo 
<63511057+acocuzzo@users.noreply.github.com> --- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/testing/constraints-3.7.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 88645f77aeea..8fc04405edb0 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -36,7 +36,7 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "grpcio >= 1.38.1, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/414 + "grpcio >= 1.51.3, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/609 "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 54e27914f575..75ec7a6234ae 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -8,5 +8,5 @@ google-api-core==1.34.0 proto-plus==1.22.0 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 -grpcio==1.38.1 +grpcio==1.51.3 grpcio-status==1.33.2 From 91d245601553023a30c4aec97d156cc9abe53db8 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 14 Apr 2023 17:36:13 -0400 Subject: [PATCH 0948/1197] Docs: Add comment to setup.py (#905) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adding a comment to clarify google-api-core[grpc] requirements Fixes #903 🦕 --- packages/google-cloud-pubsub/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8fc04405edb0..7d70e25bc48a 100644 --- 
a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -37,6 +37,7 @@ dependencies = [ "grpcio >= 1.51.3, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/609 + # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", From 0b533416cef270746893314055d1374cac65ed14 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 17 Apr 2023 22:03:50 +0100 Subject: [PATCH 0949/1197] chore(deps): update dependency pytest to v7.3.1 (#904) Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 78ec295ff267..211a3a59d862 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.3.0 +pytest==7.3.1 mock==5.0.1 flaky==3.7.0 google-cloud-bigquery==3.9.0 From cab5de6a4eecbbeef6a839a6c3a117ef2c9c245e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 17 Apr 2023 23:32:02 +0100 Subject: [PATCH 0950/1197] chore(deps): update dependency mock to v5.0.2 (#908) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 211a3a59d862..2ee99e20600a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ 
b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 pytest==7.3.1 -mock==5.0.1 +mock==5.0.2 flaky==3.7.0 google-cloud-bigquery==3.9.0 From c41e8787e42280e9422b11696b9823f721bb2106 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 5 May 2023 20:20:12 +0200 Subject: [PATCH 0951/1197] chore(deps): update dependency google-cloud-bigquery to v3.10.0 (#910) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 2ee99e20600a..f373d1f8d2df 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.3.1 mock==5.0.2 flaky==3.7.0 -google-cloud-bigquery==3.9.0 +google-cloud-bigquery==3.10.0 From c5de44cc086918470b34d0c10a8f5c7d700d48e4 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Fri, 5 May 2023 15:43:02 -0400 Subject: [PATCH 0952/1197] fix: Allow dropping cleaned-up keys (#911) --- .../subscriber/_protocol/messages_on_hold.py | 36 ++++--- .../subscriber/test_messages_on_hold.py | 102 ++++++++++++++++++ 2 files changed, 126 insertions(+), 12 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py index 5c3cc1a75e8e..63c2edbfa969 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py @@ -13,6 +13,7 @@ # limitations under the License. 
import collections +import logging import typing from typing import Any, Callable, Iterable, Optional @@ -20,6 +21,9 @@ from google.cloud.pubsub_v1 import subscriber +_LOGGER = logging.getLogger(__name__) + + class MessagesOnHold(object): """Tracks messages on hold by ordering key. Not thread-safe.""" @@ -113,14 +117,17 @@ def activate_ordering_keys( Args: ordering_keys: - The ordering keys to activate. May be empty. + The ordering keys to activate. May be empty, or contain duplicates. schedule_message_callback: The callback to call to schedule a message to be sent to the user. """ for key in ordering_keys: - assert ( - self._pending_ordered_messages.get(key) is not None - ), "A message queue should exist for every ordered message in flight." + pending_ordered_messages = self._pending_ordered_messages.get(key) + if pending_ordered_messages is None: + _LOGGER.warning( + "No message queue exists for message ordering key: %s.", key + ) + continue next_msg = self._get_next_for_ordering_key(key) if next_msg: # Schedule the next message because the previous was dropped. @@ -154,15 +161,20 @@ def _get_next_for_ordering_key( def _clean_up_ordering_key(self, ordering_key: str) -> None: """Clean up state for an ordering key with no pending messages. - Args: + Args ordering_key: The ordering key to clean up. """ message_queue = self._pending_ordered_messages.get(ordering_key) - assert ( - message_queue is not None - ), "Cleaning up ordering key that does not exist." - assert not len(message_queue), ( - "Ordering key must only be removed if there are no messages " - "left for that key." 
- ) + if message_queue is None: + _LOGGER.warning( + "Tried to clean up ordering key that does not exist: %s", ordering_key + ) + return + if len(message_queue) > 0: + _LOGGER.warning( + "Tried to clean up ordering key: %s with %d messages remaining.", + ordering_key, + len(message_queue), + ) + return del self._pending_ordered_messages[ordering_key] diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 0d28ec447c7c..5e1dcf91b518 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -109,6 +109,72 @@ def test_ordered_messages_one_key(): assert moh.size == 0 +def test_ordered_messages_drop_duplicate_keys(caplog): + moh = messages_on_hold.MessagesOnHold() + + msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 1 + + # Still waiting on the previously-sent message for "key1", and there are no + # other messages, so return None. + assert moh.get() is None + assert moh.size == 1 + + # Activate "key1". + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1", "key1"], callback_tracker) + assert callback_tracker.called + assert callback_tracker.message == msg2 + assert moh.size == 0 + assert len(moh._pending_ordered_messages) == 0 + + # Activate "key1" again + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Activate "key1" again. There are no other messages for that key, so clean + # up state for that key. 
+ callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + msg3 = make_message(ack_id="ack3", ordering_key="key1") + moh.put(msg3) + assert moh.size == 1 + + # Get next message for "key1" + assert moh.get() == msg3 + assert moh.size == 0 + + # Activate "key1". + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Activate "key1" again. There are no other messages for that key, so clean + # up state for that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + + # Activate "key1" again after being cleaned up. There are no other messages for that key, so clean + # up state for that key. + callback_tracker = ScheduleMessageCallbackTracker() + moh.activate_ordering_keys(["key1"], callback_tracker) + assert not callback_tracker.called + assert "No message queue exists for message ordering key: key1" in caplog.text + + def test_ordered_messages_two_keys(): moh = messages_on_hold.MessagesOnHold() @@ -278,3 +344,39 @@ def test_ordered_and_unordered_messages_interleaved(): # No messages left. assert moh.get() is None assert moh.size == 0 + + +def test_cleanup_nonexistent_key(caplog): + moh = messages_on_hold.MessagesOnHold() + moh._clean_up_ordering_key("non-existent-key") + assert ( + "Tried to clean up ordering key that does not exist: non-existent-key" + in caplog.text + ) + + +def test_cleanup_key_with_messages(caplog): + moh = messages_on_hold.MessagesOnHold() + + # Put message with "key1". 
+ msg1 = make_message(ack_id="ack1", ordering_key="key1") + moh.put(msg1) + assert moh.size == 1 + + # Put another message "key1" + msg2 = make_message(ack_id="ack2", ordering_key="key1") + moh.put(msg2) + assert moh.size == 2 + + # Get first message for "key1" + assert moh.get() == msg1 + assert moh.size == 1 + + # Get first message for "key1" + assert moh.get() is None + assert moh.size == 1 + + moh._clean_up_ordering_key("key1") + assert ( + "Tried to clean up ordering key: key1 with 1 messages remaining." in caplog.text + ) From 2ad66fae092368cad36a46e1a4cba3f04c4af286 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 5 May 2023 22:26:50 -0400 Subject: [PATCH 0953/1197] chore(main): release 2.16.1 (#906) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 2f416052a786..63e8f9d932ac 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.16.0" + ".": "2.16.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index f4f1f925ea89..c14ce50a8fd3 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.16.1](https://github.com/googleapis/python-pubsub/compare/v2.16.0...v2.16.1) (2023-05-05) + + +### Bug Fixes + +* Allow 
dropping cleaned-up keys ([#911](https://github.com/googleapis/python-pubsub/issues/911)) ([4b3157c](https://github.com/googleapis/python-pubsub/commit/4b3157ccb83771a2e613fc3475035f24d358ccf6)) + + +### Documentation + +* Add comment to setup.py ([#905](https://github.com/googleapis/python-pubsub/issues/905)) ([9825109](https://github.com/googleapis/python-pubsub/commit/9825109a826e63cd076c21367157be7a3c01c45b)) + ## [2.16.0](https://github.com/googleapis/python-pubsub/compare/v2.15.2...v2.16.0) (2023-04-06) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index a2303530d547..8edfaef7141c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index a2303530d547..8edfaef7141c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.16.0" # {x-release-please-version} +__version__ = "2.16.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index b80eb543a77e..aadbccf0d9f8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.16.0" + "version": "2.16.1" }, "snippets": [ { From bd11b29406bacfc6578dd20f6a8897ade52841e6 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 6 May 2023 14:56:50 +0200 Subject: [PATCH 0954/1197] chore(deps): update dependency google-cloud-pubsub to v2.16.1 (#914) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index c1e50df3a7e4..588ae36d6680 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.16.0 +google-cloud-pubsub==2.16.1 avro==1.11.1 From 9021025a36482e646ad459ec3aa6c91b36f617ba Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 08:50:06 -0400 Subject: [PATCH 0955/1197] feat: add cloud storage subscription fields (#918) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add cloud storage subscription fields PiperOrigin-RevId: 531202368 Source-Link: https://github.com/googleapis/googleapis/commit/8a4cc94afcb9ecf3c604ebefbfab2c645e977e7d Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/ec60ad76356d88e99fa4844fbd90fdcca728f5cb Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZWM2MGFkNzYzNTZkODhlOTlmYTQ4NDRmYmQ5MGZkY2NhNzI4ZjVjYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/pubsub/__init__.py | 2 + .../google/pubsub_v1/__init__.py | 2 + .../services/schema_service/async_client.py | 110 ++++++++++-- .../schema_service/transports/base.py | 110 ++++++++++-- .../services/subscriber/async_client.py | 32 ++-- .../pubsub_v1/services/subscriber/client.py | 32 ++-- .../services/subscriber/transports/rest.py | 32 ++-- .../google/pubsub_v1/types/__init__.py | 2 + .../google/pubsub_v1/types/pubsub.py | 161 +++++++++++++++++- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../scripts/fixup_pubsub_v1_keywords.py | 2 +- 11 files changed, 410 insertions(+), 77 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 06df1da17b62..8fcd19555ca5 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -29,6 +29,7 @@ from google.pubsub_v1.types.pubsub import AcknowledgeRequest from google.pubsub_v1.types.pubsub import BigQueryConfig +from google.pubsub_v1.types.pubsub import CloudStorageConfig from google.pubsub_v1.types.pubsub import CreateSnapshotRequest from google.pubsub_v1.types.pubsub import DeadLetterPolicy from google.pubsub_v1.types.pubsub import DeleteSnapshotRequest @@ -99,6 +100,7 @@ "SubscriberAsyncClient", "AcknowledgeRequest", "BigQueryConfig", + "CloudStorageConfig", "CreateSnapshotRequest", "DeadLetterPolicy", "DeleteSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 0150658c0842..f81eceb60c5a 
100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -27,6 +27,7 @@ from .types.pubsub import AcknowledgeRequest from .types.pubsub import BigQueryConfig +from .types.pubsub import CloudStorageConfig from .types.pubsub import CreateSnapshotRequest from .types.pubsub import DeadLetterPolicy from .types.pubsub import DeleteSnapshotRequest @@ -94,6 +95,7 @@ "SubscriberAsyncClient", "AcknowledgeRequest", "BigQueryConfig", + "CloudStorageConfig", "CommitSchemaRequest", "CreateSchemaRequest", "CreateSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index cfb566e8404e..a4b8d8b189a4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -326,7 +326,16 @@ async def sample_create_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -425,7 +434,16 @@ async def sample_get_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -529,7 +547,16 @@ async def sample_list_schemas(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_schemas, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -642,7 +669,16 @@ async def sample_list_schema_revisions(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_schema_revisions, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -764,7 +800,16 @@ async def sample_commit_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.commit_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -877,7 +922,16 @@ async def sample_rollback_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.rollback_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -989,7 +1043,16 @@ async def sample_delete_schema_revision(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_schema_revision, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -1081,7 +1144,16 @@ async def sample_delete_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -1193,7 +1265,16 @@ async def sample_validate_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.validate_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -1273,7 +1354,16 @@ async def sample_validate_message(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.validate_message, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=DEFAULT_CLIENT_INFO, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 40b89d61a9c8..db6d8050aaaf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -132,52 +132,142 @@ def _prep_wrapped_messages(self, client_info): self._wrapped_methods = { self.create_schema: gapic_v1.method.wrap_method( self.create_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.get_schema: gapic_v1.method.wrap_method( self.get_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.list_schemas: gapic_v1.method.wrap_method( self.list_schemas, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.list_schema_revisions: gapic_v1.method.wrap_method( self.list_schema_revisions, - default_timeout=None, + default_retry=retries.Retry( + 
initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.commit_schema: gapic_v1.method.wrap_method( self.commit_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.rollback_schema: gapic_v1.method.wrap_method( self.rollback_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.delete_schema_revision: gapic_v1.method.wrap_method( self.delete_schema_revision, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.delete_schema: gapic_v1.method.wrap_method( self.delete_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.validate_schema: gapic_v1.method.wrap_method( self.validate_schema, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.validate_message: gapic_v1.method.wrap_method( self.validate_message, - default_timeout=None, + 
default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), } diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 694b166d9624..b5fbd8e3909d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -279,10 +279,10 @@ async def sample_create_subscription(): Args: request (Optional[Union[google.pubsub_v1.types.Subscription, dict]]): - The request object. A subscription resource. If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will - pull and ack messages using API methods. At most one of - these fields may be set. + The request object. A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. name (:class:`str`): Required. The name of the subscription. It must have the format @@ -352,10 +352,10 @@ async def sample_create_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. If none of push_config or bigquery_config is - set, then the subscriber will pull and ack messages - using API methods. At most one of these fields may be - set. + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. """ # Create or coerce a protobuf request object. 
@@ -473,10 +473,10 @@ async def sample_get_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. If none of push_config or bigquery_config is - set, then the subscriber will pull and ack messages - using API methods. At most one of these fields may be - set. + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. """ # Create or coerce a protobuf request object. @@ -605,10 +605,10 @@ async def sample_update_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. If none of push_config or bigquery_config is - set, then the subscriber will pull and ack messages - using API methods. At most one of these fields may be - set. + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. """ # Create or coerce a protobuf request object. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index a6518fff84ac..a47091f365f2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -560,10 +560,10 @@ def sample_create_subscription(): Args: request (Union[google.pubsub_v1.types.Subscription, dict]): - The request object. A subscription resource. If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will - pull and ack messages using API methods. At most one of - these fields may be set. + The request object. A subscription resource. 
If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. name (str): Required. The name of the subscription. It must have the format @@ -633,10 +633,10 @@ def sample_create_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. If none of push_config or bigquery_config is - set, then the subscriber will pull and ack messages - using API methods. At most one of these fields may be - set. + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. """ # Create or coerce a protobuf request object. @@ -743,10 +743,10 @@ def sample_get_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. If none of push_config or bigquery_config is - set, then the subscriber will pull and ack messages - using API methods. At most one of these fields may be - set. + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. """ # Create or coerce a protobuf request object. @@ -864,10 +864,10 @@ def sample_update_subscription(): Returns: google.pubsub_v1.types.Subscription: - A subscription resource. If none of push_config or bigquery_config is - set, then the subscriber will pull and ack messages - using API methods. At most one of these fields may be - set. + A subscription resource. If none of push_config, bigquery_config, or + cloud_storage_config is set, then the subscriber will + pull and ack messages using API methods. At most one + of these fields may be set. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index c78fd7297ed2..7dba1e4d41d1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -814,10 +814,10 @@ def __call__( Args: request (~.pubsub.Subscription): - The request object. A subscription resource. If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will - pull and ack messages using API methods. At most one of - these fields may be set. + The request object. A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -826,10 +826,10 @@ def __call__( Returns: ~.pubsub.Subscription: - A subscription resource. If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will - pull and ack messages using API methods. At most one of - these fields may be set. + A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. """ @@ -1171,10 +1171,10 @@ def __call__( Returns: ~.pubsub.Subscription: - A subscription resource. If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will - pull and ack messages using API methods. At most one of - these fields may be set. + A subscription resource. 
If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. """ @@ -1920,10 +1920,10 @@ def __call__( Returns: ~.pubsub.Subscription: - A subscription resource. If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will - pull and ack messages using API methods. At most one of - these fields may be set. + A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, + then the subscriber will pull and ack messages using API + methods. At most one of these fields may be set. """ diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index f24034c3b63a..756d9db21573 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -18,6 +18,7 @@ from .pubsub import ( AcknowledgeRequest, BigQueryConfig, + CloudStorageConfig, CreateSnapshotRequest, DeadLetterPolicy, DeleteSnapshotRequest, @@ -94,6 +95,7 @@ "TimeoutType", "AcknowledgeRequest", "BigQueryConfig", + "CloudStorageConfig", "CreateSnapshotRequest", "DeadLetterPolicy", "DeleteSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index df299cc8bf02..bdc12a5705f6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -51,6 +51,7 @@ "ExpirationPolicy", "PushConfig", "BigQueryConfig", + "CloudStorageConfig", "ReceivedMessage", "GetSubscriptionRequest", "UpdateSubscriptionRequest", @@ -585,9 +586,10 @@ class DetachSubscriptionResponse(proto.Message): class Subscription(proto.Message): - r"""A subscription resource. 
If none of ``push_config`` or - ``bigquery_config`` is set, then the subscriber will pull and ack - messages using API methods. At most one of these fields may be set. + r"""A subscription resource. If none of ``push_config``, + ``bigquery_config``, or ``cloud_storage_config`` is set, then the + subscriber will pull and ack messages using API methods. At most one + of these fields may be set. Attributes: name (str): @@ -614,6 +616,10 @@ class Subscription(proto.Message): If delivery to BigQuery is used with this subscription, this field is used to configure it. + cloud_storage_config (google.pubsub_v1.types.CloudStorageConfig): + If delivery to Google Cloud Storage is used + with this subscription, this field is used to + configure it. ack_deadline_seconds (int): The approximate amount of time (on a best-effort basis) Pub/Sub waits for the subscriber to acknowledge receipt @@ -774,6 +780,11 @@ class State(proto.Enum): number=18, message="BigQueryConfig", ) + cloud_storage_config: "CloudStorageConfig" = proto.Field( + proto.MESSAGE, + number=22, + message="CloudStorageConfig", + ) ack_deadline_seconds: int = proto.Field( proto.INT32, number=5, @@ -997,10 +1008,9 @@ class OidcToken(proto.Message): service_account_email (str): `Service account email `__ - to be used for generating the OIDC token. The caller (for - CreateSubscription, UpdateSubscription, and ModifyPushConfig - RPCs) must have the iam.serviceAccounts.actAs permission for - the service account. + used for generating the OIDC token. For more information on + setting up authentication, see `Push + subscriptions `__. audience (str): Audience to be used when generating OIDC token. The audience claim identifies the @@ -1125,6 +1135,143 @@ class State(proto.Enum): ) +class CloudStorageConfig(proto.Message): + r"""Configuration for a Cloud Storage subscription. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket (str): + Required. User-provided name for the Cloud Storage bucket. + The bucket must be created by the user. The bucket name must + be without any prefix like "gs://". See the [bucket naming + requirements] + (https://cloud.google.com/storage/docs/buckets#naming). + filename_prefix (str): + User-provided prefix for Cloud Storage filename. See the + `object naming + requirements `__. + filename_suffix (str): + User-provided suffix for Cloud Storage filename. See the + `object naming + requirements `__. + text_config (google.pubsub_v1.types.CloudStorageConfig.TextConfig): + If set, message data will be written to Cloud + Storage in text format. + + This field is a member of `oneof`_ ``output_format``. + avro_config (google.pubsub_v1.types.CloudStorageConfig.AvroConfig): + If set, message data will be written to Cloud + Storage in Avro format. + + This field is a member of `oneof`_ ``output_format``. + max_duration (google.protobuf.duration_pb2.Duration): + The maximum duration that can elapse before a + new Cloud Storage file is created. Min 1 minute, + max 10 minutes, default 5 minutes. May not + exceed the subscription's acknowledgement + deadline. + max_bytes (int): + The maximum bytes that can be written to a Cloud Storage + file before a new file is created. Min 1 KB, max 10 GiB. The + max_bytes limit may be exceeded in cases where messages are + larger than the limit. + state (google.pubsub_v1.types.CloudStorageConfig.State): + Output only. An output-only field that + indicates whether or not the subscription can + receive messages. + """ + + class State(proto.Enum): + r"""Possible states for a Cloud Storage subscription. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. 
+ ACTIVE (1): + The subscription can actively send messages + to Cloud Storage. + PERMISSION_DENIED (2): + Cannot write to the Cloud Storage bucket + because of permission denied errors. + NOT_FOUND (3): + Cannot write to the Cloud Storage bucket + because it does not exist. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + PERMISSION_DENIED = 2 + NOT_FOUND = 3 + + class TextConfig(proto.Message): + r"""Configuration for writing message data in text format. + Message payloads will be written to files as raw text, separated + by a newline. + + """ + + class AvroConfig(proto.Message): + r"""Configuration for writing message data in Avro format. + Message payloads and metadata will be written to files as an + Avro binary. + + Attributes: + write_metadata (bool): + When true, write the subscription name, message_id, + publish_time, attributes, and ordering_key as additional + fields in the output. + """ + + write_metadata: bool = proto.Field( + proto.BOOL, + number=1, + ) + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + filename_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + filename_suffix: str = proto.Field( + proto.STRING, + number=3, + ) + text_config: TextConfig = proto.Field( + proto.MESSAGE, + number=4, + oneof="output_format", + message=TextConfig, + ) + avro_config: AvroConfig = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_format", + message=AvroConfig, + ) + max_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=6, + message=duration_pb2.Duration, + ) + max_bytes: int = proto.Field( + proto.INT64, + number=7, + ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + + class ReceivedMessage(proto.Message): r"""A message and its corresponding acknowledgment ID. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index aadbccf0d9f8..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.16.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index d288e6ebb383..e6c27901621e 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -43,7 +43,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'commit_schema': ('name', 'schema', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ), + 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ), 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), 
'delete_schema': ('name', ), 'delete_schema_revision': ('name', 'revision_id', ), From 5c449efa6dcf7052ddfa75e47bd720c89dac6a8f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 12 May 2023 10:43:22 -0400 Subject: [PATCH 0956/1197] chore(main): release 2.17.0 (#919) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 63e8f9d932ac..0ebfe4c7edf6 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.16.1" + ".": "2.17.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index c14ce50a8fd3..a27165409c23 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.17.0](https://github.com/googleapis/python-pubsub/compare/v2.16.1...v2.17.0) (2023-05-12) + + +### Features + +* Add cloud storage subscription fields ([#918](https://github.com/googleapis/python-pubsub/issues/918)) ([6e262da](https://github.com/googleapis/python-pubsub/commit/6e262da9810f58f3f34b352e4771e084381ed0aa)) + ## [2.16.1](https://github.com/googleapis/python-pubsub/compare/v2.16.0...v2.16.1) (2023-05-05) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py 
b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 8edfaef7141c..8d4f4cfb61d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 8edfaef7141c..8d4f4cfb61d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.16.1" # {x-release-please-version} +__version__ = "2.17.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..5cb2c1c6dcb3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.17.0" }, "snippets": [ { From fb06267d4f16782ec17cea58a26e04cf242af93e Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Tue, 23 May 2023 13:39:02 -0400 Subject: [PATCH 0957/1197] Docs: Add attributes to pubsub_v1.types (#921) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Docs: Add Attributes to pubsub_v1.types * 🦉 Updates from OwlBot 
post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix lint * fix indent * Apply suggestions from code review Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --------- Co-authored-by: Owl Bot Co-authored-by: Dan Lee <71398022+dandhlee@users.noreply.github.com> --- .../google/cloud/pubsub_v1/types.py | 67 ++++++++++++++++++- 1 file changed, 64 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 109d4aadc8a7..3d071a1893d0 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -61,7 +61,21 @@ # these settings can be altered to tweak Pub/Sub behavior. # The defaults should be fine for most use cases. class BatchSettings(NamedTuple): - """The settings for batch publishing the messages.""" + """The settings for batch publishing the messages. + + Attributes: + max_bytes (int): + The maximum total size of the messages to collect before automatically + publishing the batch, including any byte size overhead of the publish + request itself. The maximum value is bound by the server-side limit of + 10_000_000 bytes. Defaults to 1 MB. + max_latency (float): + The maximum number of seconds to wait for additional messages before + automatically publishing the batch. Defaults to 10ms. + max_messages (int): + The maximum number of messages to collect before automatically + publishing the batch. Defaults to 100. + """ max_bytes: int = 1 * 1000 * 1000 # 1 MB ( @@ -93,7 +107,19 @@ class LimitExceededBehavior(str, enum.Enum): class PublishFlowControl(NamedTuple): - """The client flow control settings for message publishing.""" + """The client flow control settings for message publishing. 
+ + Attributes: + message_limit (int): + The maximum number of messages awaiting to be published. + Defaults to 1000. + byte_limit (int): + The maximum total size of messages awaiting to be published. + Defaults to 10MB. + limit_exceeded_behavior (LimitExceededBehavior): + The action to take when publish flow control limits are exceeded. + Defaults to LimitExceededBehavior.IGNORE. + """ message_limit: int = 10 * BatchSettings.__new__.__defaults__[2] # type: ignore """The maximum number of messages awaiting to be published.""" @@ -110,7 +136,22 @@ class PublishFlowControl(NamedTuple): # This class is used when creating a publisher client to pass in options # to enable/disable features. class PublisherOptions(NamedTuple): - """The options for the publisher client.""" + """The options for the publisher client. + + Attributes: + enable_message_ordering (bool): + Whether to order messages in a batch by a supplied ordering key. + Defaults to false. + flow_control (PublishFlowControl): + Flow control settings for message publishing by the client. By default + the publisher client does not do any throttling. + retry (OptionalRetry): + Retry settings for message publishing by the client. This should be + an instance of :class:`google.api_core.retry.Retry`. + timeout (OptionalTimeout): + Timeout settings for message publishing by the client. It should be + compatible with :class:`~.pubsub_v1.types.TimeoutType`. + """ enable_message_ordering: bool = False """Whether to order messages in a batch by a supplied ordering key.""" @@ -142,6 +183,26 @@ class PublisherOptions(NamedTuple): class FlowControl(NamedTuple): """The settings for controlling the rate at which messages are pulled with an asynchronous subscription. + + Attributes: + max_bytes (int): + The maximum total size of received - but not yet processed - messages + before pausing the message stream. Defaults to 100 MiB. 
+ max_messages (int): + The maximum number of received - but not yet processed - messages before + pausing the message stream. Defaults to 1000. + max_lease_duration (float): + The maximum amount of time in seconds to hold a lease on a message + before dropping it from the lease management. Defaults to 1 hour. + min_duration_per_lease_extension (float): + The min amount of time in seconds for a single lease extension attempt. + Must be between 10 and 600 (inclusive). Ignored by default, but set to + 60 seconds if the subscription has exactly-once delivery enabled. + max_duration_per_lease_extension (float): + The max amount of time in seconds for a single lease extension attempt. + Bounds the delay before a message redelivery if the subscriber + fails to extend the deadline. Must be between 10 and 600 (inclusive). Ignored + if set to 0. """ max_bytes: int = 100 * 1024 * 1024 # 100 MiB From 0be496e53fdbfd1c7aacb83c3789e3c41d41286b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 23 May 2023 14:11:45 -0400 Subject: [PATCH 0958/1197] chore(main): release 2.17.1 (#924) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 0ebfe4c7edf6..2c62fbc51d4c 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.17.0" + ".": "2.17.1" } \ No newline at end of file diff --git 
a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a27165409c23..cd25f70c80df 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.17.1](https://github.com/googleapis/python-pubsub/compare/v2.17.0...v2.17.1) (2023-05-23) + + +### Documentation + +* Add attributes to pubsub_v1.types ([#921](https://github.com/googleapis/python-pubsub/issues/921)) ([4607dca](https://github.com/googleapis/python-pubsub/commit/4607dca983a8f5d4043c5661165da99453f2ef4a)) + ## [2.17.0](https://github.com/googleapis/python-pubsub/compare/v2.16.1...v2.17.0) (2023-05-12) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 8d4f4cfb61d6..c3950a4a3f51 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 8d4f4cfb61d6..c3950a4a3f51 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.0" # {x-release-please-version} +__version__ = "2.17.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 5cb2c1c6dcb3..2d2d32f64c7c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.17.0" + "version": "2.17.1" }, "snippets": [ { From 16e2e756e5f79158b8e03e95ebea13681c40e968 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 25 May 2023 02:53:54 +0200 Subject: [PATCH 0959/1197] chore(deps): update dependency google-cloud-pubsub to v2.17.1 (#920) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 588ae36d6680..f78638994a64 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.16.1 +google-cloud-pubsub==2.17.1 avro==1.11.1 From 6a4fdacef349b8c03a149101da5c9a1c9b8bf7af Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 10:27:35 -0400 Subject: [PATCH 0960/1197] feat: add push config wrapper fields (#925) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add push config wrapper fields PiperOrigin-RevId: 534994946 Source-Link: https://github.com/googleapis/googleapis/commit/64ecfebceee57a7fd22723d1b11b729270baf9c5 Source-Link: 
https://github.com/googleapis/googleapis-gen/commit/03534b3dd4c926f706729d979d893458db7e174d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMDM1MzRiM2RkNGM5MjZmNzA2NzI5ZDk3OWQ4OTM0NThkYjdlMTc0ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/pubsub_v1/types/pubsub.py | 52 +++++++++++++++++++ .../snippet_metadata_google.pubsub.v1.json | 2 +- 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index bdc12a5705f6..9605452ce0ea 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -959,6 +959,11 @@ class ExpirationPolicy(proto.Message): class PushConfig(proto.Message): r"""Configuration for a push delivery endpoint. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -998,6 +1003,18 @@ class PushConfig(proto.Message): every pushed message. This field is a member of `oneof`_ ``authentication_method``. + pubsub_wrapper (google.pubsub_v1.types.PushConfig.PubsubWrapper): + When set, the payload to the push endpoint is + in the form of the JSON representation of a + PubsubMessage + (https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#pubsubmessage). + + This field is a member of `oneof`_ ``wrapper``. + no_wrapper (google.pubsub_v1.types.PushConfig.NoWrapper): + When set, the payload to the push endpoint is + not wrapped. + + This field is a member of `oneof`_ ``wrapper``. 
""" class OidcToken(proto.Message): @@ -1033,6 +1050,29 @@ class OidcToken(proto.Message): number=2, ) + class PubsubWrapper(proto.Message): + r"""The payload to the push endpoint is in the form of the JSON + representation of a PubsubMessage + (https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#pubsubmessage). + + """ + + class NoWrapper(proto.Message): + r"""Sets the ``data`` field as the HTTP body for delivery. + + Attributes: + write_metadata (bool): + When true, writes the Pub/Sub message metadata to + ``x-goog-pubsub-:`` headers of the HTTP request. + Writes the Pub/Sub message attributes to ``:`` + headers of the HTTP request. + """ + + write_metadata: bool = proto.Field( + proto.BOOL, + number=1, + ) + push_endpoint: str = proto.Field( proto.STRING, number=1, @@ -1048,6 +1088,18 @@ class OidcToken(proto.Message): oneof="authentication_method", message=OidcToken, ) + pubsub_wrapper: PubsubWrapper = proto.Field( + proto.MESSAGE, + number=4, + oneof="wrapper", + message=PubsubWrapper, + ) + no_wrapper: NoWrapper = proto.Field( + proto.MESSAGE, + number=5, + oneof="wrapper", + message=NoWrapper, + ) class BigQueryConfig(proto.Message): diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 2d2d32f64c7c..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.17.1" + "version": "0.1.0" }, "snippets": [ { From 14cd94914fbfd80ff894e24a72af134df15b1ca5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 25 May 2023 13:21:04 -0400 Subject: [PATCH 0961/1197] build(deps): bump requests from 
2.28.1 to 2.31.0 in /synthtool/gcp/templates/python_library/.kokoro (#927) Source-Link: https://github.com/googleapis/synthtool/commit/30bd01b4ab78bf1b2a425816e15b3e7e090993dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 3 ++- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index b8edda51cf46..32b3c486591a 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2e247c7bf5154df7f98cce087a20ca7605e236340c7d6d1a14447e5c06791bd6 + digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b +# created: 2023-05-25T14:56:16.294623272Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 66a2172a76a8..3b8d7ee81848 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -419,9 +419,9 @@ readme-renderer==37.3 \ --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 # via twine -requests==2.28.1 \ - --hash=sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983 \ - --hash=sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349 +requests==2.31.0 \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + 
--hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 # via # gcp-releasetool # google-api-core From aa1fb3e3700b3f671a33b91e652bce7f40c4af73 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 1 Jun 2023 21:44:06 +0200 Subject: [PATCH 0962/1197] chore(deps): update dependency google-cloud-bigquery to v3.11.0 (#929) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index f373d1f8d2df..755a4771dc5e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.3.1 mock==5.0.2 flaky==3.7.0 -google-cloud-bigquery==3.10.0 +google-cloud-bigquery==3.11.0 From 0d1c46a7d41fd663b6a994feb0a02be2f77b56c3 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 3 Jun 2023 19:58:55 -0400 Subject: [PATCH 0963/1197] build(deps): bump cryptography from 39.0.1 to 41.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#931) Source-Link: https://github.com/googleapis/synthtool/commit/d0f51a0c2a9a6bcca86911eabea9e484baadf64b Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 42 +++++++++---------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 32b3c486591a..02a4dedced74 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations 
under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:9bc5fa3b62b091f60614c08a7fb4fd1d3e1678e326f34dd66ce1eefb5dc3267b -# created: 2023-05-25T14:56:16.294623272Z + digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc +# created: 2023-06-03T21:25:37.968717478Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 3b8d7ee81848..c7929db6d152 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -113,28 +113,26 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==39.0.1 \ - --hash=sha256:0f8da300b5c8af9f98111ffd512910bc792b4c77392a9523624680f7956a99d4 \ - --hash=sha256:35f7c7d015d474f4011e859e93e789c87d21f6f4880ebdc29896a60403328f1f \ - --hash=sha256:5aa67414fcdfa22cf052e640cb5ddc461924a045cacf325cd164e65312d99502 \ - --hash=sha256:5d2d8b87a490bfcd407ed9d49093793d0f75198a35e6eb1a923ce1ee86c62b41 \ - --hash=sha256:6687ef6d0a6497e2b58e7c5b852b53f62142cfa7cd1555795758934da363a965 \ - --hash=sha256:6f8ba7f0328b79f08bdacc3e4e66fb4d7aab0c3584e0bd41328dce5262e26b2e \ - --hash=sha256:706843b48f9a3f9b9911979761c91541e3d90db1ca905fd63fee540a217698bc \ - --hash=sha256:807ce09d4434881ca3a7594733669bd834f5b2c6d5c7e36f8c00f691887042ad \ - --hash=sha256:83e17b26de248c33f3acffb922748151d71827d6021d98c70e6c1a25ddd78505 \ - --hash=sha256:96f1157a7c08b5b189b16b47bc9db2332269d6680a196341bf30046330d15388 \ - --hash=sha256:aec5a6c9864be7df2240c382740fcf3b96928c46604eaa7f3091f58b878c0bb6 \ - --hash=sha256:b0afd054cd42f3d213bf82c629efb1ee5f22eba35bf0eec88ea9ea7304f511a2 \ - --hash=sha256:ced4e447ae29ca194449a3f1ce132ded8fcab06971ef5f618605aacaa612beac \ - 
--hash=sha256:d1f6198ee6d9148405e49887803907fe8962a23e6c6f83ea7d98f1c0de375695 \ - --hash=sha256:e124352fd3db36a9d4a21c1aa27fd5d051e621845cb87fb851c08f4f75ce8be6 \ - --hash=sha256:e422abdec8b5fa8462aa016786680720d78bdce7a30c652b7fadf83a4ba35336 \ - --hash=sha256:ef8b72fa70b348724ff1218267e7f7375b8de4e8194d1636ee60510aae104cd0 \ - --hash=sha256:f0c64d1bd842ca2633e74a1a28033d139368ad959872533b1bab8c80e8240a0c \ - --hash=sha256:f24077a3b5298a5a06a8e0536e3ea9ec60e4c7ac486755e5fb6e6ea9b3500106 \ - --hash=sha256:fdd188c8a6ef8769f148f88f859884507b954cc64db6b52f66ef199bb9ad660a \ - --hash=sha256:fe913f20024eb2cb2f323e42a64bdf2911bb9738a15dba7d3cce48151034e3a8 +cryptography==41.0.0 \ + --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ + --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ + --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ + --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ + --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ + --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ + --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ + --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ + --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ + --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ + --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ + --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ + --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ + --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ + --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ + 
--hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ + --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ + --hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ + --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be # via # gcp-releasetool # secretstorage From def1ec849740a2464aa73814dddc11df7a6f5015 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 14 Jun 2023 17:21:44 +0200 Subject: [PATCH 0964/1197] chore(deps): update all dependencies (#932) --- .../samples/snippets/requirements-test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 755a4771dc5e..09c48e1afa48 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.3.1 +pytest==7.3.2 mock==5.0.2 flaky==3.7.0 -google-cloud-bigquery==3.11.0 +google-cloud-bigquery==3.11.1 From 22fe1cd1432b001c27db6c1e966a65a55db357c7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 23 Jun 2023 22:33:22 +0200 Subject: [PATCH 0965/1197] chore(deps): update all dependencies (#936) --- .../samples/snippets/requirements-test.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 09c48e1afa48..1740fee74e22 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.3.2 +pytest==7.4.0 mock==5.0.2 flaky==3.7.0 -google-cloud-bigquery==3.11.1 +google-cloud-bigquery==3.11.2 From 130e443cb6071b6e9a6e50f85d0f27b764abbef7 Mon 
Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 27 Jun 2023 13:08:33 -0400 Subject: [PATCH 0966/1197] chore: remove pinned Sphinx version [autoapprove] (#939) Source-Link: https://github.com/googleapis/synthtool/commit/909573ce9da2819eeb835909c795d29aea5c724e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/noxfile.py | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 02a4dedced74..1b3cb6c52663 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:240b5bcc2bafd450912d2da2be15e62bc6de2cf839823ae4bf94d4f392b451dc -# created: 2023-06-03T21:25:37.968717478Z + digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b +# created: 2023-06-27T13:04:21.96690344Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 35e5916a9b9f..139640c6f2a7 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -357,10 +357,9 @@ def docfx(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", - "gcp-sphinx-docfx-yaml", ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) From 47fbd96044cb9ee2dd0404e9a100a52af8de8304 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 29 Jun 2023 12:10:26 -0400 Subject: 
[PATCH 0967/1197] chore: store artifacts in placer (#942) Source-Link: https://github.com/googleapis/synthtool/commit/cb960373d12d20f8dc38beee2bf884d49627165e Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/release/common.cfg | 9 +++++++++ packages/google-cloud-pubsub/noxfile.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 1b3cb6c52663..98994f474104 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ddf4551385d566771dc713090feb7b4c1164fb8a698fe52bbe7670b24236565b -# created: 2023-06-27T13:04:21.96690344Z + digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd +# created: 2023-06-28T17:03:33.371210701Z diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg index c67fccae42fa..5b1bbe360f0a 100644 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -38,3 +38,12 @@ env_vars: { key: "SECRET_MANAGER_KEYS" value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } + +# Store the packages we uploaded to PyPI. That way, we have a record of exactly +# what we published, which we can use to generate SBOMs and attestations. 
+action { + define_artifacts { + regex: "github/python-pubsub/**/*.tar.gz" + strip_prefix: "github/python-pubsub" + } +} diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 139640c6f2a7..50fb5d278409 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -431,6 +431,7 @@ def prerelease_deps(session): "grpcio!=1.52.0rc1", "grpcio-status", "google-api-core", + "google-auth", "proto-plus", "google-cloud-testutils", # dependencies of google-cloud-testutils" @@ -443,7 +444,6 @@ def prerelease_deps(session): # Remaining dependencies other_deps = [ "requests", - "google-auth", ] session.install(*other_deps) From 4e6ac7fcf040ac33fafe593321771cf9268538b8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 13:20:13 +0000 Subject: [PATCH 0968/1197] docs: tightened requirements on cloud storage subscription filename suffixes (#938) - [ ] Regenerate this pull request now. PiperOrigin-RevId: 543503563 Source-Link: https://togithub.com/googleapis/googleapis/commit/212ecef96e3ea7c67465ff3f52680301ad0ea1f9 Source-Link: https://togithub.com/googleapis/googleapis-gen/commit/532d27eea6ff128fdcbb3332176e894e76dcc685 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTMyZDI3ZWVhNmZmMTI4ZmRjYmIzMzMyMTc2ZTg5NGU3NmRjYzY4NSJ9 --- packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 9605452ce0ea..2b4c7cbf6385 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1212,6 +1212,7 @@ class CloudStorageConfig(proto.Message): User-provided suffix for Cloud Storage filename. See the `object naming requirements `__. + Must not end in "/". 
text_config (google.pubsub_v1.types.CloudStorageConfig.TextConfig): If set, message data will be written to Cloud Storage in text format. From 112307d4b4b8444070ef2509367bf5547e7a29f4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 4 Jul 2023 13:45:30 -0400 Subject: [PATCH 0969/1197] fix: Add async context manager return types (#944) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Add async context manager return types chore: Mock return_value should not populate oneof message fields chore: Support snippet generation for services that only support REST transport chore: Update gapic-generator-python to v1.11.0 PiperOrigin-RevId: 545430278 Source-Link: https://github.com/googleapis/googleapis/commit/601b5326107eeb74800b426d1f9933faa233258a Source-Link: https://github.com/googleapis/googleapis-gen/commit/b3f18d0f6560a855022fd058865e7620479d7af9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYjNmMThkMGY2NTYwYTg1NTAyMmZkMDU4ODY1ZTc2MjA0NzlkN2FmOSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 2 +- .../services/publisher/transports/rest.py | 4 ++-- .../services/schema_service/async_client.py | 2 +- .../services/schema_service/transports/rest.py | 4 ++-- .../services/subscriber/async_client.py | 2 +- .../services/subscriber/transports/rest.py | 4 ++-- .../unit/gapic/pubsub_v1/test_publisher.py | 18 ++++++++++++------ .../gapic/pubsub_v1/test_schema_service.py | 12 ++++++++---- .../unit/gapic/pubsub_v1/test_subscriber.py | 12 ++++++++---- 9 files changed, 37 insertions(+), 23 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 
92a10c38e206..8ec273541198 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1585,7 +1585,7 @@ async def test_iam_permissions( # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "PublisherAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index fc31ce68127a..9aa5602c2138 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -1485,7 +1485,7 @@ def __call__( request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.loads(json.dumps(transcoded_request["body"])) + body = json.dumps(transcoded_request["body"]) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1572,7 +1572,7 @@ def __call__( request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.loads(json.dumps(transcoded_request["body"])) + body = json.dumps(transcoded_request["body"]) uri = transcoded_request["uri"] method = transcoded_request["method"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index a4b8d8b189a4..e0151319aff9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1683,7 +1683,7 @@ async def 
test_iam_permissions( # Done; return the response. return response - async def __aenter__(self): + async def __aenter__(self) -> "SchemaServiceAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index afa08f8eda70..ce617f631daf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -1636,7 +1636,7 @@ def __call__( request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.loads(json.dumps(transcoded_request["body"])) + body = json.dumps(transcoded_request["body"]) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1723,7 +1723,7 @@ def __call__( request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.loads(json.dumps(transcoded_request["body"])) + body = json.dumps(transcoded_request["body"]) uri = transcoded_request["uri"] method = transcoded_request["method"] diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index b5fbd8e3909d..6bb56cdf7bf9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -2571,7 +2571,7 @@ async def test_iam_permissions( # Done; return the response. 
return response - async def __aenter__(self): + async def __aenter__(self) -> "SubscriberAsyncClient": return self async def __aexit__(self, exc_type, exc, tb): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index 7dba1e4d41d1..ec0ee7d12957 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -2241,7 +2241,7 @@ def __call__( request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.loads(json.dumps(transcoded_request["body"])) + body = json.dumps(transcoded_request["body"]) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -2328,7 +2328,7 @@ def __call__( request_kwargs = json_format.MessageToDict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) - body = json.loads(json.dumps(transcoded_request["body"])) + body = json.dumps(transcoded_request["body"]) uri = transcoded_request["uri"] method = transcoded_request["method"] diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 12d584e4944f..2796d7e6f739 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -2061,9 +2061,11 @@ async def test_list_topics_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_topics(request={}) 
- ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2507,9 +2509,11 @@ async def test_list_topic_subscriptions_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_topic_subscriptions(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2953,9 +2957,11 @@ async def test_list_topic_snapshots_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_topic_snapshots(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 132136a38a13..885a6d93df69 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1626,9 +1626,11 @@ async def test_list_schemas_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + 
async for page_ in ( # pragma: no branch await client.list_schemas(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2068,9 +2070,11 @@ async def test_list_schema_revisions_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_schema_revisions(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 0a8ebfc9944d..36cf87b595f5 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1975,9 +1975,11 @@ async def test_list_subscriptions_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_subscriptions(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -3905,9 +3907,11 @@ async def test_list_snapshots_async_pages(): RuntimeError, ) pages = [] - async for page_ in ( + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See 
https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch await client.list_snapshots(request={}) - ).pages: # pragma: no branch + ).pages: pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token From df325c216c3cb99447dd4efef0af3d4bf4a8a208 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 5 Jul 2023 21:22:50 +0200 Subject: [PATCH 0970/1197] chore(deps): update all dependencies (#940) Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 1740fee74e22..057675ae16f8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.4.0 mock==5.0.2 flaky==3.7.0 -google-cloud-bigquery==3.11.2 +google-cloud-bigquery==3.11.3 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f78638994a64..1ee4e20558a5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ google-cloud-pubsub==2.17.1 -avro==1.11.1 +avro==1.11.2 From 8cd389e899288e5aff7cf50b9109b8efaac4b5bf Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Thu, 6 Jul 2023 14:14:20 -0400 Subject: [PATCH 0971/1197] docs: Update Community section in README.rst (#945) * Update README.rst * Update README.rst Co-authored-by: Anthonios Partheniou --------- Co-authored-by: Anthonios 
Partheniou --- packages/google-cloud-pubsub/README.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index ed1965b8834d..dd3032e002cb 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -219,11 +219,8 @@ See the `CONTRIBUTING doc`_ for more information on how to get started. Community --------- -Google Cloud Platform Python developers hang out in `Slack`_ in the ``#python`` -channel, click here to `get an invitation`_. +The best place to ask questions is via Stackoverflow: https://stackoverflow.com/questions/tagged/google-cloud-pubsub -.. _Slack: https://googlecloud-community.slack.com -.. _get an invitation: https://gcp-slack.appspot.com/ License ------- From a8b9b9b29d43dc481dd8aa060a8db5885627a72e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 10 Jul 2023 09:30:05 -0400 Subject: [PATCH 0972/1197] chore: Update gapic-generator-python to v1.11.2 (#946) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.2 PiperOrigin-RevId: 546510849 Source-Link: https://github.com/googleapis/googleapis/commit/736073ad9a9763a170eceaaa54519bcc0ea55a5e Source-Link: https://github.com/googleapis/googleapis-gen/commit/deb64e8ec19d141e31089fe932b3a997ad541c4d Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZGViNjRlOGVjMTlkMTQxZTMxMDg5ZmU5MzJiM2E5OTdhZDU0MWM0ZCJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/google/pubsub/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/__init__.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/services/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/__init__.py | 2 
+- .../google/pubsub_v1/services/publisher/async_client.py | 2 +- .../google/pubsub_v1/services/publisher/client.py | 2 +- .../google/pubsub_v1/services/publisher/pagers.py | 2 +- .../google/pubsub_v1/services/publisher/transports/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/transports/base.py | 2 +- .../google/pubsub_v1/services/publisher/transports/grpc.py | 2 +- .../pubsub_v1/services/publisher/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/publisher/transports/rest.py | 2 +- .../google/pubsub_v1/services/schema_service/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/async_client.py | 2 +- .../google/pubsub_v1/services/schema_service/client.py | 2 +- .../google/pubsub_v1/services/schema_service/pagers.py | 2 +- .../pubsub_v1/services/schema_service/transports/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/base.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/grpc.py | 2 +- .../services/schema_service/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/rest.py | 2 +- .../google/pubsub_v1/services/subscriber/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/async_client.py | 2 +- .../google/pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/services/subscriber/pagers.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/base.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/grpc.py | 2 +- .../pubsub_v1/services/subscriber/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/rest.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_async.py | 2 +- 
.../pubsub_v1_generated_publisher_create_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_async.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_sync.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_sync.py | 2 +- ...sub_v1_generated_publisher_list_topic_subscriptions_async.py | 2 +- ...bsub_v1_generated_publisher_list_topic_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_sync.py | 2 +- .../pubsub_v1_generated_publisher_publish_async.py | 2 +- .../pubsub_v1_generated_publisher_publish_sync.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_sync.py | 2 +- .../pubsub_v1_generated_schema_service_commit_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_commit_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_async.py | 2 +- ..._v1_generated_schema_service_delete_schema_revision_async.py | 2 +- ...b_v1_generated_schema_service_delete_schema_revision_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_sync.py | 2 +- ...b_v1_generated_schema_service_list_schema_revisions_async.py | 2 +- ...ub_v1_generated_schema_service_list_schema_revisions_sync.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_async.py | 2 +- 
.../pubsub_v1_generated_schema_service_list_schemas_sync.py | 2 +- .../pubsub_v1_generated_schema_service_rollback_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_rollback_schema_sync.py | 2 +- ...pubsub_v1_generated_schema_service_validate_message_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_message_sync.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_sync.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_async.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_sync.py | 2 +- 
.../pubsub_v1_generated_subscriber_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_seek_async.py | 2 +- .../pubsub_v1_generated_subscriber_seek_sync.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_sync.py | 2 +- .../google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py | 2 +- packages/google-cloud-pubsub/tests/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/gapic/__init__.py | 2 +- .../google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 2 +- 111 files changed, 111 insertions(+), 111 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 8fcd19555ca5..453dca45f117 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index f81eceb60c5a..08c6bc72afcf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py index 56fb64a17a88..105e64dc0b4a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 8ec273541198..2950acb6804f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 98caee0516b1..a06a6918e772 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index 1e095181f7be..9c40664998cb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index 8a2b06839029..dc172aa02956 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 79ec2d4ff295..e24ec314dfd9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index ebb3f25e9be2..1aea7272c1aa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index fb73f78a3f1a..9174a20330b2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index 9aa5602c2138..d02c4edee560 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py index 2609e9ecd722..4e9eb056410d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index e0151319aff9..cadd252b5738 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index fdec65ae1eb4..777c5e92e64c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index 840428a158c3..acff3e954655 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py index fb62a346f701..52fa2850b096 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index db6d8050aaaf..08b370cf7a69 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 4a99c8b29b78..c14a1d5f46f4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 840d19007627..5b435aff6977 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index ce617f631daf..faff019bccf9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py index 1d7599467227..fb0b7dfba826 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 6bb56cdf7bf9..c4ed218d09ac 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index a47091f365f2..03ff4ea51725 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index cb3896bb59fa..6a68a4c515dc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index bb13ec634981..b34bbb6870e6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index ea2991f390bf..a9ebec196f98 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 4667bf7079eb..1bc5ce8ca1e1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 9a266b4288c9..5ea0e13af4ba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index ec0ee7d12957..7ce519310c2b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 756d9db21573..c82fefd1379f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 2b4c7cbf6385..3c1d2b9f93c0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 7b432906b18a..ff1d22770326 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py index 009404d86abc..c047ae881166 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py index e697e8788aa0..bdfbe2973298 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py index b39ca4ccc875..d134b061483a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py index eddea9147911..56e65a6d0a63 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py index 20188a017ba4..9b9015cd9d11 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py index 9271840b7100..c2eb69647a8b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py index 659125bf27be..318c0319a409 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py index 4351b5638f8e..e3c14e0524ee 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py index 85b983f5d0e8..a65fb35dbd84 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py index 9ebd94326103..0627b8277fff 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py index a15dba6f4052..ab3a07d55b47 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py index f16943066f87..93f1020d7753 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py index b6cd0f682899..0d2af9b98425 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py index 6913f815bd44..4399c8350a09 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py index 51561cede9d6..529acbf00d99 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py index 2985ca39bac5..b85ad8d4276b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py index 19a95fadffa7..3f913a7dc00d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py index 4d334c680109..fbac6a928c56 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py index eb69ca3e9e88..7d4d0b0f151d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py index ac6da483c041..16be67cd2014 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py index 13b0c86bcd38..a399a7db1878 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py index 094a6c8ecfe0..73f9e407e5db 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py index 39b81883a6c0..d34bd17a1eb8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py index 67d0ce7a5b13..194ab803393e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py index 45d173a19448..6e956dee7d49 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py index abd335f8cf9e..766b60396ac0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py index b13d5aa0058d..97fa79d870eb 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py index 62d3360db768..b6882faa9ea4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py index 5ad8bee4212e..310a32d71e12 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py index 54ea87778c89..ce62376fa183 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py index c5c45753d0d9..37d67280aece 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py index fd199857b8d5..831a0f6f1031 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py index 8c7d467377a7..249c1ad3198c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py index 96c79deea0c3..977bb10a65fe 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py index f51f8d7b693f..add86c6fa894 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py index 42885d3d0ec3..8e2fa2219177 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py index c5ac0e10f4ab..d53710204e11 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py index 6b0a38f40777..dc9add5cf400 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py index f4153cc36c86..c2edca707897 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py index 09a34d4ccf90..1d270cf2fa57 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py index b3c2d020de9b..b314a652ecf9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py index 9a3c7d13488a..a8936e80cbaa 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py index 605fb4d39e46..488b06838b37 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py index 672da7efa7b8..7aaf8e423ea2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py index efdd6928d95d..c7db1f593600 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py index e7ed25d7ad5d..0d66e43e8297 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py index 57022485796a..dafe8fe3b3f6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py index 408945b92ced..8b7d7d35f792 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py index a0960acd85cd..37fc866a9356 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py index d12f58a4d8f9..9fbed39a1f74 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py index 34b8938840af..3c9ac382a7a3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py index cbb59781fde6..867b090e8fd6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py index 13c973563019..cd9a50a39b8c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py index 9b8dbfb9c5ee..a02b5c9bd76a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py index 87631e58b849..ade3e5e57f6c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py index 8c31fb126402..fe61e24f56a5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py index 04dabe0bd090..0a6a8ef9e215 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py index b0927facef97..4107a81a880f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py index 67bd51c31d8c..cf45c94794c1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py index d7dd306dadfb..f6a48ec60a50 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py index 61eda9788c17..787ebd29aed7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py index fb61cfe9e8c3..9e009fcc80ff 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py index 65ab1796beda..e24c2452ce28 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py index deb2340d0ad4..322673af9bc3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py index a32ecc37b3a1..392550de509f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py index ae5549793745..7ce0e96195b3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py index b51ffac08fbe..ae1d2c301b5c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py index bc29f5e0b32c..3fd588f4357f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py index f85f7ce4a214..6e5632e84527 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py index 4e15e3e4dc13..eabb9036d5b7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index e6c27901621e..8a294ba36aa8 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/tests/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-pubsub/tests/__init__.py +++ b/packages/google-cloud-pubsub/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-pubsub/tests/unit/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py index e8e1c3845db5..89a37dc92c5a 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 2796d7e6f739..6ced2805a9ed 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 885a6d93df69..ea09a79f36f3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 36cf87b595f5..058c33d9fe88 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From bb1f3c77d757810b1f9fe44fdbe161f3fbca274d Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 12 Jul 2023 17:33:57 +0200 Subject: [PATCH 0973/1197] chore(deps): update dependency mock to v5.1.0 (#947) --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 057675ae16f8..8fee9d747107 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 pytest==7.4.0 -mock==5.0.2 +mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.11.3 From 1fb0b3147e95864145670bfcc9d8c676a4b702e6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 12 Jul 2023 18:21:46 -0400 Subject: [PATCH 0974/1197] chore(main): release 2.18.0 (#926) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 18 ++++++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 22 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 2c62fbc51d4c..f2565ccecf18 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.17.1" + ".": "2.18.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index cd25f70c80df..96c16c6a264b 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md 
+++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,24 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.18.0](https://github.com/googleapis/python-pubsub/compare/v2.17.1...v2.18.0) (2023-07-12) + + +### Features + +* Add push config wrapper fields ([#925](https://github.com/googleapis/python-pubsub/issues/925)) ([8e803cf](https://github.com/googleapis/python-pubsub/commit/8e803cf4ab136d606a0be459ab6d281b65560599)) + + +### Bug Fixes + +* Add async context manager return types ([#944](https://github.com/googleapis/python-pubsub/issues/944)) ([a3b2061](https://github.com/googleapis/python-pubsub/commit/a3b2061c4edf42123335fcfee6fcc4a44e90a5eb)) + + +### Documentation + +* Tightened requirements on cloud storage subscription filename suffixes ([#938](https://github.com/googleapis/python-pubsub/issues/938)) ([f54dcd0](https://github.com/googleapis/python-pubsub/commit/f54dcd0e7324218d87c37c0266c441a62012866d)) +* Update Community section in README.rst ([#945](https://github.com/googleapis/python-pubsub/issues/945)) ([dea258c](https://github.com/googleapis/python-pubsub/commit/dea258cff3ad19ffba67659bb03a2edcc44889d9)) + ## [2.17.1](https://github.com/googleapis/python-pubsub/compare/v2.17.0...v2.17.1) (2023-05-23) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index c3950a4a3f51..f09943f6bdf7 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index c3950a4a3f51..f09943f6bdf7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.17.1" # {x-release-please-version} +__version__ = "2.18.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..6ddc3f01be40 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.18.0" }, "snippets": [ { From 9ce17b132bc1facaa7fe6a5a181810fea1d88195 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 14 Jul 2023 20:44:33 +0200 Subject: [PATCH 0975/1197] chore(deps): update dependency google-cloud-pubsub to v2.18.0 (#948) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 1ee4e20558a5..f74ea492bc04 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.17.1 +google-cloud-pubsub==2.18.0 avro==1.11.2 From 
ae0e27126d25df032cdbaf1a1936798ae007b450 Mon Sep 17 00:00:00 2001 From: liuyunn Date: Mon, 17 Jul 2023 11:32:58 -0400 Subject: [PATCH 0976/1197] Fix: Fix resource not found error for flaky test. (#951) * Fix resource not found error for flaky test --------- Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/tests/system.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 9a216c48fcd9..6bf8ef10f60c 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -84,7 +84,10 @@ def cleanup(): # Perform all clean up. for to_call, args, kwargs in registry: - to_call(*args, **kwargs) + try: + to_call(*args, **kwargs) + except core_exceptions.NotFound: + pass def test_publish_messages(publisher, topic_path_base, cleanup): From 56a90758fd6675cc1b660a316de373af0b6f84bc Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 12:58:48 -0400 Subject: [PATCH 0977/1197] build(deps): [autoapprove] bump cryptography from 41.0.0 to 41.0.2 (#955) Source-Link: https://github.com/googleapis/synthtool/commit/d6103f4a3540ba60f633a9e25c37ec5fe7e6286d Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.flake8 | 2 +- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/auto-label.yaml | 2 +- packages/google-cloud-pubsub/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- .../.kokoro/publish-docs.sh | 2 +- .../google-cloud-pubsub/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 44 ++++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- 
.../.kokoro/test-samples.sh | 2 +- .../google-cloud-pubsub/.kokoro/trampoline.sh | 2 +- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-pubsub/.trampolinerc | 4 +- packages/google-cloud-pubsub/MANIFEST.in | 2 +- packages/google-cloud-pubsub/docs/conf.py | 2 +- packages/google-cloud-pubsub/noxfile.py | 3 +- .../scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 18 ++++---- packages/google-cloud-pubsub/setup.cfg | 2 +- 22 files changed, 55 insertions(+), 52 deletions(-) diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 2e438749863d..87f6e408c47d 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 98994f474104..ae4a522b9e5f 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2d816f26f728ac8b24248741e7d4c461c09764ef9f7be3684d557c9632e46dbd -# created: 2023-06-28T17:03:33.371210701Z + digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb +# created: 2023-07-17T15:20:13.819193964Z diff --git a/packages/google-cloud-pubsub/.github/auto-label.yaml b/packages/google-cloud-pubsub/.github/auto-label.yaml index 41bff0b5375a..b2016d119b40 100644 --- a/packages/google-cloud-pubsub/.github/auto-label.yaml +++ b/packages/google-cloud-pubsub/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index 9a48d205afed..c264634884b5 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index f8137d0ae497..8e39a2cc438d 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh index f52514257ef0..6f3972140e80 100755 --- a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh +++ b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC. +# Copyright 2023 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index 1c4d62370042..9eafe0be3bba 100755 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index a6bba127fe84..c333c366defe 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index c7929db6d152..67d70a110897 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -113,26 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.0 \ - --hash=sha256:0ddaee209d1cf1f180f1efa338a68c4621154de0afaef92b89486f5f96047c55 \ - --hash=sha256:14754bcdae909d66ff24b7b5f166d69340ccc6cb15731670435efd5719294895 \ - --hash=sha256:344c6de9f8bda3c425b3a41b319522ba3208551b70c2ae00099c205f0d9fd3be \ - --hash=sha256:34d405ea69a8b34566ba3dfb0521379b210ea5d560fafedf9f800a9a94a41928 \ - --hash=sha256:3680248309d340fda9611498a5319b0193a8dbdb73586a1acf8109d06f25b92d \ - --hash=sha256:3c5ef25d060c80d6d9f7f9892e1d41bb1c79b78ce74805b8cb4aa373cb7d5ec8 \ - --hash=sha256:4ab14d567f7bbe7f1cdff1c53d5324ed4d3fc8bd17c481b395db224fb405c237 \ - --hash=sha256:5c1f7293c31ebc72163a9a0df246f890d65f66b4a40d9ec80081969ba8c78cc9 \ - --hash=sha256:6b71f64beeea341c9b4f963b48ee3b62d62d57ba93eb120e1196b31dc1025e78 \ - --hash=sha256:7d92f0248d38faa411d17f4107fc0bce0c42cae0b0ba5415505df72d751bf62d \ - --hash=sha256:8362565b3835ceacf4dc8f3b56471a2289cf51ac80946f9087e66dc283a810e0 \ - --hash=sha256:84a165379cb9d411d58ed739e4af3396e544eac190805a54ba2e0322feb55c46 \ - --hash=sha256:88ff107f211ea696455ea8d911389f6d2b276aabf3231bf72c8853d22db755c5 \ - --hash=sha256:9f65e842cb02550fac96536edb1d17f24c0a338fd84eaf582be25926e993dde4 \ - --hash=sha256:a4fc68d1c5b951cfb72dfd54702afdbbf0fb7acdc9b7dc4301bbf2225a27714d \ - --hash=sha256:b7f2f5c525a642cecad24ee8670443ba27ac1fab81bba4cc24c7b6b41f2d0c75 \ - --hash=sha256:b846d59a8d5a9ba87e2c3d757ca019fa576793e8758174d3868aecb88d6fc8eb \ - 
--hash=sha256:bf8fc66012ca857d62f6a347007e166ed59c0bc150cefa49f28376ebe7d992a2 \ - --hash=sha256:f5d0bf9b252f30a31664b6f64432b4730bb7038339bd18b1fafe129cfc2be9be +cryptography==41.0.2 \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + 
--hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 # via # gcp-releasetool # secretstorage diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh index ba3a707b040c..63ac41dfae1d 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh index 2c6500cae0b9..5a0f5fab6a89 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh index 11c042d342d7..50b35a48c190 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline.sh b/packages/google-cloud-pubsub/.kokoro/trampoline.sh index f39236e943a8..d85b1f267693 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2017 Google Inc. +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh index 4af6cdc26dbc..59a7cf3a9373 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 5405cc8ff1f3..9e3898fd1c12 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.trampolinerc b/packages/google-cloud-pubsub/.trampolinerc index 0eee72ab62aa..a7dfeb42c6d0 100644 --- a/packages/google-cloud-pubsub/.trampolinerc +++ b/packages/google-cloud-pubsub/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Template for .trampolinerc - # Add required env vars here. required_envvars+=( ) diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index e783f4c6209b..e0a66705318e 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index 9245a8edfc51..bacbcd5653e5 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2021 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 50fb5d278409..e93740a13581 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2018 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -452,6 +452,7 @@ def prerelease_deps(session): "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" ) session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") session.run("py.test", "tests/unit") diff --git a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh index 21f6d2a26d90..0018b421ddf8 100755 --- a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2015 Google Inc. All rights reserved. +# Copyright 2023 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py index 91b59676bfc7..1acc119835b5 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2016 Google Inc +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -33,17 +33,17 @@ autoescape=True, ) -README_TMPL = jinja_env.get_template('README.tmpl.rst') +README_TMPL = jinja_env.get_template("README.tmpl.rst") def get_help(file): - return subprocess.check_output(['python', file, '--help']).decode() + return subprocess.check_output(["python", file, "--help"]).decode() def main(): parser = argparse.ArgumentParser() - parser.add_argument('source') - parser.add_argument('--destination', default='README.rst') + parser.add_argument("source") + parser.add_argument("--destination", default="README.rst") args = parser.parse_args() @@ -51,9 +51,9 @@ def main(): root = os.path.dirname(source) destination = os.path.join(root, args.destination) - jinja_env.globals['get_help'] = get_help + jinja_env.globals["get_help"] = get_help - with io.open(source, 'r') as f: + with io.open(source, "r") as f: config = yaml.load(f) # This allows get_help to execute in the right directory. @@ -61,9 +61,9 @@ def main(): output = README_TMPL.render(config) - with io.open(destination, 'w') as f: + with io.open(destination, "w") as f: f.write(output) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg index c3a2b39f6528..052350089505 100644 --- a/packages/google-cloud-pubsub/setup.cfg +++ b/packages/google-cloud-pubsub/setup.cfg @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2020 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 8b2907ff4166b288f5771f282856ac80b2fa01da Mon Sep 17 00:00:00 2001 From: Casey O'Hare Date: Tue, 18 Jul 2023 17:48:37 -0400 Subject: [PATCH 0978/1197] Samples: Payload Unwrapping (NoWrapper) (#933) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * samples: Payload Unwrapping (NoWrapper) * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * Update subscriber.py --------- Co-authored-by: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Co-authored-by: Owl Bot --- .../samples/snippets/subscriber.py | 51 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 31 +++++++++++ 2 files changed, 82 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index b1b9ad0ea85d..b7fd1ebdd18f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -187,6 +187,46 @@ def create_push_subscription( # [END pubsub_create_push_subscription] +def create_push_no_wrapper_subscription( + project_id: str, topic_id: str, subscription_id: str, endpoint: str +) -> None: + """Create a new push no wrapper subscription on the given topic.""" + # [START pubsub_create_push_no_wrapper_subscription] + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + # endpoint = "https://my-test-project.appspot.com/push" + + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + no_wrapper = pubsub_v1.types.PushConfig.NoWrapper(write_metadata=True) + push_config = pubsub_v1.types.PushConfig( + push_endpoint=endpoint, 
no_wrapper=no_wrapper + ) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscription = subscriber.create_subscription( + request={ + "name": subscription_path, + "topic": topic_path, + "push_config": push_config, + } + ) + + print(f"Push no wrapper subscription created: {subscription}.") + print(f"Endpoint for subscription is: {endpoint}") + print(f"No wrapper configuration for subscription is: {no_wrapper}") + # [END pubsub_create_push_no_wrapper_subscription] + + def create_subscription_with_ordering( project_id: str, topic_id: str, subscription_id: str ) -> None: @@ -946,6 +986,13 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_push_parser.add_argument("subscription_id") create_push_parser.add_argument("endpoint") + create_push_no_wrapper_parser = subparsers.add_parser( + "create-push-no-wrapper", help=create_push_no_wrapper_subscription.__doc__ + ) + create_push_no_wrapper_parser.add_argument("topic_id") + create_push_no_wrapper_parser.add_argument("subscription_id") + create_push_no_wrapper_parser.add_argument("endpoint") + create_subscription_with_ordering_parser = subparsers.add_parser( "create-with-ordering", help=create_subscription_with_ordering.__doc__ ) @@ -1092,6 +1139,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_push_subscription( args.project_id, args.topic_id, args.subscription_id, args.endpoint ) + elif args.command == "create-push-no-wrapper": + create_push_no_wrapper_subscription( + args.project_id, args.topic_id, args.subscription_id, args.endpoint + ) elif args.command == "create-with-ordering": create_subscription_with_ordering( args.project_id, args.topic_id, args.subscription_id diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 435724782c1b..3eed0d88678d 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -550,6 +550,37 @@ def test_update_push_subscription( subscriber_client.delete_subscription(request={"subscription": subscription_path}) +def test_create_push_no_wrapper_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + + push_subscription_for_create_name = ( + f"subscription-test-subscription-push-no-wrapper-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, push_subscription_for_create_name + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + subscriber.create_push_no_wrapper_subscription( + PROJECT_ID, TOPIC, push_subscription_for_create_name, ENDPOINT + ) + + out, _ = capsys.readouterr() + assert f"{push_subscription_for_create_name}" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + @pytest.fixture(scope="module") def bigquery_table() -> Generator[str, None, None]: client = bigquery.Client() From c745c4394cd02fb8aa346b5e0a3cff7f0b4b3453 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 21 Jul 2023 09:18:08 -0400 Subject: [PATCH 0979/1197] build(deps): [autoapprove] bump pygments from 2.13.0 to 2.15.0 (#959) Source-Link: https://github.com/googleapis/synthtool/commit/eaef28efd179e6eeb9f4e9bf697530d074a6f3b9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index ae4a522b9e5f..17c21d96d654 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:39f0f3f2be02ef036e297e376fe3b6256775576da8a6ccb1d5eeb80f4c8bf8fb -# created: 2023-07-17T15:20:13.819193964Z + digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e +# created: 2023-07-21T02:12:46.49799314Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 67d70a110897..b563eb284459 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -396,9 +396,9 @@ pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.13.0 \ - --hash=sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1 \ - --hash=sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42 +pygments==2.15.0 \ + --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ + --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 # via # readme-renderer # rich From 9a0bb6152b9c324bad424ba814d751907fcef718 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sat, 22 Jul 2023 13:09:24 +0200 Subject: [PATCH 0980/1197] chore(deps): update dependency google-cloud-bigquery to v3.11.4 (#957) Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 8fee9d747107..53d976a09bfe 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,4 +2,4 @@ backoff==2.2.1 pytest==7.4.0 mock==5.1.0 
flaky==3.7.0 -google-cloud-bigquery==3.11.3 +google-cloud-bigquery==3.11.4 From a4a291465228a21718930fda03bc3479bb1d4312 Mon Sep 17 00:00:00 2001 From: Chak <103051353+chak-radhar007@users.noreply.github.com> Date: Sat, 22 Jul 2023 13:50:12 +0200 Subject: [PATCH 0981/1197] samples: Update schema.py print (#949) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Format string issue Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [ ] Make sure to open an issue as a [bug/issue](https://togithub.com/googleapis/python-pubsub/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea - [ ] Ensure the tests and linter pass - [ ] Code coverage does not decrease (if any source code was changed) - [ ] Appropriate docs were updated (if necessary) Fixes # 🦕 --- packages/google-cloud-pubsub/samples/snippets/schema.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 57186b3252ce..40cd853c9cf8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -727,7 +727,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: # Deserialize the message data accordingly. 
if encoding == "BINARY": state.ParseFromString(message.data) - print("Received a binary-encoded message:\n{state}") + print(f"Received a binary-encoded message:\n{state}") elif encoding == "JSON": Parse(message.data, state) print(f"Received a JSON-encoded message:\n{state}") From ef5ef59d64dc622e1f498a50e74eed8eb0b53fee Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 24 Jul 2023 11:23:10 -1000 Subject: [PATCH 0982/1197] chore: add flakybot.yaml (#961) * chore: add flakyboy.yaml * chore: add flakybot.yaml --- packages/google-cloud-pubsub/.github/flakybot.yaml | 1 + 1 file changed, 1 insertion(+) create mode 100644 packages/google-cloud-pubsub/.github/flakybot.yaml diff --git a/packages/google-cloud-pubsub/.github/flakybot.yaml b/packages/google-cloud-pubsub/.github/flakybot.yaml new file mode 100644 index 000000000000..cb83375f9893 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/flakybot.yaml @@ -0,0 +1 @@ +issuePriority: p2 From a655663a5026e176bd0409b689ae0a98e2386a63 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 07:03:50 -0400 Subject: [PATCH 0983/1197] build(deps): [autoapprove] bump certifi from 2022.12.7 to 2023.7.22 (#964) Source-Link: https://github.com/googleapis/synthtool/commit/395d53adeeacfca00b73abf197f65f3c17c8f1e9 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 17c21d96d654..0ddd0e4d1873 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f8ca7655fa8a449cadcabcbce4054f593dcbae7aeeab34aa3fcc8b5cf7a93c9e -# created: 2023-07-21T02:12:46.49799314Z + digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 +# created: 2023-07-25T21:01:10.396410762Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index b563eb284459..76d9bba0f7d0 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.2.0 \ --hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db # via google-auth -certifi==2022.12.7 \ - --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ - --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +certifi==2023.7.22 \ + --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ + --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests cffi==1.15.1 \ --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ From 9a3fa3bfd0037de35d068c51f69e3e7a3f583ef6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 26 Jul 2023 13:54:22 -0400 Subject: [PATCH 0984/1197] docs: clarified where ordering_key will be written if write_metadata is set (#965) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: clarified where ordering_key will be written if write_metadata is set PiperOrigin-RevId: 551210991 Source-Link: 
https://github.com/googleapis/googleapis/commit/7c762d770a82da397956b80379a6fb73505835af Source-Link: https://github.com/googleapis/googleapis-gen/commit/15fe4c5ff5ebd52911b429b05b992e232f53351e Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMTVmZTRjNWZmNWViZDUyOTExYjQyOWIwNWI5OTJlMjMyZjUzMzUxZSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 6 +++++- .../snippet_metadata_google.pubsub.v1.json | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 3c1d2b9f93c0..0331d0c97f3a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1277,7 +1277,11 @@ class AvroConfig(proto.Message): write_metadata (bool): When true, write the subscription name, message_id, publish_time, attributes, and ordering_key as additional - fields in the output. + fields in the output. The subscription name, message_id, and + publish_time fields are put in their own fields while all + other message properties other than data (for example, an + ordering_key, if present) are added as entries in the + attributes map. 
""" write_metadata: bool = proto.Field( diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 6ddc3f01be40..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.18.0" + "version": "0.1.0" }, "snippets": [ { From c11d56440cf2863c1d196e47a0a0637ceea0b655 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 27 Jul 2023 10:25:22 -0400 Subject: [PATCH 0985/1197] chore(main): release 2.18.1 (#966) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index f2565ccecf18..75006b003f5c 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.18.0" + ".": "2.18.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 96c16c6a264b..bfdb680286ed 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: 
https://pypi.org/project/google-cloud-pubsub/#history +## [2.18.1](https://github.com/googleapis/python-pubsub/compare/v2.18.0...v2.18.1) (2023-07-26) + + +### Documentation + +* Clarified where ordering_key will be written if write_metadata is set ([#965](https://github.com/googleapis/python-pubsub/issues/965)) ([3d95034](https://github.com/googleapis/python-pubsub/commit/3d95034f94426cdcf5b87323b9e463a7e8ce4f91)) + ## [2.18.0](https://github.com/googleapis/python-pubsub/compare/v2.17.1...v2.18.0) (2023-07-12) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index f09943f6bdf7..e1b4da1deb3e 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.18.0" # {x-release-please-version} +__version__ = "2.18.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index f09943f6bdf7..e1b4da1deb3e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.18.0" # {x-release-please-version} +__version__ = "2.18.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..551a55277c4d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.18.1" }, "snippets": [ { From ca2a45d0e5564edeb9b90a068c5efb7091a95051 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 27 Jul 2023 20:42:59 +0200 Subject: [PATCH 0986/1197] chore(deps): update dependency google-cloud-pubsub to v2.18.1 (#967) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f74ea492bc04..9b5ef8e5bc91 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,2 @@ -google-cloud-pubsub==2.18.0 +google-cloud-pubsub==2.18.1 avro==1.11.2 From 107e0783c9f83766669caa2fffe342ba6622a6d7 Mon Sep 17 00:00:00 2001 From: Anna Cocuzzo <63511057+acocuzzo@users.noreply.github.com> Date: Mon, 31 Jul 2023 04:17:09 -1000 Subject: [PATCH 0987/1197] Samples: Add CloudStorage Subscription (#928) * add cloudstorage samples * fix lint * add protobuf * Create unused topic --- .../samples/snippets/requirements-test.txt | 1 + .../samples/snippets/requirements.txt | 2 + .../samples/snippets/subscriber.py | 69 ++++++++++++++ .../samples/snippets/subscriber_test.py | 90 ++++++++++++++----- 4 files changed, 142 
insertions(+), 20 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 53d976a09bfe..9f0580cbf2d2 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -3,3 +3,4 @@ pytest==7.4.0 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.11.4 +google-cloud-storage==2.9.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9b5ef8e5bc91..dd5fae7c89ae 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,2 +1,4 @@ google-cloud-pubsub==2.18.1 avro==1.11.2 +protobuf==4.21.9 +avro==1.11.2 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index b7fd1ebdd18f..897d2f47b8a8 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -354,6 +354,62 @@ def create_bigquery_subscription( # [END pubsub_create_bigquery_subscription] +def create_cloudstorage_subscription( + project_id: str, topic_id: str, subscription_id: str, bucket: str +) -> None: + """Create a new CloudStorage subscription on the given topic.""" + # [START pubsub_cloudstorage_subscription] + from google.cloud import pubsub_v1 + from google.protobuf import duration_pb2 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + # bucket = "my-bucket" + + filename_prefix = "log_events_" + filename_suffix = ".avro" + # Either CloudStorageConfig.AvroConfig or CloudStorageConfig.TextConfig + # defaults to TextConfig + avro_config = 
pubsub_v1.types.CloudStorageConfig.AvroConfig(write_metadata=True) + + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + max_duration = duration_pb2.Duration() + max_duration.FromSeconds(300) + + cloudstorage_config = pubsub_v1.types.CloudStorageConfig( + bucket=bucket, + filename_prefix=filename_prefix, + filename_suffix=filename_suffix, + avro_config=avro_config, + # Min 1 minutes, max 10 minutes + max_duration=max_duration, + # Min 1 KB, max 10 GiB + max_bytes=2000, + ) + + # Wrap the subscriber in a 'with' block to automatically call close() to + # close the underlying gRPC channel when done. + with subscriber: + subscription = subscriber.create_subscription( + request={ + "name": subscription_path, + "topic": topic_path, + "cloud_storage_config": cloudstorage_config, + } + ) + + print(f"CloudStorage subscription created: {subscription}.") + print(f"Bucket for subscription is: {bucket}") + print(f"Prefix is: {filename_prefix}") + print(f"Suffix is: {filename_suffix}") + # [END pubsub_cloudstorage_subscription] + + def delete_subscription(project_id: str, subscription_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_subscription] @@ -1023,6 +1079,14 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_bigquery_subscription_parser.add_argument("subscription_id") create_bigquery_subscription_parser.add_argument("bigquery_table_id") + create_cloudstorage_subscription_parser = subparsers.add_parser( + "create-cloudstorage", + help=create_cloudstorage_subscription.__doc__, + ) + create_cloudstorage_subscription_parser.add_argument("topic_id") + create_cloudstorage_subscription_parser.add_argument("subscription_id") + create_cloudstorage_subscription_parser.add_argument("bucket") + delete_parser = subparsers.add_parser("delete", 
help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -1162,6 +1226,11 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: args.subscription_id, args.bigquery_table_id, ) + elif args.command == "create-cloudstorage": + create_cloudstorage_subscription( + args.project_id, args.topic_id, args.subscription_id, args.bucket + ) + elif args.command == "delete": delete_subscription(args.project_id, args.subscription_id) elif args.command == "update-push": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 3eed0d88678d..3fa94761c6f7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -23,7 +23,7 @@ import backoff from flaky import flaky from google.api_core.exceptions import NotFound -from google.cloud import bigquery, pubsub_v1 +from google.cloud import bigquery, pubsub_v1, storage import pytest import subscriber @@ -35,6 +35,7 @@ PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"] TOPIC = f"subscription-test-topic-{PY_VERSION}-{UUID}" DEAD_LETTER_TOPIC = f"subscription-test-dead-letter-topic-{PY_VERSION}-{UUID}" +UNUSED_TOPIC = f"subscription-unused-topic-{PY_VERSION}-{UUID}" EOD_TOPIC = f"subscription-test-eod-topic-{PY_VERSION}-{UUID}" SUBSCRIPTION_ADMIN = f"subscription-test-subscription-admin-{PY_VERSION}-{UUID}" ENDPOINT = f"https://{PROJECT_ID}.appspot.com/push" @@ -45,6 +46,7 @@ FILTER = 'attributes.author="unknown"' BIGQUERY_DATASET_ID = f"python_samples_dataset_{UNDERSCORE_PY_VERSION}_{UUID}" BIGQUERY_TABLE_ID = f"python_samples_table_{UNDERSCORE_PY_VERSION}_{UUID}" +CLOUDSTORAGE_BUCKET = f"python_samples_bucket_{UNDERSCORE_PY_VERSION}_{UUID}" C = TypeVar("C", bound=Callable[..., Any]) @@ -103,6 +105,23 @@ def topic(publisher_client: pubsub_v1.PublisherClient) -> Generator[str, None, N 
publisher_client.delete_topic(request={"topic": topic.name}) +# This topic is only for creating subscriptions, no messages should be published on this topic. +@pytest.fixture(scope="module") +def unused_topic( + publisher_client: pubsub_v1.PublisherClient, +) -> Generator[str, None, None]: + topic_path = publisher_client.topic_path(PROJECT_ID, UNUSED_TOPIC) + + try: + topic = publisher_client.get_topic(request={"topic": topic_path}) + except: # noqa + topic = publisher_client.create_topic(request={"name": topic_path}) + + yield topic.name + + publisher_client.delete_topic(request={"topic": topic.name}) + + @pytest.fixture(scope="module") def dead_letter_topic( publisher_client: pubsub_v1.PublisherClient, @@ -191,7 +210,6 @@ def test_create_subscription( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_for_create_name = ( f"subscription-test-subscription-for-create-{PY_VERSION}-{UUID}" ) @@ -221,7 +239,6 @@ def test_create_subscription_with_dead_letter_policy( dead_letter_topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_dlq_name = ( f"subscription-test-subscription-dlq-for-create-{PY_VERSION}-{UUID}" ) @@ -257,7 +274,6 @@ def test_receive_with_delivery_attempts( dead_letter_topic: str, capsys: CaptureFixture[str], ) -> None: - from google.cloud.pubsub_v1.types import DeadLetterPolicy subscription_dlq_for_receive_name = ( @@ -304,7 +320,6 @@ def test_update_dead_letter_policy( dead_letter_topic: str, capsys: CaptureFixture[str], ) -> None: - from google.cloud.pubsub_v1.types import DeadLetterPolicy subscription_dlq_for_update_name = ( @@ -354,7 +369,6 @@ def test_remove_dead_letter_policy( dead_letter_topic: str, capsys: CaptureFixture[str], ) -> None: - from google.cloud.pubsub_v1.types import DeadLetterPolicy subscription_dlq_for_remove_name = ( @@ -425,7 +439,6 @@ def test_create_subscription_with_filtering( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_with_filtering_name = ( 
f"subscription-test-subscription-with-filtering-{PY_VERSION}-{UUID}" ) @@ -458,7 +471,6 @@ def test_create_subscription_with_exactly_once_delivery( exactly_once_delivery_topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_eod_for_create_name = ( f"subscription-test-subscription-eod-for-create-{PY_VERSION}-{UUID}" ) @@ -492,7 +504,6 @@ def test_create_push_subscription( topic: str, capsys: CaptureFixture[str], ) -> None: - push_subscription_for_create_name = ( f"subscription-test-subscription-push-for-create-{PY_VERSION}-{UUID}" ) @@ -523,7 +534,6 @@ def test_update_push_subscription( topic: str, capsys: CaptureFixture[str], ) -> None: - push_subscription_for_update_name = ( f"subscription-test-subscription-push-for-create-{PY_VERSION}-{UUID}" ) @@ -555,7 +565,6 @@ def test_create_push_no_wrapper_subscription( topic: str, capsys: CaptureFixture[str], ) -> None: - push_subscription_for_create_name = ( f"subscription-test-subscription-push-no-wrapper-for-create-{PY_VERSION}-{UUID}" ) @@ -636,11 +645,60 @@ def test_create_bigquery_subscription( subscriber_client.delete_subscription(request={"subscription": subscription_path}) +@pytest.fixture(scope="module") +def cloudstorage_bucket() -> Generator[str, None, None]: + storage_client = storage.Client() + + bucket_name = CLOUDSTORAGE_BUCKET + + bucket = storage_client.create_bucket(bucket_name) + print(f"Bucket {bucket.name} created.") + + yield bucket.name + + bucket.delete() + + +def test_create_cloudstorage_subscription( + subscriber_client: pubsub_v1.SubscriberClient, + unused_topic: str, + cloudstorage_bucket: str, + capsys: CaptureFixture[str], +) -> None: + cloudstorage_subscription_for_create_name = ( + f"subscription-test-subscription-cloudstorage-for-create-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, cloudstorage_subscription_for_create_name + ) + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) 
+ except NotFound: + pass + + subscriber.create_cloudstorage_subscription( + PROJECT_ID, + # We have to use a topic with no messages published, + # so that the bucket will be empty and can be deleted. + UNUSED_TOPIC, + cloudstorage_subscription_for_create_name, + cloudstorage_bucket, + ) + + out, _ = capsys.readouterr() + assert f"{cloudstorage_subscription_for_create_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + def test_delete_subscription( subscriber_client: pubsub_v1.SubscriberClient, topic: str, ) -> None: - subscription_for_delete_name = ( f"subscription-test-subscription-for-delete-{PY_VERSION}-{UUID}" ) @@ -672,7 +730,6 @@ def test_receive( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str], ) -> None: - subscription_async_for_receive_name = ( f"subscription-test-subscription-async-for-receive-{PY_VERSION}-{UUID}" ) @@ -707,7 +764,6 @@ def test_receive_with_custom_attributes( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_async_receive_with_custom_name = ( f"subscription-test-subscription-async-receive-with-custom-{PY_VERSION}-{UUID}" ) @@ -745,7 +801,6 @@ def test_receive_with_flow_control( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_async_receive_with_flow_control_name = f"subscription-test-subscription-async-receive-with-flow-control-{PY_VERSION}-{UUID}" subscription_path = subscriber_client.subscription_path( @@ -780,7 +835,6 @@ def test_receive_with_blocking_shutdown( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_async_receive_with_blocking_name = f"subscription-test-subscription-async-receive-with-blocking-shutdown-{PY_VERSION}-{UUID}" subscription_path = subscriber_client.subscription_path( @@ -851,7 +905,6 @@ def test_receive_messages_with_exactly_once_delivery_enabled( exactly_once_delivery_topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_eod_for_receive_name = ( 
f"subscription-test-subscription-eod-for-receive-{PY_VERSION}-{UUID}" ) @@ -894,7 +947,6 @@ def test_listen_for_errors( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_async_listen = ( f"subscription-test-subscription-async-listen-{PY_VERSION}-{UUID}" ) @@ -928,7 +980,6 @@ def test_receive_synchronously( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_sync_for_receive_name = ( f"subscription-test-subscription-sync-for-receive-{PY_VERSION}-{UUID}" ) @@ -964,7 +1015,6 @@ def test_receive_synchronously_with_lease( topic: str, capsys: CaptureFixture[str], ) -> None: - subscription_sync_for_receive_with_lease_name = f"subscription-test-subscription-sync-for-receive-with-lease-{PY_VERSION}-{UUID}" subscription_path = subscriber_client.subscription_path( From 184899c1cfeb31d0290931699c7664fb290ecf69 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:15:13 -0400 Subject: [PATCH 0988/1197] chore: [autoapprove] Pin flake8 version (#969) Source-Link: https://github.com/googleapis/synthtool/commit/0ddbff8012e47cde4462fe3f9feab01fbc4cdfd6 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bced5ca77c4dda0fd2f5d845d4035fc3c5d3d6b81f245246a36aee114970082b Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.pre-commit-config.yaml | 2 +- packages/google-cloud-pubsub/noxfile.py | 3 ++- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 0ddd0e4d1873..d71329cc807d 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:6c1cbc75c74b8bdd71dada2fa1677e9d6d78a889e9a70ee75b93d1d0543f96e1 -# created: 2023-07-25T21:01:10.396410762Z + digest: sha256:bced5ca77c4dda0fd2f5d845d4035fc3c5d3d6b81f245246a36aee114970082b +# created: 2023-08-01T17:41:45.434027321Z diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 9e3898fd1c12..19409cbd37a4 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -26,6 +26,6 @@ repos: hooks: - id: black - repo: https://github.com/pycqa/flake8 - rev: 3.9.2 + rev: 6.1.0 hooks: - id: flake8 diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index e93740a13581..ae863d0fa6f6 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -25,6 +25,7 @@ import nox +FLAKE8_VERSION = "flake8==6.1.0" BLACK_VERSION = "black==22.3.0" ISORT_VERSION = "isort==5.10.1" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] @@ -134,7 +135,7 @@ def lint(session): Returns a failure if the linters find linting errors or sufficiently serious code quality issues. 
""" - session.install("flake8", BLACK_VERSION) + session.install(FLAKE8_VERSION, BLACK_VERSION) session.run( "black", "--check", From c21a290fc0078f295b82048af7cf5600b6a192b7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 2 Aug 2023 10:20:07 -0400 Subject: [PATCH 0989/1197] build: [autoapprove] bump cryptography from 41.0.2 to 41.0.3 (#973) Source-Link: https://github.com/googleapis/synthtool/commit/352b9d4c068ce7c05908172af128b294073bf53c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 48 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index d71329cc807d..a3da1b0d4cd3 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bced5ca77c4dda0fd2f5d845d4035fc3c5d3d6b81f245246a36aee114970082b -# created: 2023-08-01T17:41:45.434027321Z + digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 +# created: 2023-08-02T10:53:29.114535628Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 76d9bba0f7d0..029bd342de94 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.2 \ - --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ - --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ - --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ - --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ - --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ - --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ - --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ - --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ - --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ - --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ - --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ - --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ - --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ - 
--hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ - --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ - --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ - --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ - --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ - --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ - --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ - --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ - --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ - --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 +cryptography==41.0.3 \ + --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ + --hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ + --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ + --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ + --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ + --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ + --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ + --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ + --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ + --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ + --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ + --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ + --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ + 
--hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ + --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ + --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ + --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ + --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ + --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ + --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ + --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ + --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ + --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de # via # gcp-releasetool # secretstorage From a1262a1f3ff265a32354f779382366c62dd01bfb Mon Sep 17 00:00:00 2001 From: amfisher-404 Date: Mon, 7 Aug 2023 14:14:30 -0400 Subject: [PATCH 0990/1197] =?UTF-8?q?fix:=20Change=20retry=20multiplier=20?= =?UTF-8?q?from=201.3=20to=204,=20for=20requests=20that=20retry=20Resour?= =?UTF-8?q?=E2=80=A6=20(#971)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Change retry multiplier from 1.3 to 4, for requests that retry ResourceExhausted. 
--- .../google/pubsub_v1/services/publisher/async_client.py | 2 +- .../google/pubsub_v1/services/publisher/transports/base.py | 2 +- .../google/pubsub_v1/services/subscriber/async_client.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/base.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 2950acb6804f..e3c9647e90e6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -556,7 +556,7 @@ async def sample_publish(): default_retry=retries.Retry( initial=0.1, maximum=60.0, - multiplier=1.3, + multiplier=4.0, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.Cancelled, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index e24ec314dfd9..e8caa080b091 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -162,7 +162,7 @@ def _prep_wrapped_messages(self, client_info): default_retry=retries.Retry( initial=0.1, maximum=60.0, - multiplier=1.3, + multiplier=4.0, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.Cancelled, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index c4ed218d09ac..fbc5535f1cab 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1383,7 +1383,7 
@@ def request_generator(): default_retry=retries.Retry( initial=0.1, maximum=60.0, - multiplier=1.3, + multiplier=4.0, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.DeadlineExceeded, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index a9ebec196f98..d50b8baf683c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -255,7 +255,7 @@ def _prep_wrapped_messages(self, client_info): default_retry=retries.Retry( initial=0.1, maximum=60.0, - multiplier=1.3, + multiplier=4.0, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.DeadlineExceeded, From 9815a9f9426799cba4b553404a76136a2f1e468d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 7 Aug 2023 16:28:47 -0400 Subject: [PATCH 0991/1197] chore(main): release 2.18.2 (#974) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 75006b003f5c..e97945cb8aee 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.18.1" + ".": "2.18.2" } \ No newline at end of file diff --git 
a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bfdb680286ed..53108031e8d0 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.18.2](https://github.com/googleapis/python-pubsub/compare/v2.18.1...v2.18.2) (2023-08-07) + + +### Bug Fixes + +* Change retry multiplier from 1.3 to 4, for requests that retry Resour… ([#971](https://github.com/googleapis/python-pubsub/issues/971)) ([e4364d2](https://github.com/googleapis/python-pubsub/commit/e4364d2a061bb73fe3410d2ef213a04f3315e282)) + ## [2.18.1](https://github.com/googleapis/python-pubsub/compare/v2.18.0...v2.18.1) (2023-07-26) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index e1b4da1deb3e..6f338c034ba0 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.18.1" # {x-release-please-version} +__version__ = "2.18.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index e1b4da1deb3e..6f338c034ba0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.18.1" # {x-release-please-version} +__version__ = "2.18.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 551a55277c4d..8c531f980136 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.18.1" + "version": "2.18.2" }, "snippets": [ { From 1f38e39902c0f1d4d0db0902f44e7e8b5eb95c6c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 8 Aug 2023 01:49:23 +0200 Subject: [PATCH 0992/1197] chore(deps): update all dependencies (#968) --- .../samples/snippets/requirements-test.txt | 2 +- .../google-cloud-pubsub/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 9f0580cbf2d2..0b65f2dfc67b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -3,4 +3,4 @@ pytest==7.4.0 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.11.4 -google-cloud-storage==2.9.0 +google-cloud-storage==2.10.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index dd5fae7c89ae..9f9b3a3c648f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.18.1 +google-cloud-pubsub==2.18.2 avro==1.11.2 -protobuf==4.21.9 +protobuf==4.23.4 avro==1.11.2 From 
3a4186e29b3d1233fe10ab5b783e9cc489dcd511 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 9 Aug 2023 15:28:50 +0200 Subject: [PATCH 0993/1197] chore(deps): update dependency protobuf to v4.24.0 (#975) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9f9b3a3c648f..f15a7535351b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-pubsub==2.18.2 avro==1.11.2 -protobuf==4.23.4 +protobuf==4.24.0 avro==1.11.2 From ed2e4e32015088585e620d61bcdce2da5543e8c7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 18 Aug 2023 10:15:53 -0400 Subject: [PATCH 0994/1197] fix: Make retry policy back off more aggressively for RPCs that retry RESOURCE_EXHAUSTD (#979) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: Make retry policy back off more aggressively for RPCs that retry RESOURCE_EXHAUSTD PiperOrigin-RevId: 557935020 Source-Link: https://github.com/googleapis/googleapis/commit/38e1f31e46d840075f14d9716b592e8a53c89855 Source-Link: https://github.com/googleapis/googleapis-gen/commit/5bbe39d37218a420b2368c37541de5887db7d6af Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWJiZTM5ZDM3MjE4YTQyMGIyMzY4YzM3NTQxZGU1ODg3ZGI3ZDZhZiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../generated_samples/snippet_metadata_google.pubsub.v1.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json 
b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 8c531f980136..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.18.2" + "version": "0.1.0" }, "snippets": [ { From 0be04c7d709a6c733008e266fd832980c77a18cb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 18 Aug 2023 12:03:46 -0400 Subject: [PATCH 0995/1197] chore(main): release 2.18.3 (#980) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index e97945cb8aee..65444fe37cd5 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.18.2" + ".": "2.18.3" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 53108031e8d0..2d4672c75795 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.18.3](https://github.com/googleapis/python-pubsub/compare/v2.18.2...v2.18.3) (2023-08-18) + + +### Bug Fixes + +* Make retry 
policy back off more aggressively for RPCs that retry RESOURCE_EXHAUSTD ([#979](https://github.com/googleapis/python-pubsub/issues/979)) ([4073b3d](https://github.com/googleapis/python-pubsub/commit/4073b3dd6a6989e86d5e19bdb9b9c47ae2b0db87)) + ## [2.18.2](https://github.com/googleapis/python-pubsub/compare/v2.18.1...v2.18.2) (2023-08-07) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 6f338c034ba0..573fcecc0347 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.18.2" # {x-release-please-version} +__version__ = "2.18.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 6f338c034ba0..573fcecc0347 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.18.2" # {x-release-please-version} +__version__ = "2.18.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..9e218e52ef09 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.18.3" }, "snippets": [ { From 6ad25dbe809a124465dce5fd826ef78b58185d39 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 21 Aug 2023 15:56:03 +0200 Subject: [PATCH 0996/1197] chore(deps): update all dependencies (#982) --- .../google-cloud-pubsub/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f15a7535351b..9264fa152ce4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.18.2 +google-cloud-pubsub==2.18.3 avro==1.11.2 -protobuf==4.24.0 +protobuf==4.24.1 avro==1.11.2 From 941eeb96e2e3a6b5d23dc791c4de091aa0cecf33 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 28 Aug 2023 15:18:36 +0200 Subject: [PATCH 0997/1197] chore(deps): update dependency protobuf to v4.24.2 (#985) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9264fa152ce4..a9308f39c6f2 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-pubsub==2.18.3 avro==1.11.2 -protobuf==4.24.1 +protobuf==4.24.2 avro==1.11.2 From 4ff8f162ac546457b21bdb8b869477e13eca17d2 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 5 Sep 2023 16:30:27 +0200 Subject: [PATCH 0998/1197] chore(deps): update all dependencies (#986) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 0b65f2dfc67b..ce825e79dec5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.4.0 +pytest==7.4.1 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.11.4 From 390d0dafd691744306d964b9a0994eb216df9639 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 9 Sep 2023 08:09:05 -0400 Subject: [PATCH 0999/1197] docs: Minor formatting (#988) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * docs: Minor formatting chore: Update gapic-generator-python to v1.11.5 build: Update rules_python to 0.24.0 PiperOrigin-RevId: 563436317 Source-Link: https://github.com/googleapis/googleapis/commit/42fd37b18d706f6f51f52f209973b3b2c28f509a Source-Link: https://github.com/googleapis/googleapis-gen/commit/280264ca02fb9316b4237a96d0af1a2343a81a56 Copy-Tag: 
eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjgwMjY0Y2EwMmZiOTMxNmI0MjM3YTk2ZDBhZjFhMjM0M2E4MWE1NiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/pubsub_v1/services/schema_service/async_client.py | 1 + .../google/pubsub_v1/services/schema_service/client.py | 1 + .../generated_samples/snippet_metadata_google.pubsub.v1.json | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index cadd252b5738..59773b3858f2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -884,6 +884,7 @@ async def sample_rollback_schema(): Required. The revision ID to roll back to. It must be a revision of the same schema. + Example: c7cfa2a8 This corresponds to the ``revision_id`` field diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 777c5e92e64c..3106a32921f7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1073,6 +1073,7 @@ def sample_rollback_schema(): Required. The revision ID to roll back to. It must be a revision of the same schema. 
+ Example: c7cfa2a8 This corresponds to the ``revision_id`` field diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 9e218e52ef09..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.18.3" + "version": "0.1.0" }, "snippets": [ { From ad9598488f244f7cdf1969d57ef06634b48c951b Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sun, 10 Sep 2023 01:11:28 +0200 Subject: [PATCH 1000/1197] chore(deps): update all dependencies (#989) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: liuyunn --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index ce825e79dec5..43d23800d1d5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.4.1 +pytest==7.4.2 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.11.4 From 36b3018fbcd7b8018df825ea8f60a36a436ec9ee Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sun, 10 Sep 2023 12:15:59 -0400 Subject: [PATCH 1001/1197] chore(main): release 2.18.4 (#991) 
Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: liuyunn --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 65444fe37cd5..7f3c731bef00 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.18.3" + ".": "2.18.4" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 2d4672c75795..b0d33b1c56eb 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.18.4](https://github.com/googleapis/python-pubsub/compare/v2.18.3...v2.18.4) (2023-09-09) + + +### Documentation + +* Minor formatting ([#988](https://github.com/googleapis/python-pubsub/issues/988)) ([4eea8c5](https://github.com/googleapis/python-pubsub/commit/4eea8c5c757da6800ba6958e4b8e66085b0e9ddb)) + ## [2.18.3](https://github.com/googleapis/python-pubsub/compare/v2.18.2...v2.18.3) (2023-08-18) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 573fcecc0347..5b1f1ec8192d 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.18.3" # {x-release-please-version} +__version__ = "2.18.4" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 573fcecc0347..5b1f1ec8192d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.18.3" # {x-release-please-version} +__version__ = "2.18.4" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..4980252293a1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.18.4" }, "snippets": [ { From 4a1d965f5aa4c60540d1d7e4a69a7e99faeaf9a1 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sun, 10 Sep 2023 21:39:04 +0200 Subject: [PATCH 1002/1197] chore(deps): update all dependencies (#992) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: liuyunn --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt 
b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index a9308f39c6f2..e64e7ca1a778 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-pubsub==2.18.3 avro==1.11.2 -protobuf==4.24.2 +protobuf==4.24.3 avro==1.11.2 From 37f503f24c33236caefd85006402e59e515a6e84 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 11 Sep 2023 01:29:09 +0200 Subject: [PATCH 1003/1197] chore(deps): update all dependencies (#993) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index e64e7ca1a778..e9d904f6c329 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.18.3 +google-cloud-pubsub==2.18.4 avro==1.11.2 protobuf==4.24.3 avro==1.11.2 From 2609f4ea4337f3519656d3ad2c1f74a6dc4e4d39 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 19 Sep 2023 22:27:36 +0200 Subject: [PATCH 1004/1197] chore(deps): update all dependencies (#996) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 43d23800d1d5..37f51ef90abd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -3,4 +3,4 @@ pytest==7.4.2 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.11.4 -google-cloud-storage==2.10.0 +google-cloud-storage==2.11.0 From f6e04cbb64bfd92f414b7da2201f015d94db1638 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 25 Sep 2023 20:40:46 +0200 Subject: [PATCH 1005/1197] chore(deps): update all dependencies (#999) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-pubsub/samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index e9d904f6c329..ecb9fe5f6280 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-pubsub==2.18.4 -avro==1.11.2 +avro==1.11.3 protobuf==4.24.3 -avro==1.11.2 +avro==1.11.3 From d07b6e72cf31da6d611bd384798bdbba87d56166 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 2 Oct 2023 20:06:05 -0400 Subject: [PATCH 1006/1197] chore: [autoapprove] bump cryptography from 41.0.3 to 41.0.4 (#1000) Source-Link: https://github.com/googleapis/synthtool/commit/dede53ff326079b457cfb1aae5bbdc82cbb51dc3 Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-pubsub/.gitignore | 1 + .../.kokoro/requirements.txt | 49 ++++++++++--------- 3 files changed, 28 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index a3da1b0d4cd3..a9bdb1b7ac0f 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:3e3800bb100af5d7f9e810d48212b37812c1856d20ffeafb99ebe66461b61fc7 -# created: 2023-08-02T10:53:29.114535628Z + digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb +# created: 2023-10-02T21:31:03.517640371Z diff --git a/packages/google-cloud-pubsub/.gitignore b/packages/google-cloud-pubsub/.gitignore index b4243ced74e4..d083ea1ddc3e 100644 --- a/packages/google-cloud-pubsub/.gitignore +++ b/packages/google-cloud-pubsub/.gitignore @@ -50,6 +50,7 @@ docs.metadata # Virtual environment env/ +venv/ # Test logs coverage.xml diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 029bd342de94..96d593c8c82a 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -113,30 +113,30 @@ commonmark==0.9.1 \ --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 # via rich -cryptography==41.0.3 \ - --hash=sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306 \ - 
--hash=sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84 \ - --hash=sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47 \ - --hash=sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d \ - --hash=sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116 \ - --hash=sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207 \ - --hash=sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81 \ - --hash=sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087 \ - --hash=sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd \ - --hash=sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507 \ - --hash=sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858 \ - --hash=sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae \ - --hash=sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34 \ - --hash=sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906 \ - --hash=sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd \ - --hash=sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922 \ - --hash=sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7 \ - --hash=sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4 \ - --hash=sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574 \ - --hash=sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1 \ - --hash=sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c \ - --hash=sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e \ - --hash=sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de +cryptography==41.0.4 \ + --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ + 
--hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ + --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ + --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ + --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ + --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ + --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ + --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ + --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ + --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ + --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ + --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ + --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ + --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ + --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ + --hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ + --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ + --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ + --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ + --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ + --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ + --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ + --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f # via # gcp-releasetool # secretstorage @@ -382,6 +382,7 @@ protobuf==3.20.3 \ # gcp-docuploader # gcp-releasetool # google-api-core + # googleapis-common-protos pyasn1==0.4.8 \ 
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba From 3ca6e29bfc1ab939fcfae40023123f2e73fa12fc Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Wed, 4 Oct 2023 18:22:06 +0200 Subject: [PATCH 1007/1197] chore(deps): update all dependencies (#1002) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 37f51ef90abd..7dc75557c63a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,5 +2,5 @@ backoff==2.2.1 pytest==7.4.2 mock==5.1.0 flaky==3.7.0 -google-cloud-bigquery==3.11.4 +google-cloud-bigquery==3.12.0 google-cloud-storage==2.11.0 From 5b86a2dba74440b7eedad751d94e1acb2ce4853d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 9 Oct 2023 10:59:17 -0400 Subject: [PATCH 1008/1197] chore: [autoapprove] Update `black` and `isort` to latest versions (#1007) Source-Link: https://github.com/googleapis/synthtool/commit/0c7b0333f44b2b7075447f43a121a12d15a7b76a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +-- .../.kokoro/requirements.txt | 6 ++-- .../.pre-commit-config.yaml | 2 +- .../pubsub_v1/publisher/_batch/thread.py | 1 - 
.../services/publisher/transports/rest.py | 3 -- .../schema_service/transports/rest.py | 3 -- .../services/subscriber/transports/rest.py | 3 -- packages/google-cloud-pubsub/noxfile.py | 36 ++++++++++--------- .../subscriber/test_streaming_pull_manager.py | 1 - 9 files changed, 25 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index a9bdb1b7ac0f..dd98abbdeebe 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:fac304457974bb530cc5396abd4ab25d26a469cd3bc97cbfb18c8d4324c584eb -# created: 2023-10-02T21:31:03.517640371Z + digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 +# created: 2023-10-09T14:06:13.397766266Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 96d593c8c82a..0332d3267e15 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.12 \ - --hash=sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e \ - --hash=sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997 +urllib3==1.26.17 \ + --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ + --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b # via # requests # twine diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml 
b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 19409cbd37a4..6a8e16950664 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 22.3.0 + rev: 23.7.0 hooks: - id: black - repo: https://github.com/pycqa/flake8 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 8b868eaee88f..e872fcf2b8cc 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -388,7 +388,6 @@ def publish( overflow = new_size > size_limit or new_count >= self.settings.max_messages if not self._messages or not overflow: - # Store the actual message in the batch's message queue. self._messages.append(message) self._size = new_size diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index d02c4edee560..de7215153bac 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -1363,7 +1363,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1442,7 +1441,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. 
Args: @@ -1527,7 +1525,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index faff019bccf9..0586bbf9d5fd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -1514,7 +1514,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. Args: @@ -1593,7 +1592,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -1678,7 +1676,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index 7ce519310c2b..c633c41156fb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -2119,7 +2119,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Call the get iam policy method over HTTP. 
Args: @@ -2198,7 +2197,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> policy_pb2.Policy: - r"""Call the set iam policy method over HTTP. Args: @@ -2283,7 +2281,6 @@ def __call__( timeout: Optional[float] = None, metadata: Sequence[Tuple[str, str]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Call the test iam permissions method over HTTP. Args: diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index ae863d0fa6f6..4965d53c3e4a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -17,24 +17,26 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import re import shutil +from typing import Dict, List import warnings import nox FLAKE8_VERSION = "flake8==6.1.0" -BLACK_VERSION = "black==22.3.0" -ISORT_VERSION = "isort==5.10.1" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==0.910" DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock==5.0.0", "asyncmock", @@ -42,26 +44,26 @@ "pytest-cov", "pytest-asyncio", ] -UNIT_TEST_EXTERNAL_DEPENDENCIES = [] -UNIT_TEST_LOCAL_DEPENDENCIES = [] -UNIT_TEST_DEPENDENCIES = [] -UNIT_TEST_EXTRAS = [] -UNIT_TEST_EXTRAS_BY_PYTHON = {} - -SYSTEM_TEST_PYTHON_VERSIONS = ["3.10"] -SYSTEM_TEST_STANDARD_DEPENDENCIES = [ +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.10"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock==5.0.0", 
"pytest", "google-cloud-testutils", ] -SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [ +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ "psutil", "flaky", ] -SYSTEM_TEST_LOCAL_DEPENDENCIES = [] -SYSTEM_TEST_DEPENDENCIES = [] -SYSTEM_TEST_EXTRAS = [] -SYSTEM_TEST_EXTRAS_BY_PYTHON = {} +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -77,6 +79,7 @@ # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936 # "mypy_samples", # TODO: uncomment when the check passes "docs", + "format", ] # Error if a python version is missing @@ -240,7 +243,6 @@ def unit(session): def install_systemtest_dependencies(session, *constraints): - # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. # See https://github.com/grpc/grpc/issues/32163 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 199aea25611e..1f781b72207c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1157,7 +1157,6 @@ def test_heartbeat_stream_ack_deadline_seconds(caplog): "google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater", autospec=True ) def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bidi_rpc): - manager = make_manager() with mock.patch.object( From 047c0c06dc5eb6e573ba90eedcc1e5a662597b36 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Oct 2023 18:07:00 +0200 Subject: [PATCH 1009/1197] chore(deps): update all dependencies (#1003) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: liuyunn --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index ecb9fe5f6280..683de05573f6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-pubsub==2.18.4 avro==1.11.3 -protobuf==4.24.3 +protobuf==4.24.4 avro==1.11.3 From e89a7cbe5b5bc64f5a99add5482f405b3bac8c83 Mon Sep 17 00:00:00 2001 From: liuyunn Date: Thu, 12 Oct 2023 19:22:52 -0400 Subject: [PATCH 1010/1197] doc: Update CPS to GCS and push payload unwrapping sample region tag (#1005) --- .../google-cloud-pubsub/samples/snippets/subscriber.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 897d2f47b8a8..8f39acb1390b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -191,7 +191,7 @@ def create_push_no_wrapper_subscription( project_id: str, topic_id: str, subscription_id: str, endpoint: str ) -> None: """Create a new push no wrapper subscription on the given topic.""" - # [START pubsub_create_push_no_wrapper_subscription] + # [START pubsub_create_unwrapped_push_subscription] from google.cloud import pubsub_v1 # TODO(developer) @@ -224,7 +224,7 @@ def create_push_no_wrapper_subscription( print(f"Push no wrapper subscription created: {subscription}.") print(f"Endpoint for subscription is: 
{endpoint}") print(f"No wrapper configuration for subscription is: {no_wrapper}") - # [END pubsub_create_push_no_wrapper_subscription] + # [END pubsub_create_unwrapped_push_subscription] def create_subscription_with_ordering( @@ -358,7 +358,7 @@ def create_cloudstorage_subscription( project_id: str, topic_id: str, subscription_id: str, bucket: str ) -> None: """Create a new CloudStorage subscription on the given topic.""" - # [START pubsub_cloudstorage_subscription] + # [START pubsub_create_cloud_storage_subscription] from google.cloud import pubsub_v1 from google.protobuf import duration_pb2 @@ -407,7 +407,7 @@ def create_cloudstorage_subscription( print(f"Bucket for subscription is: {bucket}") print(f"Prefix is: {filename_prefix}") print(f"Suffix is: {filename_suffix}") - # [END pubsub_cloudstorage_subscription] + # [END pubsub_create_cloud_storage_subscription] def delete_subscription(project_id: str, subscription_id: str) -> None: From bd8597ae833bc07c0fd6913e11571acb6d4e6e7e Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 13 Oct 2023 16:18:19 +0200 Subject: [PATCH 1011/1197] chore(deps): update all dependencies (#1008) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: liuyunn --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 7dc75557c63a..ba4002f684ba 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -3,4 +3,4 @@ pytest==7.4.2 mock==5.1.0 flaky==3.7.0 
google-cloud-bigquery==3.12.0 -google-cloud-storage==2.11.0 +google-cloud-storage==2.12.0 From 5c8eb319b5ce4e5d6d7c0c47cced4adbb07e800d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 11:37:42 -0400 Subject: [PATCH 1012/1197] chore: Update gapic-generator-python to v1.11.8 (#1009) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.11.7 PiperOrigin-RevId: 573230664 Source-Link: https://github.com/googleapis/googleapis/commit/93beed334607e70709cc60e6145be65fdc8ec386 Source-Link: https://github.com/googleapis/googleapis-gen/commit/f4a4edaa8057639fcf6adf9179872280d1a8f651 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiZjRhNGVkYWE4MDU3NjM5ZmNmNmFkZjkxNzk4NzIyODBkMWE4ZjY1MSJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: modified some descriptions PiperOrigin-RevId: 573936401 Source-Link: https://github.com/googleapis/googleapis/commit/c1f013491bc8dce60e93ca355ea494ee2e522dd8 Source-Link: https://github.com/googleapis/googleapis-gen/commit/a063e1be04179c7386317bb395dea10eb38229b2 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYTA2M2UxYmUwNDE3OWM3Mzg2MzE3YmIzOTVkZWExMGViMzgyMjliMiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.11.8 PiperOrigin-RevId: 574178735 Source-Link: https://github.com/googleapis/googleapis/commit/7307199008ee2d57a4337066de29f9cd8c444bc6 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ce3af21b7c559a87c2befc076be0e3aeda3a26f0 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiY2UzYWYyMWI3YzU1OWE4N2MyYmVmYzA3NmJlMGUzYWVkYTNhMjZmMCJ9 * 🦉 Updates from OwlBot post-processor See 
https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 2 +- .../pubsub_v1/services/publisher/client.py | 2 +- .../services/publisher/transports/grpc.py | 2 +- .../publisher/transports/grpc_asyncio.py | 2 +- .../services/subscriber/async_client.py | 20 +- .../pubsub_v1/services/subscriber/client.py | 20 +- .../services/subscriber/transports/grpc.py | 18 +- .../subscriber/transports/grpc_asyncio.py | 18 +- .../google/pubsub_v1/types/pubsub.py | 19 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../unit/gapic/pubsub_v1/test_publisher.py | 115 ++++++---- .../gapic/pubsub_v1/test_schema_service.py | 204 +++++++++++++----- .../unit/gapic/pubsub_v1/test_subscriber.py | 145 ++++++++----- 13 files changed, 354 insertions(+), 215 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index e3c9647e90e6..28a247ccaa82 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -230,7 +230,7 @@ async def create_topic( ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). .. 
code-block:: python diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index a06a6918e772..1a92362c557d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -512,7 +512,7 @@ def create_topic( ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). .. code-block:: python diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 1aea7272c1aa..268e687f01cd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -241,7 +241,7 @@ def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: Creates the given topic with the given name. See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). Returns: Callable[[~.Topic], diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 9174a20330b2..8d971fa3251c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -244,7 +244,7 @@ def create_topic(self) -> Callable[[pubsub.Topic], Awaitable[pubsub.Topic]]: Creates the given topic with the given name. 
See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). Returns: Callable[[~.Topic], diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index fbc5535f1cab..17b8bc1d2b20 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -238,17 +238,17 @@ async def create_subscription( ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). If - the subscription already exists, returns ``ALREADY_EXISTS``. If - the corresponding topic doesn't exist, returns ``NOT_FOUND``. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the - request. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. .. 
code-block:: python @@ -1804,8 +1804,8 @@ async def create_snapshot( the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. @@ -1846,7 +1846,7 @@ async def sample_create_snapshot(): project as the subscription. Note that for REST API requests, you must specify a name. See the `resource name - rules `__. + rules `__. Format is ``projects/{project}/snapshots/{snap}``. This corresponds to the ``name`` field diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 03ff4ea51725..f74e895a39d5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -519,17 +519,17 @@ def create_subscription( ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). If - the subscription already exists, returns ``ALREADY_EXISTS``. If - the corresponding topic doesn't exist, returns ``NOT_FOUND``. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. 
If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the - request. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. .. code-block:: python @@ -1961,8 +1961,8 @@ def create_snapshot( the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. @@ -2003,7 +2003,7 @@ def sample_create_snapshot(): project as the subscription. Note that for REST API requests, you must specify a name. See the `resource name - rules `__. + rules `__. Format is ``projects/{project}/snapshots/{snap}``. This corresponds to the ``name`` field diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 1bc5ce8ca1e1..0b1fcd3aab6a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -245,17 +245,17 @@ def create_subscription( Creates a subscription to a given topic. 
See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). If - the subscription already exists, returns ``ALREADY_EXISTS``. If - the corresponding topic doesn't exist, returns ``NOT_FOUND``. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the - request. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. Returns: Callable[[~.Subscription], @@ -617,8 +617,8 @@ def create_snapshot( the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 5ea0e13af4ba..d32730c1e2e6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -248,17 +248,17 @@ def create_subscription( Creates a subscription to a given topic. See the [resource name rules] - (https://cloud.google.com/pubsub/docs/admin#resource_names). If - the subscription already exists, returns ``ALREADY_EXISTS``. If - the corresponding topic doesn't exist, returns ``NOT_FOUND``. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + If the subscription already exists, returns ``ALREADY_EXISTS``. + If the corresponding topic doesn't exist, returns ``NOT_FOUND``. If the name is not provided in the request, the server will assign a random name for this subscription on the same project as the topic, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Subscription object. - Note that for REST API requests, you must specify a name in the - request. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). + The generated name is populated in the returned Subscription + object. Note that for REST API requests, you must specify a name + in the request. Returns: Callable[[~.Subscription], @@ -630,8 +630,8 @@ def create_snapshot( the request, the server will assign a random name for this snapshot on the same project as the subscription, conforming to the [resource name format] - (https://cloud.google.com/pubsub/docs/admin#resource_names). The - generated name is populated in the returned Snapshot object. + (https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names). 
+ The generated name is populated in the returned Snapshot object. Note that for REST API requests, you must specify a name in the request. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 0331d0c97f3a..d44778ddc642 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -84,14 +84,15 @@ class MessageStoragePolicy(proto.Message): Attributes: allowed_persistence_regions (MutableSequence[str]): - A list of IDs of GCP regions where messages - that are published to the topic may be persisted - in storage. Messages published by publishers - running in non-allowed GCP regions (or running - outside of GCP altogether) will be routed for - storage in one of the allowed regions. An empty - list means that no regions are allowed, and is - not a valid configuration. + A list of IDs of Google Cloud regions where + messages that are published to the topic may be + persisted in storage. Messages published by + publishers running in non-allowed Google Cloud + regions (or running outside of Google Cloud + altogether) are routed for storage in one of the + allowed regions. An empty list means that no + regions are allowed, and is not a valid + configuration. """ allowed_persistence_regions: MutableSequence[str] = proto.RepeatedField( @@ -1885,7 +1886,7 @@ class CreateSnapshotRequest(proto.Message): random name for this snapshot on the same project as the subscription. Note that for REST API requests, you must specify a name. See the `resource name - rules `__. + rules `__. Format is ``projects/{project}/snapshots/{snap}``. subscription (str): Required. 
The subscription whose backlog the snapshot diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 4980252293a1..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.18.4" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 6ced2805a9ed..6511e2f8b886 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -3370,8 +3370,9 @@ def test_create_topic_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3447,8 +3448,9 @@ def test_create_topic_rest_required_fields(request_type=pubsub.Topic): response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") 
req.return_value = response_value @@ -3567,8 +3569,9 @@ def test_create_topic_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3633,8 +3636,9 @@ def test_update_topic_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3705,8 +3709,9 @@ def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicReques response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3834,8 +3839,9 @@ def test_update_topic_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3899,8 +3905,9 @@ def test_publish_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.PublishResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3974,8 +3981,9 @@ def test_publish_rest_required_fields(request_type=pubsub.PublishRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.PublishResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4105,8 +4113,9 @@ def test_publish_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.PublishResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4173,8 +4182,9 @@ def test_get_topic_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4249,8 +4259,9 @@ def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4369,8 +4380,9 @@ def test_get_topic_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4433,8 +4445,9 @@ def test_list_topics_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4514,8 +4527,9 @@ def 
test_list_topics_rest_required_fields(request_type=pubsub.ListTopicsRequest) response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4644,8 +4658,9 @@ def test_list_topics_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4764,8 +4779,9 @@ def test_list_topic_subscriptions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4848,8 +4864,9 @@ def test_list_topic_subscriptions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + 
return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4980,8 +4997,9 @@ def test_list_topic_subscriptions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5103,8 +5121,9 @@ def test_list_topic_snapshots_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5187,8 +5206,9 @@ def test_list_topic_snapshots_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5319,8 +5339,9 @@ def 
test_list_topic_snapshots_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5682,8 +5703,9 @@ def test_detach_subscription_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.DetachSubscriptionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.DetachSubscriptionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5757,8 +5779,9 @@ def test_detach_subscription_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.DetachSubscriptionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.DetachSubscriptionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index ea09a79f36f3..c3585ae3bfad 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ 
b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -3466,6 +3466,73 @@ def test_create_schema_rest(request_type): "revision_id": "revision_id_value", "revision_create_time": {"seconds": 751, "nanos": 543}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gp_schema.CreateSchemaRequest.meta.fields["schema"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["schema"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and 
hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["schema"][field])): + del request_init["schema"][field][i][subfield] + else: + del request_init["schema"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -3481,8 +3548,9 @@ def test_create_schema_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3561,8 +3629,9 @@ def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaR response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -3654,13 +3723,6 @@ def test_create_schema_rest_bad_request( # send a request that will satisfy transcoding request_init = {"parent": "projects/sample1"} - request_init["schema"] = { - "name": "name_value", - "type_": 1, - "definition": "definition_value", - "revision_id": "revision_id_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -3700,8 +3762,9 @@ def test_create_schema_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3769,8 +3832,9 @@ def test_get_schema_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3848,8 +3912,9 @@ def test_get_schema_rest_required_fields(request_type=schema.GetSchemaRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -3970,8 +4035,9 @@ def test_get_schema_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4034,8 +4100,9 @@ def test_list_schemas_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ListSchemasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4116,8 +4183,9 @@ def test_list_schemas_rest_required_fields(request_type=schema.ListSchemasReques response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ListSchemasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4249,8 +4317,9 @@ def test_list_schemas_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = 
schema.ListSchemasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4368,8 +4437,9 @@ def test_list_schema_revisions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ListSchemaRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4452,8 +4522,9 @@ def test_list_schema_revisions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ListSchemaRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4587,8 +4658,9 @@ def test_list_schema_revisions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ListSchemaRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content 
= json_return_value.encode("UTF-8") req.return_value = response_value @@ -4712,8 +4784,9 @@ def test_commit_schema_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4790,8 +4863,9 @@ def test_commit_schema_rest_required_fields(request_type=gp_schema.CommitSchemaR response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4921,8 +4995,9 @@ def test_commit_schema_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4989,8 +5064,9 @@ def test_rollback_schema_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to 
protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5073,8 +5149,9 @@ def test_rollback_schema_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5204,8 +5281,9 @@ def test_rollback_schema_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5273,8 +5351,9 @@ def test_delete_schema_revision_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5354,8 +5433,9 @@ def test_delete_schema_revision_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = 
json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5479,8 +5559,9 @@ def test_delete_schema_revision_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5790,8 +5871,9 @@ def test_validate_schema_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.ValidateSchemaResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5866,8 +5948,9 @@ def test_validate_schema_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.ValidateSchemaResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6001,8 +6084,9 @@ def test_validate_schema_rest_flattened(): # 
Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = gp_schema.ValidateSchemaResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6065,8 +6149,9 @@ def test_validate_message_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ValidateMessageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6141,8 +6226,9 @@ def test_validate_message_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = schema.ValidateMessageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 058c33d9fe88..c31b98cd01ca 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -4796,8 +4796,9 @@ def 
test_create_subscription_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -4883,8 +4884,9 @@ def test_create_subscription_rest_required_fields(request_type=pubsub.Subscripti response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5016,8 +5018,9 @@ def test_create_subscription_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5091,8 +5094,9 @@ def test_get_subscription_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = 
pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5175,8 +5179,9 @@ def test_get_subscription_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5297,8 +5302,9 @@ def test_get_subscription_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5370,8 +5376,9 @@ def test_update_subscription_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5450,8 +5457,9 @@ def test_update_subscription_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5585,8 +5593,9 @@ def test_update_subscription_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5652,8 +5661,9 @@ def test_list_subscriptions_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5735,8 +5745,9 @@ def test_list_subscriptions_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -5869,8 +5880,9 @@ def 
test_list_subscriptions_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6777,8 +6789,9 @@ def test_pull_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.PullResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6855,8 +6868,9 @@ def test_pull_rest_required_fields(request_type=pubsub.PullRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.PullResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -6987,8 +7001,9 @@ def test_pull_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.PullResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + 
json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7329,8 +7344,9 @@ def test_get_snapshot_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7404,8 +7420,9 @@ def test_get_snapshot_rest_required_fields(request_type=pubsub.GetSnapshotReques response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7526,8 +7543,9 @@ def test_get_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7590,8 +7608,9 @@ def test_list_snapshots_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListSnapshotsResponse.pb(return_value) - 
json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7671,8 +7690,9 @@ def test_list_snapshots_rest_required_fields(request_type=pubsub.ListSnapshotsRe response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7803,8 +7823,9 @@ def test_list_snapshots_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.ListSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -7923,8 +7944,9 @@ def test_create_snapshot_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8005,8 +8027,9 
@@ def test_create_snapshot_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8136,8 +8159,9 @@ def test_create_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8202,8 +8226,9 @@ def test_update_snapshot_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8275,8 +8300,9 @@ def test_update_snapshot_rest_required_fields( response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value @@ -8406,8 +8432,9 @@ def test_update_snapshot_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8719,8 +8746,9 @@ def test_seek_rest(request_type): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.SeekResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value @@ -8793,8 +8821,9 @@ def test_seek_rest_required_fields(request_type=pubsub.SeekRequest): response_value = Response() response_value.status_code = 200 - pb_return_value = pubsub.SeekResponse.pb(return_value) - json_return_value = json_format.MessageToJson(pb_return_value) + # Convert return value to protobuf type + return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value From c8101ce5098563934892d16ba98a33ea4e35d232 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 20 Oct 2023 16:10:52 -0400 Subject: [PATCH 1013/1197] chore: rename rst files to avoid conflict with service names (#1011) Source-Link: 
https://github.com/googleapis/synthtool/commit/d52e638b37b091054c869bfa6f5a9fedaba9e0dd Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/requirements.txt | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index dd98abbdeebe..7f291dbd5f9b 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:08e34975760f002746b1d8c86fdc90660be45945ee6d9db914d1508acdf9a547 -# created: 2023-10-09T14:06:13.397766266Z + digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 +# created: 2023-10-18T20:26:37.410353675Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 0332d3267e15..16170d0ca7b8 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -467,9 +467,9 @@ typing-extensions==4.4.0 \ --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e # via -r requirements.in -urllib3==1.26.17 \ - --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \ - --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b +urllib3==1.26.18 \ + --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ + --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 # via # 
requests # twine From 5a9352472f2c2d36723a55a9fb2181aa2654ab99 Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Mon, 23 Oct 2023 10:14:02 -0400 Subject: [PATCH 1014/1197] chore: Set blunderbuss config to auto-assign issues and PRs (#1013) * samples: schema evolution * Add command-line commands * Fix tag for rollback * Make formatting fixes * Formatting fixes * Fix exceptions * fix: Set x-goog-request-params for streaming pull request * Set blunderbuss config to auto-assign issues and PRs --- packages/google-cloud-pubsub/.github/blunderbuss.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 packages/google-cloud-pubsub/.github/blunderbuss.yml diff --git a/packages/google-cloud-pubsub/.github/blunderbuss.yml b/packages/google-cloud-pubsub/.github/blunderbuss.yml new file mode 100644 index 000000000000..687c7f91b5b4 --- /dev/null +++ b/packages/google-cloud-pubsub/.github/blunderbuss.yml @@ -0,0 +1,6 @@ +# Configuration for the Blunderbuss GitHub app. For more info see +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/blunderbuss +assign_issues: + - pradn +assign_prs: + - pradn \ No newline at end of file From c209fd42847c072be5a5f819ff07bccdbc90d4dd Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 24 Oct 2023 23:38:50 +0200 Subject: [PATCH 1015/1197] chore(deps): update all dependencies (#1014) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index ba4002f684ba..0d4924032601 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,5 +1,5 @@ backoff==2.2.1 -pytest==7.4.2 +pytest==7.4.3 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.12.0 From eeb78950b4e666eba7a98af0484a6aa8c732d77d Mon Sep 17 00:00:00 2001 From: liuyunn Date: Tue, 31 Oct 2023 16:59:37 -0400 Subject: [PATCH 1016/1197] Samples: Add concurrency control sample (#1015) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Samples: Add concurrency control sample * fix format * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add unittest * fix format * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../samples/snippets/subscriber.py | 56 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 34 +++++++++++ 2 files changed, 90 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 8f39acb1390b..ab0c8aafa44b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -1003,6 +1003,49 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: # [END pubsub_dead_letter_delivery_attempt] +def receive_messages_with_concurrency_control( + project_id: str, subscription_id: str, timeout: Optional[float] = None +) -> None: + # [START pubsub_subscriber_concurrency_control] + from concurrent import futures + from google.cloud import pubsub_v1 + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + + # An optional executor to use. 
If not specified, a default one with maximum 10 + # threads will be created. + executor = futures.ThreadPoolExecutor(max_workers=5) + # A thread pool-based scheduler. It must not be shared across SubscriberClients. + scheduler = pubsub_v1.subscriber.scheduler.ThreadScheduler(executor) + + subscriber = pubsub_v1.SubscriberClient() + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + print(f"Received {message.data!r}.") + message.ack() + + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback, scheduler=scheduler + ) + print(f"Listening for messages on {subscription_path}..\n") + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # When `timeout` is not set, result() will block indefinitely, + # unless an exception is encountered first. + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + # [END pubsub_subscriber_concurrency_control] + + if __name__ == "__main__": # noqa parser = argparse.ArgumentParser( description=__doc__, @@ -1183,6 +1226,15 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: "timeout", default=None, type=float, nargs="?" ) + receive_messages_with_concurrency_control_parser = subparsers.add_parser( + "receive-messages-with-concurrency-control", + help=receive_messages_with_concurrency_control.__doc__, + ) + receive_messages_with_concurrency_control_parser.add_argument("subscription_id") + receive_messages_with_concurrency_control_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) + args = parser.parse_args() if args.command == "list-in-topic": @@ -1275,3 +1327,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: receive_messages_with_delivery_attempts( args.project_id, args.subscription_id, args.timeout ) + elif args.command == "receive-messages-with-concurrency-control": + receive_messages_with_concurrency_control( + args.project_id, args.subscription_id, args.timeout + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 3fa94761c6f7..53fefa109c0b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -1008,6 +1008,40 @@ def test_receive_synchronously( subscriber_client.delete_subscription(request={"subscription": subscription_path}) +def test_receive_messages_with_concurrency_control( + subscriber_client: pubsub_v1.SubscriberClient, + publisher_client: pubsub_v1.PublisherClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_async_receive_messages_with_concurrency_control_name = f"subscription-test-subscription-async-receive-messages-with-concurrency-control-{PY_VERSION}-{UUID}" + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_async_receive_messages_with_concurrency_control_name + ) + + try: + subscriber_client.get_subscription(request={"subscription": subscription_path}) + except NotFound: + subscriber_client.create_subscription( + request={"name": subscription_path, "topic": topic} + ) + + _ = _publish_messages(publisher_client, topic) + + subscriber.receive_messages_with_flow_control( + PROJECT_ID, subscription_async_receive_messages_with_concurrency_control_name, 5 + ) + + out, _ = capsys.readouterr() + assert "Listening" in out + assert subscription_async_receive_messages_with_concurrency_control_name in out + assert "message" in out + + # Clean up. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + @typed_flaky def test_receive_synchronously_with_lease( subscriber_client: pubsub_v1.SubscriberClient, From 1e11faeeb125b06502ea7e17f64250a3e0aeb10a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 2 Nov 2023 21:49:36 -0400 Subject: [PATCH 1017/1197] chore: update docfx minimum Python version (#1019) Source-Link: https://github.com/googleapis/synthtool/commit/bc07fd415c39853b382bcf8315f8eeacdf334055 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.github/workflows/docs.yml | 2 +- packages/google-cloud-pubsub/noxfile.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 7f291dbd5f9b..ec696b558c35 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4f9b3b106ad0beafc2c8a415e3f62c1a0cc23cabea115dbe841b848f581cfe99 -# created: 2023-10-18T20:26:37.410353675Z + digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 +# created: 2023-11-03T00:57:07.335914631Z diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index e97d89e484c9..221806cedf58 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -28,7 +28,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 4965d53c3e4a..ad452184a5ec 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -354,7 +354,7 @@ def docs(session): ) -@nox.session(python="3.9") +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" From 498e32303cc887f4be333cbd32f9b7b789a407fd Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 02:33:04 -0500 Subject: [PATCH 1018/1197] chore: bump urllib3 from 1.26.12 to 1.26.18 (#1022) Source-Link: https://github.com/googleapis/synthtool/commit/febacccc98d6d224aff9d0bd0373bb5a4cd5969c Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 532 +++++++++--------- 2 files changed, 277 insertions(+), 259 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index ec696b558c35..453b540c1e58 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:30470597773378105e239b59fce8eb27cc97375580d592699206d17d117143d0 -# created: 2023-11-03T00:57:07.335914631Z + digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 +# created: 2023-11-08T19:46:45.022803742Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 16170d0ca7b8..8957e21104e2 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -4,91 +4,75 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==2.0.0 \ - --hash=sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20 \ - --hash=sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e +argcomplete==3.1.4 \ + --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ + --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f # via nox -attrs==22.1.0 \ - --hash=sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6 \ - --hash=sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c +attrs==23.1.0 \ + --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ + --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 # via gcp-releasetool -bleach==5.0.1 \ - --hash=sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a \ - --hash=sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c - # via readme-renderer -cachetools==5.2.0 \ - 
--hash=sha256:6a94c6402995a99c3970cc7e4884bb60b4a8639938157eeed436098bf9831757 \ - --hash=sha256:f9f17d2aec496a9aa6b76f53e3b614c965223c061982d434d160f930c698a9db +cachetools==5.3.2 \ + --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ + --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 # via google-auth certifi==2023.7.22 \ --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 # via requests -cffi==1.15.1 \ - --hash=sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5 \ - --hash=sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef \ - --hash=sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104 \ - --hash=sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426 \ - --hash=sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405 \ - --hash=sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375 \ - --hash=sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a \ - --hash=sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e \ - --hash=sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc \ - --hash=sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf \ - --hash=sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185 \ - --hash=sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497 \ - --hash=sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3 \ - --hash=sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35 \ - --hash=sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c \ - --hash=sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83 \ - --hash=sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21 \ - 
--hash=sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca \ - --hash=sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984 \ - --hash=sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac \ - --hash=sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd \ - --hash=sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee \ - --hash=sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a \ - --hash=sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2 \ - --hash=sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192 \ - --hash=sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7 \ - --hash=sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585 \ - --hash=sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f \ - --hash=sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e \ - --hash=sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27 \ - --hash=sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b \ - --hash=sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e \ - --hash=sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e \ - --hash=sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d \ - --hash=sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c \ - --hash=sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415 \ - --hash=sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82 \ - --hash=sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02 \ - --hash=sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314 \ - --hash=sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325 \ - --hash=sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c \ - 
--hash=sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3 \ - --hash=sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914 \ - --hash=sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045 \ - --hash=sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d \ - --hash=sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9 \ - --hash=sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5 \ - --hash=sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2 \ - --hash=sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c \ - --hash=sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3 \ - --hash=sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2 \ - --hash=sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8 \ - --hash=sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d \ - --hash=sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d \ - --hash=sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9 \ - --hash=sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162 \ - --hash=sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76 \ - --hash=sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4 \ - --hash=sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e \ - --hash=sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9 \ - --hash=sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6 \ - --hash=sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b \ - --hash=sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01 \ - --hash=sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0 +cffi==1.16.0 \ + 
--hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ + --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ + --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ + --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ + --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ + --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ + --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ + --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ + --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ + --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ + --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ + --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ + --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ + --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ + --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ + --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ + --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ + --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ + --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ + --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ + --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ + --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ + --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ + --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ + 
--hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ + --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ + --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ + --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ + --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ + --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ + --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ + --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ + --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ + --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ + --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ + --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ + --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ + --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ + --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ + --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ + --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ + --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ + --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ + --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ + --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ + --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ + --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ + --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ + 
--hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ + --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ + --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ + --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -109,78 +93,74 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -commonmark==0.9.1 \ - --hash=sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60 \ - --hash=sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9 - # via rich -cryptography==41.0.4 \ - --hash=sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67 \ - --hash=sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311 \ - --hash=sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8 \ - --hash=sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13 \ - --hash=sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143 \ - --hash=sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f \ - --hash=sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829 \ - --hash=sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd \ - --hash=sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397 \ - --hash=sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac \ - --hash=sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d \ - --hash=sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a \ - --hash=sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839 \ - --hash=sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e \ - --hash=sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6 \ - 
--hash=sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9 \ - --hash=sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860 \ - --hash=sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca \ - --hash=sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91 \ - --hash=sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d \ - --hash=sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714 \ - --hash=sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb \ - --hash=sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f +cryptography==41.0.5 \ + --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ + --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ + --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ + --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ + --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ + --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ + --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ + --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ + --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ + --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ + --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ + --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ + --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ + --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ + --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ + 
--hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ + --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ + --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ + --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ + --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ + --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ + --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ + --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 # via # gcp-releasetool # secretstorage -distlib==0.3.6 \ - --hash=sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46 \ - --hash=sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e +distlib==0.3.7 \ + --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ + --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 # via virtualenv -docutils==0.19 \ - --hash=sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6 \ - --hash=sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc +docutils==0.20.1 \ + --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ + --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b # via readme-renderer -filelock==3.8.0 \ - --hash=sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc \ - --hash=sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4 +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c # via virtualenv -gcp-docuploader==0.6.4 \ - --hash=sha256:01486419e24633af78fd0167db74a2763974765ee8078ca6eb6964d0ebd388af \ - 
--hash=sha256:70861190c123d907b3b067da896265ead2eeb9263969d6955c9e0bb091b5ccbf +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.10.5 \ - --hash=sha256:174b7b102d704b254f2a26a3eda2c684fd3543320ec239baf771542a2e58e109 \ - --hash=sha256:e29d29927fe2ca493105a82958c6873bb2b90d503acac56be2c229e74de0eec9 +gcp-releasetool==1.16.0 \ + --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ + --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 # via -r requirements.in -google-api-core==2.10.2 \ - --hash=sha256:10c06f7739fe57781f87523375e8e1a3a4674bf6392cd6131a3222182b971320 \ - --hash=sha256:34f24bd1d5f72a8c4519773d99ca6bf080a6c4e041b4e9f024fe230191dda62e +google-api-core==2.12.0 \ + --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ + --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 # via # google-cloud-core # google-cloud-storage -google-auth==2.14.1 \ - --hash=sha256:ccaa901f31ad5cbb562615eb8b664b3dd0bf5404a67618e642307f00613eda4d \ - --hash=sha256:f5d8701633bebc12e0deea4df8abd8aff31c28b355360597f7f2ee60f2e4d016 +google-auth==2.23.4 \ + --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ + --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.2 \ - --hash=sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe \ - --hash=sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a +google-cloud-core==2.3.3 \ + --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ + --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 # via 
google-cloud-storage -google-cloud-storage==2.6.0 \ - --hash=sha256:104ca28ae61243b637f2f01455cc8a05e8f15a2a18ced96cb587241cdd3820f5 \ - --hash=sha256:4ad0415ff61abdd8bb2ae81c1f8f7ec7d91a1011613f2db87c614c550f97bfe9 +google-cloud-storage==2.13.0 \ + --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ + --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -251,29 +231,31 @@ google-crc32c==1.5.0 \ --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via google-resumable-media -google-resumable-media==2.4.0 \ - --hash=sha256:2aa004c16d295c8f6c33b2b4788ba59d366677c0a25ae7382436cb30f776deaa \ - --hash=sha256:8d5518502f92b9ecc84ac46779bd4f09694ecb3ba38a3e7ca737a86d15cbca1f + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.6.0 \ + --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ + --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b # via google-cloud-storage -googleapis-common-protos==1.57.0 \ - --hash=sha256:27a849d6205838fb6cc3c1c21cb9800707a661bb21c6ce7fb13e99eb1f8a0c46 \ - --hash=sha256:a9f4a1d7f6d9809657b7f1316a1aa527f6664891531bcfcc13b6696e685f443c +googleapis-common-protos==1.61.0 \ + --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ + --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core idna==3.4 \ --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 # via requests -importlib-metadata==5.0.0 \ - 
--hash=sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab \ - --hash=sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43 +importlib-metadata==6.8.0 \ + --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ + --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 # via # -r requirements.in # keyring # twine -jaraco-classes==3.2.3 \ - --hash=sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158 \ - --hash=sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a +jaraco-classes==3.3.0 \ + --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ + --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -285,75 +267,121 @@ jinja2==3.1.2 \ --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 # via gcp-releasetool -keyring==23.11.0 \ - --hash=sha256:3dd30011d555f1345dec2c262f0153f2f0ca6bca041fb1dc4588349bb4c0ac1e \ - --hash=sha256:ad192263e2cdd5f12875dedc2da13534359a7e760e77f8d04b50968a821c2361 +keyring==24.2.0 \ + --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ + --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 # via # gcp-releasetool # twine -markupsafe==2.1.1 \ - --hash=sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003 \ - --hash=sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88 \ - --hash=sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5 \ - --hash=sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7 \ - --hash=sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a \ - 
--hash=sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603 \ - --hash=sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1 \ - --hash=sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135 \ - --hash=sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247 \ - --hash=sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6 \ - --hash=sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601 \ - --hash=sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77 \ - --hash=sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02 \ - --hash=sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e \ - --hash=sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63 \ - --hash=sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f \ - --hash=sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980 \ - --hash=sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b \ - --hash=sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812 \ - --hash=sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff \ - --hash=sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96 \ - --hash=sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1 \ - --hash=sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925 \ - --hash=sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a \ - --hash=sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6 \ - --hash=sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e \ - --hash=sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f \ - --hash=sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4 \ - --hash=sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f \ - 
--hash=sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3 \ - --hash=sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c \ - --hash=sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a \ - --hash=sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417 \ - --hash=sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a \ - --hash=sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a \ - --hash=sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37 \ - --hash=sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452 \ - --hash=sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933 \ - --hash=sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a \ - --hash=sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7 +markdown-it-py==3.0.0 \ + --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ + --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb + # via rich +markupsafe==2.1.3 \ + --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ + --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ + --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ + --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ + --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ + --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ + --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ + --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ + --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ + --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ + 
--hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ + --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ + --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ + --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ + --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ + --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ + --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ + --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ + --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ + --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ + --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ + --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ + --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ + --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ + --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ + --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ + --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ + --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ + --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ + --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ + --hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ + --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ + --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ + --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ + 
--hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ + --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ + --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ + --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ + --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ + --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ + --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ + --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ + --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ + --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ + --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ + --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ + --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ + --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ + --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ + --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ + --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ + --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ + --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ + --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ + --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ + --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ + --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ + --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ + 
--hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ + --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 # via jinja2 -more-itertools==9.0.0 \ - --hash=sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41 \ - --hash=sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab +mdurl==0.1.2 \ + --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ + --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba + # via markdown-it-py +more-itertools==10.1.0 \ + --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ + --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 # via jaraco-classes -nox==2022.11.21 \ - --hash=sha256:0e41a990e290e274cb205a976c4c97ee3c5234441a8132c8c3fd9ea3c22149eb \ - --hash=sha256:e21c31de0711d1274ca585a2c5fde36b1aa962005ba8e9322bf5eeed16dcd684 +nh3==0.2.14 \ + --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ + --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ + --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ + --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ + --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ + --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ + --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ + --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ + --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ + --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ + --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ + --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ + 
--hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ + --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ + --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ + --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 + # via readme-renderer +nox==2023.4.22 \ + --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ + --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f # via -r requirements.in -packaging==21.3 \ - --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ - --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +packaging==23.2 \ + --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ + --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 # via # gcp-releasetool # nox -pkginfo==1.8.3 \ - --hash=sha256:848865108ec99d4901b2f7e84058b6e7660aae8ae10164e015a6dcf5b242a594 \ - --hash=sha256:a84da4318dd86f870a9447a8c98340aa06216bfc6f2b7bdc4b8766984ae1867c +pkginfo==1.9.6 \ + --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ + --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 # via twine -platformdirs==2.5.4 \ - --hash=sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7 \ - --hash=sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10 +platformdirs==3.11.0 \ + --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ + --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv protobuf==3.20.3 \ --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ @@ -383,34 +411,30 @@ protobuf==3.20.3 \ # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.4.8 \ - 
--hash=sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d \ - --hash=sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba +pyasn1==0.5.0 \ + --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ + --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde # via # pyasn1-modules # rsa -pyasn1-modules==0.2.8 \ - --hash=sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e \ - --hash=sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74 +pyasn1-modules==0.3.0 \ + --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ + --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d # via google-auth pycparser==2.21 \ --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 # via cffi -pygments==2.15.0 \ - --hash=sha256:77a3299119af881904cd5ecd1ac6a66214b6e9bed1f2db16993b54adede64094 \ - --hash=sha256:f7e36cffc4c517fbc252861b9a6e4644ca0e5abadf9a113c72d1358ad09b9500 +pygments==2.16.1 \ + --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ + --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 # via # readme-renderer # rich -pyjwt==2.6.0 \ - --hash=sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd \ - --hash=sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14 +pyjwt==2.8.0 \ + --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ + --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyparsing==3.0.9 \ - --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ - --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc - # via packaging pyperclip==1.8.2 \ 
--hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 # via gcp-releasetool @@ -418,9 +442,9 @@ python-dateutil==2.8.2 \ --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 # via gcp-releasetool -readme-renderer==37.3 \ - --hash=sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273 \ - --hash=sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343 +readme-renderer==42.0 \ + --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ + --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 # via twine requests==2.31.0 \ --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ @@ -431,17 +455,17 @@ requests==2.31.0 \ # google-cloud-storage # requests-toolbelt # twine -requests-toolbelt==0.10.1 \ - --hash=sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7 \ - --hash=sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d +requests-toolbelt==1.0.0 \ + --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ + --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 # via twine rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==12.6.0 \ - --hash=sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e \ - --hash=sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0 +rich==13.6.0 \ + --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ + --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -455,43 +479,37 @@ six==1.16.0 \ 
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 # via - # bleach # gcp-docuploader - # google-auth # python-dateutil -twine==4.0.1 \ - --hash=sha256:42026c18e394eac3e06693ee52010baa5313e4811d5a11050e7d48436cf41b9e \ - --hash=sha256:96b1cf12f7ae611a4a40b6ae8e9570215daff0611828f5fe1f37a16255ab24a0 +twine==4.0.2 \ + --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ + --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 # via -r requirements.in -typing-extensions==4.4.0 \ - --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ - --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +typing-extensions==4.8.0 \ + --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ + --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef # via -r requirements.in -urllib3==1.26.18 \ - --hash=sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07 \ - --hash=sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0 +urllib3==2.0.7 \ + --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ + --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e # via # requests # twine -virtualenv==20.16.7 \ - --hash=sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e \ - --hash=sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29 +virtualenv==20.24.6 \ + --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ + --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 # via nox -webencodings==0.5.1 \ - --hash=sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78 \ - --hash=sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923 - # via bleach 
-wheel==0.38.4 \ - --hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \ - --hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8 +wheel==0.41.3 \ + --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ + --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 # via -r requirements.in -zipp==3.10.0 \ - --hash=sha256:4fcb6f278987a6605757302a6e40e896257570d11c51628968ccb2a47e80c6c1 \ - --hash=sha256:7a7262fd930bd3e36c50b9a64897aec3fafff3dfdeec9623ae22b40e93f99bb8 +zipp==3.17.0 \ + --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ + --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==65.5.1 \ - --hash=sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31 \ - --hash=sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f +setuptools==68.2.2 \ + --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ + --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a # via -r requirements.in From 16080eddb1a08fcd5dfb8acb74c1e333956608ce Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 30 Nov 2023 11:38:28 -0500 Subject: [PATCH 1019/1197] feat: Introduce compatibility with native namespace packages (#1024) * feat: Introduce compatibility with native namespace packages * use find_namespace_packages in setup.py --- .../google-cloud-pubsub/google/__init__.py | 24 ------------ .../google/cloud/__init__.py | 26 ------------- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 4 +- packages/google-cloud-pubsub/setup.py | 7 +--- .../tests/unit/test_packaging.py | 37 +++++++++++++++++++ 6 files changed, 41 insertions(+), 59 deletions(-) delete mode 100644 
packages/google-cloud-pubsub/google/__init__.py delete mode 100644 packages/google-cloud-pubsub/google/cloud/__init__.py create mode 100644 packages/google-cloud-pubsub/tests/unit/test_packaging.py diff --git a/packages/google-cloud-pubsub/google/__init__.py b/packages/google-cloud-pubsub/google/__init__.py deleted file mode 100644 index 9a1b64a6d586..000000000000 --- a/packages/google-cloud-pubsub/google/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/packages/google-cloud-pubsub/google/cloud/__init__.py b/packages/google-cloud-pubsub/google/cloud/__init__.py deleted file mode 100644 index e1f8a4d20fd1..000000000000 --- a/packages/google-cloud-pubsub/google/cloud/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import List - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__: List[str] = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index ad452184a5ec..f2cea17e9aac 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -106,7 +106,7 @@ def mypy(session): # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. # https://github.com/googleapis/gapic-generator-python/issues/1092 - session.run("mypy", "google/cloud") + session.run("mypy", "-p", "google.cloud") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 7539adfdb4ca..451be6e99ef4 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -383,7 +383,7 @@ def mypy(session): # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. 
# https://github.com/googleapis/gapic-generator-python/issues/1092 - session.run("mypy", "google/cloud")''' + session.run("mypy", "-p", "google.cloud")''' ), ) @@ -399,7 +399,7 @@ def mypy(session): ) s.replace( "noxfile.py", - r'session\.run\("mypy", "google/cloud"\)', + r'session\.run\("mypy", "-p", "google.cloud"\)', textwrap.dedent( ''' \g<0> diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 7d70e25bc48a..72b859321950 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -56,14 +56,10 @@ packages = [ package - for package in setuptools.PEP420PackageFinder.find() + for package in setuptools.find_namespace_packages() if package.startswith("google") ] -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - setuptools.setup( name=name, version=version, @@ -89,7 +85,6 @@ ], platforms="Posix; MacOS X; Windows", packages=packages, - namespace_packages=namespaces, install_requires=dependencies, extras_require=extras, python_requires=">=3.7", diff --git a/packages/google-cloud-pubsub/tests/unit/test_packaging.py b/packages/google-cloud-pubsub/tests/unit/test_packaging.py new file mode 100644 index 000000000000..6dc70e3d1ab6 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/test_packaging.py @@ -0,0 +1,37 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-cloud-pubsub``. + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) + + # The ``google.cloud`` namespace package should not be masked + # by the presence of ``google-cloud-pubsub``. + google_cloud = tmp_path / "google" / "cloud" + google_cloud.mkdir() + google_cloud.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.cloud.othermod"] + subprocess.check_call(cmd, env=env) From 600d023a8af5cb9f29c996bae332368175486f3a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 12:56:58 -0500 Subject: [PATCH 1020/1197] feat: Add support for Python 3.12 (#1025) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(python): Add Python 3.12 Source-Link: https://github.com/googleapis/synthtool/commit/af16e6d4672cc7b400f144de2fc3068b54ff47d2 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 * Add python 3.12 to setup.py and owlbot.py * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix incorrect usage of assert_called_once() * update required checks --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/sync-repo-settings.yaml | 12 ++++++ .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.12/common.cfg | 40 +++++++++++++++++++ 
.../.kokoro/samples/python3.12/continuous.cfg | 6 +++ .../samples/python3.12/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.12/periodic.cfg | 6 +++ .../.kokoro/samples/python3.12/presubmit.cfg | 6 +++ packages/google-cloud-pubsub/CONTRIBUTING.rst | 6 ++- packages/google-cloud-pubsub/noxfile.py | 6 +-- packages/google-cloud-pubsub/owlbot.py | 5 +-- .../samples/snippets/noxfile.py | 2 +- packages/google-cloud-pubsub/setup.py | 1 + .../publisher/test_publisher_client.py | 2 +- 14 files changed, 95 insertions(+), 14 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 453b540c1e58..eb4d9f794dc1 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61 -# created: 2023-11-08T19:46:45.022803742Z + digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 +# created: 2023-11-23T18:17:28.105124211Z diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index edda0b5707ce..77c1a4fb5cf4 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -15,4 +15,16 @@ branchProtectionRules: - 'Samples - Python 3.8' - 'Samples - Python 3.9' - 'Samples - Python 3.10' + - 'Samples - Python 3.11' + - 'Samples - Python 3.12' - 'OwlBot Post Processor' + - 'docs' + - 'docfx' + - 'lint' + - 'unit (3.7)' + - 'unit (3.8)' + - 'unit (3.9)' + - 'unit (3.10)' + - 'unit (3.11)' + - 'unit (3.12)' + - 'cover' diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 8057a7691b12..a32027b49bc2 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg new file mode 100644 index 000000000000..ae61007721b8 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run 
+env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.12" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-312" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg 
b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 59283006ed85..03a700296d22 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10 and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.12 -- -k .. note:: @@ -226,12 +226,14 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. 
_Python 3.12: https://docs.python.org/3.12/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index f2cea17e9aac..3f9db8b2c73d 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -36,9 +36,9 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] UNIT_TEST_STANDARD_DEPENDENCIES = [ - "mock==5.0.0", + "mock", "asyncmock", "pytest", "pytest-cov", @@ -52,7 +52,7 @@ SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.10"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ - "mock==5.0.0", + "mock", "pytest", "google-cloud-testutils", ] diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 451be6e99ef4..33868758872d 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -336,7 +336,7 @@ samples=True, cov_level=100, versions=gcp.common.detect_versions(path="./google", default_first=True), - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11"], + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], system_test_python_versions=["3.10"], system_test_external_dependencies=["psutil","flaky"], ) @@ -353,9 +353,6 @@ s.replace( "noxfile.py", r'"blacken",', '\g<0>\n "mypy",', ) -s.replace( - "noxfile.py", r'"mock"', '"mock==5.0.0"', -) s.replace( "noxfile.py", r"nox\.options\.error_on_missing_interpreters = True", diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 1224cbe212e4..3b7135946fd5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT 
EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 72b859321950..d388d9443d93 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -80,6 +80,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 6c68c3943aa0..91c556cd66d1 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -636,7 +636,7 @@ def test_resume_publish(creds): client._set_sequencer(topic=topic, sequencer=sequencer, ordering_key=ordering_key) client.resume_publish(topic, ordering_key) - assert sequencer.unpause.called_once() + sequencer.unpause.assert_called_once() def test_resume_publish_no_sequencer_found(creds): From 3be090dc6f2e337b0e92fca8a005dd4eaf4bf748 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:05:41 -0500 Subject: [PATCH 1021/1197] chore: bump cryptography from 41.0.5 to 41.0.6 in /synthtool/gcp/templates/python_library/.kokoro (#1029) Source-Link: https://github.com/googleapis/synthtool/commit/9367caadcbb30b5b2719f30eb00c44cc913550ed Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 48 +++++++++---------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index eb4d9f794dc1..773c1dfd2146 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:bacc3af03bff793a03add584537b36b5644342931ad989e3ba1171d3bd5399f5 -# created: 2023-11-23T18:17:28.105124211Z + digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c +# created: 2023-11-29T14:54:29.548172703Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 8957e21104e2..e5c1ffca94b7 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -93,30 +93,30 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - 
--hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 +cryptography==41.0.6 \ + --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ + --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ + --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ + --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ + --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ + --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ + --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ + 
--hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ + --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ + --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ + --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ + --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ + --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ + --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ + --hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ + --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ + --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ + --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ + --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ + --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ + --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ + --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ + --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae # via # gcp-releasetool # secretstorage From eb9b971264372d63e5782433e8c7213c013e86c7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 30 Nov 2023 20:20:30 +0100 Subject: [PATCH 1022/1197] chore(deps): update all dependencies (#1016) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Prad Nelluru --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 0d4924032601..2dbe36372418 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,5 +2,5 @@ backoff==2.2.1 pytest==7.4.3 mock==5.1.0 flaky==3.7.0 -google-cloud-bigquery==3.12.0 +google-cloud-bigquery==3.13.0 google-cloud-storage==2.12.0 From eff362424550f09c396adbb29bd037249aaa1837 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:47:02 -0500 Subject: [PATCH 1023/1197] fix: use `retry_async` instead of `retry` in async client (#1030) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: Update gapic-generator-python to v1.12.0 PiperOrigin-RevId: 586356061 Source-Link: https://github.com/googleapis/googleapis/commit/72a1f55abaedbb62decd8ae8a44a4de223799c76 Source-Link: https://github.com/googleapis/googleapis-gen/commit/558a04bcd1cc0576e8fac1089e48e48b27ac161b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNTU4YTA0YmNkMWNjMDU3NmU4ZmFjMTA4OWU0OGU0OGIyN2FjMTYxYiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * chore: Update gapic-generator-python to v1.13.0 PiperOrigin-RevId: 586460538 Source-Link: https://github.com/googleapis/googleapis/commit/44582d0577fdc95dd2af37628a0569e16aac0bfe Source-Link: https://github.com/googleapis/googleapis-gen/commit/5e7073c9de847929c4ae97f8a444c3fca2d45a6b Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNWU3MDczYzlkZTg0NzkyOWM0YWU5N2Y4YTQ0NGMzZmNhMmQ0NWE2YiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios 
Partheniou --- .../services/publisher/async_client.py | 49 ++++++------ .../services/schema_service/async_client.py | 52 ++++++------- .../services/subscriber/async_client.py | 76 +++++++++---------- 3 files changed, 88 insertions(+), 89 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 28a247ccaa82..ae632ea9616c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -33,15 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.api_core import timeout as timeouts # type: ignore +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -274,7 +273,7 @@ async def sample_create_topic(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. 
@@ -306,7 +305,7 @@ async def sample_create_topic(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_topic, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -398,7 +397,7 @@ async def sample_update_topic(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -432,7 +431,7 @@ async def sample_update_topic(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_topic, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -519,7 +518,7 @@ async def sample_publish(): This corresponds to the ``messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -553,7 +552,7 @@ async def sample_publish(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.publish, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=4.0, @@ -636,7 +635,7 @@ async def sample_get_topic(): This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (TimeoutType): The timeout for this request. @@ -668,7 +667,7 @@ async def sample_get_topic(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_topic, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -748,7 +747,7 @@ async def sample_list_topics(): This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -784,7 +783,7 @@ async def sample_list_topics(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_topics, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -875,7 +874,7 @@ async def sample_list_topic_subscriptions(): This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -911,7 +910,7 @@ async def sample_list_topic_subscriptions(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_topic_subscriptions, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1006,7 +1005,7 @@ async def sample_list_topic_snapshots(): This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -1042,7 +1041,7 @@ async def sample_list_topic_snapshots(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_topic_snapshots, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1132,7 +1131,7 @@ async def sample_delete_topic(): This corresponds to the ``topic`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -1160,7 +1159,7 @@ async def sample_delete_topic(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_topic, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1231,7 +1230,7 @@ async def sample_detach_subscription(): request (Optional[Union[google.pubsub_v1.types.DetachSubscriptionRequest, dict]]): The request object. Request for the DetachSubscription method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -1251,7 +1250,7 @@ async def sample_detach_subscription(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.detach_subscription, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1299,7 +1298,7 @@ async def set_iam_policy( request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -1420,7 +1419,7 @@ async def get_iam_policy( request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. @@ -1543,7 +1542,7 @@ async def test_iam_permissions( request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 59773b3858f2..2d4b5d77ba60 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -33,14 +33,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -291,7 +291,7 @@ async def sample_create_schema(): This corresponds to the ``schema_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -326,7 +326,7 @@ async def sample_create_schema(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_schema, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -403,7 +403,7 @@ async def sample_get_schema(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -434,7 +434,7 @@ async def sample_get_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_schema, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -512,7 +512,7 @@ async def sample_list_schemas(): This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -547,7 +547,7 @@ async def sample_list_schemas(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_schemas, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -634,7 +634,7 @@ async def sample_list_schema_revisions(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -669,7 +669,7 @@ async def sample_list_schema_revisions(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_schema_revisions, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -767,7 +767,7 @@ async def sample_commit_schema(): This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -800,7 +800,7 @@ async def sample_commit_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.commit_schema, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -890,7 +890,7 @@ async def sample_rollback_schema(): This corresponds to the ``revision_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -923,7 +923,7 @@ async def sample_rollback_schema(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.rollback_schema, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1011,7 +1011,7 @@ async def sample_delete_schema_revision(): This corresponds to the ``revision_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1044,7 +1044,7 @@ async def sample_delete_schema_revision(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_schema_revision, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1118,7 +1118,7 @@ async def sample_delete_schema(): This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1145,7 +1145,7 @@ async def sample_delete_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_schema, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1231,7 +1231,7 @@ async def sample_validate_schema(): This corresponds to the ``schema`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1266,7 +1266,7 @@ async def sample_validate_schema(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.validate_schema, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1336,7 +1336,7 @@ async def sample_validate_message(): Args: request (Optional[Union[google.pubsub_v1.types.ValidateMessageRequest, dict]]): The request object. Request for the ``ValidateMessage`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1355,7 +1355,7 @@ async def sample_validate_message(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.validate_message, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1401,7 +1401,7 @@ async def set_iam_policy( request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1521,7 +1521,7 @@ async def get_iam_policy( request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1643,7 +1643,7 @@ async def test_iam_permissions( request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 17b8bc1d2b20..13f30bd8937b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -37,14 +37,14 @@ from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 -from google.api_core import retry as retries +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -344,7 
+344,7 @@ async def sample_create_subscription(): This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -385,7 +385,7 @@ async def sample_create_subscription(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_subscription, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -465,7 +465,7 @@ async def sample_get_subscription(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -500,7 +500,7 @@ async def sample_get_subscription(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_subscription, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -597,7 +597,7 @@ async def sample_update_subscription(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -634,7 +634,7 @@ async def sample_update_subscription(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_subscription, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -714,7 +714,7 @@ async def sample_list_subscriptions(): This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -749,7 +749,7 @@ async def sample_list_subscriptions(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_subscriptions, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -840,7 +840,7 @@ async def sample_delete_subscription(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -867,7 +867,7 @@ async def sample_delete_subscription(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_subscription, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -971,7 +971,7 @@ async def sample_modify_ack_deadline(): This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1002,7 +1002,7 @@ async def sample_modify_ack_deadline(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.modify_ack_deadline, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1093,7 +1093,7 @@ async def sample_acknowledge(): This corresponds to the ``ack_ids`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1122,7 +1122,7 @@ async def sample_acknowledge(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.acknowledge, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1226,7 +1226,7 @@ async def sample_pull(): This corresponds to the ``max_messages`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1267,7 +1267,7 @@ async def sample_pull(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.pull, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1363,7 +1363,7 @@ def request_generator(): This request is used to establish the initial stream as well as to stream acknowledgements and ack deadline modifications from the client to the server. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1380,7 +1380,7 @@ def request_generator(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.streaming_pull, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=4.0, @@ -1472,7 +1472,7 @@ async def sample_modify_push_config(): This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1501,7 +1501,7 @@ async def sample_modify_push_config(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.modify_push_config, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1582,7 +1582,7 @@ async def sample_get_snapshot(): This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1619,7 +1619,7 @@ async def sample_get_snapshot(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_snapshot, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1703,7 +1703,7 @@ async def sample_list_snapshots(): This corresponds to the ``project`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1738,7 +1738,7 @@ async def sample_list_snapshots(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_snapshots, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1867,7 +1867,7 @@ async def sample_create_snapshot(): This corresponds to the ``subscription`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -1906,7 +1906,7 @@ async def sample_create_snapshot(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_snapshot, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -1996,7 +1996,7 @@ async def sample_update_snapshot(): This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2035,7 +2035,7 @@ async def sample_update_snapshot(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_snapshot, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2120,7 +2120,7 @@ async def sample_delete_snapshot(): This corresponds to the ``snapshot`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2147,7 +2147,7 @@ async def sample_delete_snapshot(): # and friendly error handling. 
rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_snapshot, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2221,7 +2221,7 @@ async def sample_seek(): Args: request (Optional[Union[google.pubsub_v1.types.SeekRequest, dict]]): The request object. Request for the ``Seek`` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2238,7 +2238,7 @@ async def sample_seek(): # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.seek, - default_retry=retries.Retry( + default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, multiplier=1.3, @@ -2288,7 +2288,7 @@ async def set_iam_policy( request (:class:`~.policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2408,7 +2408,7 @@ async def get_iam_policy( request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be @@ -2530,7 +2530,7 @@ async def test_iam_permissions( request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. 
Request message for `TestIamPermissions` method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be From 6b884449abf889520c14316b1f74298d55f0d597 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 30 Nov 2023 22:11:48 +0100 Subject: [PATCH 1024/1197] chore(deps): update all dependencies (#1032) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * pin protobuf for python 3.7 - See https://github.com/protocolbuffers/protobuf/pull/14148 --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/requirements-test.txt | 2 +- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 2dbe36372418..b3325ed9ffcd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -3,4 +3,4 @@ pytest==7.4.3 mock==5.1.0 flaky==3.7.0 google-cloud-bigquery==3.13.0 -google-cloud-storage==2.12.0 +google-cloud-storage==2.13.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 683de05573f6..a3124ddce13c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,5 @@ 
google-cloud-pubsub==2.18.4 avro==1.11.3 -protobuf==4.24.4 +protobuf===4.24.4; python_version == '3.7' +protobuf==4.25.1; python_version >= '3.8' avro==1.11.3 From fa9c894e9786c2f4dbd5237296603c705d12a52b Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 2 Dec 2023 11:31:49 -0500 Subject: [PATCH 1025/1197] feat: add `use_table_schema` field to BigQueryConfig (#1035) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: add `use_table_schema` field to BigQueryConfig PiperOrigin-RevId: 587079085 Source-Link: https://github.com/googleapis/googleapis/commit/95fabe6ae89c9206e89fd38a4d5c0f40c13bedfb Source-Link: https://github.com/googleapis/googleapis-gen/commit/90b35e9d8bc6780a80db3bababefc29072fa3506 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiOTBiMzVlOWQ4YmM2NzgwYTgwZGIzYmFiYWJlZmMyOTA3MmZhMzUwNiJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot --- .../google/pubsub_v1/types/pubsub.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index d44778ddc642..ff7becd483fb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1111,8 +1111,9 @@ class BigQueryConfig(proto.Message): The name of the table to which to write data, of the form {projectId}.{datasetId}.{tableId} use_topic_schema (bool): - When true, use the topic's schema as the - columns to write to in BigQuery, if it exists. + Optional. When true, use the topic's schema as the columns + to write to in BigQuery, if it exists. ``use_topic_schema`` + and ``use_table_schema`` cannot be enabled at the same time. 
write_metadata (bool): When true, write the subscription name, message_id, publish_time, attributes, and ordering_key to additional @@ -1131,6 +1132,10 @@ class BigQueryConfig(proto.Message): Output only. An output-only field that indicates whether or not the subscription can receive messages. + use_table_schema (bool): + Optional. When true, use the BigQuery table's schema as the + columns to write to in BigQuery. ``use_table_schema`` and + ``use_topic_schema`` cannot be enabled at the same time. """ class State(proto.Enum): @@ -1186,6 +1191,10 @@ class State(proto.Enum): number=5, enum=State, ) + use_table_schema: bool = proto.Field( + proto.BOOL, + number=6, + ) class CloudStorageConfig(proto.Message): From eef82a36afc899ab708688c0f039290cdfe327e5 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 5 Dec 2023 09:52:19 -0500 Subject: [PATCH 1026/1197] build: treat warnings as errors (#1033) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build: treat warnings as errors * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * lint * fix warning in samples * remove warning from pytest.ini which doesn't have a tracking bug --------- Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/pytest.ini | 17 +++++++++++++++++ .../samples/snippets/schema.py | 9 ++++++--- .../unit/gapic/pubsub_v1/test_subscriber.py | 4 ---- .../pubsub_v1/publisher/test_flow_controller.py | 8 +++++--- .../publisher/test_publisher_client.py | 5 ++--- .../pubsub_v1/subscriber/test_dispatcher.py | 3 ++- .../unit/pubsub_v1/subscriber/test_scheduler.py | 6 ++++-- .../subscriber/test_subscriber_client.py | 17 ++++++++++------- 8 files changed, 46 insertions(+), 23 deletions(-) create mode 100644 packages/google-cloud-pubsub/pytest.ini diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini new file mode 100644 index 
000000000000..fd477ac99af9 --- /dev/null +++ b/packages/google-cloud-pubsub/pytest.ini @@ -0,0 +1,17 @@ +[pytest] +filterwarnings = + # treat all warnings as errors + error + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning + # Remove once https://github.com/googleapis/python-api-common-protos/pull/191 is merged + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged + ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers + # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed + ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel + # Remove once a version of grpcio newer than 1.59.3 is released to PyPI + ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 40cd853c9cf8..3260a0e19a3c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -473,7 +473,8 @@ def publish_avro_records(project_id: str, topic_id: str, avsc_file: str) -> None topic_path = publisher_client.topic_path(project_id, topic_id) # Prepare to write Avro records to the binary output stream. 
- avro_schema = schema.parse(open(avsc_file, "rb").read()) + with open(avsc_file, "rb") as file: + avro_schema = schema.parse(file.read()) writer = DatumWriter(avro_schema) bout = io.BytesIO() @@ -579,7 +580,8 @@ def subscribe_with_avro_schema( subscriber = SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - avro_schema = schema.parse(open(avsc_file, "rb").read()) + with open(avsc_file, "rb") as file: + avro_schema = schema.parse(file.read()) def callback(message: pubsub_v1.subscriber.message.Message) -> None: # Get the message serialization type. @@ -642,7 +644,8 @@ def subscribe_with_avro_schema_with_revisions( subscriber = SubscriberClient() subscription_path = subscriber.subscription_path(project_id, subscription_id) - writer_avro_schema = schema.parse(open(avsc_file, "rb").read()) + with open(avsc_file, "rb") as file: + writer_avro_schema = schema.parse(file.read()) # Dict to keep readers for different schema revisions. revisions_to_readers = {} diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index c31b98cd01ca..94f7e301e978 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -2888,7 +2888,6 @@ def test_pull_flattened_error(): client.pull( pubsub.PullRequest(), subscription="subscription_value", - return_immediately=True, max_messages=1277, ) @@ -2942,7 +2941,6 @@ async def test_pull_flattened_error_async(): await client.pull( pubsub.PullRequest(), subscription="subscription_value", - return_immediately=True, max_messages=1277, ) @@ -6993,7 +6991,6 @@ def test_pull_rest_flattened(): # get truthy value for each flattened field mock_args = dict( subscription="subscription_value", - return_immediately=True, max_messages=1277, ) mock_args.update(sample_request) @@ -7032,7 +7029,6 @@ 
def test_pull_rest_flattened_error(transport: str = "rest"): client.pull( pubsub.PullRequest(), subscription="subscription_value", - return_immediately=True, max_messages=1277, ) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py index ee923a435dec..776c6db4124f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_flow_controller.py @@ -19,7 +19,6 @@ from typing import Callable from typing import Sequence from typing import Union -import warnings import pytest @@ -179,7 +178,10 @@ def test_incorrectly_releasing_too_many_messages(): msg3 = grpc_types.PubsubMessage(data=b"z" * 100) # Releasing a message that would make the load negative should result in a warning. - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + RuntimeWarning, + match="Releasing a message that was never added or already released", + ) as warned: flow_controller.release(msg1) assert len(warned) == 1 @@ -438,7 +440,7 @@ def test_warning_on_internal_reservation_stats_error_when_unblocking(): assert reservation is not None, "No messages blocked by flow controller." 
reservation.bytes_reserved = reservation.bytes_needed + 1 - with warnings.catch_warnings(record=True) as warned: + with pytest.warns(RuntimeWarning, match="Too many bytes reserved.") as warned: _run_in_daemon(flow_controller.release, [msg1], releasing_1_done) if not releasing_1_done.wait(timeout=0.1): pytest.fail("Releasing a message blocked or errored.") # pragma: NO COVER diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 91c556cd66d1..5d6013654076 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -28,7 +28,6 @@ import pytest import time -import warnings from google.api_core import gapic_v1 from google.api_core import retry as retries @@ -60,7 +59,7 @@ def _assert_retries_equal(retry, retry2): def test_api_property_deprecated(creds): client = publisher.Client(credentials=creds) - with warnings.catch_warnings(record=True) as warned: + with pytest.warns(DeprecationWarning, match="client.api") as warned: client.api assert len(warned) == 1 @@ -72,7 +71,7 @@ def test_api_property_deprecated(creds): def test_api_property_proxy_to_generated_client(creds): client = publisher.Client(credentials=creds) - with warnings.catch_warnings(record=True): + with pytest.warns(DeprecationWarning, match="client.api"): api_object = client.api # Not a perfect check, but we are satisficed if the returned API object indeed diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index d4813911c25a..89d72c61d39a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -361,7 
+361,8 @@ def test_unknown_request_type(): items = ["a random string, not a known request type"] manager.send_unary_ack.return_value = (items, []) - dispatcher_.dispatch_callback(items) + with pytest.warns(RuntimeWarning, match="Skipping unknown request item of type"): + dispatcher_.dispatch_callback(items) def test_ack(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index ff76fa09d8d0..3ed1978c1ecb 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -14,10 +14,10 @@ import concurrent.futures import queue +import pytest import sys import threading import time -import warnings # special case python < 3.8 if sys.version_info.major == 3 and sys.version_info.minor < 8: @@ -76,7 +76,9 @@ def callback(*args, **kwargs): scheduler_.schedule(callback, "arg1", kwarg1="meep") scheduler_._executor.shutdown() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + RuntimeWarning, match="Scheduling a callback after executor shutdown" + ) as warned: scheduler_.schedule(callback, "arg2", kwarg2="boop") assert len(warned) == 1 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 83ef3f06dc5f..bedfde79ab66 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -13,7 +13,6 @@ # limitations under the License. 
import sys -import warnings import grpc @@ -218,7 +217,7 @@ def test_context_manager_raises_if_closed(creds): def test_api_property_deprecated(creds): client = subscriber.Client(credentials=creds) - with warnings.catch_warnings(record=True) as warned: + with pytest.warns(DeprecationWarning, match="client.api") as warned: client.api assert len(warned) == 1 @@ -230,7 +229,7 @@ def test_api_property_deprecated(creds): def test_api_property_proxy_to_generated_client(creds): client = subscriber.Client(credentials=creds) - with warnings.catch_warnings(record=True): + with pytest.warns(DeprecationWarning, match="client.api"): api_object = client.api # Not a perfect check, but we are satisficed if the returned API object indeed @@ -262,9 +261,10 @@ def test_sync_pull_warning_if_return_immediately(creds): client = subscriber.Client(credentials=creds) subscription_path = "projects/foo/subscriptions/bar" - with mock.patch.object( - client._transport, "_wrapped_methods" - ), warnings.catch_warnings(record=True) as warned: + with mock.patch.object(client._transport, "_wrapped_methods"), pytest.warns( + DeprecationWarning, + match="The return_immediately flag is deprecated and should be set to False", + ) as warned: client.pull(subscription=subscription_path, return_immediately=True) # Setting the deprecated return_immediately flag to True should emit a warning. @@ -287,7 +287,10 @@ async def test_sync_pull_warning_if_return_immediately_async(creds): new=mock.AsyncMock, ) - with patcher, warnings.catch_warnings(record=True) as warned: + with patcher, pytest.warns( + DeprecationWarning, + match="The return_immediately flag is deprecated and should be set to False", + ) as warned: await client.pull(subscription=subscription_path, return_immediately=True) # Setting the deprecated return_immediately flag to True should emit a warning. 
From ba51825d6e376264d4100aba98ba8f158c616e85 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 10 Dec 2023 09:31:28 -0500 Subject: [PATCH 1027/1197] build: update actions/checkout and actions/setup-python (#1040) * build: update actions/checkout and actions/setup-python Source-Link: https://github.com/googleapis/synthtool/commit/3551acd1261fd8f616cbfd054cda9bd6d6ac75f4 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 * build: update warning filters --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-pubsub/.github/workflows/docs.yml | 8 ++++---- .../google-cloud-pubsub/.github/workflows/lint.yml | 4 ++-- .../.github/workflows/unittest.yml | 8 ++++---- packages/google-cloud-pubsub/pytest.ini | 11 ++--------- 5 files changed, 14 insertions(+), 21 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 773c1dfd2146..40bf99731959 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2f155882785883336b4468d5218db737bb1d10c9cea7cb62219ad16fe248c03c -# created: 2023-11-29T14:54:29.548172703Z + digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 +# created: 2023-12-09T15:16:25.430769578Z diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index 221806cedf58..698fbc5c94da 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.9" - name: Install nox @@ -24,9 +24,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index 16d5a9e90f6d..4866193af2a9 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index a32027b49bc2..d6ca65627c2d 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -11,9 
+11,9 @@ jobs: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Install nox @@ -37,9 +37,9 @@ jobs: - unit steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" - name: Install coverage diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index fd477ac99af9..49de304e3ee5 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -4,14 +4,7 @@ filterwarnings = error # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning - # Remove once https://github.com/googleapis/python-api-common-protos/pull/191 is merged - ignore:.*pkg_resources.declare_namespace:DeprecationWarning - ignore:.*pkg_resources is deprecated as an API:DeprecationWarning - # Remove once release PR https://github.com/googleapis/proto-plus-python/pull/391 is merged - ignore:datetime.datetime.utcfromtimestamp\(\) is deprecated:DeprecationWarning:proto.datetime_helpers - # Remove once release PR https://github.com/googleapis/python-api-core/pull/555 is merged - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:google.api_core.datetime_helpers # Remove once https://github.com/grpc/grpc/issues/35086 is fixed ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel - # Remove once a version of grpcio newer than 1.59.3 is released to PyPI - ignore:datetime.datetime.utcnow\(\) is deprecated:DeprecationWarning:grpc._channel + # Remove after support for Python 3.7 is dropped + ignore:After January 1, 2024, new releases of this library will drop support for 
Python 3.7:DeprecationWarning \ No newline at end of file From 473dea971e2d1faa3416335ca032c309d2cb6c60 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 11 Dec 2023 14:15:13 -0500 Subject: [PATCH 1028/1197] chore(main): release 2.19.0 (#1031) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 18 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 7f3c731bef00..36be43f26a5e 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.18.4" + ".": "2.19.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index b0d33b1c56eb..5c54270b9fd8 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.0](https://github.com/googleapis/python-pubsub/compare/v2.18.4...v2.19.0) (2023-12-10) + + +### Features + +* Add `use_table_schema` field to BigQueryConfig ([#1035](https://github.com/googleapis/python-pubsub/issues/1035)) ([ac6d912](https://github.com/googleapis/python-pubsub/commit/ac6d9126413b5c8e2b00727f7d74f03b7fb9d9ed)) +* Add support for Python 3.12 ([#1025](https://github.com/googleapis/python-pubsub/issues/1025)) ([660b8ea](https://github.com/googleapis/python-pubsub/commit/660b8eaf0daf975834a8333aedf8415867a4874d)) +* Introduce compatibility with native namespace packages 
([#1024](https://github.com/googleapis/python-pubsub/issues/1024)) ([0432420](https://github.com/googleapis/python-pubsub/commit/0432420dcf18304dc1912075482eff0d2dc73009)) + + +### Bug Fixes + +* Use `retry_async` instead of `retry` in async client ([#1030](https://github.com/googleapis/python-pubsub/issues/1030)) ([05dd571](https://github.com/googleapis/python-pubsub/commit/05dd571760b71ae2930072f0677616dfc19d9511)) + ## [2.18.4](https://github.com/googleapis/python-pubsub/compare/v2.18.3...v2.18.4) (2023-09-09) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 5b1f1ec8192d..0f1a446f3802 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.18.4" # {x-release-please-version} +__version__ = "2.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 5b1f1ec8192d..0f1a446f3802 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.18.4" # {x-release-please-version} +__version__ = "2.19.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..25ee6d05607a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.19.0" }, "snippets": [ { From 95c804daa63b032f02423e0ac71c4f70394d4656 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 14 Dec 2023 13:27:56 +0100 Subject: [PATCH 1029/1197] chore(deps): update all dependencies (#1050) --- .../samples/snippets/requirements-test.txt | 4 ++-- .../google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index b3325ed9ffcd..17e317ce3e7f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -2,5 +2,5 @@ backoff==2.2.1 pytest==7.4.3 mock==5.1.0 flaky==3.7.0 -google-cloud-bigquery==3.13.0 -google-cloud-storage==2.13.0 +google-cloud-bigquery==3.14.1 +google-cloud-storage==2.14.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index a3124ddce13c..3fb4e0a6905c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.18.4 +google-cloud-pubsub==2.19.0 avro==1.11.3 
protobuf===4.24.4; python_version == '3.7' protobuf==4.25.1; python_version >= '3.8' From 0fc00ef9500249c8bf9d8e17ca7d176117247083 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 19:18:32 -0500 Subject: [PATCH 1030/1197] build: update actions/upload-artifact and actions/download-artifact (#1053) Source-Link: https://github.com/googleapis/synthtool/commit/280ddaed417057dfe5b1395731de07b7d09f5058 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- .../google-cloud-pubsub/.github/workflows/unittest.yml | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 40bf99731959..9bee24097165 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:230f7fe8a0d2ed81a519cfc15c6bb11c5b46b9fb449b8b1219b3771bcb520ad2 -# created: 2023-12-09T15:16:25.430769578Z + digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 +# created: 2023-12-14T22:17:57.611773021Z diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index d6ca65627c2d..f4a337c496a0 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -26,9 +26,9 @@ jobs: run: | nox -s unit-${{ matrix.python }} - name: Upload coverage results - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: - name: coverage-artifacts + name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} cover: @@ -47,11 +47,11 @@ jobs: python -m pip install --upgrade setuptools pip wheel python -m pip install coverage - name: Download coverage results - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: - name: coverage-artifacts path: .coverage-results/ - name: Report coverage results run: | - coverage combine .coverage-results/.coverage* + find .coverage-results -type f -name '*.zip' -exec unzip {} \; + coverage combine .coverage-results/**/.coverage* coverage report --show-missing --fail-under=100 From 88343cfb46b06b91fc808bd3bf97ceb55a3eefb2 Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Fri, 15 Dec 2023 12:41:38 -0500 Subject: [PATCH 1031/1197] fix: Swap writer and reader schema to correct places in sample (#1052) * samples: schema evolution * Add command-line commands * Fix tag for rollback * Make formatting fixes * Formatting fixes * Fix exceptions * fix: Set x-goog-request-params for streaming pull request * Set blunderbuss config to auto-assign issues and PRs * fix: Swap writer and reader schema to correct places in sample --- 
packages/google-cloud-pubsub/samples/snippets/schema.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/schema.py b/packages/google-cloud-pubsub/samples/snippets/schema.py index 3260a0e19a3c..b492ccf333a9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/schema.py +++ b/packages/google-cloud-pubsub/samples/snippets/schema.py @@ -645,7 +645,7 @@ def subscribe_with_avro_schema_with_revisions( subscription_path = subscriber.subscription_path(project_id, subscription_id) with open(avsc_file, "rb") as file: - writer_avro_schema = schema.parse(file.read()) + reader_avro_schema = schema.parse(file.read()) # Dict to keep readers for different schema revisions. revisions_to_readers = {} @@ -665,7 +665,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: print(f"{schema_path} not found.") message.nack() return - reader_avro_schema = schema.parse(received_avro_schema.definition) + writer_avro_schema = schema.parse(received_avro_schema.definition) revisions_to_readers[schema_revision_id] = DatumReader( writer_avro_schema, reader_avro_schema ) From 214b9b0fd26c5f2436bf880eccb5f074ab8e9fbb Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 22 Jan 2024 16:14:08 -0500 Subject: [PATCH 1032/1197] build(python): fix `docs` and `docfx` builds (#1063) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * build(python): fix `docs` and `docfx` builds Source-Link: https://github.com/googleapis/synthtool/commit/fac8444edd5f5526e804c306b766a271772a3e2f Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa * exclude types-protobuf==4.24.0.20240106 release * typo * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * exclude 
types-protobuf==4.24.0.20240106 release * update replacement * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --------- Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 6 ++-- .../.kokoro/requirements.txt | 6 ++-- packages/google-cloud-pubsub/noxfile.py | 30 +++++++++++++++---- packages/google-cloud-pubsub/owlbot.py | 10 ++++--- 4 files changed, 37 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 9bee24097165..d8a1bbca7179 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:346ab2efb51649c5dde7756cbbdc60dd394852ba83b9bbffc292a63549f33c17 -# created: 2023-12-14T22:17:57.611773021Z + digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa +# created: 2024-01-15T16:32:08.142785673Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index e5c1ffca94b7..bb3d6ca38b14 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -263,9 +263,9 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +jinja2==3.1.3 \ + --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ + --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 # via gcp-releasetool keyring==24.2.0 \ --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 3f9db8b2c73d..ec63afcb4775 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -92,10 +92,6 @@ def mypy(session): session.install("-e", ".[all]") session.install(MYPY_VERSION) - # Just install the type info directly, since "mypy --install-types" might - # require an additional pass. - session.install("types-protobuf", "types-setuptools") - # Version 2.1.1 of google-api-core version is the first type-checked release. # Version 2.2.0 of google-cloud-core version is the first type-checked release. 
session.install( @@ -103,6 +99,12 @@ def mypy(session): "google-cloud-core>=2.2.0", ) + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 + session.install("types-protobuf!=4.24.0.20240106", "types-setuptools") + # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. # https://github.com/googleapis/gapic-generator-python/issues/1092 @@ -334,7 +336,16 @@ def docs(session): session.install("-e", ".") session.install( - "sphinx==4.0.1", + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", "alabaster", "recommonmark", ) @@ -360,6 +371,15 @@ def docfx(session): session.install("-e", ".") session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", "gcp-sphinx-docfx-yaml", "alabaster", "recommonmark", diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 33868758872d..0483b04df63f 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -366,10 +366,6 @@ def mypy(session): session.install("-e", ".[all]") session.install(MYPY_VERSION) - # Just install the type info directly, since "mypy --install-types" might - # require an additional pass. - session.install("types-protobuf", "types-setuptools") - # Version 2.1.1 of google-api-core version is the first type-checked release. # Version 2.2.0 of google-cloud-core version is the first type-checked release. session.install( @@ -377,6 +373,12 @@ def mypy(session): "google-cloud-core>=2.2.0", ) + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 + session.install("types-protobuf!=4.24.0.20240106", "types-setuptools") + # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. 
# https://github.com/googleapis/gapic-generator-python/issues/1092 From ab4d81d59771eccedd7c1106e8640263e4558a5a Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 2 Feb 2024 11:09:45 -0500 Subject: [PATCH 1033/1197] test: Silence return_immediately flag Deprecation Warning (#1066) --- packages/google-cloud-pubsub/pytest.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 49de304e3ee5..e612d31f164a 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -7,4 +7,6 @@ filterwarnings = # Remove once https://github.com/grpc/grpc/issues/35086 is fixed ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel # Remove after support for Python 3.7 is dropped - ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning \ No newline at end of file + ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning + # Remove warning once https://github.com/googleapis/python-pubsub/issues/1067 is fixed + ignore:The return_immediately flag is deprecated and should be set to False.:DeprecationWarning \ No newline at end of file From 01389414a35eb8f1d2a7d7a2325fc6dc15943f38 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 2 Feb 2024 11:45:27 -0500 Subject: [PATCH 1034/1197] chore(main): release 2.19.1 (#1054) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json 
| 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 36be43f26a5e..ffffd9a18a60 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.0" + ".": "2.19.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 5c54270b9fd8..721fdf3676b7 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.1](https://github.com/googleapis/python-pubsub/compare/v2.19.0...v2.19.1) (2024-02-02) + + +### Documentation + +* **samples:** Swap writer and reader schema to correct places ([265f410](https://github.com/googleapis/python-pubsub/commit/265f4106f499ec5d2d01a127ba192404c1836a28)) + ## [2.19.0](https://github.com/googleapis/python-pubsub/compare/v2.18.4...v2.19.0) (2023-12-10) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 0f1a446f3802..c5d0439f6dde 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.19.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 0f1a446f3802..c5d0439f6dde 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.0" # {x-release-please-version} +__version__ = "2.19.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 25ee6d05607a..32f3378f9469 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.0" + "version": "2.19.1" }, "snippets": [ { From aeab125fa1e3dd6cd7f8adf1dff22c819a4e4621 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 6 Feb 2024 09:57:47 -0500 Subject: [PATCH 1035/1197] build(deps): bump cryptography from 41.0.6 to 42.0.0 in /synthtool/gcp/templates/python_library/.kokoro (#1071) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 57 +++++++++++-------- 2 files changed, 35 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index d8a1bbca7179..2aefd0e91175 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5ea6d0ab82c956b50962f91d94e206d3921537ae5fe1549ec5326381d8905cfa -# created: 2024-01-15T16:32:08.142785673Z + digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 +# created: 2024-02-06T03:20:16.660474034Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index bb3d6ca38b14..8c11c9f3e9b6 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -93,30 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==41.0.6 \ - --hash=sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596 \ - --hash=sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c \ - --hash=sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660 \ - --hash=sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4 \ - --hash=sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead \ - --hash=sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed \ - --hash=sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3 \ - --hash=sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7 \ - --hash=sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09 \ - --hash=sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c \ - --hash=sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43 \ - --hash=sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65 \ - --hash=sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6 \ - --hash=sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da \ - 
--hash=sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c \ - --hash=sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b \ - --hash=sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8 \ - --hash=sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c \ - --hash=sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d \ - --hash=sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9 \ - --hash=sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86 \ - --hash=sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36 \ - --hash=sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae +cryptography==42.0.0 \ + --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ + --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ + --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ + --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ + --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ + --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ + --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ + --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ + --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ + --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ + --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ + --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ + --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ + --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ + 
--hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ + --hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ + --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ + --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ + --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ + --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ + --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ + --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ + --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ + --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ + --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ + --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ + --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ + --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ + --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ + --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ + --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ + --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 # via # gcp-releasetool # secretstorage From eea771dc4dbb2e3f5c39a65d299afea23a7ecec5 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 8 Feb 2024 14:02:36 -0500 Subject: [PATCH 1036/1197] =?UTF-8?q?fix:=20unit=20test=20failures=20in=20?= =?UTF-8?q?https://github.com/googleapis/python-pubsu=E2=80=A6=20(#1074)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- packages/google-cloud-pubsub/pytest.ini | 6 
++++-- .../tests/unit/pubsub_v1/conftest.py | 10 +--------- .../tests/unit/pubsub_v1/publisher/batch/test_base.py | 9 +-------- .../unit/pubsub_v1/publisher/batch/test_thread.py | 3 +-- .../publisher/sequencer/test_ordered_sequencer.py | 3 +-- .../publisher/sequencer/test_unordered_sequencer.py | 3 +-- 6 files changed, 9 insertions(+), 25 deletions(-) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index e612d31f164a..7308b0871af9 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -8,5 +8,7 @@ filterwarnings = ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel # Remove after support for Python 3.7 is dropped ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning - # Remove warning once https://github.com/googleapis/python-pubsub/issues/1067 is fixed - ignore:The return_immediately flag is deprecated and should be set to False.:DeprecationWarning \ No newline at end of file + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1938 is fixed + ignore:The return_immediately flag is deprecated and should be set to False.:DeprecationWarning + # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed + ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning \ No newline at end of file diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py index e39832d5f5ab..dc4192931098 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py @@ -11,16 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import sys import google.auth.credentials - -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock - import pytest @@ -30,4 +22,4 @@ def creds(): Provide test creds to unit tests so that they can run without GOOGLE_APPLICATION_CREDENTIALS set. """ - yield mock.Mock(spec=google.auth.credentials.Credentials) + yield google.auth.credentials.AnonymousCredentials() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index c35f482e7745..a95d72c123c3 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -13,13 +13,7 @@ # limitations under the License. from __future__ import absolute_import -import sys -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock from google.auth import credentials from google.cloud.pubsub_v1 import publisher @@ -38,8 +32,7 @@ def create_batch(status, settings=types.BatchSettings()): Returns: ~.pubsub_v1.publisher.batch.thread.Batch: The batch object """ - creds = mock.Mock(spec=credentials.Credentials) - client = publisher.Client(credentials=creds) + client = publisher.Client(credentials=credentials.AnonymousCredentials()) batch = Batch(client, "topic_name", settings) batch._status = status return batch diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 60658b4ce353..2752d62a20e4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -39,8 +39,7 @@ def create_client(): - 
creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) + return publisher.Client(credentials=credentials.AnonymousCredentials()) def create_batch( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index e126c829f0a0..7570c2970938 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -36,8 +36,7 @@ def create_message(): def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) + return publisher.Client(credentials=credentials.AnonymousCredentials()) def create_ordered_sequencer(client): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 8a2c486ad803..01d9d6ca405c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -34,8 +34,7 @@ def create_message(): def create_client(): - creds = mock.Mock(spec=credentials.Credentials) - return publisher.Client(credentials=creds) + return publisher.Client(credentials=credentials.AnonymousCredentials()) def test_stop(): From a63da850490a3bc136be04e2bc9c84c2e0b3a935 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 8 Feb 2024 11:48:15 -0800 Subject: [PATCH 1037/1197] chore(main): release 2.19.2 (#1075) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- 
packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index ffffd9a18a60..4c00940a5e24 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.1" + ".": "2.19.2" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 721fdf3676b7..31c167a028c1 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.2](https://github.com/googleapis/python-pubsub/compare/v2.19.1...v2.19.2) (2024-02-08) + + +### Bug Fixes + +* Unit test failures in https://github.com/googleapis/python-pubsu… ([#1074](https://github.com/googleapis/python-pubsub/issues/1074)) ([3c6d128](https://github.com/googleapis/python-pubsub/commit/3c6d128a53d83439036aaec1f1fd48331152935b)) + ## [2.19.1](https://github.com/googleapis/python-pubsub/compare/v2.19.0...v2.19.1) (2024-02-02) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index c5d0439f6dde..72b71f9cde33 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.1" # {x-release-please-version} +__version__ = "2.19.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index c5d0439f6dde..72b71f9cde33 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.1" # {x-release-please-version} +__version__ = "2.19.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 32f3378f9469..fb45cf3dacdf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.1" + "version": "2.19.2" }, "snippets": [ { From 71ebbd798c299fb52229c61c05aaa6050ce46129 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 8 Feb 2024 16:16:29 -0500 Subject: [PATCH 1038/1197] chore: change assignees for issues and PRs to mukund-ananthu (#1072) --- packages/google-cloud-pubsub/.github/blunderbuss.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/blunderbuss.yml b/packages/google-cloud-pubsub/.github/blunderbuss.yml index 687c7f91b5b4..992f04dce1fe 100644 --- a/packages/google-cloud-pubsub/.github/blunderbuss.yml +++ b/packages/google-cloud-pubsub/.github/blunderbuss.yml @@ -1,6 +1,6 @@ # Configuration for the Blunderbuss GitHub app. 
For more info see # https://github.com/googleapis/repo-automation-bots/tree/main/packages/blunderbuss assign_issues: - - pradn + - mukund-ananthu assign_prs: - - pradn \ No newline at end of file + - mukund-ananthu From 107d5eb466318a3521f91dfa47265224f3b067fb Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 8 Feb 2024 17:47:53 -0500 Subject: [PATCH 1039/1197] fix: add google-auth as a direct dependency (#1076) --- packages/google-cloud-pubsub/setup.py | 1 + packages/google-cloud-pubsub/testing/constraints-3.7.txt | 1 + 2 files changed, 2 insertions(+) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index d388d9443d93..a6af312072db 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -38,6 +38,7 @@ dependencies = [ "grpcio >= 1.51.3, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/609 # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x + "google-auth >= 2.14.1, <3.0.0dev", "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index 75ec7a6234ae..ee447eb62c89 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -5,6 +5,7 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 google-api-core==1.34.0 +google-auth==2.14.1 proto-plus==1.22.0 protobuf==3.19.5 grpc-google-iam-v1==0.12.4 From 3037dae0ea0a5cf854530b2d0e093a09160efd87 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> 
Date: Fri, 9 Feb 2024 11:02:57 -0500 Subject: [PATCH 1040/1197] chore(main): release 2.19.3 (#1077) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 4c00940a5e24..a31cb5941cf7 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.2" + ".": "2.19.3" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 31c167a028c1..d9f24c15cdb6 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.3](https://github.com/googleapis/python-pubsub/compare/v2.19.2...v2.19.3) (2024-02-08) + + +### Bug Fixes + +* Add google-auth as a direct dependency ([#1076](https://github.com/googleapis/python-pubsub/issues/1076)) ([5ce7301](https://github.com/googleapis/python-pubsub/commit/5ce7301b3056191203bc89bbcf1f33083de72a2d)) + ## [2.19.2](https://github.com/googleapis/python-pubsub/compare/v2.19.1...v2.19.2) (2024-02-08) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 72b71f9cde33..3feb0e097c14 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See 
the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.2" # {x-release-please-version} +__version__ = "2.19.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 72b71f9cde33..3feb0e097c14 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.2" # {x-release-please-version} +__version__ = "2.19.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index fb45cf3dacdf..259b241f79fc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.2" + "version": "2.19.3" }, "snippets": [ { From 431fea056e876fae92188d5c27fc5d0dade60c8f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 13:54:48 -0800 Subject: [PATCH 1041/1197] fix(diregapic): s/bazel/bazelisk/ in DIREGAPIC build GitHub action (#1064) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../cloud/pubsub_v1/publisher/client.py | 2 +- .../google/pubsub/__init__.py | 2 + .../google/pubsub_v1/__init__.py | 2 + .../services/publisher/async_client.py | 107 ++- .../pubsub_v1/services/publisher/client.py | 327 +++++++- 
.../services/publisher/transports/base.py | 8 +- .../services/publisher/transports/grpc.py | 5 +- .../publisher/transports/grpc_asyncio.py | 5 +- .../services/publisher/transports/rest.py | 6 +- .../services/schema_service/async_client.py | 107 ++- .../services/schema_service/client.py | 331 +++++++- .../schema_service/transports/base.py | 6 +- .../schema_service/transports/grpc.py | 2 +- .../schema_service/transports/grpc_asyncio.py | 2 +- .../schema_service/transports/rest.py | 6 +- .../services/subscriber/async_client.py | 155 +++- .../pubsub_v1/services/subscriber/client.py | 375 +++++++-- .../services/subscriber/transports/base.py | 8 +- .../services/subscriber/transports/grpc.py | 8 +- .../subscriber/transports/grpc_asyncio.py | 8 +- .../services/subscriber/transports/rest.py | 6 +- .../google/pubsub_v1/types/__init__.py | 2 + .../google/pubsub_v1/types/pubsub.py | 780 +++++++++++------- .../google/pubsub_v1/types/schema.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 4 +- .../scripts/fixup_pubsub_v1_keywords.py | 2 +- .../testing/constraints-3.8.txt | 2 +- .../unit/gapic/pubsub_v1/test_publisher.py | 482 ++++++++++- .../gapic/pubsub_v1/test_schema_service.py | 471 ++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 457 +++++++++- .../publisher/test_publisher_client.py | 40 +- 31 files changed, 3112 insertions(+), 608 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 3e668533de65..caf9fa180eea 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -399,7 +399,7 @@ def on_publish_done(future): transport = self._transport base_retry = transport._wrapped_methods[transport.publish]._retry retry = base_retry.with_deadline(2.0**32) - else: + elif retry is not None: retry = retry.with_deadline(2.0**32) # Delegate the 
publishing to the sequencer. diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 453dca45f117..ebcbb8271dc6 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -41,6 +41,7 @@ from google.pubsub_v1.types.pubsub import GetSnapshotRequest from google.pubsub_v1.types.pubsub import GetSubscriptionRequest from google.pubsub_v1.types.pubsub import GetTopicRequest +from google.pubsub_v1.types.pubsub import IngestionDataSourceSettings from google.pubsub_v1.types.pubsub import ListSnapshotsRequest from google.pubsub_v1.types.pubsub import ListSnapshotsResponse from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest @@ -112,6 +113,7 @@ "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", + "IngestionDataSourceSettings", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 08c6bc72afcf..731d03999fb9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -39,6 +39,7 @@ from .types.pubsub import GetSnapshotRequest from .types.pubsub import GetSubscriptionRequest from .types.pubsub import GetTopicRequest +from .types.pubsub import IngestionDataSourceSettings from .types.pubsub import ListSnapshotsRequest from .types.pubsub import ListSnapshotsResponse from .types.pubsub import ListSubscriptionsRequest @@ -113,6 +114,7 @@ "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", + "IngestionDataSourceSettings", "ListSchemaRevisionsRequest", "ListSchemaRevisionsResponse", "ListSchemasRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index ae632ea9616c..730d5c8a5b56 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -61,8 +61,12 @@ class PublisherAsyncClient: _client: PublisherClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = PublisherClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = PublisherClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = PublisherClient._DEFAULT_UNIVERSE schema_path = staticmethod(PublisherClient.schema_path) parse_schema_path = staticmethod(PublisherClient.parse_schema_path) @@ -167,6 +171,25 @@ def transport(self) -> PublisherTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + get_transport_class = functools.partial( type(PublisherClient).get_transport_class, type(PublisherClient) ) @@ -179,7 +202,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the publisher client. + """Instantiates the publisher async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -190,23 +213,38 @@ def __init__( transport (Union[str, ~.PublisherTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -324,6 +362,9 @@ async def sample_create_topic(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -345,7 +386,8 @@ async def update_topic( timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: - r"""Updates an existing topic. Note that certain + r"""Updates an existing topic by updating the fields + specified in the update mask. Note that certain properties of a topic are not modifiable. .. code-block:: python @@ -452,6 +494,9 @@ async def sample_update_topic(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -555,7 +600,7 @@ async def sample_publish(): default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, - multiplier=4.0, + multiplier=4, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.Cancelled, @@ -577,6 +622,9 @@ async def sample_publish(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -688,6 +736,9 @@ async def sample_get_topic(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -804,6 +855,9 @@ async def sample_list_topics(): gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -931,6 +985,9 @@ async def sample_list_topic_subscriptions(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1062,6 +1119,9 @@ async def sample_list_topic_snapshots(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1178,6 +1238,9 @@ async def sample_delete_topic(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1271,6 +1334,9 @@ async def sample_detach_subscription(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -1391,6 +1457,9 @@ async def set_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1513,6 +1582,9 @@ async def get_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1573,6 +1645,9 @@ async def test_iam_permissions( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 1a92362c557d..37e7410f9fb4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -29,6 +29,7 @@ Union, cast, ) +import warnings from google.pubsub_v1 import gapic_version as package_version @@ -44,9 +45,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -133,6 +134,8 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + # The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( @@ -148,6 +151,9 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -329,7 +335,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -359,6 +365,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -392,6 +403,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = PublisherClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = PublisherClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = PublisherClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. 
+ credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = PublisherClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or PublisherClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -411,22 +594,32 @@ def __init__( transport (Union[str, PublisherTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. 
It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -437,17 +630,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = PublisherClient._read_environment_variables() + self._client_cert_source = PublisherClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = PublisherClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -456,20 +666,30 @@ def __init__( # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, PublisherTransport): + transport_provided = isinstance(transport, PublisherTransport) + if transport_provided: # transport is a PublisherTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(PublisherTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or PublisherClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -479,7 +699,7 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: @@ -491,14 +711,14 @@ def __init__( self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, 
always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def create_topic( @@ -598,6 +818,9 @@ def sample_create_topic(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -619,7 +842,8 @@ def update_topic( timeout: TimeoutType = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Topic: - r"""Updates an existing topic. Note that certain + r"""Updates an existing topic by updating the fields + specified in the update mask. Note that certain properties of a topic are not modifiable. .. code-block:: python @@ -717,6 +941,9 @@ def sample_update_topic(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -827,6 +1054,9 @@ def sample_publish(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -927,6 +1157,9 @@ def sample_get_topic(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1032,6 +1265,9 @@ def sample_list_topics(): gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1148,6 +1384,9 @@ def sample_list_topic_subscriptions(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1268,6 +1507,9 @@ def sample_list_topic_snapshots(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1375,6 +1617,9 @@ def sample_delete_topic(): gapic_v1.routing_header.to_grpc_metadata((("topic", request.topic),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1460,6 +1705,9 @@ def sample_detach_subscription(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1594,6 +1842,9 @@ def set_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1716,6 +1967,9 @@ def get_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1776,6 +2030,9 @@ def test_iam_permissions( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index e8caa080b091..6c92dd8f9f59 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -63,7 +63,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). 
credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -126,6 +126,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -162,7 +166,7 @@ def _prep_wrapped_messages(self, client_info): default_retry=retries.Retry( initial=0.1, maximum=60.0, - multiplier=4.0, + multiplier=4, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.Cancelled, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 268e687f01cd..ed6410fd4300 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -68,7 +68,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -265,7 +265,8 @@ def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: r"""Return a callable for the update topic method over gRPC. - Updates an existing topic. Note that certain + Updates an existing topic by updating the fields + specified in the update mask. Note that certain properties of a topic are not modifiable. 
Returns: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 8d971fa3251c..c620cd4aeaa0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -113,7 +113,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -270,7 +270,8 @@ def update_topic( ) -> Callable[[pubsub.UpdateTopicRequest], Awaitable[pubsub.Topic]]: r"""Return a callable for the update topic method over gRPC. - Updates an existing topic. Note that certain + Updates an existing topic by updating the fields + specified in the update mask. Note that certain properties of a topic are not modifiable. 
Returns: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index de7215153bac..d450cb6eb26a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -36,9 +36,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -427,7 +427,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 2d4b5d77ba60..44a4916e6e2f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -38,9 +38,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -58,8 +58,12 @@ class SchemaServiceAsyncClient: _client: SchemaServiceClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = SchemaServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SchemaServiceClient._DEFAULT_UNIVERSE schema_path = staticmethod(SchemaServiceClient.schema_path) parse_schema_path = staticmethod(SchemaServiceClient.parse_schema_path) @@ -166,6 +170,25 @@ def transport(self) -> SchemaServiceTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. 
+ """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) ) @@ -178,7 +201,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the schema service client. + """Instantiates the schema service async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -189,23 +212,38 @@ def __init__( transport (Union[str, ~.SchemaServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -285,7 +323,7 @@ async def sample_create_schema(): final component of the schema's resource name. See - https://cloud.google.com/pubsub/docs/admin#resource_names + https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names for resource name constraints. This corresponds to the ``schema_id`` field @@ -345,6 +383,9 @@ async def sample_create_schema(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -453,6 +494,9 @@ async def sample_get_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. 
+ self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -566,6 +610,9 @@ async def sample_list_schemas(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -688,6 +735,9 @@ async def sample_list_schema_revisions(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -819,6 +869,9 @@ async def sample_commit_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -942,6 +995,9 @@ async def sample_rollback_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1063,6 +1119,9 @@ async def sample_delete_schema_revision(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1164,6 +1223,9 @@ async def sample_delete_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1285,6 +1347,9 @@ async def sample_validate_schema(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -1374,6 +1439,9 @@ async def sample_validate_message(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1493,6 +1561,9 @@ async def set_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1614,6 +1685,9 @@ async def get_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1673,6 +1747,9 @@ async def test_iam_permissions( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 3106a32921f7..d869e4dec4dd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -29,6 +29,7 @@ Union, cast, ) +import warnings from google.pubsub_v1 import gapic_version as package_version @@ -43,9 +44,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -129,11 +130,15 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "pubsub.googleapis.com" DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -279,7 +284,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. 
The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -309,6 +314,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -342,6 +352,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. 
+ + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. 
+ + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SchemaServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. 
+ + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or SchemaServiceClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -361,22 +543,32 @@ def __init__( transport (Union[str, SchemaServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). 
However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -387,17 +579,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. 
""" - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SchemaServiceClient._read_environment_variables() + self._client_cert_source = SchemaServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SchemaServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -406,20 +615,33 @@ def __init__( # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. 
- if isinstance(transport, SchemaServiceTransport): + transport_provided = isinstance(transport, SchemaServiceTransport) + if transport_provided: # transport is a SchemaServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(SchemaServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or SchemaServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -429,7 +651,7 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: @@ -441,14 +663,14 @@ def __init__( self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def 
create_schema( @@ -519,7 +741,7 @@ def sample_create_schema(): final component of the schema's resource name. See - https://cloud.google.com/pubsub/docs/admin#resource_names + https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names for resource name constraints. This corresponds to the ``schema_id`` field @@ -570,6 +792,9 @@ def sample_create_schema(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -669,6 +894,9 @@ def sample_get_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -773,6 +1001,9 @@ def sample_list_schemas(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -886,6 +1117,9 @@ def sample_list_schema_revisions(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1008,6 +1242,9 @@ def sample_commit_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1122,6 +1359,9 @@ def sample_rollback_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1234,6 +1474,9 @@ def sample_delete_schema_revision(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1326,6 +1569,9 @@ def sample_delete_schema(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1438,6 +1684,9 @@ def sample_validate_schema(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1519,6 +1768,9 @@ def sample_validate_message(): gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1652,6 +1904,9 @@ def set_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1773,6 +2028,9 @@ def get_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1832,6 +2090,9 @@ def test_iam_permissions( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 08b370cf7a69..39151e3975ce 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -64,7 +64,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. 
+ The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -127,6 +127,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index c14a1d5f46f4..66b382bfb067 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -68,7 +68,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 5b435aff6977..94a872b568de 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -113,7 +113,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 0586bbf9d5fd..5e5d7d478c71 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -36,9 +36,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -463,7 +463,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 13f30bd8937b..78ad7c9a249c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -42,9 +42,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.AsyncRetry, object] # type: ignore + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -67,8 +67,12 @@ class SubscriberAsyncClient: _client: SubscriberClient + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = SubscriberClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = SubscriberClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = SubscriberClient._DEFAULT_UNIVERSE snapshot_path = staticmethod(SubscriberClient.snapshot_path) parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) @@ -173,6 +177,25 @@ def transport(self) -> SubscriberTransport: """ return self._client.transport + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. 
+ """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. + """ + return self._client._universe_domain + get_transport_class = functools.partial( type(SubscriberClient).get_transport_class, type(SubscriberClient) ) @@ -185,7 +208,7 @@ def __init__( client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: - """Instantiates the subscriber client. + """Instantiates the subscriber async client. Args: credentials (Optional[google.auth.credentials.Credentials]): The @@ -196,23 +219,38 @@ def __init__( transport (Union[str, ~.SubscriberTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. 
- (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. @@ -308,21 +346,21 @@ async def sample_create_subscription(): on the ``request`` instance; if ``request`` is provided, this should not be set. push_config (:class:`google.pubsub_v1.types.PushConfig`): - If push delivery is used with this - subscription, this field is used to - configure it. + Optional. If push delivery is used + with this subscription, this field is + used to configure it. This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. ack_deadline_seconds (:class:`int`): - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt - before resending the message. 
In the interval after the - message is delivered and before it is acknowledged, it - is considered to be *outstanding*. During that time - period, the message will not be redelivered (on a - best-effort basis). + Optional. The approximate amount of time (on a + best-effort basis) Pub/Sub waits for the subscriber to + acknowledge receipt before resending the message. In the + interval after the message is delivered and before it is + acknowledged, it is considered to be *outstanding*. + During that time period, the message will not be + redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this @@ -406,6 +444,9 @@ async def sample_create_subscription(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -523,6 +564,9 @@ async def sample_get_subscription(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -544,7 +588,8 @@ async def update_subscription( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: - r"""Updates an existing subscription. Note that certain + r"""Updates an existing subscription by updating the + fields specified in the update mask. Note that certain properties of a subscription, such as its topic, are not modifiable. @@ -655,6 +700,9 @@ async def sample_update_subscription(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -770,6 +818,9 @@ async def sample_list_subscriptions(): gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -888,6 +939,9 @@ async def sample_delete_subscription(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -965,8 +1019,8 @@ async def sample_modify_ack_deadline(): client. This typically results in an increase in the rate of message redeliveries (that is, duplicates). The minimum deadline you can specify is 0 seconds. The - maximum deadline you can specify is 600 seconds (10 - minutes). + maximum deadline you can specify in a single request is + 600 seconds (10 minutes). This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this @@ -1023,6 +1077,9 @@ async def sample_modify_ack_deadline(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1143,6 +1200,9 @@ async def sample_acknowledge(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -1291,6 +1351,9 @@ async def sample_pull(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1383,7 +1446,7 @@ def request_generator(): default_retry=retries.AsyncRetry( initial=0.1, maximum=60.0, - multiplier=4.0, + multiplier=4, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.DeadlineExceeded, @@ -1397,6 +1460,9 @@ def request_generator(): client_info=DEFAULT_CLIENT_INFO, ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = rpc( requests, @@ -1522,6 +1588,9 @@ async def sample_modify_push_config(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
await rpc( request, @@ -1640,6 +1709,9 @@ async def sample_get_snapshot(): gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1759,6 +1831,9 @@ async def sample_list_snapshots(): gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1925,6 +2000,9 @@ async def sample_create_snapshot(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -1946,7 +2024,8 @@ async def update_snapshot( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: - r"""Updates an existing snapshot. Snapshots are used in + r"""Updates an existing snapshot by updating the fields specified in + the update mask. Snapshots are used in `Seek `__ operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages @@ -2056,6 +2135,9 @@ async def sample_update_snapshot(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2166,6 +2248,9 @@ async def sample_delete_snapshot(): gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. await rpc( request, @@ -2261,6 +2346,9 @@ async def sample_seek(): ), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. 
response = await rpc( request, @@ -2380,6 +2468,9 @@ async def set_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2501,6 +2592,9 @@ async def get_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, @@ -2560,6 +2654,9 @@ async def test_iam_permissions( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._client._validate_universe_domain() + # Send the request. response = await rpc( request, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index f74e895a39d5..74fdb1d288eb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -31,6 +31,7 @@ Union, cast, ) +import warnings import warnings from google.pubsub_v1 import gapic_version as package_version @@ -46,9 +47,9 @@ from google.oauth2 import service_account # type: ignore try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -137,6 +138,8 @@ def _get_default_mtls_endpoint(api_endpoint): return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + # The scopes needed to make gRPC calls to all of the methods defined in # this service _DEFAULT_SCOPES = ( @@ -152,6 +155,9 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -333,7 +339,7 @@ def parse_common_location_path(path: str) -> Dict[str, str]: def get_mtls_endpoint_and_cert_source( cls, client_options: Optional[client_options_lib.ClientOptions] = None ): - """Return the API endpoint and client cert source for mutual TLS. + """Deprecated. Return the API endpoint and client cert source for mutual TLS. The client cert source is determined in the following order: (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the @@ -363,6 +369,11 @@ def get_mtls_endpoint_and_cert_source( Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) if client_options is None: client_options = client_options_lib.ClientOptions() use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") @@ -396,6 +407,178 @@ def get_mtls_endpoint_and_cert_source( return api_endpoint, client_cert_source + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. 
+ google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ): + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. 
+ use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = SubscriberClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = SubscriberClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = SubscriberClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + @staticmethod + def _compare_universes( + client_universe: str, credentials: ga_credentials.Credentials + ) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. 
+ credentials (ga_credentials.Credentials): The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + + default_universe = SubscriberClient._DEFAULT_UNIVERSE + credentials_universe = getattr(credentials, "universe_domain", default_universe) + + if client_universe != credentials_universe: + raise ValueError( + "The configured universe domain " + f"({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{default_universe}` is the default." + ) + return True + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + self._is_universe_domain_valid = ( + self._is_universe_domain_valid + or SubscriberClient._compare_universes( + self.universe_domain, self.transport._credentials + ) + ) + return self._is_universe_domain_valid + + @property + def api_endpoint(self): + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + def __init__( self, *, @@ -415,22 +598,32 @@ def __init__( transport (Union[str, SubscriberTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. 
It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If + to provide a client certificate for mTLS transport. If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. @@ -441,17 +634,34 @@ def __init__( google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + ( + self._use_client_cert, + self._use_mtls_endpoint, + self._universe_domain_env, + ) = SubscriberClient._read_environment_variables() + self._client_cert_source = SubscriberClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = SubscriberClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env ) + self._api_endpoint = None # updated below, depending on `transport` - api_key_value = getattr(client_options, "api_key", None) + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( "client_options.api_key and credentials are mutually exclusive" @@ -460,20 +670,30 @@ def __init__( # Save or instantiate the transport. 
# Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. - if isinstance(transport, SubscriberTransport): + transport_provided = isinstance(transport, SubscriberTransport) + if transport_provided: # transport is a SubscriberTransport instance. - if credentials or client_options.credentials_file or api_key_value: + if credentials or self._client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." ) - if client_options.scopes: + if self._client_options.scopes: raise ValueError( "When providing a transport instance, provide its scopes " "directly." ) - self._transport = transport - else: + self._transport = cast(SubscriberTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = self._api_endpoint or SubscriberClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + + if not transport_provided: import google.auth._default # type: ignore if api_key_value and hasattr( @@ -483,7 +703,7 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(transport) + Transport = type(self).get_transport_class(cast(str, transport)) emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: @@ -495,14 +715,14 @@ def __init__( self._transport = Transport( credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, client_info=client_info, 
always_use_jwt_access=True, - api_audience=client_options.api_audience, + api_audience=self._client_options.api_audience, ) def create_subscription( @@ -589,21 +809,21 @@ def sample_create_subscription(): on the ``request`` instance; if ``request`` is provided, this should not be set. push_config (google.pubsub_v1.types.PushConfig): - If push delivery is used with this - subscription, this field is used to - configure it. + Optional. If push delivery is used + with this subscription, this field is + used to configure it. This corresponds to the ``push_config`` field on the ``request`` instance; if ``request`` is provided, this should not be set. ack_deadline_seconds (int): - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt - before resending the message. In the interval after the - message is delivered and before it is acknowledged, it - is considered to be *outstanding*. During that time - period, the message will not be redelivered (on a - best-effort basis). + Optional. The approximate amount of time (on a + best-effort basis) Pub/Sub waits for the subscriber to + acknowledge receipt before resending the message. In the + interval after the message is delivered and before it is + acknowledged, it is considered to be *outstanding*. + During that time period, the message will not be + redelivered (on a best-effort basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this @@ -676,6 +896,9 @@ def sample_create_subscription(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -782,6 +1005,9 @@ def sample_get_subscription(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -803,7 +1029,8 @@ def update_subscription( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Subscription: - r"""Updates an existing subscription. Note that certain + r"""Updates an existing subscription by updating the + fields specified in the update mask. Note that certain properties of a subscription, such as its topic, are not modifiable. @@ -905,6 +1132,9 @@ def sample_update_subscription(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1009,6 +1239,9 @@ def sample_list_subscriptions(): gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1118,6 +1351,9 @@ def sample_delete_subscription(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1195,8 +1431,8 @@ def sample_modify_ack_deadline(): client. This typically results in an increase in the rate of message redeliveries (that is, duplicates). The minimum deadline you can specify is 0 seconds. The - maximum deadline you can specify is 600 seconds (10 - minutes). + maximum deadline you can specify in a single request is + 600 seconds (10 minutes). This corresponds to the ``ack_deadline_seconds`` field on the ``request`` instance; if ``request`` is provided, this @@ -1244,6 +1480,9 @@ def sample_modify_ack_deadline(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1355,6 +1594,9 @@ def sample_acknowledge(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1491,6 +1733,9 @@ def sample_pull(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. 
response = rpc( request, @@ -1585,6 +1830,9 @@ def request_generator(): # and friendly error handling. rpc = self._transport._wrapped_methods[self._transport.streaming_pull] + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( requests, @@ -1701,6 +1949,9 @@ def sample_modify_push_config(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. rpc( request, @@ -1808,6 +2059,9 @@ def sample_get_snapshot(): gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -1916,6 +2170,9 @@ def sample_list_snapshots(): gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2073,6 +2330,9 @@ def sample_create_snapshot(): gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2094,7 +2354,8 @@ def update_snapshot( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = (), ) -> pubsub.Snapshot: - r"""Updates an existing snapshot. Snapshots are used in + r"""Updates an existing snapshot by updating the fields specified in + the update mask. Snapshots are used in `Seek `__ operations, which allow you to manage message acknowledgments in bulk. That is, you can set the acknowledgment state of messages @@ -2195,6 +2456,9 @@ def sample_update_snapshot(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2296,6 +2560,9 @@ def sample_delete_snapshot(): gapic_v1.routing_header.to_grpc_metadata((("snapshot", request.snapshot),)), ) + # Validate the universe domain. 
+ self._validate_universe_domain() + # Send the request. rpc( request, @@ -2381,6 +2648,9 @@ def sample_seek(): ), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2514,6 +2784,9 @@ def set_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2635,6 +2908,9 @@ def get_iam_policy( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, @@ -2694,6 +2970,9 @@ def test_iam_permissions( gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)), ) + # Validate the universe domain. + self._validate_universe_domain() + # Send the request. response = rpc( request, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index d50b8baf683c..cea627d7638e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -63,7 +63,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -126,6 +126,10 @@ def __init__( host += ":443" self._host = host + @property + def host(self): + return self._host + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -255,7 +259,7 @@ def _prep_wrapped_messages(self, client_info): default_retry=retries.Retry( initial=0.1, maximum=60.0, - multiplier=4.0, + multiplier=4, predicate=retries.if_exception_type( core_exceptions.Aborted, core_exceptions.DeadlineExceeded, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 0b1fcd3aab6a..85a1942af65d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -70,7 +70,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -307,7 +307,8 @@ def update_subscription( ) -> Callable[[pubsub.UpdateSubscriptionRequest], pubsub.Subscription]: r"""Return a callable for the update subscription method over gRPC. - Updates an existing subscription. Note that certain + Updates an existing subscription by updating the + fields specified in the update mask. Note that certain properties of a subscription, such as its topic, are not modifiable. @@ -646,7 +647,8 @@ def update_snapshot( ) -> Callable[[pubsub.UpdateSnapshotRequest], pubsub.Snapshot]: r"""Return a callable for the update snapshot method over gRPC. - Updates an existing snapshot. Snapshots are used in + Updates an existing snapshot by updating the fields specified in + the update mask. Snapshots are used in `Seek `__ operations, which allow you to manage message acknowledgments in bulk. 
That is, you can set the acknowledgment state of messages diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index d32730c1e2e6..b4a73baf4a62 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -115,7 +115,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none @@ -310,7 +310,8 @@ def update_subscription( ) -> Callable[[pubsub.UpdateSubscriptionRequest], Awaitable[pubsub.Subscription]]: r"""Return a callable for the update subscription method over gRPC. - Updates an existing subscription. Note that certain + Updates an existing subscription by updating the + fields specified in the update mask. Note that certain properties of a subscription, such as its topic, are not modifiable. @@ -659,7 +660,8 @@ def update_snapshot( ) -> Callable[[pubsub.UpdateSnapshotRequest], Awaitable[pubsub.Snapshot]]: r"""Return a callable for the update snapshot method over gRPC. - Updates an existing snapshot. Snapshots are used in + Updates an existing snapshot by updating the fields specified in + the update mask. Snapshots are used in `Seek `__ operations, which allow you to manage message acknowledgments in bulk. 
That is, you can set the acknowledgment state of messages diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index c633c41156fb..26dee31f2e7b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -36,9 +36,9 @@ import warnings try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore + OptionalRetry = Union[retries.Retry, object, None] # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -547,7 +547,7 @@ def __init__( Args: host (Optional[str]): - The hostname to connect to. + The hostname to connect to (default: 'pubsub.googleapis.com'). credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. 
These credentials identify the application to the service; if none diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index c82fefd1379f..80135a019128 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -30,6 +30,7 @@ GetSnapshotRequest, GetSubscriptionRequest, GetTopicRequest, + IngestionDataSourceSettings, ListSnapshotsRequest, ListSnapshotsResponse, ListSubscriptionsRequest, @@ -107,6 +108,7 @@ "GetSnapshotRequest", "GetSubscriptionRequest", "GetTopicRequest", + "IngestionDataSourceSettings", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index ff7becd483fb..b04d5d74515b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -30,6 +30,7 @@ manifest={ "MessageStoragePolicy", "SchemaSettings", + "IngestionDataSourceSettings", "Topic", "PubsubMessage", "GetTopicRequest", @@ -84,21 +85,32 @@ class MessageStoragePolicy(proto.Message): Attributes: allowed_persistence_regions (MutableSequence[str]): - A list of IDs of Google Cloud regions where - messages that are published to the topic may be - persisted in storage. Messages published by - publishers running in non-allowed Google Cloud - regions (or running outside of Google Cloud - altogether) are routed for storage in one of the - allowed regions. An empty list means that no - regions are allowed, and is not a valid - configuration. + Optional. A list of IDs of Google Cloud + regions where messages that are published to the + topic may be persisted in storage. 
Messages + published by publishers running in non-allowed + Google Cloud regions (or running outside of + Google Cloud altogether) are routed for storage + in one of the allowed regions. An empty list + means that no regions are allowed, and is not a + valid configuration. + enforce_in_transit (bool): + Optional. If true, ``allowed_persistence_regions`` is also + used to enforce in-transit guarantees for messages. That is, + Pub/Sub will fail Publish operations on this topic and + subscribe operations on any subscription attached to this + topic in any region that is not in + ``allowed_persistence_regions``. """ allowed_persistence_regions: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=1, ) + enforce_in_transit: bool = proto.Field( + proto.BOOL, + number=2, + ) class SchemaSettings(proto.Message): @@ -112,17 +124,18 @@ class SchemaSettings(proto.Message): field will be ``_deleted-schema_`` if the schema has been deleted. encoding (google.pubsub_v1.types.Encoding): - The encoding of messages validated against ``schema``. + Optional. The encoding of messages validated against + ``schema``. first_revision_id (str): - The minimum (inclusive) revision allowed for validating - messages. If empty or not present, allow any revision to be - validated against last_revision or any revision created - before. + Optional. The minimum (inclusive) revision allowed for + validating messages. If empty or not present, allow any + revision to be validated against last_revision or any + revision created before. last_revision_id (str): - The maximum (inclusive) revision allowed for validating - messages. If empty or not present, allow any revision to be - validated against first_revision or any revision created - after. + Optional. The maximum (inclusive) revision allowed for + validating messages. If empty or not present, allow any + revision to be validated against first_revision or any + revision created after. 
""" schema: str = proto.Field( @@ -144,6 +157,116 @@ class SchemaSettings(proto.Message): ) +class IngestionDataSourceSettings(proto.Message): + r"""Settings for an ingestion data source on a topic. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + aws_kinesis (google.pubsub_v1.types.IngestionDataSourceSettings.AwsKinesis): + Optional. Amazon Kinesis Data Streams. + + This field is a member of `oneof`_ ``source``. + """ + + class AwsKinesis(proto.Message): + r"""Ingestion settings for Amazon Kinesis Data Streams. + + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.AwsKinesis.State): + Output only. An output-only field that + indicates the state of the Kinesis ingestion + source. + stream_arn (str): + Required. The Kinesis stream ARN to ingest + data from. + consumer_arn (str): + Required. The Kinesis consumer ARN to used + for ingestion in Enhanced Fan-Out mode. The + consumer must be already created and ready to be + used. + aws_role_arn (str): + Required. AWS role ARN to be used for + Federated Identity authentication with Kinesis. + Check the Pub/Sub docs for how to set up this + role and the required permissions that need to + be attached to it. + gcp_service_account (str): + Required. The GCP service account to be used for Federated + Identity authentication with Kinesis (via a + ``AssumeRoleWithWebIdentity`` call for the provided role). + The ``aws_role_arn`` must be set up with + ``accounts.google.com:sub`` equals to this service account + number. + """ + + class State(proto.Enum): + r"""Possible states for managed ingestion from Amazon Kinesis + Data Streams. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + KINESIS_PERMISSION_DENIED (2): + Permission denied encountered while consuming data from + Kinesis. 
This can happen if: + + - The provided ``aws_role_arn`` does not exist or does not + have the appropriate permissions attached. + - The provided ``aws_role_arn`` is not set up properly for + Identity Federation using ``gcp_service_account``. + - The Pub/Sub SA is not granted the + ``iam.serviceAccounts.getOpenIdToken`` permission on + ``gcp_service_account``. + PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while publishing to the topic. + This can happen due to Pub/Sub SA has not been granted the + `appropriate publish + permissions `__ + STREAM_NOT_FOUND (4): + The Kinesis stream does not exist. + CONSUMER_NOT_FOUND (5): + The Kinesis consumer does not exist. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + KINESIS_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + STREAM_NOT_FOUND = 4 + CONSUMER_NOT_FOUND = 5 + + state: "IngestionDataSourceSettings.AwsKinesis.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.AwsKinesis.State", + ) + stream_arn: str = proto.Field( + proto.STRING, + number=2, + ) + consumer_arn: str = proto.Field( + proto.STRING, + number=3, + ) + aws_role_arn: str = proto.Field( + proto.STRING, + number=4, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + aws_kinesis: AwsKinesis = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message=AwsKinesis, + ) + + class Topic(proto.Message): r"""A topic resource. @@ -158,30 +281,30 @@ class Topic(proto.Message): 255 characters in length, and it must not start with ``"goog"``. labels (MutableMapping[str, str]): - See [Creating and managing labels] + Optional. See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). message_storage_policy (google.pubsub_v1.types.MessageStoragePolicy): - Policy constraining the set of Google Cloud - Platform regions where messages published to the - topic may be stored. If not present, then no - constraints are in effect. + Optional. 
Policy constraining the set of + Google Cloud Platform regions where messages + published to the topic may be stored. If not + present, then no constraints are in effect. kms_key_name (str): - The resource name of the Cloud KMS CryptoKey to be used to - protect access to messages published on this topic. + Optional. The resource name of the Cloud KMS CryptoKey to be + used to protect access to messages published on this topic. The expected format is ``projects/*/locations/*/keyRings/*/cryptoKeys/*``. schema_settings (google.pubsub_v1.types.SchemaSettings): - Settings for validating messages published - against a schema. + Optional. Settings for validating messages + published against a schema. satisfies_pzs (bool): - Reserved for future use. This field is set - only in responses from the server; it is ignored - if it is set in any requests. + Optional. Reserved for future use. This field + is set only in responses from the server; it is + ignored if it is set in any requests. message_retention_duration (google.protobuf.duration_pb2.Duration): - Indicates the minimum duration to retain a message after it - is published to the topic. If this field is set, messages - published to the topic in the last + Optional. Indicates the minimum duration to retain a message + after it is published to the topic. If this field is set, + messages published to the topic in the last ``message_retention_duration`` are always available to subscribers. For instance, it allows any attached subscription to `seek to a @@ -190,8 +313,33 @@ class Topic(proto.Message): this field is not set, message retention is controlled by settings on individual subscriptions. Cannot be more than 31 days or less than 10 minutes. + state (google.pubsub_v1.types.Topic.State): + Output only. An output-only field indicating + the state of the topic. + ingestion_data_source_settings (google.pubsub_v1.types.IngestionDataSourceSettings): + Optional. 
Settings for managed ingestion from + a data source into this topic. """ + class State(proto.Enum): + r"""The state of the topic. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + The topic does not have any persistent + errors. + INGESTION_RESOURCE_ERROR (2): + Ingestion from the data source has + encountered a permanent error. See the more + detailed error state in the corresponding + ingestion source configuration. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + INGESTION_RESOURCE_ERROR = 2 + name: str = proto.Field( proto.STRING, number=1, @@ -224,6 +372,16 @@ class Topic(proto.Message): number=8, message=duration_pb2.Duration, ) + state: State = proto.Field( + proto.ENUM, + number=9, + enum=State, + ) + ingestion_data_source_settings: "IngestionDataSourceSettings" = proto.Field( + proto.MESSAGE, + number=10, + message="IngestionDataSourceSettings", + ) class PubsubMessage(proto.Message): @@ -239,14 +397,14 @@ class PubsubMessage(proto.Message): Attributes: data (bytes): - The message data field. If this field is - empty, the message must contain at least one - attribute. + Optional. The message data field. If this + field is empty, the message must contain at + least one attribute. attributes (MutableMapping[str, str]): - Attributes for this message. If this field is - empty, the message must contain non-empty data. - This can be used to filter messages on the - subscription. + Optional. Attributes for this message. If + this field is empty, the message must contain + non-empty data. This can be used to filter + messages on the subscription. message_id (str): ID of this message, assigned by the server when the message is published. Guaranteed to be unique within the topic. This @@ -259,15 +417,15 @@ class PubsubMessage(proto.Message): the server when it receives the ``Publish`` call. It must not be populated by the publisher in a ``Publish`` call. 
ordering_key (str): - If non-empty, identifies related messages for which publish - order should be respected. If a ``Subscription`` has - ``enable_message_ordering`` set to ``true``, messages - published with the same non-empty ``ordering_key`` value - will be delivered to subscribers in the order in which they - are received by the Pub/Sub system. All ``PubsubMessage``\ s - published in a given ``PublishRequest`` must specify the - same ``ordering_key`` value. For more information, see - `ordering + Optional. If non-empty, identifies related messages for + which publish order should be respected. If a + ``Subscription`` has ``enable_message_ordering`` set to + ``true``, messages published with the same non-empty + ``ordering_key`` value will be delivered to subscribers in + the order in which they are received by the Pub/Sub system. + All ``PubsubMessage``\ s published in a given + ``PublishRequest`` must specify the same ``ordering_key`` + value. For more information, see `ordering messages `__. """ @@ -364,10 +522,10 @@ class PublishResponse(proto.Message): Attributes: message_ids (MutableSequence[str]): - The server-assigned ID of each published - message, in the same order as the messages in - the request. IDs are guaranteed to be unique - within the topic. + Optional. The server-assigned ID of each + published message, in the same order as the + messages in the request. IDs are guaranteed to + be unique within the topic. """ message_ids: MutableSequence[str] = proto.RepeatedField( @@ -384,12 +542,12 @@ class ListTopicsRequest(proto.Message): Required. The name of the project in which to list topics. Format is ``projects/{project-id}``. page_size (int): - Maximum number of topics to return. + Optional. Maximum number of topics to return. page_token (str): - The value returned by the last ``ListTopicsResponse``; - indicates that this is a continuation of a prior - ``ListTopics`` call, and that the system should return the - next page of data. + Optional. 
The value returned by the last + ``ListTopicsResponse``; indicates that this is a + continuation of a prior ``ListTopics`` call, and that the + system should return the next page of data. """ project: str = proto.Field( @@ -411,11 +569,11 @@ class ListTopicsResponse(proto.Message): Attributes: topics (MutableSequence[google.pubsub_v1.types.Topic]): - The resulting topics. + Optional. The resulting topics. next_page_token (str): - If not empty, indicates that there may be more topics that - match the request; this value should be passed in a new - ``ListTopicsRequest``. + Optional. If not empty, indicates that there may be more + topics that match the request; this value should be passed + in a new ``ListTopicsRequest``. """ @property @@ -442,10 +600,10 @@ class ListTopicSubscriptionsRequest(proto.Message): attached to. Format is ``projects/{project}/topics/{topic}``. page_size (int): - Maximum number of subscription names to - return. + Optional. Maximum number of subscription + names to return. page_token (str): - The value returned by the last + Optional. The value returned by the last ``ListTopicSubscriptionsResponse``; indicates that this is a continuation of a prior ``ListTopicSubscriptions`` call, and that the system should return the next page of data. @@ -470,12 +628,13 @@ class ListTopicSubscriptionsResponse(proto.Message): Attributes: subscriptions (MutableSequence[str]): - The names of subscriptions attached to the - topic specified in the request. + Optional. The names of subscriptions attached + to the topic specified in the request. next_page_token (str): - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListTopicSubscriptionsRequest`` to get more subscriptions. + Optional. If not empty, indicates that there may be more + subscriptions that match the request; this value should be + passed in a new ``ListTopicSubscriptionsRequest`` to get + more subscriptions. 
""" @property @@ -500,9 +659,10 @@ class ListTopicSnapshotsRequest(proto.Message): Required. The name of the topic that snapshots are attached to. Format is ``projects/{project}/topics/{topic}``. page_size (int): - Maximum number of snapshot names to return. + Optional. Maximum number of snapshot names to + return. page_token (str): - The value returned by the last + Optional. The value returned by the last ``ListTopicSnapshotsResponse``; indicates that this is a continuation of a prior ``ListTopicSnapshots`` call, and that the system should return the next page of data. @@ -527,12 +687,13 @@ class ListTopicSnapshotsResponse(proto.Message): Attributes: snapshots (MutableSequence[str]): - The names of the snapshots that match the - request. + Optional. The names of the snapshots that + match the request. next_page_token (str): - If not empty, indicates that there may be more snapshots - that match the request; this value should be passed in a new - ``ListTopicSnapshotsRequest`` to get more snapshots. + Optional. If not empty, indicates that there may be more + snapshots that match the request; this value should be + passed in a new ``ListTopicSnapshotsRequest`` to get more + snapshots. """ @property @@ -610,24 +771,25 @@ class Subscription(proto.Message): field will be ``_deleted-topic_`` if the topic has been deleted. push_config (google.pubsub_v1.types.PushConfig): - If push delivery is used with this + Optional. If push delivery is used with this subscription, this field is used to configure it. bigquery_config (google.pubsub_v1.types.BigQueryConfig): - If delivery to BigQuery is used with this - subscription, this field is used to configure - it. - cloud_storage_config (google.pubsub_v1.types.CloudStorageConfig): - If delivery to Google Cloud Storage is used + Optional. If delivery to BigQuery is used with this subscription, this field is used to configure it. + cloud_storage_config (google.pubsub_v1.types.CloudStorageConfig): + Optional. 
If delivery to Google Cloud Storage + is used with this subscription, this field is + used to configure it. ack_deadline_seconds (int): - The approximate amount of time (on a best-effort basis) - Pub/Sub waits for the subscriber to acknowledge receipt - before resending the message. In the interval after the - message is delivered and before it is acknowledged, it is - considered to be *outstanding*. During that time period, the - message will not be redelivered (on a best-effort basis). + Optional. The approximate amount of time (on a best-effort + basis) Pub/Sub waits for the subscriber to acknowledge + receipt before resending the message. In the interval after + the message is delivered and before it is acknowledged, it + is considered to be *outstanding*. During that time period, + the message will not be redelivered (on a best-effort + basis). For pull subscriptions, this value is used as the initial value for the ack deadline. To override this value for a @@ -645,15 +807,16 @@ class Subscription(proto.Message): If the subscriber never acknowledges the message, the Pub/Sub system will eventually redeliver the message. retain_acked_messages (bool): - Indicates whether to retain acknowledged messages. If true, - then messages are not expunged from the subscription's - backlog, even if they are acknowledged, until they fall out - of the ``message_retention_duration`` window. This must be - true if you would like to [``Seek`` to a timestamp] + Optional. Indicates whether to retain acknowledged messages. + If true, then messages are not expunged from the + subscription's backlog, even if they are acknowledged, until + they fall out of the ``message_retention_duration`` window. + This must be true if you would like to [``Seek`` to a + timestamp] (https://cloud.google.com/pubsub/docs/replay-overview#seek_to_a_time) in the past to replay previously-acknowledged messages. 
message_retention_duration (google.protobuf.duration_pb2.Duration): - How long to retain unacknowledged messages in the + Optional. How long to retain unacknowledged messages in the subscription's backlog, from the moment a message is published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and @@ -661,15 +824,16 @@ class Subscription(proto.Message): Defaults to 7 days. Cannot be more than 7 days or less than 10 minutes. labels (MutableMapping[str, str]): - See `Creating and managing + Optional. See `Creating and managing labels `__. enable_message_ordering (bool): - If true, messages published with the same ``ordering_key`` - in ``PubsubMessage`` will be delivered to the subscribers in - the order in which they are received by the Pub/Sub system. - Otherwise, they may be delivered in any order. + Optional. If true, messages published with the same + ``ordering_key`` in ``PubsubMessage`` will be delivered to + the subscribers in the order in which they are received by + the Pub/Sub system. Otherwise, they may be delivered in any + order. expiration_policy (google.pubsub_v1.types.ExpirationPolicy): - A policy that specifies the conditions for this + Optional. A policy that specifies the conditions for this subscription's expiration. A subscription is considered active as long as any connected subscriber is successfully consuming messages from the subscription or is issuing @@ -680,25 +844,25 @@ class Subscription(proto.Message): is set, but ``expiration_policy.ttl`` is not set, the subscription never expires. filter (str): - An expression written in the Pub/Sub `filter + Optional. An expression written in the Pub/Sub `filter language `__. If non-empty, then only ``PubsubMessage``\ s whose ``attributes`` field matches the filter are delivered on this subscription. If empty, then no messages are filtered out. 
dead_letter_policy (google.pubsub_v1.types.DeadLetterPolicy): - A policy that specifies the conditions for dead lettering - messages in this subscription. If dead_letter_policy is not - set, dead lettering is disabled. + Optional. A policy that specifies the conditions for dead + lettering messages in this subscription. If + dead_letter_policy is not set, dead lettering is disabled. - The Cloud Pub/Sub service account associated with this + The Pub/Sub service account associated with this subscriptions's parent project (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Acknowledge() messages on this subscription. retry_policy (google.pubsub_v1.types.RetryPolicy): - A policy that specifies how Pub/Sub retries - message delivery for this subscription. + Optional. A policy that specifies how Pub/Sub + retries message delivery for this subscription. If not set, the default retry policy is applied. This generally implies that messages will be @@ -707,16 +871,16 @@ class Subscription(proto.Message): NACKs or acknowledgement deadline exceeded events for a given message. detached (bool): - Indicates whether the subscription is detached from its - topic. Detached subscriptions don't receive messages from - their topic and don't retain any backlog. ``Pull`` and - ``StreamingPull`` requests will return FAILED_PRECONDITION. - If the subscription is a push subscription, pushes to the - endpoint will not be made. + Optional. Indicates whether the subscription is detached + from its topic. Detached subscriptions don't receive + messages from their topic and don't retain any backlog. + ``Pull`` and ``StreamingPull`` requests will return + FAILED_PRECONDITION. If the subscription is a push + subscription, pushes to the endpoint will not be made. enable_exactly_once_delivery (bool): - If true, Pub/Sub provides the following guarantees for the - delivery of a message with a given value of ``message_id`` - on this subscription: + Optional. 
If true, Pub/Sub provides the following guarantees + for the delivery of a message with a given value of + ``message_id`` on this subscription: - The message sent to a subscriber is guaranteed not to be resent before the message's acknowledgement deadline @@ -848,7 +1012,7 @@ class State(proto.Enum): class RetryPolicy(proto.Message): - r"""A policy that specifies how Cloud Pub/Sub retries message delivery. + r"""A policy that specifies how Pub/Sub retries message delivery. Retry delay will be exponential based on provided minimum and maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. @@ -863,15 +1027,15 @@ class RetryPolicy(proto.Message): Attributes: minimum_backoff (google.protobuf.duration_pb2.Duration): - The minimum delay between consecutive - deliveries of a given message. Value should be - between 0 and 600 seconds. Defaults to 10 - seconds. + Optional. The minimum delay between + consecutive deliveries of a given message. Value + should be between 0 and 600 seconds. Defaults to + 10 seconds. maximum_backoff (google.protobuf.duration_pb2.Duration): - The maximum delay between consecutive - deliveries of a given message. Value should be - between 0 and 600 seconds. Defaults to 600 - seconds. + Optional. The maximum delay between + consecutive deliveries of a given message. Value + should be between 0 and 600 seconds. Defaults to + 600 seconds. """ minimum_backoff: duration_pb2.Duration = proto.Field( @@ -896,11 +1060,11 @@ class DeadLetterPolicy(proto.Message): Attributes: dead_letter_topic (str): - The name of the topic to which dead letter messages should - be published. Format is - ``projects/{project}/topics/{topic}``.The Cloud Pub/Sub - service account associated with the enclosing subscription's - parent project (i.e., + Optional. The name of the topic to which dead letter + messages should be published. 
Format is + ``projects/{project}/topics/{topic}``.The Pub/Sub service + account associated with the enclosing subscription's parent + project (i.e., service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have permission to Publish() to this topic. @@ -909,8 +1073,8 @@ class DeadLetterPolicy(proto.Message): topic since messages published to a topic with no subscriptions are lost. max_delivery_attempts (int): - The maximum number of delivery attempts for any message. The - value must be between 5 and 100. + Optional. The maximum number of delivery attempts for any + message. The value must be between 5 and 100. The number of delivery attempts is defined as 1 + (the sum of number of NACKs and number of times the acknowledgement @@ -941,12 +1105,12 @@ class ExpirationPolicy(proto.Message): Attributes: ttl (google.protobuf.duration_pb2.Duration): - Specifies the "time-to-live" duration for an associated - resource. The resource expires if it is not active for a - period of ``ttl``. The definition of "activity" depends on - the type of the associated resource. The minimum and maximum - allowed values for ``ttl`` depend on the type of the - associated resource, as well. If ``ttl`` is not set, the + Optional. Specifies the "time-to-live" duration for an + associated resource. The resource expires if it is not + active for a period of ``ttl``. The definition of "activity" + depends on the type of the associated resource. The minimum + and maximum allowed values for ``ttl`` depend on the type of + the associated resource, as well. If ``ttl`` is not set, the associated resource never expires. """ @@ -969,12 +1133,12 @@ class PushConfig(proto.Message): Attributes: push_endpoint (str): - A URL locating the endpoint to which messages should be - pushed. For example, a Webhook endpoint might use + Optional. A URL locating the endpoint to which messages + should be pushed. For example, a Webhook endpoint might use ``https://example.com/push``. 
attributes (MutableMapping[str, str]): - Endpoint configuration attributes that can be used to - control different aspects of the message delivery. + Optional. Endpoint configuration attributes that can be used + to control different aspects of the message delivery. The only currently supported attribute is ``x-goog-version``, which you can use to change the format @@ -999,21 +1163,21 @@ class PushConfig(proto.Message): For example: ``attributes { "x-goog-version": "v1" }`` oidc_token (google.pubsub_v1.types.PushConfig.OidcToken): - If specified, Pub/Sub will generate and attach an OIDC JWT - token as an ``Authorization`` header in the HTTP request for - every pushed message. + Optional. If specified, Pub/Sub will generate and attach an + OIDC JWT token as an ``Authorization`` header in the HTTP + request for every pushed message. This field is a member of `oneof`_ ``authentication_method``. pubsub_wrapper (google.pubsub_v1.types.PushConfig.PubsubWrapper): - When set, the payload to the push endpoint is - in the form of the JSON representation of a - PubsubMessage + Optional. When set, the payload to the push + endpoint is in the form of the JSON + representation of a PubsubMessage (https://cloud.google.com/pubsub/docs/reference/rpc/google.pubsub.v1#pubsubmessage). This field is a member of `oneof`_ ``wrapper``. no_wrapper (google.pubsub_v1.types.PushConfig.NoWrapper): - When set, the payload to the push endpoint is - not wrapped. + Optional. When set, the payload to the push + endpoint is not wrapped. This field is a member of `oneof`_ ``wrapper``. """ @@ -1024,19 +1188,20 @@ class OidcToken(proto.Message): Attributes: service_account_email (str): - `Service account + Optional. `Service account email `__ used for generating the OIDC token. For more information on setting up authentication, see `Push subscriptions `__. audience (str): - Audience to be used when generating OIDC - token. The audience claim identifies the + Optional. 
Audience to be used when generating + OIDC token. The audience claim identifies the recipients that the JWT is intended for. The audience value is a single case-sensitive string. Having multiple values (array) for the audience field is not supported. More info about the OIDC JWT token audience here: + https://tools.ietf.org/html/rfc7519#section-4.1.3 Note: if not specified, the Push endpoint URL will be used. @@ -1063,7 +1228,7 @@ class NoWrapper(proto.Message): Attributes: write_metadata (bool): - When true, writes the Pub/Sub message metadata to + Optional. When true, writes the Pub/Sub message metadata to ``x-goog-pubsub-:`` headers of the HTTP request. Writes the Pub/Sub message attributes to ``:`` headers of the HTTP request. @@ -1108,23 +1273,24 @@ class BigQueryConfig(proto.Message): Attributes: table (str): - The name of the table to which to write data, - of the form {projectId}.{datasetId}.{tableId} + Optional. The name of the table to which to + write data, of the form + {projectId}.{datasetId}.{tableId} use_topic_schema (bool): Optional. When true, use the topic's schema as the columns to write to in BigQuery, if it exists. ``use_topic_schema`` and ``use_table_schema`` cannot be enabled at the same time. write_metadata (bool): - When true, write the subscription name, message_id, - publish_time, attributes, and ordering_key to additional - columns in the table. The subscription name, message_id, and - publish_time fields are put in their own columns while all - other message properties (other than data) are written to a - JSON object in the attributes column. + Optional. When true, write the subscription name, + message_id, publish_time, attributes, and ordering_key to + additional columns in the table. The subscription name, + message_id, and publish_time fields are put in their own + columns while all other message properties (other than data) + are written to a JSON object in the attributes column. 
drop_unknown_fields (bool): - When true and use_topic_schema is true, any fields that are - a part of the topic schema that are not part of the BigQuery - table schema are dropped when writing to BigQuery. + Optional. When true and use_topic_schema is true, any fields + that are a part of the topic schema that are not part of the + BigQuery table schema are dropped when writing to BigQuery. Otherwise, the schemas must be kept in sync and any messages with extra fields are not written and remain in the subscription's backlog. @@ -1163,12 +1329,17 @@ class State(proto.Enum): SCHEMA_MISMATCH (4): Cannot write to the BigQuery table due to a schema mismatch. + IN_TRANSIT_LOCATION_RESTRICTION (5): + Cannot write to the destination because enforce_in_transit + is set to true and the destination locations are not in the + allowed regions. """ STATE_UNSPECIFIED = 0 ACTIVE = 1 PERMISSION_DENIED = 2 NOT_FOUND = 3 SCHEMA_MISMATCH = 4 + IN_TRANSIT_LOCATION_RESTRICTION = 5 table: str = proto.Field( proto.STRING, @@ -1215,35 +1386,35 @@ class CloudStorageConfig(proto.Message): requirements] (https://cloud.google.com/storage/docs/buckets#naming). filename_prefix (str): - User-provided prefix for Cloud Storage filename. See the - `object naming + Optional. User-provided prefix for Cloud Storage filename. + See the `object naming requirements `__. filename_suffix (str): - User-provided suffix for Cloud Storage filename. See the - `object naming + Optional. User-provided suffix for Cloud Storage filename. + See the `object naming requirements `__. Must not end in "/". text_config (google.pubsub_v1.types.CloudStorageConfig.TextConfig): - If set, message data will be written to Cloud - Storage in text format. + Optional. If set, message data will be + written to Cloud Storage in text format. This field is a member of `oneof`_ ``output_format``. avro_config (google.pubsub_v1.types.CloudStorageConfig.AvroConfig): - If set, message data will be written to Cloud - Storage in Avro format. 
+ Optional. If set, message data will be + written to Cloud Storage in Avro format. This field is a member of `oneof`_ ``output_format``. max_duration (google.protobuf.duration_pb2.Duration): - The maximum duration that can elapse before a - new Cloud Storage file is created. Min 1 minute, - max 10 minutes, default 5 minutes. May not - exceed the subscription's acknowledgement - deadline. + Optional. The maximum duration that can + elapse before a new Cloud Storage file is + created. Min 1 minute, max 10 minutes, default 5 + minutes. May not exceed the subscription's + acknowledgement deadline. max_bytes (int): - The maximum bytes that can be written to a Cloud Storage - file before a new file is created. Min 1 KB, max 10 GiB. The - max_bytes limit may be exceeded in cases where messages are - larger than the limit. + Optional. The maximum bytes that can be written to a Cloud + Storage file before a new file is created. Min 1 KB, max 10 + GiB. The max_bytes limit may be exceeded in cases where + messages are larger than the limit. state (google.pubsub_v1.types.CloudStorageConfig.State): Output only. An output-only field that indicates whether or not the subscription can @@ -1265,11 +1436,16 @@ class State(proto.Enum): NOT_FOUND (3): Cannot write to the Cloud Storage bucket because it does not exist. + IN_TRANSIT_LOCATION_RESTRICTION (4): + Cannot write to the destination because enforce_in_transit + is set to true and the destination locations are not in the + allowed regions. """ STATE_UNSPECIFIED = 0 ACTIVE = 1 PERMISSION_DENIED = 2 NOT_FOUND = 3 + IN_TRANSIT_LOCATION_RESTRICTION = 4 class TextConfig(proto.Message): r"""Configuration for writing message data in text format. @@ -1285,13 +1461,13 @@ class AvroConfig(proto.Message): Attributes: write_metadata (bool): - When true, write the subscription name, message_id, - publish_time, attributes, and ordering_key as additional - fields in the output. 
The subscription name, message_id, and - publish_time fields are put in their own fields while all - other message properties other than data (for example, an - ordering_key, if present) are added as entries in the - attributes map. + Optional. When true, write the subscription name, + message_id, publish_time, attributes, and ordering_key as + additional fields in the output. The subscription name, + message_id, and publish_time fields are put in their own + fields while all other message properties other than data + (for example, an ordering_key, if present) are added as + entries in the attributes map. """ write_metadata: bool = proto.Field( @@ -1344,12 +1520,12 @@ class ReceivedMessage(proto.Message): Attributes: ack_id (str): - This ID can be used to acknowledge the - received message. + Optional. This ID can be used to acknowledge + the received message. message (google.pubsub_v1.types.PubsubMessage): - The message. + Optional. The message. delivery_attempt (int): - The approximate number of times that Cloud Pub/Sub has + Optional. The approximate number of times that Pub/Sub has attempted to deliver the associated message to a subscriber. More precisely, this is 1 + (number of NACKs) + (number of @@ -1431,9 +1607,10 @@ class ListSubscriptionsRequest(proto.Message): Required. The name of the project in which to list subscriptions. Format is ``projects/{project-id}``. page_size (int): - Maximum number of subscriptions to return. + Optional. Maximum number of subscriptions to + return. page_token (str): - The value returned by the last + Optional. The value returned by the last ``ListSubscriptionsResponse``; indicates that this is a continuation of a prior ``ListSubscriptions`` call, and that the system should return the next page of data. @@ -1458,11 +1635,13 @@ class ListSubscriptionsResponse(proto.Message): Attributes: subscriptions (MutableSequence[google.pubsub_v1.types.Subscription]): - The subscriptions that match the request. + Optional. 
The subscriptions that match the + request. next_page_token (str): - If not empty, indicates that there may be more subscriptions - that match the request; this value should be passed in a new - ``ListSubscriptionsRequest`` to get more subscriptions. + Optional. If not empty, indicates that there may be more + subscriptions that match the request; this value should be + passed in a new ``ListSubscriptionsRequest`` to get more + subscriptions. """ @property @@ -1566,12 +1745,12 @@ class PullResponse(proto.Message): Attributes: received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): - Received Pub/Sub messages. The list will be empty if there - are no more messages available in the backlog, or if no - messages could be returned before the request timeout. For - JSON, the response can be entirely empty. The Pub/Sub system - may return fewer than the ``maxMessages`` requested even if - there are more messages available in the backlog. + Optional. Received Pub/Sub messages. The list will be empty + if there are no more messages available in the backlog, or + if no messages could be returned before the request timeout. + For JSON, the response can be entirely empty. The Pub/Sub + system may return fewer than the ``maxMessages`` requested + even if there are more messages available in the backlog. """ received_messages: MutableSequence["ReceivedMessage"] = proto.RepeatedField( @@ -1599,8 +1778,8 @@ class ModifyAckDeadlineRequest(proto.Message): delivery to another subscriber client. This typically results in an increase in the rate of message redeliveries (that is, duplicates). The minimum deadline you can specify - is 0 seconds. The maximum deadline you can specify is 600 - seconds (10 minutes). + is 0 seconds. The maximum deadline you can specify in a + single request is 600 seconds (10 minutes). """ subscription: str = proto.Field( @@ -1655,18 +1834,18 @@ class StreamingPullRequest(proto.Message): client to server. 
Format is ``projects/{project}/subscriptions/{sub}``. ack_ids (MutableSequence[str]): - List of acknowledgement IDs for acknowledging previously - received messages (received on this stream or a different - stream). If an ack ID has expired, the corresponding message - may be redelivered later. Acknowledging a message more than - once will not result in an error. If the acknowledgement ID - is malformed, the stream will be aborted with status - ``INVALID_ARGUMENT``. + Optional. List of acknowledgement IDs for acknowledging + previously received messages (received on this stream or a + different stream). If an ack ID has expired, the + corresponding message may be redelivered later. + Acknowledging a message more than once will not result in an + error. If the acknowledgement ID is malformed, the stream + will be aborted with status ``INVALID_ARGUMENT``. modify_deadline_seconds (MutableSequence[int]): - The list of new ack deadlines for the IDs listed in - ``modify_deadline_ack_ids``. The size of this list must be - the same as the size of ``modify_deadline_ack_ids``. If it - differs the stream will be aborted with + Optional. The list of new ack deadlines for the IDs listed + in ``modify_deadline_ack_ids``. The size of this list must + be the same as the size of ``modify_deadline_ack_ids``. If + it differs the stream will be aborted with ``INVALID_ARGUMENT``. Each element in this list is applied to the element in the same position in ``modify_deadline_ack_ids``. The new ack deadline is with @@ -1678,8 +1857,8 @@ class StreamingPullRequest(proto.Message): request. If the value is < 0 (an error), the stream will be aborted with status ``INVALID_ARGUMENT``. modify_deadline_ack_ids (MutableSequence[str]): - List of acknowledgement IDs whose deadline will be modified - based on the corresponding element in + Optional. List of acknowledgement IDs whose deadline will be + modified based on the corresponding element in ``modify_deadline_seconds``. 
This field can be used to indicate that more time is needed to process a message by the subscriber, or to make the message available for @@ -1693,36 +1872,37 @@ class StreamingPullRequest(proto.Message): 10 seconds. The maximum deadline you can specify is 600 seconds (10 minutes). client_id (str): - A unique identifier that is used to distinguish client - instances from each other. Only needs to be provided on the - initial request. When a stream disconnects and reconnects - for the same stream, the client_id should be set to the same - value so that state associated with the old stream can be - transferred to the new stream. The same client_id should not - be used for different client instances. + Optional. A unique identifier that is used to distinguish + client instances from each other. Only needs to be provided + on the initial request. When a stream disconnects and + reconnects for the same stream, the client_id should be set + to the same value so that state associated with the old + stream can be transferred to the new stream. The same + client_id should not be used for different client instances. max_outstanding_messages (int): - Flow control settings for the maximum number of outstanding - messages. When there are ``max_outstanding_messages`` or - more currently sent to the streaming pull client that have - not yet been acked or nacked, the server stops sending more - messages. The sending of messages resumes once the number of - outstanding messages is less than this value. If the value - is <= 0, there is no limit to the number of outstanding - messages. This property can only be set on the initial - StreamingPullRequest. If it is set on a subsequent request, - the stream will be aborted with status ``INVALID_ARGUMENT``. - max_outstanding_bytes (int): - Flow control settings for the maximum number of outstanding - bytes. 
When there are ``max_outstanding_bytes`` or more - worth of messages currently sent to the streaming pull - client that have not yet been acked or nacked, the server - will stop sending more messages. The sending of messages - resumes once the number of outstanding bytes is less than + Optional. Flow control settings for the maximum number of + outstanding messages. When there are + ``max_outstanding_messages`` currently sent to the streaming + pull client that have not yet been acked or nacked, the + server stops sending more messages. The sending of messages + resumes once the number of outstanding messages is less than this value. If the value is <= 0, there is no limit to the - number of outstanding bytes. This property can only be set - on the initial StreamingPullRequest. If it is set on a + number of outstanding messages. This property can only be + set on the initial StreamingPullRequest. If it is set on a subsequent request, the stream will be aborted with status ``INVALID_ARGUMENT``. + max_outstanding_bytes (int): + Optional. Flow control settings for the maximum number of + outstanding bytes. When there are ``max_outstanding_bytes`` + or more worth of messages currently sent to the streaming + pull client that have not yet been acked or nacked, the + server will stop sending more messages. The sending of + messages resumes once the number of outstanding bytes is + less than this value. If the value is <= 0, there is no + limit to the number of outstanding bytes. This property can + only be set on the initial StreamingPullRequest. If it is + set on a subsequent request, the stream will be aborted with + status ``INVALID_ARGUMENT``. """ subscription: str = proto.Field( @@ -1765,16 +1945,17 @@ class StreamingPullResponse(proto.Message): Attributes: received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): - Received Pub/Sub messages. This will not be - empty. + Optional. Received Pub/Sub messages. This + will not be empty. 
acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): - This field will only be set if + Optional. This field will only be set if ``enable_exactly_once_delivery`` is set to ``true``. modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): - This field will only be set if + Optional. This field will only be set if ``enable_exactly_once_delivery`` is set to ``true``. subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): - Properties associated with this subscription. + Optional. Properties associated with this + subscription. """ class AcknowledgeConfirmation(proto.Message): @@ -1783,17 +1964,18 @@ class AcknowledgeConfirmation(proto.Message): Attributes: ack_ids (MutableSequence[str]): - Successfully processed acknowledgement IDs. + Optional. Successfully processed + acknowledgement IDs. invalid_ack_ids (MutableSequence[str]): - List of acknowledgement IDs that were - malformed or whose acknowledgement deadline has - expired. + Optional. List of acknowledgement IDs that + were malformed or whose acknowledgement deadline + has expired. unordered_ack_ids (MutableSequence[str]): - List of acknowledgement IDs that were out of - order. + Optional. List of acknowledgement IDs that + were out of order. temporary_failed_ack_ids (MutableSequence[str]): - List of acknowledgement IDs that failed - processing with temporary issues. + Optional. List of acknowledgement IDs that + failed processing with temporary issues. """ ack_ids: MutableSequence[str] = proto.RepeatedField( @@ -1819,14 +2001,15 @@ class ModifyAckDeadlineConfirmation(proto.Message): Attributes: ack_ids (MutableSequence[str]): - Successfully processed acknowledgement IDs. + Optional. Successfully processed + acknowledgement IDs. invalid_ack_ids (MutableSequence[str]): - List of acknowledgement IDs that were - malformed or whose acknowledgement deadline has - expired. 
+ Optional. List of acknowledgement IDs that + were malformed or whose acknowledgement deadline + has expired. temporary_failed_ack_ids (MutableSequence[str]): - List of acknowledgement IDs that failed - processing with temporary issues. + Optional. List of acknowledgement IDs that + failed processing with temporary issues. """ ack_ids: MutableSequence[str] = proto.RepeatedField( @@ -1847,11 +2030,11 @@ class SubscriptionProperties(proto.Message): Attributes: exactly_once_delivery_enabled (bool): - True iff exactly once delivery is enabled for - this subscription. + Optional. True iff exactly once delivery is + enabled for this subscription. message_ordering_enabled (bool): - True iff message ordering is enabled for this - subscription. + Optional. True iff message ordering is + enabled for this subscription. """ exactly_once_delivery_enabled: bool = proto.Field( @@ -1909,7 +2092,7 @@ class CreateSnapshotRequest(proto.Message): CreateSnapshot request. Format is ``projects/{project}/subscriptions/{sub}``. labels (MutableMapping[str, str]): - See `Creating and managing + Optional. See `Creating and managing labels `__. """ @@ -1961,16 +2144,16 @@ class Snapshot(proto.Message): Attributes: name (str): - The name of the snapshot. + Optional. The name of the snapshot. topic (str): - The name of the topic from which this - snapshot is retaining messages. + Optional. The name of the topic from which + this snapshot is retaining messages. expire_time (google.protobuf.timestamp_pb2.Timestamp): - The snapshot is guaranteed to exist up until this time. A - newly-created snapshot expires no later than 7 days from the - time of its creation. Its exact lifetime is determined at - creation by the existing backlog in the source subscription. - Specifically, the lifetime of the snapshot is + Optional. The snapshot is guaranteed to exist up until this + time. A newly-created snapshot expires no later than 7 days + from the time of its creation. 
Its exact lifetime is + determined at creation by the existing backlog in the source + subscription. Specifically, the lifetime of the snapshot is ``7 days - (age of oldest unacked message in the subscription)``. For example, consider a subscription whose oldest unacked message is 3 days old. If a snapshot is created from this @@ -1980,7 +2163,7 @@ class Snapshot(proto.Message): snapshot that would expire in less than 1 hour after creation. labels (MutableMapping[str, str]): - See [Creating and managing labels] + Optional. See [Creating and managing labels] (https://cloud.google.com/pubsub/docs/labels). """ @@ -2027,12 +2210,13 @@ class ListSnapshotsRequest(proto.Message): Required. The name of the project in which to list snapshots. Format is ``projects/{project-id}``. page_size (int): - Maximum number of snapshots to return. + Optional. Maximum number of snapshots to + return. page_token (str): - The value returned by the last ``ListSnapshotsResponse``; - indicates that this is a continuation of a prior - ``ListSnapshots`` call, and that the system should return - the next page of data. + Optional. The value returned by the last + ``ListSnapshotsResponse``; indicates that this is a + continuation of a prior ``ListSnapshots`` call, and that the + system should return the next page of data. """ project: str = proto.Field( @@ -2054,11 +2238,11 @@ class ListSnapshotsResponse(proto.Message): Attributes: snapshots (MutableSequence[google.pubsub_v1.types.Snapshot]): - The resulting snapshots. + Optional. The resulting snapshots. next_page_token (str): - If not empty, indicates that there may be more snapshot that - match the request; this value should be passed in a new - ``ListSnapshotsRequest``. + Optional. If not empty, indicates that there may be more + snapshot that match the request; this value should be passed + in a new ``ListSnapshotsRequest``. """ @property @@ -2105,13 +2289,13 @@ class SeekRequest(proto.Message): subscription (str): Required. 
The subscription to affect. time (google.protobuf.timestamp_pb2.Timestamp): - The time to seek to. Messages retained in the subscription - that were published before this time are marked as - acknowledged, and messages retained in the subscription that - were published after this time are marked as unacknowledged. - Note that this operation affects only those messages - retained in the subscription (configured by the combination - of ``message_retention_duration`` and + Optional. The time to seek to. Messages retained in the + subscription that were published before this time are marked + as acknowledged, and messages retained in the subscription + that were published after this time are marked as + unacknowledged. Note that this operation affects only those + messages retained in the subscription (configured by the + combination of ``message_retention_duration`` and ``retain_acked_messages``). For example, if ``time`` corresponds to a point before the message retention window (or to a point before the system's notion of the @@ -2121,8 +2305,8 @@ class SeekRequest(proto.Message): This field is a member of `oneof`_ ``target``. snapshot (str): - The snapshot to seek to. The snapshot's topic must be the - same as that of the provided subscription. Format is + Optional. The snapshot to seek to. The snapshot's topic must + be the same as that of the provided subscription. Format is ``projects/{project}/snapshots/{snap}``. This field is a member of `oneof`_ ``target``. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index ff1d22770326..11853406dce7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -161,7 +161,7 @@ class CreateSchemaRequest(proto.Message): component of the schema's resource name. 
See - https://cloud.google.com/pubsub/docs/admin#resource_names + https://cloud.google.com/pubsub/docs/pubsub-basics#resource_names for resource name constraints. """ diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 0483b04df63f..c156909ff4e5 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -101,7 +101,7 @@ count = s.replace( clients_to_patch, - r"Transport = type\(self\)\.get_transport_class\(transport\)", + r"Transport = type\(self\)\.get_transport_class\(cast\(str, transport\)\)", """\g<0> emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") @@ -324,7 +324,7 @@ if count < 1: raise Exception(".coveragerc replacement failed.") - s.move([library], excludes=["**/gapic_version.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt"]) + s.move([library], excludes=["**/gapic_version.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 8a294ba36aa8..d8379e9bb312 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -44,7 +44,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 
), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', 'state', 'ingestion_data_source_settings', ), 'delete_schema': ('name', ), 'delete_schema_revision': ('name', 'revision_id', ), 'delete_snapshot': ('snapshot', ), diff --git a/packages/google-cloud-pubsub/testing/constraints-3.8.txt b/packages/google-cloud-pubsub/testing/constraints-3.8.txt index ad3f0fa58e2d..30520e2d05cc 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.8.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.8.txt @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. -google-api-core +google-api-core==1.34.0 proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 6511e2f8b886..aca4c59dd4f2 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -75,6 +76,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -99,6 +111,254 @@ def test__get_default_mtls_endpoint(): assert PublisherClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + assert PublisherClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PublisherClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert PublisherClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + PublisherClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert PublisherClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert PublisherClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert PublisherClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + PublisherClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable 
`GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert PublisherClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert PublisherClient._get_client_cert_source(None, False) is None + assert ( + PublisherClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + PublisherClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + PublisherClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + PublisherClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), +) +@mock.patch.object( + PublisherAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = PublisherClient._DEFAULT_UNIVERSE + default_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + PublisherClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + PublisherClient._get_api_endpoint( + None, 
mock_client_cert_source, default_universe, "auto" + ) + == PublisherClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PublisherClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + PublisherClient._get_api_endpoint(None, None, default_universe, "always") + == PublisherClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PublisherClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == PublisherClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + PublisherClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + PublisherClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + PublisherClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + PublisherClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + PublisherClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + PublisherClient._get_universe_domain(None, None) + == PublisherClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + PublisherClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (PublisherClient, transports.PublisherGrpcTransport, "grpc"), + (PublisherClient, transports.PublisherRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -209,12 +469,14 @@ def test_publisher_client_get_transport_class(): ], ) @mock.patch.object( - PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), ) @mock.patch.object( PublisherAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(PublisherAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), ) def test_publisher_client_client_options(client_class, transport_class, transport_name): # Check that if channel is provided we won't create a new one. @@ -254,7 +516,9 @@ def test_publisher_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -284,15 +548,23 @@ def test_publisher_client_client_options(client_class, transport_class, transpor # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -302,7 +574,9 @@ def test_publisher_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -320,7 +594,9 @@ def test_publisher_client_client_options(client_class, transport_class, transpor patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -352,12 +628,14 @@ def test_publisher_client_client_options(client_class, transport_class, transpor ], ) @mock.patch.object( - PublisherClient, "DEFAULT_ENDPOINT", modify_default_endpoint(PublisherClient) + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), ) @mock.patch.object( PublisherAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(PublisherAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_publisher_client_mtls_env_auto( @@ -380,7 +658,9 @@ def test_publisher_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = 
client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -412,7 +692,9 @@ def test_publisher_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -446,7 +728,9 @@ def test_publisher_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -532,6 +816,113 @@ def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) +@mock.patch.object( + PublisherClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherClient), +) +@mock.patch.object( + PublisherAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(PublisherAsyncClient), +) +def test_publisher_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = PublisherClient._DEFAULT_UNIVERSE + default_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -558,7 +949,9 @@ def test_publisher_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -593,7 +986,9 @@ def test_publisher_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -646,7 +1041,9 @@ def test_publisher_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -713,6 +1110,7 @@ def test_create_topic(request_type, transport: str = "grpc"): name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) response = client.create_topic(request) @@ -726,6 +1124,7 @@ def test_create_topic(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == 
pubsub.Topic.State.ACTIVE def test_create_topic_empty_call(): @@ -765,6 +1164,7 @@ async def test_create_topic_async( name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) ) response = await client.create_topic(request) @@ -779,6 +1179,7 @@ async def test_create_topic_async( assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE @pytest.mark.asyncio @@ -949,6 +1350,7 @@ def test_update_topic(request_type, transport: str = "grpc"): name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) response = client.update_topic(request) @@ -962,6 +1364,7 @@ def test_update_topic(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE def test_update_topic_empty_call(): @@ -1001,6 +1404,7 @@ async def test_update_topic_async( name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) ) response = await client.update_topic(request) @@ -1015,6 +1419,7 @@ async def test_update_topic_async( assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE @pytest.mark.asyncio @@ -1333,8 +1738,7 @@ def test_publish_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.publish( - topic="topic_value", - messages=[pubsub.PubsubMessage(data=b"data_blob")], + topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")] ) # Establish that the underlying call was made with the expected @@ -1437,6 +1841,7 @@ def test_get_topic(request_type, transport: str = "grpc"): name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) response = client.get_topic(request) @@ -1450,6 +1855,7 @@ def test_get_topic(request_type, transport: str = "grpc"): assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE def test_get_topic_empty_call(): @@ -1489,6 +1895,7 @@ async def test_get_topic_async( name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) ) response = await client.get_topic(request) @@ -1503,6 +1910,7 @@ async def test_get_topic_async( assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE @pytest.mark.asyncio @@ -1883,7 +2291,7 @@ async def test_list_topics_flattened_error_async(): def test_list_topics_pager(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1933,7 +2341,7 @@ def test_list_topics_pager(transport_name: str = "grpc"): def test_list_topics_pages(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1975,7 +2383,7 @@ def test_list_topics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topics_async_pager(): client = PublisherAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2025,7 +2433,7 @@ async def test_list_topics_async_pager(): @pytest.mark.asyncio async def test_list_topics_async_pages(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2323,7 +2731,7 @@ async def test_list_topic_subscriptions_flattened_error_async(): def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2375,7 +2783,7 @@ def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2419,7 +2827,7 @@ def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pager(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2471,7 +2879,7 @@ async def test_list_topic_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pages(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2771,7 +3179,7 @@ async def test_list_topic_snapshots_flattened_error_async(): def test_list_topic_snapshots_pager(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2823,7 +3231,7 @@ def test_list_topic_snapshots_pager(transport_name: str = "grpc"): def test_list_topic_snapshots_pages(transport_name: str = "grpc"): client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -2867,7 +3275,7 @@ def test_list_topic_snapshots_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pager(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2919,7 +3327,7 @@ async def test_list_topic_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pages(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3365,6 +3773,7 @@ def test_create_topic_rest(request_type): name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -3383,6 +3792,7 @@ def test_create_topic_rest(request_type): assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE def test_create_topic_rest_required_fields(request_type=pubsub.Topic): @@ -3631,6 +4041,7 @@ def test_update_topic_rest(request_type): name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -3649,6 +4060,7 @@ def test_update_topic_rest(request_type): assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicRequest): @@ -4177,6 +4589,7 @@ def test_get_topic_rest(request_type): name="name_value", kms_key_name="kms_key_name_value", satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, ) # Wrap the value into a proper Response obj @@ -4195,6 +4608,7 @@ def test_get_topic_rest(request_type): assert response.name == "name_value" assert response.kms_key_name == "kms_key_name_value" assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest): @@ -5921,7 +6335,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = PublisherClient( @@ -7407,7 +7821,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index c3585ae3bfad..e73ef911440d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -29,6 +29,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -73,6 +74,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -102,6 +114,270 @@ def test__get_default_mtls_endpoint(): ) +def test__read_environment_variables(): + assert SchemaServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SchemaServiceClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SchemaServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SchemaServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` 
must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SchemaServiceClient._get_client_cert_source(None, False) is None + assert ( + SchemaServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + SchemaServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SchemaServiceClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SchemaServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SchemaServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + default_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SchemaServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + 
SchemaServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, default_universe, "always") + == SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SchemaServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SchemaServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SchemaServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SchemaServiceClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SchemaServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SchemaServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SchemaServiceClient._get_universe_domain(None, None) + == SchemaServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SchemaServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), + (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -213,13 +489,13 @@ def test_schema_service_client_get_transport_class(): ) @mock.patch.object( SchemaServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(SchemaServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), ) @mock.patch.object( SchemaServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(SchemaServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), ) def test_schema_service_client_client_options( client_class, transport_class, transport_name @@ -261,7 +537,9 @@ def test_schema_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -291,15 +569,23 @@ def test_schema_service_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -309,7 +595,9 @@ def test_schema_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -327,7 +615,9 @@ def test_schema_service_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -360,13 +650,13 @@ def test_schema_service_client_client_options( ) @mock.patch.object( SchemaServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(SchemaServiceClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), ) @mock.patch.object( SchemaServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(SchemaServiceAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_schema_service_client_mtls_env_auto( @@ -389,7 +679,9 @@ def test_schema_service_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + 
UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -421,7 +713,9 @@ def test_schema_service_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -455,7 +749,9 @@ def test_schema_service_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -545,6 +841,115 @@ def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize( + "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] +) +@mock.patch.object( + SchemaServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceClient), +) +@mock.patch.object( + SchemaServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SchemaServiceAsyncClient), +) +def test_schema_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SchemaServiceClient._DEFAULT_UNIVERSE + default_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SchemaServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -571,7 +976,9 @@ def test_schema_service_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -611,7 +1018,9 @@ def test_schema_service_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -671,7 +1080,9 @@ def test_schema_service_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1448,7 +1859,7 @@ async def test_list_schemas_flattened_error_async(): def test_list_schemas_pager(transport_name: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1498,7 +1909,7 @@ def test_list_schemas_pager(transport_name: str = "grpc"): def test_list_schemas_pages(transport_name: str = "grpc"): client = SchemaServiceClient( - 
credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1540,7 +1951,7 @@ def test_list_schemas_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_schemas_async_pager(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1590,7 +2001,7 @@ async def test_list_schemas_async_pager(): @pytest.mark.asyncio async def test_list_schemas_async_pages(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1884,7 +2295,7 @@ async def test_list_schema_revisions_flattened_error_async(): def test_list_schema_revisions_pager(transport_name: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1936,7 +2347,7 @@ def test_list_schema_revisions_pager(transport_name: str = "grpc"): def test_list_schema_revisions_pages(transport_name: str = "grpc"): client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1980,7 +2391,7 @@ def test_list_schema_revisions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_schema_revisions_async_pager(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2032,7 +2443,7 @@ async def test_list_schema_revisions_async_pager(): @pytest.mark.asyncio async def test_list_schema_revisions_async_pages(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6368,7 +6779,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. - options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = SchemaServiceClient( @@ -7821,7 +8232,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 94f7e301e978..4ff6a3f27111 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -30,6 +30,7 @@ import json import math import pytest +from google.api_core import api_core_version from proto.marshal.rules.dates import DurationRule, TimestampRule from proto.marshal.rules import wrappers from requests import Response @@ -75,6 +76,17 @@ def modify_default_endpoint(client): ) +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -101,6 +113,254 @@ def test__get_default_mtls_endpoint(): assert SubscriberClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi +def test__read_environment_variables(): + assert SubscriberClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SubscriberClient._read_environment_variables() == (True, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert SubscriberClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + SubscriberClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert SubscriberClient._read_environment_variables() == (False, "never", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert SubscriberClient._read_environment_variables() == (False, "always", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert SubscriberClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + SubscriberClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable 
`GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert SubscriberClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert SubscriberClient._get_client_cert_source(None, False) is None + assert ( + SubscriberClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + SubscriberClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + SubscriberClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + SubscriberClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + SubscriberClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberClient), +) +@mock.patch.object( + SubscriberAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = SubscriberClient._DEFAULT_UNIVERSE + default_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + SubscriberClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + 
SubscriberClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == SubscriberClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, default_universe, "always") + == SubscriberClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SubscriberClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == SubscriberClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + SubscriberClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + SubscriberClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + SubscriberClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + SubscriberClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + SubscriberClient._get_universe_domain(None, None) + == SubscriberClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + SubscriberClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), + (SubscriberClient, transports.SubscriberRestTransport, "rest"), + ], +) +def test__validate_universe_domain(client_class, transport_class, transport_name): + client = client_class( + transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) + ) + assert client._validate_universe_domain() == True + + # Test the case when universe is already validated. + assert client._validate_universe_domain() == True + + if transport_name == "grpc": + # Test the case where credentials are provided by the + # `local_channel_credentials`. The default universes in both match. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + client = client_class(transport=transport_class(channel=channel)) + assert client._validate_universe_domain() == True + + # Test the case where credentials do not exist: e.g. a transport is provided + # with no credentials. Validation should still succeed because there is no + # mismatch with non-existent credentials. + channel = grpc.secure_channel( + "http://localhost/", grpc.local_channel_credentials() + ) + transport = transport_class(channel=channel) + transport._credentials = None + client = client_class(transport=transport) + assert client._validate_universe_domain() == True + + # TODO: This is needed to cater for older versions of google-auth + # Make this test unconditional once the minimum supported version of + # google-auth becomes 2.23.0 or higher. + google_auth_major, google_auth_minor = [ + int(part) for part in google.auth.__version__.split(".")[0:2] + ] + if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): + credentials = ga_credentials.AnonymousCredentials() + credentials._universe_domain = "foo.com" + # Test the case when there is a universe mismatch from the credentials. 
+ client = client_class(transport=transport_class(credentials=credentials)) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." + ) + + # Test the case when there is a universe mismatch from the client. + # + # TODO: Make this test unconditional once the minimum supported version of + # google-api-core becomes 2.15.0 or higher. + api_core_major, api_core_minor = [ + int(part) for part in api_core_version.__version__.split(".")[0:2] + ] + if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): + client = client_class( + client_options={"universe_domain": "bar.com"}, + transport=transport_class( + credentials=ga_credentials.AnonymousCredentials(), + ), + ) + with pytest.raises(ValueError) as excinfo: + client._validate_universe_domain() + assert ( + str(excinfo.value) + == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." 
+ ) + + # Test that ValueError is raised if universe_domain is provided via client options and credentials is None + with pytest.raises(ValueError): + client._compare_universes("foo.bar", None) + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -211,12 +471,14 @@ def test_subscriber_client_get_transport_class(): ], ) @mock.patch.object( - SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) + SubscriberClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberClient), ) @mock.patch.object( SubscriberAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(SubscriberAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberAsyncClient), ) def test_subscriber_client_client_options( client_class, transport_class, transport_name @@ -258,7 +520,9 @@ def test_subscriber_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -288,15 +552,23 @@ def test_subscriber_client_client_options( # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): + with pytest.raises(MutualTLSChannelError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): + with pytest.raises(ValueError) as excinfo: client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") @@ -306,7 +578,9 @@ def test_subscriber_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id="octopus", @@ -324,7 +598,9 @@ def test_subscriber_client_client_options( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -356,12 +632,14 @@ def test_subscriber_client_client_options( ], ) @mock.patch.object( - SubscriberClient, "DEFAULT_ENDPOINT", modify_default_endpoint(SubscriberClient) + SubscriberClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberClient), ) @mock.patch.object( SubscriberAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(SubscriberAsyncClient), + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberAsyncClient), ) @mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) def test_subscriber_client_mtls_env_auto( @@ -384,7 +662,9 @@ def test_subscriber_client_mtls_env_auto( if use_client_cert_env == "false": expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + 
UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) else: expected_client_cert_source = client_cert_source_callback expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -416,7 +696,9 @@ def test_subscriber_client_mtls_env_auto( return_value=client_cert_source_callback, ): if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) expected_client_cert_source = None else: expected_host = client.DEFAULT_MTLS_ENDPOINT @@ -450,7 +732,9 @@ def test_subscriber_client_mtls_env_auto( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -536,6 +820,113 @@ def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT assert cert_source == mock_client_cert_source + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + + +@pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) +@mock.patch.object( + SubscriberClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberClient), +) +@mock.patch.object( + SubscriberAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(SubscriberAsyncClient), +) +def test_subscriber_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = SubscriberClient._DEFAULT_UNIVERSE + default_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -562,7 +953,9 @@ def test_subscriber_client_client_options_scopes( patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=["1", "2"], client_cert_source_for_mtls=None, quota_project_id=None, @@ -597,7 +990,9 @@ def test_subscriber_client_client_options_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -650,7 +1045,9 @@ def test_subscriber_client_create_channel_credentials_file( patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, @@ -1789,7 +2186,7 @@ async def test_list_subscriptions_flattened_error_async(): def test_list_subscriptions_pager(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1841,7 +2238,7 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): def test_list_subscriptions_pages(transport_name: str = "grpc"): client = SubscriberClient( 
- credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -1885,7 +2282,7 @@ def test_list_subscriptions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_subscriptions_async_pager(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1937,7 +2334,7 @@ async def test_list_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_subscriptions_async_pages(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2888,6 +3285,7 @@ def test_pull_flattened_error(): client.pull( pubsub.PullRequest(), subscription="subscription_value", + return_immediately=True, max_messages=1277, ) @@ -2941,6 +3339,7 @@ async def test_pull_flattened_error_async(): await client.pull( pubsub.PullRequest(), subscription="subscription_value", + return_immediately=True, max_messages=1277, ) @@ -3727,7 +4126,7 @@ async def test_list_snapshots_flattened_error_async(): def test_list_snapshots_pager(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3777,7 +4176,7 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): def test_list_snapshots_pages(transport_name: str = "grpc"): client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), transport=transport_name, ) @@ -3819,7 +4218,7 @@ def test_list_snapshots_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_snapshots_async_pager(): client = SubscriberAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3869,7 +4268,7 @@ async def test_list_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_snapshots_async_pages(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials, + credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6991,6 +7390,7 @@ def test_pull_rest_flattened(): # get truthy value for each flattened field mock_args = dict( subscription="subscription_value", + return_immediately=True, max_messages=1277, ) mock_args.update(sample_request) @@ -7029,6 +7429,7 @@ def test_pull_rest_flattened_error(transport: str = "rest"): client.pull( pubsub.PullRequest(), subscription="subscription_value", + return_immediately=True, max_messages=1277, ) @@ -8970,7 +9371,7 @@ def test_credentials_transport_error(): ) # It is an error to provide an api_key and a credential. 
- options = mock.Mock() + options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): client = SubscriberClient( @@ -10484,7 +10885,9 @@ def test_api_key_credentials(client_class, transport_class): patched.assert_called_once_with( credentials=mock_cred, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), scopes=None, client_cert_source_for_mtls=None, quota_project_id=None, diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 5d6013654076..cc8eda56ce7e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -317,6 +317,32 @@ def test_publish_with_ordering_key_uses_extended_retry_deadline(creds): _assert_retries_equal(batch_commit_retry, expected_retry) +def test_publish_with_ordering_key_with_no_retry(creds): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions(enable_message_ordering=True), + ) + + # Use mocks in lieu of the actual batch class. + batch = mock.Mock(spec=client._batch_class) + future = mock.sentinel.future + future.add_done_callback = mock.Mock(spec=["__call__"]) + batch.publish.return_value = future + + topic = "topic/path" + client._set_batch(topic, batch) + + # Actually mock the batch class now. + batch_class = mock.Mock(spec=(), return_value=batch) + client._set_batch_class(batch_class) + + future = client.publish(topic, b"foo", ordering_key="first", retry=None) + assert future is mock.sentinel.future + + # Check the retry settings used for the batch. 
+ batch_class.assert_called_once() + + def test_publish_attrs_bytestring(creds): client = publisher.Client(credentials=creds) @@ -447,20 +473,12 @@ def test_stop(creds): def test_gapic_instance_method(creds): client = publisher.Client(credentials=creds) - transport_mock = mock.Mock(create_topic=mock.sentinel) - fake_create_topic_rpc = mock.Mock() - transport_mock._wrapped_methods = { - transport_mock.create_topic: fake_create_topic_rpc - } - patcher = mock.patch.object(client, "_transport", new=transport_mock) - topic = gapic_types.Topic(name="projects/foo/topics/bar") - - with patcher: + with mock.patch.object(client, "create_topic") as patched: client.create_topic(topic) - assert fake_create_topic_rpc.call_count == 1 - _, args, _ = fake_create_topic_rpc.mock_calls[0] + assert patched.call_count == 1 + _, args, _ = patched.mock_calls[0] assert args[0] == gapic_types.Topic(name="projects/foo/topics/bar") From 4b8fe93006a2a6aa315b98e6891ff56cf9430c15 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 9 Feb 2024 17:38:03 -0500 Subject: [PATCH 1042/1197] chore(main): release 2.19.4 (#1079) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index a31cb5941cf7..318c38f787be 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.3" + ".": "2.19.4" } \ No newline at end of file 
diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index d9f24c15cdb6..d2a7c2051109 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.4](https://github.com/googleapis/python-pubsub/compare/v2.19.3...v2.19.4) (2024-02-09) + + +### Bug Fixes + +* **diregapic:** S/bazel/bazelisk/ in DIREGAPIC build GitHub action ([#1064](https://github.com/googleapis/python-pubsub/issues/1064)) ([d56ad12](https://github.com/googleapis/python-pubsub/commit/d56ad12f197e9e379d2a4a0a38be108808985c23)) + ## [2.19.3](https://github.com/googleapis/python-pubsub/compare/v2.19.2...v2.19.3) (2024-02-08) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 3feb0e097c14..34f81704a3e7 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.3" # {x-release-please-version} +__version__ = "2.19.4" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 3feb0e097c14..34f81704a3e7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.3" # {x-release-please-version} +__version__ = "2.19.4" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 259b241f79fc..6cbf881c8b39 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.3" + "version": "2.19.4" }, "snippets": [ { From 12ee53ffbbb180ea4ae2017fcf6f4633365e06aa Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 17 Feb 2024 18:46:40 -0500 Subject: [PATCH 1043/1197] build(deps): bump cryptography from 42.0.0 to 42.0.2 in .kokoro (#1082) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 66 +++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 2aefd0e91175..51213ca00ee3 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:97b671488ad548ef783a452a9e1276ac10f144d5ae56d98cc4bf77ba504082b4 -# created: 2024-02-06T03:20:16.660474034Z + digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 +# created: 2024-02-17T12:21:23.177926195Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 8c11c9f3e9b6..f80bdcd62981 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.0 \ - --hash=sha256:0a68bfcf57a6887818307600c3c0ebc3f62fbb6ccad2240aa21887cda1f8df1b \ - --hash=sha256:146e971e92a6dd042214b537a726c9750496128453146ab0ee8971a0299dc9bd \ - --hash=sha256:14e4b909373bc5bf1095311fa0f7fcabf2d1a160ca13f1e9e467be1ac4cbdf94 \ - --hash=sha256:206aaf42e031b93f86ad60f9f5d9da1b09164f25488238ac1dc488334eb5e221 \ - --hash=sha256:3005166a39b70c8b94455fdbe78d87a444da31ff70de3331cdec2c568cf25b7e \ - --hash=sha256:324721d93b998cb7367f1e6897370644751e5580ff9b370c0a50dc60a2003513 \ - --hash=sha256:33588310b5c886dfb87dba5f013b8d27df7ffd31dc753775342a1e5ab139e59d \ - --hash=sha256:35cf6ed4c38f054478a9df14f03c1169bb14bd98f0b1705751079b25e1cb58bc \ - --hash=sha256:3ca482ea80626048975360c8e62be3ceb0f11803180b73163acd24bf014133a0 \ - --hash=sha256:56ce0c106d5c3fec1038c3cca3d55ac320a5be1b44bf15116732d0bc716979a2 \ - --hash=sha256:5a217bca51f3b91971400890905a9323ad805838ca3fa1e202a01844f485ee87 \ - --hash=sha256:678cfa0d1e72ef41d48993a7be75a76b0725d29b820ff3cfd606a5b2b33fda01 \ - --hash=sha256:69fd009a325cad6fbfd5b04c711a4da563c6c4854fc4c9544bff3088387c77c0 \ - --hash=sha256:6cf9b76d6e93c62114bd19485e5cb003115c134cf9ce91f8ac924c44f8c8c3f4 \ - --hash=sha256:74f18a4c8ca04134d2052a140322002fef535c99cdbc2a6afc18a8024d5c9d5b \ - 
--hash=sha256:85f759ed59ffd1d0baad296e72780aa62ff8a71f94dc1ab340386a1207d0ea81 \ - --hash=sha256:87086eae86a700307b544625e3ba11cc600c3c0ef8ab97b0fda0705d6db3d4e3 \ - --hash=sha256:8814722cffcfd1fbd91edd9f3451b88a8f26a5fd41b28c1c9193949d1c689dc4 \ - --hash=sha256:8fedec73d590fd30c4e3f0d0f4bc961aeca8390c72f3eaa1a0874d180e868ddf \ - --hash=sha256:9515ea7f596c8092fdc9902627e51b23a75daa2c7815ed5aa8cf4f07469212ec \ - --hash=sha256:988b738f56c665366b1e4bfd9045c3efae89ee366ca3839cd5af53eaa1401bce \ - --hash=sha256:a2a8d873667e4fd2f34aedab02ba500b824692c6542e017075a2efc38f60a4c0 \ - --hash=sha256:bd7cf7a8d9f34cc67220f1195884151426ce616fdc8285df9054bfa10135925f \ - --hash=sha256:bdce70e562c69bb089523e75ef1d9625b7417c6297a76ac27b1b8b1eb51b7d0f \ - --hash=sha256:be14b31eb3a293fc6e6aa2807c8a3224c71426f7c4e3639ccf1a2f3ffd6df8c3 \ - --hash=sha256:be41b0c7366e5549265adf2145135dca107718fa44b6e418dc7499cfff6b4689 \ - --hash=sha256:c310767268d88803b653fffe6d6f2f17bb9d49ffceb8d70aed50ad45ea49ab08 \ - --hash=sha256:c58115384bdcfe9c7f644c72f10f6f42bed7cf59f7b52fe1bf7ae0a622b3a139 \ - --hash=sha256:c640b0ef54138fde761ec99a6c7dc4ce05e80420262c20fa239e694ca371d434 \ - --hash=sha256:ca20550bb590db16223eb9ccc5852335b48b8f597e2f6f0878bbfd9e7314eb17 \ - --hash=sha256:d97aae66b7de41cdf5b12087b5509e4e9805ed6f562406dfcf60e8481a9a28f8 \ - --hash=sha256:e9326ca78111e4c645f7e49cbce4ed2f3f85e17b61a563328c85a5208cf34440 +cryptography==42.0.2 \ + --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ + --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ + --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ + --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ + --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ + --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ + 
--hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ + --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ + --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ + --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ + --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ + --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ + --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ + --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ + --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ + --hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ + --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ + --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ + --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ + --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ + --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ + --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ + --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ + --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ + --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ + --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ + --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ + --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ + --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ + --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ + 
--hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ + --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f # via # gcp-releasetool # secretstorage From 654584b3186e8eee7807ea250421dc842f1b0bc2 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 22 Feb 2024 18:33:57 -0500 Subject: [PATCH 1044/1197] fix: Update system_test_python_versions (#1096) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/CONTRIBUTING.rst | 4 ++-- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/pytest.ini | 4 +++- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 03a700296d22..727b5ec7fe3e 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -143,12 +143,12 @@ Running System Tests $ nox -s system # Run a single system test - $ nox -s system-3.10 -- -k + $ nox -s system-3.12 -- -k .. note:: - System tests are only configured to run under Python 3.10. + System tests are only configured to run under Python 3.12. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. 
You'll need to change some local diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index ec63afcb4775..ae87830ec545 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -50,7 +50,7 @@ UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} -SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.10"] +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ "mock", "pytest", diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index c156909ff4e5..ad76a48ee63f 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -337,7 +337,7 @@ cov_level=100, versions=gcp.common.detect_versions(path="./google", default_first=True), unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], - system_test_python_versions=["3.10"], + system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], ) s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml", "README.rst", "docs/index.rst"]) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 7308b0871af9..ce16e4730652 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -11,4 +11,6 @@ filterwarnings = # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1938 is fixed ignore:The return_immediately flag is deprecated and should be set to False.:DeprecationWarning # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed - ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning \ No newline at end of file + ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning + # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed + 
ignore:unclosed:ResourceWarning \ No newline at end of file From 5e44ab051ed93e9e05272510e9b5c50d6d932916 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 22 Feb 2024 16:11:19 -0800 Subject: [PATCH 1045/1197] chore(main): release 2.19.5 (#1097) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 318c38f787be..ba120c9f5d46 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.4" + ".": "2.19.5" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index d2a7c2051109..ab1f26d73b16 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.5](https://github.com/googleapis/python-pubsub/compare/v2.19.4...v2.19.5) (2024-02-22) + + +### Bug Fixes + +* Update system_test_python_versions ([#1096](https://github.com/googleapis/python-pubsub/issues/1096)) ([c659ac7](https://github.com/googleapis/python-pubsub/commit/c659ac777f177e54d7272a8de93fa9f554b15d46)) + ## [2.19.4](https://github.com/googleapis/python-pubsub/compare/v2.19.3...v2.19.4) (2024-02-09) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py 
b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 34f81704a3e7..3aa3b013a28c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.4" # {x-release-please-version} +__version__ = "2.19.5" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 34f81704a3e7..3aa3b013a28c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.4" # {x-release-please-version} +__version__ = "2.19.5" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 6cbf881c8b39..78988e6a1acf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.4" + "version": "2.19.5" }, "snippets": [ { From c55249374f22c1bbcc7bb7dc425d27ba634d14ec Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 22 Feb 2024 20:46:35 -0500 Subject: [PATCH 1046/1197] fix: Remove LOGGER.exception() line (#1087) --- .../google/cloud/pubsub_v1/publisher/_batch/thread.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index e872fcf2b8cc..f6436fb7be4e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -290,7 +290,6 @@ def _commit(self) -> None: # Failed to publish batch. self._batch_done_callback(batch_transport_succeeded) - _LOGGER.exception("Failed to publish %s messages.", len(self._futures)) return end = time.time() From 97156b768a33ee6ad5cd00ab7bc73f16d6ac0ad5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 22 Feb 2024 22:38:49 -0500 Subject: [PATCH 1047/1197] chore(main): release 2.19.6 (#1098) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index ba120c9f5d46..801b4eea6d1d 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.5" + ".": "2.19.6" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index ab1f26d73b16..7e4f7a305610 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 
[2.19.6](https://github.com/googleapis/python-pubsub/compare/v2.19.5...v2.19.6) (2024-02-23) + + +### Bug Fixes + +* Remove LOGGER.exception() line ([#1087](https://github.com/googleapis/python-pubsub/issues/1087)) ([a395d26](https://github.com/googleapis/python-pubsub/commit/a395d26ed0fffaee8662f988da97dd35c480af4f)) + ## [2.19.5](https://github.com/googleapis/python-pubsub/compare/v2.19.4...v2.19.5) (2024-02-22) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 3aa3b013a28c..10730db30559 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.5" # {x-release-please-version} +__version__ = "2.19.6" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 3aa3b013a28c..10730db30559 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.5" # {x-release-please-version} +__version__ = "2.19.6" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 78988e6a1acf..d426be31afaf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.5" + "version": "2.19.6" }, "snippets": [ { From b26a99e8a76c4bbf1f79dd8a1c8053add4abdf47 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 24 Feb 2024 12:54:57 -0800 Subject: [PATCH 1048/1197] fix(deps): Require `google-api-core>=1.34.1` (#1080) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- .../services/publisher/transports/rest.py | 21 +---- .../schema_service/transports/rest.py | 30 ++----- .../services/subscriber/transports/rest.py | 51 ++--------- .../google/pubsub_v1/types/pubsub.py | 8 +- .../unit/gapic/pubsub_v1/test_publisher.py | 57 +++--------- .../gapic/pubsub_v1/test_schema_service.py | 60 +++---------- .../unit/gapic/pubsub_v1/test_subscriber.py | 90 ++++--------------- 7 files changed, 57 insertions(+), 260 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index d450cb6eb26a..2e274efa756c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -536,9 +536,7 @@ def __call__( # Jsonify the request body body = 
json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -547,7 +545,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -630,7 +627,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -713,7 +709,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -799,7 +794,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -885,7 +879,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -973,7 +966,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1061,7 +1053,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1144,9 +1135,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1155,7 +1144,6 @@ def 
__call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1239,9 +1227,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1250,7 +1236,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 5e5d7d478c71..230476a5bd16 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -572,9 +572,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -583,7 +581,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -667,9 +664,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -678,7 +673,6 @@ def 
__call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -761,7 +755,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -841,7 +834,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -927,7 +919,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1015,7 +1006,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1101,7 +1091,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1184,9 +1173,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1195,7 +1182,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1283,9 +1269,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = 
transcoded_request["method"] @@ -1294,7 +1278,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1380,9 +1363,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1391,7 +1372,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index 26dee31f2e7b..f25678965645 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -652,9 +652,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -663,7 +661,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -745,9 +742,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = 
transcoded_request["method"] @@ -756,7 +751,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -849,9 +843,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -860,7 +852,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -943,7 +934,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1020,7 +1010,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1104,7 +1093,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1197,7 +1185,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1283,7 +1270,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1371,7 +1357,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1453,9 +1438,7 @@ def __call__( # Jsonify the request 
body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1464,7 +1447,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1539,9 +1521,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1550,7 +1530,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1626,9 +1605,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1637,7 +1614,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1723,9 +1699,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1734,7 +1708,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - 
including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1841,9 +1814,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1852,7 +1823,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) @@ -1943,9 +1913,7 @@ def __call__( # Jsonify the request body body = json_format.MessageToJson( - transcoded_request["body"], - including_default_value_fields=False, - use_integers_for_enums=True, + transcoded_request["body"], use_integers_for_enums=True ) uri = transcoded_request["uri"] method = transcoded_request["method"] @@ -1954,7 +1922,6 @@ def __call__( query_params = json.loads( json_format.MessageToJson( transcoded_request["query_params"], - including_default_value_fields=False, use_integers_for_enums=True, ) ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index b04d5d74515b..fef8e475fa74 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -201,8 +201,8 @@ class AwsKinesis(proto.Message): """ class State(proto.Enum): - r"""Possible states for managed ingestion from Amazon Kinesis - Data Streams. + r"""Possible states for ingestion from Amazon Kinesis Data + Streams. Values: STATE_UNSPECIFIED (0): @@ -317,8 +317,8 @@ class Topic(proto.Message): Output only. An output-only field indicating the state of the topic. ingestion_data_source_settings (google.pubsub_v1.types.IngestionDataSourceSettings): - Optional. Settings for managed ingestion from - a data source into this topic. + Optional. 
Settings for ingestion from a data + source into this topic. """ class State(proto.Enum): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index aca4c59dd4f2..0df65c02d2c1 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1738,7 +1738,8 @@ def test_publish_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.publish( - topic="topic_value", messages=[pubsub.PubsubMessage(data=b"data_blob")] + topic="topic_value", + messages=[pubsub.PubsubMessage(data=b"data_blob")], ) # Establish that the underlying call was made with the expected @@ -3803,11 +3804,7 @@ def test_create_topic_rest_required_fields(request_type=pubsub.Topic): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4070,11 +4067,7 @@ def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicReques request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4338,11 +4331,7 @@ def test_publish_rest_required_fields(request_type=pubsub.PublishRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - 
including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4619,11 +4608,7 @@ def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4880,11 +4865,7 @@ def test_list_topics_rest_required_fields(request_type=pubsub.ListTopicsRequest) request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5217,11 +5198,7 @@ def test_list_topic_subscriptions_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5559,11 +5536,7 @@ def test_list_topic_snapshots_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5890,11 +5863,7 @@ def 
test_delete_topic_rest_required_fields(request_type=pubsub.DeleteTopicReques request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6139,11 +6108,7 @@ def test_detach_subscription_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index e73ef911440d..628c8e8e3fae 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -3983,11 +3983,7 @@ def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaR request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4267,11 +4263,7 @@ def test_get_schema_rest_required_fields(request_type=schema.GetSchemaRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - 
use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4532,11 +4524,7 @@ def test_list_schemas_rest_required_fields(request_type=schema.ListSchemasReques request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -4871,11 +4859,7 @@ def test_list_schema_revisions_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5219,11 +5203,7 @@ def test_commit_schema_rest_required_fields(request_type=gp_schema.CommitSchemaR request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5502,11 +5482,7 @@ def test_rollback_schema_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5788,11 +5764,7 @@ def test_delete_schema_revision_rest_required_fields( request = 
request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6053,11 +6025,7 @@ def test_delete_schema_rest_required_fields(request_type=schema.DeleteSchemaRequ request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6304,11 +6272,7 @@ def test_validate_schema_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6582,11 +6546,7 @@ def test_validate_message_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 4ff6a3f27111..c5db8348639f 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ 
-5223,11 +5223,7 @@ def test_create_subscription_rest_required_fields(request_type=pubsub.Subscripti request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5522,11 +5518,7 @@ def test_get_subscription_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -5803,11 +5795,7 @@ def test_update_subscription_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6081,11 +6069,7 @@ def test_list_subscriptions_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6415,11 +6399,7 @@ def test_delete_subscription_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - 
use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6669,11 +6649,7 @@ def test_modify_ack_deadline_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -6941,11 +6917,7 @@ def test_acknowledge_rest_required_fields(request_type=pubsub.AcknowledgeRequest request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -7207,11 +7179,7 @@ def test_pull_rest_required_fields(request_type=pubsub.PullRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -7496,11 +7464,7 @@ def test_modify_push_config_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -7763,11 +7727,7 @@ def test_get_snapshot_rest_required_fields(request_type=pubsub.GetSnapshotReques 
request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -8026,11 +7986,7 @@ def test_list_snapshots_rest_required_fields(request_type=pubsub.ListSnapshotsRe request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -8366,11 +8322,7 @@ def test_create_snapshot_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -8646,11 +8598,7 @@ def test_update_snapshot_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -8914,11 +8862,7 @@ def test_delete_snapshot_rest_required_fields( request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, 
use_integers_for_enums=False) ) # verify fields with default values are dropped @@ -9163,11 +9107,7 @@ def test_seek_rest_required_fields(request_type=pubsub.SeekRequest): request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( - json_format.MessageToJson( - pb_request, - including_default_value_fields=False, - use_integers_for_enums=False, - ) + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) # verify fields with default values are dropped From 0218ab456798ab9b4918b283bd33cd75a4b10197 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 15:50:04 -0500 Subject: [PATCH 1049/1197] chore(main): release 2.19.7 (#1099) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 801b4eea6d1d..dfab433adf36 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.6" + ".": "2.19.7" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 7e4f7a305610..5a8131241a4a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.7](https://github.com/googleapis/python-pubsub/compare/v2.19.6...v2.19.7) 
(2024-02-24) + + +### Bug Fixes + +* **deps:** Require `google-api-core>=1.34.1` ([#1080](https://github.com/googleapis/python-pubsub/issues/1080)) ([1a5a134](https://github.com/googleapis/python-pubsub/commit/1a5a1342de8736c6a2b1ac63476667f8a02b5bb8)) + ## [2.19.6](https://github.com/googleapis/python-pubsub/compare/v2.19.5...v2.19.6) (2024-02-23) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 10730db30559..4e7d05e3ee45 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.6" # {x-release-please-version} +__version__ = "2.19.7" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 10730db30559..4e7d05e3ee45 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.6" # {x-release-please-version} +__version__ = "2.19.7" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d426be31afaf..a38cdca9650c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.6" + "version": "2.19.7" }, "snippets": [ { From c86fa5dd0295f5be383f9f925c04ea8834992297 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 08:44:54 -0800 Subject: [PATCH 1050/1197] build(deps): bump cryptography from 42.0.2 to 42.0.4 in .kokoro (#1100) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/requirements.txt | 66 +++++++++---------- 2 files changed, 35 insertions(+), 35 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 51213ca00ee3..e4e943e0259a 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a0c4463fcfd9893fc172a3b3db2b6ac0c7b94ec6ad458c7dcea12d9693615ac3 -# created: 2024-02-17T12:21:23.177926195Z + digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad +# created: 2024-02-27T15:56:18.442440378Z diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index f80bdcd62981..bda8e38c4f31 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -93,39 +93,39 @@ colorlog==6.7.0 \ # via # gcp-docuploader # nox -cryptography==42.0.2 \ - --hash=sha256:087887e55e0b9c8724cf05361357875adb5c20dec27e5816b653492980d20380 \ - --hash=sha256:09a77e5b2e8ca732a19a90c5bca2d124621a1edb5438c5daa2d2738bfeb02589 \ - --hash=sha256:130c0f77022b2b9c99d8cebcdd834d81705f61c68e91ddd614ce74c657f8b3ea \ - --hash=sha256:141e2aa5ba100d3788c0ad7919b288f89d1fe015878b9659b307c9ef867d3a65 \ - --hash=sha256:28cb2c41f131a5758d6ba6a0504150d644054fd9f3203a1e8e8d7ac3aea7f73a \ - --hash=sha256:2f9f14185962e6a04ab32d1abe34eae8a9001569ee4edb64d2304bf0d65c53f3 \ - --hash=sha256:320948ab49883557a256eab46149df79435a22d2fefd6a66fe6946f1b9d9d008 \ - --hash=sha256:36d4b7c4be6411f58f60d9ce555a73df8406d484ba12a63549c88bd64f7967f1 \ - --hash=sha256:3b15c678f27d66d247132cbf13df2f75255627bcc9b6a570f7d2fd08e8c081d2 \ - --hash=sha256:3dbd37e14ce795b4af61b89b037d4bc157f2cb23e676fa16932185a04dfbf635 \ - --hash=sha256:4383b47f45b14459cab66048d384614019965ba6c1a1a141f11b5a551cace1b2 \ - --hash=sha256:44c95c0e96b3cb628e8452ec060413a49002a247b2b9938989e23a2c8291fc90 \ - --hash=sha256:4b063d3413f853e056161eb0c7724822a9740ad3caa24b8424d776cebf98e7ee \ - --hash=sha256:52ed9ebf8ac602385126c9a2fe951db36f2cb0c2538d22971487f89d0de4065a \ - --hash=sha256:55d1580e2d7e17f45d19d3b12098e352f3a37fe86d380bf45846ef257054b242 \ - 
--hash=sha256:5ef9bc3d046ce83c4bbf4c25e1e0547b9c441c01d30922d812e887dc5f125c12 \ - --hash=sha256:5fa82a26f92871eca593b53359c12ad7949772462f887c35edaf36f87953c0e2 \ - --hash=sha256:61321672b3ac7aade25c40449ccedbc6db72c7f5f0fdf34def5e2f8b51ca530d \ - --hash=sha256:701171f825dcab90969596ce2af253143b93b08f1a716d4b2a9d2db5084ef7be \ - --hash=sha256:841ec8af7a8491ac76ec5a9522226e287187a3107e12b7d686ad354bb78facee \ - --hash=sha256:8a06641fb07d4e8f6c7dda4fc3f8871d327803ab6542e33831c7ccfdcb4d0ad6 \ - --hash=sha256:8e88bb9eafbf6a4014d55fb222e7360eef53e613215085e65a13290577394529 \ - --hash=sha256:a00aee5d1b6c20620161984f8ab2ab69134466c51f58c052c11b076715e72929 \ - --hash=sha256:a047682d324ba56e61b7ea7c7299d51e61fd3bca7dad2ccc39b72bd0118d60a1 \ - --hash=sha256:a7ef8dd0bf2e1d0a27042b231a3baac6883cdd5557036f5e8df7139255feaac6 \ - --hash=sha256:ad28cff53f60d99a928dfcf1e861e0b2ceb2bc1f08a074fdd601b314e1cc9e0a \ - --hash=sha256:b9097a208875fc7bbeb1286d0125d90bdfed961f61f214d3f5be62cd4ed8a446 \ - --hash=sha256:b97fe7d7991c25e6a31e5d5e795986b18fbbb3107b873d5f3ae6dc9a103278e9 \ - --hash=sha256:e0ec52ba3c7f1b7d813cd52649a5b3ef1fc0d433219dc8c93827c57eab6cf888 \ - --hash=sha256:ea2c3ffb662fec8bbbfce5602e2c159ff097a4631d96235fcf0fb00e59e3ece4 \ - --hash=sha256:fa3dec4ba8fb6e662770b74f62f1a0c7d4e37e25b58b2bf2c1be4c95372b4a33 \ - --hash=sha256:fbeb725c9dc799a574518109336acccaf1303c30d45c075c665c0793c2f79a7f +cryptography==42.0.4 \ + --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ + --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ + --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ + --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ + --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ + --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ + 
--hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ + --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ + --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ + --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ + --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ + --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ + --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ + --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ + --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ + --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ + --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ + --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ + --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ + --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ + --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ + --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ + --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ + --hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ + --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ + --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ + --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ + --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ + --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ + --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ + 
--hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ + --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 # via # gcp-releasetool # secretstorage From 649e29147f95b187e34577f7cfe52138e49e54ba Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 18:02:40 -0800 Subject: [PATCH 1051/1197] fix(deps): Exclude google-auth 2.24.0 and 2.25.0 (#1102) Co-authored-by: Owl Bot --- .../generated_samples/snippet_metadata_google.pubsub.v1.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index a38cdca9650c..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.7" + "version": "0.1.0" }, "snippets": [ { From 5f97ebede58605175c8abddb8145228e552feff0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 5 Mar 2024 10:21:11 -0500 Subject: [PATCH 1052/1197] chore(main): release 2.19.8 (#1106) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json 
b/packages/google-cloud-pubsub/.release-please-manifest.json index dfab433adf36..b2b789d39296 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.7" + ".": "2.19.8" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 5a8131241a4a..a8f514f5cfec 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.19.8](https://github.com/googleapis/python-pubsub/compare/v2.19.7...v2.19.8) (2024-03-05) + + +### Bug Fixes + +* **deps:** Exclude google-auth 2.24.0 and 2.25.0 ([#1102](https://github.com/googleapis/python-pubsub/issues/1102)) ([165c983](https://github.com/googleapis/python-pubsub/commit/165c983803c48a17141765395cf9ec2e6a7056fa)) + ## [2.19.7](https://github.com/googleapis/python-pubsub/compare/v2.19.6...v2.19.7) (2024-02-24) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 4e7d05e3ee45..58506fff2b4c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.7" # {x-release-please-version} +__version__ = "2.19.8" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 4e7d05e3ee45..58506fff2b4c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.7" # {x-release-please-version} +__version__ = "2.19.8" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..adc1b81fb2c4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.19.8" }, "snippets": [ { From abd31073a4e32f25bcb237c04efe2a7a1df83def Mon Sep 17 00:00:00 2001 From: Prad Nelluru Date: Tue, 5 Mar 2024 17:02:50 -0500 Subject: [PATCH 1053/1197] docs(samples): correct type and description of `timeout` parameter in subscriber quickstart (#1051) Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Co-authored-by: Alex Hong <9397363+hongalex@users.noreply.github.com> --- packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py index 0900f652d4e9..fd99aac2ddf5 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py +++ b/packages/google-cloud-pubsub/samples/snippets/quickstart/sub.py @@ -57,7 +57,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: parser.add_argument("project_id", help="Google Cloud project ID") parser.add_argument("subscription_id", help="Pub/Sub subscription ID") parser.add_argument( - "timeout", default=None, nargs="?", const=1, help="Pub/Sub subscription ID" + "timeout", default=None, type=float, nargs="?", const=1, help="StreamingPull timeout in seconds" ) args = parser.parse_args() From 3dc2fee0f65807c88367f5e81dc5f8b86522d08a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 5 Mar 2024 14:43:35 -0800 Subject: [PATCH 1054/1197] feat: Add include_recaptcha_script for as a new action in firewall policies (#1109) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- packages/google-cloud-pubsub/google/pubsub/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/__init__.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/services/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/async_client.py | 2 +- .../google/pubsub_v1/services/publisher/client.py | 2 +- .../google/pubsub_v1/services/publisher/pagers.py | 2 +- .../google/pubsub_v1/services/publisher/transports/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/transports/base.py | 2 +- .../google/pubsub_v1/services/publisher/transports/grpc.py | 2 +- .../pubsub_v1/services/publisher/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/publisher/transports/rest.py | 2 +- .../google/pubsub_v1/services/schema_service/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/async_client.py | 2 +- .../google/pubsub_v1/services/schema_service/client.py | 2 +- 
.../google/pubsub_v1/services/schema_service/pagers.py | 2 +- .../pubsub_v1/services/schema_service/transports/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/base.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/grpc.py | 2 +- .../services/schema_service/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/rest.py | 2 +- .../google/pubsub_v1/services/subscriber/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/async_client.py | 2 +- .../google/pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/services/subscriber/pagers.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/base.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/grpc.py | 2 +- .../pubsub_v1/services/subscriber/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/rest.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_async.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_sync.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_sync.py | 2 +- ...sub_v1_generated_publisher_list_topic_subscriptions_async.py | 2 +- ...bsub_v1_generated_publisher_list_topic_subscriptions_sync.py | 2 +- 
.../pubsub_v1_generated_publisher_list_topics_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_sync.py | 2 +- .../pubsub_v1_generated_publisher_publish_async.py | 2 +- .../pubsub_v1_generated_publisher_publish_sync.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_sync.py | 2 +- .../pubsub_v1_generated_schema_service_commit_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_commit_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_async.py | 2 +- ..._v1_generated_schema_service_delete_schema_revision_async.py | 2 +- ...b_v1_generated_schema_service_delete_schema_revision_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_sync.py | 2 +- ...b_v1_generated_schema_service_list_schema_revisions_async.py | 2 +- ...ub_v1_generated_schema_service_list_schema_revisions_sync.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_async.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_sync.py | 2 +- .../pubsub_v1_generated_schema_service_rollback_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_rollback_schema_sync.py | 2 +- ...pubsub_v1_generated_schema_service_validate_message_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_message_sync.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_sync.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_async.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_sync.py 
| 2 +- .../pubsub_v1_generated_subscriber_create_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_sync.py | 2 +- .../pubsub_v1_generated_subscriber_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_seek_async.py | 2 +- .../pubsub_v1_generated_subscriber_seek_sync.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_sync.py | 2 +- .../generated_samples/snippet_metadata_google.pubsub.v1.json | 2 +- .../google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py | 2 +- 
packages/google-cloud-pubsub/tests/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/gapic/__init__.py | 2 +- .../google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 2 +- 112 files changed, 112 insertions(+), 112 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index ebcbb8271dc6..d6d0a00ff358 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 731d03999fb9..61b89e6b17d1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py index 105e64dc0b4a..7e1e8e5ce602 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 730d5c8a5b56..700c99c02d09 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 37e7410f9fb4..43ccc6df351e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index 9c40664998cb..21ae22b8a6fa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index dc172aa02956..393b9a55fc2a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 6c92dd8f9f59..47fea83e696e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index ed6410fd4300..0fb520404de0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index c620cd4aeaa0..2d5c0137dc55 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index 2e274efa756c..4d70c3e64402 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py index 4e9eb056410d..570d29e7c544 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 44a4916e6e2f..c0eb61075563 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index d869e4dec4dd..72fbd44c825f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index acff3e954655..309e57f5356e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py index 52fa2850b096..73976e7fb32d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 39151e3975ce..835fcd63e85a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 66b382bfb067..ffe490b22e59 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 94a872b568de..78c44142ebae 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 230476a5bd16..953e58052e15 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py index fb0b7dfba826..e6994bdf6520 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 78ad7c9a249c..33d8de056fd4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 74fdb1d288eb..6b7103852ee8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index 6a68a4c515dc..94c88b9b3d93 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index b34bbb6870e6..af60aba8eb03 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index cea627d7638e..34cb97d49b14 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 85a1942af65d..82c752fa5f59 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index b4a73baf4a62..356acf17c0de 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index f25678965645..0b5f2ccc9609 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 80135a019128..62568bf6664e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index fef8e475fa74..b4d823c6f2ee 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 11853406dce7..9353e4817cf9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py index c047ae881166..5694b24ef2d5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py index bdfbe2973298..7de319c67e67 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py index d134b061483a..3d2b74803eb4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py index 56e65a6d0a63..e760eddbd671 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py index 9b9015cd9d11..86508954b39c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py index c2eb69647a8b..af6b4583799a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py index 318c0319a409..a8bea68c2d6b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py index e3c14e0524ee..051b980931a0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py index a65fb35dbd84..4eed18b38ac6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py index 0627b8277fff..1e2266757919 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py index ab3a07d55b47..959f1e824c81 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py index 93f1020d7753..c648b8d1a8d3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py index 0d2af9b98425..dc4a36ef83da 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py index 4399c8350a09..8d71be39861a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py index 529acbf00d99..5c6b63a2dd96 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py index b85ad8d4276b..9af236d44ece 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py index 3f913a7dc00d..d9451b6241d7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py index fbac6a928c56..9b7326c2cf02 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py index 7d4d0b0f151d..b93ec0b5d1b1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py index 16be67cd2014..c7caa1cbb577 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py index a399a7db1878..9cdd164f5bdb 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py index 73f9e407e5db..ce8619977972 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py index d34bd17a1eb8..1bbebfff5dc6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py index 194ab803393e..a35c3af1b19a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py index 6e956dee7d49..118fe734067f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py index 766b60396ac0..77dfe887b2a6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py index 97fa79d870eb..240df5a2a39b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py index b6882faa9ea4..2d45faebf8e5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py index 310a32d71e12..7ea5120011ca 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py index ce62376fa183..6aebac042f1f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py index 37d67280aece..963618fa9808 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py index 831a0f6f1031..d7b9e64641b2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py index 249c1ad3198c..b50a08b189a0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py index 977bb10a65fe..4d83c8cacd77 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py index add86c6fa894..3dcd25490f40 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py index 8e2fa2219177..4f509d6b3023 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py index d53710204e11..5deb08a44f80 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py index dc9add5cf400..cc78267927f4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py index c2edca707897..7fc2967d5bcf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py index 1d270cf2fa57..85b492df02f1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py index b314a652ecf9..3f7c691da483 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py index a8936e80cbaa..7738b03da4bd 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py index 488b06838b37..3062180367f6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py index 7aaf8e423ea2..cbb05f62af69 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py index c7db1f593600..5bea031a7dd2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py index 0d66e43e8297..01342901b37f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py index dafe8fe3b3f6..08a15f4ed063 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py index 8b7d7d35f792..1cde73a72c16 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py index 37fc866a9356..40960acc7187 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py index 9fbed39a1f74..3ee2e9c62b62 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py index 3c9ac382a7a3..47c7399989ef 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py index 867b090e8fd6..22832bc89a3d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py index cd9a50a39b8c..bcc3be15dac4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py index a02b5c9bd76a..3477e32d32e7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py index ade3e5e57f6c..7b00831a303c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py index fe61e24f56a5..8e1a96487b5c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py index 0a6a8ef9e215..8e93722929b4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py index 4107a81a880f..1d32afec25b7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py index cf45c94794c1..1d88c959046b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py index f6a48ec60a50..43ac23e4fc35 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py index 787ebd29aed7..3e6f26a0b2d8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py index 9e009fcc80ff..950f18cc17fa 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py index e24c2452ce28..9038bfd11f10 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py index 322673af9bc3..b57af8fb014f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py index 392550de509f..332c128954b4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py index 7ce0e96195b3..47926a80a3d7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py index ae1d2c301b5c..833885af5927 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py index 3fd588f4357f..1c8f8530da4b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py index 6e5632e84527..5c00a74dcd5c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py index eabb9036d5b7..5cc3de6ece70 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index adc1b81fb2c4..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.19.8" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index d8379e9bb312..35262ea8d459 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/tests/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-pubsub/tests/__init__.py +++ b/packages/google-cloud-pubsub/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-pubsub/tests/unit/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py index 89a37dc92c5a..8f6cf068242c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 0df65c02d2c1..4aa24c3ff1e9 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 628c8e8e3fae..03502f54ea9f 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index c5db8348639f..1e66e7b70e86 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 238c18ff7de274a5218a66f8abe088725753a269 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 5 Mar 2024 19:04:20 -0500 Subject: [PATCH 1055/1197] chore(main): release 2.20.0 (#1110) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index b2b789d39296..d753b65da399 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.19.8" + ".": "2.20.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a8f514f5cfec..bf31d0226c43 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.20.0](https://github.com/googleapis/python-pubsub/compare/v2.19.8...v2.20.0) (2024-03-05) + + +### Features + +* Add include_recaptcha_script for as a new action in firewall policies ([#1109](https://github.com/googleapis/python-pubsub/issues/1109)) ([54041a5](https://github.com/googleapis/python-pubsub/commit/54041a527398eb0ec5daa97a346ba3202ce349f3)) + + +### Documentation + +* **samples:** Correct type and description of `timeout` parameter in subscriber quickstart ([#1051](https://github.com/googleapis/python-pubsub/issues/1051)) ([141a473](https://github.com/googleapis/python-pubsub/commit/141a473561bd0e45d3137a02cbefddb454ab3af4)) + ## 
[2.19.8](https://github.com/googleapis/python-pubsub/compare/v2.19.7...v2.19.8) (2024-03-05) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 58506fff2b4c..551f0d2ebacb 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.19.8" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 58506fff2b4c..551f0d2ebacb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.19.8" # {x-release-please-version} +__version__ = "2.20.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..2a8aeeec3c87 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.20.0" }, "snippets": [ { From 3060084e5d2c8ac5b4b02e60e309a88999ba5857 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Wed, 6 Mar 2024 06:26:10 -0500 Subject: [PATCH 1056/1197] fix: Catch and surface BaseException() (#1108) Co-authored-by: Owl Bot --- .../subscriber/_protocol/streaming_pull_manager.py | 4 ++-- .../subscriber/test_streaming_pull_manager.py | 11 ++++++++--- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 2f5a31e496e7..f07db85469ed 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -112,7 +112,7 @@ def _wrap_as_exception(maybe_exception: Any) -> BaseException: def _wrap_callback_errors( callback: Callable[["google.cloud.pubsub_v1.subscriber.message.Message"], Any], - on_callback_error: Callable[[Exception], Any], + on_callback_error: Callable[[BaseException], Any], message: "google.cloud.pubsub_v1.subscriber.message.Message", ): """Wraps a user callback so that if an 
exception occurs the message is @@ -124,7 +124,7 @@ def _wrap_callback_errors( """ try: callback(message) - except Exception as exc: + except BaseException as exc: # Note: the likelihood of this failing is extremely low. This just adds # a message to a queue, so if this doesn't work the world is in an # unrecoverable state and this thread should just bail. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 1f781b72207c..278f3e88ebde 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -77,9 +77,14 @@ def test__wrap_callback_errors_no_error(): on_callback_error.assert_not_called() -def test__wrap_callback_errors_error(): - callback_error = ValueError("meep") - +@pytest.mark.parametrize( + "callback_error", + [ + (ValueError("ValueError")), + (BaseException("BaseException")), + ], +) +def test__wrap_callback_errors_error(callback_error): msg = mock.create_autospec(message.Message, instance=True) callback = mock.Mock(side_effect=callback_error) on_callback_error = mock.Mock() From 7b43f0accc1cb5aea10fc3222a3b4d38982eb90f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 10:53:36 -0500 Subject: [PATCH 1057/1197] chore(main): release 2.20.1 (#1114) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff 
--git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index d753b65da399..f2db52b48ff9 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.20.0" + ".": "2.20.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bf31d0226c43..4b3e37d6c7eb 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.20.1](https://github.com/googleapis/python-pubsub/compare/v2.20.0...v2.20.1) (2024-03-06) + + +### Bug Fixes + +* Catch and surface BaseException() ([#1108](https://github.com/googleapis/python-pubsub/issues/1108)) ([07e427f](https://github.com/googleapis/python-pubsub/commit/07e427f675464b9aa79c68dede67082529054980)) + ## [2.20.0](https://github.com/googleapis/python-pubsub/compare/v2.19.8...v2.20.0) (2024-03-05) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 551f0d2ebacb..5585b0b1a0d8 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 551f0d2ebacb..5585b0b1a0d8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.0" # {x-release-please-version} +__version__ = "2.20.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 2a8aeeec3c87..34769342cd74 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.20.0" + "version": "2.20.1" }, "snippets": [ { From 6ae17da91a53b327ab14457901414aa12c67f54e Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 14 Mar 2024 10:28:23 -0400 Subject: [PATCH 1058/1197] docs(samples): Add Create Topic with Kinesis IngestionDataSourceSettings Sample (#1120) Co-authored-by: Owl Bot --- .../samples/snippets/publisher.py | 63 +++++++++++++++++++ .../samples/snippets/publisher_test.py | 32 ++++++++++ .../samples/snippets/requirements.txt | 2 +- 3 files changed, 96 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index e2c63556c794..282bbf4db005 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -60,6 +60,51 @@ def create_topic(project_id: str, topic_id: str) -> None: # [END pubsub_create_topic] +def create_topic_kinesis_ingestion( + project_id: str, + topic_id: str, + stream_arn: str, + consumer_arn: str, + aws_role_arn: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with AWS Kinesis Ingestion Settings.""" + # [START pubsub_quickstart_create_topic_kinesis_ingestion] + # [START pubsub_create_topic_kinesis_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # stream_arn = "your-stream-arn" + # consumer_arn = "your-consumer-arn" + # aws_role_arn = "your-aws-role-arn" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + aws_kinesis=IngestionDataSourceSettings.AwsKinesis( + stream_arn=stream_arn, + consumer_arn=consumer_arn, + aws_role_arn=aws_role_arn, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with AWS Kinesis Ingestion Settings") + # [END pubsub_quickstart_create_topic_kinesis_ingestion] + # [END pubsub_create_topic_kinesis_ingestion] + + def delete_topic(project_id: str, topic_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_topic] @@ -430,6 +475,15 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: create_parser = subparsers.add_parser("create", help=create_topic.__doc__) create_parser.add_argument("topic_id") + 
create_topic_kinesis_ingestion_parser = subparsers.add_parser( + "create_kinesis_ingestion", help=create_topic_kinesis_ingestion.__doc__ + ) + create_topic_kinesis_ingestion_parser.add_argument("topic_id") + create_topic_kinesis_ingestion_parser.add_argument("stream_arn") + create_topic_kinesis_ingestion_parser.add_argument("consumer_arn") + create_topic_kinesis_ingestion_parser.add_argument("aws_role_arn") + create_topic_kinesis_ingestion_parser.add_argument("gcp_service_account") + delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_id") @@ -490,6 +544,15 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: list_topics(args.project_id) elif args.command == "create": create_topic(args.project_id, args.topic_id) + elif args.command == "create_kinesis_ingestion": + create_topic_kinesis_ingestion( + args.project_id, + args.topic_id, + args.stream_arn, + args.consumer_arn, + args.aws_role_arn, + args.gcp_service_account, + ) elif args.command == "delete": delete_topic(args.project_id, args.topic_id) elif args.command == "publish": diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 0a63113085ff..fa31a74cff70 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -124,6 +124,38 @@ def test_create( assert f"Created topic: {topic_path}" in out +def test_create_kinesis_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual AWS resources for the test to pass. 
+ stream_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name" + consumer_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name/consumer/consumer-1:1111111111" + aws_role_arn = "arn:aws:iam::111111111111:role/fake-role-name" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_kinesis_ingestion( + PROJECT_ID, + TOPIC_ID, + stream_arn, + consumer_arn, + aws_role_arn, + gcp_service_account, + ) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with AWS Kinesis Ingestion Settings" in out + + def test_list(topic_path: str, capsys: CaptureFixture[str]) -> None: publisher.list_topics(PROJECT_ID) out, _ = capsys.readouterr() diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 3fb4e0a6905c..aba41c7d7893 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.19.0 +google-cloud-pubsub==2.20.1 avro==1.11.3 protobuf===4.24.4; python_version == '3.7' protobuf==4.25.1; python_version >= '3.8' From 60915cf4a61281726c205fce5b6c06d9ca9714fd Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 15 Mar 2024 13:56:33 -0400 Subject: [PATCH 1059/1197] docs(samples): Update Topic with Kinesis Ingestion Settings (#1123) Co-authored-by: Owl Bot --- .../samples/snippets/publisher.py | 69 ++++++++++++++++++- .../samples/snippets/publisher_test.py | 46 +++++++++++++ 2 files changed, 113 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 282bbf4db005..6453762a36a2 100644 --- 
a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -69,7 +69,6 @@ def create_topic_kinesis_ingestion( gcp_service_account: str, ) -> None: """Create a new Pub/Sub topic with AWS Kinesis Ingestion Settings.""" - # [START pubsub_quickstart_create_topic_kinesis_ingestion] # [START pubsub_create_topic_kinesis_ingestion] from google.cloud import pubsub_v1 from google.pubsub_v1.types import Topic @@ -101,10 +100,58 @@ def create_topic_kinesis_ingestion( topic = publisher.create_topic(request=request) print(f"Created topic: {topic.name} with AWS Kinesis Ingestion Settings") - # [END pubsub_quickstart_create_topic_kinesis_ingestion] # [END pubsub_create_topic_kinesis_ingestion] +def update_topic_kinesis_ingestion( + project_id: str, + topic_id: str, + stream_arn: str, + consumer_arn: str, + aws_role_arn: str, + gcp_service_account: str, +) -> None: + """Update Pub/Sub topic with AWS Kinesis Ingestion Settings.""" + # [START pubsub_update_topic_kinesis_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + from google.pubsub_v1.types import UpdateTopicRequest + from google.protobuf import field_mask_pb2 + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # stream_arn = "your-stream-arn" + # consumer_arn = "your-consumer-arn" + # aws_role_arn = "your-aws-role-arn" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + update_request = UpdateTopicRequest( + topic=Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + aws_kinesis=IngestionDataSourceSettings.AwsKinesis( + stream_arn=stream_arn, + consumer_arn=consumer_arn, + aws_role_arn=aws_role_arn, + gcp_service_account=gcp_service_account, + ) + ), + ), + 
update_mask=field_mask_pb2.FieldMask(paths=["ingestion_data_source_settings"]), + ) + + topic = publisher.update_topic(request=update_request) + print(f"Updated topic: {topic.name} with AWS Kinesis Ingestion Settings") + + +# [END pubsub_update_topic_kinesis_ingestion] + + def delete_topic(project_id: str, topic_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START pubsub_delete_topic] @@ -484,6 +531,15 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: create_topic_kinesis_ingestion_parser.add_argument("aws_role_arn") create_topic_kinesis_ingestion_parser.add_argument("gcp_service_account") + update_topic_kinesis_ingestion_parser = subparsers.add_parser( + "update_kinesis_ingestion", help=update_topic_kinesis_ingestion.__doc__ + ) + update_topic_kinesis_ingestion_parser.add_argument("topic_id") + update_topic_kinesis_ingestion_parser.add_argument("stream_arn") + update_topic_kinesis_ingestion_parser.add_argument("consumer_arn") + update_topic_kinesis_ingestion_parser.add_argument("aws_role_arn") + update_topic_kinesis_ingestion_parser.add_argument("gcp_service_account") + delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_id") @@ -553,6 +609,15 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: args.aws_role_arn, args.gcp_service_account, ) + elif args.command == "update_kinesis_ingestion": + update_topic_kinesis_ingestion( + args.project_id, + args.topic_id, + args.stream_arn, + args.consumer_arn, + args.aws_role_arn, + args.gcp_service_account, + ) elif args.command == "delete": delete_topic(args.project_id, args.topic_id) elif args.command == "publish": diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index fa31a74cff70..1e673f134d32 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -123,6 +123,9 @@ def test_create( out, _ = capsys.readouterr() assert f"Created topic: {topic_path}" in out + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) + def test_create_kinesis_ingestion( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] @@ -155,6 +158,49 @@ def test_create_kinesis_ingestion( out, _ = capsys.readouterr() assert f"Created topic: {topic_path} with AWS Kinesis Ingestion Settings" in out + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) + + +def test_update_kinesis_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual AWS resources for the test to pass. + stream_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name" + consumer_arn = "arn:aws:kinesis:us-west-2:111111111111:stream/fake-stream-name/consumer/consumer-1:1111111111" + aws_role_arn = "arn:aws:iam::111111111111:role/fake-role-name" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path}" in out + + publisher.update_topic_kinesis_ingestion( + PROJECT_ID, + TOPIC_ID, + stream_arn, + consumer_arn, + aws_role_arn, + gcp_service_account, + ) + + out, _ = capsys.readouterr() + assert f"Updated topic: {topic_path} with AWS Kinesis Ingestion Settings" in out + + # Clean up resource created for the test. 
+ publisher_client.delete_topic(request={"topic": topic_path}) + def test_list(topic_path: str, capsys: CaptureFixture[str]) -> None: publisher.list_topics(PROJECT_ID) From 5d05a44714449ece4faeb7e5c4d694f097a4672c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 15:04:38 -0700 Subject: [PATCH 1060/1197] chore(python): update dependencies in /.kokoro (#1126) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-pubsub/.kokoro/build.sh | 7 -- .../.kokoro/docker/docs/Dockerfile | 4 + .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 38 ++++++ .../.kokoro/requirements.in | 3 +- .../.kokoro/requirements.txt | 114 ++++++++---------- 7 files changed, 99 insertions(+), 72 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in create mode 100644 packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index e4e943e0259a..4bdeef3904e2 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:98f3afd11308259de6e828e37376d18867fd321aba07826e29e4f8d9cab56bad -# created: 2024-02-27T15:56:18.442440378Z + digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 +# created: 2024-03-15T16:25:47.905264637Z diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index c264634884b5..b3d9152963b8 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -33,13 +33,6 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json # Setup project id. export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -# Remove old nox -python3 -m pip uninstall --yes --quiet nox-automation - -# Install nox -python3 -m pip install --upgrade --quiet nox -python3 -m nox --version - # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. 
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index 8e39a2cc438d..bdaf39fe22d0 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -80,4 +80,8 @@ RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ # Test pip RUN python3 -m pip +# Install build requirements +COPY requirements.txt /requirements.txt +RUN python3 -m pip install --require-hashes -r requirements.txt + CMD ["python3.8"] diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in new file mode 100644 index 000000000000..816817c672a1 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in @@ -0,0 +1 @@ +nox diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt new file mode 100644 index 000000000000..0e5d70f20f83 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt @@ -0,0 +1,38 @@ +# +# This file is autogenerated by pip-compile with Python 3.9 +# by the following command: +# +# pip-compile --allow-unsafe --generate-hashes requirements.in +# +argcomplete==3.2.3 \ + --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ + --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c + # via nox +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 + # via nox +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 + # via 
virtualenv +filelock==3.13.1 \ + --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ + --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c + # via virtualenv +nox==2024.3.2 \ + --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ + --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 + # via -r requirements.in +packaging==24.0 \ + --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ + --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 + # via nox +platformdirs==4.2.0 \ + --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ + --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 + # via virtualenv +virtualenv==20.25.1 \ + --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ + --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 + # via nox diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.in b/packages/google-cloud-pubsub/.kokoro/requirements.in index ec867d9fd65a..fff4d9ce0d0a 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.in +++ b/packages/google-cloud-pubsub/.kokoro/requirements.in @@ -1,5 +1,5 @@ gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +gcp-releasetool>=2 # required for compatibility with cryptography>=42.x importlib-metadata typing-extensions twine @@ -8,3 +8,4 @@ setuptools nox>=2022.11.21 # required to remove dependency on py charset-normalizer<3 click<8.1.0 +cryptography>=42.0.5 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index bda8e38c4f31..dd61f5f32018 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -93,40 +93,41 @@ colorlog==6.7.0 \ # via # 
gcp-docuploader # nox -cryptography==42.0.4 \ - --hash=sha256:01911714117642a3f1792c7f376db572aadadbafcd8d75bb527166009c9f1d1b \ - --hash=sha256:0e89f7b84f421c56e7ff69f11c441ebda73b8a8e6488d322ef71746224c20fce \ - --hash=sha256:12d341bd42cdb7d4937b0cabbdf2a94f949413ac4504904d0cdbdce4a22cbf88 \ - --hash=sha256:15a1fb843c48b4a604663fa30af60818cd28f895572386e5f9b8a665874c26e7 \ - --hash=sha256:1cdcdbd117681c88d717437ada72bdd5be9de117f96e3f4d50dab3f59fd9ab20 \ - --hash=sha256:1df6fcbf60560d2113b5ed90f072dc0b108d64750d4cbd46a21ec882c7aefce9 \ - --hash=sha256:3c6048f217533d89f2f8f4f0fe3044bf0b2090453b7b73d0b77db47b80af8dff \ - --hash=sha256:3e970a2119507d0b104f0a8e281521ad28fc26f2820687b3436b8c9a5fcf20d1 \ - --hash=sha256:44a64043f743485925d3bcac548d05df0f9bb445c5fcca6681889c7c3ab12764 \ - --hash=sha256:4e36685cb634af55e0677d435d425043967ac2f3790ec652b2b88ad03b85c27b \ - --hash=sha256:5f8907fcf57392cd917892ae83708761c6ff3c37a8e835d7246ff0ad251d9298 \ - --hash=sha256:69b22ab6506a3fe483d67d1ed878e1602bdd5912a134e6202c1ec672233241c1 \ - --hash=sha256:6bfadd884e7280df24d26f2186e4e07556a05d37393b0f220a840b083dc6a824 \ - --hash=sha256:6d0fbe73728c44ca3a241eff9aefe6496ab2656d6e7a4ea2459865f2e8613257 \ - --hash=sha256:6ffb03d419edcab93b4b19c22ee80c007fb2d708429cecebf1dd3258956a563a \ - --hash=sha256:810bcf151caefc03e51a3d61e53335cd5c7316c0a105cc695f0959f2c638b129 \ - --hash=sha256:831a4b37accef30cccd34fcb916a5d7b5be3cbbe27268a02832c3e450aea39cb \ - --hash=sha256:887623fe0d70f48ab3f5e4dbf234986b1329a64c066d719432d0698522749929 \ - --hash=sha256:a0298bdc6e98ca21382afe914c642620370ce0470a01e1bef6dd9b5354c36854 \ - --hash=sha256:a1327f280c824ff7885bdeef8578f74690e9079267c1c8bd7dc5cc5aa065ae52 \ - --hash=sha256:c1f25b252d2c87088abc8bbc4f1ecbf7c919e05508a7e8628e6875c40bc70923 \ - --hash=sha256:c3a5cbc620e1e17009f30dd34cb0d85c987afd21c41a74352d1719be33380885 \ - --hash=sha256:ce8613beaffc7c14f091497346ef117c1798c202b01153a8cc7b8e2ebaaf41c0 \ - 
--hash=sha256:d2a27aca5597c8a71abbe10209184e1a8e91c1fd470b5070a2ea60cafec35bcd \ - --hash=sha256:dad9c385ba8ee025bb0d856714f71d7840020fe176ae0229de618f14dae7a6e2 \ - --hash=sha256:db4b65b02f59035037fde0998974d84244a64c3265bdef32a827ab9b63d61b18 \ - --hash=sha256:e09469a2cec88fb7b078e16d4adec594414397e8879a4341c6ace96013463d5b \ - --hash=sha256:e53dc41cda40b248ebc40b83b31516487f7db95ab8ceac1f042626bc43a2f992 \ - --hash=sha256:f1e85a178384bf19e36779d91ff35c7617c885da487d689b05c1366f9933ad74 \ - --hash=sha256:f47be41843200f7faec0683ad751e5ef11b9a56a220d57f300376cd8aba81660 \ - --hash=sha256:fb0cef872d8193e487fc6bdb08559c3aa41b659a7d9be48b2e10747f47863925 \ - --hash=sha256:ffc73996c4fca3d2b6c1c8c12bfd3ad00def8621da24f547626bf06441400449 +cryptography==42.0.5 \ + --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ + --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ + --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ + --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ + --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ + --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ + --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ + --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ + --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ + --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ + --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ + --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ + --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ + --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ + 
--hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ + --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ + --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ + --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ + --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ + --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ + --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ + --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ + --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ + --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ + --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ + --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ + --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ + --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ + --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ + --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ + --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ + --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 # via + # -r requirements.in # gcp-releasetool # secretstorage distlib==0.3.7 \ @@ -145,9 +146,9 @@ gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 
+gcp-releasetool==2.0.0 \ + --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ + --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f # via -r requirements.in google-api-core==2.12.0 \ --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ @@ -392,29 +393,18 @@ platformdirs==3.11.0 \ --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - 
--hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee +protobuf==4.25.3 \ + --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ + --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ + --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ + --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ + --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ + --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ + --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ + --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ + --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ + --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ + --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 # via # gcp-docuploader # gcp-releasetool @@ -518,7 +508,7 @@ zipp==3.17.0 \ # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==68.2.2 \ - --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \ - --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a +setuptools==69.2.0 \ + --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ + --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c # via -r requirements.in From 1d7476a3a156ae2eda03549775fd6ab8f28d8d91 Mon Sep 17 00:00:00 2001 From: 
"release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:09:48 -0700 Subject: [PATCH 1061/1197] chore(main): release 2.20.2 (#1121) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index f2db52b48ff9..e51d046b0eb3 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.20.1" + ".": "2.20.2" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 4b3e37d6c7eb..dd6bc4bb1dd9 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.20.2](https://github.com/googleapis/python-pubsub/compare/v2.20.1...v2.20.2) (2024-03-15) + + +### Documentation + +* **samples:** Add Create Topic with Kinesis IngestionDataSourceSettings Sample ([#1120](https://github.com/googleapis/python-pubsub/issues/1120)) ([83dc9ff](https://github.com/googleapis/python-pubsub/commit/83dc9fff13aa35518fb9b6a73472816da852d975)) +* **samples:** Update Topic with Kinesis Ingestion Settings ([#1123](https://github.com/googleapis/python-pubsub/issues/1123)) ([e0e2d83](https://github.com/googleapis/python-pubsub/commit/e0e2d831da8d17288c3ae8900bea2388ce8758af)) + ## 
[2.20.1](https://github.com/googleapis/python-pubsub/compare/v2.20.0...v2.20.1) (2024-03-06) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 5585b0b1a0d8..4c1787c53865 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 5585b0b1a0d8..4c1787c53865 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.1" # {x-release-please-version} +__version__ = "2.20.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 34769342cd74..11e0f952b494 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.20.1" + "version": "2.20.2" }, "snippets": [ { From b7361509d32fd16ba5beaef5852f2e7927a6bf85 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 16:58:33 -0700 Subject: [PATCH 1062/1197] chore(python): add requirements for docs build (#1125) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 4bdeef3904e2..50d6cbf43783 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -15,3 +15,4 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 # created: 2024-03-15T16:25:47.905264637Z + From dfc60e1815ae0b9510eb6951f218aaa1ada96e38 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 16 Mar 2024 08:49:05 -0700 Subject: [PATCH 1063/1197] chore: remove nox uninstall/reinstall from python build.sh template (#1124) Co-authored-by: Owl Bot Co-authored-by: 
mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 50d6cbf43783..4bdeef3904e2 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -15,4 +15,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 # created: 2024-03-15T16:25:47.905264637Z - From c326b310da8652e25c8a58a54324f0abb37227c2 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 21 Mar 2024 16:07:09 -0400 Subject: [PATCH 1064/1197] docs(samples): Update Region Tags (#1128) --- .../samples/snippets/publisher.py | 46 +++++++++---------- .../samples/snippets/publisher_test.py | 8 ++-- 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 6453762a36a2..73afc8c978fc 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -60,7 +60,7 @@ def create_topic(project_id: str, topic_id: str) -> None: # [END pubsub_create_topic] -def create_topic_kinesis_ingestion( +def create_topic_with_kinesis_ingestion( project_id: str, topic_id: str, stream_arn: str, @@ -69,7 +69,7 @@ def create_topic_kinesis_ingestion( gcp_service_account: str, ) -> None: """Create a new Pub/Sub topic with AWS Kinesis Ingestion Settings.""" - # [START pubsub_create_topic_kinesis_ingestion] + # [START pubsub_create_topic_with_kinesis_ingestion] from google.cloud import pubsub_v1 from google.pubsub_v1.types import Topic from google.pubsub_v1.types import 
IngestionDataSourceSettings @@ -100,10 +100,10 @@ def create_topic_kinesis_ingestion( topic = publisher.create_topic(request=request) print(f"Created topic: {topic.name} with AWS Kinesis Ingestion Settings") - # [END pubsub_create_topic_kinesis_ingestion] + # [END pubsub_create_topic_with_kinesis_ingestion] -def update_topic_kinesis_ingestion( +def update_topic_type( project_id: str, topic_id: str, stream_arn: str, @@ -112,7 +112,7 @@ def update_topic_kinesis_ingestion( gcp_service_account: str, ) -> None: """Update Pub/Sub topic with AWS Kinesis Ingestion Settings.""" - # [START pubsub_update_topic_kinesis_ingestion] + # [START pubsub_update_topic_type] from google.cloud import pubsub_v1 from google.pubsub_v1.types import Topic from google.pubsub_v1.types import IngestionDataSourceSettings @@ -149,7 +149,7 @@ def update_topic_kinesis_ingestion( print(f"Updated topic: {topic.name} with AWS Kinesis Ingestion Settings") -# [END pubsub_update_topic_kinesis_ingestion] +# [END pubsub_update_topic_type] def delete_topic(project_id: str, topic_id: str) -> None: @@ -522,23 +522,23 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: create_parser = subparsers.add_parser("create", help=create_topic.__doc__) create_parser.add_argument("topic_id") - create_topic_kinesis_ingestion_parser = subparsers.add_parser( - "create_kinesis_ingestion", help=create_topic_kinesis_ingestion.__doc__ + create_topic_with_kinesis_ingestion_parser = subparsers.add_parser( + "create_kinesis_ingestion", help=create_topic_with_kinesis_ingestion.__doc__ ) - create_topic_kinesis_ingestion_parser.add_argument("topic_id") - create_topic_kinesis_ingestion_parser.add_argument("stream_arn") - create_topic_kinesis_ingestion_parser.add_argument("consumer_arn") - create_topic_kinesis_ingestion_parser.add_argument("aws_role_arn") - create_topic_kinesis_ingestion_parser.add_argument("gcp_service_account") - - update_topic_kinesis_ingestion_parser = subparsers.add_parser( - 
"update_kinesis_ingestion", help=update_topic_kinesis_ingestion.__doc__ + create_topic_with_kinesis_ingestion_parser.add_argument("topic_id") + create_topic_with_kinesis_ingestion_parser.add_argument("stream_arn") + create_topic_with_kinesis_ingestion_parser.add_argument("consumer_arn") + create_topic_with_kinesis_ingestion_parser.add_argument("aws_role_arn") + create_topic_with_kinesis_ingestion_parser.add_argument("gcp_service_account") + + update_topic_type_parser = subparsers.add_parser( + "update_kinesis_ingestion", help=update_topic_type.__doc__ ) - update_topic_kinesis_ingestion_parser.add_argument("topic_id") - update_topic_kinesis_ingestion_parser.add_argument("stream_arn") - update_topic_kinesis_ingestion_parser.add_argument("consumer_arn") - update_topic_kinesis_ingestion_parser.add_argument("aws_role_arn") - update_topic_kinesis_ingestion_parser.add_argument("gcp_service_account") + update_topic_type_parser.add_argument("topic_id") + update_topic_type_parser.add_argument("stream_arn") + update_topic_type_parser.add_argument("consumer_arn") + update_topic_type_parser.add_argument("aws_role_arn") + update_topic_type_parser.add_argument("gcp_service_account") delete_parser = subparsers.add_parser("delete", help=delete_topic.__doc__) delete_parser.add_argument("topic_id") @@ -601,7 +601,7 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: elif args.command == "create": create_topic(args.project_id, args.topic_id) elif args.command == "create_kinesis_ingestion": - create_topic_kinesis_ingestion( + create_topic_with_kinesis_ingestion( args.project_id, args.topic_id, args.stream_arn, @@ -610,7 +610,7 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: args.gcp_service_account, ) elif args.command == "update_kinesis_ingestion": - update_topic_kinesis_ingestion( + update_topic_type( args.project_id, args.topic_id, args.stream_arn, diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py 
b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 1e673f134d32..adb015e8af31 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -127,7 +127,7 @@ def test_create( publisher_client.delete_topic(request={"topic": topic_path}) -def test_create_kinesis_ingestion( +def test_create_topic_with_kinesis_ingestion( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: # The scope of `topic_path` is limited to this function. @@ -146,7 +146,7 @@ def test_create_kinesis_ingestion( except NotFound: pass - publisher.create_topic_kinesis_ingestion( + publisher.create_topic_with_kinesis_ingestion( PROJECT_ID, TOPIC_ID, stream_arn, @@ -162,7 +162,7 @@ def test_create_kinesis_ingestion( publisher_client.delete_topic(request={"topic": topic_path}) -def test_update_kinesis_ingestion( +def test_update_topic_type( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: # The scope of `topic_path` is limited to this function. 
@@ -186,7 +186,7 @@ def test_update_kinesis_ingestion( out, _ = capsys.readouterr() assert f"Created topic: {topic_path}" in out - publisher.update_topic_kinesis_ingestion( + publisher.update_topic_type( PROJECT_ID, TOPIC_ID, stream_arn, From 06f0a200f85b6b2363a432aa9078eebc0b7fd693 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 13:46:17 -0700 Subject: [PATCH 1065/1197] chore(main): release 2.20.3 (#1129) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index e51d046b0eb3..bee47555fc7f 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.20.2" + ".": "2.20.3" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index dd6bc4bb1dd9..0174016654a3 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.20.3](https://github.com/googleapis/python-pubsub/compare/v2.20.2...v2.20.3) (2024-03-21) + + +### Documentation + +* **samples:** Update Region Tags ([#1128](https://github.com/googleapis/python-pubsub/issues/1128)) ([e3bc89e](https://github.com/googleapis/python-pubsub/commit/e3bc89eaa51337c93144d6c3100486353d494ad9)) + ## 
[2.20.2](https://github.com/googleapis/python-pubsub/compare/v2.20.1...v2.20.2) (2024-03-15) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 4c1787c53865..12b6c6e02812 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.20.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 4c1787c53865..12b6c6e02812 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.2" # {x-release-please-version} +__version__ = "2.20.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 11e0f952b494..2611ce81ee29 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.20.2" + "version": "2.20.3" }, "snippets": [ { From 4c1f11b2db16efb598864e056696a324892055d6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 22 Mar 2024 14:36:54 -0700 Subject: [PATCH 1066/1197] chore: Update gapic-generator-python to v1.16.1 (#1130) Co-authored-by: Owl Bot --- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../unit/gapic/pubsub_v1/test_publisher.py | 507 ++++++++++- .../gapic/pubsub_v1/test_schema_service.py | 567 +++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 841 +++++++++++++++++- 4 files changed, 1846 insertions(+), 71 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 2611ce81ee29..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.20.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py 
b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 4aa24c3ff1e9..20cecf1c922e 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1117,7 +1117,8 @@ def test_create_topic(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() + request = pubsub.Topic() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -1143,6 +1144,59 @@ def test_create_topic_empty_call(): assert args[0] == pubsub.Topic() +def test_create_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + client.create_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + ) + + +@pytest.mark.asyncio +async def test_create_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + response = await client.create_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Topic() + + @pytest.mark.asyncio async def test_create_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.Topic @@ -1172,7 +1226,8 @@ async def test_create_topic_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() + request = pubsub.Topic() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -1357,7 +1412,8 @@ def test_update_topic(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() + request = pubsub.UpdateTopicRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -1383,6 +1439,53 @@ def test_update_topic_empty_call(): assert args[0] == pubsub.UpdateTopicRequest() +def test_update_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.UpdateTopicRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + client.update_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateTopicRequest() + + +@pytest.mark.asyncio +async def test_update_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + response = await client.update_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateTopicRequest() + + @pytest.mark.asyncio async def test_update_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest @@ -1412,7 +1515,8 @@ async def test_update_topic_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() + request = pubsub.UpdateTopicRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Topic) @@ -1604,7 +1708,8 @@ def test_publish(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() + request = pubsub.PublishRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.PublishResponse) @@ -1627,6 +1732,54 @@ def test_publish_empty_call(): assert args[0] == pubsub.PublishRequest() +def test_publish_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.PublishRequest( + topic="topic_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + client.publish(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PublishRequest( + topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_publish_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + ) + response = await client.publish() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PublishRequest() + + @pytest.mark.asyncio async def test_publish_async( transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest @@ -1653,7 +1806,8 @@ async def test_publish_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() + request = pubsub.PublishRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.PublishResponse) @@ -1849,7 +2003,8 @@ def test_get_topic(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() + request = pubsub.GetTopicRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -1875,6 +2030,57 @@ def test_get_topic_empty_call(): assert args[0] == pubsub.GetTopicRequest() +def test_get_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.GetTopicRequest( + topic="topic_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + client.get_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetTopicRequest( + topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_get_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + response = await client.get_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetTopicRequest() + + @pytest.mark.asyncio async def test_get_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest @@ -1904,7 +2110,8 @@ async def test_get_topic_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() + request = pubsub.GetTopicRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Topic) @@ -2086,7 +2293,8 @@ def test_list_topics(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() + request = pubsub.ListTopicsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicsPager) @@ -2109,6 +2317,56 @@ def test_list_topics_empty_call(): assert args[0] == pubsub.ListTopicsRequest() +def test_list_topics_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListTopicsRequest( + project="project_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + client.list_topics(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicsRequest( + project="project_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_topics_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topics() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicsRequest() + + @pytest.mark.asyncio async def test_list_topics_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest @@ -2135,7 +2393,8 @@ async def test_list_topics_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() + request = pubsub.ListTopicsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicsAsyncPager) @@ -2511,7 +2770,8 @@ def test_list_topic_subscriptions(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() + request = pubsub.ListTopicSubscriptionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicSubscriptionsPager) @@ -2537,6 +2797,61 @@ def test_list_topic_subscriptions_empty_call(): assert args[0] == pubsub.ListTopicSubscriptionsRequest() +def test_list_topic_subscriptions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = pubsub.ListTopicSubscriptionsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + client.list_topic_subscriptions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSubscriptionsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_topic_subscriptions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topic_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSubscriptionsRequest() + + @pytest.mark.asyncio async def test_list_topic_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest @@ -2566,7 +2881,8 @@ async def test_list_topic_subscriptions_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() + request = pubsub.ListTopicSubscriptionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListTopicSubscriptionsAsyncPager) @@ -2959,7 +3275,8 @@ def test_list_topic_snapshots(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() + request = pubsub.ListTopicSnapshotsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicSnapshotsPager) @@ -2985,6 +3302,61 @@ def test_list_topic_snapshots_empty_call(): assert args[0] == pubsub.ListTopicSnapshotsRequest() +def test_list_topic_snapshots_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListTopicSnapshotsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + client.list_topic_snapshots(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSnapshotsRequest( + topic="topic_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_topic_snapshots_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + ) + response = await client.list_topic_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListTopicSnapshotsRequest() + + @pytest.mark.asyncio async def test_list_topic_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest @@ -3014,7 +3386,8 @@ async def test_list_topic_snapshots_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() + request = pubsub.ListTopicSnapshotsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListTopicSnapshotsAsyncPager) @@ -3402,7 +3775,8 @@ def test_delete_topic(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() + request = pubsub.DeleteTopicRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert response is None @@ -3424,6 +3798,50 @@ def test_delete_topic_empty_call(): assert args[0] == pubsub.DeleteTopicRequest() +def test_delete_topic_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DeleteTopicRequest( + topic="topic_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + client.delete_topic(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteTopicRequest( + topic="topic_value", + ) + + +@pytest.mark.asyncio +async def test_delete_topic_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_topic() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteTopicRequest() + + @pytest.mark.asyncio async def test_delete_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest @@ -3446,7 +3864,8 @@ async def test_delete_topic_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() + request = pubsub.DeleteTopicRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3624,7 +4043,8 @@ def test_detach_subscription(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() + request = pubsub.DetachSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.DetachSubscriptionResponse) @@ -3648,6 +4068,56 @@ def test_detach_subscription_empty_call(): assert args[0] == pubsub.DetachSubscriptionRequest() +def test_detach_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + client.detach_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DetachSubscriptionRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_detach_subscription_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + response = await client.detach_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DetachSubscriptionRequest() + + @pytest.mark.asyncio async def test_detach_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest @@ -3674,7 +4144,8 @@ async def test_detach_subscription_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() + request = pubsub.DetachSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.DetachSubscriptionResponse) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 03502f54ea9f..5de9c6c457be 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1156,7 +1156,8 @@ def test_create_schema(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() + request = gp_schema.CreateSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.Schema) @@ -1182,6 +1183,59 @@ def test_create_schema_empty_call(): assert args[0] == gp_schema.CreateSchemaRequest() +def test_create_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gp_schema.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + client.create_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CreateSchemaRequest( + parent="parent_value", + schema_id="schema_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_schema_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.create_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CreateSchemaRequest() + + @pytest.mark.asyncio async def test_create_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest @@ -1211,7 +1265,8 @@ async def test_create_schema_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() + request = gp_schema.CreateSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.Schema) @@ -1416,7 +1471,8 @@ def test_get_schema(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() + request = schema.GetSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, schema.Schema) @@ -1442,6 +1498,57 @@ def test_get_schema_empty_call(): assert args[0] == schema.GetSchemaRequest() +def test_get_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.GetSchemaRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + client.get_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.GetSchemaRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_schema_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.get_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.GetSchemaRequest() + + @pytest.mark.asyncio async def test_get_schema_async( transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest @@ -1471,7 +1578,8 @@ async def test_get_schema_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() + request = schema.GetSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, schema.Schema) @@ -1653,7 +1761,8 @@ def test_list_schemas(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() + request = schema.ListSchemasRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSchemasPager) @@ -1676,6 +1785,56 @@ def test_list_schemas_empty_call(): assert args[0] == schema.ListSchemasRequest() +def test_list_schemas_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = schema.ListSchemasRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + client.list_schemas(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemasRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_schemas_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schemas() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemasRequest() + + @pytest.mark.asyncio async def test_list_schemas_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest @@ -1702,7 +1861,8 @@ async def test_list_schemas_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() + request = schema.ListSchemasRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSchemasAsyncPager) @@ -2077,7 +2237,8 @@ def test_list_schema_revisions(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemaRevisionsRequest() + request = schema.ListSchemaRevisionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSchemaRevisionsPager) @@ -2102,6 +2263,60 @@ def test_list_schema_revisions_empty_call(): assert args[0] == schema.ListSchemaRevisionsRequest() +def test_list_schema_revisions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.ListSchemaRevisionsRequest( + name="name_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + client.list_schema_revisions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemaRevisionsRequest( + name="name_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_schema_revisions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_schema_revisions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ListSchemaRevisionsRequest() + + @pytest.mark.asyncio async def test_list_schema_revisions_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemaRevisionsRequest @@ -2130,7 +2345,8 @@ async def test_list_schema_revisions_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemaRevisionsRequest() + request = schema.ListSchemaRevisionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSchemaRevisionsAsyncPager) @@ -2522,7 +2738,8 @@ def test_commit_schema(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CommitSchemaRequest() + request = gp_schema.CommitSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.Schema) @@ -2548,6 +2765,57 @@ def test_commit_schema_empty_call(): assert args[0] == gp_schema.CommitSchemaRequest() +def test_commit_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gp_schema.CommitSchemaRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + client.commit_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CommitSchemaRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_commit_schema_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.commit_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.CommitSchemaRequest() + + @pytest.mark.asyncio async def test_commit_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CommitSchemaRequest @@ -2577,7 +2845,8 @@ async def test_commit_schema_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CommitSchemaRequest() + request = gp_schema.CommitSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.Schema) @@ -2772,7 +3041,8 @@ def test_rollback_schema(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.RollbackSchemaRequest() + request = schema.RollbackSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, schema.Schema) @@ -2798,6 +3068,59 @@ def test_rollback_schema_empty_call(): assert args[0] == schema.RollbackSchemaRequest() +def test_rollback_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + client.rollback_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.RollbackSchemaRequest( + name="name_value", + revision_id="revision_id_value", + ) + + +@pytest.mark.asyncio +async def test_rollback_schema_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.rollback_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.RollbackSchemaRequest() + + @pytest.mark.asyncio async def test_rollback_schema_async( transport: str = "grpc_asyncio", request_type=schema.RollbackSchemaRequest @@ -2827,7 +3150,8 @@ async def test_rollback_schema_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.RollbackSchemaRequest() + request = schema.RollbackSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, schema.Schema) @@ -3024,7 +3348,8 @@ def test_delete_schema_revision(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRevisionRequest() + request = schema.DeleteSchemaRevisionRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, schema.Schema) @@ -3052,6 +3377,63 @@ def test_delete_schema_revision_empty_call(): assert args[0] == schema.DeleteSchemaRevisionRequest() +def test_delete_schema_revision_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = schema.DeleteSchemaRevisionRequest( + name="name_value", + revision_id="revision_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + client.delete_schema_revision(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRevisionRequest( + name="name_value", + revision_id="revision_id_value", + ) + + +@pytest.mark.asyncio +async def test_delete_schema_revision_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + response = await client.delete_schema_revision() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRevisionRequest() + + @pytest.mark.asyncio async def test_delete_schema_revision_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRevisionRequest @@ -3083,7 +3465,8 @@ async def test_delete_schema_revision_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRevisionRequest() + request = schema.DeleteSchemaRevisionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, schema.Schema) @@ -3281,7 +3664,8 @@ def test_delete_schema(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() + request = schema.DeleteSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3303,6 +3687,50 @@ def test_delete_schema_empty_call(): assert args[0] == schema.DeleteSchemaRequest() +def test_delete_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = schema.DeleteSchemaRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + client.delete_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRequest( + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_schema_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.DeleteSchemaRequest() + + @pytest.mark.asyncio async def test_delete_schema_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest @@ -3325,7 +3753,8 @@ async def test_delete_schema_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() + request = schema.DeleteSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3501,7 +3930,8 @@ def test_validate_schema(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() + request = gp_schema.ValidateSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.ValidateSchemaResponse) @@ -3523,6 +3953,52 @@ def test_validate_schema_empty_call(): assert args[0] == gp_schema.ValidateSchemaRequest() +def test_validate_schema_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gp_schema.ValidateSchemaRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + client.validate_schema(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.ValidateSchemaRequest( + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_validate_schema_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + response = await client.validate_schema() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gp_schema.ValidateSchemaRequest() + + @pytest.mark.asyncio async def test_validate_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest @@ -3547,7 +4023,8 @@ async def test_validate_schema_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() + request = gp_schema.ValidateSchemaRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, gp_schema.ValidateSchemaResponse) @@ -3737,7 +4214,8 @@ def test_validate_message(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() + request = schema.ValidateMessageRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, schema.ValidateMessageResponse) @@ -3759,6 +4237,54 @@ def test_validate_message_empty_call(): assert args[0] == schema.ValidateMessageRequest() +def test_validate_message_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = schema.ValidateMessageRequest( + parent="parent_value", + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + client.validate_message(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ValidateMessageRequest( + parent="parent_value", + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_validate_message_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + response = await client.validate_message() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == schema.ValidateMessageRequest() + + @pytest.mark.asyncio async def test_validate_message_async( transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest @@ -3783,7 +4309,8 @@ async def test_validate_message_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() + request = schema.ValidateMessageRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, schema.ValidateMessageResponse) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 1e66e7b70e86..50e02b52d47d 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1128,7 +1128,8 @@ def test_create_subscription(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() + request = pubsub.Subscription() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -1161,6 +1162,70 @@ def test_create_subscription_empty_call(): assert args[0] == pubsub.Subscription() +def test_create_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.Subscription( + name="name_value", + topic="topic_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + client.create_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Subscription( + name="name_value", + topic="topic_value", + filter="filter_value", + ) + + +@pytest.mark.asyncio +async def test_create_subscription_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + response = await client.create_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.Subscription() + + @pytest.mark.asyncio async def test_create_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.Subscription @@ -1197,7 +1262,8 @@ async def test_create_subscription_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() + request = pubsub.Subscription() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Subscription) @@ -1430,7 +1496,8 @@ def test_get_subscription(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() + request = pubsub.GetSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -1461,6 +1528,62 @@ def test_get_subscription_empty_call(): assert args[0] == pubsub.GetSubscriptionRequest() +def test_get_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.GetSubscriptionRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + client.get_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSubscriptionRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_get_subscription_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + response = await client.get_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSubscriptionRequest() + + @pytest.mark.asyncio async def test_get_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest @@ -1495,7 +1618,8 @@ async def test_get_subscription_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() + request = pubsub.GetSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -1692,7 +1816,8 @@ def test_update_subscription(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() + request = pubsub.UpdateSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -1725,6 +1850,62 @@ def test_update_subscription_empty_call(): assert args[0] == pubsub.UpdateSubscriptionRequest() +def test_update_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.UpdateSubscriptionRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + client.update_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSubscriptionRequest() + + +@pytest.mark.asyncio +async def test_update_subscription_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + response = await client.update_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSubscriptionRequest() + + @pytest.mark.asyncio async def test_update_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest @@ -1761,7 +1942,8 @@ async def test_update_subscription_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() + request = pubsub.UpdateSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Subscription) @@ -1968,7 +2150,8 @@ def test_list_subscriptions(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() + request = pubsub.ListSubscriptionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSubscriptionsPager) @@ -1993,6 +2176,60 @@ def test_list_subscriptions_empty_call(): assert args[0] == pubsub.ListSubscriptionsRequest() +def test_list_subscriptions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ListSubscriptionsRequest( + project="project_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + client.list_subscriptions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSubscriptionsRequest( + project="project_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_subscriptions_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_subscriptions() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSubscriptionsRequest() + + @pytest.mark.asyncio async def test_list_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest @@ -2021,7 +2258,8 @@ async def test_list_subscriptions_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() + request = pubsub.ListSubscriptionsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSubscriptionsAsyncPager) @@ -2410,7 +2648,8 @@ def test_delete_subscription(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() + request = pubsub.DeleteSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2434,6 +2673,54 @@ def test_delete_subscription_empty_call(): assert args[0] == pubsub.DeleteSubscriptionRequest() +def test_delete_subscription_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + client.delete_subscription(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSubscriptionRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_delete_subscription_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. 
request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_subscription() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSubscriptionRequest() + + @pytest.mark.asyncio async def test_delete_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest @@ -2458,7 +2745,8 @@ async def test_delete_subscription_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() + request = pubsub.DeleteSubscriptionRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2644,7 +2932,8 @@ def test_modify_ack_deadline(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() + request = pubsub.ModifyAckDeadlineRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2668,6 +2957,54 @@ def test_modify_ack_deadline_empty_call(): assert args[0] == pubsub.ModifyAckDeadlineRequest() +def test_modify_ack_deadline_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ModifyAckDeadlineRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + client.modify_ack_deadline(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyAckDeadlineRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_modify_ack_deadline_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.modify_ack_deadline() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyAckDeadlineRequest() + + @pytest.mark.asyncio async def test_modify_ack_deadline_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest @@ -2692,7 +3029,8 @@ async def test_modify_ack_deadline_async( # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() + request = pubsub.ModifyAckDeadlineRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2896,7 +3234,8 @@ def test_acknowledge(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() + request = pubsub.AcknowledgeRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -2918,6 +3257,50 @@ def test_acknowledge_empty_call(): assert args[0] == pubsub.AcknowledgeRequest() +def test_acknowledge_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.AcknowledgeRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + client.acknowledge(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.AcknowledgeRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_acknowledge_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.acknowledge() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.AcknowledgeRequest() + + @pytest.mark.asyncio async def test_acknowledge_async( transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest @@ -2940,7 +3323,8 @@ async def test_acknowledge_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() + request = pubsub.AcknowledgeRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3126,7 +3510,8 @@ def test_pull(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() + request = pubsub.PullRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.PullResponse) @@ -3148,6 +3533,50 @@ def test_pull_empty_call(): assert args[0] == pubsub.PullRequest() +def test_pull_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.PullRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + client.pull(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PullRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_pull_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + response = await client.pull() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.PullRequest() + + @pytest.mark.asyncio async def test_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.PullRequest @@ -3170,7 +3599,8 @@ async def test_pull_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() + request = pubsub.PullRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.PullResponse) @@ -3444,7 +3874,8 @@ def test_modify_push_config(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() + request = pubsub.ModifyPushConfigRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3455,14 +3886,62 @@ def test_modify_push_config_empty_call(): # i.e. request == None and no flattened fields passed, work. client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + client.modify_push_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyPushConfigRequest() + + +def test_modify_push_config_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + client.modify_push_config(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ModifyPushConfigRequest( + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_modify_push_config_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( type(client.transport.modify_push_config), "__call__" ) as call: - client.modify_push_config() + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.modify_push_config() call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == pubsub.ModifyPushConfigRequest() @@ -3492,7 +3971,8 @@ async def test_modify_push_config_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() + request = pubsub.ModifyPushConfigRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -3689,7 +4169,8 @@ def test_get_snapshot(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() + request = pubsub.GetSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Snapshot) @@ -3713,6 +4194,55 @@ def test_get_snapshot_empty_call(): assert args[0] == pubsub.GetSnapshotRequest() +def test_get_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + client.get_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSnapshotRequest( + snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_get_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + response = await client.get_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.GetSnapshotRequest() + + @pytest.mark.asyncio async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest @@ -3740,7 +4270,8 @@ async def test_get_snapshot_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() + request = pubsub.GetSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -3920,7 +4451,8 @@ def test_list_snapshots(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() + request = pubsub.ListSnapshotsRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListSnapshotsPager) @@ -3943,6 +4475,56 @@ def test_list_snapshots_empty_call(): assert args[0] == pubsub.ListSnapshotsRequest() +def test_list_snapshots_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = pubsub.ListSnapshotsRequest( + project="project_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + client.list_snapshots(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSnapshotsRequest( + project="project_value", + page_token="page_token_value", + ) + + +@pytest.mark.asyncio +async def test_list_snapshots_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_snapshots() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.ListSnapshotsRequest() + + @pytest.mark.asyncio async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest @@ -3969,7 +4551,8 @@ async def test_list_snapshots_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() + request = pubsub.ListSnapshotsRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListSnapshotsAsyncPager) @@ -4343,7 +4926,8 @@ def test_create_snapshot(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() + request = pubsub.CreateSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -4367,6 +4951,57 @@ def test_create_snapshot_empty_call(): assert args[0] == pubsub.CreateSnapshotRequest() +def test_create_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + client.create_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", + ) + + +@pytest.mark.asyncio +async def test_create_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + response = await client.create_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.CreateSnapshotRequest() + + @pytest.mark.asyncio async def test_create_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest @@ -4394,7 +5029,8 @@ async def test_create_snapshot_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() + request = pubsub.CreateSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -4585,7 +5221,8 @@ def test_update_snapshot(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() + request = pubsub.UpdateSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.Snapshot) @@ -4609,6 +5246,51 @@ def test_update_snapshot_empty_call(): assert args[0] == pubsub.UpdateSnapshotRequest() +def test_update_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.UpdateSnapshotRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + client.update_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSnapshotRequest() + + +@pytest.mark.asyncio +async def test_update_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + response = await client.update_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.UpdateSnapshotRequest() + + @pytest.mark.asyncio async def test_update_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest @@ -4636,7 +5318,8 @@ async def test_update_snapshot_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() + request = pubsub.UpdateSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.Snapshot) @@ -4824,7 +5507,8 @@ def test_delete_snapshot(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() + request = pubsub.DeleteSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -4846,6 +5530,50 @@ def test_delete_snapshot_empty_call(): assert args[0] == pubsub.DeleteSnapshotRequest() +def test_delete_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + client.delete_snapshot(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSnapshotRequest( + snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_delete_snapshot_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_snapshot() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.DeleteSnapshotRequest() + + @pytest.mark.asyncio async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest @@ -4868,7 +5596,8 @@ async def test_delete_snapshot_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() + request = pubsub.DeleteSnapshotRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert response is None @@ -5044,7 +5773,8 @@ def test_seek(request_type, transport: str = "grpc"): # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() + request = pubsub.SeekRequest() + assert args[0] == request # Establish that the response is the type that we expect. assert isinstance(response, pubsub.SeekResponse) @@ -5066,6 +5796,52 @@ def test_seek_empty_call(): assert args[0] == pubsub.SeekRequest() +def test_seek_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = pubsub.SeekRequest( + subscription="subscription_value", + snapshot="snapshot_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + client.seek(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.SeekRequest( + subscription="subscription_value", + snapshot="snapshot_value", + ) + + +@pytest.mark.asyncio +async def test_seek_empty_call_async(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + response = await client.seek() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == pubsub.SeekRequest() + + @pytest.mark.asyncio async def test_seek_async( transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest @@ -5088,7 +5864,8 @@ async def test_seek_async( # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() + request = pubsub.SeekRequest() + assert args[0] == request # Establish that the response is the type that we expect. 
assert isinstance(response, pubsub.SeekResponse) From 66a4a4141fdde53b216d15c5bcc77ce225b90108 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 09:38:06 -0700 Subject: [PATCH 1067/1197] feat: add custom datetime format for Cloud Storage subscriptions (#1131) Co-authored-by: Owl Bot --- .../google/pubsub_v1/types/pubsub.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index b4d823c6f2ee..55cda3abd93f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -222,7 +222,7 @@ class State(proto.Enum): ``gcp_service_account``. PUBLISH_PERMISSION_DENIED (3): Permission denied encountered while publishing to the topic. - This can happen due to Pub/Sub SA has not been granted the + This can happen if the Pub/Sub SA has not been granted the `appropriate publish permissions `__ STREAM_NOT_FOUND (4): @@ -1394,6 +1394,11 @@ class CloudStorageConfig(proto.Message): See the `object naming requirements `__. Must not end in "/". + filename_datetime_format (str): + Optional. User-provided format string specifying how to + represent datetimes in Cloud Storage filenames. See the + `datetime format + guidance `__. text_config (google.pubsub_v1.types.CloudStorageConfig.TextConfig): Optional. If set, message data will be written to Cloud Storage in text format. 
@@ -1487,6 +1492,10 @@ class AvroConfig(proto.Message): proto.STRING, number=3, ) + filename_datetime_format: str = proto.Field( + proto.STRING, + number=10, + ) text_config: TextConfig = proto.Field( proto.MESSAGE, number=4, From c71da304a2245a08b0826f113926b376f870446f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 26 Mar 2024 10:20:39 -0700 Subject: [PATCH 1068/1197] chore(main): release 2.21.0 (#1132) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index bee47555fc7f..1ff4c05fcff4 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.20.3" + ".": "2.21.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0174016654a3..6b648e4ee002 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.21.0](https://github.com/googleapis/python-pubsub/compare/v2.20.3...v2.21.0) (2024-03-26) + + +### Features + +* Add custom datetime format for Cloud Storage subscriptions ([#1131](https://github.com/googleapis/python-pubsub/issues/1131)) ([4da6744](https://github.com/googleapis/python-pubsub/commit/4da67441ddab01a173620d8c03bc640271c785c6)) + ## 
[2.20.3](https://github.com/googleapis/python-pubsub/compare/v2.20.2...v2.20.3) (2024-03-21) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 12b6c6e02812..e546bae0531e 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.20.3" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 12b6c6e02812..e546bae0531e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.20.3" # {x-release-please-version} +__version__ = "2.21.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..b053d68de548 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.21.0" }, "snippets": [ { From b06e503a21c86136ecd1ce3690d4562f1c7ee1d3 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Wed, 3 Apr 2024 23:17:58 -0400 Subject: [PATCH 1069/1197] fix: Set timeout to infinite for publishing with ordering keys enabled (#1134) --- .../google/cloud/pubsub_v1/publisher/client.py | 5 +++++ .../tests/unit/pubsub_v1/publisher/test_publisher_client.py | 4 ++++ 2 files changed, 9 insertions(+) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index caf9fa180eea..54b353276196 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -399,8 +399,13 @@ def on_publish_done(future): transport = self._transport base_retry = transport._wrapped_methods[transport.publish]._retry retry = base_retry.with_deadline(2.0**32) + # timeout needs to be overridden and set to infinite in + # addition to the retry deadline since both determine + # the duration for which retries are attempted. + timeout = 2.0**32 elif retry is not None: retry = retry.with_deadline(2.0**32) + timeout = 2.0**32 # Delegate the publishing to the sequencer. 
sequencer = self._get_or_create_sequencer(topic, ordering_key) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index cc8eda56ce7e..460246738601 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -316,6 +316,10 @@ def test_publish_with_ordering_key_uses_extended_retry_deadline(creds): expected_retry = custom_retry.with_deadline(2.0**32) _assert_retries_equal(batch_commit_retry, expected_retry) + batch_commit_timeout = kwargs["commit_timeout"] + expected_timeout = 2.0**32 + assert batch_commit_timeout == pytest.approx(expected_timeout) + def test_publish_with_ordering_key_with_no_retry(creds): client = publisher.Client( From 8589154dd86e08d87dfc0fb8ec250356a1b21d80 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 21:02:12 -0700 Subject: [PATCH 1070/1197] chore(main): release 2.21.1 (#1135) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 1ff4c05fcff4..72e6fd856e07 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.21.0" + ".": "2.21.1" } \ No newline at end of file diff --git 
a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 6b648e4ee002..0854b871b7cd 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.21.1](https://github.com/googleapis/python-pubsub/compare/v2.21.0...v2.21.1) (2024-04-04) + + +### Bug Fixes + +* Set timeout to infinite for publishing with ordering keys enabled ([#1134](https://github.com/googleapis/python-pubsub/issues/1134)) ([67daf3c](https://github.com/googleapis/python-pubsub/commit/67daf3c64239d22eabe59c3df214057a4e59a39e)) + ## [2.21.0](https://github.com/googleapis/python-pubsub/compare/v2.20.3...v2.21.0) (2024-03-26) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index e546bae0531e..e71611fa6849 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.0" # {x-release-please-version} +__version__ = "2.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index e546bae0531e..e71611fa6849 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.0" # {x-release-please-version} +__version__ = "2.21.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index b053d68de548..619f0eac4288 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.21.0" + "version": "2.21.1" }, "snippets": [ { From 1689e49b9567e3254302850f5fc88ca5c79988fe Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 12 Apr 2024 13:16:11 -0400 Subject: [PATCH 1071/1197] chore(python): bump idna from 3.4 to 3.7 in .kokoro (#1140) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.github/auto-label.yaml | 5 +++++ .../.github/blunderbuss.yml | 15 +++++++++++-- .../.kokoro/requirements.txt | 6 ++--- packages/google-cloud-pubsub/docs/index.rst | 5 +++++ .../docs/summary_overview.md | 22 +++++++++++++++++++ packages/google-cloud-pubsub/owlbot.py | 1 + 7 files changed, 51 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-pubsub/docs/summary_overview.md diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 4bdeef3904e2..81f87c56917d 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a8a80fc6456e433df53fc2a0d72ca0345db0ddefb409f1b75b118dfd1babd952 -# created: 2024-03-15T16:25:47.905264637Z + digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 +# created: 2024-04-12T11:35:58.922854369Z diff --git a/packages/google-cloud-pubsub/.github/auto-label.yaml b/packages/google-cloud-pubsub/.github/auto-label.yaml index b2016d119b40..8b37ee89711f 100644 --- a/packages/google-cloud-pubsub/.github/auto-label.yaml +++ b/packages/google-cloud-pubsub/.github/auto-label.yaml @@ -13,3 +13,8 @@ # limitations under the License. requestsize: enabled: true + +path: + pullrequest: true + paths: + samples: "samples" diff --git a/packages/google-cloud-pubsub/.github/blunderbuss.yml b/packages/google-cloud-pubsub/.github/blunderbuss.yml index 992f04dce1fe..3408b580a839 100644 --- a/packages/google-cloud-pubsub/.github/blunderbuss.yml +++ b/packages/google-cloud-pubsub/.github/blunderbuss.yml @@ -1,6 +1,17 @@ -# Configuration for the Blunderbuss GitHub app. For more info see -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/blunderbuss +# Blunderbuss config +# +# This file controls who is assigned for pull requests and issues. +# Note: This file is autogenerated. To make changes to the assignee +# team, please update `codeowner_team` in `.repo-metadata.json`. 
assign_issues: - mukund-ananthu + +assign_issues_by: + - labels: + - "samples" + to: + - googleapis/python-samples-reviewers + - mukund-ananthu + assign_prs: - mukund-ananthu diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index dd61f5f32018..51f92b8e12f1 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -252,9 +252,9 @@ googleapis-common-protos==1.61.0 \ --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +idna==3.7 \ + --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ + --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests importlib-metadata==6.8.0 \ --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ diff --git a/packages/google-cloud-pubsub/docs/index.rst b/packages/google-cloud-pubsub/docs/index.rst index 6b3e1583bd23..daba0c7b324f 100644 --- a/packages/google-cloud-pubsub/docs/index.rst +++ b/packages/google-cloud-pubsub/docs/index.rst @@ -41,3 +41,8 @@ For a list of all ``google-cloud-pubsub`` releases: changelog + +.. toctree:: + :hidden: + + summary_overview.md diff --git a/packages/google-cloud-pubsub/docs/summary_overview.md b/packages/google-cloud-pubsub/docs/summary_overview.md new file mode 100644 index 000000000000..171339711196 --- /dev/null +++ b/packages/google-cloud-pubsub/docs/summary_overview.md @@ -0,0 +1,22 @@ +[ +This is a templated file. Adding content to this file may result in it being +reverted. 
Instead, if you want to place additional content, create an +"overview_content.md" file in `docs/` directory. The Sphinx tool will +pick up on the content and merge the content. +]: # + +# Google Cloud Pub/Sub API + +Overview of the APIs available for Google Cloud Pub/Sub API. + +## All entries + +Classes, methods and properties & attributes for +Google Cloud Pub/Sub API. + +[classes](https://cloud.google.com/python/docs/reference/pubsub/latest/summary_class.html) + +[methods](https://cloud.google.com/python/docs/reference/pubsub/latest/summary_method.html) + +[properties and +attributes](https://cloud.google.com/python/docs/reference/pubsub/latest/summary_property.html) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index ad76a48ee63f..d80434efd667 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -432,6 +432,7 @@ def mypy_samples(session): "noxfile.py", "--cov=google", "--cov=google/cloud", ) +s.replace(".github/blunderbuss.yml", "googleapis/api-pubsub", "mukund-ananthu") python.py_samples(skip_readmes=True) From 7fbb3b2388a622ac46811dbc7524532d274c4284 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 12 Apr 2024 20:07:49 +0200 Subject: [PATCH 1072/1197] chore(deps): update all dependencies (#1057) --- .../samples/snippets/requirements-test.txt | 5 +++-- .../google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 17e317ce3e7f..286a4de5d511 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,6 +1,7 @@ backoff==2.2.1 -pytest==7.4.3 +pytest===7.4.4; python_version == '3.7' +pytest==8.0.0; python_version >= '3.8' mock==5.1.0 flaky==3.7.0 
-google-cloud-bigquery==3.14.1 +google-cloud-bigquery==3.17.2 google-cloud-storage==2.14.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index aba41c7d7893..8979c92e0acd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ google-cloud-pubsub==2.20.1 avro==1.11.3 protobuf===4.24.4; python_version == '3.7' -protobuf==4.25.1; python_version >= '3.8' +protobuf==4.25.2; python_version >= '3.8' avro==1.11.3 From 6d2cc4d8d81c820f87c6f1ebca25f63f3a120720 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 12 Apr 2024 23:42:20 +0200 Subject: [PATCH 1073/1197] chore(deps): update all dependencies (#1142) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 8 ++++---- .../google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 286a4de5d511..ab0de59c1d7d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,7 +1,7 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' -pytest==8.0.0; python_version >= '3.8' +pytest==8.1.1; python_version >= '3.8' mock==5.1.0 -flaky==3.7.0 -google-cloud-bigquery==3.17.2 -google-cloud-storage==2.14.0 +flaky==3.8.1 +google-cloud-bigquery==3.20.1 +google-cloud-storage==2.16.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 8979c92e0acd..5ea08a511447 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ 
b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.20.1 +google-cloud-pubsub==2.21.1 avro==1.11.3 protobuf===4.24.4; python_version == '3.7' protobuf==4.25.2; python_version >= '3.8' From bb70e23a25aea2db81ff81cccdc46f40424d7a91 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 30 May 2024 10:18:37 -0400 Subject: [PATCH 1074/1197] fix: Test failures due to grpcio changes (#1178) --- .../publisher/test_publisher_client.py | 25 ++++++++++++++++--- .../subscriber/test_subscriber_client.py | 25 ++++++++++++++++--- 2 files changed, 42 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 460246738601..9db5e0ef8b1a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -133,17 +133,28 @@ def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} client = publisher.Client(client_options=client_options, credentials=creds) + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:///testendpoint.google.com:443" + else: + _EXPECTED_TARGET = "testendpoint.google.com:443" assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == "testendpoint.google.com:443" + ) == _EXPECTED_TARGET def test_init_w_empty_client_options(creds): client = publisher.Client(client_options={}, credentials=creds) - + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if 
grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:///pubsub.googleapis.com:443" + else: + _EXPECTED_TARGET = "pubsub.googleapis.com:443" assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == publisher_client.PublisherClient.SERVICE_ADDRESS + ) == _EXPECTED_TARGET def test_init_client_options_pass_through(): @@ -182,7 +193,13 @@ def test_init_emulator(monkeypatch): # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. channel = client._transport.publish._channel - assert channel.target().decode("utf8") == "/foo/bar:123" + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:////foo/bar:123" + else: + _EXPECTED_TARGET = "/foo/bar:123" + assert channel.target().decode("utf8") == _EXPECTED_TARGET def test_message_ordering_enabled(creds): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index bedfde79ab66..a09d85b008c2 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -66,17 +66,28 @@ def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} client = subscriber.Client(client_options=client_options, credentials=creds) + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:///testendpoint.google.com:443" + else: + _EXPECTED_TARGET = "testendpoint.google.com:443" assert 
(client._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == "testendpoint.google.com:443" + ) == _EXPECTED_TARGET def test_init_w_empty_client_options(creds): client = subscriber.Client(client_options={}, credentials=creds) - + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:///pubsub.googleapis.com:443" + else: + _EXPECTED_TARGET = "pubsub.googleapis.com:443" assert (client._transport.grpc_channel._channel.target()).decode( "utf-8" - ) == subscriber_client.SubscriberClient.SERVICE_ADDRESS + ) == _EXPECTED_TARGET def test_init_client_options_pass_through(): @@ -115,7 +126,13 @@ def test_init_emulator(monkeypatch): # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. channel = client._transport.pull._channel - assert channel.target().decode("utf8") == "/baz/bacon:123" + # Behavior to include dns prefix changed in gRPCv1.63 + grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] + if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): + _EXPECTED_TARGET = "dns:////baz/bacon:123" + else: + _EXPECTED_TARGET = "/baz/bacon:123" + assert channel.target().decode("utf8") == _EXPECTED_TARGET def test_class_method_factory(): From 86be9cd594b8f509f82acf82d6ca85039cf762bb Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 May 2024 16:36:10 -0400 Subject: [PATCH 1075/1197] chore(main): release 2.21.2 (#1179) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ 
.../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 72e6fd856e07..f32ff8487a49 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.21.1" + ".": "2.21.2" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 0854b871b7cd..bfd5ce7b1101 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.21.2](https://github.com/googleapis/python-pubsub/compare/v2.21.1...v2.21.2) (2024-05-30) + + +### Bug Fixes + +* Test failures due to grpcio changes ([#1178](https://github.com/googleapis/python-pubsub/issues/1178)) ([086dd46](https://github.com/googleapis/python-pubsub/commit/086dd4660ec56d9ff2d41a32ec0b8e8dc44acc55)) + ## [2.21.1](https://github.com/googleapis/python-pubsub/compare/v2.21.0...v2.21.1) (2024-04-04) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index e71611fa6849..b5c52129eb5a 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.1" # {x-release-please-version} +__version__ = "2.21.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index e71611fa6849..b5c52129eb5a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.1" # {x-release-please-version} +__version__ = "2.21.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 619f0eac4288..5bfc0254ba1e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.21.1" + "version": "2.21.2" }, "snippets": [ { From e900c138722f5f5985ea720f575ebc9cbbef76c6 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 7 Jun 2024 12:01:05 -0400 Subject: [PATCH 1076/1197] chore: Update mypy version (#1185) Co-authored-by: Owl Bot --- .../google/cloud/pubsub_v1/publisher/_batch/thread.py | 2 +- .../google/cloud/pubsub_v1/publisher/futures.py | 2 +- .../cloud/pubsub_v1/subscriber/_protocol/helper_threads.py | 4 ++-- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 2 +- .../google/cloud/pubsub_v1/subscriber/futures.py | 2 +- .../google/cloud/pubsub_v1/subscriber/scheduler.py | 2 +- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 2 +- 8 files changed, 9 insertions(+), 9 
deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index f6436fb7be4e..e6b8407fd6c8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -90,7 +90,7 @@ def __init__( client: "PublisherClient", topic: str, settings: "types.BatchSettings", - batch_done_callback: Callable[[bool], Any] = None, + batch_done_callback: Optional[Callable[[bool], Any]] = None, commit_when_full: bool = True, commit_retry: "OptionalRetry" = gapic_v1.method.DEFAULT, commit_timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py index c7cc66f18d2c..7b5921673223 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/futures.py @@ -45,7 +45,7 @@ def cancelled(self) -> bool: """ return False - def result(self, timeout: Union[int, float] = None) -> str: + def result(self, timeout: Union[int, float, None] = None) -> str: """Return the message ID or raise an exception. 
This blocks until the message has been published successfully and diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index fbcab781df84..a7e18a88e52a 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -15,7 +15,7 @@ import logging import queue import time -from typing import Any, Callable, List, Sequence +from typing import Any, Callable, List, Sequence, Optional import uuid @@ -32,7 +32,7 @@ def _get_many( - queue_: queue.Queue, max_items: int = None, max_latency: float = 0 + queue_: queue.Queue, max_items: Optional[int] = None, max_latency: float = 0 ) -> List[Any]: """Get multiple items from a Queue. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index f07db85469ed..b8531db17dc8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -267,7 +267,7 @@ def __init__( client: "subscriber.Client", subscription: str, flow_control: types.FlowControl = types.FlowControl(), - scheduler: ThreadScheduler = None, + scheduler: Optional[ThreadScheduler] = None, use_legacy_flow_control: bool = False, await_callbacks_on_shutdown: bool = False, ): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py index f043b7eb517e..4c46c6813ebe 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/futures.py @@ -104,7 +104,7 @@ def cancelled(self) -> bool: """ return False - def result(self, timeout: Union[int, float] = None) -> AcknowledgeStatus: + def result(self, timeout: Union[int, float, None] = None) -> AcknowledgeStatus: """Return a success code or raise an exception. This blocks until the operation completes successfully and diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index ca270a077dfb..a3b3c88e1a16 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -162,7 +162,7 @@ def shutdown( work_item = self._executor._work_queue.get(block=False) if work_item is None: # Exceutor in shutdown mode. continue - dropped_messages.append(work_item.args[0]) + dropped_messages.append(work_item.args[0]) # type: ignore[index] except queue.Empty: pass diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index ae87830ec545..61e73e0d4a6d 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -32,7 +32,7 @@ ISORT_VERSION = "isort==5.11.0" LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -MYPY_VERSION = "mypy==0.910" +MYPY_VERSION = "mypy==1.10.0" DEFAULT_PYTHON_VERSION = "3.8" diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index d80434efd667..3551413ee128 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -348,7 +348,7 @@ s.replace( "noxfile.py", r"LINT_PATHS = \[.*?\]", - '\g<0>\n\nMYPY_VERSION = "mypy==0.910"', + '\g<0>\n\nMYPY_VERSION = "mypy==1.10.0"', ) s.replace( "noxfile.py", r'"blacken",', '\g<0>\n "mypy",', From 8448ed83e6bcd30ec5cb20a528202f6241211c01 Mon Sep 17 
00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 7 Jun 2024 16:10:19 -0400 Subject: [PATCH 1077/1197] fix: Typecheck errors in samples/snippets/subscriber.py (#1186) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../google-cloud-pubsub/samples/snippets/mypy.ini | 6 +++++- .../samples/snippets/subscriber.py | 12 ++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/mypy.ini b/packages/google-cloud-pubsub/samples/snippets/mypy.ini index 8f2bae69aebc..3c8dd6f41dcd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/mypy.ini +++ b/packages/google-cloud-pubsub/samples/snippets/mypy.ini @@ -4,5 +4,9 @@ strict = True exclude = noxfile\.py warn_unused_configs = True -[mypy-avro.*,backoff,flaky] +; Ignore errors caused due to missing library stubs or py.typed marker +; Refer https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-library-stubs-or-py-typed-marker +; Errors ignored instead of adding stubs as a workaround, since this directory contains sample code +; that does not affect the functionality of the client library. 
+[mypy-avro.*,backoff,flaky,google.cloud.*] ignore_missing_imports = True diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index ab0c8aafa44b..2cd03b7855d3 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -528,8 +528,10 @@ def update_subscription_with_dead_letter_policy( ) with subscriber: - subscription_after_update = subscriber.update_subscription( - request={"subscription": subscription, "update_mask": update_mask} + subscription_after_update: gapic_types.Subscription = ( + subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} + ) ) print(f"After the update: {subscription_after_update}.") @@ -573,8 +575,10 @@ def remove_dead_letter_policy( ) with subscriber: - subscription_after_update = subscriber.update_subscription( - request={"subscription": subscription, "update_mask": update_mask} + subscription_after_update: gapic_types.Subscription = ( + subscriber.update_subscription( + request={"subscription": subscription, "update_mask": update_mask} + ) ) print(f"After removing the policy: {subscription_after_update}.") From 45f62df7e6a5d0281f7651cf53e82d90f33e0fbd Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Mon, 10 Jun 2024 10:59:49 -0400 Subject: [PATCH 1078/1197] fix: Suppress warnings caused during pytest runs (#1189) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/pytest.ini | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index ce16e4730652..865897dd552c 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -13,4 +13,8 @@ filterwarnings = # Remove warning once 
https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed ignore:get_mtls_endpoint_and_cert_source is deprecated.:DeprecationWarning # Remove warning once https://github.com/grpc/grpc/issues/35974 is fixed - ignore:unclosed:ResourceWarning \ No newline at end of file + ignore:unclosed:ResourceWarning + # Added to suppress "DeprecationWarning: pkg_resources is deprecated as an API. See https://setuptools.pypa.io/en/latest/pkg_resources.html" + # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 + ignore:.*pkg_resources.declare_namespace:DeprecationWarning + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning \ No newline at end of file From 769ac53507059ec03c4149d497f63491fd7b795a Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Mon, 10 Jun 2024 12:07:09 -0400 Subject: [PATCH 1079/1197] fix: Race condition where future callbacks invoked before client is in paused state (#1145) Co-authored-by: Owl Bot --- .../google/cloud/pubsub_v1/publisher/_batch/thread.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index e6b8407fd6c8..1617f8c90288 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -282,14 +282,14 @@ def _commit(self) -> None: # all futures and exit. self._status = base.BatchStatus.ERROR - for future in self._futures: - future.set_exception(exc) - batch_transport_succeeded = False if self._batch_done_callback is not None: # Failed to publish batch. 
self._batch_done_callback(batch_transport_succeeded) + for future in self._futures: + future.set_exception(exc) + return end = time.time() From 1b891c33ef2baec0c1a7e3e0b76441724e2e578d Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 10 Jun 2024 09:51:02 -0700 Subject: [PATCH 1080/1197] chore(main): release 2.21.3 (#1190) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 9 +++++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 13 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index f32ff8487a49..f893c80673a8 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.21.2" + ".": "2.21.3" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bfd5ce7b1101..bc4d2f3d08c9 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,15 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.21.3](https://github.com/googleapis/python-pubsub/compare/v2.21.2...v2.21.3) (2024-06-10) + + +### Bug Fixes + +* Race condition where future callbacks invoked before client is in paused state ([#1145](https://github.com/googleapis/python-pubsub/issues/1145)) ([d12bac6](https://github.com/googleapis/python-pubsub/commit/d12bac6d94b337aa8978006600fb00e5b13d741d)) +* Suppress warnings caused during pytest runs ([#1189](https://github.com/googleapis/python-pubsub/issues/1189)) 
([cd51149](https://github.com/googleapis/python-pubsub/commit/cd51149c9e0d3c59d1c75395c05308e860908bf9)) +* Typecheck errors in samples/snippets/subscriber.py ([#1186](https://github.com/googleapis/python-pubsub/issues/1186)) ([3698450](https://github.com/googleapis/python-pubsub/commit/3698450041cb4db0e2957832c24450f674b89c11)) + ## [2.21.2](https://github.com/googleapis/python-pubsub/compare/v2.21.1...v2.21.2) (2024-05-30) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index b5c52129eb5a..71dc4e9b4332 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.2" # {x-release-please-version} +__version__ = "2.21.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index b5c52129eb5a..71dc4e9b4332 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.2" # {x-release-please-version} +__version__ = "2.21.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 5bfc0254ba1e..74c5d4caafe9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.21.2" + "version": "2.21.3" }, "snippets": [ { From 5a8217907ffccf124837d71c896bed710db26157 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Tue, 18 Jun 2024 10:01:24 -0400 Subject: [PATCH 1081/1197] docs(samples): Add code sample for optimistic subscribe (#1182) Co-authored-by: Owl Bot --- .../samples/snippets/subscriber.py | 93 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 50 ++++++++++ 2 files changed, 143 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 2cd03b7855d3..79cd0ebf1c98 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -94,6 +94,86 @@ def create_subscription(project_id: str, topic_id: str, subscription_id: str) -> # [END pubsub_create_pull_subscription] +def optimistic_subscribe( + project_id: str, + topic_id: str, + subscription_id: str, + timeout: Optional[float] = None, +) -> None: + """Optimistically subscribe to messages instead of making calls to verify existence + of a subscription first and then subscribing to messages from it. 
This avoids admin + operation calls to verify the existence of a subscription and reduces the probability + of running out of quota for admin operations.""" + # [START pubsub_optimistic_subscribe] + from google.api_core.exceptions import NotFound + from google.cloud import pubsub_v1 + from concurrent.futures import TimeoutError + + # TODO(developer) + # project_id = "your-project-id" + # subscription_id = "your-subscription-id" + # Number of seconds the subscriber should listen for messages + # timeout = 5.0 + # topic_id = "your-topic-id" + + # Create a subscriber client. + subscriber = pubsub_v1.SubscriberClient() + + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + # Define callback to be called when a message is received. + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + # Ack message after processing it. + message.ack() + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # Optimistically subscribe to messages on the subscription. + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback + ) + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + print("Successfully subscribed until the timeout passed.") + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + except NotFound: + print(f"Subscription {subscription_path} not found, creating it.") + + try: + # If the subscription does not exist, then create it. 
+ publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription = subscriber.create_subscription( + request={"name": subscription_path, "topic": topic_path} + ) + + if subscription: + print(f"Subscription {subscription.name} created") + else: + raise ValueError("Subscription creation failed.") + + # Subscribe on the created subscription. + try: + streaming_pull_future = subscriber.subscribe( + subscription.name, callback=callback + ) + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. + except Exception as e: + print( + f"Exception occurred when creating subscription and subscribing to it: {e}" + ) + except Exception as e: + print(f"Exception occurred when attempting optimistic subscribe: {e}") + # [END pubsub_optimistic_subscribe] + + def create_subscription_with_dead_letter_topic( project_id: str, topic_id: str, @@ -1161,6 +1241,15 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: remove_dead_letter_policy_parser.add_argument("topic_id") remove_dead_letter_policy_parser.add_argument("subscription_id") + optimistic_subscribe_parser = subparsers.add_parser( + "optimistic-subscribe", help=optimistic_subscribe.__doc__ + ) + optimistic_subscribe_parser.add_argument("topic_id") + optimistic_subscribe_parser.add_argument("subscription_id") + optimistic_subscribe_parser.add_argument( + "timeout", default=None, type=float, nargs="?" 
+ ) + receive_parser = subparsers.add_parser("receive", help=receive_messages.__doc__) receive_parser.add_argument("subscription_id") receive_parser.add_argument("timeout", default=None, type=float, nargs="?") @@ -1303,6 +1392,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: ) elif args.command == "remove-dead-letter-policy": remove_dead_letter_policy(args.project_id, args.topic_id, args.subscription_id) + elif args.command == "optimistic-subscribe": + optimistic_subscribe( + args.project_id, args.topic_id, args.subscription_id, args.timeout + ) elif args.command == "receive": receive_messages(args.project_id, args.subscription_id, args.timeout) elif args.command == "receive-custom-attributes": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 53fefa109c0b..86f7a94cef93 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -234,6 +234,56 @@ def test_create_subscription( subscriber_client.delete_subscription(request={"subscription": subscription_path}) +def test_optimistic_subscribe( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + publisher_client: pubsub_v1.PublisherClient, + capsys: CaptureFixture[str], +) -> None: + subscription_id = f"subscription_for_optimistic_subscribe-{PY_VERSION}-{UUID}" + subscription_path = subscriber_client.subscription_path(PROJECT_ID, subscription_id) + # Ensure there is no pre-existing subscription. + # So that we can test the case where optimistic subscribe fails. + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) + except NotFound: + pass + + # Invoke optimistic_subscribe when the subscription is not present. + # This tests scenario where optimistic subscribe fails. 
+ subscriber.optimistic_subscribe(PROJECT_ID, TOPIC, subscription_id, 5) + out, _ = capsys.readouterr() + # Verify optimistic subscription failed. + assert f"Subscription {subscription_path} not found, creating it." in out + # Verify that subscription created due to optimistic subscribe failure. + assert f"Subscription {subscription_path} created" in out + # Verify that subscription didn't already exist. + assert "Successfully subscribed until the timeout passed." not in out + + # Invoke optimistic_subscribe when the subscription is present. + # This tests scenario where optimistic subscribe succeeds. + subscriber.optimistic_subscribe(PROJECT_ID, TOPIC, subscription_id, 5) + + out, _ = capsys.readouterr() + # Verify optimistic subscription succeeded. + assert f"Subscription {subscription_path} not found, creating it." not in out + # Verify that subscription was not created due to optimistic subscribe failure. + assert f"Subscription {subscription_path} created" not in out + # Verify that subscription already existed. + assert "Successfully subscribed until the timeout passed." in out + + # Test case where optimistic subscribe throws an exception other than NotFound + # or TimeoutError. + subscriber.optimistic_subscribe(PROJECT_ID, TOPIC, "123", 5) + out, _ = capsys.readouterr() + assert "Exception occurred when attempting optimistic subscribe:" in out + + # Clean up resources created during test. 
+ subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + def test_create_subscription_with_dead_letter_policy( subscriber_client: pubsub_v1.SubscriberClient, dead_letter_topic: str, From 8a0a4ba02fd40097a80f307fcf105c72a6835e64 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 18 Jun 2024 07:45:42 -0700 Subject: [PATCH 1082/1197] chore(main): release 2.21.4 (#1196) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index f893c80673a8..2dc14de525c4 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.21.3" + ".": "2.21.4" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bc4d2f3d08c9..bbb545d506e4 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.21.4](https://github.com/googleapis/python-pubsub/compare/v2.21.3...v2.21.4) (2024-06-18) + + +### Documentation + +* **samples:** Add code sample for optimistic subscribe ([#1182](https://github.com/googleapis/python-pubsub/issues/1182)) ([d8e8aa5](https://github.com/googleapis/python-pubsub/commit/d8e8aa59ab0288fdaf5a1cc5e476581e73d0f82c)) + ## 
[2.21.3](https://github.com/googleapis/python-pubsub/compare/v2.21.2...v2.21.3) (2024-06-10) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 71dc4e9b4332..b51b6dad8ee6 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.3" # {x-release-please-version} +__version__ = "2.21.4" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 71dc4e9b4332..b51b6dad8ee6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.3" # {x-release-please-version} +__version__ = "2.21.4" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 74c5d4caafe9..a3e95a2fcced 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.21.3" + "version": "2.21.4" }, "snippets": [ { From 8265359f67da88e66f6c0c731633598f6f8f4113 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Thu, 20 Jun 2024 11:14:20 -0400 Subject: [PATCH 1083/1197] chore: Remove restriction that protobuf should be less than 5 (#1193) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/pytest.ini | 4 +++- packages/google-cloud-pubsub/setup.py | 2 +- packages/google-cloud-pubsub/testing/constraints-3.7.txt | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 865897dd552c..34f39331626c 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -17,4 +17,6 @@ filterwarnings = # Added to suppress "DeprecationWarning: pkg_resources is deprecated as an API. 
See https://setuptools.pypa.io/en/latest/pkg_resources.html" # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 ignore:.*pkg_resources.declare_namespace:DeprecationWarning - ignore:.*pkg_resources is deprecated as an API:DeprecationWarning \ No newline at end of file + ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove once https://github.com/googleapis/python-pubsub/issues/1206 is fixed. + ignore:.*connections\(\) is deprecated and will be removed; use net_connections\(\) instead:DeprecationWarning \ No newline at end of file diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index a6af312072db..dbb66cf7ca9a 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -42,7 +42,7 @@ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", "grpcio-status >= 1.33.2", ] diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt index ee447eb62c89..08db5de87ef9 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.7.txt @@ -7,7 +7,7 @@ google-api-core==1.34.0 google-auth==2.14.1 proto-plus==1.22.0 -protobuf==3.19.5 +protobuf==3.20.2 grpc-google-iam-v1==0.12.4 grpcio==1.51.3 grpcio-status==1.33.2 From 89a9eb11def18c5cc102828b3da3ad6f6c4b5941 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 20 Jun 2024 12:59:23 -0400 Subject: 
[PATCH 1084/1197] chore(main): release 2.21.5 (#1207) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 2dc14de525c4..cd075e2620a5 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.21.4" + ".": "2.21.5" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bbb545d506e4..96442ed5fb6a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.21.5](https://github.com/googleapis/python-pubsub/compare/v2.21.4...v2.21.5) (2024-06-20) + + +### Bug Fixes + +* Allow Protobuf 5.x ([a369f04](https://github.com/googleapis/python-pubsub/commit/a369f04c46e4b3db34dcf8cc2ef7cda4ea491e26)) + ## [2.21.4](https://github.com/googleapis/python-pubsub/compare/v2.21.3...v2.21.4) (2024-06-18) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index b51b6dad8ee6..1f7391fd4f17 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.4" # {x-release-please-version} +__version__ = "2.21.5" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index b51b6dad8ee6..1f7391fd4f17 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.4" # {x-release-please-version} +__version__ = "2.21.5" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index a3e95a2fcced..2ab8d41b77f4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.21.4" + "version": "2.21.5" }, "snippets": [ { From 8d4f3720868b7b282decb85e39d393e078f96545 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 3 Jul 2024 12:28:05 -0700 Subject: [PATCH 1085/1197] chore: update templated files (#1211) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.flake8 | 2 +- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/auto-label.yaml | 2 +- packages/google-cloud-pubsub/.kokoro/build.sh | 2 +- .../.kokoro/docker/docs/Dockerfile | 2 +- .../.kokoro/populate-secrets.sh | 2 +- .../.kokoro/publish-docs.sh | 2 +- .../google-cloud-pubsub/.kokoro/release.sh | 2 +- .../.kokoro/requirements.txt | 509 +++++++++--------- .../.kokoro/test-samples-against-head.sh | 2 +- .../.kokoro/test-samples-impl.sh | 2 +- 
.../.kokoro/test-samples.sh | 2 +- .../google-cloud-pubsub/.kokoro/trampoline.sh | 2 +- .../.kokoro/trampoline_v2.sh | 2 +- .../.pre-commit-config.yaml | 2 +- packages/google-cloud-pubsub/.trampolinerc | 2 +- packages/google-cloud-pubsub/MANIFEST.in | 2 +- packages/google-cloud-pubsub/docs/conf.py | 2 +- packages/google-cloud-pubsub/noxfile.py | 54 +- .../scripts/decrypt-secrets.sh | 2 +- .../scripts/readme-gen/readme_gen.py | 2 +- 21 files changed, 328 insertions(+), 275 deletions(-) diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 87f6e408c47d..32986c79287a 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 81f87c56917d..91d742b5b9fe 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5a4c19d17e597b92d786e569be101e636c9c2817731f80a5adec56b2aa8fe070 -# created: 2024-04-12T11:35:58.922854369Z + digest: sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 +# created: 2024-07-03T17:43:00.77142528Z diff --git a/packages/google-cloud-pubsub/.github/auto-label.yaml b/packages/google-cloud-pubsub/.github/auto-label.yaml index 8b37ee89711f..21786a4eb085 100644 --- a/packages/google-cloud-pubsub/.github/auto-label.yaml +++ b/packages/google-cloud-pubsub/.github/auto-label.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index b3d9152963b8..90e690e7a8bf 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index bdaf39fe22d0..a26ce61930f5 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh index 6f3972140e80..c435402f473e 100755 --- a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh +++ b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC. +# Copyright 2024 Google LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index 9eafe0be3bba..38f083f05aa0 100755 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index c333c366defe..9b17b081a3d8 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 51f92b8e12f1..35ece0e4d2e9 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -4,21 +4,25 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f +argcomplete==3.4.0 \ + --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 +attrs==23.2.0 \ + --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ + --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 +backports-tarfile==1.2.0 \ + --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ + --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 + # via jaraco-context +cachetools==5.3.3 \ + --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ + --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 +certifi==2024.6.2 \ + 
--hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ + --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -87,90 +91,90 @@ click==8.0.4 \ # -r requirements.in # gcp-docuploader # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 +colorlog==6.8.2 \ + --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ + --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via # gcp-docuploader # nox -cryptography==42.0.5 \ - --hash=sha256:0270572b8bd2c833c3981724b8ee9747b3ec96f699a9665470018594301439ee \ - --hash=sha256:111a0d8553afcf8eb02a4fea6ca4f59d48ddb34497aa8706a6cf536f1a5ec576 \ - --hash=sha256:16a48c23a62a2f4a285699dba2e4ff2d1cff3115b9df052cdd976a18856d8e3d \ - --hash=sha256:1b95b98b0d2af784078fa69f637135e3c317091b615cd0905f8b8a087e86fa30 \ - --hash=sha256:1f71c10d1e88467126f0efd484bd44bca5e14c664ec2ede64c32f20875c0d413 \ - --hash=sha256:2424ff4c4ac7f6b8177b53c17ed5d8fa74ae5955656867f5a8affaca36a27abb \ - --hash=sha256:2bce03af1ce5a5567ab89bd90d11e7bbdff56b8af3acbbec1faded8f44cb06da \ - --hash=sha256:329906dcc7b20ff3cad13c069a78124ed8247adcac44b10bea1130e36caae0b4 \ - --hash=sha256:37dd623507659e08be98eec89323469e8c7b4c1407c85112634ae3dbdb926fdd \ - --hash=sha256:3eaafe47ec0d0ffcc9349e1708be2aaea4c6dd4978d76bf6eb0cb2c13636c6fc \ - --hash=sha256:5e6275c09d2badf57aea3afa80d975444f4be8d3bc58f7f80d2a484c6f9485c8 \ - --hash=sha256:6fe07eec95dfd477eb9530aef5bead34fec819b3aaf6c5bd6d20565da607bfe1 \ - --hash=sha256:7367d7b2eca6513681127ebad53b2582911d1736dc2ffc19f2c3ae49997496bc \ - --hash=sha256:7cde5f38e614f55e28d831754e8a3bacf9ace5d1566235e39d91b35502d6936e \ - 
--hash=sha256:9481ffe3cf013b71b2428b905c4f7a9a4f76ec03065b05ff499bb5682a8d9ad8 \ - --hash=sha256:98d8dc6d012b82287f2c3d26ce1d2dd130ec200c8679b6213b3c73c08b2b7940 \ - --hash=sha256:a011a644f6d7d03736214d38832e030d8268bcff4a41f728e6030325fea3e400 \ - --hash=sha256:a2913c5375154b6ef2e91c10b5720ea6e21007412f6437504ffea2109b5a33d7 \ - --hash=sha256:a30596bae9403a342c978fb47d9b0ee277699fa53bbafad14706af51fe543d16 \ - --hash=sha256:b03c2ae5d2f0fc05f9a2c0c997e1bc18c8229f392234e8a0194f202169ccd278 \ - --hash=sha256:b6cd2203306b63e41acdf39aa93b86fb566049aeb6dc489b70e34bcd07adca74 \ - --hash=sha256:b7ffe927ee6531c78f81aa17e684e2ff617daeba7f189f911065b2ea2d526dec \ - --hash=sha256:b8cac287fafc4ad485b8a9b67d0ee80c66bf3574f655d3b97ef2e1082360faf1 \ - --hash=sha256:ba334e6e4b1d92442b75ddacc615c5476d4ad55cc29b15d590cc6b86efa487e2 \ - --hash=sha256:ba3e4a42397c25b7ff88cdec6e2a16c2be18720f317506ee25210f6d31925f9c \ - --hash=sha256:c41fb5e6a5fe9ebcd58ca3abfeb51dffb5d83d6775405305bfa8715b76521922 \ - --hash=sha256:cd2030f6650c089aeb304cf093f3244d34745ce0cfcc39f20c6fbfe030102e2a \ - --hash=sha256:cd65d75953847815962c84a4654a84850b2bb4aed3f26fadcc1c13892e1e29f6 \ - --hash=sha256:e4985a790f921508f36f81831817cbc03b102d643b5fcb81cd33df3fa291a1a1 \ - --hash=sha256:e807b3188f9eb0eaa7bbb579b462c5ace579f1cedb28107ce8b48a9f7ad3679e \ - --hash=sha256:f12764b8fffc7a123f641d7d049d382b73f96a34117e0b637b80643169cec8ac \ - --hash=sha256:f8837fe1d6ac4a8052a9a8ddab256bc006242696f03368a4009be7ee3075cdb7 +cryptography==42.0.8 \ + --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ + --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ + --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ + --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ + --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ + 
--hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ + --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ + --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ + --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ + --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ + --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ + --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ + --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ + --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ + --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ + --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ + --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ + --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ + --hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ + --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ + --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ + --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ + --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ + --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ + --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ + --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ + --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ + --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ + --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ + 
--hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ + --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ + --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 +distlib==0.3.8 \ + --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ + --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b +docutils==0.21.2 \ + --hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ + --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.0 \ - --hash=sha256:3d73480b50ba243f22d7c7ec08b115a30e1c7817c4899781840c26f9c55b8277 \ - --hash=sha256:7aa9fd935ec61e581eb8458ad00823786d91756c25e492f372b2b30962f3c28f +gcp-releasetool==2.0.1 \ + --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ + 
--hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 +google-api-core==2.19.1 \ + --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ + --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd # via # google-cloud-core # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 +google-auth==2.31.0 \ + --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ + --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 # via # gcp-releasetool # google-api-core # google-cloud-core # google-cloud-storage -google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 +google-cloud-storage==2.17.0 \ + --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ + --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 # via gcp-docuploader google-crc32c==1.5.0 \ --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ @@ -244,28 +248,36 @@ google-crc32c==1.5.0 \ # via # google-cloud-storage # 
google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b +google-resumable-media==2.7.1 \ + --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ + --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b +googleapis-common-protos==1.63.2 \ + --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ + --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 # via google-api-core idna==3.7 \ --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 +importlib-metadata==8.0.0 \ + --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ + --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 # via # -r requirements.in # keyring # twine -jaraco-classes==3.3.0 \ - --hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 +jaraco-classes==3.4.0 \ + --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ + --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 + # via keyring +jaraco-context==5.3.0 \ + --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ + 
--hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 + # via keyring +jaraco-functools==4.0.1 \ + --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ + --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -273,13 +285,13 @@ jeepney==0.8.0 \ # via # keyring # secretstorage -jinja2==3.1.3 \ - --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \ - --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90 +jinja2==3.1.4 \ + --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ + --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 +keyring==25.2.1 \ + --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ + --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b # via # gcp-releasetool # twine @@ -287,146 +299,153 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - 
--hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - 
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - 
--hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 +markupsafe==2.1.5 \ + --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ + --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ + --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ + --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ + --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ + --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ + --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ + --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ + --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ + --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ + --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ + --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ + --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ + --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ + --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ + --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ + --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ + 
--hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ + --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ + --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ + --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ + --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ + --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ + --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ + --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ + --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ + --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ + --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ + --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ + --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ + --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ + --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ + --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ + --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ + --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ + --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ + --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ + --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ + --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ + --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ + --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ + 
--hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ + --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ + --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ + --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ + --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ + --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ + --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ + --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ + --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ + --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ + --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ + --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ + --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ + --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ + --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ + --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ + --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ + --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ + --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes 
-nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - --hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 +more-itertools==10.3.0 \ + --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ + --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 + # via + # jaraco-classes + # jaraco-functools +nh3==0.2.17 \ + --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ + --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ + --hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ + --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ + 
--hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ + --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ + --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ + --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ + --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ + --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ + --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ + --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ + --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ + --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ + --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ + --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via # gcp-releasetool # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 +pkginfo==1.10.0 \ 
+ --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ + --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -protobuf==4.25.3 \ - --hash=sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4 \ - --hash=sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8 \ - --hash=sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c \ - --hash=sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d \ - --hash=sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4 \ - --hash=sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa \ - --hash=sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c \ - --hash=sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019 \ - --hash=sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9 \ - --hash=sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c \ - --hash=sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2 +proto-plus==1.24.0 \ + --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ + --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 + # via google-api-core +protobuf==5.27.2 \ + --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ + --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ + --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ + 
--hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ + --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ + --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ + --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ + --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ + --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ + --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ + --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos -pyasn1==0.5.0 \ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde + # proto-plus +pyasn1==0.6.0 \ + --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ + --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 # via # pyasn1-modules # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d +pyasn1-modules==0.4.0 \ + --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ + --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 +pycparser==2.22 \ + --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ + --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc # via cffi -pygments==2.16.1 \ - 
--hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 +pygments==2.18.0 \ + --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ + --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a # via # readme-renderer # rich @@ -434,20 +453,20 @@ pyjwt==2.8.0 \ --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 +pyperclip==1.9.0 \ + --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 +readme-renderer==43.0 \ + --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ + --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 # via # 
gcp-releasetool # google-api-core @@ -462,9 +481,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef +rich==13.7.1 \ + --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ + --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -480,35 +499,39 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +twine==5.1.1 \ + --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ + --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef +typing-extensions==4.12.2 \ + --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ + --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 # via -r requirements.in -urllib3==2.0.7 \ - --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \ - --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e +urllib3==2.2.2 \ + 
--hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ + --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 # via # requests # twine -virtualenv==20.24.6 \ - --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \ - --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381 +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox -wheel==0.41.3 \ - --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \ - --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841 +wheel==0.43.0 \ + --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ + --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 # via -r requirements.in -zipp==3.17.0 \ - --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \ - --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0 +zipp==3.19.2 \ + --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ + --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==69.2.0 \ - --hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \ - --hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c +setuptools==70.2.0 \ + --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ + --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 # via -r requirements.in diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh index 63ac41dfae1d..e9d8bd79a644 
100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh index 5a0f5fab6a89..55910c8ba178 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh index 50b35a48c190..7933d820149a 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline.sh b/packages/google-cloud-pubsub/.kokoro/trampoline.sh index d85b1f267693..48f79699706e 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh index 59a7cf3a9373..35fa529231dc 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.pre-commit-config.yaml b/packages/google-cloud-pubsub/.pre-commit-config.yaml index 6a8e16950664..1d74695f70b6 100644 --- a/packages/google-cloud-pubsub/.pre-commit-config.yaml +++ b/packages/google-cloud-pubsub/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/.trampolinerc b/packages/google-cloud-pubsub/.trampolinerc index a7dfeb42c6d0..0080152373d5 100644 --- a/packages/google-cloud-pubsub/.trampolinerc +++ b/packages/google-cloud-pubsub/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index e0a66705318e..d6814cd60037 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/docs/conf.py b/packages/google-cloud-pubsub/docs/conf.py index bacbcd5653e5..44d92cca7cbd 100644 --- a/packages/google-cloud-pubsub/docs/conf.py +++ b/packages/google-cloud-pubsub/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 61e73e0d4a6d..8c9256d5cd2a 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -214,14 +214,28 @@ def install_unittest_dependencies(session, *constraints): session.install("-e", ".", *constraints) -def default(session): +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) install_unittest_dependencies(session, "-c", constraints_path) + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + # Run py.test against the unit tests. 
session.run( "py.test", @@ -235,15 +249,12 @@ def default(session): "--cov-fail-under=0", os.path.join("tests", "unit"), *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) -@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) -def unit(session): - """Run the unit test suite.""" - default(session) - - def install_systemtest_dependencies(session, *constraints): # Use pre-release gRPC for system tests. # Exclude version 1.52.0rc1 which has a known issue. @@ -412,9 +423,16 @@ def docfx(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) -def prerelease_deps(session): +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + session.skip("cpp implementation is not supported in python 3.11+") + # Install all dependencies session.install("-e", ".[all, tests, tracing]") unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES @@ -449,9 +467,9 @@ def prerelease_deps(session): "protobuf", # dependency of grpc "six", + "grpc-google-iam-v1", "googleapis-common-protos", - # Exclude version 1.52.0rc1 which has a known issue. 
See https://github.com/grpc/grpc/issues/32163 - "grpcio!=1.52.0rc1", + "grpcio", "grpcio-status", "google-api-core", "google-auth", @@ -477,7 +495,13 @@ def prerelease_deps(session): session.run("python", "-c", "import grpc; print(grpc.__version__)") session.run("python", "-c", "import google.auth; print(google.auth.__version__)") - session.run("py.test", "tests/unit") + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -490,6 +514,9 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) if os.path.exists(system_test_folder_path): session.run( @@ -498,4 +525,7 @@ def prerelease_deps(session): f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, ) diff --git a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh index 0018b421ddf8..120b0ddc4364 100755 --- a/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh +++ b/packages/google-cloud-pubsub/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py index 1acc119835b5..8f5e248a0da1 100644 --- a/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py +++ b/packages/google-cloud-pubsub/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. From 426932643e64a18b7caa75929e23c4d3ef880a70 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 5 Jul 2024 08:49:04 -0400 Subject: [PATCH 1086/1197] chore(python): Use latest python runtime in prerelease_deps session (#1212) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/noxfile.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 91d742b5b9fe..76524393faf1 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:d3de8a02819f65001effcbd3ea76ce97e9bcff035c7a89457f40f892c87c5b32 -# created: 2024-07-03T17:43:00.77142528Z + digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e +# created: 2024-07-04T19:38:10.086106449Z diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 8c9256d5cd2a..1d9b72ca71a7 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -422,7 +422,7 @@ def docfx(session): ) -@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +@nox.session(python="3.12") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], From 8d8d068edde230c2f8e5accaa0eec7ccecc1734a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 5 Jul 2024 14:53:22 +0200 Subject: [PATCH 1087/1197] chore(deps): update all dependencies (#1143) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- .../samples/snippets/requirements-test.txt | 6 +++--- .../google-cloud-pubsub/samples/snippets/requirements.txt | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index ab0de59c1d7d..43e018272efc 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,7 +1,7 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' -pytest==8.1.1; python_version >= '3.8' +pytest==8.2.2; python_version >= '3.8' mock==5.1.0 flaky==3.8.1 -google-cloud-bigquery==3.20.1 -google-cloud-storage==2.16.0 +google-cloud-bigquery==3.25.0 +google-cloud-storage==2.17.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt 
index 5ea08a511447..9955737c3037 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ -google-cloud-pubsub==2.21.1 +google-cloud-pubsub==2.21.5 avro==1.11.3 protobuf===4.24.4; python_version == '3.7' -protobuf==4.25.2; python_version >= '3.8' +protobuf==5.27.2; python_version >= '3.8' avro==1.11.3 From 8aabc962236892d39b0f7225e72e498d90d9e233 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 5 Jul 2024 21:23:36 -0700 Subject: [PATCH 1088/1197] feat: add use_topic_schema for Cloud Storage Subscriptions (#1154) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- .../services/publisher/async_client.py | 256 +-- .../pubsub_v1/services/publisher/client.py | 114 +- .../services/publisher/transports/base.py | 4 +- .../services/publisher/transports/grpc.py | 28 +- .../publisher/transports/grpc_asyncio.py | 174 +- .../services/schema_service/async_client.py | 270 +-- .../services/schema_service/client.py | 124 +- .../schema_service/transports/base.py | 4 +- .../schema_service/transports/grpc.py | 28 +- .../schema_service/transports/grpc_asyncio.py | 174 +- .../services/subscriber/async_client.py | 432 ++-- .../pubsub_v1/services/subscriber/client.py | 174 +- .../services/subscriber/transports/base.py | 4 +- .../services/subscriber/transports/grpc.py | 28 +- .../subscriber/transports/grpc_asyncio.py | 277 ++- .../google/pubsub_v1/types/pubsub.py | 38 + packages/google-cloud-pubsub/owlbot.py | 6 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../unit/gapic/pubsub_v1/test_publisher.py | 1102 +++++++++- .../gapic/pubsub_v1/test_schema_service.py | 1214 ++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 1924 ++++++++++++++++- .../subscriber/test_subscriber_client.py | 7 +- 22 files changed, 5424 
insertions(+), 960 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 700c99c02d09..2b6df487d012 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -198,7 +200,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, PublisherTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, PublisherTransport, Callable[..., PublisherTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -210,9 +214,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.PublisherTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,PublisherTransport,Callable[..., PublisherTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PublisherTransport constructor. + If set to None, a transport is chosen automatically. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -323,8 +329,8 @@ async def sample_create_topic(): A topic resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -332,7 +338,10 @@ async def sample_create_topic(): "the individual field arguments should be set." ) - request = pubsub.Topic(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.Topic): + request = pubsub.Topic(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -341,20 +350,9 @@ async def sample_create_topic(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_topic, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_topic + ] # Certain fields should be provided within the metadata header; # add these here. @@ -451,8 +449,8 @@ async def sample_update_topic(): A topic resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -460,7 +458,10 @@ async def sample_update_topic(): "the individual field arguments should be set." ) - request = pubsub.UpdateTopicRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateTopicRequest): + request = pubsub.UpdateTopicRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -471,20 +472,9 @@ async def sample_update_topic(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_topic, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_topic + ] # Certain fields should be provided within the metadata header; # add these here. @@ -575,8 +565,8 @@ async def sample_publish(): Response for the Publish method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic, messages]) if request is not None and has_flattened_params: raise ValueError( @@ -584,7 +574,10 @@ async def sample_publish(): "the individual field arguments should be set." 
) - request = pubsub.PublishRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.PublishRequest): + request = pubsub.PublishRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -595,26 +588,7 @@ async def sample_publish(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.publish, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=4, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.Cancelled, - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.publish] # Certain fields should be provided within the metadata header; # add these here. @@ -695,8 +669,8 @@ async def sample_get_topic(): A topic resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -704,7 +678,10 @@ async def sample_get_topic(): "the individual field arguments should be set." ) - request = pubsub.GetTopicRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, pubsub.GetTopicRequest): + request = pubsub.GetTopicRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -713,22 +690,9 @@ async def sample_get_topic(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_topic, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_topic + ] # Certain fields should be provided within the metadata header; # add these here. @@ -814,8 +778,8 @@ async def sample_list_topics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -823,7 +787,10 @@ async def sample_list_topics(): "the individual field arguments should be set." ) - request = pubsub.ListTopicsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicsRequest): + request = pubsub.ListTopicsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -832,22 +799,9 @@ async def sample_list_topics(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_topics, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topics + ] # Certain fields should be provided within the metadata header; # add these here. @@ -944,8 +898,8 @@ async def sample_list_topic_subscriptions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -953,7 +907,10 @@ async def sample_list_topic_subscriptions(): "the individual field arguments should be set." ) - request = pubsub.ListTopicSubscriptionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): + request = pubsub.ListTopicSubscriptionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -962,22 +919,9 @@ async def sample_list_topic_subscriptions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_topic_subscriptions, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topic_subscriptions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1078,8 +1022,8 @@ async def sample_list_topic_snapshots(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -1087,7 +1031,10 @@ async def sample_list_topic_snapshots(): "the individual field arguments should be set." ) - request = pubsub.ListTopicSnapshotsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListTopicSnapshotsRequest): + request = pubsub.ListTopicSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1096,22 +1043,9 @@ async def sample_list_topic_snapshots(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_topic_snapshots, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_topic_snapshots + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1199,8 +1133,8 @@ async def sample_delete_topic(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -1208,7 +1142,10 @@ async def sample_delete_topic(): "the individual field arguments should be set." ) - request = pubsub.DeleteTopicRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteTopicRequest): + request = pubsub.DeleteTopicRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1217,20 +1154,9 @@ async def sample_delete_topic(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_topic, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_topic + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1307,24 +1233,16 @@ async def sample_detach_subscription(): """ # Create or coerce a protobuf request object. - request = pubsub.DetachSubscriptionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DetachSubscriptionRequest): + request = pubsub.DetachSubscriptionRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.detach_subscription, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.detach_subscription + ] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 43ccc6df351e..f07c4bbd497c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -19,6 +19,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -579,7 +580,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, PublisherTransport]] = None, + transport: Optional[ + Union[str, PublisherTransport, Callable[..., PublisherTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -591,9 +594,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, PublisherTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,PublisherTransport,Callable[..., PublisherTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the PublisherTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
@@ -699,17 +704,24 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) + transport_init: Union[ + Type[PublisherTransport], Callable[..., PublisherTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., PublisherTransport], transport) + ) + # initialize with the provided callable or the passed in class emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(Transport, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) - Transport = functools.partial(Transport, channel=channel) + transport_init = functools.partial(transport_init, channel=channel) - self._transport = Transport( + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -788,8 +800,8 @@ def sample_create_topic(): A topic resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -797,10 +809,8 @@ def sample_create_topic(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.Topic. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, pubsub.Topic): request = pubsub.Topic(request) # If we have keyword arguments corresponding to fields on the @@ -907,8 +917,8 @@ def sample_update_topic(): A topic resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -916,10 +926,8 @@ def sample_update_topic(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.UpdateTopicRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.UpdateTopicRequest): request = pubsub.UpdateTopicRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1022,8 +1030,8 @@ def sample_publish(): Response for the Publish method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic, messages]) if request is not None and has_flattened_params: raise ValueError( @@ -1031,10 +1039,8 @@ def sample_publish(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.PublishRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.PublishRequest): request = pubsub.PublishRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1127,8 +1133,8 @@ def sample_get_topic(): A topic resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -1136,10 +1142,8 @@ def sample_get_topic(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.GetTopicRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.GetTopicRequest): request = pubsub.GetTopicRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1235,8 +1239,8 @@ def sample_list_topics(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1244,10 +1248,8 @@ def sample_list_topics(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListTopicsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.ListTopicsRequest): request = pubsub.ListTopicsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1354,8 +1356,8 @@ def sample_list_topic_subscriptions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -1363,10 +1365,8 @@ def sample_list_topic_subscriptions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListTopicSubscriptionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.ListTopicSubscriptionsRequest): request = pubsub.ListTopicSubscriptionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1477,8 +1477,8 @@ def sample_list_topic_snapshots(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -1486,10 +1486,8 @@ def sample_list_topic_snapshots(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListTopicSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.ListTopicSnapshotsRequest): request = pubsub.ListTopicSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1587,8 +1585,8 @@ def sample_delete_topic(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([topic]) if request is not None and has_flattened_params: raise ValueError( @@ -1596,10 +1594,8 @@ def sample_delete_topic(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DeleteTopicRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.DeleteTopicRequest): request = pubsub.DeleteTopicRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1686,10 +1682,8 @@ def sample_detach_subscription(): """ # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DetachSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.DetachSubscriptionRequest): request = pubsub.DetachSubscriptionRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 47fea83e696e..800ba82ce723 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -88,6 +88,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. 
@@ -100,7 +102,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 0fb520404de0..b6e07b21ebaa 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -54,7 +54,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -74,14 +74,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. 
+ ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -91,11 +94,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -121,9 +124,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -162,7 +166,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 2d5c0137dc55..3d98d6b5164c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -69,7 +71,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
@@ -99,7 +100,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -119,15 +120,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -137,11 +141,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. 
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -167,9 +171,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -207,7 +212,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -575,6 +582,151 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_topic: gapic_v1.method_async.wrap_method( + self.create_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_topic: gapic_v1.method_async.wrap_method( + self.update_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.publish: gapic_v1.method_async.wrap_method( + self.publish, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=4, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.Cancelled, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_topic: gapic_v1.method_async.wrap_method( + self.get_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topics: gapic_v1.method_async.wrap_method( + self.list_topics, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_subscriptions: gapic_v1.method_async.wrap_method( + self.list_topic_subscriptions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_topic_snapshots: gapic_v1.method_async.wrap_method( + self.list_topic_snapshots, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_topic: gapic_v1.method_async.wrap_method( + self.delete_topic, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.detach_subscription: gapic_v1.method_async.wrap_method( + self.detach_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + 
default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index c0eb61075563..f8d1ac4da7ec 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -37,6 +38,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -197,7 +199,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SchemaServiceTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, SchemaServiceTransport, Callable[..., SchemaServiceTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -209,9 +213,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.SchemaServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SchemaServiceTransport,Callable[..., SchemaServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SchemaServiceTransport constructor. 
+ If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -340,8 +346,8 @@ async def sample_create_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema, schema_id]) if request is not None and has_flattened_params: raise ValueError( @@ -349,7 +355,10 @@ async def sample_create_schema(): "the individual field arguments should be set." ) - request = gp_schema.CreateSchemaRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.CreateSchemaRequest): + request = gp_schema.CreateSchemaRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -362,20 +371,9 @@ async def sample_create_schema(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_schema, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_schema + ] # Certain fields should be provided within the metadata header; # add these here. @@ -455,8 +453,8 @@ async def sample_get_schema(): A schema resource. """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -464,7 +462,10 @@ async def sample_get_schema(): "the individual field arguments should be set." ) - request = schema.GetSchemaRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.GetSchemaRequest): + request = schema.GetSchemaRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -473,20 +474,9 @@ async def sample_get_schema(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_schema, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_schema + ] # Certain fields should be provided within the metadata header; # add these here. @@ -571,8 +561,8 @@ async def sample_list_schemas(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -580,7 +570,10 @@ async def sample_list_schemas(): "the individual field arguments should be set." ) - request = schema.ListSchemasRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ListSchemasRequest): + request = schema.ListSchemasRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -589,20 +582,9 @@ async def sample_list_schemas(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_schemas, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_schemas + ] # Certain fields should be provided within the metadata header; # add these here. @@ -696,8 +678,8 @@ async def sample_list_schema_revisions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -705,7 +687,10 @@ async def sample_list_schema_revisions(): "the individual field arguments should be set." 
) - request = schema.ListSchemaRevisionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ListSchemaRevisionsRequest): + request = schema.ListSchemaRevisionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -714,20 +699,9 @@ async def sample_list_schema_revisions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_schema_revisions, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_schema_revisions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -828,8 +802,8 @@ async def sample_commit_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, schema]) if request is not None and has_flattened_params: raise ValueError( @@ -837,7 +811,10 @@ async def sample_commit_schema(): "the individual field arguments should be set." ) - request = gp_schema.CommitSchemaRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, gp_schema.CommitSchemaRequest): + request = gp_schema.CommitSchemaRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -848,20 +825,9 @@ async def sample_commit_schema(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.commit_schema, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.commit_schema + ] # Certain fields should be provided within the metadata header; # add these here. @@ -954,8 +920,8 @@ async def sample_rollback_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, revision_id]) if request is not None and has_flattened_params: raise ValueError( @@ -963,7 +929,10 @@ async def sample_rollback_schema(): "the individual field arguments should be set." ) - request = schema.RollbackSchemaRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.RollbackSchemaRequest): + request = schema.RollbackSchemaRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -974,20 +943,9 @@ async def sample_rollback_schema(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.rollback_schema, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.rollback_schema + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1078,8 +1036,8 @@ async def sample_delete_schema_revision(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, revision_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1087,7 +1045,10 @@ async def sample_delete_schema_revision(): "the individual field arguments should be set." ) - request = schema.DeleteSchemaRevisionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.DeleteSchemaRevisionRequest): + request = schema.DeleteSchemaRevisionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1098,20 +1059,9 @@ async def sample_delete_schema_revision(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_schema_revision, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_schema_revision + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1184,8 +1134,8 @@ async def sample_delete_schema(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1193,7 +1143,10 @@ async def sample_delete_schema(): "the individual field arguments should be set." ) - request = schema.DeleteSchemaRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.DeleteSchemaRequest): + request = schema.DeleteSchemaRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1202,20 +1155,9 @@ async def sample_delete_schema(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_schema, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_schema + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1306,8 +1248,8 @@ async def sample_validate_schema(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema]) if request is not None and has_flattened_params: raise ValueError( @@ -1315,7 +1257,10 @@ async def sample_validate_schema(): "the individual field arguments should be set." ) - request = gp_schema.ValidateSchemaRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gp_schema.ValidateSchemaRequest): + request = gp_schema.ValidateSchemaRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1326,20 +1271,9 @@ async def sample_validate_schema(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.validate_schema, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.validate_schema + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1414,24 +1348,16 @@ async def sample_validate_message(): """ # Create or coerce a protobuf request object. - request = schema.ValidateMessageRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, schema.ValidateMessageRequest): + request = schema.ValidateMessageRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.validate_message, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.validate_message + ] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 72fbd44c825f..ca9d4f19fe6b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -19,6 +19,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -528,7 +529,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SchemaServiceTransport]] = None, + transport: Optional[ + Union[str, SchemaServiceTransport, Callable[..., SchemaServiceTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -540,9 +543,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SchemaServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SchemaServiceTransport,Callable[..., SchemaServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SchemaServiceTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
@@ -651,17 +656,24 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) + transport_init: Union[ + Type[SchemaServiceTransport], Callable[..., SchemaServiceTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SchemaServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(Transport, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) - Transport = functools.partial(Transport, channel=channel) + transport_init = functools.partial(transport_init, channel=channel) - self._transport = Transport( + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -758,8 +770,8 @@ def sample_create_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema, schema_id]) if request is not None and has_flattened_params: raise ValueError( @@ -767,10 +779,8 @@ def sample_create_schema(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gp_schema.CreateSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, gp_schema.CreateSchemaRequest): request = gp_schema.CreateSchemaRequest(request) # If we have keyword arguments corresponding to fields on the @@ -864,8 +874,8 @@ def sample_get_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -873,10 +883,8 @@ def sample_get_schema(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a schema.GetSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, schema.GetSchemaRequest): request = schema.GetSchemaRequest(request) # If we have keyword arguments corresponding to fields on the @@ -971,8 +979,8 @@ def sample_list_schemas(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( @@ -980,10 +988,8 @@ def sample_list_schemas(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a schema.ListSchemasRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, schema.ListSchemasRequest): request = schema.ListSchemasRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1087,8 +1093,8 @@ def sample_list_schema_revisions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1096,10 +1102,8 @@ def sample_list_schema_revisions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a schema.ListSchemaRevisionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, schema.ListSchemaRevisionsRequest): request = schema.ListSchemaRevisionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1210,8 +1214,8 @@ def sample_commit_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name, schema]) if request is not None and has_flattened_params: raise ValueError( @@ -1219,10 +1223,8 @@ def sample_commit_schema(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gp_schema.CommitSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, gp_schema.CommitSchemaRequest): request = gp_schema.CommitSchemaRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1327,8 +1329,8 @@ def sample_rollback_schema(): A schema resource. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, revision_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1336,10 +1338,8 @@ def sample_rollback_schema(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a schema.RollbackSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, schema.RollbackSchemaRequest): request = schema.RollbackSchemaRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1442,8 +1442,8 @@ def sample_delete_schema_revision(): A schema resource. """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, revision_id]) if request is not None and has_flattened_params: raise ValueError( @@ -1451,10 +1451,8 @@ def sample_delete_schema_revision(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a schema.DeleteSchemaRevisionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, schema.DeleteSchemaRevisionRequest): request = schema.DeleteSchemaRevisionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1539,8 +1537,8 @@ def sample_delete_schema(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( @@ -1548,10 +1546,8 @@ def sample_delete_schema(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a schema.DeleteSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, schema.DeleteSchemaRequest): request = schema.DeleteSchemaRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1652,8 +1648,8 @@ def sample_validate_schema(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, schema]) if request is not None and has_flattened_params: raise ValueError( @@ -1661,10 +1657,8 @@ def sample_validate_schema(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a gp_schema.ValidateSchemaRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, gp_schema.ValidateSchemaRequest): request = gp_schema.ValidateSchemaRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1751,10 +1745,8 @@ def sample_validate_message(): """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a schema.ValidateMessageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, schema.ValidateMessageRequest): request = schema.ValidateMessageRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 835fcd63e85a..5c7f35aa8ac6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -89,6 +89,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -101,7 +103,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index ffe490b22e59..421879d193d4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -54,7 +54,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = 
None, @@ -74,14 +74,17 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -91,11 +94,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
+ ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -121,9 +124,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -162,7 +166,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 78c44142ebae..71c362436eb2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -69,7 +71,6 @@ def create_channel( the credentials from the environment. 
credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -99,7 +100,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -119,15 +120,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -137,11 +141,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -167,9 +171,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -207,7 +212,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -586,6 +593,151 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_schema: gapic_v1.method_async.wrap_method( + self.create_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_schema: gapic_v1.method_async.wrap_method( + self.get_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_schemas: gapic_v1.method_async.wrap_method( + self.list_schemas, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_schema_revisions: gapic_v1.method_async.wrap_method( + self.list_schema_revisions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + 
self.commit_schema: gapic_v1.method_async.wrap_method( + self.commit_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.rollback_schema: gapic_v1.method_async.wrap_method( + self.rollback_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_schema_revision: gapic_v1.method_async.wrap_method( + self.delete_schema_revision, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_schema: gapic_v1.method_async.wrap_method( + self.delete_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.validate_schema: gapic_v1.method_async.wrap_method( + self.validate_schema, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.validate_message: gapic_v1.method_async.wrap_method( + self.validate_message, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): 
return self.grpc_channel.close() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 33d8de056fd4..1a7295131053 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -18,6 +18,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -41,6 +42,7 @@ from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore + try: OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] except AttributeError: # pragma: NO COVER @@ -204,7 +206,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, SubscriberTransport] = "grpc_asyncio", + transport: Optional[ + Union[str, SubscriberTransport, Callable[..., SubscriberTransport]] + ] = "grpc_asyncio", client_options: Optional[ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -216,9 +220,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.SubscriberTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SubscriberTransport,Callable[..., SubscriberTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SubscriberTransport constructor. + If set to None, a transport is chosen automatically. 
client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. @@ -397,8 +403,8 @@ async def sample_create_subscription(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) if request is not None and has_flattened_params: raise ValueError( @@ -406,7 +412,10 @@ async def sample_create_subscription(): "the individual field arguments should be set." ) - request = pubsub.Subscription(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.Subscription): + request = pubsub.Subscription(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -421,22 +430,9 @@ async def sample_create_subscription(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_subscription, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_subscription + ] # Certain fields should be provided within the metadata header; # add these here. @@ -521,8 +517,8 @@ async def sample_get_subscription(): """ # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: raise ValueError( @@ -530,7 +526,10 @@ async def sample_get_subscription(): "the individual field arguments should be set." ) - request = pubsub.GetSubscriptionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.GetSubscriptionRequest): + request = pubsub.GetSubscriptionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -539,22 +538,9 @@ async def sample_get_subscription(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_subscription, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_subscription + ] # Certain fields should be provided within the metadata header; # add these here. @@ -657,8 +643,8 @@ async def sample_update_subscription(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([subscription, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -666,7 +652,10 @@ async def sample_update_subscription(): "the individual field arguments should be set." ) - request = pubsub.UpdateSubscriptionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateSubscriptionRequest): + request = pubsub.UpdateSubscriptionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -677,20 +666,9 @@ async def sample_update_subscription(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_subscription, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_subscription + ] # Certain fields should be provided within the metadata header; # add these here. @@ -777,8 +755,8 @@ async def sample_list_subscriptions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -786,7 +764,10 @@ async def sample_list_subscriptions(): "the individual field arguments should be set." 
) - request = pubsub.ListSubscriptionsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ListSubscriptionsRequest): + request = pubsub.ListSubscriptionsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -795,22 +776,9 @@ async def sample_list_subscriptions(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_subscriptions, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_subscriptions + ] # Certain fields should be provided within the metadata header; # add these here. @@ -898,8 +866,8 @@ async def sample_delete_subscription(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: raise ValueError( @@ -907,7 +875,10 @@ async def sample_delete_subscription(): "the individual field arguments should be set." ) - request = pubsub.DeleteSubscriptionRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, pubsub.DeleteSubscriptionRequest): + request = pubsub.DeleteSubscriptionRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -916,20 +887,9 @@ async def sample_delete_subscription(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_subscription, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_subscription + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1032,8 +992,8 @@ async def sample_modify_ack_deadline(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) if request is not None and has_flattened_params: raise ValueError( @@ -1041,7 +1001,10 @@ async def sample_modify_ack_deadline(): "the individual field arguments should be set." ) - request = pubsub.ModifyAckDeadlineRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ModifyAckDeadlineRequest): + request = pubsub.ModifyAckDeadlineRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -1054,20 +1017,9 @@ async def sample_modify_ack_deadline(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_ack_deadline, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_ack_deadline + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1157,8 +1109,8 @@ async def sample_acknowledge(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids]) if request is not None and has_flattened_params: raise ValueError( @@ -1166,7 +1118,10 @@ async def sample_acknowledge(): "the individual field arguments should be set." ) - request = pubsub.AcknowledgeRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.AcknowledgeRequest): + request = pubsub.AcknowledgeRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1177,20 +1132,9 @@ async def sample_acknowledge(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.acknowledge, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.acknowledge + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1297,8 +1241,8 @@ async def sample_pull(): Response for the Pull method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, return_immediately, max_messages]) if request is not None and has_flattened_params: raise ValueError( @@ -1306,7 +1250,10 @@ async def sample_pull(): "the individual field arguments should be set." ) - request = pubsub.PullRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.PullRequest): + request = pubsub.PullRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1325,23 +1272,7 @@ async def sample_pull(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.pull, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.InternalServerError, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.pull] # Certain fields should be provided within the metadata header; # add these here. @@ -1441,24 +1372,9 @@ def request_generator(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.streaming_pull, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=4, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.DeadlineExceeded, - core_exceptions.InternalServerError, - core_exceptions.ResourceExhausted, - core_exceptions.ServiceUnavailable, - ), - deadline=900.0, - ), - default_timeout=900.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.streaming_pull + ] # Validate the universe domain. self._client._validate_universe_domain() @@ -1545,8 +1461,8 @@ async def sample_modify_push_config(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([subscription, push_config]) if request is not None and has_flattened_params: raise ValueError( @@ -1554,7 +1470,10 @@ async def sample_modify_push_config(): "the individual field arguments should be set." ) - request = pubsub.ModifyPushConfigRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.ModifyPushConfigRequest): + request = pubsub.ModifyPushConfigRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1565,20 +1484,9 @@ async def sample_modify_push_config(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.modify_push_config, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.modify_push_config + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1668,8 +1576,8 @@ async def sample_get_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -1677,7 +1585,10 @@ async def sample_get_snapshot(): "the individual field arguments should be set." 
) - request = pubsub.GetSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.GetSnapshotRequest): + request = pubsub.GetSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1686,22 +1597,9 @@ async def sample_get_snapshot(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_snapshot, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1790,8 +1688,8 @@ async def sample_list_snapshots(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1799,7 +1697,10 @@ async def sample_list_snapshots(): "the individual field arguments should be set." ) - request = pubsub.ListSnapshotsRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, pubsub.ListSnapshotsRequest): + request = pubsub.ListSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -1808,22 +1709,9 @@ async def sample_list_snapshots(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_snapshots, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_snapshots + ] # Certain fields should be provided within the metadata header; # add these here. @@ -1959,8 +1847,8 @@ async def sample_create_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, subscription]) if request is not None and has_flattened_params: raise ValueError( @@ -1968,7 +1856,10 @@ async def sample_create_snapshot(): "the individual field arguments should be set." ) - request = pubsub.CreateSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.CreateSnapshotRequest): + request = pubsub.CreateSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. 
@@ -1979,20 +1870,9 @@ async def sample_create_snapshot(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_snapshot, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2092,8 +1972,8 @@ async def sample_update_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2101,7 +1981,10 @@ async def sample_update_snapshot(): "the individual field arguments should be set." ) - request = pubsub.UpdateSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.UpdateSnapshotRequest): + request = pubsub.UpdateSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2112,20 +1995,9 @@ async def sample_update_snapshot(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_snapshot, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2209,8 +2081,8 @@ async def sample_delete_snapshot(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -2218,7 +2090,10 @@ async def sample_delete_snapshot(): "the individual field arguments should be set." ) - request = pubsub.DeleteSnapshotRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.DeleteSnapshotRequest): + request = pubsub.DeleteSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. @@ -2227,20 +2102,9 @@ async def sample_delete_snapshot(): # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_snapshot, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.ServiceUnavailable, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_snapshot + ] # Certain fields should be provided within the metadata header; # add these here. @@ -2317,26 +2181,14 @@ async def sample_seek(): Response for the Seek method (this response is empty). """ # Create or coerce a protobuf request object. - request = pubsub.SeekRequest(request) + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, pubsub.SeekRequest): + request = pubsub.SeekRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.seek, - default_retry=retries.AsyncRetry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - core_exceptions.Aborted, - core_exceptions.ServiceUnavailable, - core_exceptions.Unknown, - ), - deadline=60.0, - ), - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._client._transport._wrapped_methods[self._client._transport.seek] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 6b7103852ee8..bc7639c5931b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -19,6 +19,7 @@ import re from typing import ( Dict, + Callable, Mapping, MutableMapping, MutableSequence, @@ -583,7 +584,9 @@ def __init__( self, *, credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, SubscriberTransport]] = None, + transport: Optional[ + Union[str, SubscriberTransport, Callable[..., SubscriberTransport]] + ] = None, client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -595,9 +598,11 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, SubscriberTransport]): The - transport to use. If set to None, a transport is chosen - automatically. + transport (Optional[Union[str,SubscriberTransport,Callable[..., SubscriberTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the SubscriberTransport constructor. + If set to None, a transport is chosen automatically. client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the client. 
@@ -703,17 +708,24 @@ def __init__( api_key_value ) - Transport = type(self).get_transport_class(cast(str, transport)) + transport_init: Union[ + Type[SubscriberTransport], Callable[..., SubscriberTransport] + ] = ( + type(self).get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., SubscriberTransport], transport) + ) + # initialize with the provided callable or the passed in class emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(Transport, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) - Transport = functools.partial(Transport, channel=channel) + transport_init = functools.partial(transport_init, channel=channel) - self._transport = Transport( + self._transport = transport_init( credentials=credentials, credentials_file=self._client_options.credentials_file, host=self._api_endpoint, @@ -860,8 +872,8 @@ def sample_create_subscription(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) if request is not None and has_flattened_params: raise ValueError( @@ -869,10 +881,8 @@ def sample_create_subscription(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.Subscription. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.Subscription): request = pubsub.Subscription(request) # If we have keyword arguments corresponding to fields on the @@ -973,8 +983,8 @@ def sample_get_subscription(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: raise ValueError( @@ -982,10 +992,8 @@ def sample_get_subscription(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.GetSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.GetSubscriptionRequest): request = pubsub.GetSubscriptionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1098,8 +1106,8 @@ def sample_update_subscription(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -1107,10 +1115,8 @@ def sample_update_subscription(): "the individual field arguments should be set." 
) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.UpdateSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.UpdateSubscriptionRequest): request = pubsub.UpdateSubscriptionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1209,8 +1215,8 @@ def sample_list_subscriptions(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -1218,10 +1224,8 @@ def sample_list_subscriptions(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListSubscriptionsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.ListSubscriptionsRequest): request = pubsub.ListSubscriptionsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1319,8 +1323,8 @@ def sample_delete_subscription(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription]) if request is not None and has_flattened_params: raise ValueError( @@ -1328,10 +1332,8 @@ def sample_delete_subscription(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DeleteSubscriptionRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.DeleteSubscriptionRequest): request = pubsub.DeleteSubscriptionRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1444,8 +1446,8 @@ def sample_modify_ack_deadline(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) if request is not None and has_flattened_params: raise ValueError( @@ -1453,10 +1455,8 @@ def sample_modify_ack_deadline(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ModifyAckDeadlineRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, pubsub.ModifyAckDeadlineRequest): request = pubsub.ModifyAckDeadlineRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1560,8 +1560,8 @@ def sample_acknowledge(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, ack_ids]) if request is not None and has_flattened_params: raise ValueError( @@ -1569,10 +1569,8 @@ def sample_acknowledge(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.AcknowledgeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.AcknowledgeRequest): request = pubsub.AcknowledgeRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1691,8 +1689,8 @@ def sample_pull(): Response for the Pull method. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, return_immediately, max_messages]) if request is not None and has_flattened_params: raise ValueError( @@ -1700,10 +1698,8 @@ def sample_pull(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.PullRequest. 
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.PullRequest): request = pubsub.PullRequest(request) # If we have keyword arguments corresponding to fields on the @@ -1915,8 +1911,8 @@ def sample_modify_push_config(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([subscription, push_config]) if request is not None and has_flattened_params: raise ValueError( @@ -1924,10 +1920,8 @@ def sample_modify_push_config(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ModifyPushConfigRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.ModifyPushConfigRequest): request = pubsub.ModifyPushConfigRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2029,8 +2023,8 @@ def sample_get_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -2038,10 +2032,8 @@ def sample_get_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.GetSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.GetSnapshotRequest): request = pubsub.GetSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2140,8 +2132,8 @@ def sample_list_snapshots(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([project]) if request is not None and has_flattened_params: raise ValueError( @@ -2149,10 +2141,8 @@ def sample_list_snapshots(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.ListSnapshotsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.ListSnapshotsRequest): request = pubsub.ListSnapshotsRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2298,8 +2288,8 @@ def sample_create_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([name, subscription]) if request is not None and has_flattened_params: raise ValueError( @@ -2307,10 +2297,8 @@ def sample_create_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.CreateSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.CreateSnapshotRequest): request = pubsub.CreateSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2422,8 +2410,8 @@ def sample_update_snapshot(): """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot, update_mask]) if request is not None and has_flattened_params: raise ValueError( @@ -2431,10 +2419,8 @@ def sample_update_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.UpdateSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, pubsub.UpdateSnapshotRequest): request = pubsub.UpdateSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2530,8 +2516,8 @@ def sample_delete_snapshot(): sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. has_flattened_params = any([snapshot]) if request is not None and has_flattened_params: raise ValueError( @@ -2539,10 +2525,8 @@ def sample_delete_snapshot(): "the individual field arguments should be set." ) - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.DeleteSnapshotRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. if not isinstance(request, pubsub.DeleteSnapshotRequest): request = pubsub.DeleteSnapshotRequest(request) # If we have keyword arguments corresponding to fields on the @@ -2629,10 +2613,8 @@ def sample_seek(): Response for the Seek method (this response is empty). """ # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a pubsub.SeekRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
if not isinstance(request, pubsub.SeekRequest): request = pubsub.SeekRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 34cb97d49b14..c5fa183b1103 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -88,6 +88,8 @@ def __init__( # Save the scopes. self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False # If no credentials are provided, then determine the appropriate # defaults. @@ -100,7 +102,7 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: + elif credentials is None and not self._ignore_credentials: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 82c752fa5f59..e706190ced95 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -56,7 +56,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -76,14 +76,17 @@ def __init__( credentials 
identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -93,11 +96,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -123,9 +126,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, grpc.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None @@ -164,7 +168,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 356acf17c0de..9dab4a21bd4d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -18,6 +18,8 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers_async +from google.api_core import exceptions as core_exceptions +from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -71,7 +73,6 @@ def create_channel( the credentials from the environment. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -101,7 +102,7 @@ def __init__( credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, api_mtls_endpoint: Optional[str] = None, client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, @@ -121,15 +122,18 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if a ``channel`` instance is provided. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. + channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from @@ -139,11 +143,11 @@ def __init__( private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. + for the grpc channel. It is ignored if a ``channel`` instance is provided. client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback to provide client certificate bytes and private key bytes, both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -169,9 +173,10 @@ def __init__( if client_cert_source: warnings.warn("client_cert_source is deprecated", DeprecationWarning) - if channel: + if isinstance(channel, aio.Channel): # Ignore credentials if a channel was passed. - credentials = False + credentials = None + self._ignore_credentials = True # If a channel was explicitly provided, set it. 
self._grpc_channel = channel self._ssl_channel_credentials = None @@ -209,7 +214,9 @@ def __init__( ) if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( self._host, # use the credentials which are saved credentials=self._credentials, @@ -832,6 +839,254 @@ def test_iam_permissions( ) return self._stubs["test_iam_permissions"] + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_subscription: gapic_v1.method_async.wrap_method( + self.create_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_subscription: gapic_v1.method_async.wrap_method( + self.get_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_subscription: gapic_v1.method_async.wrap_method( + self.update_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_subscriptions: gapic_v1.method_async.wrap_method( + self.list_subscriptions, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_subscription: gapic_v1.method_async.wrap_method( + self.delete_subscription, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.modify_ack_deadline: gapic_v1.method_async.wrap_method( + self.modify_ack_deadline, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.acknowledge: gapic_v1.method_async.wrap_method( + self.acknowledge, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.pull: gapic_v1.method_async.wrap_method( + self.pull, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.InternalServerError, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.streaming_pull: gapic_v1.method_async.wrap_method( + self.streaming_pull, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=4, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.DeadlineExceeded, + core_exceptions.InternalServerError, + core_exceptions.ResourceExhausted, + core_exceptions.ServiceUnavailable, + ), + 
deadline=900.0, + ), + default_timeout=900.0, + client_info=client_info, + ), + self.modify_push_config: gapic_v1.method_async.wrap_method( + self.modify_push_config, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.get_snapshot: gapic_v1.method_async.wrap_method( + self.get_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.list_snapshots: gapic_v1.method_async.wrap_method( + self.list_snapshots, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.create_snapshot: gapic_v1.method_async.wrap_method( + self.create_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_snapshot: gapic_v1.method_async.wrap_method( + self.update_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.delete_snapshot: gapic_v1.method_async.wrap_method( + self.delete_snapshot, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.seek: gapic_v1.method_async.wrap_method( + self.seek, + default_retry=retries.AsyncRetry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.Aborted, + core_exceptions.ServiceUnavailable, + core_exceptions.Unknown, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + } + def close(self): return self.grpc_channel.close() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 55cda3abd93f..a45a34a14a82 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1302,6 +1302,14 @@ class BigQueryConfig(proto.Message): Optional. When true, use the BigQuery table's schema as the columns to write to in BigQuery. ``use_table_schema`` and ``use_topic_schema`` cannot be enabled at the same time. + service_account_email (str): + Optional. The service account to use to write to BigQuery. + The subscription creator or updater that specifies this + field must have ``iam.serviceAccounts.actAs`` permission on + the service account. If not specified, the Pub/Sub `service + agent `__, + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, + is used. """ class State(proto.Enum): @@ -1366,6 +1374,10 @@ class State(proto.Enum): proto.BOOL, number=6, ) + service_account_email: str = proto.Field( + proto.STRING, + number=7, + ) class CloudStorageConfig(proto.Message): @@ -1424,6 +1436,15 @@ class CloudStorageConfig(proto.Message): Output only. An output-only field that indicates whether or not the subscription can receive messages. + service_account_email (str): + Optional. The service account to use to write to Cloud + Storage. 
The subscription creator or updater that specifies + this field must have ``iam.serviceAccounts.actAs`` + permission on the service account. If not specified, the + Pub/Sub `service + agent `__, + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, + is used. """ class State(proto.Enum): @@ -1445,12 +1466,17 @@ class State(proto.Enum): Cannot write to the destination because enforce_in_transit is set to true and the destination locations are not in the allowed regions. + SCHEMA_MISMATCH (5): + Cannot write to the Cloud Storage bucket due + to an incompatibility between the topic schema + and subscription settings. """ STATE_UNSPECIFIED = 0 ACTIVE = 1 PERMISSION_DENIED = 2 NOT_FOUND = 3 IN_TRANSIT_LOCATION_RESTRICTION = 4 + SCHEMA_MISMATCH = 5 class TextConfig(proto.Message): r"""Configuration for writing message data in text format. @@ -1473,12 +1499,20 @@ class AvroConfig(proto.Message): fields while all other message properties other than data (for example, an ordering_key, if present) are added as entries in the attributes map. + use_topic_schema (bool): + Optional. When true, the output Cloud Storage + file will be serialized using the topic schema, + if it exists. 
""" write_metadata: bool = proto.Field( proto.BOOL, number=1, ) + use_topic_schema: bool = proto.Field( + proto.BOOL, + number=2, + ) bucket: str = proto.Field( proto.STRING, @@ -1522,6 +1556,10 @@ class AvroConfig(proto.Message): number=9, enum=State, ) + service_account_email: str = proto.Field( + proto.STRING, + number=11, + ) class ReceivedMessage(proto.Message): diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 3551413ee128..204b30ba5409 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -101,16 +101,16 @@ count = s.replace( clients_to_patch, - r"Transport = type\(self\)\.get_transport_class\(cast\(str, transport\)\)", + r"# initialize with the provided callable or the passed in class", """\g<0> emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(Transport, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) - Transport = functools.partial(Transport, channel=channel) + transport_init = functools.partial(transport_init, channel=channel) """, ) diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 2ab8d41b77f4..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.21.5" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py 
b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 20cecf1c922e..6f1c95722964 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1138,6 +1138,9 @@ def test_create_topic_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_topic() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1162,6 +1165,9 @@ def test_create_topic_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_topic(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1171,6 +1177,41 @@ def test_create_topic_non_empty_request_with_auto_populated_field(): ) +def test_create_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_topic] = mock_rpc + request = {} + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_topic_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1197,6 +1238,47 @@ async def test_create_topic_empty_call_async(): assert args[0] == pubsub.Topic() +@pytest.mark.asyncio +async def test_create_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_topic + ] = mock_object + + request = {} + await client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.Topic @@ -1433,6 +1515,9 @@ def test_update_topic_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_topic() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1454,12 +1539,50 @@ def test_update_topic_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_topic(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == pubsub.UpdateTopicRequest() +def test_update_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_topic] = mock_rpc + request = {} + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_topic_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1486,6 +1609,47 @@ async def test_update_topic_empty_call_async(): assert args[0] == pubsub.UpdateTopicRequest() +@pytest.mark.asyncio +async def test_update_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_topic + ] = mock_object + + request = {} + await client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest @@ -1726,6 +1890,9 @@ def test_publish_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.publish() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1749,6 +1916,9 @@ def test_publish_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.publish(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1757,6 +1927,41 @@ def test_publish_non_empty_request_with_auto_populated_field(): ) +def test_publish_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.publish in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.publish] = mock_rpc + request = {} + client.publish(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.publish(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_publish_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1780,6 +1985,45 @@ async def test_publish_empty_call_async(): assert args[0] == pubsub.PublishRequest() +@pytest.mark.asyncio +async def test_publish_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.publish + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.publish + ] = mock_object + + request = {} + await client.publish(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.publish(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_publish_async( transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest @@ -2024,6 +2268,9 @@ def test_get_topic_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_topic() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2047,6 +2294,9 @@ def test_get_topic_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_topic(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2055,6 +2305,41 @@ def test_get_topic_non_empty_request_with_auto_populated_field(): ) +def test_get_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_topic] = mock_rpc + request = {} + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_topic_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2081,6 +2366,45 @@ async def test_get_topic_empty_call_async(): assert args[0] == pubsub.GetTopicRequest() +@pytest.mark.asyncio +async def test_get_topic_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_topic + ] = mock_object + + request = {} + await client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest @@ -2311,6 +2635,9 @@ def test_list_topics_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_topics() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2335,6 +2662,9 @@ def test_list_topics_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_topics(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2344,6 +2674,41 @@ def test_list_topics_non_empty_request_with_auto_populated_field(): ) +def test_list_topics_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_topics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_topics] = mock_rpc + request = {} + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_topics_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2367,6 +2732,47 @@ async def test_list_topics_empty_call_async(): assert args[0] == pubsub.ListTopicsRequest() +@pytest.mark.asyncio +async def test_list_topics_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topics + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topics + ] = mock_object + + request = {} + await client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_topics_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest @@ -2586,13 +2992,13 @@ def test_list_topics_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) pager = client.list_topics(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2791,6 +3197,9 @@ def test_list_topic_subscriptions_empty_call(): with mock.patch.object( type(client.transport.list_topic_subscriptions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_topic_subscriptions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2817,6 +3226,9 @@ def test_list_topic_subscriptions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_topic_subscriptions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_topic_subscriptions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2826,6 +3238,46 @@ def test_list_topic_subscriptions_non_empty_request_with_auto_populated_field(): ) +def test_list_topic_subscriptions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_subscriptions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_subscriptions + ] = mock_rpc + request = {} + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topic_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_topic_subscriptions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2852,6 +3304,47 @@ async def test_list_topic_subscriptions_empty_call_async(): assert args[0] == pubsub.ListTopicSubscriptionsRequest() +@pytest.mark.asyncio +async def test_list_topic_subscriptions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topic_subscriptions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topic_subscriptions + ] = mock_object + + request = {} + await client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_topic_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_topic_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest @@ -3085,13 +3578,13 @@ def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), ) pager = client.list_topic_subscriptions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3296,6 +3789,9 @@ def test_list_topic_snapshots_empty_call(): with mock.patch.object( type(client.transport.list_topic_snapshots), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_topic_snapshots() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3322,6 +3818,9 @@ def test_list_topic_snapshots_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_topic_snapshots), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_topic_snapshots(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3331,6 +3830,45 @@ def test_list_topic_snapshots_non_empty_request_with_auto_populated_field(): ) +def test_list_topic_snapshots_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_snapshots in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_snapshots + ] = mock_rpc + request = {} + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topic_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_topic_snapshots_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3357,6 +3895,47 @@ async def test_list_topic_snapshots_empty_call_async(): assert args[0] == pubsub.ListTopicSnapshotsRequest() +@pytest.mark.asyncio +async def test_list_topic_snapshots_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_topic_snapshots + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_topic_snapshots + ] = mock_object + + request = {} + await client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_topic_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_topic_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest @@ -3590,13 +4169,13 @@ def test_list_topic_snapshots_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), ) pager = client.list_topic_snapshots(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -3792,6 +4371,9 @@ def test_delete_topic_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_topic() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3815,6 +4397,9 @@ def test_delete_topic_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_topic(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3823,6 +4408,41 @@ def test_delete_topic_non_empty_request_with_auto_populated_field(): ) +def test_delete_topic_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_topic] = mock_rpc + request = {} + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_topic_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3842,6 +4462,47 @@ async def test_delete_topic_empty_call_async(): assert args[0] == pubsub.DeleteTopicRequest() +@pytest.mark.asyncio +async def test_delete_topic_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_topic + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_topic + ] = mock_object + + request = {} + await client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest @@ -4062,6 +4723,9 @@ def test_detach_subscription_empty_call(): with mock.patch.object( type(client.transport.detach_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.detach_subscription() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4087,6 +4751,9 @@ def test_detach_subscription_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.detach_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.detach_subscription(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4095,6 +4762,45 @@ def test_detach_subscription_non_empty_request_with_auto_populated_field(): ) +def test_detach_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.detach_subscription + ] = mock_rpc + request = {} + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.detach_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_detach_subscription_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4118,6 +4824,47 @@ async def test_detach_subscription_empty_call_async(): assert args[0] == pubsub.DetachSubscriptionRequest() +@pytest.mark.asyncio +async def test_detach_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = PublisherAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.detach_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.detach_subscription + ] = mock_object + + request = {} + await client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.detach_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_detach_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest @@ -4267,6 +5014,42 @@ def test_create_topic_rest(request_type): assert response.state == pubsub.Topic.State.ACTIVE +def test_create_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_topic] = mock_rpc + + request = {} + client.create_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_topic_rest_required_fields(request_type=pubsub.Topic): transport_class = transports.PublisherRestTransport @@ -4531,6 +5314,42 @@ def test_update_topic_rest(request_type): assert response.state == pubsub.Topic.State.ACTIVE +def test_update_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_topic] = mock_rpc + + request = {} + client.update_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicRequest): transport_class = transports.PublisherRestTransport @@ -4794,6 +5613,42 @@ def test_publish_rest(request_type): assert response.message_ids == ["message_ids_value"] +def test_publish_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.publish in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.publish] = mock_rpc + + request = {} + client.publish(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.publish(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_publish_rest_required_fields(request_type=pubsub.PublishRequest): transport_class = transports.PublisherRestTransport @@ -5071,6 +5926,42 @@ def test_get_topic_rest(request_type): assert response.state == pubsub.Topic.State.ACTIVE +def test_get_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_topic] = mock_rpc + + request = {} + client.get_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest): transport_class = transports.PublisherRestTransport @@ -5328,6 +6219,42 @@ def test_list_topics_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_topics_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_topics in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_topics] = mock_rpc + + request = {} + client.list_topics(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topics(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_topics_rest_required_fields(request_type=pubsub.ListTopicsRequest): transport_class = transports.PublisherRestTransport @@ -5659,6 +6586,47 @@ def test_list_topic_subscriptions_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_topic_subscriptions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_subscriptions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_subscriptions + ] = mock_rpc + + request = {} + client.list_topic_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topic_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_topic_subscriptions_rest_required_fields( request_type=pubsub.ListTopicSubscriptionsRequest, ): @@ -5997,6 +6965,46 @@ def test_list_topic_snapshots_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_topic_snapshots_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_topic_snapshots in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_topic_snapshots + ] = mock_rpc + + request = {} + client.list_topic_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_topic_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_topic_snapshots_rest_required_fields( request_type=pubsub.ListTopicSnapshotsRequest, ): @@ -6326,6 +7334,42 @@ def test_delete_topic_rest(request_type): assert response is None +def test_delete_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_topic in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_topic] = mock_rpc + + request = {} + client.delete_topic(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_topic(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_topic_rest_required_fields(request_type=pubsub.DeleteTopicRequest): transport_class = transports.PublisherRestTransport @@ -6569,6 +7613,46 @@ def test_detach_subscription_rest(request_type): assert isinstance(response, pubsub.DetachSubscriptionResponse) +def test_detach_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.detach_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.detach_subscription + ] = mock_rpc + + request = {} + client.detach_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.detach_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_detach_subscription_rest_required_fields( request_type=pubsub.DetachSubscriptionRequest, ): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 5de9c6c457be..f44f6846f52a 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1177,6 +1177,9 @@ def test_create_schema_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_schema() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1201,6 +1204,9 @@ def test_create_schema_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_schema(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1210,6 +1216,41 @@ def test_create_schema_non_empty_request_with_auto_populated_field(): ) +def test_create_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_schema] = mock_rpc + request = {} + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_schema_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1236,6 +1277,47 @@ async def test_create_schema_empty_call_async(): assert args[0] == gp_schema.CreateSchemaRequest() +@pytest.mark.asyncio +async def test_create_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_schema + ] = mock_object + + request = {} + await client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest @@ -1492,6 +1574,9 @@ def test_get_schema_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_schema() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1515,6 +1600,9 @@ def test_get_schema_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_schema(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1523,6 +1611,41 @@ def test_get_schema_non_empty_request_with_auto_populated_field(): ) +def test_get_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_schema] = mock_rpc + request = {} + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_schema_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1549,6 +1672,45 @@ async def test_get_schema_empty_call_async(): assert args[0] == schema.GetSchemaRequest() +@pytest.mark.asyncio +async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_schema + ] = mock_object + + request = {} + await client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_schema_async( transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest @@ -1779,6 +1941,9 @@ def test_list_schemas_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_schemas() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1803,6 +1968,9 @@ def test_list_schemas_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_schemas(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1812,6 +1980,41 @@ def test_list_schemas_non_empty_request_with_auto_populated_field(): ) +def test_list_schemas_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_schemas in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_schemas] = mock_rpc + request = {} + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_schemas(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_schemas_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1835,6 +2038,47 @@ async def test_list_schemas_empty_call_async(): assert args[0] == schema.ListSchemasRequest() +@pytest.mark.asyncio +async def test_list_schemas_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_schemas + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_schemas + ] = mock_object + + request = {} + await client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_schemas(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_schemas_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest @@ -2054,13 +2298,13 @@ def test_list_schemas_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) pager = client.list_schemas(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2257,6 +2501,9 @@ def test_list_schema_revisions_empty_call(): with mock.patch.object( type(client.transport.list_schema_revisions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_schema_revisions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2283,6 +2530,9 @@ def test_list_schema_revisions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_schema_revisions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.list_schema_revisions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2292,6 +2542,46 @@ def test_list_schema_revisions_non_empty_request_with_auto_populated_field(): ) +def test_list_schema_revisions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_schema_revisions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_schema_revisions + ] = mock_rpc + request = {} + client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_schema_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_schema_revisions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2317,6 +2607,47 @@ async def test_list_schema_revisions_empty_call_async(): assert args[0] == schema.ListSchemaRevisionsRequest() +@pytest.mark.asyncio +async def test_list_schema_revisions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_schema_revisions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_schema_revisions + ] = mock_object + + request = {} + await client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_schema_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_schema_revisions_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemaRevisionsRequest @@ -2548,13 +2879,13 @@ def test_list_schema_revisions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) pager = client.list_schema_revisions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2759,6 +3090,9 @@ def test_commit_schema_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.commit_schema() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2782,6 +3116,9 @@ def test_commit_schema_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.commit_schema(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2790,6 +3127,41 @@ def test_commit_schema_non_empty_request_with_auto_populated_field(): ) +def test_commit_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.commit_schema] = mock_rpc + request = {} + client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.commit_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_commit_schema_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2816,6 +3188,47 @@ async def test_commit_schema_empty_call_async(): assert args[0] == gp_schema.CommitSchemaRequest() +@pytest.mark.asyncio +async def test_commit_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.commit_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.commit_schema + ] = mock_object + + request = {} + await client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.commit_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_commit_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CommitSchemaRequest @@ -3062,6 +3475,9 @@ def test_rollback_schema_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.rollback_schema() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3086,6 +3502,9 @@ def test_rollback_schema_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.rollback_schema(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3095,6 +3514,41 @@ def test_rollback_schema_non_empty_request_with_auto_populated_field(): ) +def test_rollback_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback_schema] = mock_rpc + request = {} + client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rollback_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_rollback_schema_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3121,6 +3575,47 @@ async def test_rollback_schema_empty_call_async(): assert args[0] == schema.RollbackSchemaRequest() +@pytest.mark.asyncio +async def test_rollback_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.rollback_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.rollback_schema + ] = mock_object + + request = {} + await client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.rollback_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_rollback_schema_async( transport: str = "grpc_asyncio", request_type=schema.RollbackSchemaRequest @@ -3371,6 +3866,9 @@ def test_delete_schema_revision_empty_call(): with mock.patch.object( type(client.transport.delete_schema_revision), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_schema_revision() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3397,6 +3895,9 @@ def test_delete_schema_revision_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_schema_revision), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_schema_revision(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3406,6 +3907,46 @@ def test_delete_schema_revision_non_empty_request_with_auto_populated_field(): ) +def test_delete_schema_revision_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_schema_revision + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_schema_revision + ] = mock_rpc + request = {} + client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_schema_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_schema_revision_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3434,6 +3975,47 @@ async def test_delete_schema_revision_empty_call_async(): assert args[0] == schema.DeleteSchemaRevisionRequest() +@pytest.mark.asyncio +async def test_delete_schema_revision_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_schema_revision + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_schema_revision + ] = mock_object + + request = {} + await client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_schema_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_schema_revision_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRevisionRequest @@ -3681,6 +4263,9 @@ def test_delete_schema_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_schema() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3704,6 +4289,9 @@ def test_delete_schema_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_schema(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3712,6 +4300,41 @@ def test_delete_schema_non_empty_request_with_auto_populated_field(): ) +def test_delete_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_schema] = mock_rpc + request = {} + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_schema_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3731,6 +4354,47 @@ async def test_delete_schema_empty_call_async(): assert args[0] == schema.DeleteSchemaRequest() +@pytest.mark.asyncio +async def test_delete_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_schema + ] = mock_object + + request = {} + await client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_schema_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest @@ -3947,6 +4611,9 @@ def test_validate_schema_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.validate_schema() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3970,6 +4637,9 @@ def test_validate_schema_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.validate_schema(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3978,6 +4648,41 @@ def test_validate_schema_non_empty_request_with_auto_populated_field(): ) +def test_validate_schema_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.validate_schema] = mock_rpc + request = {} + client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.validate_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_validate_schema_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3999,6 +4704,47 @@ async def test_validate_schema_empty_call_async(): assert args[0] == gp_schema.ValidateSchemaRequest() +@pytest.mark.asyncio +async def test_validate_schema_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.validate_schema + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.validate_schema + ] = mock_object + + request = {} + await client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.validate_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_validate_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest @@ -4231,6 +4977,9 @@ def test_validate_message_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.validate_message() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4255,6 +5004,9 @@ def test_validate_message_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.validate_message(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4264,6 +5016,43 @@ def test_validate_message_non_empty_request_with_auto_populated_field(): ) +def test_validate_message_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate_message in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.validate_message + ] = mock_rpc + request = {} + client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.validate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_validate_message_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4285,6 +5074,47 @@ async def test_validate_message_empty_call_async(): assert args[0] == schema.ValidateMessageRequest() +@pytest.mark.asyncio +async def test_validate_message_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SchemaServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.validate_message + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.validate_message + ] = mock_object + + request = {} + await client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.validate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_validate_message_async( transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest @@ -4502,6 +5332,42 @@ def get_message_fields(field): assert response.revision_id == "revision_id_value" +def test_create_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_schema] = mock_rpc + + request = {} + client.create_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaRequest): transport_class = transports.SchemaServiceRestTransport @@ -4782,6 +5648,42 @@ def test_get_schema_rest(request_type): assert response.revision_id == "revision_id_value" +def test_get_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_schema] = mock_rpc + + request = {} + client.get_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_schema_rest_required_fields(request_type=schema.GetSchemaRequest): transport_class = transports.SchemaServiceRestTransport @@ -5043,6 +5945,42 @@ def test_list_schemas_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_schemas_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_schemas in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_schemas] = mock_rpc + + request = {} + client.list_schemas(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_schemas(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_schemas_rest_required_fields(request_type=schema.ListSchemasRequest): transport_class = transports.SchemaServiceRestTransport @@ -5376,6 +6314,47 @@ def test_list_schema_revisions_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_schema_revisions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_schema_revisions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_schema_revisions + ] = mock_rpc + + request = {} + client.list_schema_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_schema_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_schema_revisions_rest_required_fields( request_type=schema.ListSchemaRevisionsRequest, ): @@ -5722,6 +6701,42 @@ def test_commit_schema_rest(request_type): assert response.revision_id == "revision_id_value" +def test_commit_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.commit_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.commit_schema] = mock_rpc + + request = {} + client.commit_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.commit_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_commit_schema_rest_required_fields(request_type=gp_schema.CommitSchemaRequest): transport_class = transports.SchemaServiceRestTransport @@ -5998,6 +7013,42 @@ def test_rollback_schema_rest(request_type): assert response.revision_id == "revision_id_value" +def test_rollback_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.rollback_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.rollback_schema] = mock_rpc + + request = {} + client.rollback_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.rollback_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_rollback_schema_rest_required_fields( request_type=schema.RollbackSchemaRequest, ): @@ -6281,6 +7332,47 @@ def test_delete_schema_revision_rest(request_type): assert response.revision_id == "revision_id_value" +def test_delete_schema_revision_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_schema_revision + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_schema_revision + ] = mock_rpc + + request = {} + client.delete_schema_revision(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_schema_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_schema_revision_rest_required_fields( request_type=schema.DeleteSchemaRevisionRequest, ): @@ -6544,6 +7636,42 @@ def test_delete_schema_rest(request_type): assert response is None +def test_delete_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_schema] = mock_rpc + + request = {} + client.delete_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_schema_rest_required_fields(request_type=schema.DeleteSchemaRequest): transport_class = transports.SchemaServiceRestTransport @@ -6789,6 +7917,42 @@ def test_validate_schema_rest(request_type): assert isinstance(response, gp_schema.ValidateSchemaResponse) +def test_validate_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate_schema in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.validate_schema] = mock_rpc + + request = {} + client.validate_schema(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.validate_schema(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_validate_schema_rest_required_fields( request_type=gp_schema.ValidateSchemaRequest, ): @@ -7063,6 +8227,44 @@ def test_validate_message_rest(request_type): assert isinstance(response, schema.ValidateMessageResponse) +def test_validate_message_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.validate_message in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.validate_message + ] = mock_rpc + + request = {} + client.validate_message(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.validate_message(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_validate_message_rest_required_fields( request_type=schema.ValidateMessageRequest, ): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 50e02b52d47d..0251bda96ea4 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1156,6 +1156,9 @@ def test_create_subscription_empty_call(): with mock.patch.object( type(client.transport.create_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_subscription() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1183,6 +1186,9 @@ def test_create_subscription_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.create_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_subscription(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1193,6 +1199,45 @@ def test_create_subscription_non_empty_request_with_auto_populated_field(): ) +def test_create_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_subscription + ] = mock_rpc + request = {} + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_subscription_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1226,6 +1271,47 @@ async def test_create_subscription_empty_call_async(): assert args[0] == pubsub.Subscription() +@pytest.mark.asyncio +async def test_create_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_subscription + ] = mock_object + + request = {} + await client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.Subscription @@ -1522,6 +1608,9 @@ def test_get_subscription_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_subscription() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1545,6 +1634,9 @@ def test_get_subscription_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_subscription(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -1553,6 +1645,43 @@ def test_get_subscription_non_empty_request_with_auto_populated_field(): ) +def test_get_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_subscription in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_subscription + ] = mock_rpc + request = {} + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_subscription_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1584,6 +1713,47 @@ async def test_get_subscription_empty_call_async(): assert args[0] == pubsub.GetSubscriptionRequest() +@pytest.mark.asyncio +async def test_get_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_subscription + ] = mock_object + + request = {} + await client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.get_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest @@ -1844,6 +2014,9 @@ def test_update_subscription_empty_call(): with mock.patch.object( type(client.transport.update_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_subscription() call.assert_called() _, args, _ = call.mock_calls[0] @@ -1867,12 +2040,54 @@ def test_update_subscription_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.update_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_subscription(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == pubsub.UpdateSubscriptionRequest() +def test_update_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.update_subscription + ] = mock_rpc + request = {} + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.update_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_subscription_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -1906,6 +2121,47 @@ async def test_update_subscription_empty_call_async(): assert args[0] == pubsub.UpdateSubscriptionRequest() +@pytest.mark.asyncio +async def test_update_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_subscription + ] = mock_object + + request = {} + await client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.update_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest @@ -2170,6 +2426,9 @@ def test_list_subscriptions_empty_call(): with mock.patch.object( type(client.transport.list_subscriptions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_subscriptions() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2196,6 +2455,9 @@ def test_list_subscriptions_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.list_subscriptions), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_subscriptions(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2205,6 +2467,45 @@ def test_list_subscriptions_non_empty_request_with_auto_populated_field(): ) +def test_list_subscriptions_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_subscriptions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.list_subscriptions + ] = mock_rpc + request = {} + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_subscriptions_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2230,6 +2531,47 @@ async def test_list_subscriptions_empty_call_async(): assert args[0] == pubsub.ListSubscriptionsRequest() +@pytest.mark.asyncio +async def test_list_subscriptions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_subscriptions + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_subscriptions + ] = mock_object + + request = {} + await client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest @@ -2461,13 +2803,13 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) pager = client.list_subscriptions(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -2667,6 +3009,9 @@ def test_delete_subscription_empty_call(): with mock.patch.object( type(client.transport.delete_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_subscription() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2692,6 +3037,9 @@ def test_delete_subscription_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.delete_subscription), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.delete_subscription(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2700,6 +3048,45 @@ def test_delete_subscription_non_empty_request_with_auto_populated_field(): ) +def test_delete_subscription_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_subscription + ] = mock_rpc + request = {} + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_subscription_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -2721,6 +3108,47 @@ async def test_delete_subscription_empty_call_async(): assert args[0] == pubsub.DeleteSubscriptionRequest() +@pytest.mark.asyncio +async def test_delete_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_subscription + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_subscription + ] = mock_object + + request = {} + await client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.delete_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest @@ -2951,6 +3379,9 @@ def test_modify_ack_deadline_empty_call(): with mock.patch.object( type(client.transport.modify_ack_deadline), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.modify_ack_deadline() call.assert_called() _, args, _ = call.mock_calls[0] @@ -2976,6 +3407,9 @@ def test_modify_ack_deadline_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.modify_ack_deadline), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.modify_ack_deadline(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -2984,6 +3418,45 @@ def test_modify_ack_deadline_non_empty_request_with_auto_populated_field(): ) +def test_modify_ack_deadline_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_ack_deadline in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.modify_ack_deadline + ] = mock_rpc + request = {} + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.modify_ack_deadline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_modify_ack_deadline_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3005,6 +3478,47 @@ async def test_modify_ack_deadline_empty_call_async(): assert args[0] == pubsub.ModifyAckDeadlineRequest() +@pytest.mark.asyncio +async def test_modify_ack_deadline_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_ack_deadline + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_ack_deadline + ] = mock_object + + request = {} + await client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.modify_ack_deadline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_modify_ack_deadline_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest @@ -3251,6 +3765,9 @@ def test_acknowledge_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.acknowledge() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3274,6 +3791,9 @@ def test_acknowledge_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.acknowledge(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3282,6 +3802,41 @@ def test_acknowledge_non_empty_request_with_auto_populated_field(): ) +def test_acknowledge_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.acknowledge in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.acknowledge] = mock_rpc + request = {} + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.acknowledge(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_acknowledge_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3302,19 +3857,60 @@ async def test_acknowledge_empty_call_async(): @pytest.mark.asyncio -async def test_acknowledge_async( - transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest +async def test_acknowledge_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call within the gRPC stub, and fake the request. + # Ensure method has been cached + assert ( + client._client._transport.acknowledge + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.acknowledge + ] = mock_object + + request = {} + await client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.acknowledge(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + +@pytest.mark.asyncio +async def test_acknowledge_async( + transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest +): + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3527,6 +4123,9 @@ def test_pull_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.pull() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3550,6 +4149,9 @@ def test_pull_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.pull), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.pull(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3558,6 +4160,41 @@ def test_pull_non_empty_request_with_auto_populated_field(): ) +def test_pull_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.pull in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.pull] = mock_rpc + request = {} + client.pull(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_pull_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3577,6 +4214,44 @@ async def test_pull_empty_call_async(): assert args[0] == pubsub.PullRequest() +@pytest.mark.asyncio +async def test_pull_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.pull in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.pull + ] = mock_object + + request = {} + await client.pull(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.PullRequest @@ -3808,6 +4483,82 @@ def test_streaming_pull(request_type, transport: str = "grpc"): assert isinstance(message, pubsub.StreamingPullResponse) +def test_streaming_pull_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.streaming_pull in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.streaming_pull] = mock_rpc + request = [{}] + client.streaming_pull(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.streaming_pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_streaming_pull_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.streaming_pull + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.streaming_pull + ] = mock_object + + request = [{}] + await client.streaming_pull(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.streaming_pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_streaming_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest @@ -3893,6 +4644,9 @@ def test_modify_push_config_empty_call(): with mock.patch.object( type(client.transport.modify_push_config), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.modify_push_config() call.assert_called() _, args, _ = call.mock_calls[0] @@ -3918,6 +4672,9 @@ def test_modify_push_config_non_empty_request_with_auto_populated_field(): with mock.patch.object( type(client.transport.modify_push_config), "__call__" ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.modify_push_config(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -3926,6 +4683,45 @@ def test_modify_push_config_non_empty_request_with_auto_populated_field(): ) +def test_modify_push_config_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_push_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_push_config + ] = mock_rpc + request = {} + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.modify_push_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_modify_push_config_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -3947,6 +4743,47 @@ async def test_modify_push_config_empty_call_async(): assert args[0] == pubsub.ModifyPushConfigRequest() +@pytest.mark.asyncio +async def test_modify_push_config_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.modify_push_config + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.modify_push_config + ] = mock_object + + request = {} + await client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.modify_push_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_modify_push_config_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest @@ -4188,6 +5025,9 @@ def test_get_snapshot_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4211,6 +5051,9 @@ def test_get_snapshot_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.get_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4219,6 +5062,41 @@ def test_get_snapshot_non_empty_request_with_auto_populated_field(): ) +def test_get_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + request = {} + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_get_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4243,6 +5121,47 @@ async def test_get_snapshot_empty_call_async(): assert args[0] == pubsub.GetSnapshotRequest() +@pytest.mark.asyncio +async def test_get_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.get_snapshot + ] = mock_object + + request = {} + await client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest @@ -4469,6 +5388,9 @@ def test_list_snapshots_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_snapshots() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4493,6 +5415,9 @@ def test_list_snapshots_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.list_snapshots(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4502,6 +5427,41 @@ def test_list_snapshots_non_empty_request_with_auto_populated_field(): ) +def test_list_snapshots_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_snapshots in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + request = {} + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_list_snapshots_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -4525,6 +5485,47 @@ async def test_list_snapshots_empty_call_async(): assert args[0] == pubsub.ListSnapshotsRequest() +@pytest.mark.asyncio +async def test_list_snapshots_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_snapshots + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.list_snapshots + ] = mock_object + + request = {} + await client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest @@ -4744,13 +5745,13 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): RuntimeError, ) - metadata = () - metadata = tuple(metadata) + ( + expected_metadata = () + expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) pager = client.list_snapshots(request={}) - assert pager._metadata == metadata + assert pager._metadata == expected_metadata results = list(pager) assert len(results) == 6 @@ -4945,6 +5946,9 @@ def test_create_snapshot_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.create_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] @@ -4969,6 +5973,9 @@ def test_create_snapshot_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) client.create_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -4978,6 +5985,41 @@ def test_create_snapshot_non_empty_request_with_auto_populated_field(): ) +def test_create_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + request = {} + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_create_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5002,6 +6044,47 @@ async def test_create_snapshot_empty_call_async(): assert args[0] == pubsub.CreateSnapshotRequest() +@pytest.mark.asyncio +async def test_create_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.create_snapshot + ] = mock_object + + request = {} + await client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_create_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest @@ -5240,6 +6323,9 @@ def test_update_snapshot_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5261,12 +6347,50 @@ def test_update_snapshot_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.update_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] assert args[0] == pubsub.UpdateSnapshotRequest() +def test_update_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_snapshot] = mock_rpc + request = {} + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_update_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5291,6 +6415,47 @@ async def test_update_snapshot_empty_call_async(): assert args[0] == pubsub.UpdateSnapshotRequest() +@pytest.mark.asyncio +async def test_update_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.update_snapshot + ] = mock_object + + request = {} + await client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.update_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_update_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest @@ -5524,6 +6689,9 @@ def test_delete_snapshot_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_snapshot() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5547,6 +6715,9 @@ def test_delete_snapshot_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.delete_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5555,6 +6726,41 @@ def test_delete_snapshot_non_empty_request_with_auto_populated_field(): ) +def test_delete_snapshot_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + request = {} + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_delete_snapshot_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5574,6 +6780,47 @@ async def test_delete_snapshot_empty_call_async(): assert args[0] == pubsub.DeleteSnapshotRequest() +@pytest.mark.asyncio +async def test_delete_snapshot_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_snapshot + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_snapshot + ] = mock_object + + request = {} + await client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_object.call_count == 1 + + await client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest @@ -5790,6 +7037,9 @@ def test_seek_empty_call(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.seek), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.seek() call.assert_called() _, args, _ = call.mock_calls[0] @@ -5814,6 +7064,9 @@ def test_seek_non_empty_request_with_auto_populated_field(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.seek), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) client.seek(request=request) call.assert_called() _, args, _ = call.mock_calls[0] @@ -5823,6 +7076,41 @@ def test_seek_non_empty_request_with_auto_populated_field(): ) +def test_seek_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.seek in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.seek] = mock_rpc + request = {} + client.seek(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.seek(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + @pytest.mark.asyncio async def test_seek_empty_call_async(): # This test is a coverage failsafe to make sure that totally empty calls, @@ -5842,6 +7130,44 @@ async def test_seek_empty_call_async(): assert args[0] == pubsub.SeekRequest() +@pytest.mark.asyncio +async def test_seek_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.seek in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_object = mock.AsyncMock() + client._client._transport._wrapped_methods[ + client._client._transport.seek + ] = mock_object + + request = {} + await client.seek(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_object.call_count == 1 + + await client.seek(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_object.call_count == 2 + + @pytest.mark.asyncio async def test_seek_async( transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest @@ -5991,6 +7317,46 @@ def test_create_subscription_rest(request_type): assert response.state == pubsub.Subscription.State.ACTIVE +def test_create_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_subscription + ] = mock_rpc + + request = {} + client.create_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_subscription_rest_required_fields(request_type=pubsub.Subscription): transport_class = transports.SubscriberRestTransport @@ -6285,6 +7651,44 @@ def test_get_subscription_rest(request_type): assert response.state == pubsub.Subscription.State.ACTIVE +def test_get_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_subscription in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_subscription + ] = mock_rpc + + request = {} + client.get_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_subscription_rest_required_fields( request_type=pubsub.GetSubscriptionRequest, ): @@ -6563,6 +7967,46 @@ def test_update_subscription_rest(request_type): assert response.state == pubsub.Subscription.State.ACTIVE +def test_update_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_subscription + ] = mock_rpc + + request = {} + client.update_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_subscription_rest_required_fields( request_type=pubsub.UpdateSubscriptionRequest, ): @@ -6836,6 +8280,46 @@ def test_list_subscriptions_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_subscriptions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_subscriptions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_subscriptions + ] = mock_rpc + + request = {} + client.list_subscriptions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_subscriptions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_subscriptions_rest_required_fields( request_type=pubsub.ListSubscriptionsRequest, ): @@ -7166,6 +8650,46 @@ def test_delete_subscription_rest(request_type): assert response is None +def test_delete_subscription_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_subscription in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_subscription + ] = mock_rpc + + request = {} + client.delete_subscription(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_subscription(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_subscription_rest_required_fields( request_type=pubsub.DeleteSubscriptionRequest, ): @@ -7414,6 +8938,46 @@ def test_modify_ack_deadline_rest(request_type): assert response is None +def test_modify_ack_deadline_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_ack_deadline in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_ack_deadline + ] = mock_rpc + + request = {} + client.modify_ack_deadline(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.modify_ack_deadline(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_modify_ack_deadline_rest_required_fields( request_type=pubsub.ModifyAckDeadlineRequest, ): @@ -7685,6 +9249,42 @@ def test_acknowledge_rest(request_type): assert response is None +def test_acknowledge_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.acknowledge in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.acknowledge] = mock_rpc + + request = {} + client.acknowledge(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.acknowledge(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_acknowledge_rest_required_fields(request_type=pubsub.AcknowledgeRequest): transport_class = transports.SubscriberRestTransport @@ -7947,6 +9547,42 @@ def test_pull_rest(request_type): assert isinstance(response, pubsub.PullResponse) +def test_pull_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.pull in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.pull] = mock_rpc + + request = {} + client.pull(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.pull(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_pull_rest_required_fields(request_type=pubsub.PullRequest): transport_class = transports.SubscriberRestTransport @@ -8231,6 +9867,46 @@ def test_modify_push_config_rest(request_type): assert response is None +def test_modify_push_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.modify_push_config in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.modify_push_config + ] = mock_rpc + + request = {} + client.modify_push_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.modify_push_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_modify_push_config_rest_required_fields( request_type=pubsub.ModifyPushConfigRequest, ): @@ -8496,6 +10172,42 @@ def test_get_snapshot_rest(request_type): assert response.topic == "topic_value" +def test_get_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_snapshot] = mock_rpc + + request = {} + client.get_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_get_snapshot_rest_required_fields(request_type=pubsub.GetSnapshotRequest): transport_class = transports.SubscriberRestTransport @@ -8755,6 +10467,42 @@ def test_list_snapshots_rest(request_type): assert response.next_page_token == "next_page_token_value" +def test_list_snapshots_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_snapshots in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_snapshots] = mock_rpc + + request = {} + client.list_snapshots(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_snapshots(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_list_snapshots_rest_required_fields(request_type=pubsub.ListSnapshotsRequest): transport_class = transports.SubscriberRestTransport @@ -9088,6 +10836,42 @@ def test_create_snapshot_rest(request_type): assert response.topic == "topic_value" +def test_create_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_snapshot] = mock_rpc + + request = {} + client.create_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_create_snapshot_rest_required_fields( request_type=pubsub.CreateSnapshotRequest, ): @@ -9366,6 +11150,42 @@ def test_update_snapshot_rest(request_type): assert response.topic == "topic_value" +def test_update_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_snapshot] = mock_rpc + + request = {} + client.update_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_update_snapshot_rest_required_fields( request_type=pubsub.UpdateSnapshotRequest, ): @@ -9629,6 +11449,42 @@ def test_delete_snapshot_rest(request_type): assert response is None +def test_delete_snapshot_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_snapshot in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_snapshot] = mock_rpc + + request = {} + client.delete_snapshot(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_snapshot(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_delete_snapshot_rest_required_fields( request_type=pubsub.DeleteSnapshotRequest, ): @@ -9876,6 +11732,42 @@ def test_seek_rest(request_type): assert isinstance(response, pubsub.SeekResponse) +def test_seek_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.seek in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.seek] = mock_rpc + + request = {} + client.seek(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.seek(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + def test_seek_rest_required_fields(request_type=pubsub.SeekRequest): transport_class = transports.SubscriberRestTransport diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index a09d85b008c2..16a6150af9ad 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -299,9 +299,10 @@ async def test_sync_pull_warning_if_return_immediately_async(creds): client = SubscriberAsyncClient(credentials=creds) subscription_path = "projects/foo/subscriptions/bar" - patcher = mock.patch( - "google.pubsub_v1.services.subscriber.async_client.gapic_v1.method_async.wrap_method", - new=mock.AsyncMock, + patcher = mock.patch.object( + type(client.transport.pull), + "__call__", + new_callable=mock.AsyncMock, ) with patcher, pytest.warns( From 0feb3fbab2a3e9c07db6a8994a7e6c81d36e83ad Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sat, 6 Jul 2024 01:35:45 -0400 Subject: [PATCH 1089/1197] chore(main): release 2.22.0 (#1216) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json 
b/packages/google-cloud-pubsub/.release-please-manifest.json index cd075e2620a5..b433c0bb92e9 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.21.5" + ".": "2.22.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 96442ed5fb6a..55cb6c9ec3e4 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.22.0](https://github.com/googleapis/python-pubsub/compare/v2.21.5...v2.22.0) (2024-07-06) + + +### Features + +* Add service_account_email for export subscriptions ([ec0cc34](https://github.com/googleapis/python-pubsub/commit/ec0cc349b344b6882979838171b6cae4209a9b02)) +* Add use_topic_schema for Cloud Storage Subscriptions ([ec0cc34](https://github.com/googleapis/python-pubsub/commit/ec0cc349b344b6882979838171b6cae4209a9b02)) + ## [2.21.5](https://github.com/googleapis/python-pubsub/compare/v2.21.4...v2.21.5) (2024-06-20) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 1f7391fd4f17..03d6d0200b82 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.21.5" # {x-release-please-version} +__version__ = "2.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 1f7391fd4f17..03d6d0200b82 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.21.5" # {x-release-please-version} +__version__ = "2.22.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..e8d116b9bb0d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.22.0" }, "snippets": [ { From fbde57eb97b97806791b3062c1914ad973460b4a Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Sun, 7 Jul 2024 01:51:17 +0200 Subject: [PATCH 1090/1197] chore(deps): update all dependencies (#1217) --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 9955737c3037..0f8d2f938670 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.21.5 +google-cloud-pubsub==2.22.0 avro==1.11.3 protobuf===4.24.4; python_version == 
'3.7' protobuf==5.27.2; python_version >= '3.8' From a3d452d2f74da88b82d3919c67df4dde2426b3b6 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 8 Jul 2024 16:12:30 -0400 Subject: [PATCH 1091/1197] chore(python): update dependencies in .kokoro (#1220) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 22 +++++---- .../.kokoro/docker/docs/requirements.txt | 40 ++++++++-------- .../.kokoro/requirements.txt | 46 +++++++++---------- packages/google-cloud-pubsub/noxfile.py | 2 +- 5 files changed, 61 insertions(+), 53 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 76524393faf1..f9451fda6a80 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5651442a6336971a2fb2df40fb56b3337df67cafa14c0809cc89cb34ccee1b8e -# created: 2024-07-04T19:38:10.086106449Z + digest: sha256:99ab465187b4891e878ee4f9977b4a6aeeb0ceadf404870c416c50e06500eb42 +# created: 2024-07-08T16:17:14.833595692Z diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index a26ce61930f5..741084af5a10 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from ubuntu:22.04 +from ubuntu:24.04 ENV DEBIAN_FRONTEND noninteractive @@ -40,7 +40,6 @@ RUN apt-get update \ libssl-dev \ libsqlite3-dev \ portaudio19-dev \ - python3-distutils \ redis-server \ software-properties-common \ ssh \ @@ -60,18 +59,23 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.9.13 +###################### Install python 3.10.14 for docfx session -# Download python 3.9.13 -RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz +# Download python 3.10.14 +RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz # Extract files -RUN tar -xvf Python-3.9.13.tgz +RUN tar -xvf Python-3.10.14.tgz -# Install python 3.9.13 -RUN ./Python-3.9.13/configure --enable-optimizations +# Install python 3.10.14 +RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall +###################### Use python 3.10 by default + +RUN python3.10 -m venv /venv +ENV PATH /venv/bin:$PATH + ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ && python3 /tmp/get-pip.py \ @@ -84,4 +88,4 @@ RUN python3 -m pip COPY requirements.txt /requirements.txt RUN python3 -m pip install --require-hashes -r requirements.txt -CMD ["python3.8"] +CMD ["python3.10"] diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt index 0e5d70f20f83..7129c7715594 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.2.3 \ - --hash=sha256:bf7900329262e481be5a15f56f19736b376df6f82ed27576fa893652c5de6c23 \ - --hash=sha256:c12355e0494c76a2a7b73e3a59b09024ca0ba1e279fb9ed6c1b82d5b74b6a70c +argcomplete==3.4.0 \ + 
--hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ + --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f # via nox colorlog==6.8.2 \ --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ @@ -16,23 +16,27 @@ distlib==0.3.8 \ --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 # via virtualenv -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c +filelock==3.15.4 \ + --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ + --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 # via virtualenv -nox==2024.3.2 \ - --hash=sha256:e53514173ac0b98dd47585096a55572fe504fecede58ced708979184d05440be \ - --hash=sha256:f521ae08a15adbf5e11f16cb34e8d0e6ea521e0b92868f684e91677deb974553 +nox==2024.4.15 \ + --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ + --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f # via -r requirements.in -packaging==24.0 \ - --hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \ - --hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9 +packaging==24.1 \ + --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ + --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.0 \ - --hash=sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068 \ - --hash=sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768 +platformdirs==4.2.2 \ + --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ + 
--hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 # via virtualenv -virtualenv==20.25.1 \ - --hash=sha256:961c026ac520bac5f69acb8ea063e8a4f071bcc9457b9c1f28f6b085c511583a \ - --hash=sha256:e08e13ecdca7a0bd53798f356d5831434afa5b07b93f0abdf0797b7a06ffe197 +tomli==2.0.1 \ + --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ + --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f + # via nox +virtualenv==20.26.3 \ + --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ + --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 # via nox diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 35ece0e4d2e9..9622baf0ba38 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -20,9 +20,9 @@ cachetools==5.3.3 \ --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 # via google-auth -certifi==2024.6.2 \ - --hash=sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516 \ - --hash=sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56 +certifi==2024.7.4 \ + --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ + --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 # via requests cffi==1.16.0 \ --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ @@ -371,23 +371,23 @@ more-itertools==10.3.0 \ # via # jaraco-classes # jaraco-functools -nh3==0.2.17 \ - --hash=sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a \ - --hash=sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911 \ - 
--hash=sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb \ - --hash=sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a \ - --hash=sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc \ - --hash=sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028 \ - --hash=sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9 \ - --hash=sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3 \ - --hash=sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351 \ - --hash=sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10 \ - --hash=sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71 \ - --hash=sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f \ - --hash=sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b \ - --hash=sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a \ - --hash=sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062 \ - --hash=sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a +nh3==0.2.18 \ + --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ + --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ + --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ + --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ + --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ + --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ + --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ + --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ + --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ + 
--hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ + --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ + --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ + --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ + --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ + --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ + --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer nox==2024.4.15 \ --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ @@ -460,9 +460,9 @@ python-dateutil==2.9.0.post0 \ --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 # via gcp-releasetool -readme-renderer==43.0 \ - --hash=sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311 \ - --hash=sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9 +readme-renderer==44.0 \ + --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ + --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 # via twine requests==2.32.3 \ --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 1d9b72ca71a7..c6d0c11a37ce 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -341,7 +341,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" From 2374d6be62682e223846df65badd837836ea627d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 8 
Jul 2024 14:06:53 -0700 Subject: [PATCH 1092/1197] chore(python): use python 3.10 for docs build (#1221) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index f9451fda6a80..f30cb3775afc 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:99ab465187b4891e878ee4f9977b4a6aeeb0ceadf404870c416c50e06500eb42 -# created: 2024-07-08T16:17:14.833595692Z + digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e +# created: 2024-07-08T19:25:35.862283192Z diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index 741084af5a10..5205308b334d 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -59,7 +59,8 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb -###################### Install python 3.10.14 for docfx session + +###################### Install python 3.10.14 for docs/docfx session # Download python 3.10.14 RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz @@ -71,8 +72,6 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -###################### Use python 3.10 by default - RUN python3.10 -m venv /venv ENV PATH /venv/bin:$PATH From 25c2a4e5a67d234874e7cd35b322656401531866 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" 
<78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 11:58:29 -0400 Subject: [PATCH 1093/1197] feat: add max messages batching for Cloud Storage subscriptions (#1224) Co-authored-by: Owl Bot --- .../google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 8 ++++++++ .../snippet_metadata_google.pubsub.v1.json | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index a45a34a14a82..5f826601494d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1432,6 +1432,10 @@ class CloudStorageConfig(proto.Message): Storage file before a new file is created. Min 1 KB, max 10 GiB. The max_bytes limit may be exceeded in cases where messages are larger than the limit. + max_messages (int): + Optional. The maximum number of messages that + can be written to a Cloud Storage file before a + new file is created. Min 1000 messages. state (google.pubsub_v1.types.CloudStorageConfig.State): Output only. 
An output-only field that indicates whether or not the subscription can @@ -1551,6 +1555,10 @@ class AvroConfig(proto.Message): proto.INT64, number=7, ) + max_messages: int = proto.Field( + proto.INT64, + number=8, + ) state: State = proto.Field( proto.ENUM, number=9, diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index e8d116b9bb0d..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.22.0" + "version": "0.1.0" }, "snippets": [ { From df397ccc2f6844f039c02835eae805d7f3472e8e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 29 Jul 2024 09:54:04 -0700 Subject: [PATCH 1094/1197] chore(main): release 2.23.0 (#1230) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index b433c0bb92e9..b6be5b944370 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.22.0" + ".": "2.23.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md 
b/packages/google-cloud-pubsub/CHANGELOG.md index 55cb6c9ec3e4..a410d0dc40ea 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.23.0](https://github.com/googleapis/python-pubsub/compare/v2.22.0...v2.23.0) (2024-07-29) + + +### Features + +* Add max messages batching for Cloud Storage subscriptions ([#1224](https://github.com/googleapis/python-pubsub/issues/1224)) ([91c89d3](https://github.com/googleapis/python-pubsub/commit/91c89d36c5099591408ab0661c55929e786b1b04)) + ## [2.22.0](https://github.com/googleapis/python-pubsub/compare/v2.21.5...v2.22.0) (2024-07-06) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 03d6d0200b82..f01e1d3a583c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.22.0" # {x-release-please-version} +__version__ = "2.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 03d6d0200b82..f01e1d3a583c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.22.0" # {x-release-please-version} +__version__ = "2.23.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..d391b168b386 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.23.0" }, "snippets": [ { From 5a16993a8c667b9d88983decce4dfca4901e7c77 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 12 Aug 2024 09:38:17 -0700 Subject: [PATCH 1095/1197] chore: Update gapic-generator-python to v1.18.5 (#1231) Co-authored-by: Owl Bot --- .../services/publisher/async_client.py | 11 +- .../pubsub_v1/services/publisher/client.py | 8 +- .../pubsub_v1/services/publisher/pagers.py | 97 ++++++++++- .../services/schema_service/async_client.py | 9 +- .../services/schema_service/client.py | 6 +- .../services/schema_service/pagers.py | 69 +++++++- .../services/subscriber/async_client.py | 9 +- .../pubsub_v1/services/subscriber/client.py | 6 +- .../pubsub_v1/services/subscriber/pagers.py | 69 +++++++- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../unit/gapic/pubsub_v1/test_publisher.py | 102 +++++++----- .../gapic/pubsub_v1/test_schema_service.py | 103 +++++++----- .../unit/gapic/pubsub_v1/test_subscriber.py | 157 ++++++++++-------- 13 files changed, 471 insertions(+), 177 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 2b6df487d012..eda259ad5265 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. # from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -192,9 +191,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(PublisherClient).get_transport_class, type(PublisherClient) - ) + get_transport_class = PublisherClient.get_transport_class def __init__( self, @@ -826,6 +823,8 @@ async def sample_list_topics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -946,6 +945,8 @@ async def sample_list_topic_subscriptions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1070,6 +1071,8 @@ async def sample_list_topic_snapshots(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index f07c4bbd497c..5a4b5a6ffa25 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -707,7 +707,7 @@ def __init__( transport_init: Union[ Type[PublisherTransport], Callable[..., PublisherTransport] ] = ( - type(self).get_transport_class(transport) + PublisherClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., PublisherTransport], transport) ) @@ -1284,6 +1284,8 @@ def sample_list_topics(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1401,6 +1403,8 @@ def sample_list_topic_subscriptions(): 
method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1522,6 +1526,8 @@ def sample_list_topic_snapshots(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index 21ae22b8a6fa..de3490c39300 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.pubsub_v1.types import pubsub @@ -51,6 +64,8 @@ def __init__( request: pubsub.ListTopicsRequest, response: pubsub.ListTopicsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListTopicsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = pubsub.ListTopicsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[pubsub.ListTopicsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[pubsub.Topic]: @@ -113,6 +138,8 @@ def __init__( request: pubsub.ListTopicsRequest, response: pubsub.ListTopicsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListTopicsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListTopicsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[pubsub.ListTopicsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[pubsub.Topic]: @@ -179,6 +216,8 @@ def __init__( request: pubsub.ListTopicSubscriptionsRequest, response: pubsub.ListTopicSubscriptionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -190,12 +229,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListTopicSubscriptionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -206,7 +250,12 @@ def pages(self) -> Iterator[pubsub.ListTopicSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -241,6 +290,8 @@ def __init__( request: pubsub.ListTopicSubscriptionsRequest, response: pubsub.ListTopicSubscriptionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -252,12 +303,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListTopicSubscriptionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListTopicSubscriptionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -268,7 +324,12 @@ async def pages(self) -> AsyncIterator[pubsub.ListTopicSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[str]: @@ -307,6 +368,8 @@ def __init__( request: pubsub.ListTopicSnapshotsRequest, response: pubsub.ListTopicSnapshotsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -318,12 +381,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListTopicSnapshotsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListTopicSnapshotsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -334,7 +402,12 @@ def pages(self) -> Iterator[pubsub.ListTopicSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[str]: @@ -369,6 +442,8 @@ def __init__( request: pubsub.ListTopicSnapshotsRequest, response: pubsub.ListTopicSnapshotsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -380,12 +455,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListTopicSnapshotsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListTopicSnapshotsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -396,7 +476,12 @@ async def pages(self) -> AsyncIterator[pubsub.ListTopicSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[str]: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index f8d1ac4da7ec..9c8eecaecff0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -191,9 +190,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SchemaServiceClient).get_transport_class, type(SchemaServiceClient) - ) + get_transport_class = SchemaServiceClient.get_transport_class def __init__( self, @@ -609,6 +606,8 @@ async def sample_list_schemas(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -726,6 +725,8 @@ async def sample_list_schema_revisions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index ca9d4f19fe6b..dff44944dda6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -659,7 +659,7 @@ def __init__( transport_init: Union[ Type[SchemaServiceTransport], Callable[..., SchemaServiceTransport] ] = ( - type(self).get_transport_class(transport) + SchemaServiceClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SchemaServiceTransport], transport) ) @@ -1024,6 +1024,8 @@ def sample_list_schemas(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1138,6 +1140,8 @@ def sample_list_schema_revisions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index 309e57f5356e..fa42a6b8cc2c 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.pubsub_v1.types import schema @@ -51,6 +64,8 @@ def __init__( request: schema.ListSchemasRequest, response: schema.ListSchemasResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSchemasResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = schema.ListSchemasRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[schema.ListSchemasResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[schema.Schema]: @@ -113,6 +138,8 @@ def __init__( request: schema.ListSchemasRequest, response: schema.ListSchemasResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSchemasResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = schema.ListSchemasRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[schema.ListSchemasResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[schema.Schema]: @@ -179,6 +216,8 @@ def __init__( request: schema.ListSchemaRevisionsRequest, response: schema.ListSchemaRevisionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -190,12 +229,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSchemaRevisionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = schema.ListSchemaRevisionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -206,7 +250,12 @@ def pages(self) -> Iterator[schema.ListSchemaRevisionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[schema.Schema]: @@ -241,6 +290,8 @@ def __init__( request: schema.ListSchemaRevisionsRequest, response: schema.ListSchemaRevisionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -252,12 +303,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSchemaRevisionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = schema.ListSchemaRevisionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -268,7 +324,12 @@ async def pages(self) -> AsyncIterator[schema.ListSchemaRevisionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[schema.Schema]: diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 1a7295131053..dcd4f0bbbce0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -14,7 +14,6 @@ # limitations under the License. 
# from collections import OrderedDict -import functools import re from typing import ( Dict, @@ -198,9 +197,7 @@ def universe_domain(self) -> str: """ return self._client._universe_domain - get_transport_class = functools.partial( - type(SubscriberClient).get_transport_class, type(SubscriberClient) - ) + get_transport_class = SubscriberClient.get_transport_class def __init__( self, @@ -803,6 +800,8 @@ async def sample_list_subscriptions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -1736,6 +1735,8 @@ async def sample_list_snapshots(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index bc7639c5931b..9bad804c620f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -711,7 +711,7 @@ def __init__( transport_init: Union[ Type[SubscriberTransport], Callable[..., SubscriberTransport] ] = ( - type(self).get_transport_class(transport) + SubscriberClient.get_transport_class(transport) if isinstance(transport, str) or transport is None else cast(Callable[..., SubscriberTransport], transport) ) @@ -1260,6 +1260,8 @@ def sample_list_subscriptions(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) @@ -2177,6 +2179,8 @@ def sample_list_snapshots(): method=rpc, request=request, response=response, + retry=retry, + timeout=timeout, metadata=metadata, ) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index 94c88b9b3d93..c09c42027427 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async from typing import ( Any, AsyncIterator, @@ -22,8 +25,18 @@ Tuple, Optional, Iterator, + Union, ) +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + from google.pubsub_v1.types import pubsub @@ -51,6 +64,8 @@ def __init__( request: pubsub.ListSubscriptionsRequest, response: pubsub.ListSubscriptionsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -62,12 +77,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSubscriptionsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListSubscriptionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -78,7 +98,12 @@ def pages(self) -> Iterator[pubsub.ListSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[pubsub.Subscription]: @@ -113,6 +138,8 @@ def __init__( request: pubsub.ListSubscriptionsRequest, response: pubsub.ListSubscriptionsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -124,12 +151,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSubscriptionsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListSubscriptionsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -140,7 +172,12 @@ async def pages(self) -> AsyncIterator[pubsub.ListSubscriptionsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[pubsub.Subscription]: @@ -179,6 +216,8 @@ def __init__( request: pubsub.ListSnapshotsRequest, response: pubsub.ListSnapshotsResponse, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiate the pager. @@ -190,12 +229,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSnapshotsResponse): The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListSnapshotsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -206,7 +250,12 @@ def pages(self) -> Iterator[pubsub.ListSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __iter__(self) -> Iterator[pubsub.Snapshot]: @@ -241,6 +290,8 @@ def __init__( request: pubsub.ListSnapshotsRequest, response: pubsub.ListSnapshotsResponse, *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, str]] = () ): """Instantiates the pager. @@ -252,12 +303,17 @@ def __init__( The initial request object. response (google.pubsub_v1.types.ListSnapshotsResponse): The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
""" self._method = method self._request = pubsub.ListSnapshotsRequest(request) self._response = response + self._retry = retry + self._timeout = timeout self._metadata = metadata def __getattr__(self, name: str) -> Any: @@ -268,7 +324,12 @@ async def pages(self) -> AsyncIterator[pubsub.ListSnapshotsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) yield self._response def __aiter__(self) -> AsyncIterator[pubsub.Snapshot]: diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d391b168b386..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.23.0" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 6f1c95722964..5af39dbf8421 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import 
iam_policy_pb2 # type: ignore @@ -1261,22 +1262,23 @@ async def test_create_topic_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_topic - ] = mock_object + ] = mock_rpc request = {} await client.create_topic(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1632,22 +1634,23 @@ async def test_update_topic_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_topic - ] = mock_object + ] = mock_rpc request = {} await client.update_topic(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2006,22 +2009,23 @@ async def test_publish_async_use_cached_wrapped_rpc(transport: str = "grpc_async ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.publish - ] = mock_object + ] = mock_rpc request = {} await client.publish(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.publish(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2387,22 +2391,23 @@ async def test_get_topic_async_use_cached_wrapped_rpc(transport: str = "grpc_asy ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_topic - ] = mock_object + ] = mock_rpc request = {} await client.get_topic(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2755,22 +2760,23 @@ async def test_list_topics_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_topics - ] = mock_object + ] = mock_rpc request = {} await client.list_topics(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_topics(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2993,12 +2999,16 @@ def test_list_topics_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) - pager = client.list_topics(request={}) + pager = client.list_topics(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3327,22 +3337,23 @@ async def test_list_topic_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_topic_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_topic_subscriptions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_topic_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3579,12 +3590,18 @@ def test_list_topic_subscriptions_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), ) - pager = client.list_topic_subscriptions(request={}) + pager = client.list_topic_subscriptions( + request={}, retry=retry, timeout=timeout + ) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3918,22 +3935,23 @@ async def test_list_topic_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_topic_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_topic_snapshots(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_topic_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4170,12 +4188,16 @@ def test_list_topic_snapshots_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("topic", ""),)), ) - pager = client.list_topic_snapshots(request={}) + pager = client.list_topic_snapshots(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -4485,22 +4507,23 @@ async def test_delete_topic_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_topic - ] = mock_object + ] = mock_rpc request = {} await client.delete_topic(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_topic(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4847,22 +4870,23 @@ async def test_detach_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.detach_subscription - ] = mock_object + ] = mock_rpc request = {} await client.detach_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.detach_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index f44f6846f52a..7be5183788b7 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -43,6 +43,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1300,22 +1301,23 @@ async def test_create_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = 
mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_schema - ] = mock_object + ] = mock_rpc request = {} await client.create_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1693,22 +1695,23 @@ async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_as ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_schema - ] = mock_object + ] = mock_rpc request = {} await client.get_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2061,22 +2064,23 @@ async def test_list_schemas_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_schemas - ] = mock_object + ] = mock_rpc request = {} await client.list_schemas(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_schemas(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2299,12 +2303,16 @@ def test_list_schemas_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - pager = client.list_schemas(request={}) + pager = client.list_schemas(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -2630,22 +2638,23 @@ async def test_list_schema_revisions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_schema_revisions - ] = mock_object + ] = mock_rpc request = {} await client.list_schema_revisions(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_schema_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2880,12 +2889,16 @@ def test_list_schema_revisions_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), ) - pager = client.list_schema_revisions(request={}) + pager = client.list_schema_revisions(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3211,22 +3224,23 @@ async def test_commit_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.commit_schema - ] = mock_object + ] = mock_rpc request = {} await client.commit_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.commit_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3598,22 +3612,23 @@ async def test_rollback_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.rollback_schema - ] = mock_object + ] = mock_rpc request = {} await client.rollback_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.rollback_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3998,22 +4013,23 @@ async def test_delete_schema_revision_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_schema_revision - ] = mock_object + ] = mock_rpc request = {} await client.delete_schema_revision(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_schema_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4377,22 +4393,23 @@ async def test_delete_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_schema - ] = mock_object + ] = mock_rpc request = {} await client.delete_schema(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4727,22 +4744,23 @@ async def test_validate_schema_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_schema - ] = mock_object + ] = mock_rpc request = {} await client.validate_schema(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_schema(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5097,22 +5115,23 @@ async def test_validate_message_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.validate_message - ] = mock_object + ] = mock_rpc request = {} await client.validate_message(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.validate_message(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 0251bda96ea4..4bef862ec680 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -44,6 +44,7 @@ from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async from google.api_core import path_template +from google.api_core import retry as retries from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -1294,22 +1295,23 @@ async def test_create_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + 
mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_subscription - ] = mock_object + ] = mock_rpc request = {} await client.create_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -1736,22 +1738,23 @@ async def test_get_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_subscription - ] = mock_object + ] = mock_rpc request = {} await client.get_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2144,22 +2147,23 @@ async def test_update_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_subscription - ] = mock_object + ] = mock_rpc request = {} await client.update_subscription(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2554,22 +2558,23 @@ async def test_list_subscriptions_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_subscriptions - ] = mock_object + ] = mock_rpc request = {} await client.list_subscriptions(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_subscriptions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -2804,12 +2809,16 @@ def test_list_subscriptions_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) - pager = client.list_subscriptions(request={}) + pager = client.list_subscriptions(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -3131,22 +3140,23 @@ async def test_delete_subscription_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_subscription - ] = mock_object + ] = mock_rpc request = {} await 
client.delete_subscription(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_subscription(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3501,22 +3511,23 @@ async def test_modify_ack_deadline_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.modify_ack_deadline - ] = mock_object + ] = mock_rpc request = {} await client.modify_ack_deadline(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.modify_ack_deadline(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -3879,22 +3890,23 @@ async def test_acknowledge_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.acknowledge - ] = mock_object + ] = mock_rpc request = {} await client.acknowledge(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.acknowledge(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4234,22 +4246,23 @@ async def test_pull_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.pull - ] = mock_object + ] = mock_rpc request = {} await client.pull(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.pull(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4541,22 +4554,23 @@ async def test_streaming_pull_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.streaming_pull - ] = mock_object + ] = mock_rpc request = [{}] await client.streaming_pull(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.streaming_pull(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -4766,22 +4780,23 @@ async def test_modify_push_config_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.modify_push_config - ] = mock_object + ] = mock_rpc request = {} await client.modify_push_config(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.modify_push_config(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5144,22 +5159,23 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.get_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.get_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.get_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5508,22 +5524,23 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.list_snapshots - ] = mock_object + ] = mock_rpc request = {} await client.list_snapshots(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.list_snapshots(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -5746,12 +5763,16 @@ def test_list_snapshots_pager(transport_name: str = "grpc"): ) expected_metadata = () + retry = retries.Retry() + timeout = 5 expected_metadata = tuple(expected_metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("project", ""),)), ) - pager = client.list_snapshots(request={}) + pager = client.list_snapshots(request={}, retry=retry, timeout=timeout) assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout results = list(pager) assert len(results) == 6 @@ -6067,22 +6088,23 @@ async def test_create_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.create_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.create_snapshot(request) # Establish 
that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.create_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6438,22 +6460,23 @@ async def test_update_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.update_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.update_snapshot(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.update_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -6803,22 +6826,23 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.delete_snapshot - ] = mock_object + ] = mock_rpc request = {} await client.delete_snapshot(request) # Establish that the underlying gRPC stub method was called. 
- assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.delete_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio @@ -7150,22 +7174,23 @@ async def test_seek_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio" ) # Replace cached wrapped function with mock - mock_object = mock.AsyncMock() + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ client._client._transport.seek - ] = mock_object + ] = mock_rpc request = {} await client.seek(request) # Establish that the underlying gRPC stub method was called. - assert mock_object.call_count == 1 + assert mock_rpc.call_count == 1 await client.seek(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 - assert mock_object.call_count == 2 + assert mock_rpc.call_count == 2 @pytest.mark.asyncio From ca5a98531b416c24aa05b6084b80a0fc2ff72ab4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Sep 2024 13:50:59 -0400 Subject: [PATCH 1096/1197] chore(python): update unittest workflow template (#1242) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.github/workflows/unittest.yml | 1 + .../.kokoro/docker/docs/Dockerfile | 9 ++++----- .../.kokoro/publish-docs.sh | 20 +++++++++---------- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index f30cb3775afc..f8bd8149fa87 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:52210e0e0559f5ea8c52be148b33504022e1faef4e95fbe4b32d68022af2fa7e -# created: 2024-07-08T19:25:35.862283192Z + digest: sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 +# created: 2024-09-04T14:50:52.658171431Z diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index f4a337c496a0..dd8bd76922f9 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -30,6 +30,7 @@ jobs: with: name: coverage-artifact-${{ matrix.python }} path: .coverage-${{ matrix.python }} + include-hidden-files: true cover: runs-on: ubuntu-latest diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile index 5205308b334d..e5410e296bd8 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile @@ -72,19 +72,18 @@ RUN tar -xvf Python-3.10.14.tgz RUN ./Python-3.10.14/configure --enable-optimizations RUN make altinstall -RUN python3.10 -m venv /venv -ENV PATH /venv/bin:$PATH +ENV PATH /usr/local/bin/python3.10:$PATH ###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3 /tmp/get-pip.py \ + && python3.10 /tmp/get-pip.py \ && rm /tmp/get-pip.py # Test pip -RUN python3 -m pip +RUN python3.10 -m pip # Install build requirements COPY requirements.txt /requirements.txt -RUN python3 -m pip install --require-hashes -r requirements.txt +RUN python3.10 -m pip install --require-hashes -r requirements.txt CMD ["python3.10"] diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index 38f083f05aa0..233205d580e9 100755 --- 
a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -21,18 +21,18 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3 -m pip install --require-hashes -r .kokoro/requirements.txt -python3 -m nox --version +python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt +python3.10 -m nox --version # build docs nox -s docs # create metadata -python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -40,18 +40,18 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" # docfx yaml files nox -s docfx # create metadata. 
-python3 -m docuploader create-metadata \ +python3.10 -m docuploader create-metadata \ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3 setup.py --version) \ + --version=$(python3.10 setup.py --version) \ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3 setup.py --name) \ + --distribution-name=$(python3.10 setup.py --name) \ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) @@ -59,4 +59,4 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" +python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" From 0ba0e1047e0384d037b5487f182726f1ff8efec7 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Mon, 9 Sep 2024 18:21:46 +0200 Subject: [PATCH 1097/1197] chore(deps): update all dependencies (#1227) Co-authored-by: Owl Bot --- .../samples/snippets/requirements-test.txt | 4 ++-- .../google-cloud-pubsub/samples/snippets/requirements.txt | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 43e018272efc..c705889ef4e6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,7 +1,7 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' -pytest==8.2.2; python_version >= '3.8' +pytest==8.3.2; python_version >= '3.8' mock==5.1.0 flaky==3.8.1 
google-cloud-bigquery==3.25.0 -google-cloud-storage==2.17.0 +google-cloud-storage==2.18.2 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 0f8d2f938670..1a6ec636c79b 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ -google-cloud-pubsub==2.22.0 -avro==1.11.3 +google-cloud-pubsub==2.23.0 +avro==1.12.0 protobuf===4.24.4; python_version == '3.7' -protobuf==5.27.2; python_version >= '3.8' -avro==1.11.3 +protobuf==5.27.4; python_version >= '3.8' +avro==1.12.0 From 4f2dfabc4be5156c11b82cdaad1ae07c2f8ee025 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Mon, 9 Sep 2024 15:35:56 -0400 Subject: [PATCH 1098/1197] fix: Replace asserts with None checks for graceful shutdown (#1244) --- .../_protocol/streaming_pull_manager.py | 7 ++- .../subscriber/test_streaming_pull_manager.py | 56 +++++++++++++++++++ 2 files changed, 61 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index b8531db17dc8..c01dd7f2ec2f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -1104,8 +1104,11 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: ) with self._pause_resume_lock: - assert self._scheduler is not None - assert self._leaser is not None + if self._scheduler is None or self._leaser is None: + _LOGGER.debug( + f"self._scheduler={self._scheduler} or self._leaser={self._leaser} is None. Stopping further processing." 
+ ) + return for received_message in received_messages: if ( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 278f3e88ebde..d4ce2cfdb9c0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1375,6 +1375,62 @@ def test_close_blocking_scheduler_shutdown(): scheduler.shutdown.assert_called_once_with(await_msg_callbacks=True) +def test__on_response_none_scheduler(): + manager, _, _, _, _, _ = make_running_manager() + + manager._callback = mock.sentinel.callback + manager._scheduler = None + # Set up the messages. + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack1", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="ack2", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), + delivery_attempt=6, + ), + ] + ) + + manager._maybe_release_messages = mock.Mock() + + # adjust message bookkeeping in leaser + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + manager._on_response(response) + + manager._maybe_release_messages.assert_not_called + + +def test__on_response_none_leaser(): + manager, _, _, _, _, _ = make_running_manager() + + manager._callback = mock.sentinel.callback + manager._leaser = None + # Set up the messages. 
+ response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack1", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="ack2", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), + delivery_attempt=6, + ), + ] + ) + + manager._maybe_release_messages = mock.Mock() + + manager._on_response(response) + + manager._maybe_release_messages.assert_not_called + + def test_close_nonblocking_scheduler_shutdown(): manager, _, _, _, _, _ = make_running_manager(await_callbacks_on_shutdown=False) scheduler = manager._scheduler From f1653b2fa8438d66ee11961c3b827980609929a8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Sep 2024 13:17:52 -0700 Subject: [PATCH 1099/1197] chore(main): release 2.23.1 (#1246) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index b6be5b944370..94d5d8cc48af 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.23.0" + ".": "2.23.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index a410d0dc40ea..93db31182095 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: 
https://pypi.org/project/google-cloud-pubsub/#history +## [2.23.1](https://github.com/googleapis/python-pubsub/compare/v2.23.0...v2.23.1) (2024-09-09) + + +### Bug Fixes + +* Replace asserts with None checks for graceful shutdown ([#1244](https://github.com/googleapis/python-pubsub/issues/1244)) ([ced4f52](https://github.com/googleapis/python-pubsub/commit/ced4f527c7f918a87d1b89c2b5da59dbdf00e2c3)) + ## [2.23.0](https://github.com/googleapis/python-pubsub/compare/v2.22.0...v2.23.0) (2024-07-29) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index f01e1d3a583c..008f4dd36be8 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.23.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index f01e1d3a583c..008f4dd36be8 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.23.0" # {x-release-please-version} +__version__ = "2.23.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..c1602f5baa98 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.23.1" }, "snippets": [ { From e8806930b63819a2ec303937c0fe145bc7af178f Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Tue, 10 Sep 2024 01:19:13 +0200 Subject: [PATCH 1100/1197] chore(deps): update all dependencies (#1245) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 1a6ec636c79b..b2dfe2d9296a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,5 +1,5 @@ google-cloud-pubsub==2.23.0 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' -protobuf==5.27.4; python_version >= '3.8' +protobuf==5.28.0; python_version >= '3.8' avro==1.12.0 From c91e8f4b9e17b0dce7628bf7df9482ef4016e39c Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Mon, 16 Sep 2024 13:59:20 -0400 Subject: [PATCH 1101/1197] feat: Open Telemetry Publish Side Support (#1241) --- .../pubsub_v1/open_telemetry/__init__.py | 13 + .../open_telemetry/context_propagation.py | 39 ++ 
.../open_telemetry/publish_message_wrapper.py | 142 +++++ .../cloud/pubsub_v1/publisher/_batch/base.py | 8 +- .../pubsub_v1/publisher/_batch/thread.py | 111 +++- .../publisher/_sequencer/ordered_sequencer.py | 14 +- .../_sequencer/unordered_sequencer.py | 12 +- .../cloud/pubsub_v1/publisher/client.py | 139 ++++- .../google/cloud/pubsub_v1/types.py | 3 + packages/google-cloud-pubsub/setup.py | 2 + .../tests/unit/pubsub_v1/conftest.py | 22 +- .../pubsub_v1/publisher/batch/test_base.py | 5 +- .../pubsub_v1/publisher/batch/test_thread.py | 515 +++++++++++++++--- .../sequencer/test_ordered_sequencer.py | 7 +- .../sequencer/test_unordered_sequencer.py | 11 +- .../publisher/test_publish_message_wrapper.py | 55 ++ .../publisher/test_publisher_client.py | 265 ++++++++- 17 files changed, 1220 insertions(+), 143 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/__init__.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/__init__.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/__init__.py new file mode 100644 index 000000000000..e88bb5dbbaa2 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py new file mode 100644 index 000000000000..37fad3e20106 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py @@ -0,0 +1,39 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from opentelemetry.propagators.textmap import Setter + +from google.pubsub_v1 import PubsubMessage + + +class OpenTelemetryContextSetter(Setter): + """ + Used by Open Telemetry for context propagation. + """ + + def set(self, carrier: PubsubMessage, key: str, value: str) -> None: + """ + Injects trace context into Pub/Sub message attributes with + "googclient_" prefix. + + Args: + carrier(PubsubMessage): The Pub/Sub message which is the carrier of Open Telemetry + data. + key(str): The key for which the Open Telemetry context data needs to be set. 
+ value(str): The Open Telemetry context value to be set. + + Returns: + None + """ + carrier.attributes["googclient_" + key] = value diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py new file mode 100644 index 000000000000..e03a8f800efa --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/publish_message_wrapper.py @@ -0,0 +1,142 @@ +# Copyright 2017, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import sys +from datetime import datetime +from typing import Optional + +from opentelemetry import trace +from opentelemetry.trace.propagation import set_span_in_context +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator + +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextSetter, +) + + +class PublishMessageWrapper: + _OPEN_TELEMETRY_TRACER_NAME: str = "google.cloud.pubsub_v1" + _OPEN_TELEMETRY_MESSAGING_SYSTEM: str = "gcp_pubsub" + _OPEN_TELEMETRY_PUBLISHER_BATCHING = "publisher batching" + + _PUBLISH_START_EVENT: str = "publish start" + _PUBLISH_FLOW_CONTROL: str = "publisher flow control" + + def __init__(self, message: gapic_types.PubsubMessage): + self._message: gapic_types.PubsubMessage = message + self._create_span: Optional[trace.Span] = None + self._flow_control_span: Optional[trace.Span] = None + self._batching_span: Optional[trace.Span] = None + + @property + def message(self): + return self._message + + @message.setter # type: ignore[no-redef] # resetting message value is intentional here + def message(self, message: gapic_types.PubsubMessage): + self._message = message + + @property + def create_span(self): + return self._create_span + + def __eq__(self, other): # pragma: NO COVER + """Used for pytest asserts to compare two PublishMessageWrapper objects with the same message""" + if isinstance(self, other.__class__): + return self.message == other.message + return False + + def start_create_span(self, topic: str, ordering_key: str) -> None: + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + assert len(topic.split("/")) == 4 + topic_short_name = topic.split("/")[3] + with tracer.start_as_current_span( + name=f"{topic_short_name} create", + attributes={ + "messaging.system": self._OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.destination.name": topic_short_name, + "code.function": "publish", + 
"messaging.gcp_pubsub.message.ordering_key": ordering_key, + "messaging.operation": "create", + "gcp.project_id": topic.split("/")[1], + "messaging.message.body.size": sys.getsizeof( + self._message.data + ), # sys.getsizeof() used since the attribute expects size of message body in bytes + }, + kind=trace.SpanKind.PRODUCER, + end_on_exit=False, + ) as create_span: + create_span.add_event( + name=self._PUBLISH_START_EVENT, + attributes={ + "timestamp": str(datetime.now()), + }, + ) + self._create_span = create_span + TraceContextTextMapPropagator().inject( + carrier=self._message, + setter=OpenTelemetryContextSetter(), + ) + + def end_create_span(self, exc: Optional[BaseException] = None) -> None: + assert self._create_span is not None + if exc: + self._create_span.record_exception(exception=exc) + self._create_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._create_span.end() + + def start_publisher_flow_control_span(self) -> None: + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + assert self._create_span is not None + with tracer.start_as_current_span( + name=self._PUBLISH_FLOW_CONTROL, + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._create_span), + end_on_exit=False, + ) as flow_control_span: + self._flow_control_span = flow_control_span + + def end_publisher_flow_control_span( + self, exc: Optional[BaseException] = None + ) -> None: + assert self._flow_control_span is not None + if exc: + self._flow_control_span.record_exception(exception=exc) + self._flow_control_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._flow_control_span.end() + + def start_publisher_batching_span(self) -> None: + assert self._create_span is not None + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=self._OPEN_TELEMETRY_PUBLISHER_BATCHING, + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._create_span), + 
end_on_exit=False, + ) as batching_span: + self._batching_span = batching_span + + def end_publisher_batching_span(self, exc: Optional[BaseException] = None) -> None: + assert self._batching_span is not None + if exc: + self._batching_span.record_exception(exception=exc) + self._batching_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._batching_span.end() diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py index 52505996be0b..c91e0a444c77 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/base.py @@ -19,6 +19,10 @@ import typing from typing import Optional, Sequence +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud import pubsub_v1 @@ -54,7 +58,7 @@ class Batch(metaclass=abc.ABCMeta): def __len__(self): """Return the number of messages currently in the batch.""" - return len(self.messages) + return len(self.message_wrappers) @staticmethod @abc.abstractmethod @@ -68,7 +72,7 @@ def make_lock(): # pragma: NO COVER @property @abc.abstractmethod - def messages(self) -> Sequence["gapic_types.PubsubMessage"]: # pragma: NO COVER + def message_wrappers(self) -> Sequence[PublishMessageWrapper]: # pragma: NO COVER """Return the messages currently in the batch. 
Returns: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index 1617f8c90288..c4bf67c35b84 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -19,13 +19,19 @@ import time import typing from typing import Any, Callable, List, Optional, Sequence +from datetime import datetime +from opentelemetry import trace import google.api_core.exceptions from google.api_core import gapic_v1 + from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures from google.cloud.pubsub_v1.publisher._batch import base from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud import pubsub_v1 @@ -85,6 +91,9 @@ class Batch(base.Batch): timeout is used. """ + _OPEN_TELEMETRY_TRACER_NAME: str = "google.cloud.pubsub_v1" + _OPEN_TELEMETRY_MESSAGING_SYSTEM: str = "gcp_pubsub" + def __init__( self, client: "PublisherClient", @@ -108,7 +117,7 @@ def __init__( # status changed from ACCEPTING_MESSAGES to any other # in order to avoid race conditions self._futures: List[futures.Future] = [] - self._messages: List[gapic_types.PubsubMessage] = [] + self._message_wrappers: List[PublishMessageWrapper] = [] self._status = base.BatchStatus.ACCEPTING_MESSAGES # The initial size is not zero, we need to account for the size overhead @@ -119,6 +128,10 @@ def __init__( self._commit_retry = commit_retry self._commit_timeout = commit_timeout + # Publish RPC Span that will be set by method `_start_publish_rpc_span` + # if Open Telemetry is enabled. 
+ self._rpc_span: Optional[trace.Span] = None + @staticmethod def make_lock() -> threading.Lock: """Return a threading lock. @@ -134,9 +147,9 @@ def client(self) -> "PublisherClient": return self._client @property - def messages(self) -> Sequence[gapic_types.PubsubMessage]: - """The messages currently in the batch.""" - return self._messages + def message_wrappers(self) -> Sequence[PublishMessageWrapper]: + """The message wrappers currently in the batch.""" + return self._message_wrappers @property def settings(self) -> "types.BatchSettings": @@ -226,6 +239,38 @@ def _start_commit_thread(self) -> None: ) commit_thread.start() + def _start_publish_rpc_span(self) -> None: + tracer = trace.get_tracer(self._OPEN_TELEMETRY_TRACER_NAME) + links = [] + + for wrapper in self._message_wrappers: + span = wrapper.create_span + # Add links only for sampled spans. + if span.get_span_context().trace_flags.sampled: + links.append(trace.Link(span.get_span_context())) + assert len(self._topic.split("/")) == 4 + topic_short_name = self._topic.split("/")[3] + with tracer.start_as_current_span( + name=f"{topic_short_name} publish", + attributes={ + "messaging.system": self._OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.destination.name": topic_short_name, + "gcp.project_id": self._topic.split("/")[1], + "messaging.batch.message_count": len(self._message_wrappers), + "messaging.operation": "publish", + "code.function": "_commit", + }, + links=links, + kind=trace.SpanKind.CLIENT, + end_on_exit=False, + ) as rpc_span: + ctx = rpc_span.get_span_context() + for wrapper in self._message_wrappers: + span = wrapper.create_span + if span.get_span_context().trace_flags.sampled: + span.add_link(ctx) + self._rpc_span = rpc_span + def _commit(self) -> None: """Actually publish all of the messages on the active batch. @@ -259,7 +304,7 @@ def _commit(self) -> None: # https://github.com/googleapis/google-cloud-python/issues/8036 # Sanity check: If there are no messages, no-op. 
- if not self._messages: + if not self._message_wrappers: _LOGGER.debug("No messages to publish, exiting commit") self._status = base.BatchStatus.SUCCESS return @@ -270,18 +315,51 @@ def _commit(self) -> None: batch_transport_succeeded = True try: + if self._client.open_telemetry_enabled: + self._start_publish_rpc_span() + # Performs retries for errors defined by the retry configuration. response = self._client._gapic_publish( topic=self._topic, - messages=self._messages, + messages=[wrapper.message for wrapper in self._message_wrappers], retry=self._commit_retry, timeout=self._commit_timeout, ) + + if self._client.open_telemetry_enabled: + assert self._rpc_span is not None + self._rpc_span.end() + end_time = str(datetime.now()) + for message_id, wrapper in zip( + response.message_ids, self._message_wrappers + ): + span = wrapper.create_span + span.add_event( + name="publish end", + attributes={ + "timestamp": end_time, + }, + ) + span.set_attribute(key="messaging.message.id", value=message_id) + wrapper.end_create_span() except google.api_core.exceptions.GoogleAPIError as exc: # We failed to publish, even after retries, so set the exception on # all futures and exit. self._status = base.BatchStatus.ERROR + if self._client.open_telemetry_enabled: + if self._rpc_span: + self._rpc_span.record_exception( + exception=exc, + ) + self._rpc_span.set_status( + trace.Status(status_code=trace.StatusCode.ERROR) + ) + self._rpc_span.end() + + for wrapper in self._message_wrappers: + wrapper.end_create_span(exc=exc) + batch_transport_succeeded = False if self._batch_done_callback is not None: # Failed to publish batch. @@ -326,7 +404,8 @@ def _commit(self) -> None: self._batch_done_callback(batch_transport_succeeded) def publish( - self, message: gapic_types.PubsubMessage + self, + wrapper: PublishMessageWrapper, ) -> Optional["pubsub_v1.publisher.futures.Future"]: """Publish a single message. 
@@ -338,7 +417,7 @@ def publish( This method is called by :meth:`~.PublisherClient.publish`. Args: - message: The Pub/Sub message. + wrapper: The Pub/Sub message wrapper. Returns: An object conforming to the :class:`~concurrent.futures.Future` interface @@ -351,12 +430,14 @@ def publish( """ # Coerce the type, just in case. - if not isinstance(message, gapic_types.PubsubMessage): + if not isinstance( + wrapper.message, gapic_types.PubsubMessage + ): # pragma: NO COVER # For performance reasons, the message should be constructed by directly # using the raw protobuf class, and only then wrapping it into the # higher-level PubsubMessage class. - vanilla_pb = _raw_proto_pubbsub_message(**message) - message = gapic_types.PubsubMessage.wrap(vanilla_pb) + vanilla_pb = _raw_proto_pubbsub_message(**wrapper.message) + wrapper.message = gapic_types.PubsubMessage.wrap(vanilla_pb) future = None @@ -369,7 +450,7 @@ def publish( return None size_increase = gapic_types.PublishRequest( - messages=[message] + messages=[wrapper.message] )._pb.ByteSize() if (self._base_request_size + size_increase) > _SERVER_PUBLISH_MAX_BYTES: @@ -381,14 +462,14 @@ def publish( raise exceptions.MessageTooLargeError(err_msg) new_size = self._size + size_increase - new_count = len(self._messages) + 1 + new_count = len(self._message_wrappers) + 1 size_limit = min(self.settings.max_bytes, _SERVER_PUBLISH_MAX_BYTES) overflow = new_size > size_limit or new_count >= self.settings.max_messages - if not self._messages or not overflow: + if not self._message_wrappers or not overflow: # Store the actual message in the batch's message queue. 
- self._messages.append(message) + self._message_wrappers.append(wrapper) self._size = new_size # Track the future on this batch (so that the result of the diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py index 30c76a44f4d2..9644a1fa2953 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/ordered_sequencer.py @@ -23,7 +23,9 @@ from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import base as sequencer_base from google.cloud.pubsub_v1.publisher._batch import base as batch_base -from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import types @@ -262,15 +264,15 @@ def _create_batch( def publish( self, - message: gapic_types.PubsubMessage, + wrapper: PublishMessageWrapper, retry: "OptionalRetry" = gapic_v1.method.DEFAULT, timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> futures.Future: """Publish message for this ordering key. Args: - message: - The Pub/Sub message. + wrapper: + The Pub/Sub message wrapper. retry: The retry settings to apply when publishing the message. 
timeout: @@ -317,11 +319,11 @@ def publish( self._ordered_batches.append(new_batch) batch = self._ordered_batches[-1] - future = batch.publish(message) + future = batch.publish(wrapper) while future is None: batch = self._create_batch(commit_retry=retry, commit_timeout=timeout) self._ordered_batches.append(batch) - future = batch.publish(message) + future = batch.publish(wrapper) return future diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py index 7d57aa8218b9..7dbd3f084f1e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/unordered_sequencer.py @@ -18,7 +18,9 @@ from google.api_core import gapic_v1 from google.cloud.pubsub_v1.publisher._sequencer import base -from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1.publisher import _batch @@ -115,15 +117,15 @@ def _create_batch( def publish( self, - message: gapic_types.PubsubMessage, + wrapper: PublishMessageWrapper, retry: "OptionalRetry" = gapic_v1.method.DEFAULT, timeout: "types.OptionalTimeout" = gapic_v1.method.DEFAULT, ) -> "futures.Future": """Batch message into existing or new batch. Args: - message: - The Pub/Sub message. + wrapper: + The Pub/Sub message wrapper. retry: The retry settings to apply when publishing the message. 
timeout: @@ -151,7 +153,7 @@ def publish( future = None while future is None: # Might throw MessageTooLargeError - future = batch.publish(message) + future = batch.publish(wrapper) # batch is full, triggering commit_when_full if future is None: batch = self._create_batch(commit_retry=retry, commit_timeout=timeout) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 54b353276196..481a8472d720 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -22,6 +22,7 @@ import typing from typing import Any, Dict, Optional, Sequence, Tuple, Type, Union import warnings +import sys from google.api_core import gapic_v1 from google.auth.credentials import AnonymousCredentials # type: ignore @@ -37,6 +38,9 @@ from google.pubsub_v1 import gapic_version as package_version from google.pubsub_v1 import types as gapic_types from google.pubsub_v1.services.publisher import client as publisher_client +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) __version__ = package_version.__version__ @@ -153,6 +157,22 @@ def __init__( # The object controlling the message publishing flow self._flow_controller = FlowController(self.publisher_options.flow_control) + self._open_telemetry_enabled = ( + self.publisher_options.enable_open_telemetry_tracing + ) + # OpenTelemetry features used by the library are not supported in Python versions <= 3.7. + # Refer https://github.com/open-telemetry/opentelemetry-python/issues/3993#issuecomment-2211976389 + if ( + self.publisher_options.enable_open_telemetry_tracing + and sys.version_info.major == 3 + and sys.version_info.minor < 8 + ): + warnings.warn( + message="Open Telemetry for Python version 3.7 or lower is not supported. 
Disabling Open Telemetry tracing.", + category=RuntimeWarning, + ) + self._open_telemetry_enabled = False + @classmethod def from_service_account_file( # type: ignore[override] cls, @@ -209,6 +229,10 @@ def api(self): warnings.warn(msg, category=DeprecationWarning) return super() + @property + def open_telemetry_enabled(self) -> bool: + return self._open_telemetry_enabled + def _get_or_create_sequencer(self, topic: str, ordering_key: str) -> SequencerType: """Get an existing sequencer or create a new one given the (topic, ordering_key) pair. @@ -368,11 +392,41 @@ def publish( # type: ignore[override] ) message = gapic_types.PubsubMessage.wrap(vanilla_pb) + wrapper: PublishMessageWrapper = PublishMessageWrapper(message) + if self._open_telemetry_enabled: + wrapper.start_create_span(topic=topic, ordering_key=ordering_key) + # Messages should go through flow control to prevent excessive # queuing on the client side (depending on the settings). try: + if self._open_telemetry_enabled: + if wrapper: + wrapper.start_publisher_flow_control_span() + else: # pragma: NO COVER + warnings.warn( + message="PubSubMessageWrapper is None. Not starting publisher flow control span.", + category=RuntimeWarning, + ) self._flow_controller.add(message) + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_flow_control_span() + else: # pragma: NO COVER + warnings.warn( + message="PubSubMessageWrapper is None. Not ending publisher flow control span.", + category=RuntimeWarning, + ) except exceptions.FlowControlLimitError as exc: + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_flow_control_span(exc) + wrapper.end_create_span(exc) + else: # pragma: NO COVER + warnings.warn( + message="PubSubMessageWrapper is None. 
Not ending publisher create and flow control spans on FlowControlLimitError.", + category=RuntimeWarning, + ) + future = futures.Future() future.set_exception(exc) return future @@ -386,31 +440,68 @@ def on_publish_done(future): if timeout is gapic_v1.method.DEFAULT: # if custom timeout not passed in timeout = self.publisher_options.timeout + if self._open_telemetry_enabled: + if wrapper: + wrapper.start_publisher_batching_span() + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Hence, not starting publisher batching span", + category=RuntimeWarning, + ) with self._batch_lock: - if self._is_stopped: - raise RuntimeError("Cannot publish on a stopped publisher.") - - # Set retry timeout to "infinite" when message ordering is enabled. - # Note that this then also impacts messages added with an empty - # ordering key. - if self._enable_message_ordering: - if retry is gapic_v1.method.DEFAULT: - # use the default retry for the publish GRPC method as a base - transport = self._transport - base_retry = transport._wrapped_methods[transport.publish]._retry - retry = base_retry.with_deadline(2.0**32) - # timeout needs to be overridden and set to infinite in - # addition to the retry deadline since both determine - # the duration for which retries are attempted. - timeout = 2.0**32 - elif retry is not None: - retry = retry.with_deadline(2.0**32) - timeout = 2.0**32 - - # Delegate the publishing to the sequencer. - sequencer = self._get_or_create_sequencer(topic, ordering_key) - future = sequencer.publish(message, retry=retry, timeout=timeout) - future.add_done_callback(on_publish_done) + try: + if self._is_stopped: + raise RuntimeError("Cannot publish on a stopped publisher.") + + # Set retry timeout to "infinite" when message ordering is enabled. + # Note that this then also impacts messages added with an empty + # ordering key. 
+ if self._enable_message_ordering: + if retry is gapic_v1.method.DEFAULT: + # use the default retry for the publish GRPC method as a base + transport = self._transport + base_retry = transport._wrapped_methods[ + transport.publish + ]._retry + retry = base_retry.with_deadline(2.0**32) + # timeout needs to be overridden and set to infinite in + # addition to the retry deadline since both determine + # the duration for which retries are attempted. + timeout = 2.0**32 + elif retry is not None: + retry = retry.with_deadline(2.0**32) + timeout = 2.0**32 + + # Delegate the publishing to the sequencer. + sequencer = self._get_or_create_sequencer(topic, ordering_key) + future = sequencer.publish( + wrapper=wrapper, retry=retry, timeout=timeout + ) + future.add_done_callback(on_publish_done) + except BaseException as be: + # Exceptions can be thrown when attempting to add messages to + # the batch. If they're thrown, record them in publisher + # batching and create span, end the spans and bubble the + # exception up. + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_batching_span(be) + wrapper.end_create_span(be) + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Hence, not recording exception and ending publisher batching span and create span", + category=RuntimeWarning, + ) + raise be + + if self._open_telemetry_enabled: + if wrapper: + wrapper.end_publisher_batching_span() + else: # pragma: NO COVER + warnings.warn( + message="PublishMessageWrapper is None. Hence, not ending publisher batching span", + category=RuntimeWarning, + ) # Create a timer thread if necessary to enforce the batching # timeout. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 3d071a1893d0..c4282e68594d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -174,6 +174,9 @@ class PublisherOptions(NamedTuple): "compatible with :class:`~.pubsub_v1.types.TimeoutType`." ) + enable_open_telemetry_tracing: bool = False # disabled by default + """Open Telemetry tracing is enabled if this is set to True.""" + # Define the type class and default values for flow control settings. # diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index dbb66cf7ca9a..cc852f7d8060 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -45,6 +45,8 @@ "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", "grpcio-status >= 1.33.2", + "opentelemetry-api", + "opentelemetry-sdk", ] extras = {"libcst": "libcst >= 0.3.10"} url = "https://github.com/googleapis/python-pubsub" diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py index dc4192931098..b44e2fd84f82 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py @@ -12,9 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import google.auth.credentials import pytest +from opentelemetry.sdk.trace import TracerProvider +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter +from opentelemetry.sdk.trace.export import SimpleSpanProcessor +from opentelemetry import trace +import google.auth.credentials + @pytest.fixture def creds(): @@ -23,3 +28,18 @@ def creds(): GOOGLE_APPLICATION_CREDENTIALS set. """ yield google.auth.credentials.AnonymousCredentials() + + +@pytest.fixture(scope="session", autouse=True) +def set_trace_provider(): + provider = TracerProvider() + trace.set_tracer_provider(provider) + + +@pytest.fixture(scope="function") +def span_exporter(): + exporter = InMemorySpanExporter() + processor = SimpleSpanProcessor(exporter) + provider = trace.get_tracer_provider() + provider.add_span_processor(processor) + yield exporter diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index a95d72c123c3..ae5dbea0438e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -21,6 +21,9 @@ from google.cloud.pubsub_v1.publisher._batch.base import BatchStatus from google.cloud.pubsub_v1.publisher._batch.thread import Batch from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) def create_batch(status, settings=types.BatchSettings()): @@ -41,5 +44,5 @@ def create_batch(status, settings=types.BatchSettings()): def test_len(): batch = create_batch(status=BatchStatus.ACCEPTING_MESSAGES) assert len(batch) == 0 - batch.publish(gapic_types.PubsubMessage(data=b"foo")) + batch.publish(PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo"))) assert len(batch) == 1 diff --git 
a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 2752d62a20e4..32eaa3d9815a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -25,6 +25,9 @@ import pytest +from opentelemetry import trace +from opentelemetry.trace import SpanContext + import google.api_core.exceptions from google.api_core import gapic_v1 from google.auth import credentials @@ -36,10 +39,18 @@ from google.cloud.pubsub_v1.publisher._batch import thread from google.cloud.pubsub_v1.publisher._batch.thread import Batch from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) -def create_client(): - return publisher.Client(credentials=credentials.AnonymousCredentials()) +def create_client(enable_open_telemetry: bool = False): + return publisher.Client( + credentials=credentials.AnonymousCredentials(), + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry, + ), + ) def create_batch( @@ -48,7 +59,8 @@ def create_batch( commit_when_full=True, commit_retry=gapic_v1.method.DEFAULT, commit_timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, - **batch_settings + enable_open_telemetry: bool = False, + **batch_settings, ): """Return a batch object suitable for testing. @@ -62,13 +74,14 @@ def create_batch( for the batch commit call. commit_timeout (:class:`~.pubsub_v1.types.TimeoutType`): The timeout to apply to the batch commit call. + enable_open_telemetry (bool): Whether to enable OpenTelemetry. batch_settings (Mapping[str, str]): Arguments passed on to the :class:``~.pubsub_v1.types.BatchSettings`` constructor. Returns: ~.pubsub_v1.publisher.batch.thread.Batch: A batch object. 
""" - client = create_client() + client = create_client(enable_open_telemetry=enable_open_telemetry) settings = types.BatchSettings(**batch_settings) return Batch( client, @@ -126,8 +139,16 @@ def test_commit_no_op(): def test_blocking__commit(): batch = create_batch() futures = ( - batch.publish({"data": b"This is my message."}), - batch.publish({"data": b"This is another message."}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message.") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is another message.") + ) + ), ) # Set up the underlying API publish method to return a PublishResponse. @@ -160,7 +181,11 @@ def test_blocking__commit(): def test_blocking__commit_custom_retry(): batch = create_batch(commit_retry=mock.sentinel.custom_retry) - batch.publish({"data": b"This is my message."}) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message.") + ) + ) # Set up the underlying API publish method to return a PublishResponse. publish_response = gapic_types.PublishResponse(message_ids=["a"]) @@ -182,7 +207,11 @@ def test_blocking__commit_custom_retry(): def test_blocking__commit_custom_timeout(): batch = create_batch(commit_timeout=mock.sentinel.custom_timeout) - batch.publish({"data": b"This is my message."}) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message.") + ) + ) # Set up the underlying API publish method to return a PublishResponse. 
publish_response = gapic_types.PublishResponse(message_ids=["a"]) @@ -217,13 +246,21 @@ def api_publish_delay(topic="", messages=(), retry=None, timeout=None): ) with api_publish_patch: - batch.publish({"data": b"first message"}) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"first message") + ) + ) start = datetime.datetime.now() event_set = api_publish_called.wait(timeout=1.0) if not event_set: # pragma: NO COVER pytest.fail("API publish was not called in time") - batch.publish({"data": b"second message"}) + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"second message") + ) + ) end = datetime.datetime.now() # While a batch commit in progress, waiting for the API publish call to @@ -266,8 +303,16 @@ def test_blocking__commit_no_messages(): def test_blocking__commit_wrong_messageid_length(): batch = create_batch() futures = ( - batch.publish({"data": b"blah blah blah"}), - batch.publish({"data": b"blah blah blah blah"}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah blah") + ) + ), ) # Set up a PublishResponse that only returns one message ID. @@ -287,8 +332,16 @@ def test_blocking__commit_wrong_messageid_length(): def test_block__commmit_api_error(): batch = create_batch() futures = ( - batch.publish({"data": b"blah blah blah"}), - batch.publish({"data": b"blah blah blah blah"}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah blah") + ) + ), ) # Make the API throw an error when publishing. 
@@ -306,8 +359,16 @@ def test_block__commmit_api_error(): def test_block__commmit_retry_error(): batch = create_batch() futures = ( - batch.publish({"data": b"blah blah blah"}), - batch.publish({"data": b"blah blah blah blah"}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah") + ) + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"blah blah blah blah") + ) + ), ) # Make the API throw an error when publishing. @@ -324,24 +385,31 @@ def test_block__commmit_retry_error(): def test_publish_updating_batch_size(): batch = create_batch(topic="topic_foo") - messages = ( - gapic_types.PubsubMessage(data=b"foobarbaz"), - gapic_types.PubsubMessage(data=b"spameggs"), - gapic_types.PubsubMessage(data=b"1335020400"), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spameggs"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"1335020400"), + ), ) # Publish each of the messages, which should save them to the batch. - futures = [batch.publish(message) for message in messages] + futures = [batch.publish(wrapper) for wrapper in wrappers] # There should be three messages on the batch, and three futures. - assert len(batch.messages) == 3 + assert len(batch.message_wrappers) == 3 assert batch._futures == futures # The size should have been incremented by the sum of the size # contributions of each message to the PublishRequest. 
base_request_size = gapic_types.PublishRequest(topic="topic_foo")._pb.ByteSize() expected_request_size = base_request_size + sum( - gapic_types.PublishRequest(messages=[msg])._pb.ByteSize() for msg in messages + gapic_types.PublishRequest(messages=[wrapper.message])._pb.ByteSize() + for wrapper in wrappers ) assert batch.size == expected_request_size @@ -350,68 +418,82 @@ def test_publish_updating_batch_size(): def test_publish(): batch = create_batch() - message = gapic_types.PubsubMessage() - future = batch.publish(message) + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage()) + future = batch.publish(wrapper) - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert batch._futures == [future] def test_publish_max_messages_zero(): batch = create_batch(topic="topic_foo", max_messages=0) - - message = gapic_types.PubsubMessage(data=b"foobarbaz") + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ) with mock.patch.object(batch, "commit") as commit: - future = batch.publish(message) + future = batch.publish(wrapper) assert future is not None - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert batch._futures == [future] commit.assert_called_once() def test_publish_max_messages_enforced(): batch = create_batch(topic="topic_foo", max_messages=1) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + wrapper2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz2") + ) - message = gapic_types.PubsubMessage(data=b"foobarbaz") - message2 = gapic_types.PubsubMessage(data=b"foobarbaz2") - - future = batch.publish(message) - future2 = batch.publish(message2) + future = batch.publish(wrapper) + future2 = batch.publish(wrapper2) assert future is not None assert future2 is None - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert len(batch._futures) == 1 def 
test_publish_max_bytes_enforced(): batch = create_batch(topic="topic_foo", max_bytes=15) - message = gapic_types.PubsubMessage(data=b"foobarbaz") - message2 = gapic_types.PubsubMessage(data=b"foobarbaz2") + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + wrapper2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz2") + ) - future = batch.publish(message) - future2 = batch.publish(message2) + future = batch.publish(wrapper) + future2 = batch.publish(wrapper2) assert future is not None assert future2 is None - assert len(batch.messages) == 1 + assert len(batch.message_wrappers) == 1 assert len(batch._futures) == 1 def test_publish_exceed_max_messages(): max_messages = 4 batch = create_batch(max_messages=max_messages) - messages = ( - gapic_types.PubsubMessage(data=b"foobarbaz"), - gapic_types.PubsubMessage(data=b"spameggs"), - gapic_types.PubsubMessage(data=b"1335020400"), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spameggs"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"1335020400"), + ), ) # Publish each of the messages, which should save them to the batch. with mock.patch.object(batch, "commit") as commit: - futures = [batch.publish(message) for message in messages] + futures = [batch.publish(wrapper) for wrapper in wrappers] assert batch._futures == futures assert len(futures) == max_messages - 1 @@ -420,7 +502,11 @@ def test_publish_exceed_max_messages(): # When a fourth message is published, commit should be called. # No future will be returned in this case. 
- future = batch.publish(gapic_types.PubsubMessage(data=b"last one")) + future = batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"last one") + ) + ) commit.assert_called_once_with() assert future is None @@ -443,28 +529,32 @@ def test_publish_single_message_size_exceeds_server_size_limit(): assert request_size == 1001 # sanity check, just above the (mocked) server limit with pytest.raises(exceptions.MessageTooLargeError): - batch.publish(big_message) + batch.publish(wrapper=PublishMessageWrapper(message=big_message)) @mock.patch.object(thread, "_SERVER_PUBLISH_MAX_BYTES", 1000) def test_publish_total_messages_size_exceeds_server_size_limit(): batch = create_batch(topic="topic_foo", max_messages=10, max_bytes=1500) - messages = ( - gapic_types.PubsubMessage(data=b"x" * 500), - gapic_types.PubsubMessage(data=b"x" * 600), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"x" * 500), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"x" * 600), + ), ) # Sanity check - request size is still below BatchSettings.max_bytes, # but it exceeds the server-side size limit. request_size = gapic_types.PublishRequest( - topic="topic_foo", messages=messages + topic="topic_foo", messages=[wrapper.message for wrapper in wrappers] )._pb.ByteSize() assert 1000 < request_size < 1500 with mock.patch.object(batch, "commit") as fake_commit: - batch.publish(messages[0]) - batch.publish(messages[1]) + batch.publish(wrappers[0]) + batch.publish(wrappers[1]) # The server side limit should kick in and cause a commit. 
fake_commit.assert_called_once() @@ -472,21 +562,40 @@ def test_publish_total_messages_size_exceeds_server_size_limit(): def test_publish_dict(): batch = create_batch() - future = batch.publish({"data": b"foobarbaz", "attributes": {"spam": "eggs"}}) + future = batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foobarbaz", + attributes={"spam": "eggs"}, + ), + ) + ) # There should be one message on the batch. - expected_message = gapic_types.PubsubMessage( - data=b"foobarbaz", attributes={"spam": "eggs"} + expected_message_wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foobarbaz", + attributes={"spam": "eggs"}, + ) ) - assert batch.messages == [expected_message] + + assert batch.message_wrappers == [expected_message_wrapper] assert batch._futures == [future] def test_cancel(): batch = create_batch() futures = ( - batch.publish({"data": b"This is my message."}), - batch.publish({"data": b"This is another message."}), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is my message."), + ), + ), + batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"This is another message."), + ), + ), ) batch.cancel(BatchCancellationReason.PRIOR_ORDERED_MESSAGE_FAILED) @@ -502,19 +611,29 @@ def test_do_not_commit_when_full_when_flag_is_off(): max_messages = 4 # Set commit_when_full flag to False batch = create_batch(max_messages=max_messages, commit_when_full=False) - messages = ( - gapic_types.PubsubMessage(data=b"foobarbaz"), - gapic_types.PubsubMessage(data=b"spameggs"), - gapic_types.PubsubMessage(data=b"1335020400"), + wrappers = ( + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spameggs"), + ), + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"1335020400"), + ), ) with mock.patch.object(batch, 
"commit") as commit: # Publish 3 messages. - futures = [batch.publish(message) for message in messages] + futures = [batch.publish(wrapper) for wrapper in wrappers] assert len(futures) == 3 # When a fourth message is published, commit should not be called. - future = batch.publish(gapic_types.PubsubMessage(data=b"last one")) + future = batch.publish( + wrapper=PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"last one"), + ) + ) assert commit.call_count == 0 assert future is None @@ -534,8 +653,10 @@ def test_batch_done_callback_called_on_success(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = gapic_types.PubsubMessage(data=b"foobarbaz") - batch.publish(message) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + batch.publish(wrapper) # One response for one published message. publish_response = gapic_types.PublishResponse(message_ids=["a"]) @@ -554,8 +675,10 @@ def test_batch_done_callback_called_on_publish_failure(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = gapic_types.PubsubMessage(data=b"foobarbaz") - batch.publish(message) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz") + ) + batch.publish(wrapper) # One response for one published message. publish_response = gapic_types.PublishResponse(message_ids=["a"]) @@ -580,8 +703,10 @@ def test_batch_done_callback_called_on_publish_response_invalid(): batch = create_batch(batch_done_callback=batch_done_callback_tracker) # Ensure messages exist. - message = gapic_types.PubsubMessage(data=b"foobarbaz") - batch.publish(message) + wrapper = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foobarbaz"), + ) + batch.publish(wrapper) # No message ids returned in successful publish response -> invalid. 
publish_response = gapic_types.PublishResponse(message_ids=[]) @@ -593,3 +718,249 @@ def test_batch_done_callback_called_on_publish_response_invalid(): assert batch_done_callback_tracker.called assert not batch_done_callback_tracker.success + + +# Refer https://opentelemetry.io/docs/languages/python/#version-support +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_open_telemetry_commit_publish_rpc_span_none(span_exporter): + """ + Test scenario where OpenTelemetry is enabled, publish RPC + span creation fails(unexpected) and hence batch._rpc_span is None when + attempting to close it. Required for code coverage. + """ + TOPIC = "projects/projectID/topics/topicID" + batch = create_batch(topic=TOPIC, enable_open_telemetry=True) + + message = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + message.start_create_span(topic=TOPIC, ordering_key=None) + batch.publish(message) + + # Mock error when publish RPC span creation is attempted. + error = google.api_core.exceptions.InternalServerError("error") + + with mock.patch.object( + type(batch), + "_start_publish_rpc_span", + side_effect=error, + ): + batch._commit() + + assert batch._rpc_span is None + spans = span_exporter.get_finished_spans() + + # Only Create span should be exported, since publish RPC span creation + # should fail with a mock error. + assert len(spans) == 1 + + publish_create_span = spans[0] + assert publish_create_span.status.status_code == trace.status.StatusCode.ERROR + assert publish_create_span.end_time is not None + + assert publish_create_span.name == "topicID create" + # Publish start event and exception event should be present in publish + # create span. 
+ assert len(publish_create_span.events) == 2 + assert publish_create_span.events[0].name == "publish start" + assert publish_create_span.events[1].name == "exception" + + +# Refer https://opentelemetry.io/docs/languages/python/#version-support +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_open_telemetry_commit_publish_rpc_exception(span_exporter): + TOPIC = "projects/projectID/topics/topicID" + batch = create_batch(topic=TOPIC, enable_open_telemetry=True) + + message = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + message.start_create_span(topic=TOPIC, ordering_key=None) + batch.publish(message) + + # Mock publish error. + error = google.api_core.exceptions.InternalServerError("error") + + with mock.patch.object( + type(batch.client), + "_gapic_publish", + side_effect=error, + ): + batch._commit() + + spans = span_exporter.get_finished_spans() + # Span 1: Publish RPC span + # Span 2: Create span. + assert len(spans) == 2 + + # Verify both spans recorded error and have ended. + for span in spans: + assert span.status.status_code == trace.status.StatusCode.ERROR + assert span.end_time is not None + + publish_rpc_span = spans[0] + assert publish_rpc_span.name == "topicID publish" + assert len(publish_rpc_span.events) == 1 + assert publish_rpc_span.events[0].name == "exception" + + publish_create_span = spans[1] + assert publish_create_span.name == "topicID create" + # Publish start event and exception event should be present in publish + # create span. 
+ assert len(publish_create_span.events) == 2 + assert publish_create_span.events[0].name == "publish start" + assert publish_create_span.events[1].name == "exception" + + +# Refer https://opentelemetry.io/docs/languages/python/#version-support +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_opentelemetry_commit_sampling(span_exporter): + TOPIC = "projects/projectID/topics/topic" + batch = create_batch( + topic=TOPIC, + enable_open_telemetry=True, + ) + + message1 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + message1.start_create_span(topic=TOPIC, ordering_key=None) + + message2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"bar"), + ) + message2.start_create_span(topic=TOPIC, ordering_key=None) + + # Mock the 'get_span_context' method to return a mock SpanContext + mock_span_context = mock.Mock(spec=SpanContext) + mock_span_context.trace_flags.sampled = False + + batch.publish(message1) + batch.publish(message2) + + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) + + # Patch the 'create_span' method to return the mock SpanContext + with mock.patch.object( + message1.create_span, "get_span_context", return_value=mock_span_context + ): + with mock.patch.object( + type(batch.client), "_gapic_publish", return_value=publish_response + ): + batch._commit() + + spans = span_exporter.get_finished_spans() + + # Span 1: Publish RPC span of both messages + # Span 2: Create span of message 1 + # Span 3: Create span of message 2 + assert len(spans) == 3 + + publish_rpc_span, create_span1, create_span2 = spans + + # Verify publish RPC span has only one link corresponding to + # message 2 which is included in the sample. 
+ assert len(publish_rpc_span.links) == 1 + assert len(create_span1.links) == 0 + assert len(create_span2.links) == 1 + assert publish_rpc_span.links[0].context == create_span2.context + assert create_span2.links[0].context == publish_rpc_span.context + + # Verify all spans have ended. + for span in spans: + assert span.end_time is not None + + # Verify both publish create spans have 2 events - publish start and publish + # end. + for span in spans[1:]: + assert len(span.events) == 2 + assert span.events[0].name == "publish start" + assert span.events[1].name == "publish end" + + +@pytest.mark.skipif( + sys.version_info < (3, 8), reason="Open Telemetry requires python3.8 or higher" +) +def test_opentelemetry_commit(span_exporter): + TOPIC = "projects/projectID/topics/topic" + batch = create_batch( + topic=TOPIC, + enable_open_telemetry=True, + ) + + msg1 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo"), + ) + msg2 = PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"bar"), + ) + msg1.start_create_span(topic=TOPIC, ordering_key=None) + msg2.start_create_span(topic=TOPIC, ordering_key=None) + + # Add both messages to the batch. + batch.publish(msg1) + batch.publish(msg2) + + publish_response = gapic_types.PublishResponse(message_ids=["a", "b"]) + with mock.patch.object( + type(batch.client), "_gapic_publish", return_value=publish_response + ): + batch._commit() + + spans = span_exporter.get_finished_spans() + + # Span 1: publish RPC span - closed after publish RPC success. + # Span 2: publisher create span of message 1 - closed after publish RPC success. + # Span 3: publisher create span of message 2 - closed after publish RPC success. 
+ assert len(spans) == 3 + publish_rpc_span, create_span1, create_span2 = spans + + # Verify publish RPC span + assert publish_rpc_span.name == "topic publish" + assert publish_rpc_span.kind == trace.SpanKind.CLIENT + assert publish_rpc_span.end_time is not None + attributes = publish_rpc_span.attributes + assert attributes["messaging.system"] == "gcp_pubsub" + assert attributes["messaging.destination.name"] == "topic" + assert attributes["gcp.project_id"] == "projectID" + assert attributes["messaging.batch.message_count"] == 2 + assert attributes["messaging.operation"] == "publish" + assert attributes["code.function"] == "_commit" + assert publish_rpc_span.parent is None + # Verify the links correspond to the spans of the published messages. + assert len(publish_rpc_span.links) == 2 + assert publish_rpc_span.links[0].context == create_span1.context + assert publish_rpc_span.links[1].context == create_span2.context + assert len(create_span1.links) == 1 + assert create_span1.links[0].context == publish_rpc_span.get_span_context() + assert len(create_span2.links) == 1 + assert create_span2.links[0].context == publish_rpc_span.get_span_context() + + # Verify spans of the published messages. + assert create_span1.name == "topic create" + assert create_span2.name == "topic create" + + # Verify the publish create spans have been closed after publish success. + assert create_span1.end_time is not None + assert create_span2.end_time is not None + + # Verify message IDs returned from gapic publish are added as attributes + # to the publisher create spans of the messages. 
+ assert "messaging.message.id" in create_span1.attributes + assert create_span1.attributes["messaging.message.id"] == "a" + assert "messaging.message.id" in create_span2.attributes + assert create_span2.attributes["messaging.message.id"] == "b" + + # Verify publish end event added to the span + assert len(create_span1.events) == 2 + assert len(create_span2.events) == 2 + assert create_span1.events[0].name == "publish start" + assert create_span1.events[1].name == "publish end" + assert create_span2.events[0].name == "publish start" + assert create_span2.events[1].name == "publish end" diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 7570c2970938..4377d1447287 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -27,12 +27,17 @@ from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) _ORDERING_KEY = "ordering_key_1" def create_message(): - return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + return PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ) def create_client(): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 01d9d6ca405c..739bae3bddfc 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -27,10 +27,15 @@ from google.cloud.pubsub_v1.publisher._batch import base from google.cloud.pubsub_v1.publisher._sequencer import unordered_sequencer from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) def create_message(): - return gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + return PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ) def create_client(): @@ -140,7 +145,9 @@ def test_publish_after_batch_error(): batch = client._batch_class( client, "topic_name", types.BatchSettings(max_latency=float("inf")) ) - batch._messages.append(mock.Mock(name="message")) # Make batch truthy (non-empty). + batch._message_wrappers.append( + mock.Mock(name="message") + ) # Make batch truthy (non-empty). sequencer = unordered_sequencer.UnorderedSequencer(client, "topic_name") sequencer._set_batch(batch) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py new file mode 100644 index 000000000000..e100950ad463 --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publish_message_wrapper.py @@ -0,0 +1,55 @@ +# Copyright 2019, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) + + +def test_message_setter(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + another_message = gapic_types.PubsubMessage(data=b"bar") + wrapper.message = another_message + + assert wrapper.message == another_message + + +def test_eq(): + wrapper1 = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + wrapper2 = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"bar")) + wrapper3 = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + + assert wrapper1.__eq__(wrapper2) is False + assert wrapper1.__eq__(wrapper3) is True + + +def test_end_create_span(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + with pytest.raises(AssertionError): + wrapper.end_create_span() + + +def test_end_publisher_flow_control_span(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + with pytest.raises(AssertionError): + wrapper.end_publisher_flow_control_span() + + +def test_end_publisher_batching_span(): + wrapper = PublishMessageWrapper(message=gapic_types.PubsubMessage(data=b"foo")) + with pytest.raises(AssertionError): + wrapper.end_publisher_batching_span() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 9db5e0ef8b1a..23255db3bd70 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -29,18 +29,24 @@ import pytest import time +from opentelemetry import trace from google.api_core 
import gapic_v1 from google.api_core import retry as retries from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY + from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types - from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher._sequencer import ordered_sequencer - from google.pubsub_v1 import types as gapic_types from google.pubsub_v1.services.publisher import client as publisher_client from google.pubsub_v1.services.publisher.transports.grpc import PublisherGrpcTransport +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextSetter, +) +from google.cloud.pubsub_v1.open_telemetry.publish_message_wrapper import ( + PublishMessageWrapper, +) def _assert_retries_equal(retry, retry2): @@ -129,6 +135,220 @@ def test_init_w_custom_transport(creds): assert client.batch_settings.max_messages == 100 +@pytest.mark.parametrize( + "enable_open_telemetry", + [ + True, + False, + ], +) +def test_open_telemetry_publisher_options(creds, enable_open_telemetry): + if sys.version_info >= (3, 8) or enable_open_telemetry is False: + client = publisher.Client( + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry + ), + credentials=creds, + ) + assert client._open_telemetry_enabled == enable_open_telemetry + else: + # Open Telemetry is not supported and hence disabled for Python + # versions 3.7 or below + with pytest.warns( + RuntimeWarning, + match="Open Telemetry for Python version 3.7 or lower is not supported. 
Disabling Open Telemetry tracing.", + ): + client = publisher.Client( + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry + ), + credentials=creds, + ) + assert client._open_telemetry_enabled is False + + +def test_opentelemetry_context_setter(): + msg = gapic_types.PubsubMessage(data=b"foo") + OpenTelemetryContextSetter().set(carrier=msg, key="key", value="bar") + + assert "googclient_key" in msg.attributes.keys() + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_context_propagation(creds, span_exporter): + TOPIC = "projects/projectID/topics/topicID" + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=True, + ), + ) + + message_mock = mock.Mock(spec=publisher.flow_controller.FlowController.add) + client._flow_controller.add = message_mock + client.publish(TOPIC, b"data") + + message_mock.assert_called_once() + args = message_mock.call_args.args + assert len(args) == 1 + assert "googclient_traceparent" in args[0].attributes + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +@pytest.mark.parametrize( + "enable_open_telemetry", + [ + True, + False, + ], +) +def test_opentelemetry_publisher_batching_exception( + creds, span_exporter, enable_open_telemetry +): + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=enable_open_telemetry, + ), + ) + + # Throw an exception when sequencer.publish() is called + sequencer = mock.Mock(spec=ordered_sequencer.OrderedSequencer) + sequencer.publish = mock.Mock(side_effect=RuntimeError("some error")) + client._get_or_create_sequencer = mock.Mock(return_value=sequencer) + + TOPIC = "projects/projectID/topics/topicID" + with pytest.raises(RuntimeError): + client.publish(TOPIC, 
b"message") + + spans = span_exporter.get_finished_spans() + + if enable_open_telemetry: + # Span 1: Publisher Flow Control span + # Span 2: Publisher Batching span + # Span 3: Create Publish span + assert len(spans) == 3 + + flow_control_span, batching_span, create_span = spans + + # Verify batching span contents. + assert batching_span.name == "publisher batching" + assert batching_span.kind == trace.SpanKind.INTERNAL + assert batching_span.parent.span_id == create_span.get_span_context().span_id + + # Verify exception recorded by the publisher batching span. + assert batching_span.status.status_code == trace.StatusCode.ERROR + assert len(batching_span.events) == 1 + assert batching_span.events[0].name == "exception" + + # Verify exception recorded by the publisher create span. + assert create_span.status.status_code == trace.StatusCode.ERROR + assert len(create_span.events) == 2 + assert create_span.events[0].name == "publish start" + assert create_span.events[1].name == "exception" + + # Verify the finished flow control span. 
+ assert flow_control_span.name == "publisher flow control" + assert len(flow_control_span.events) == 0 + else: + assert len(spans) == 0 + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_flow_control_exception(creds, span_exporter): + publisher_options = types.PublisherOptions( + flow_control=types.PublishFlowControl( + message_limit=10, + byte_limit=150, + limit_exceeded_behavior=types.LimitExceededBehavior.ERROR, + ), + enable_open_telemetry_tracing=True, + ) + client = publisher.Client(credentials=creds, publisher_options=publisher_options) + + mock_batch = mock.Mock(spec=client._batch_class) + topic = "projects/projectID/topics/topicID" + client._set_batch(topic, mock_batch) + + future1 = client.publish(topic, b"a" * 60) + future2 = client.publish(topic, b"b" * 100) + + future1.result() # no error, still within flow control limits + with pytest.raises(exceptions.FlowControlLimitError): + future2.result() + + spans = span_exporter.get_finished_spans() + # Span 1 = Publisher Flow Control Span of first publish + # Span 2 = Publisher Batching Span of first publish + # Span 3 = Publisher Flow Control Span of second publish(raises FlowControlLimitError) + # Span 4 = Publish Create Span of second publish(raises FlowControlLimitError) + assert len(spans) == 4 + + failed_flow_control_span = spans[2] + finished_publish_create_span = spans[3] + + # Verify failed flow control span values. 
+ assert failed_flow_control_span.name == "publisher flow control" + assert failed_flow_control_span.kind == trace.SpanKind.INTERNAL + assert ( + failed_flow_control_span.parent.span_id + == finished_publish_create_span.get_span_context().span_id + ) + assert failed_flow_control_span.status.status_code == trace.StatusCode.ERROR + + assert len(failed_flow_control_span.events) == 1 + assert failed_flow_control_span.events[0].name == "exception" + + # Verify finished publish create span values + assert finished_publish_create_span.name == "topicID create" + assert finished_publish_create_span.status.status_code == trace.StatusCode.ERROR + assert len(finished_publish_create_span.events) == 2 + assert finished_publish_create_span.events[0].name == "publish start" + assert finished_publish_create_span.events[1].name == "exception" + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_publish(creds, span_exporter): + TOPIC = "projects/projectID/topics/topicID" + client = publisher.Client( + credentials=creds, + publisher_options=types.PublisherOptions( + enable_open_telemetry_tracing=True, + ), + ) + + client.publish(TOPIC, b"message") + spans = span_exporter.get_finished_spans() + + # Span 1: Publisher Flow control span + # Span 2: Publisher Batching span + # Publish Create Span would still be active, and hence not exported. + flow_control_span = spans[0] + assert flow_control_span.name == "publisher flow control" + assert flow_control_span.kind == trace.SpanKind.INTERNAL + # Assert the Publisher Flow Control Span has a parent(the Publish Create + # span is still active, and hence unexported. 
So, the value of parent cannot + # be asserted) + assert flow_control_span.parent is not None + + batching_span = spans[1] + assert batching_span.name == "publisher batching" + assert batching_span.kind == trace.SpanKind.INTERNAL + assert batching_span.parent is not None + + def test_init_w_api_endpoint(creds): client_options = {"api_endpoint": "testendpoint.google.com"} client = publisher.Client(client_options=client_options, credentials=creds) @@ -240,9 +460,17 @@ def test_publish(creds): # Check mock. batch.publish.assert_has_calls( [ - mock.call(gapic_types.PubsubMessage(data=b"spam")), mock.call( - gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spam"), + ) + ), + mock.call( + PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foo", attributes={"bar": "baz"} + ) + ) ), ] ) @@ -381,7 +609,9 @@ def test_publish_attrs_bytestring(creds): # The attributes should have been sent as text. 
batch.publish.assert_called_once_with( - gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) + ) ) @@ -421,8 +651,9 @@ def test_publish_new_batch_needed(creds): commit_timeout=gapic_v1.method.DEFAULT, ) message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) - batch1.publish.assert_called_once_with(message_pb) - batch2.publish.assert_called_once_with(message_pb) + wrapper = PublishMessageWrapper(message=message_pb) + batch1.publish.assert_called_once_with(wrapper) + batch2.publish.assert_called_once_with(wrapper) def test_publish_attrs_type_error(creds): @@ -445,9 +676,9 @@ def test_publish_custom_retry_overrides_configured_retry(creds): client.publish(topic, b"hello!", retry=mock.sentinel.custom_retry) fake_sequencer.publish.assert_called_once_with( - mock.ANY, retry=mock.sentinel.custom_retry, timeout=mock.ANY + wrapper=mock.ANY, retry=mock.sentinel.custom_retry, timeout=mock.ANY ) - message = fake_sequencer.publish.call_args.args[0] + message = fake_sequencer.publish.call_args.kwargs["wrapper"].message assert message.data == b"hello!" @@ -464,9 +695,9 @@ def test_publish_custom_timeout_overrides_configured_timeout(creds): client.publish(topic, b"hello!", timeout=mock.sentinel.custom_timeout) fake_sequencer.publish.assert_called_once_with( - mock.ANY, retry=mock.ANY, timeout=mock.sentinel.custom_timeout + wrapper=mock.ANY, retry=mock.ANY, timeout=mock.sentinel.custom_timeout ) - message = fake_sequencer.publish.call_args.args[0] + message = fake_sequencer.publish.call_args.kwargs["wrapper"].message assert message.data == b"hello!" @@ -626,10 +857,16 @@ def test_publish_with_ordering_key(creds): # Check mock. 
batch.publish.assert_has_calls( [ - mock.call(gapic_types.PubsubMessage(data=b"spam", ordering_key="k1")), mock.call( - gapic_types.PubsubMessage( - data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" + PublishMessageWrapper( + message=gapic_types.PubsubMessage(data=b"spam", ordering_key="k1") + ), + ), + mock.call( + PublishMessageWrapper( + message=gapic_types.PubsubMessage( + data=b"foo", attributes={"bar": "baz"}, ordering_key="k1" + ) ) ), ] From 6715a949287122af7025c9ac1a6b89c2ab4cc92c Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Tue, 17 Sep 2024 16:43:51 -0400 Subject: [PATCH 1102/1197] fix: Fix flaky test (#1254) --- .../publisher/test_publisher_client.py | 35 +++++++++++-------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 23255db3bd70..abc33f8cb470 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -332,21 +332,26 @@ def test_opentelemetry_publish(creds, span_exporter): client.publish(TOPIC, b"message") spans = span_exporter.get_finished_spans() - # Span 1: Publisher Flow control span - # Span 2: Publisher Batching span - # Publish Create Span would still be active, and hence not exported. - flow_control_span = spans[0] - assert flow_control_span.name == "publisher flow control" - assert flow_control_span.kind == trace.SpanKind.INTERNAL - # Assert the Publisher Flow Control Span has a parent(the Publish Create - # span is still active, and hence unexported. 
So, the value of parent cannot
-    # be asserted)
-    assert flow_control_span.parent is not None
-
-    batching_span = spans[1]
-    assert batching_span.name == "publisher batching"
-    assert batching_span.kind == trace.SpanKind.INTERNAL
-    assert batching_span.parent is not None
+    # Publisher Flow control and batching spans would be ended in the
+    # publish() function and are deterministically expected to be in the
+    # list of exported spans. The Publish Create span and Publish RPC span
+    # are run async and end at a non-deterministic time. Hence,
+    # asserting that we have at least two spans(flow control and batching span)
+    assert len(spans) >= 2
+    flow_control_span = None
+    batching_span = None
+    for span in spans:
+        if span.name == "publisher flow control":
+            flow_control_span = span
+            assert flow_control_span.kind == trace.SpanKind.INTERNAL
+            assert flow_control_span.parent is not None
+        if span.name == "publisher batching":
+            batching_span = span
+            assert batching_span.kind == trace.SpanKind.INTERNAL
+            assert batching_span.parent is not None
+
+    assert flow_control_span is not None
+    assert batching_span is not None


 def test_init_w_api_endpoint(creds):

From 89e148821a2033901dc91a7866a2307bca8c8e82 Mon Sep 17 00:00:00 2001
From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com>
Date: Tue, 24 Sep 2024 18:54:02 -0400
Subject: [PATCH 1103/1197] feat: Add OpenTelemetry support for Subscribe Side
 (#1252)

---
 .../open_telemetry/context_propagation.py     |  18 +-
 .../open_telemetry/subscribe_opentelemetry.py | 280 ++++++++++++
 .../subscriber/_protocol/dispatcher.py        | 244 +++++++++-
 .../pubsub_v1/subscriber/_protocol/leaser.py  |  32 +-
 .../subscriber/_protocol/messages_on_hold.py  |   2 +
 .../subscriber/_protocol/requests.py          |   8 +
 .../_protocol/streaming_pull_manager.py       | 140 +++++-
 .../cloud/pubsub_v1/subscriber/client.py      |  38 +-
 .../cloud/pubsub_v1/subscriber/message.py     |  44 +-
 .../google/cloud/pubsub_v1/types.py           |  32 +-
 .../pubsub_v1/subscriber/test_dispatcher.py   |
427 ++++++++++++++++++ .../unit/pubsub_v1/subscriber/test_leaser.py | 99 ++++ .../unit/pubsub_v1/subscriber/test_message.py | 152 +++++++ .../subscriber/test_messages_on_hold.py | 33 +- .../subscriber/test_streaming_pull_manager.py | 296 +++++++++++- .../test_subscribe_opentelemetry.py | 202 +++++++++ .../subscriber/test_subscriber_client.py | 49 ++ 17 files changed, 2066 insertions(+), 30 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py create mode 100644 packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py index 37fad3e20106..bfa1aa638029 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/context_propagation.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. -from opentelemetry.propagators.textmap import Setter +from typing import Optional, List + +from opentelemetry.propagators.textmap import Setter, Getter from google.pubsub_v1 import PubsubMessage @@ -37,3 +39,17 @@ def set(self, carrier: PubsubMessage, key: str, value: str) -> None: None """ carrier.attributes["googclient_" + key] = value + + +class OpenTelemetryContextGetter(Getter): + """ + Used by Open Telemetry for context propagation. 
+ """ + + def get(self, carrier: PubsubMessage, key: str) -> Optional[List[str]]: + if ("googclient_" + key) not in carrier.attributes: + return None + return [carrier.attributes["googclient_" + key]] + + def keys(self, carrier: PubsubMessage) -> List[str]: + return list(map(str, carrier.attributes.keys())) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py new file mode 100644 index 000000000000..88870be605a0 --- /dev/null +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py @@ -0,0 +1,280 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from typing import Optional, List
+from datetime import datetime
+
+from opentelemetry import trace, context
+from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
+from opentelemetry.trace.propagation import set_span_in_context
+
+from google.cloud.pubsub_v1.open_telemetry.context_propagation import (
+    OpenTelemetryContextGetter,
+)
+from google.pubsub_v1.types import PubsubMessage
+
+_OPEN_TELEMETRY_TRACER_NAME: str = "google.cloud.pubsub_v1"
+_OPEN_TELEMETRY_MESSAGING_SYSTEM: str = "gcp_pubsub"
+
+
+class SubscribeOpenTelemetry:
+    def __init__(self, message: PubsubMessage):
+        self._message: PubsubMessage = message
+
+        # subscribe span will be initialized by the `start_subscribe_span`
+        # method.
+        self._subscribe_span: Optional[trace.Span] = None
+
+        # subscriber concurrency control span will be initialized by the
+        # `start_subscribe_concurrency_control_span` method.
+        self._concurrency_control_span: Optional[trace.Span] = None
+
+        # scheduler span will be initialized by the
+        # `start_subscribe_scheduler_span` method.
+        self._scheduler_span: Optional[trace.Span] = None
+
+        # This will be set by `start_subscribe_span` method and will be used
+        # for other spans, such as process span.
+        self._subscription_id: Optional[str] = None
+
+        # This will be set by `start_process_span` method.
+        self._process_span: Optional[trace.Span] = None
+
+        # This will be set by `start_subscribe_span` method, if a publisher create span
+        # context was extracted from trace propagation. And will be used by spans like
+        # process span to add links to the publisher create span.
+        self._publisher_create_span_context: Optional[context.Context] = None
+
+        # This will be set by `start_subscribe_span` method and will be used
+        # for other spans, such as modack span.
+ self._project_id: Optional[str] = None + + @property + def subscription_id(self) -> Optional[str]: + return self._subscription_id + + @property + def project_id(self) -> Optional[str]: + return self._project_id + + @property + def subscribe_span(self) -> Optional[trace.Span]: + return self._subscribe_span + + def start_subscribe_span( + self, + subscription: str, + exactly_once_enabled: bool, + ack_id: str, + delivery_attempt: int, + ) -> None: + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + parent_span_context = TraceContextTextMapPropagator().extract( + carrier=self._message, + getter=OpenTelemetryContextGetter(), + ) + self._publisher_create_span_context = parent_span_context + split_subscription: List[str] = subscription.split("/") + assert len(split_subscription) == 4 + subscription_short_name = split_subscription[3] + self._project_id = split_subscription[1] + self._subscription_id = subscription_short_name + with tracer.start_as_current_span( + name=f"{subscription_short_name} subscribe", + context=parent_span_context if parent_span_context else None, + kind=trace.SpanKind.CONSUMER, + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.destination.name": subscription_short_name, + "gcp.project_id": subscription.split("/")[1], + "messaging.message.id": self._message.message_id, + "messaging.message.body.size": len(self._message.data), + "messaging.gcp_pubsub.message.ack_id": ack_id, + "messaging.gcp_pubsub.message.ordering_key": self._message.ordering_key, + "messaging.gcp_pubsub.message.exactly_once_delivery": exactly_once_enabled, + "code.function": "_on_response", + "messaging.gcp_pubsub.message.delivery_attempt": delivery_attempt, + }, + end_on_exit=False, + ) as subscribe_span: + self._subscribe_span = subscribe_span + + def add_subscribe_span_event(self, event: str) -> None: + assert self._subscribe_span is not None + self._subscribe_span.add_event( + name=event, + attributes={ + "timestamp": 
str(datetime.now()), + }, + ) + + def end_subscribe_span(self) -> None: + assert self._subscribe_span is not None + self._subscribe_span.end() + + def set_subscribe_span_result(self, result: str) -> None: + assert self._subscribe_span is not None + self._subscribe_span.set_attribute( + key="messaging.gcp_pubsub.result", + value=result, + ) + + def start_subscribe_concurrency_control_span(self) -> None: + assert self._subscribe_span is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name="subscriber concurrency control", + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._subscribe_span), + end_on_exit=False, + ) as concurrency_control_span: + self._concurrency_control_span = concurrency_control_span + + def end_subscribe_concurrency_control_span(self) -> None: + assert self._concurrency_control_span is not None + self._concurrency_control_span.end() + + def start_subscribe_scheduler_span(self) -> None: + assert self._subscribe_span is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name="subscriber scheduler", + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._subscribe_span), + end_on_exit=False, + ) as scheduler_span: + self._scheduler_span = scheduler_span + + def end_subscribe_scheduler_span(self) -> None: + assert self._scheduler_span is not None + self._scheduler_span.end() + + def start_process_span(self) -> None: + assert self._subscribe_span is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + publish_create_span_link: Optional[trace.Link] = None + if self._publisher_create_span_context: + publish_create_span: trace.Span = trace.get_current_span( + self._publisher_create_span_context + ) + span_context: Optional[ + trace.SpanContext + ] = publish_create_span.get_span_context() + publish_create_span_link = ( + trace.Link(span_context) if span_context else None + ) + + with 
tracer.start_as_current_span( + name=f"{self._subscription_id} process", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + }, + kind=trace.SpanKind.INTERNAL, + context=set_span_in_context(self._subscribe_span), + links=[publish_create_span_link] if publish_create_span_link else None, + end_on_exit=False, + ) as process_span: + self._process_span = process_span + + def end_process_span(self) -> None: + assert self._process_span is not None + self._process_span.end() + + def add_process_span_event(self, event: str) -> None: + assert self._process_span is not None + self._process_span.add_event( + name=event, + attributes={ + "timestamp": str(datetime.now()), + }, + ) + + +def start_modack_span( + subscribe_span_links: List[trace.Link], + subscription_id: Optional[str], + message_count: int, + deadline: float, + project_id: Optional[str], + code_function: str, + receipt_modack: bool, +) -> trace.Span: + assert subscription_id is not None + assert project_id is not None + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=f"{subscription_id} modack", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.batch.message_count": message_count, + "messaging.gcp_pubsub.message.ack_deadline": deadline, + "messaging.destination.name": subscription_id, + "gcp.project_id": project_id, + "messaging.operation.name": "modack", + "code.function": code_function, + "messaging.gcp_pubsub.is_receipt_modack": receipt_modack, + }, + links=subscribe_span_links, + kind=trace.SpanKind.CLIENT, + end_on_exit=False, + ) as modack_span: + return modack_span + + +def start_ack_span( + subscription_id: str, + message_count: int, + project_id: str, + links: List[trace.Link], +) -> trace.Span: + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=f"{subscription_id} ack", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + 
"messaging.batch.message_count": message_count, + "messaging.operation": "ack", + "gcp.project_id": project_id, + "messaging.destination.name": subscription_id, + "code.function": "ack", + }, + kind=trace.SpanKind.CLIENT, + links=links, + end_on_exit=False, + ) as ack_span: + return ack_span + + +def start_nack_span( + subscription_id: str, + message_count: int, + project_id: str, + links: List[trace.Link], +) -> trace.Span: + tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) + with tracer.start_as_current_span( + name=f"{subscription_id} nack", + attributes={ + "messaging.system": _OPEN_TELEMETRY_MESSAGING_SYSTEM, + "messaging.batch.message_count": message_count, + "messaging.operation": "nack", + "gcp.project_id": project_id, + "messaging.destination.name": subscription_id, + "code.function": "modify_ack_deadline", + }, + kind=trace.SpanKind.CLIENT, + links=links, + end_on_exit=False, + ) as nack_span: + return nack_span diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py index 15ad4abb3ab6..fe377143212e 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/dispatcher.py @@ -26,11 +26,17 @@ import warnings from google.api_core.retry import exponential_sleep_generator +from opentelemetry import trace + from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber.exceptions import ( AcknowledgeStatus, ) +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + start_ack_span, + start_nack_span, +) if typing.TYPE_CHECKING: # pragma: NO COVER import queue @@ -232,16 +238,69 @@ def ack(self, items: Sequence[requests.AckRequest]) -> None: items_gen = iter(items) ack_ids_gen 
= (item.ack_id for item in items) total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) + subscription_id: Optional[str] = None + project_id: Optional[str] = None + for item in items: + if item.opentelemetry_data: + item.opentelemetry_data.add_subscribe_span_event("ack start") + if subscription_id is None: + subscription_id = item.opentelemetry_data.subscription_id + if project_id is None: + project_id = item.opentelemetry_data.project_id for _ in range(total_chunks): ack_reqs_dict = { req.ack_id: req for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) } + + subscribe_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + for ack_req in ack_reqs_dict.values(): + if ack_req.opentelemetry_data: + subscribe_span: Optional[ + trace.Span + ] = ack_req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + ack_span: Optional[trace.Span] = None + if subscription_id and project_id: + ack_span = start_ack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + ack_span and ack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + ack_span_context: trace.SpanContext = ack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=ack_span_context, + attributes={ + "messaging.operation.name": "ack", + }, + ) + requests_completed, requests_to_retry = self._manager.send_unary_ack( ack_ids=list(itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE)), ack_reqs_dict=ack_reqs_dict, ) + if ack_span: + ack_span.end() + + for completed_ack in requests_completed: + if completed_ack.opentelemetry_data: + completed_ack.opentelemetry_data.add_subscribe_span_event("ack end") + completed_ack.opentelemetry_data.set_subscribe_span_result("acked") + 
completed_ack.opentelemetry_data.end_subscribe_span() # Remove the completed messages from lease management. self.drop(requests_completed) @@ -267,7 +326,7 @@ def _start_retry_thread(self, thread_name, thread_target): # a back-end timeout error or other permanent failure. retry_thread.start() - def _retry_acks(self, requests_to_retry): + def _retry_acks(self, requests_to_retry: List[requests.AckRequest]): retry_delay_gen = exponential_sleep_generator( initial=_MIN_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, maximum=_MAX_EXACTLY_ONCE_DELIVERY_ACK_MODACK_RETRY_DURATION_SECS, @@ -282,10 +341,62 @@ def _retry_acks(self, requests_to_retry): time.sleep(time_to_wait) ack_reqs_dict = {req.ack_id: req for req in requests_to_retry} + subscription_id: Optional[str] = None + project_id: Optional[str] = None + subscribe_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + for req in requests_to_retry: + if req.opentelemetry_data: + req.opentelemetry_data.add_subscribe_span_event("ack start") + if subscription_id is None: + subscription_id = req.opentelemetry_data.subscription_id + if project_id is None: + project_id = req.opentelemetry_data.project_id + subscribe_span: Optional[ + trace.Span + ] = req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + ack_span: Optional[trace.Span] = None + if subscription_id and project_id: + ack_span = start_ack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + ack_span and ack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + ack_span_context: trace.SpanContext = ack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=ack_span_context, + attributes={ + "messaging.operation.name": "ack", + }, + ) + 
requests_completed, requests_to_retry = self._manager.send_unary_ack( ack_ids=[req.ack_id for req in requests_to_retry], ack_reqs_dict=ack_reqs_dict, ) + + if ack_span: + ack_span.end() + + for completed_ack in requests_completed: + if completed_ack.opentelemetry_data: + completed_ack.opentelemetry_data.add_subscribe_span_event("ack end") + completed_ack.opentelemetry_data.set_subscribe_span_result("acked") + completed_ack.opentelemetry_data.end_subscribe_span() + assert ( len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE ), "Too many requests to be retried." @@ -336,15 +447,63 @@ def modify_ack_deadline( deadline_seconds_gen = (item.seconds for item in items) total_chunks = int(math.ceil(len(items) / _ACK_IDS_BATCH_SIZE)) + subscription_id: Optional[str] = None + project_id: Optional[str] = None + + for item in items: + if item.opentelemetry_data: + if math.isclose(item.seconds, 0): + item.opentelemetry_data.add_subscribe_span_event("nack start") + if subscription_id is None: + subscription_id = item.opentelemetry_data.subscription_id + if project_id is None: + project_id = item.opentelemetry_data.project_id + else: + item.opentelemetry_data.add_subscribe_span_event("modack start") for _ in range(total_chunks): ack_reqs_dict = { req.ack_id: req for req in itertools.islice(items_gen, _ACK_IDS_BATCH_SIZE) } + subscribe_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + for ack_req in ack_reqs_dict.values(): + if ack_req.opentelemetry_data and math.isclose(ack_req.seconds, 0): + subscribe_span: Optional[ + trace.Span + ] = ack_req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + nack_span: Optional[trace.Span] = None + if subscription_id and project_id: + nack_span = start_nack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + 
) + if ( + nack_span and nack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + nack_span_context: trace.SpanContext = nack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=nack_span_context, + attributes={ + "messaging.operation.name": "nack", + }, + ) requests_to_retry: List[requests.ModAckRequest] + requests_completed: Optional[List[requests.ModAckRequest]] = None if default_deadline is None: # no further work needs to be done for `requests_to_retry` - _, requests_to_retry = self._manager.send_unary_modack( + requests_completed, requests_to_retry = self._manager.send_unary_modack( modify_deadline_ack_ids=list( itertools.islice(ack_ids_gen, _ACK_IDS_BATCH_SIZE) ), @@ -355,7 +514,7 @@ def modify_ack_deadline( default_deadline=None, ) else: - _, requests_to_retry = self._manager.send_unary_modack( + requests_completed, requests_to_retry = self._manager.send_unary_modack( modify_deadline_ack_ids=itertools.islice( ack_ids_gen, _ACK_IDS_BATCH_SIZE ), @@ -363,10 +522,28 @@ def modify_ack_deadline( ack_reqs_dict=ack_reqs_dict, default_deadline=default_deadline, ) + if nack_span: + nack_span.end() assert ( len(requests_to_retry) <= _ACK_IDS_BATCH_SIZE ), "Too many requests to be retried." + for completed_modack in requests_completed: + if completed_modack.opentelemetry_data: + # nack is a modack with 0 extension seconds. + if math.isclose(completed_modack.seconds, 0): + completed_modack.opentelemetry_data.set_subscribe_span_result( + "nacked" + ) + completed_modack.opentelemetry_data.add_subscribe_span_event( + "nack end" + ) + completed_modack.opentelemetry_data.end_subscribe_span() + else: + completed_modack.opentelemetry_data.add_subscribe_span_event( + "modack end" + ) + # Retry on a separate thread so the dispatcher thread isn't blocked # by sleeps. 
if requests_to_retry: @@ -390,11 +567,67 @@ def _retry_modacks(self, requests_to_retry): time.sleep(time_to_wait) ack_reqs_dict = {req.ack_id: req for req in requests_to_retry} + + subscription_id = None + project_id = None + subscribe_links = [] + subscribe_spans = [] + for ack_req in ack_reqs_dict.values(): + if ack_req.opentelemetry_data and math.isclose(ack_req.seconds, 0): + if subscription_id is None: + subscription_id = ack_req.opentelemetry_data.subscription_id + if project_id is None: + project_id = ack_req.opentelemetry_data.project_id + subscribe_span = ack_req.opentelemetry_data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + nack_span = None + if subscription_id and project_id: + nack_span = start_nack_span( + subscription_id, + len(ack_reqs_dict), + project_id, + subscribe_links, + ) + if ( + nack_span and nack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + nack_span_context: trace.SpanContext = nack_span.get_span_context() + for subscribe_span in subscribe_spans: + subscribe_span.add_link( + context=nack_span_context, + attributes={ + "messaging.operation.name": "nack", + }, + ) requests_completed, requests_to_retry = self._manager.send_unary_modack( modify_deadline_ack_ids=[req.ack_id for req in requests_to_retry], modify_deadline_seconds=[req.seconds for req in requests_to_retry], ack_reqs_dict=ack_reqs_dict, ) + if nack_span: + nack_span.end() + for completed_modack in requests_completed: + if completed_modack.opentelemetry_data: + # nack is a modack with 0 extension seconds. 
+ if math.isclose(completed_modack.seconds, 0): + completed_modack.opentelemetry_data.set_subscribe_span_result( + "nacked" + ) + completed_modack.opentelemetry_data.add_subscribe_span_event( + "nack end" + ) + completed_modack.opentelemetry_data.end_subscribe_span() + else: + completed_modack.opentelemetry_data.add_subscribe_span_event( + "modack end" + ) def nack(self, items: Sequence[requests.NackRequest]) -> None: """Explicitly deny receipt of messages. @@ -405,7 +638,10 @@ def nack(self, items: Sequence[requests.NackRequest]) -> None: self.modify_ack_deadline( [ requests.ModAckRequest( - ack_id=item.ack_id, seconds=0, future=item.future + ack_id=item.ack_id, + seconds=0, + future=item.future, + opentelemetry_data=item.opentelemetry_data, ) for item in items ] diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 16018e384745..5abdb7081991 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -23,6 +23,9 @@ from typing import Dict, Iterable, Optional, Union from google.cloud.pubsub_v1.subscriber._protocol.dispatcher import _MAX_BATCH_LATENCY +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) try: from collections.abc import KeysView @@ -50,6 +53,7 @@ class _LeasedMessage(typing.NamedTuple): size: int ordering_key: Optional[str] + opentelemetry_data: Optional[SubscribeOpenTelemetry] class Leaser(object): @@ -98,6 +102,7 @@ def add(self, items: Iterable[requests.LeaseRequest]) -> None: sent_time=float("inf"), size=item.byte_size, ordering_key=item.ordering_key, + opentelemetry_data=item.opentelemetry_data, ) self._bytes += item.byte_size else: @@ -175,6 +180,17 @@ def maintain_leases(self) -> None: "Dropping %s items because they were leased too 
long.", len(to_drop) ) assert self._manager.dispatcher is not None + for drop_msg in to_drop: + leased_message = leased_messages.get(drop_msg.ack_id) + if leased_message and leased_message.opentelemetry_data: + leased_message.opentelemetry_data.add_process_span_event( + "expired" + ) + leased_message.opentelemetry_data.end_process_span() + leased_message.opentelemetry_data.set_subscribe_span_result( + "expired" + ) + leased_message.opentelemetry_data.end_subscribe_span() self._manager.dispatcher.drop(to_drop) # Remove dropped items from our copy of the leased messages (they @@ -198,14 +214,28 @@ def maintain_leases(self) -> None: # is inactive. assert self._manager.dispatcher is not None ack_id_gen = (ack_id for ack_id in ack_ids) + opentelemetry_data = [ + message.opentelemetry_data + for message in list(leased_messages.values()) + if message.opentelemetry_data + ] expired_ack_ids = self._manager._send_lease_modacks( - ack_id_gen, deadline + ack_id_gen, + deadline, + opentelemetry_data, ) start_time = time.time() # If exactly once delivery is enabled, we should drop all expired ack_ids from lease management. 
if self._manager._exactly_once_delivery_enabled() and len(expired_ack_ids): assert self._manager.dispatcher is not None + for ack_id in expired_ack_ids: + msg = leased_messages.get(ack_id) + if msg and msg.opentelemetry_data: + msg.opentelemetry_data.add_process_span_event("expired") + msg.opentelemetry_data.end_process_span() + msg.opentelemetry_data.set_subscribe_span_result("expired") + msg.opentelemetry_data.end_subscribe_span() self._manager.dispatcher.drop( [ requests.DropRequest( diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py index 63c2edbfa969..3d4c2a392b20 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/messages_on_hold.py @@ -100,6 +100,8 @@ def put(self, message: "subscriber.message.Message") -> None: Args: message: The message to put on hold. 
""" + if message.opentelemetry_data: + message.opentelemetry_data.start_subscribe_scheduler_span() self._messages_on_hold.append(message) self._size = self._size + 1 diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index 9cd387545909..6fd35896b9cc 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -15,6 +15,10 @@ import typing from typing import NamedTuple, Optional +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) + if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1.subscriber import futures @@ -27,6 +31,7 @@ class AckRequest(NamedTuple): time_to_ack: float ordering_key: Optional[str] future: Optional["futures.Future"] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None class DropRequest(NamedTuple): @@ -39,12 +44,14 @@ class LeaseRequest(NamedTuple): ack_id: str byte_size: int ordering_key: Optional[str] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None class ModAckRequest(NamedTuple): ack_id: str seconds: float future: Optional["futures.Future"] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None class NackRequest(NamedTuple): @@ -52,3 +59,4 @@ class NackRequest(NamedTuple): byte_size: int ordering_key: Optional[str] future: Optional["futures.Future"] + opentelemetry_data: Optional[SubscribeOpenTelemetry] = None diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index c01dd7f2ec2f..4c9e1c20e7bc 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -23,6 +23,7 @@ from typing import Any, Dict, Callable, Iterable, List, Optional, Set, Tuple import uuid +from opentelemetry import trace import grpc # type: ignore from google.api_core import bidi @@ -38,6 +39,9 @@ AcknowledgeError, AcknowledgeStatus, ) +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) import google.cloud.pubsub_v1.subscriber.message from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber.scheduler import ThreadScheduler @@ -46,6 +50,9 @@ from google.rpc.error_details_pb2 import ErrorInfo # type: ignore from google.rpc import code_pb2 # type: ignore from google.rpc import status_pb2 +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + start_modack_span, +) if typing.TYPE_CHECKING: # pragma: NO COVER from google.cloud.pubsub_v1 import subscriber @@ -123,6 +130,9 @@ def _wrap_callback_errors( message: The Pub/Sub message. """ try: + if message.opentelemetry_data: + message.opentelemetry_data.end_subscribe_concurrency_control_span() + message.opentelemetry_data.start_process_span() callback(message) except BaseException as exc: # Note: the likelihood of this failing is extremely low. 
This just adds @@ -582,7 +592,8 @@ def _maybe_release_messages(self) -> None: msg = self._messages_on_hold.get() if not msg: break - + if msg.opentelemetry_data: + msg.opentelemetry_data.end_subscribe_scheduler_span() self._schedule_message_on_hold(msg) released_ack_ids.append(msg.ack_id) @@ -618,6 +629,8 @@ def _schedule_message_on_hold( ) assert self._scheduler is not None assert self._callback is not None + if msg.opentelemetry_data: + msg.opentelemetry_data.start_subscribe_concurrency_control_span() self._scheduler.schedule(self._callback, msg) def send_unary_ack( @@ -1007,22 +1020,85 @@ def _get_initial_request( return request def _send_lease_modacks( - self, ack_ids: Iterable[str], ack_deadline: float, warn_on_invalid=True + self, + ack_ids: Iterable[str], + ack_deadline: float, + opentelemetry_data: List[SubscribeOpenTelemetry], + warn_on_invalid=True, + receipt_modack: bool = False, ) -> Set[str]: exactly_once_enabled = False + + modack_span: Optional[trace.Span] = None + if self._client.open_telemetry_enabled: + subscribe_span_links: List[trace.Link] = [] + subscribe_spans: List[trace.Span] = [] + subscription_split: List[str] = self._subscription.split("/") + assert len(subscription_split) == 4 + subscription_id: str = subscription_split[3] + project_id: str = subscription_split[1] + for data in opentelemetry_data: + subscribe_span: Optional[trace.Span] = data.subscribe_span + if ( + subscribe_span + and subscribe_span.get_span_context().trace_flags.sampled + ): + subscribe_span_links.append( + trace.Link(subscribe_span.get_span_context()) + ) + subscribe_spans.append(subscribe_span) + modack_span = start_modack_span( + subscribe_span_links, + subscription_id, + len(opentelemetry_data), + ack_deadline, + project_id, + "_send_lease_modacks", + receipt_modack, + ) + if ( + modack_span and modack_span.get_span_context().trace_flags.sampled + ): # pragma: NO COVER + modack_span_context: trace.SpanContext = modack_span.get_span_context() + for subscribe_span 
in subscribe_spans: + subscribe_span.add_link( + context=modack_span_context, + attributes={ + "messaging.operation.name": "modack", + }, + ) + with self._exactly_once_enabled_lock: exactly_once_enabled = self._exactly_once_enabled if exactly_once_enabled: - items = [ - requests.ModAckRequest(ack_id, ack_deadline, futures.Future()) - for ack_id in ack_ids - ] + eod_items: List[requests.ModAckRequest] = [] + if self._client.open_telemetry_enabled: + for ack_id, data in zip( + ack_ids, opentelemetry_data + ): # pragma: NO COVER # Identical code covered in the same function below + assert data is not None + eod_items.append( + requests.ModAckRequest( + ack_id, + ack_deadline, + futures.Future(), + data, + ) + ) + else: + eod_items = [ + requests.ModAckRequest(ack_id, ack_deadline, futures.Future()) + for ack_id in ack_ids + ] assert self._dispatcher is not None - self._dispatcher.modify_ack_deadline(items, ack_deadline) - + self._dispatcher.modify_ack_deadline(eod_items, ack_deadline) + if ( + modack_span + ): # pragma: NO COVER # Identical code covered in the same function below + modack_span.end() expired_ack_ids = set() - for req in items: + for req in eod_items: try: assert req.future is not None req.future.result() @@ -1039,12 +1115,27 @@ def _send_lease_modacks( expired_ack_ids.add(req.ack_id) return expired_ack_ids else: - items = [ - requests.ModAckRequest(ack_id, self.ack_deadline, None) - for ack_id in ack_ids - ] + items: List[requests.ModAckRequest] = [] + if self._client.open_telemetry_enabled: + for ack_id, data in zip(ack_ids, opentelemetry_data): + assert data is not None + items.append( + requests.ModAckRequest( + ack_id, + self.ack_deadline, + None, + data, + ) + ) + else: + items = [ + requests.ModAckRequest(ack_id, self.ack_deadline, None) + for ack_id in ack_ids + ] assert self._dispatcher is not None self._dispatcher.modify_ack_deadline(items, ack_deadline) + if modack_span: + modack_span.end() return set() def 
_exactly_once_delivery_enabled(self) -> bool: @@ -1075,6 +1166,18 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: # protobuf message to significantly gain on attribute access performance. received_messages = response._pb.received_messages + subscribe_opentelemetry: List[SubscribeOpenTelemetry] = [] + if self._client.open_telemetry_enabled: + for received_message in received_messages: + opentelemetry_data = SubscribeOpenTelemetry(received_message.message) + opentelemetry_data.start_subscribe_span( + self._subscription, + response.subscription_properties.exactly_once_delivery_enabled, + received_message.ack_id, + received_message.delivery_attempt, + ) + subscribe_opentelemetry.append(opentelemetry_data) + _LOGGER.debug( "Processing %s received message(s), currently on hold %s (bytes %s).", len(received_messages), @@ -1100,7 +1203,11 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: # received them. ack_id_gen = (message.ack_id for message in received_messages) expired_ack_ids = self._send_lease_modacks( - ack_id_gen, self.ack_deadline, warn_on_invalid=False + ack_id_gen, + self.ack_deadline, + subscribe_opentelemetry, + warn_on_invalid=False, + receipt_modack=True, ) with self._pause_resume_lock: @@ -1110,6 +1217,7 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: ) return + i: int = 0 for received_message in received_messages: if ( not self._exactly_once_delivery_enabled() @@ -1122,12 +1230,16 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: self._scheduler.queue, self._exactly_once_delivery_enabled, ) + if self._client.open_telemetry_enabled: + message.opentelemetry_data = subscribe_opentelemetry[i] + i = i + 1 self._messages_on_hold.put(message) self._on_hold_bytes += message.size req = requests.LeaseRequest( ack_id=message.ack_id, byte_size=message.size, ordering_key=message.ordering_key, + opentelemetry_data=message.opentelemetry_data, ) 
self._leaser.add([req]) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 0d0d36a0c618..175095077f92 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -14,6 +14,7 @@ from __future__ import absolute_import +import sys import os import typing from typing import cast, Any, Callable, Optional, Sequence, Union @@ -67,7 +68,16 @@ class Client(subscriber_client.SubscriberClient): ) """ - def __init__(self, **kwargs: Any): + def __init__( + self, + subscriber_options: Union[types.SubscriberOptions, Sequence] = (), + **kwargs: Any + ): + assert ( + isinstance(subscriber_options, types.SubscriberOptions) + or len(subscriber_options) == 0 + ), "subscriber_options must be of type SubscriberOptions or an empty sequence." + # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. @@ -82,6 +92,32 @@ def __init__(self, **kwargs: Any): self._target = self._transport._host self._closed = False + self.subscriber_options = types.SubscriberOptions(*subscriber_options) + + # Set / override Open Telemetry option. + self._open_telemetry_enabled = ( + self.subscriber_options.enable_open_telemetry_tracing + ) + # OpenTelemetry features used by the library are not supported in Python versions <= 3.7. + # Refer https://github.com/open-telemetry/opentelemetry-python/issues/3993#issuecomment-2211976389 + if ( + self.subscriber_options.enable_open_telemetry_tracing + and sys.version_info.major == 3 + and sys.version_info.minor < 8 + ): + warnings.warn( + message="Open Telemetry for Python version 3.7 or lower is not supported. 
Disabling Open Telemetry tracing.", + category=RuntimeWarning, + ) + self._open_telemetry_enabled = False + + @property + def open_telemetry_enabled(self) -> bool: + """ + Returns True if Open Telemetry is enabled. False otherwise. + """ + return self._open_telemetry_enabled # pragma: NO COVER + @classmethod def from_service_account_file( # type: ignore[override] cls, filename: str, **kwargs: Any diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index f744966a257c..61f60c4d9973 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -24,6 +24,9 @@ from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber import futures from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) if typing.TYPE_CHECKING: # pragma: NO COVER @@ -85,6 +88,8 @@ class Message(object): information on this type. publish_time (google.protobuf.timestamp_pb2.Timestamp): The time that this message was originally published. + opentelemetry_data (google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry.SubscribeOpenTelemetry) + Open Telemetry data associated with this message. None if Open Telemetry is not enabled. """ def __init__( @@ -144,6 +149,9 @@ def __init__( self._ordering_key = message.ordering_key self._size = message.ByteSize() + # None if Open Telemetry is disabled. Else contains OpenTelemetry data. + self._opentelemetry_data: Optional[SubscribeOpenTelemetry] = None + def __repr__(self): # Get an abbreviated version of the data. 
abbv_data = self._message.data @@ -158,6 +166,14 @@ def __repr__(self): pretty_attrs = pretty_attrs.lstrip() return _MESSAGE_REPR.format(abbv_data, str(self.ordering_key), pretty_attrs) + @property + def opentelemetry_data(self): + return self._opentelemetry_data # pragma: NO COVER + + @opentelemetry_data.setter + def opentelemetry_data(self, data): + self._opentelemetry_data = data # pragma: NO COVER + @property def attributes(self) -> "containers.ScalarMap": """Return the attributes of the underlying Pub/Sub Message. @@ -252,6 +268,9 @@ def ack(self) -> None: https://cloud.google.com/pubsub/docs/exactly-once-delivery." """ + if self.opentelemetry_data: + self.opentelemetry_data.add_process_span_event("ack called") + self.opentelemetry_data.end_process_span() time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( requests.AckRequest( @@ -260,6 +279,7 @@ def ack(self) -> None: time_to_ack=time_to_ack, ordering_key=self.ordering_key, future=None, + opentelemetry_data=self.opentelemetry_data, ) ) @@ -302,6 +322,9 @@ def ack_with_response(self) -> "futures.Future": pubsub_v1.subscriber.exceptions.AcknowledgeError exception will be thrown. """ + if self.opentelemetry_data: + self.opentelemetry_data.add_process_span_event("ack called") + self.opentelemetry_data.end_process_span() req_future: Optional[futures.Future] if self._exactly_once_delivery_enabled_func(): future = futures.Future() @@ -317,6 +340,7 @@ def ack_with_response(self) -> "futures.Future": time_to_ack=time_to_ack, ordering_key=self.ordering_key, future=req_future, + opentelemetry_data=self.opentelemetry_data, ) ) return future @@ -357,7 +381,12 @@ def modify_ack_deadline(self, seconds: int) -> None: against. 
""" self._request_queue.put( - requests.ModAckRequest(ack_id=self._ack_id, seconds=seconds, future=None) + requests.ModAckRequest( + ack_id=self._ack_id, + seconds=seconds, + future=None, + opentelemetry_data=self.opentelemetry_data, + ) ) def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": @@ -416,7 +445,10 @@ def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": self._request_queue.put( requests.ModAckRequest( - ack_id=self._ack_id, seconds=seconds, future=req_future + ack_id=self._ack_id, + seconds=seconds, + future=req_future, + opentelemetry_data=self.opentelemetry_data, ) ) @@ -429,12 +461,16 @@ def nack(self) -> None: may take place immediately or after a delay, and may arrive at this subscriber or another. """ + if self.opentelemetry_data: + self.opentelemetry_data.add_process_span_event("nack called") + self.opentelemetry_data.end_process_span() self._request_queue.put( requests.NackRequest( ack_id=self._ack_id, byte_size=self.size, ordering_key=self.ordering_key, future=None, + opentelemetry_data=self.opentelemetry_data, ) ) @@ -472,6 +508,9 @@ def nack_with_response(self) -> "futures.Future": will be thrown. 
""" + if self.opentelemetry_data: + self.opentelemetry_data.add_process_span_event("nack called") + self.opentelemetry_data.end_process_span() req_future: Optional[futures.Future] if self._exactly_once_delivery_enabled_func(): future = futures.Future() @@ -486,6 +525,7 @@ def nack_with_response(self) -> "futures.Future": byte_size=self.size, ordering_key=self.ordering_key, future=req_future, + opentelemetry_data=self.opentelemetry_data, ) ) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index c4282e68594d..7e94a725078d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -131,6 +131,29 @@ class PublishFlowControl(NamedTuple): """The action to take when publish flow control limits are exceeded.""" +# Define the default subscriber options. +# +# This class is used when creating a subscriber client to pass in options +# to enable/disable features. +class SubscriberOptions(NamedTuple): + """ + Options for the subscriber client. + Attributes: + enable_open_telemetry_tracing (bool): + Whether to enable OpenTelemetry tracing. Defaults to False. + """ + + enable_open_telemetry_tracing: bool = False + """ + Whether to enable OpenTelemetry tracing. + + Warning: traces are subject to change. The name and attributes of a span might + change without notice. Only use run traces interactively. Don't use in + automation. Running non-interactive traces can cause problems if the underlying + trace architecture changes without notice. + """ + + # Define the default publisher options. 
# # This class is used when creating a publisher client to pass in options @@ -175,7 +198,14 @@ class PublisherOptions(NamedTuple): ) enable_open_telemetry_tracing: bool = False # disabled by default - """Open Telemetry tracing is enabled if this is set to True.""" + """ + Open Telemetry tracing is enabled if this is set to True. + + Warning: traces are subject to change. The name and attributes of a span might + change without notice. Only use run traces interactively. Don't use in + automation. Running non-interactive traces can cause problems if the underlying + trace architecture changes without notice. + """ # Define the type class and default values for flow control settings. diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 89d72c61d39a..5483c48c5eae 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -17,11 +17,17 @@ import sys import threading +from opentelemetry import trace + from google.cloud.pubsub_v1.subscriber._protocol import dispatcher from google.cloud.pubsub_v1.subscriber._protocol import helper_threads from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager from google.cloud.pubsub_v1.subscriber import futures +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.pubsub_v1.types import PubsubMessage # special case python < 3.8 if sys.version_info.major == 3 and sys.version_info.minor < 8: @@ -365,6 +371,125 @@ def test_unknown_request_type(): dispatcher_.dispatch_callback(items) +def test_opentelemetry_modify_ack_deadline(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + 
dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + opentelemetry_data = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=60, + future=None, + opentelemetry_data=opentelemetry_data, + ) + ] + manager.send_unary_modack.return_value = (items, []) + dispatcher_.modify_ack_deadline(items) + + # Subscribe span would not have ended as part of a modack. So, end it + # in the test, so that we can export and assert its contents. + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + subscribe_span = spans[0] + + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "modack start" + assert subscribe_span.events[1].name == "modack end" + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_ack(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + opentelemetry_data=data1, + ), + 
requests.AckRequest( + ack_id="ack_id_string2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=None, + opentelemetry_data=data2, + ), + ] + manager.send_unary_ack.return_value = (items, []) + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_.ack(items) + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 3 + ack_span = spans[0] + + for subscribe_span in spans[1:]: + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "acked" + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "ack start" + assert subscribe_span.events[1].name == "ack end" + + # This subscribe span is sampled, so we expect it to be linked to the ack + # span. + assert len(spans[1].links) == 1 + assert spans[1].links[0].context == ack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "ack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the ack span + assert len(spans[2].links) == 0 + + assert ack_span.name == "subscriptionID ack" + assert ack_span.kind == trace.SpanKind.CLIENT + assert ack_span.parent is None + assert len(ack_span.links) == 1 + assert ack_span.attributes["messaging.system"] == "gcp_pubsub" + assert ack_span.attributes["messaging.batch.message_count"] == 2 + assert ack_span.attributes["messaging.operation"] == "ack" + assert ack_span.attributes["gcp.project_id"] == "projectID" + assert ack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert ack_span.attributes["code.function"] == "ack" + + def test_ack(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -481,6 +606,92 @@ def test_retry_acks_in_new_thread(): assert ctor_call.kwargs["daemon"] 
+@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_retry_acks(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + + f = futures.Future() + items = [ + requests.AckRequest( + ack_id="ack_id_string", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + opentelemetry_data=data1, + ), + requests.AckRequest( + ack_id="ack_id_string2", + byte_size=0, + time_to_ack=20, + ordering_key="", + future=f, + opentelemetry_data=data2, + ), + ] + manager.send_unary_ack.side_effect = [(items, [])] + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch("time.sleep", return_value=None): + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_._retry_acks(items) + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 3 + ack_span = spans[0] + + for subscribe_span in spans[1:]: + assert "messaging.gcp_pubsub.result" in subscribe_span.attributes + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "acked" + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "ack start" + assert subscribe_span.events[1].name == "ack end" + + # This subscribe span is sampled, so we expect it to be linked to the 
ack + # span. + assert len(spans[1].links) == 1 + assert spans[1].links[0].context == ack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "ack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the ack span + assert len(spans[2].links) == 0 + + assert ack_span.name == "subscriptionID ack" + assert ack_span.kind == trace.SpanKind.CLIENT + assert ack_span.parent is None + assert len(ack_span.links) == 1 + assert ack_span.attributes["messaging.system"] == "gcp_pubsub" + assert ack_span.attributes["messaging.batch.message_count"] == 2 + assert ack_span.attributes["messaging.operation"] == "ack" + assert ack_span.attributes["gcp.project_id"] == "projectID" + assert ack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert ack_span.attributes["code.function"] == "ack" + + def test_retry_acks(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -544,6 +755,125 @@ def test_retry_modacks_in_new_thread(): assert ctor_call.kwargs["daemon"] +def test_opentelemetry_retry_modacks(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + opentelemetry_data = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + + f = futures.Future() + items = [ + requests.ModAckRequest( + ack_id="ack_id_string", + seconds=20, + future=f, + opentelemetry_data=opentelemetry_data, + ) + ] + manager.send_unary_modack.side_effect = [(items, [])] + with mock.patch("time.sleep", return_value=None): + dispatcher_._retry_modacks(items) + + # Subscribe span wouldn't be ended for modacks. 
So, end it in the test, so + # that we can export and assert its contents. + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + subscribe_span = spans[0] + + assert len(subscribe_span.events) == 1 + assert subscribe_span.events[0].name == "modack end" + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_retry_nacks(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id1", + delivery_attempt=5, + ) + data2 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id2", + delivery_attempt=5, + ) + + f = futures.Future() + items = [ + requests.ModAckRequest( + ack_id="ack_id1", + seconds=0, + future=f, + opentelemetry_data=data1, + ), + requests.ModAckRequest( + ack_id="ack_id2", + seconds=0, + future=f, + opentelemetry_data=data2, + ), + ] + manager.send_unary_modack.side_effect = [(items, [])] + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch("time.sleep", return_value=None): + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_._retry_modacks(items) + + spans = span_exporter.get_finished_spans() + assert len(spans) == 3 + nack_span = spans[0] + + for subscribe_span in spans[1:]: + assert "messaging.gcp_pubsub.result" in subscribe_span.attributes + assert 
subscribe_span.attributes["messaging.gcp_pubsub.result"] == "nacked" + assert len(subscribe_span.events) == 1 + assert subscribe_span.events[0].name == "nack end" + + # This subscribe span is sampled, so we expect it to be linked to the nack + # span. + assert len(spans[1].links) == 1 + assert spans[1].links[0].context == nack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "nack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the nack span + assert len(spans[2].links) == 0 + + assert nack_span.name == "subscriptionID nack" + assert nack_span.kind == trace.SpanKind.CLIENT + assert nack_span.parent is None + assert len(nack_span.links) == 1 + assert nack_span.attributes["messaging.system"] == "gcp_pubsub" + assert nack_span.attributes["messaging.batch.message_count"] == 2 + assert nack_span.attributes["messaging.operation"] == "nack" + assert nack_span.attributes["gcp.project_id"] == "projectID" + assert nack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert nack_span.attributes["code.function"] == "modify_ack_deadline" + + def test_retry_modacks(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True @@ -633,6 +963,103 @@ def test_drop_ordered_messages(): manager.maybe_resume_consumer.assert_called_once() +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry_nack(span_exporter): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True + ) + dispatcher_ = dispatcher.Dispatcher(manager, mock.sentinel.queue) + + data1 = SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=5, + ) + data2 = 
SubscribeOpenTelemetry(message=PubsubMessage(data=b"foo")) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id2", + delivery_attempt=5, + ) + + items = [ + requests.NackRequest( + ack_id="ack_id", + byte_size=10, + ordering_key="", + future=None, + opentelemetry_data=data1, + ), + requests.NackRequest( + ack_id="ack_id2", + byte_size=10, + ordering_key="", + future=None, + opentelemetry_data=data2, + ), + ] + response_items = [ + requests.ModAckRequest( + ack_id="ack_id", + seconds=0, + future=None, + opentelemetry_data=data1, + ), + requests.ModAckRequest( + ack_id="ack_id2", + seconds=0, + future=None, + opentelemetry_data=data2, + ), + ] + manager.send_unary_modack.return_value = (response_items, []) + + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch.object( + data2._subscribe_span, "get_span_context", return_value=mock_span_context + ): + dispatcher_.nack(items) + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 3 + nack_span = spans[0] + for subscribe_span in spans[1:]: + assert "messaging.gcp_pubsub.result" in subscribe_span.attributes + assert subscribe_span.attributes["messaging.gcp_pubsub.result"] == "nacked" + assert len(subscribe_span.events) == 2 + assert subscribe_span.events[0].name == "nack start" + assert subscribe_span.events[1].name == "nack end" + + # This subscribe span is sampled, so we expect it to be linked to the nack + # span. 
+ assert len(spans[1].links) == 1 + assert spans[1].links[0].context == nack_span.context + assert len(spans[1].links[0].attributes) == 1 + assert spans[1].links[0].attributes["messaging.operation.name"] == "nack" + # This subscribe span is not sampled, so we expect it to not be linked to + # the nack span + assert len(spans[2].links) == 0 + + assert nack_span.name == "subscriptionID nack" + assert nack_span.kind == trace.SpanKind.CLIENT + assert nack_span.parent is None + assert len(nack_span.links) == 1 + assert nack_span.attributes["messaging.system"] == "gcp_pubsub" + assert nack_span.attributes["messaging.batch.message_count"] == 2 + assert nack_span.attributes["messaging.operation"] == "nack" + assert nack_span.attributes["gcp.project_id"] == "projectID" + assert nack_span.attributes["messaging.destination.name"] == "subscriptionID" + assert nack_span.attributes["code.function"] == "modify_ack_deadline" + + def test_nack(): manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index f38717c6f378..b5b5cac20bb0 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -22,6 +22,10 @@ from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.cloud.pubsub_v1.subscriber import message # special case python < 3.8 if sys.version_info.major == 3 and sys.version_info.minor < 8: @@ -136,6 +140,101 @@ def trigger_done(timeout): leaser._stop_event.wait = trigger_done +def 
test_opentelemetry_dropped_message_process_span(span_exporter): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + msg = mock.create_autospec( + message.Message, instance=True, ack_id="ack_foo", size=10 + ) + msg.message_id = 3 + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + leaser_.add( + [ + requests.LeaseRequest( + ack_id="my ack id", + byte_size=50, + ordering_key="", + opentelemetry_data=opentelemetry_data, + ) + ] + ) + leased_messages_dict = leaser_._leased_messages + + # Setting the `sent_time`` to be less than `cutoff` in order to make the leased message expire. + # This will exercise the code path where the message would be dropped from the leaser + leased_messages_dict["my ack id"] = leased_messages_dict["my ack id"]._replace( + sent_time=0 + ) + + manager._send_lease_modacks.return_value = set() + leaser_.maintain_leases() + + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert process_span.name == "subscriptionID process" + assert subscribe_span.name == "subscriptionID subscribe" + + assert len(process_span.events) == 1 + assert process_span.events[0].name == "expired" + + assert process_span.parent == subscribe_span.context + + +def test_opentelemetry_expired_message_exactly_once_process_span(span_exporter): + manager = create_manager() + leaser_ = leaser.Leaser(manager) + make_sleep_mark_event_as_done(leaser_) + msg = mock.create_autospec( + message.Message, instance=True, ack_id="ack_foo", size=10 + ) + msg.message_id = 3 + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + 
subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + leaser_.add( + [ + requests.LeaseRequest( + ack_id="my ack id", + byte_size=50, + ordering_key="", + opentelemetry_data=opentelemetry_data, + ) + ] + ) + + manager._send_lease_modacks.return_value = ["my ack id"] + leaser_.maintain_leases() + + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert process_span.name == "subscriptionID process" + assert subscribe_span.name == "subscriptionID subscribe" + + assert len(process_span.events) == 1 + assert process_span.events[0].name == "expired" + + assert process_span.parent == subscribe_span.context + + def test_maintain_leases_ack_ids(): manager = create_manager() leaser_ = leaser.Leaser(manager) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 49b07b7fd496..8d9d2566e0e5 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -29,6 +29,9 @@ from google.protobuf import timestamp_pb2 from google.pubsub_v1 import types as gapic_types from google.cloud.pubsub_v1.subscriber.exceptions import AcknowledgeStatus +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) @@ -131,6 +134,155 @@ def check_call_types(mock, *args, **kwargs): assert isinstance(call_args[n], argtype) +def test_opentelemetry_ack(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = 
SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.ack() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "ack called" + + +def test_opentelemetry_ack_with_response(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.ack_with_response() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "ack called" + + +def test_opentelemetry_nack(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + 
msg.opentelemetry_data = opentelemetry_data + msg.nack() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "nack called" + + +def test_opentelemetry_nack_with_response(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + opentelemetry_data.start_process_span() + msg.opentelemetry_data = opentelemetry_data + msg.nack_with_response() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + + process_span, subscribe_span = spans + + assert subscribe_span.name == "subscriptionID subscribe" + assert len(subscribe_span.events) == 0 + + assert process_span.name == "subscriptionID process" + assert len(process_span.events) == 1 + assert process_span.events[0].name == "nack called" + + +def test_opentelemetry_modack(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + msg.opentelemetry_data = opentelemetry_data + msg.modify_ack_deadline(3) + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + + assert len(spans[0].events) == 0 + + +def 
test_opentelemetry_modack_with_response(span_exporter): + SUBSCRIPTION = "projects/projectID/subscriptions/subscriptionID" + msg = create_message(b"data", ack_id="ack_id") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription=SUBSCRIPTION, + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=2, + ) + msg.opentelemetry_data = opentelemetry_data + msg.modify_ack_deadline_with_response(3) + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + + assert len(spans[0].events) == 0 + + def test_ack(): msg = create_message(b"foo", ack_id="bogus_ack_id") with mock.patch.object(msg._request_queue, "put") as put: diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 5e1dcf91b518..64963de48f2e 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -14,9 +14,14 @@ import queue +from opentelemetry import trace + +from google.pubsub_v1 import types as gapic_types from google.cloud.pubsub_v1.subscriber import message from google.cloud.pubsub_v1.subscriber._protocol import messages_on_hold -from google.pubsub_v1 import types as gapic_types +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) def make_message(ack_id, ordering_key): @@ -37,6 +42,32 @@ def test_init(): assert moh.get() is None +def test_opentelemetry_subscriber_scheduler_span(span_exporter): + moh = messages_on_hold.MessagesOnHold() + msg = make_message(ack_id="ack1", ordering_key="") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + 
subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + moh.put(msg) + opentelemetry_data.end_subscribe_scheduler_span() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + + assert len(spans) == 2 + + subscribe_scheduler_span, subscribe_span = spans + + assert subscribe_scheduler_span.name == "subscriber scheduler" + assert subscribe_scheduler_span.kind == trace.SpanKind.INTERNAL + assert subscribe_scheduler_span.parent == subscribe_span.context + + def test_put_and_get_unordered_messages(): moh = messages_on_hold.MessagesOnHold() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index d4ce2cfdb9c0..4d2d1b98e42b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -18,6 +18,20 @@ import threading import time import types as stdlib_types +import datetime +import queue +import math + +from opentelemetry import trace +from google.protobuf import timestamp_pb2 +from google.api_core import datetime_helpers + +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.cloud.pubsub_v1.subscriber.message import Message +from google.cloud.pubsub_v1.types import PubsubMessage + # special case python < 3.8 if sys.version_info.major == 3 and sys.version_info.minor < 8: @@ -179,11 +193,16 @@ def test_constructor_with_max_duration_per_lease_extension_too_high(): assert manager._stream_ack_deadline == 600 -def make_manager(**kwargs): +def make_manager( + enable_open_telemetry: bool = False, + subscription_name: str = "subscription-name", + **kwargs, +): client_ = mock.create_autospec(client.Client, 
instance=True) + client_.open_telemetry_enabled = enable_open_telemetry scheduler_ = mock.create_autospec(scheduler.Scheduler, instance=True) return streaming_pull_manager.StreamingPullManager( - client_, "subscription-name", scheduler=scheduler_, **kwargs + client_, subscription_name, scheduler=scheduler_, **kwargs ) @@ -509,6 +528,45 @@ def test__maybe_release_messages_on_overload(): manager._scheduler.schedule.assert_not_called() +def test_opentelemetry__maybe_release_messages_subscribe_scheduler_span(span_exporter): + manager = make_manager( + flow_control=types.FlowControl(max_messages=10, max_bytes=1000) + ) + manager._callback = mock.sentinel.callback + + # Init leaser message count to 11, so that when subtracting the 3 messages + # that are on hold, there is still room for another 2 messages before the + # max load is hit. + _leaser = manager._leaser = mock.create_autospec(leaser.Leaser) + fake_leaser_add(_leaser, init_msg_count=8, assumed_msg_size=10) + msg = mock.create_autospec( + message.Message, instance=True, ack_id="ack_foo", size=10 + ) + msg.message_id = 3 + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + manager._messages_on_hold.put(msg) + manager._maybe_release_messages() + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + + assert len(spans) == 2 + + subscriber_scheduler_span, subscribe_span = spans + + assert subscriber_scheduler_span.name == "subscriber scheduler" + assert subscribe_span.name == "subscriptionID subscribe" + + assert subscriber_scheduler_span.parent == subscribe_span.context + assert subscriber_scheduler_span.kind == trace.SpanKind.INTERNAL + + def test__maybe_release_messages_below_overload(): manager = make_manager( 
flow_control=types.FlowControl(max_messages=10, max_bytes=1000) @@ -574,6 +632,86 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): assert manager._on_hold_bytes == 0 # should be auto-corrected +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +@pytest.mark.parametrize( + "receipt_modack", + [ + True, + False, + ], +) +def test_opentelemetry__send_lease_modacks(span_exporter, receipt_modack): + manager, _, _, _, _, _ = make_running_manager( + enable_open_telemetry=True, + subscription_name="projects/projectID/subscriptions/subscriptionID", + ) + data1 = SubscribeOpenTelemetry( + message=gapic_types.PubsubMessage(data=b"foo", message_id="1") + ) + data2 = SubscribeOpenTelemetry( + message=gapic_types.PubsubMessage(data=b"bar", message_id="2") + ) + + data1.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id1", + delivery_attempt=2, + ) + data2.start_subscribe_span( + subscription="projects/projectID/subscriptions/subscriptionID", + exactly_once_enabled=True, + ack_id="ack_id1", + delivery_attempt=2, + ) + mock_span_context = mock.Mock(spec=trace.SpanContext) + mock_span_context.trace_flags.sampled = False + with mock.patch.object( + data1._subscribe_span, "get_span_context", return_value=mock_span_context + ): + manager._send_lease_modacks( + ack_ids=["ack_id1", "ack_id2"], + ack_deadline=20, + opentelemetry_data=[data1, data2], + receipt_modack=receipt_modack, + ) + data1.end_subscribe_span() + data2.end_subscribe_span() + spans = span_exporter.get_finished_spans() + assert len(spans) == 3 + modack_span, subscribe_span1, subscribe_span2 = spans + + assert len(subscribe_span1.events) == 0 + assert len(subscribe_span2.events) == 0 + + assert len(subscribe_span1.links) == 0 + assert len(subscribe_span2.links) == 1 + assert subscribe_span2.links[0].context == modack_span.context + assert 
subscribe_span2.links[0].attributes["messaging.operation.name"] == "modack" + + assert modack_span.name == "subscriptionID modack" + assert modack_span.parent is None + assert modack_span.kind == trace.SpanKind.CLIENT + assert len(modack_span.links) == 1 + modack_span_attributes = modack_span.attributes + assert modack_span_attributes["messaging.system"] == "gcp_pubsub" + assert modack_span_attributes["messaging.batch.message_count"] == 2 + assert math.isclose( + modack_span_attributes["messaging.gcp_pubsub.message.ack_deadline"], 20 + ) + assert modack_span_attributes["messaging.destination.name"] == "subscriptionID" + assert modack_span_attributes["gcp.project_id"] == "projectID" + assert modack_span_attributes["messaging.operation.name"] == "modack" + assert modack_span_attributes["code.function"] == "_send_lease_modacks" + assert ( + modack_span_attributes["messaging.gcp_pubsub.is_receipt_modack"] + == receipt_modack + ) + + def test_send_unary_ack(): manager = make_manager() @@ -1224,14 +1362,17 @@ def test_open_has_been_closed(): manager.open(mock.sentinel.callback, mock.sentinel.on_callback_error) -def make_running_manager(**kwargs): - manager = make_manager(**kwargs) +def make_running_manager( + enable_open_telemetry: bool = False, + subscription_name: str = "subscription-name", + **kwargs, +): + manager = make_manager(enable_open_telemetry, subscription_name, **kwargs) manager._consumer = mock.create_autospec(bidi.BackgroundConsumer, instance=True) manager._consumer.is_active = True manager._dispatcher = mock.create_autospec(dispatcher.Dispatcher, instance=True) manager._leaser = mock.create_autospec(leaser.Leaser, instance=True) manager._heartbeater = mock.create_autospec(heartbeater.Heartbeater, instance=True) - return ( manager, manager._consumer, @@ -2626,3 +2767,148 @@ def test_process_requests_mixed_success_and_failure_modacks(): # message with ack_id 'ackid3' succeeds assert requests_completed[1].ack_id == "ackid3" assert future3.result() == 
subscriber_exceptions.AcknowledgeStatus.SUCCESS + + +@pytest.mark.skipif( + sys.version_info < (3, 8), + reason="Open Telemetry not supported below Python version 3.8", +) +def test_opentelemetry__on_response_subscribe_span_create(span_exporter): + manager, _, _, leaser, _, _ = make_running_manager( + enable_open_telemetry=True, + subscription_name="projects/projectID/subscriptions/subscriptionID", + ) + + fake_leaser_add(leaser, init_msg_count=0, assumed_msg_size=42) + manager._callback = mock.sentinel.callback + + response = gapic_types.StreamingPullResponse( + received_messages=[ + gapic_types.ReceivedMessage( + ack_id="ack1", + message=gapic_types.PubsubMessage(data=b"foo", message_id="1"), + ), + gapic_types.ReceivedMessage( + ack_id="ack2", + message=gapic_types.PubsubMessage(data=b"bar", message_id="2"), + delivery_attempt=6, + ), + ] + ) + + manager._on_response(response) + + spans = span_exporter.get_finished_spans() + + # Subscribe span is still active, hence unexported. + # Subscriber scheduler spans corresponding to the two messages would be started in `messages_on_hold.put()`` + # and ended in `_maybe_release_messages` + assert len(spans) == 3 + modack_span = spans[0] + + for span in spans[1:]: + assert span.name == "subscriber scheduler" + assert span.kind == trace.SpanKind.INTERNAL + assert span.parent is not None + assert len(span.attributes) == 0 + + assert modack_span.name == "subscriptionID modack" + assert modack_span.kind == trace.SpanKind.CLIENT + assert modack_span.parent is None + assert len(modack_span.links) == 2 + + +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) +RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 +PUBLISHED_MICROS = 123456 +PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) +PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 + + +def create_message( + data, + ack_id="ACKID", + delivery_attempt=0, + ordering_key="", + 
exactly_once_delivery_enabled=False, + **attrs, +): # pragma: NO COVER + with mock.patch.object(time, "time") as time_: + time_.return_value = RECEIVED_SECONDS + gapic_pubsub_message = PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 + ), + ordering_key=ordering_key, + ) + msg = Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, + ack_id=ack_id, + delivery_attempt=delivery_attempt, + request_queue=queue.Queue(), + exactly_once_delivery_enabled_func=lambda: exactly_once_delivery_enabled, + ) + return msg + + +def test_opentelemetry_subscriber_concurrency_control_span(span_exporter): + manager, _, _, leaser, _, _ = make_running_manager( + enable_open_telemetry=True, + subscription_name="projects/projectID/subscriptions/subscriptionID", + ) + manager._callback = mock.Mock() + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + msg.opentelemetry_data = opentelemetry_data + manager._schedule_message_on_hold(msg) + opentelemetry_data.end_subscribe_concurrency_control_span() + opentelemetry_data.end_subscribe_span() + + spans = span_exporter.get_finished_spans() + assert len(spans) == 2 + + concurrency_control_span, subscribe_span = spans + assert concurrency_control_span.name == "subscriber concurrency control" + assert subscribe_span.name == "subscriptionID subscribe" + assert opentelemetry_data.subscription_id == "subscriptionID" + + assert concurrency_control_span.parent == subscribe_span.context + + +def test_opentelemetry_subscriber_concurrency_control_span_end(span_exporter): + msg = create_message(b"foo") + 
opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_subscribe_concurrency_control_span() + msg.opentelemetry_data = opentelemetry_data + streaming_pull_manager._wrap_callback_errors(mock.Mock(), mock.Mock(), msg) + + spans = span_exporter.get_finished_spans() + assert len(spans) == 1 + + concurrency_control_span = spans[0] + concurrency_control_span.name == "subscriber concurrency control" + + +def test_opentelemetry_wrap_callback_error(span_exporter): + msg = create_message(b"foo") + streaming_pull_manager._wrap_callback_errors(mock.Mock(), mock.Mock(), msg) + + spans = span_exporter.get_finished_spans() + assert len(spans) == 0 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py new file mode 100644 index 000000000000..2fb89aa7c62b --- /dev/null +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py @@ -0,0 +1,202 @@ +# Copyright 2024, Google LLC All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import datetime +import time +import sys +import queue +import pytest + +from google.protobuf import timestamp_pb2 +from google.api_core import datetime_helpers +from opentelemetry import trace +from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextSetter, +) + +from google.cloud.pubsub_v1.open_telemetry.subscribe_opentelemetry import ( + SubscribeOpenTelemetry, +) +from google.cloud.pubsub_v1.subscriber.message import Message +from google.cloud.pubsub_v1.types import PubsubMessage + +# special case python < 3.8 +if sys.version_info.major == 3 and sys.version_info.minor < 8: + import mock +else: + from unittest import mock + +RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) +RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 +PUBLISHED_MICROS = 123456 +PUBLISHED = RECEIVED + datetime.timedelta(days=1, microseconds=PUBLISHED_MICROS) +PUBLISHED_SECONDS = datetime_helpers.to_milliseconds(PUBLISHED) // 1000 + + +def create_message( + data, + ack_id="ACKID", + delivery_attempt=0, + ordering_key="", + exactly_once_delivery_enabled=False, + **attrs +): # pragma: NO COVER + with mock.patch.object(time, "time") as time_: + time_.return_value = RECEIVED_SECONDS + gapic_pubsub_message = PubsubMessage( + attributes=attrs, + data=data, + message_id="message_id", + publish_time=timestamp_pb2.Timestamp( + seconds=PUBLISHED_SECONDS, nanos=PUBLISHED_MICROS * 1000 + ), + ordering_key=ordering_key, + ) + msg = Message( + # The code under test uses a raw protobuf PubsubMessage, i.e. 
w/o additional + # Python class wrappers, hence the "_pb" + message=gapic_pubsub_message._pb, + ack_id=ack_id, + delivery_attempt=delivery_attempt, + request_queue=queue.Queue(), + exactly_once_delivery_enabled_func=lambda: exactly_once_delivery_enabled, + ) + return msg + + +def test_opentelemetry_set_subscribe_span_result(span_exporter): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.set_subscribe_span_result("acked") + opentelemetry_data.end_subscribe_span() + spans = span_exporter.get_finished_spans() + + assert len(spans) == 1 + + assert "messaging.gcp_pubsub.result" in spans[0].attributes + assert spans[0].attributes["messaging.gcp_pubsub.result"] == "acked" + + +def test_opentelemetry_set_subscribe_span_result_assert_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.set_subscribe_span_result("hi") + + +def test_opentelemetry_start_subscribe_concurrency_control_span_no_subscribe_span(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.start_subscribe_concurrency_control_span() + + +def test_opentelemetry_end_subscribe_concurrency_control_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.end_subscribe_concurrency_control_span() + + +def test_opentelemetry_start_subscribe_scheduler_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.start_subscribe_scheduler_span() + + 
+def test_opentelemetry_end_subscribe_scheduler_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.end_subscribe_scheduler_span() + + +def test_opentelemetry_start_process_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.start_process_span() + + +def test_opentelemetry_end_process_span_assertion_error(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + with pytest.raises(AssertionError): + opentelemetry_data.end_process_span() + + +def test_opentelemetry_start_process_span_publisher_link(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + tracer = trace.get_tracer("foo") + publisher_create_span = None + with tracer.start_as_current_span(name="name") as span: + publisher_create_span = span + TraceContextTextMapPropagator().inject( + carrier=msg._message, + setter=OpenTelemetryContextSetter(), + ) + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + assert len(opentelemetry_data._process_span.links) == 1 + assert ( + opentelemetry_data._process_span.links[0].context.span_id + == publisher_create_span.get_span_context().span_id + ) + + +def test_opentelemetry_start_process_span_no_publisher_span(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + opentelemetry_data.start_process_span() + # Assert 
that when no context is propagated, the subscriber span has no parent. + assert opentelemetry_data._subscribe_span.parent is None + # Assert that when there is no publisher create span context propagated, + # There are no links created in the process span. + assert len(opentelemetry_data._process_span.links) == 0 + + +def test_opentelemetry_project_id_set_after_create_subscribe_span(): + msg = create_message(b"foo") + opentelemetry_data = SubscribeOpenTelemetry(msg) + msg.opentelemetry_data = opentelemetry_data + opentelemetry_data.start_subscribe_span( + subscription="projects/projectId/subscriptions/subscriptionID", + exactly_once_enabled=False, + ack_id="ack_id", + delivery_attempt=4, + ) + assert opentelemetry_data.project_id == "projectId" diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 16a6150af9ad..7c0ebfd83818 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -30,6 +30,10 @@ from google.cloud.pubsub_v1.subscriber import futures from google.pubsub_v1.services.subscriber import client as subscriber_client from google.pubsub_v1.services.subscriber.transports.grpc import SubscriberGrpcTransport +from google.cloud.pubsub_v1.open_telemetry.context_propagation import ( + OpenTelemetryContextGetter, +) +from google.pubsub_v1.types import PubsubMessage def test_init_default_client_info(creds): @@ -317,3 +321,48 @@ async def test_sync_pull_warning_if_return_immediately_async(creds): warning_msg = str(warned[0].message) assert "return_immediately" in warning_msg assert "deprecated" in warning_msg + + +@pytest.mark.parametrize( + "enable_open_telemetry", + [ + True, + False, + ], +) +def test_opentelemetry_subscriber_setting(creds, enable_open_telemetry): + options = 
types.SubscriberOptions( + enable_open_telemetry_tracing=enable_open_telemetry, + ) + if sys.version_info >= (3, 8) or enable_open_telemetry is False: + client = subscriber.Client(credentials=creds, subscriber_options=options) + assert client.subscriber_options == options + assert client._open_telemetry_enabled == enable_open_telemetry + else: + with pytest.warns( + RuntimeWarning, + match="Open Telemetry for Python version 3.7 or lower is not supported. Disabling Open Telemetry tracing.", + ): + client = subscriber.Client(credentials=creds, subscriber_options=options) + assert client._open_telemetry_enabled is False + + +def test_opentelemetry_propagator_get(): + message = PubsubMessage(data=b"foo") + message.attributes["key1"] = "value1" + message.attributes["googclient_key2"] = "value2" + + assert OpenTelemetryContextGetter().get(message, "key2") == ["value2"] + + assert OpenTelemetryContextGetter().get(message, "key1") is None + + +def test_opentelemetry_propagator_keys(): + message = PubsubMessage(data=b"foo") + message.attributes["key1"] = "value1" + message.attributes["googclient_key2"] = "value2" + + assert sorted(OpenTelemetryContextGetter().keys(message)) == [ + "googclient_key2", + "key1", + ] From 8644e4126495ccc4e4947853165b0413b5f5f304 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 21:12:03 -0400 Subject: [PATCH 1104/1197] chore(main): release 2.24.0 (#1251) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 13 +++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 17 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json 
index 94d5d8cc48af..28ce5d0cc794 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.23.1" + ".": "2.24.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 93db31182095..4470137202c3 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,19 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.24.0](https://github.com/googleapis/python-pubsub/compare/v2.23.1...v2.24.0) (2024-09-24) + + +### Features + +* Add OpenTelemetry support for Subscribe Side ([#1252](https://github.com/googleapis/python-pubsub/issues/1252)) ([1b6f3d2](https://github.com/googleapis/python-pubsub/commit/1b6f3d284095e138943576de8551df263f73a506)) +* Open Telemetry Publish Side Support ([#1241](https://github.com/googleapis/python-pubsub/issues/1241)) ([bb5f3d1](https://github.com/googleapis/python-pubsub/commit/bb5f3d1a7df2d661cccc336edc8eceb2161c6921)) + + +### Bug Fixes + +* Fix flaky test ([#1254](https://github.com/googleapis/python-pubsub/issues/1254)) ([1ae49de](https://github.com/googleapis/python-pubsub/commit/1ae49de09996a5cf19f592f996c46e0222d540fc)) + ## [2.23.1](https://github.com/googleapis/python-pubsub/compare/v2.23.0...v2.23.1) (2024-09-09) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 008f4dd36be8..07de09d568ba 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.23.1" # {x-release-please-version} +__version__ = "2.24.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 008f4dd36be8..07de09d568ba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.23.1" # {x-release-please-version} +__version__ = "2.24.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index c1602f5baa98..fd163b590440 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.23.1" + "version": "2.24.0" }, "snippets": [ { From b1da687315d155c49fe258db063843958a05db40 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:14:46 -0700 Subject: [PATCH 1105/1197] build(python): release script update (#1253) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/release.sh | 2 +- packages/google-cloud-pubsub/.kokoro/release/common.cfg | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index f8bd8149fa87..597e0c3261ca 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:365d92ef2206cfad00a8c5955c36789d0de124e2b6d92a72dd0486315a0f2e57 -# created: 2024-09-04T14:50:52.658171431Z + digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 +# created: 2024-09-16T21:04:09.091105552Z diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index 9b17b081a3d8..aeefd50f2c7a 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") cd github/python-pubsub python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg index 5b1bbe360f0a..8638067fa01d 100644 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -28,7 +28,7 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-1" + keyname: "google-cloud-pypi-token-keystore-2" } } } From bca95aa74439d0972787cdee0ba2678819822c7d Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 27 Sep 2024 01:00:33 -0400 Subject: [PATCH 1106/1197] feat: Add OpenTelemetry publish sample (#1258) --- .../samples/snippets/publisher.py | 88 +++++++++++++++++++ 
.../samples/snippets/requirements.txt | 3 + 2 files changed, 91 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 73afc8c978fc..d2be927b8cd4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -170,6 +170,83 @@ def delete_topic(project_id: str, topic_id: str) -> None: # [END pubsub_delete_topic] +def pubsub_publish_otel_tracing( + topic_project_id: str, trace_project_id: str, topic_id: str +) -> None: + """ + Publish to `topic_id` in `topic_project_id` with OpenTelemetry enabled. + Export the OpenTelemetry traces to Google Cloud Trace in project + `trace_project_id` + + Args: + topic_project_id: project ID of the topic to publish to. + trace_project_id: project ID to export Cloud Trace to. + topic_id: topic ID to publish to. + + Returns: + None + """ + # [START pubsub_publish_otel_tracing] + + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + ) + from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter + from opentelemetry.sdk.trace.sampling import TraceIdRatioBased, ParentBased + + from google.cloud.pubsub_v1 import PublisherClient + from google.cloud.pubsub_v1.types import PublisherOptions + + # TODO(developer) + # topic_project_id = "your-topic-project-id" + # trace_project_id = "your-trace-project-id" + # topic_id = "your-topic-id" + + # In this sample, we use a Google Cloud Trace to export the OpenTelemetry + # traces: https://cloud.google.com/trace/docs/setup/python-ot + # Choose and configure the exporter for your set up accordingly. + + sampler = ParentBased(root=TraceIdRatioBased(1)) + trace.set_tracer_provider(TracerProvider(sampler=sampler)) + + # Export to Google Trace. 
+ cloud_trace_exporter = CloudTraceSpanExporter( + project_id=trace_project_id, + ) + trace.get_tracer_provider().add_span_processor( + BatchSpanProcessor(cloud_trace_exporter) + ) + + # Set the `enable_open_telemetry_tracing` option to True when creating + # the publisher client. This in itself is necessary and sufficient for + # the library to export OpenTelemetry traces. However, where the traces + # must be exported to needs to be configured based on your OpenTelemetry + # set up. Refer: https://opentelemetry.io/docs/languages/python/exporters/ + publisher = PublisherClient( + publisher_options=PublisherOptions( + enable_open_telemetry_tracing=True, + ), + ) + + # The `topic_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/topics/{topic_id}` + topic_path = publisher.topic_path(topic_project_id, topic_id) + # Publish messages. + for n in range(1, 10): + data_str = f"Message number {n}" + # Data must be a bytestring + data = data_str.encode("utf-8") + # When you publish a message, the client returns a future. 
+ future = publisher.publish(topic_path, data) + print(future.result()) + + print(f"Published messages to {topic_path}.") + + # [END pubsub_publish_otel_tracing] + + def publish_messages(project_id: str, topic_id: str) -> None: """Publishes multiple messages to a Pub/Sub topic.""" # [START pubsub_quickstart_publisher] @@ -522,6 +599,13 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: create_parser = subparsers.add_parser("create", help=create_topic.__doc__) create_parser.add_argument("topic_id") + pubsub_publish_otel_tracing_parser = subparsers.add_parser( + "pubsub-publish-otel-tracing", help=pubsub_publish_otel_tracing.__doc__ + ) + pubsub_publish_otel_tracing_parser.add_argument("topic_project_id") + pubsub_publish_otel_tracing_parser.add_argument("trace_project_id") + pubsub_publish_otel_tracing_parser.add_argument("topic_id") + create_topic_with_kinesis_ingestion_parser = subparsers.add_parser( "create_kinesis_ingestion", help=create_topic_with_kinesis_ingestion.__doc__ ) @@ -638,3 +722,7 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: resume_publish_with_ordering_keys(args.project_id, args.topic_id) elif args.command == "detach-subscription": detach_subscription(args.project_id, args.subscription_id) + elif args.command == "pubsub-publish-otel-tracing": + pubsub_publish_otel_tracing( + args.topic_project_id, args.trace_project_id, args.topic_id + ) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index b2dfe2d9296a..f410f8f62905 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -3,3 +3,6 @@ avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf==5.28.0; python_version >= '3.8' avro==1.12.0 +opentelemetry-api==1.22.0 +opentelemetry-sdk==1.22.0 +opentelemetry-exporter-gcp-trace==1.7.0 \ No newline at end of file 
From 16d425aa72909a96e3d5627a114ef7baa2a02836 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Sat, 28 Sep 2024 12:43:34 -0400 Subject: [PATCH 1107/1197] doc: Subscribe sample (#1260) --- .../samples/snippets/subscriber.py | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 79cd0ebf1c98..7931b31cbbd5 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -68,6 +68,94 @@ def list_subscriptions_in_project(project_id: str) -> None: # [END pubsub_list_subscriptions] +def pubsub_subscribe_otel_tracing( + subscription_project_id: str, + cloud_trace_project_id: str, + subscription_id: str, + timeout: Optional[float] = None, +) -> None: + """ + Subscribe to `subscription_id` in `subscription_project_id` with OpenTelemetry enabled. + Export the OpenTelemetry traces to Google Cloud Trace in project + `trace_project_id` + Args: + subscription_project_id: project ID of the subscription. + cloud_trace_project_id: project ID to export Cloud Trace to. + subscription_id: subscription ID to subscribe from. + timeout: time until which to subscribe to. 
+ Returns: + None + """ + # [START pubsub_subscribe_otel_tracing] + from opentelemetry import trace + from opentelemetry.sdk.trace import TracerProvider + from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + ) + from opentelemetry.exporter.cloud_trace import CloudTraceSpanExporter + from opentelemetry.sdk.trace.sampling import TraceIdRatioBased, ParentBased + + from google.cloud import pubsub_v1 + from google.cloud.pubsub_v1 import SubscriberClient + from google.cloud.pubsub_v1.types import SubscriberOptions + + # TODO(developer) + # subscription_project_id = "your-subscription-project-id" + # subscription_id = "your-subscription-id" + # cloud_trace_project_id = "your-cloud-trace-project-id" + # timeout = 300.0 + + # In this sample, we use a Google Cloud Trace to export the OpenTelemetry + # traces: https://cloud.google.com/trace/docs/setup/python-ot + # Choose and configure the exporter for your set up accordingly. + + sampler = ParentBased(root=TraceIdRatioBased(1)) + trace.set_tracer_provider(TracerProvider(sampler=sampler)) + + # Export to Google Trace + cloud_trace_exporter = CloudTraceSpanExporter( + project_id=cloud_trace_project_id, + ) + trace.get_tracer_provider().add_span_processor( + BatchSpanProcessor(cloud_trace_exporter) + ) + # Set the `enable_open_telemetry_tracing` option to True when creating + # the subscriber client. This in itself is necessary and sufficient for + # the library to export OpenTelemetry traces. However, where the traces + # must be exported to needs to be configured based on your OpenTelemetry + # set up. 
Refer: https://opentelemetry.io/docs/languages/python/exporters/ + subscriber = SubscriberClient( + subscriber_options=SubscriberOptions(enable_open_telemetry_tracing=True) + ) + + # The `subscription_path` method creates a fully qualified identifier + # in the form `projects/{project_id}/subscriptions/{subscription_id}` + subscription_path = subscriber.subscription_path( + subscription_project_id, subscription_id + ) + + # Define callback to be called when a message is received. + def callback(message: pubsub_v1.subscriber.message.Message) -> None: + # Ack message after processing it. + print(message.data) + message.ack() + + # Wrap subscriber in a 'with' block to automatically call close() when done. + with subscriber: + try: + # Optimistically subscribe to messages on the subscription. + streaming_pull_future = subscriber.subscribe( + subscription_path, callback=callback + ) + streaming_pull_future.result(timeout=timeout) + except TimeoutError: + print("Successfully subscribed until the timeout passed.") + streaming_pull_future.cancel() # Trigger the shutdown. + streaming_pull_future.result() # Block until the shutdown is complete. 
+ + # [END pubsub_subscribe_otel_tracing] + + def create_subscription(project_id: str, topic_id: str, subscription_id: str) -> None: """Create a new pull subscription on the given topic.""" # [START pubsub_create_pull_subscription] From ed6e3c1b285d068f187db5494f2dd1e5413f8c2b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 01:24:37 -0400 Subject: [PATCH 1108/1197] chore(main): release 2.25.0 (#1262) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 28ce5d0cc794..787a68e1dfc8 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.24.0" + ".": "2.25.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 4470137202c3..290245b2b25e 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.25.0](https://github.com/googleapis/python-pubsub/compare/v2.24.0...v2.25.0) (2024-09-28) + + +### Features + +* Add OpenTelemetry publish sample ([#1258](https://github.com/googleapis/python-pubsub/issues/1258)) ([bc13ff0](https://github.com/googleapis/python-pubsub/commit/bc13ff05c3d1104c17169c360bdc09340430da37)) + ## 
[2.24.0](https://github.com/googleapis/python-pubsub/compare/v2.23.1...v2.24.0) (2024-09-24) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 07de09d568ba..e5fa8f60b9fc 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "2.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 07de09d568ba..e5fa8f60b9fc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.24.0" # {x-release-please-version} +__version__ = "2.25.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index fd163b590440..e6accad796d5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.24.0" + "version": "2.25.0" }, "snippets": [ { From 5a503b32de106d5f1340613b884d0edb4c4e3c63 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Sun, 29 Sep 2024 19:57:48 -0400 Subject: [PATCH 1109/1197] fix: Update the requirements.txt for samples directory (#1263) --- .../samples/snippets/requirements.txt | 10 ++++++---- packages/google-cloud-pubsub/setup.py | 6 ++++-- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index f410f8f62905..4e86dfa5b92e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,8 +1,10 @@ -google-cloud-pubsub==2.23.0 +google-cloud-pubsub==2.25.0 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf==5.28.0; python_version >= '3.8' avro==1.12.0 -opentelemetry-api==1.22.0 -opentelemetry-sdk==1.22.0 -opentelemetry-exporter-gcp-trace==1.7.0 \ No newline at end of file +opentelemetry-api==1.22.0; python_version == '3.7' +opentelemetry-sdk==1.22.0; python_version == '3.7' +opentelemetry-api==1.27.0; python_version >= '3.8' +opentelemetry-sdk==1.27.0; python_version >= '3.8' +opentelemetry-exporter-gcp-trace==1.7.0 diff 
--git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index cc852f7d8060..8339e1e18721 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -45,8 +45,10 @@ "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", "grpcio-status >= 1.33.2", - "opentelemetry-api", - "opentelemetry-sdk", + "opentelemetry-api <= 1.22.0; python_version<='3.7'", + "opentelemetry-api >= 1.27.0; python_version>='3.8'", + "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", + "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", ] extras = {"libcst": "libcst >= 0.3.10"} url = "https://github.com/googleapis/python-pubsub" From 4180be84e95807121c4a2f00292fa91f50aa9de5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sun, 29 Sep 2024 17:44:43 -0700 Subject: [PATCH 1110/1197] chore(main): release 2.25.1 (#1264) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 787a68e1dfc8..ae952ab62ae8 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.25.0" + ".": "2.25.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 290245b2b25e..213d21ed0f53 100644 --- 
a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.25.1](https://github.com/googleapis/python-pubsub/compare/v2.25.0...v2.25.1) (2024-09-29) + + +### Bug Fixes + +* Update the requirements.txt for samples directory ([#1263](https://github.com/googleapis/python-pubsub/issues/1263)) ([5cce8b1](https://github.com/googleapis/python-pubsub/commit/5cce8b103ab7085613b7ee0efb5c8342d41ebae1)) + ## [2.25.0](https://github.com/googleapis/python-pubsub/compare/v2.24.0...v2.25.0) (2024-09-28) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index e5fa8f60b9fc..bf6fc188de42 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.25.0" # {x-release-please-version} +__version__ = "2.25.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index e5fa8f60b9fc..bf6fc188de42 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.25.0" # {x-release-please-version} +__version__ = "2.25.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index e6accad796d5..9fecd3ecaee9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.25.0" + "version": "2.25.1" }, "snippets": [ { From 9e7af45b25fa7a0b08a97e242810fc6a12011b2a Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Mon, 30 Sep 2024 13:20:37 -0400 Subject: [PATCH 1111/1197] docs: Add command line args for OpenTelemetry Subscribe sample (#1265) Co-authored-by: Owl Bot --- .../samples/snippets/subscriber.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 7931b31cbbd5..180b091db2be 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -1235,6 +1235,14 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: "list-in-project", help=list_subscriptions_in_project.__doc__ ) + otel_subscribe_parse = subparsers.add_parser( + "otel-subscribe", help=pubsub_subscribe_otel_tracing.__doc__ + ) + otel_subscribe_parse.add_argument("subscription_project_id") + otel_subscribe_parse.add_argument("cloud_trace_project_id") + otel_subscribe_parse.add_argument("subscription_id") + otel_subscribe_parse.add_argument("timeout", default=None, type=float, nargs="?") + create_parser = subparsers.add_parser("create", 
help=create_subscription.__doc__) create_parser.add_argument("topic_id") create_parser.add_argument("subscription_id") @@ -1516,3 +1524,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: receive_messages_with_concurrency_control( args.project_id, args.subscription_id, args.timeout ) + elif args.command == "otel-subscribe": + pubsub_subscribe_otel_tracing( + args.subscription_project_id, + args.cloud_trace_project_id, + args.subscription_id, + args.timeout, + ) From 2bee79d7c5b0189e362e3d1763076ba50705378f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 11:08:38 -0700 Subject: [PATCH 1112/1197] chore(main): release 2.25.2 (#1266) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index ae952ab62ae8..f89e45276ea2 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.25.1" + ".": "2.25.2" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 213d21ed0f53..354728b6d55a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.25.2](https://github.com/googleapis/python-pubsub/compare/v2.25.1...v2.25.2) (2024-09-30) + + +### 
Documentation + +* Add command line args for OpenTelemetry Subscribe sample ([#1265](https://github.com/googleapis/python-pubsub/issues/1265)) ([0ff7f2a](https://github.com/googleapis/python-pubsub/commit/0ff7f2a64b5aa1b0e014e0933e4edaef0fb3f222)) + ## [2.25.1](https://github.com/googleapis/python-pubsub/compare/v2.25.0...v2.25.1) (2024-09-29) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index bf6fc188de42..8c95d3fd6e28 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.25.1" # {x-release-please-version} +__version__ = "2.25.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index bf6fc188de42..8c95d3fd6e28 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.25.1" # {x-release-please-version} +__version__ = "2.25.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 9fecd3ecaee9..f9f4a71c030c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.25.1" + "version": "2.25.2" }, "snippets": [ { From 87af7cb9b197f76dbb7b8f7601ca96ec88c6d754 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 8 Oct 2024 22:34:30 -0700 Subject: [PATCH 1113/1197] feat: add ingestion Cloud Storage fields and Platform Logging fields to Topic (#1248) Co-authored-by: Owl Bot Co-authored-by: Mike Prieto --- .../google/pubsub/__init__.py | 2 + .../google/pubsub_v1/__init__.py | 2 + .../google/pubsub_v1/types/__init__.py | 2 + .../google/pubsub_v1/types/pubsub.py | 264 +++++++++++++++++- .../scripts/fixup_pubsub_v1_keywords.py | 2 +- 5 files changed, 270 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index d6d0a00ff358..84f0b8294dbb 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -55,6 +55,7 @@ from google.pubsub_v1.types.pubsub import MessageStoragePolicy from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest +from google.pubsub_v1.types.pubsub import PlatformLogsSettings from google.pubsub_v1.types.pubsub import PublishRequest from google.pubsub_v1.types.pubsub import 
PublishResponse from google.pubsub_v1.types.pubsub import PubsubMessage @@ -127,6 +128,7 @@ "MessageStoragePolicy", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", + "PlatformLogsSettings", "PublishRequest", "PublishResponse", "PubsubMessage", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 61b89e6b17d1..fd7ecb6d4c89 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -53,6 +53,7 @@ from .types.pubsub import MessageStoragePolicy from .types.pubsub import ModifyAckDeadlineRequest from .types.pubsub import ModifyPushConfigRequest +from .types.pubsub import PlatformLogsSettings from .types.pubsub import PublishRequest from .types.pubsub import PublishResponse from .types.pubsub import PubsubMessage @@ -132,6 +133,7 @@ "MessageStoragePolicy", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", + "PlatformLogsSettings", "PublishRequest", "PublishResponse", "PublisherClient", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 62568bf6664e..6feefc154d3b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -44,6 +44,7 @@ MessageStoragePolicy, ModifyAckDeadlineRequest, ModifyPushConfigRequest, + PlatformLogsSettings, PublishRequest, PublishResponse, PubsubMessage, @@ -122,6 +123,7 @@ "MessageStoragePolicy", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", + "PlatformLogsSettings", "PublishRequest", "PublishResponse", "PubsubMessage", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 5f826601494d..d40d7c24ea85 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -31,6 +31,7 @@ "MessageStoragePolicy", "SchemaSettings", "IngestionDataSourceSettings", + "PlatformLogsSettings", "Topic", "PubsubMessage", "GetTopicRequest", @@ -160,6 +161,11 @@ class SchemaSettings(proto.Message): class IngestionDataSourceSettings(proto.Message): r"""Settings for an ingestion data source on a topic. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -167,6 +173,13 @@ class IngestionDataSourceSettings(proto.Message): Optional. Amazon Kinesis Data Streams. This field is a member of `oneof`_ ``source``. + cloud_storage (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage): + Optional. Cloud Storage. + + This field is a member of `oneof`_ ``source``. + platform_logs_settings (google.pubsub_v1.types.PlatformLogsSettings): + Optional. Platform Logs settings. If unset, + no Platform Logs will be generated. """ class AwsKinesis(proto.Message): @@ -259,12 +272,227 @@ class State(proto.Enum): number=5, ) + class CloudStorage(proto.Message): + r"""Ingestion settings for Cloud Storage. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.State): + Output only. An output-only field that + indicates the state of the Cloud Storage + ingestion source. + bucket (str): + Optional. Cloud Storage bucket. 
The bucket name must be + without any prefix like "gs://". See the [bucket naming + requirements] + (https://cloud.google.com/storage/docs/buckets#naming). + text_format (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.TextFormat): + Optional. Data from Cloud Storage will be + interpreted as text. + + This field is a member of `oneof`_ ``input_format``. + avro_format (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.AvroFormat): + Optional. Data from Cloud Storage will be + interpreted in Avro format. + + This field is a member of `oneof`_ ``input_format``. + pubsub_avro_format (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat): + Optional. It will be assumed data from Cloud Storage was + written via `Cloud Storage + subscriptions `__. + + This field is a member of `oneof`_ ``input_format``. + minimum_object_create_time (google.protobuf.timestamp_pb2.Timestamp): + Optional. Only objects with a larger or equal + creation timestamp will be ingested. + match_glob (str): + Optional. Glob pattern used to match objects that will be + ingested. If unset, all objects will be ingested. See the + `supported + patterns `__. + """ + + class State(proto.Enum): + r"""Possible states for ingestion from Cloud Storage. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + CLOUD_STORAGE_PERMISSION_DENIED (2): + Permission denied encountered while calling the Cloud + Storage API. This can happen if the Pub/Sub SA has not been + granted the `appropriate + permissions `__: + + - storage.objects.list: to list the objects in a bucket. + - storage.objects.get: to read the objects in a bucket. + - storage.buckets.get: to verify the bucket exists. + PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while publishing to the topic. 
+ This can happen if the Pub/Sub SA has not been granted the + `appropriate publish + permissions `__ + BUCKET_NOT_FOUND (4): + The provided Cloud Storage bucket doesn't + exist. + TOO_MANY_OBJECTS (5): + The Cloud Storage bucket has too many + objects, ingestion will be paused. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CLOUD_STORAGE_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + BUCKET_NOT_FOUND = 4 + TOO_MANY_OBJECTS = 5 + + class TextFormat(proto.Message): + r"""Configuration for reading Cloud Storage data in text format. Each + line of text as specified by the delimiter will be set to the + ``data`` field of a Pub/Sub message. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + delimiter (str): + Optional. When unset, '\n' is used. + + This field is a member of `oneof`_ ``_delimiter``. + """ + + delimiter: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + + class AvroFormat(proto.Message): + r"""Configuration for reading Cloud Storage data in Avro binary format. + The bytes of each object will be set to the ``data`` field of a + Pub/Sub message. + + """ + + class PubSubAvroFormat(proto.Message): + r"""Configuration for reading Cloud Storage data written via `Cloud + Storage + subscriptions `__. + The data and attributes fields of the originally exported Pub/Sub + message will be restored when publishing. 
+ + """ + + state: "IngestionDataSourceSettings.CloudStorage.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.CloudStorage.State", + ) + bucket: str = proto.Field( + proto.STRING, + number=2, + ) + text_format: "IngestionDataSourceSettings.CloudStorage.TextFormat" = ( + proto.Field( + proto.MESSAGE, + number=3, + oneof="input_format", + message="IngestionDataSourceSettings.CloudStorage.TextFormat", + ) + ) + avro_format: "IngestionDataSourceSettings.CloudStorage.AvroFormat" = ( + proto.Field( + proto.MESSAGE, + number=4, + oneof="input_format", + message="IngestionDataSourceSettings.CloudStorage.AvroFormat", + ) + ) + pubsub_avro_format: "IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat" = proto.Field( + proto.MESSAGE, + number=5, + oneof="input_format", + message="IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat", + ) + minimum_object_create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + match_glob: str = proto.Field( + proto.STRING, + number=9, + ) + aws_kinesis: AwsKinesis = proto.Field( proto.MESSAGE, number=1, oneof="source", message=AwsKinesis, ) + cloud_storage: CloudStorage = proto.Field( + proto.MESSAGE, + number=2, + oneof="source", + message=CloudStorage, + ) + platform_logs_settings: "PlatformLogsSettings" = proto.Field( + proto.MESSAGE, + number=4, + message="PlatformLogsSettings", + ) + + +class PlatformLogsSettings(proto.Message): + r"""Settings for Platform Logs produced by Pub/Sub. + + Attributes: + severity (google.pubsub_v1.types.PlatformLogsSettings.Severity): + Optional. The minimum severity level of + Platform Logs that will be written. + """ + + class Severity(proto.Enum): + r"""Severity levels of Platform Logs. + + Values: + SEVERITY_UNSPECIFIED (0): + Default value. Logs level is unspecified. + Logs will be disabled. + DISABLED (1): + Logs will be disabled. 
+ DEBUG (2): + Debug logs and higher-severity logs will be + written. + INFO (3): + Info logs and higher-severity logs will be + written. + WARNING (4): + Warning logs and higher-severity logs will be + written. + ERROR (5): + Only error logs will be written. + """ + SEVERITY_UNSPECIFIED = 0 + DISABLED = 1 + DEBUG = 2 + INFO = 3 + WARNING = 4 + ERROR = 5 + + severity: Severity = proto.Field( + proto.ENUM, + number=1, + enum=Severity, + ) class Topic(proto.Message): @@ -821,7 +1049,7 @@ class Subscription(proto.Message): published. If ``retain_acked_messages`` is true, then this also configures the retention of acknowledged messages, and thus configures how far back in time a ``Seek`` can be done. - Defaults to 7 days. Cannot be more than 7 days or less than + Defaults to 7 days. Cannot be more than 31 days or less than 10 minutes. labels (MutableMapping[str, str]): Optional. See `Creating and managing @@ -906,6 +1134,10 @@ class Subscription(proto.Message): Output only. An output-only field indicating whether or not the subscription can receive messages. + analytics_hub_subscription_info (google.pubsub_v1.types.Subscription.AnalyticsHubSubscriptionInfo): + Output only. Information about the associated + Analytics Hub subscription. Only set if the + subscritpion is created by Analytics Hub. """ class State(proto.Enum): @@ -927,6 +1159,31 @@ class State(proto.Enum): ACTIVE = 1 RESOURCE_ERROR = 2 + class AnalyticsHubSubscriptionInfo(proto.Message): + r"""Information about an associated Analytics Hub subscription + (https://cloud.google.com/bigquery/docs/analytics-hub-manage-subscriptions). + + Attributes: + listing (str): + Optional. The name of the associated Analytics Hub listing + resource. Pattern: + "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}". + subscription (str): + Optional. The name of the associated + Analytics Hub subscription resource. 
Pattern: + + "projects/{project}/locations/{location}/subscriptions/{subscription}". + """ + + listing: str = proto.Field( + proto.STRING, + number=1, + ) + subscription: str = proto.Field( + proto.STRING, + number=2, + ) + name: str = proto.Field( proto.STRING, number=1, @@ -1009,6 +1266,11 @@ class State(proto.Enum): number=19, enum=State, ) + analytics_hub_subscription_info: AnalyticsHubSubscriptionInfo = proto.Field( + proto.MESSAGE, + number=23, + message=AnalyticsHubSubscriptionInfo, + ) class RetryPolicy(proto.Message): diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 35262ea8d459..543f7e051da4 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -43,7 +43,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'commit_schema': ('name', 'schema', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', ), + 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 'analytics_hub_subscription_info', ), 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 
'message_retention_duration', 'state', 'ingestion_data_source_settings', ), 'delete_schema': ('name', ), 'delete_schema_revision': ('name', 'revision_id', ), From bd08bca9a8576388700d28461c9488af65d99edf Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 11:11:20 -0400 Subject: [PATCH 1114/1197] chore(main): release 2.26.0 (#1272) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index f89e45276ea2..d53227a83b8f 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.25.2" + ".": "2.26.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 354728b6d55a..e1a67b71193c 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.26.0](https://github.com/googleapis/python-pubsub/compare/v2.25.2...v2.26.0) (2024-10-09) + + +### Features + +* Add ingestion Cloud Storage fields and Platform Logging fields to Topic ([#1248](https://github.com/googleapis/python-pubsub/issues/1248)) ([a7a4caa](https://github.com/googleapis/python-pubsub/commit/a7a4caaa5a73e9b15369471dc892688e24bf52e0)) + ## [2.25.2](https://github.com/googleapis/python-pubsub/compare/v2.25.1...v2.25.2) 
(2024-09-30) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 8c95d3fd6e28..d56eed5c5db7 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.25.2" # {x-release-please-version} +__version__ = "2.26.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 8c95d3fd6e28..d56eed5c5db7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.25.2" # {x-release-please-version} +__version__ = "2.26.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index f9f4a71c030c..a20353b05e7e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.25.2" + "version": "2.26.0" }, "snippets": [ { From c460cf52493ab24903875089f875efb8d5ddeb34 Mon Sep 17 00:00:00 2001 From: Mike Prieto Date: Thu, 10 Oct 2024 17:42:32 -0400 Subject: [PATCH 1115/1197] docs: Add ingestion from GCS sample (#1273) --- .../samples/snippets/publisher.py | 105 ++++++++++++++++++ .../samples/snippets/publisher_test.py | 34 ++++++ 
.../samples/snippets/requirements.txt | 2 +- 3 files changed, 140 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d2be927b8cd4..7cb7ca223d71 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -103,6 +103,88 @@ def create_topic_with_kinesis_ingestion( # [END pubsub_create_topic_with_kinesis_ingestion] +def create_topic_with_cloud_storage_ingestion( + project_id: str, + topic_id: str, + bucket: str, + input_format: str, + text_delimiter: str, + match_glob: str, + minimum_object_create_time: str, +) -> None: + """Create a new Pub/Sub topic with Cloud Storage Ingestion Settings.""" + # [START pubsub_create_topic_with_cloud_storage_ingestion] + from google.cloud import pubsub_v1 + from google.protobuf import timestamp_pb2 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # bucket = "your-bucket" + # input_format = "text" (can be one of "text", "avro", "pubsub_avro") + # text_delimiter = "\n" + # match_glob = "**.txt" + # minimum_object_create_time = "YYYY-MM-DDThh:mm:ssZ" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + cloud_storage_settings = IngestionDataSourceSettings.CloudStorage( + bucket=bucket, + ) + if input_format == "text": + cloud_storage_settings.text_format = ( + IngestionDataSourceSettings.CloudStorage.TextFormat( + delimiter=text_delimiter + ) + ) + elif input_format == "avro": + cloud_storage_settings.avro_format = ( + IngestionDataSourceSettings.CloudStorage.AvroFormat() + ) + elif input_format == "pubsub_avro": + cloud_storage_settings.pubsub_avro_format = ( + IngestionDataSourceSettings.CloudStorage.PubSubAvroFormat() + ) + else: + print( + 
"Invalid input_format: " + + input_format + + "; must be in ('text', 'avro', 'pubsub_avro')" + ) + return + + if match_glob: + cloud_storage_settings.match_glob = match_glob + + if minimum_object_create_time: + try: + minimum_object_create_time_timestamp = timestamp_pb2.Timestamp() + minimum_object_create_time_timestamp.FromJsonString( + minimum_object_create_time + ) + cloud_storage_settings.minimum_object_create_time = ( + minimum_object_create_time_timestamp + ) + except ValueError: + print("Invalid minimum_object_create_time: " + minimum_object_create_time) + return + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + cloud_storage=cloud_storage_settings, + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with Cloud Storage Ingestion Settings") + # [END pubsub_create_topic_with_cloud_storage_ingestion] + + def update_topic_type( project_id: str, topic_id: str, @@ -615,6 +697,19 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: create_topic_with_kinesis_ingestion_parser.add_argument("aws_role_arn") create_topic_with_kinesis_ingestion_parser.add_argument("gcp_service_account") + create_topic_with_cloud_storage_ingestion_parser = subparsers.add_parser( + "create_cloud_storage_ingestion", + help=create_topic_with_cloud_storage_ingestion.__doc__, + ) + create_topic_with_cloud_storage_ingestion_parser.add_argument("topic_id") + create_topic_with_cloud_storage_ingestion_parser.add_argument("bucket") + create_topic_with_cloud_storage_ingestion_parser.add_argument("input_format") + create_topic_with_cloud_storage_ingestion_parser.add_argument("text_delimiter") + create_topic_with_cloud_storage_ingestion_parser.add_argument("match_glob") + create_topic_with_cloud_storage_ingestion_parser.add_argument( + "minimum_object_create_time" + ) + update_topic_type_parser = subparsers.add_parser( "update_kinesis_ingestion", help=update_topic_type.__doc__ ) 
@@ -693,6 +788,16 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: args.aws_role_arn, args.gcp_service_account, ) + elif args.command == "create_cloud_storage_ingestion": + create_topic_with_cloud_storage_ingestion( + args.project_id, + args.topic_id, + args.bucket, + args.input_format, + args.text_delimiter, + args.match_glob, + args.minimum_object_create_time, + ) elif args.command == "update_kinesis_ingestion": update_topic_type( args.project_id, diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index adb015e8af31..6f17305cb24e 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -162,6 +162,40 @@ def test_create_topic_with_kinesis_ingestion( publisher_client.delete_topic(request={"topic": topic_path}) +def test_create_topic_with_cloud_storage_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + bucket = "pubsub-cloud-storage-bucket" + input_format = "text" + text_delimiter = "," + match_glob = "**.txt" + minimum_object_create_time = "1970-01-01T00:00:01Z" + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_cloud_storage_ingestion( + PROJECT_ID, + TOPIC_ID, + bucket, + input_format, + text_delimiter, + match_glob, + minimum_object_create_time, + ) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with Cloud Storage Ingestion Settings" in out + + # Clean up resource created for the test. 
+ publisher_client.delete_topic(request={"topic": topic_path}) + + def test_update_topic_type( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 4e86dfa5b92e..3a16ebc94839 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.25.0 +google-cloud-pubsub==2.26.0 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf==5.28.0; python_version >= '3.8' From 7e7c31d89fc25ac6f1d5959e35b353b0cd1ded3e Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 18:51:27 -0400 Subject: [PATCH 1116/1197] chore(main): release 2.26.1 (#1276) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index d53227a83b8f..9c3477bd7941 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.26.0" + ".": "2.26.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index e1a67b71193c..64bb863eb511 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 
+5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.26.1](https://github.com/googleapis/python-pubsub/compare/v2.26.0...v2.26.1) (2024-10-10) + + +### Documentation + +* Add ingestion from GCS sample ([#1273](https://github.com/googleapis/python-pubsub/issues/1273)) ([b59cc8d](https://github.com/googleapis/python-pubsub/commit/b59cc8d4fae593eb7592455a1696d7ab996a53dd)) + ## [2.26.0](https://github.com/googleapis/python-pubsub/compare/v2.25.2...v2.26.0) (2024-10-09) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index d56eed5c5db7..040d4e7f1cc1 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.26.0" # {x-release-please-version} +__version__ = "2.26.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index d56eed5c5db7..040d4e7f1cc1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.26.0" # {x-release-please-version} +__version__ = "2.26.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index a20353b05e7e..ead6d83f3b36 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.26.0" + "version": "2.26.1" }, "snippets": [ { From b2049546621a8aba9323ffc3d2e40751d6133b42 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 31 Oct 2024 13:57:02 -0400 Subject: [PATCH 1117/1197] build: use multiScm for Kokoro release builds (#1284) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.github/release-trigger.yml | 1 + .../.kokoro/docker/docs/requirements.txt | 42 +- .../.kokoro/docs/common.cfg | 2 +- .../google-cloud-pubsub/.kokoro/release.sh | 2 +- .../.kokoro/release/common.cfg | 8 +- .../.kokoro/requirements.txt | 610 +++++++++--------- .../.kokoro/samples/python3.13/common.cfg | 40 ++ .../.kokoro/samples/python3.13/continuous.cfg | 6 + .../samples/python3.13/periodic-head.cfg | 11 + .../.kokoro/samples/python3.13/periodic.cfg | 6 + .../.kokoro/samples/python3.13/presubmit.cfg | 6 + .../.kokoro/test-samples-impl.sh | 3 +- packages/google-cloud-pubsub/noxfile.py | 6 +- .../samples/snippets/noxfile.py | 2 +- 15 files changed, 393 insertions(+), 356 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg create mode 
100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 597e0c3261ca..7672b49b6307 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:e8dcfd7cbfd8beac3a3ff8d3f3185287ea0625d859168cc80faccfc9a7a00455 -# created: 2024-09-16T21:04:09.091105552Z + digest: sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 +# created: 2024-10-31T01:41:07.349286254Z diff --git a/packages/google-cloud-pubsub/.github/release-trigger.yml b/packages/google-cloud-pubsub/.github/release-trigger.yml index d4ca94189e16..4bb79e58eadf 100644 --- a/packages/google-cloud-pubsub/.github/release-trigger.yml +++ b/packages/google-cloud-pubsub/.github/release-trigger.yml @@ -1 +1,2 @@ enabled: true +multiScmName: diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt index 7129c7715594..66eacc82f041 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt @@ -4,39 +4,39 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox colorlog==6.8.2 \ 
--hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 # via nox -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 # via nox -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.1 \ - 
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox diff --git a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg index 63ce88a8292c..a9392e09af1f 100644 --- a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg @@ -63,4 +63,4 @@ before_action { keyname: "docuploader_service_account" } } -} \ No newline at end of file +} diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh index aeefd50f2c7a..006893576340 100755 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ b/packages/google-cloud-pubsub/.kokoro/release.sh @@ -23,7 +23,7 @@ python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source / export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-2") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") cd github/python-pubsub python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg index 8638067fa01d..a6b92c6371c4 100644 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ b/packages/google-cloud-pubsub/.kokoro/release/common.cfg @@ -28,17 +28,11 @@ before_action { fetch_keystore { keystore_resource { keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-2" + keyname: "google-cloud-pypi-token-keystore-3" } } } -# Tokens needed to report release status back to GitHub -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} - # Store the packages we uploaded to PyPI. That way, we have a record of exactly # what we published, which we can use to generate SBOMs and attestations. 
action { diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt index 9622baf0ba38..006d8ef931bf 100644 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/requirements.txt @@ -4,79 +4,94 @@ # # pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.4.0 \ - --hash=sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5 \ - --hash=sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f +argcomplete==3.5.1 \ + --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ + --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -attrs==23.2.0 \ - --hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \ - --hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1 +attrs==24.2.0 \ + --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ + --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 # via gcp-releasetool backports-tarfile==1.2.0 \ --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 # via jaraco-context -cachetools==5.3.3 \ - --hash=sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945 \ - --hash=sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105 +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a # via google-auth -certifi==2024.7.4 \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +certifi==2024.8.30 \ + 
--hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ + --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - --hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - 
--hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - 
--hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 +cffi==1.17.1 \ + --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ + --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ + --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ + --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ + --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ + --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ + --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ + --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ + --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ + --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ + --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ + --hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ + --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ + --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ + --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ + --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ + 
--hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ + --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ + --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ + --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ + --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ + --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ + --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ + --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ + --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ + --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ + --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ + --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ + --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ + --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ + --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ + --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ + --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ + --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ + --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ + --hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ + --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ + --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ + --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ + --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ + 
--hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ + --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ + --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ + --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ + --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ + --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ + --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ + --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ + --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ + --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ + --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ + --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ + --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ + --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ + --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ + --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ + --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ + --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ + --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ + --hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ + --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ + --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ + --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ + --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ + 
--hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ + --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ + --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b # via cryptography charset-normalizer==2.1.1 \ --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ @@ -97,72 +112,67 @@ colorlog==6.8.2 \ # via # gcp-docuploader # nox -cryptography==42.0.8 \ - --hash=sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad \ - --hash=sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583 \ - --hash=sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b \ - --hash=sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c \ - --hash=sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1 \ - --hash=sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648 \ - --hash=sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949 \ - --hash=sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba \ - --hash=sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c \ - --hash=sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9 \ - --hash=sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d \ - --hash=sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c \ - --hash=sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e \ - --hash=sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2 \ - --hash=sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d \ - --hash=sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7 \ - --hash=sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70 \ - --hash=sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2 \ - 
--hash=sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7 \ - --hash=sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14 \ - --hash=sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe \ - --hash=sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e \ - --hash=sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71 \ - --hash=sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961 \ - --hash=sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7 \ - --hash=sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c \ - --hash=sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28 \ - --hash=sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842 \ - --hash=sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902 \ - --hash=sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801 \ - --hash=sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a \ - --hash=sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e +cryptography==43.0.1 \ + --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ + --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ + --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ + --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ + --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ + --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ + --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ + --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ + --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ + 
--hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ + --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ + --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ + --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ + --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ + --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ + --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ + --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ + --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ + --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ + --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ + --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ + --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ + --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ + --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ + --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ + --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ + --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 # via # -r requirements.in # gcp-releasetool # secretstorage -distlib==0.3.8 \ - --hash=sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784 \ - --hash=sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64 +distlib==0.3.9 \ + --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ + --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 # via virtualenv docutils==0.21.2 \ 
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 # via readme-renderer -filelock==3.15.4 \ - --hash=sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb \ - --hash=sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7 +filelock==3.16.1 \ + --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ + --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv gcp-docuploader==0.6.5 \ --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea # via -r requirements.in -gcp-releasetool==2.0.1 \ - --hash=sha256:34314a910c08e8911d9c965bd44f8f2185c4f556e737d719c33a41f6a610de96 \ - --hash=sha256:b0d5863c6a070702b10883d37c4bdfd74bf930fe417f36c0c965d3b7c779ae62 +gcp-releasetool==2.1.1 \ + --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ + --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e # via -r requirements.in -google-api-core==2.19.1 \ - --hash=sha256:f12a9b8309b5e21d92483bbd47ce2c445861ec7d269ef6784ecc0ea8c1fa6125 \ - --hash=sha256:f4695f1e3650b316a795108a76a1c416e6afb036199d1c1f1f110916df479ffd +google-api-core==2.21.0 \ + --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ + --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d # via # google-cloud-core # google-cloud-storage -google-auth==2.31.0 \ - --hash=sha256:042c4702efa9f7d3c48d3a69341c209381b125faa6dbf3ebe56bc7e40ae05c23 \ - --hash=sha256:87805c36970047247c8afe614d4e3af8eceafc1ebba0c679fe75ddd1d575e871 +google-auth==2.35.0 \ + --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ + --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a # via # 
gcp-releasetool # google-api-core @@ -172,97 +182,56 @@ google-cloud-core==2.4.1 \ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 # via google-cloud-storage -google-cloud-storage==2.17.0 \ - --hash=sha256:49378abff54ef656b52dca5ef0f2eba9aa83dc2b2c72c78714b03a1a95fe9388 \ - --hash=sha256:5b393bc766b7a3bc6f5407b9e665b2450d36282614b7945e570b3480a456d1e1 +google-cloud-storage==2.18.2 \ + --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ + --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - 
--hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - --hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - 
--hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - 
--hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 +google-crc32c==1.6.0 \ + --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + 
--hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 # via # google-cloud-storage # google-resumable-media -google-resumable-media==2.7.1 \ - --hash=sha256:103ebc4ba331ab1bfdac0250f8033627a2cd7cde09e7ccff9181e31ba4315b2c \ - --hash=sha256:eae451a7b2e2cdbaaa0fd2eb00cc8a1ee5e95e16b55597359cbc3d27d7d90e33 +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 # via google-cloud-storage -googleapis-common-protos==1.63.2 \ - --hash=sha256:27a2499c7e8aff199665b22741997e485eccc8645aa9176c7c988e6fae507945 \ - --hash=sha256:27c5abdffc4911f28101e635de1533fb4cfd2c37fbaa9174587c799fac90aa87 +googleapis-common-protos==1.65.0 \ + --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ + --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 # via google-api-core -idna==3.7 \ - --hash=sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc \ - --hash=sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0 +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 # 
via requests -importlib-metadata==8.0.0 \ - --hash=sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f \ - --hash=sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812 +importlib-metadata==8.5.0 \ + --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ + --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 # via # -r requirements.in # keyring @@ -271,13 +240,13 @@ jaraco-classes==3.4.0 \ --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 # via keyring -jaraco-context==5.3.0 \ - --hash=sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266 \ - --hash=sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2 +jaraco-context==6.0.1 \ + --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ + --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 # via keyring -jaraco-functools==4.0.1 \ - --hash=sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664 \ - --hash=sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8 +jaraco-functools==4.1.0 \ + --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ + --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 # via keyring jeepney==0.8.0 \ --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ @@ -289,9 +258,9 @@ jinja2==3.1.4 \ --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d # via gcp-releasetool -keyring==25.2.1 \ - --hash=sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50 \ - --hash=sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b +keyring==25.4.1 \ + 
--hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ + --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b # via # gcp-releasetool # twine @@ -299,75 +268,76 @@ markdown-it-py==3.0.0 \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb # via rich -markupsafe==2.1.5 \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - --hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - 
--hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - 
--hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - --hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 +markupsafe==3.0.1 \ + --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ + --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ + --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ + --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ + --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ + 
--hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ + --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ + --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ + --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ + --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ + --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ + --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ + --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ + --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ + --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ + --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ + --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ + --hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ + --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ + --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ + --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ + --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ + --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ + --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ + --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ + --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ + --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ + --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ + --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ + 
--hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ + --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ + --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ + --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ + --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ + --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ + --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ + --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ + --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ + --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ + --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ + --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ + --hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ + --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ + --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ + --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ + --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ + --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ + --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ + --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ + --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ + --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ + --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ + --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ + 
--hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ + --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ + --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ + --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ + --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ + --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ + --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ + --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f # via jinja2 mdurl==0.1.2 \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba # via markdown-it-py -more-itertools==10.3.0 \ - --hash=sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463 \ - --hash=sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320 +more-itertools==10.5.0 \ + --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ + --hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 # via # jaraco-classes # jaraco-functools @@ -389,9 +359,9 @@ nh3==0.2.18 \ --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe # via readme-renderer -nox==2024.4.15 \ - --hash=sha256:6492236efa15a460ecb98e7b67562a28b70da006ab0be164e8821177577c0565 \ - --hash=sha256:ecf6700199cdfa9e5ea0a41ff5e6ef4641d09508eda6edb89d9987864115817f +nox==2024.10.9 \ + --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ + --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in packaging==24.1 \ --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ @@ 
-403,41 +373,41 @@ pkginfo==1.10.0 \ --hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 # via twine -platformdirs==4.2.2 \ - --hash=sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee \ - --hash=sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3 +platformdirs==4.3.6 \ + --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ + --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv proto-plus==1.24.0 \ --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 # via google-api-core -protobuf==5.27.2 \ - --hash=sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505 \ - --hash=sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b \ - --hash=sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38 \ - --hash=sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863 \ - --hash=sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470 \ - --hash=sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6 \ - --hash=sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce \ - --hash=sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca \ - --hash=sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5 \ - --hash=sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e \ - --hash=sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714 +protobuf==5.28.2 \ + --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ + --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ + 
--hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ + --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ + --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ + --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ + --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ + --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ + --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ + --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ + --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d # via # gcp-docuploader # gcp-releasetool # google-api-core # googleapis-common-protos # proto-plus -pyasn1==0.6.0 \ - --hash=sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c \ - --hash=sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473 +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 \ - --hash=sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6 \ - --hash=sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c # via google-auth pycparser==2.22 \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ @@ -449,9 +419,9 @@ pygments==2.18.0 \ # via # readme-renderer # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 +pyjwt==2.9.0 \ + 
--hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ + --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c # via gcp-releasetool pyperclip==1.9.0 \ --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 @@ -481,9 +451,9 @@ rfc3986==2.0.0 \ --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c # via twine -rich==13.7.1 \ - --hash=sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222 \ - --hash=sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432 +rich==13.9.2 \ + --hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ + --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 # via twine rsa==4.9 \ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ @@ -499,9 +469,9 @@ six==1.16.0 \ # via # gcp-docuploader # python-dateutil -tomli==2.0.1 \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f +tomli==2.0.2 \ + --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ + --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox twine==5.1.1 \ --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ @@ -510,28 +480,30 @@ twine==5.1.1 \ typing-extensions==4.12.2 \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via -r requirements.in -urllib3==2.2.2 \ - --hash=sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472 \ - --hash=sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168 + # via + # -r requirements.in + # rich +urllib3==2.2.3 \ + 
--hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ + --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 # via # requests # twine -virtualenv==20.26.3 \ - --hash=sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a \ - --hash=sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589 +virtualenv==20.26.6 \ + --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ + --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 # via nox -wheel==0.43.0 \ - --hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \ - --hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81 +wheel==0.44.0 \ + --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ + --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 # via -r requirements.in -zipp==3.19.2 \ - --hash=sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19 \ - --hash=sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c +zipp==3.20.2 \ + --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ + --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -setuptools==70.2.0 \ - --hash=sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05 \ - --hash=sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1 +setuptools==75.1.0 \ + --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ + --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 # via -r requirements.in diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg new file mode 100644 index 
000000000000..96783769ba40 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.13" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-313" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-pubsub/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg new file mode 100644 index 000000000000..a1c8d9759c88 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of 
file diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh index 55910c8ba178..53e365bc4e79 100755 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh @@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Install nox -python3.9 -m pip install --upgrade --quiet nox +# `virtualenv==20.26.6` is added for Python 3.7 compatibility +python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 # Use secrets acessor service account to get secrets if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index c6d0c11a37ce..1ba75e6b251f 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -67,7 +67,6 @@ CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() -# 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", "system", @@ -79,6 +78,7 @@ # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936 # "mypy_samples", # TODO: uncomment when the check passes "docs", + "docfx", "format", ] @@ -222,7 +222,7 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -430,7 +430,7 @@ def docfx(session): def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12"): + if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 3b7135946fd5..c9a3d1ecbf2a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From d0fe8fdf0d3a2591881e949f881682471de76e1d Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Sat, 2 Nov 2024 13:32:35 -0400 Subject: [PATCH 1118/1197] fix: Mark test_streaming_pull_max_messages flaky (#1288) --- packages/google-cloud-pubsub/tests/system.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 6bf8ef10f60c..7b0c0f93ad9a 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -46,7 +46,7 @@ from test_utils.system import unique_resource_id C = TypeVar("C", bound=Callable[..., Any]) -typed_flaky = cast(Callable[[C], C], flaky(max_runs=3, min_passes=1)) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=5, min_passes=1)) @pytest.fixture(scope="module") @@ -616,6 +616,7 @@ def test_streaming_pull_ack_deadline( finally: subscription_future.cancel() + @typed_flaky def test_streaming_pull_max_messages( self, publisher, topic_path_base, subscription_path_base, cleanup ): From 902dc357ac69c21da1623e2aff650a55cc07b499 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Sat, 2 Nov 2024 16:39:21 -0400 Subject: [PATCH 1119/1197] feat: Add support for Python 3.13 (#1281) Co-authored-by: Owl Bot Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- .../google-cloud-pubsub/.github/.OwlBot.lock.yaml | 2 +- .../.github/workflows/unittest.yml | 2 +- packages/google-cloud-pubsub/CONTRIBUTING.rst | 6 ++++-- packages/google-cloud-pubsub/noxfile.py | 12 ++++++++++-- packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/setup.py | 1 + .../google-cloud-pubsub/testing/constraints-3.13.txt | 7 +++++++ 7 files changed, 25 insertions(+), 7 deletions(-) create mode 100644 packages/google-cloud-pubsub/testing/constraints-3.13.txt diff --git 
a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 7672b49b6307..862cfa2885a8 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -14,4 +14,4 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 -# created: 2024-10-31T01:41:07.349286254Z + diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index dd8bd76922f9..6a0bc07438fa 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 727b5ec7fe3e..f153c3ae7ec8 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11 and 3.12 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -72,7 +72,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.12 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -227,6 +227,7 @@ We support: - `Python 3.10`_ - `Python 3.11`_ - `Python 3.12`_ +- `Python 3.13`_ .. _Python 3.7: https://docs.python.org/3.7/ .. 
_Python 3.8: https://docs.python.org/3.8/ @@ -234,6 +235,7 @@ We support: .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 1ba75e6b251f..2ccbfeae7ccf 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -36,7 +36,15 @@ DEFAULT_PYTHON_VERSION = "3.8" -UNIT_TEST_PYTHON_VERSIONS: List[str] = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"] +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", +] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", "asyncmock", @@ -422,7 +430,7 @@ def docfx(session): ) -@nox.session(python="3.12") +@nox.session(python="3.13") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 204b30ba5409..2e4b00bc9c87 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -336,7 +336,7 @@ samples=True, cov_level=100, versions=gcp.common.detect_versions(path="./google", default_first=True), - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"], + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], ) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 8339e1e18721..abe06552bc56 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -86,6 +86,7 @@ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + 
"Programming Language :: Python :: 3.13", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/google-cloud-pubsub/testing/constraints-3.13.txt b/packages/google-cloud-pubsub/testing/constraints-3.13.txt new file mode 100644 index 000000000000..ad3f0fa58e2d --- /dev/null +++ b/packages/google-cloud-pubsub/testing/constraints-3.13.txt @@ -0,0 +1,7 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf +grpc-google-iam-v1 From 8eb88d7684f9cf72c86286c26745b849058ca8ba Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sat, 2 Nov 2024 14:29:24 -0700 Subject: [PATCH 1120/1197] chore(main): release 2.27.0 (#1289) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 9c3477bd7941..b6aa39c5e802 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.26.1" + ".": "2.27.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 64bb863eb511..d55c31822aa8 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.27.0](https://github.com/googleapis/python-pubsub/compare/v2.26.1...v2.27.0) 
(2024-11-02) + + +### Features + +* Add support for Python 3.13 ([#1281](https://github.com/googleapis/python-pubsub/issues/1281)) ([0b46a33](https://github.com/googleapis/python-pubsub/commit/0b46a3321d6f19cd72e4f2ccdba73d062c7bd832)) + + +### Bug Fixes + +* Mark test_streaming_pull_max_messages flaky ([#1288](https://github.com/googleapis/python-pubsub/issues/1288)) ([d6635a0](https://github.com/googleapis/python-pubsub/commit/d6635a00dc2c614dd8608ef32ad4e79f9124e040)) + ## [2.26.1](https://github.com/googleapis/python-pubsub/compare/v2.26.0...v2.26.1) (2024-10-10) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 040d4e7f1cc1..f0fcebfa4138 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.26.1" # {x-release-please-version} +__version__ = "2.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 040d4e7f1cc1..f0fcebfa4138 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.26.1" # {x-release-please-version} +__version__ = "2.27.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index ead6d83f3b36..b1aab3038345 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.26.1" + "version": "2.27.0" }, "snippets": [ { From 2a39e820ac0637ff710ba6fa606969d07a33152f Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 5 Nov 2024 16:02:04 -0500 Subject: [PATCH 1121/1197] test: add type flaky to unit test (#1297) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/noxfile.py | 4 +++- packages/google-cloud-pubsub/owlbot.py | 1 + packages/google-cloud-pubsub/setup.py | 2 +- .../unit/pubsub_v1/publisher/test_publisher_client.py | 7 +++++++ 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 2ccbfeae7ccf..47611cdffbca 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -55,7 +55,9 @@ UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS: List[str] = [ + "flaky", +] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 2e4b00bc9c87..77eb08250229 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -337,6 +337,7 
@@ cov_level=100, versions=gcp.common.detect_versions(path="./google", default_first=True), unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], + unit_test_extras=["flaky"], system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], ) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index abe06552bc56..fb2b94fad06c 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,7 +50,7 @@ "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", ] -extras = {"libcst": "libcst >= 0.3.10"} +extras = {"libcst": "libcst >= 0.3.10,", "flaky": "flaky"} url = "https://github.com/googleapis/python-pubsub" package_root = os.path.abspath(os.path.dirname(__file__)) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index abc33f8cb470..55198b5909ac 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -28,6 +28,8 @@ import pytest import time +from flaky import flaky +from typing import cast, Callable, Any, TypeVar from opentelemetry import trace from google.api_core import gapic_v1 @@ -49,6 +51,10 @@ ) +C = TypeVar("C", bound=Callable[..., Any]) +typed_flaky = cast(Callable[[C], C], flaky(max_runs=5, min_passes=1)) + + def _assert_retries_equal(retry, retry2): # Retry instances cannot be directly compared, because their predicates are # different instances of the same function. 
We thus manually compare their other @@ -142,6 +148,7 @@ def test_init_w_custom_transport(creds): False, ], ) +@typed_flaky def test_open_telemetry_publisher_options(creds, enable_open_telemetry): if sys.version_info >= (3, 8) or enable_open_telemetry is False: client = publisher.Client( From 97e568ab21db256994b03f7451ba7532d9c638a3 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 6 Nov 2024 16:35:53 -0500 Subject: [PATCH 1122/1197] chore: partial revert of #1297 (#1298) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/noxfile.py | 4 ++-- packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/setup.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 47611cdffbca..7bae0161f2bb 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -54,10 +54,10 @@ ] UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] -UNIT_TEST_DEPENDENCIES: List[str] = [] -UNIT_TEST_EXTRAS: List[str] = [ +UNIT_TEST_DEPENDENCIES: List[str] = [ "flaky", ] +UNIT_TEST_EXTRAS: List[str] = [] UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 77eb08250229..5828fbecf601 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -337,7 +337,7 @@ cov_level=100, versions=gcp.common.detect_versions(path="./google", default_first=True), unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], - unit_test_extras=["flaky"], + unit_test_dependencies=["flaky"], system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], ) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 
fb2b94fad06c..abe06552bc56 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -50,7 +50,7 @@ "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", ] -extras = {"libcst": "libcst >= 0.3.10,", "flaky": "flaky"} +extras = {"libcst": "libcst >= 0.3.10"} url = "https://github.com/googleapis/python-pubsub" package_root = os.path.abspath(os.path.dirname(__file__)) From 43936be732512deb360652cdf34638ff8d1a61de Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 8 Nov 2024 06:20:25 -0500 Subject: [PATCH 1123/1197] fix: Add support for Python3.13 (#1302) --- packages/google-cloud-pubsub/setup.py | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index abe06552bc56..1d68bf87bca3 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -42,6 +42,7 @@ "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", "proto-plus >= 1.22.0, <2.0.0dev", "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "proto-plus >= 1.25.0, < 2.0.0dev; python_version >= '3.13'", "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", "grpcio-status >= 1.33.2", From f0df199fce833edd26e4a323154a830d3fd5d479 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Sat, 9 Nov 2024 17:55:41 -0500 Subject: [PATCH 1124/1197] chore(main): release 2.27.1 (#1303) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 
2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index b6aa39c5e802..aa8b67ac9255 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.27.0" + ".": "2.27.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index d55c31822aa8..570ee801f53a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.27.1](https://github.com/googleapis/python-pubsub/compare/v2.27.0...v2.27.1) (2024-11-08) + + +### Bug Fixes + +* Add support for Python3.13 ([#1302](https://github.com/googleapis/python-pubsub/issues/1302)) ([ab22e27](https://github.com/googleapis/python-pubsub/commit/ab22e27954450b4e06ec98fe2e3458056aa8ca60)) + ## [2.27.0](https://github.com/googleapis/python-pubsub/compare/v2.26.1...v2.27.0) (2024-11-02) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index f0fcebfa4138..4e3e20aece9b 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.27.0" # {x-release-please-version} +__version__ = "2.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index f0fcebfa4138..4e3e20aece9b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.0" # {x-release-please-version} +__version__ = "2.27.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index b1aab3038345..0800fa5f37ff 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.27.0" + "version": "2.27.1" }, "snippets": [ { From 337eaf2b5355fc8b3c980668225400e659fd1ce5 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 12 Nov 2024 15:36:04 -0500 Subject: [PATCH 1125/1197] chore(python): update dependencies in .kokoro/docker/docs (#1305) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 ++-- .../.github/release-trigger.yml | 2 +- .../.kokoro/docker/docs/requirements.txt | 20 +++++++++---------- 3 files changed, 13 insertions(+), 13 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 862cfa2885a8..6301519a9a05 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ 
b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5cddfe2fb5019bbf78335bc55f15bc13e18354a56b3ff46e1834f8e540807f05 - + digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 +# created: 2024-11-12T12:09:45.821174897Z diff --git a/packages/google-cloud-pubsub/.github/release-trigger.yml b/packages/google-cloud-pubsub/.github/release-trigger.yml index 4bb79e58eadf..aa0d30a3ebb8 100644 --- a/packages/google-cloud-pubsub/.github/release-trigger.yml +++ b/packages/google-cloud-pubsub/.github/release-trigger.yml @@ -1,2 +1,2 @@ enabled: true -multiScmName: +multiScmName: python-pubsub diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt index 66eacc82f041..8bb0764594b1 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt @@ -1,5 +1,5 @@ # -# This file is autogenerated by pip-compile with Python 3.9 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --allow-unsafe --generate-hashes requirements.in @@ -8,9 +8,9 @@ argcomplete==3.5.1 \ --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 # via nox -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 +colorlog==6.9.0 \ + --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ + --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 # via nox distlib==0.3.9 \ 
--hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ @@ -24,9 +24,9 @@ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 +packaging==24.2 \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f # via nox platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ @@ -36,7 +36,7 @@ tomli==2.0.2 \ --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed # via nox -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 +virtualenv==20.27.1 \ + --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ + --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 # via nox From 49b66e19658ce1dcf919e25f5335a362dc0689e6 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Tue, 12 Nov 2024 15:36:41 -0500 Subject: [PATCH 1126/1197] fix: Reduce the code coverage percentage to 99 (#1278) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/workflows/unittest.yml | 2 +- packages/google-cloud-pubsub/noxfile.py | 2 +- packages/google-cloud-pubsub/owlbot.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml 
b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 6a0bc07438fa..6eca3149c126 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -55,4 +55,4 @@ jobs: run: | find .coverage-results -type f -name '*.zip' -exec unzip {} \; coverage combine .coverage-results/**/.coverage* - coverage report --show-missing --fail-under=100 + coverage report --show-missing --fail-under=99 diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 7bae0161f2bb..dd182f105f6c 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -346,7 +346,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 5828fbecf601..3c91607d3fbe 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -334,7 +334,7 @@ templated_files = gcp.CommonTemplates().py_library( microgenerator=True, samples=True, - cov_level=100, + cov_level=99, versions=gcp.common.detect_versions(path="./google", default_first=True), unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], unit_test_dependencies=["flaky"], From 3322acb7f0cd70ab13a9f6a967aba8cfd3f33cc2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 3 Jan 2025 09:49:08 -0500 Subject: [PATCH 1127/1197] chore(python): Update the python version in docs presubmit to use 3.10 (#1315) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 6 +-- .../.github/workflows/docs.yml | 2 +- 
.../.github/workflows/unittest.yml | 5 +- .../.kokoro/docker/docs/requirements.txt | 52 +++++++++++++++---- 4 files changed, 49 insertions(+), 16 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 6301519a9a05..1d0fd7e7878b 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -1,4 +1,4 @@ -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:2ed982f884312e4883e01b5ab8af8b6935f0216a5a2d82928d273081fc3be562 -# created: 2024-11-12T12:09:45.821174897Z + digest: sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 +# created: 2025-01-02T23:09:36.975468657Z diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index 698fbc5c94da..2833fe98fff0 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.9" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 6eca3149c126..6a0429d96101 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -5,7 +5,10 @@ on: name: unittest jobs: unit: - runs-on: ubuntu-latest + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use 
`ubuntu-latest` once this bug is fixed. + # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix + # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories + runs-on: ubuntu-22.04 strategy: matrix: python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt index 8bb0764594b1..f99a5c4aac7f 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt @@ -2,11 +2,11 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes requirements.in +# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in # -argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 +argcomplete==3.5.2 \ + --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ + --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb # via nox colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ @@ -23,7 +23,7 @@ filelock==3.16.1 \ nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in + # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ 
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,11 +32,41 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed +tomli==2.2.1 \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + 
--hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.27.1 \ - --hash=sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba \ - --hash=sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4 +virtualenv==20.28.0 \ + --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ + --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa # via nox From 934712d97044e5c922565680a315fa7a70ad0167 Mon Sep 17 00:00:00 2001 From: Zion Amsalem Date: Fri, 3 Jan 2025 17:43:51 +0200 Subject: [PATCH 1128/1197] tests: Resolve connections deprecation warning (#1310) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/pytest.ini | 2 -- packages/google-cloud-pubsub/tests/system.py | 4 ++-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git 
a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 34f39331626c..4cedf2b4f1e3 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -18,5 +18,3 @@ filterwarnings = # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 ignore:.*pkg_resources.declare_namespace:DeprecationWarning ignore:.*pkg_resources is deprecated as an API:DeprecationWarning - # Remove once https://github.com/googleapis/python-pubsub/issues/1206 is fixed. - ignore:.*connections\(\) is deprecated and will be removed; use net_connections\(\) instead:DeprecationWarning \ No newline at end of file diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index 7b0c0f93ad9a..e1af7440275c 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -468,7 +468,7 @@ def test_subscriber_not_leaking_open_sockets( publisher.create_topic(name=topic_path) current_process = psutil.Process() - conn_count_start = len(current_process.connections()) + conn_count_start = len(current_process.net_connections()) # Publish a few messages, then synchronously pull them and check that # no sockets are leaked. @@ -487,7 +487,7 @@ def test_subscriber_not_leaking_open_sockets( response = subscriber.pull(subscription=subscription_path, max_messages=3) assert len(response.received_messages) == 3 - conn_count_end = len(current_process.connections()) + conn_count_end = len(current_process.net_connections()) # To avoid flakiness, use <= in the assertion, since on rare occasions additional # sockets are closed, causing the == assertion to fail. 
From d5291c48c6025370928068db542cb1227c046609 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Mon, 6 Jan 2025 14:35:44 -0500 Subject: [PATCH 1129/1197] fix: Handle TransportError Exceptions thrown from gapic_publish (#1318) --- .../cloud/pubsub_v1/publisher/_batch/thread.py | 6 +++++- .../unit/pubsub_v1/publisher/batch/test_thread.py | 13 ++++++++++--- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py index c4bf67c35b84..2afbe37611a1 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_batch/thread.py @@ -24,6 +24,7 @@ from opentelemetry import trace import google.api_core.exceptions from google.api_core import gapic_v1 +from google.auth import exceptions as auth_exceptions from google.cloud.pubsub_v1.publisher import exceptions from google.cloud.pubsub_v1.publisher import futures @@ -342,7 +343,10 @@ def _commit(self) -> None: ) span.set_attribute(key="messaging.message.id", value=message_id) wrapper.end_create_span() - except google.api_core.exceptions.GoogleAPIError as exc: + except ( + google.api_core.exceptions.GoogleAPIError, + auth_exceptions.TransportError, + ) as exc: # We failed to publish, even after retries, so set the exception on # all futures and exit. 
self._status = base.BatchStatus.ERROR diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index 32eaa3d9815a..ad8fa376bfd8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -31,6 +31,7 @@ import google.api_core.exceptions from google.api_core import gapic_v1 from google.auth import credentials +from google.auth import exceptions as auth_exceptions from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.publisher import exceptions @@ -329,7 +330,14 @@ def test_blocking__commit_wrong_messageid_length(): assert isinstance(future.exception(), exceptions.PublishError) -def test_block__commmit_api_error(): +@pytest.mark.parametrize( + "error", + [ + (google.api_core.exceptions.InternalServerError("Internal server error"),), + (auth_exceptions.TransportError("some transport error"),), + ], +) +def test_block__commmit_api_error(error): batch = create_batch() futures = ( batch.publish( @@ -345,7 +353,6 @@ def test_block__commmit_api_error(): ) # Make the API throw an error when publishing. 
- error = google.api_core.exceptions.InternalServerError("uh oh") patch = mock.patch.object(type(batch.client), "_gapic_publish", side_effect=error) with patch: @@ -353,7 +360,7 @@ def test_block__commmit_api_error(): for future in futures: assert future.done() - assert future.exception() == error + assert future.exception() == error[0] def test_block__commmit_retry_error(): From bc134de64c7c6b020d9c123afe264b0e3f3e4011 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 12:18:57 -0800 Subject: [PATCH 1130/1197] chore(main): release 2.27.2 (#1319) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index aa8b67ac9255..fd23cf2f658b 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.27.1" + ".": "2.27.2" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 570ee801f53a..bc11c524df2a 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.27.2](https://github.com/googleapis/python-pubsub/compare/v2.27.1...v2.27.2) (2025-01-06) + + +### Bug Fixes + +* Handle TransportError Exceptions thrown from gapic_publish 
([#1318](https://github.com/googleapis/python-pubsub/issues/1318)) ([0e058c7](https://github.com/googleapis/python-pubsub/commit/0e058c73487384100847adcb2f4ab95a61c072c4)) + ## [2.27.1](https://github.com/googleapis/python-pubsub/compare/v2.27.0...v2.27.1) (2024-11-08) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 4e3e20aece9b..d84bd2055964 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.1" # {x-release-please-version} +__version__ = "2.27.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 4e3e20aece9b..d84bd2055964 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.27.1" # {x-release-please-version} +__version__ = "2.27.2" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 0800fa5f37ff..d96191968bd0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.27.1" + "version": "2.27.2" }, "snippets": [ { From 640b3a10d6f3f0a80cd0227320cd48282213124c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 11:09:48 -0500 Subject: [PATCH 1131/1197] chore(python): exclude .github/workflows/unittest.yml in renovate config (#1323) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/renovate.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 1d0fd7e7878b..10cf433a8b00 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:a1c5112b81d645f5bbc4d4bbc99d7dcb5089a52216c0e3fb1203a0eeabadd7d5 -# created: 2025-01-02T23:09:36.975468657Z + digest: sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a +# created: 2025-01-09T12:01:16.422459506Z diff --git a/packages/google-cloud-pubsub/renovate.json b/packages/google-cloud-pubsub/renovate.json index 39b2a0ec9296..c7875c469bd5 100644 --- a/packages/google-cloud-pubsub/renovate.json +++ b/packages/google-cloud-pubsub/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } From 5d93e9cb568967454f08329fd2171dba27227d55 Mon Sep 17 00:00:00 2001 From: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> Date: Fri, 24 Jan 2025 15:16:13 -0500 Subject: [PATCH 1132/1197] fix: Stop using api_core default timeouts in publish since they are broken (#1326) --- .../google/cloud/pubsub_v1/types.py | 6 +++++- .../unit/pubsub_v1/publisher/test_publisher_client.py | 10 +++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py index 7e94a725078d..6746e141aecb 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/types.py @@ -35,6 +35,7 @@ from google.protobuf import timestamp_pb2 from google.api_core.protobuf_helpers import get_messages +from google.api_core.timeout import ConstantTimeout from google.pubsub_v1.types import pubsub as pubsub_gapic_types @@ -191,7 
+192,10 @@ class PublisherOptions(NamedTuple): "an instance of :class:`google.api_core.retry.Retry`." ) - timeout: "OptionalTimeout" = gapic_v1.method.DEFAULT # use api_core default + # Use ConstantTimeout instead of api_core default because the default + # value results in retries with zero deadline. + # Refer https://github.com/googleapis/python-api-core/issues/654 + timeout: "OptionalTimeout" = ConstantTimeout(60) ( "Timeout settings for message publishing by the client. It should be " "compatible with :class:`~.pubsub_v1.types.TimeoutType`." diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 55198b5909ac..a311edf237e9 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -19,6 +19,7 @@ import sys import grpc +import math # special case python < 3.8 if sys.version_info.major == 3 and sys.version_info.minor < 8: @@ -35,6 +36,7 @@ from google.api_core import gapic_v1 from google.api_core import retry as retries from google.api_core.gapic_v1.client_info import METRICS_METADATA_KEY +from google.api_core.timeout import ConstantTimeout from google.cloud.pubsub_v1 import publisher from google.cloud.pubsub_v1 import types @@ -652,6 +654,8 @@ def test_publish_new_batch_needed(creds): future = client.publish(topic, b"foo", bar=b"baz") assert future is mock.sentinel.future + call_args = batch_class.call_args + # Check the mocks. 
batch_class.assert_called_once_with( client=mock.ANY, @@ -660,8 +664,12 @@ def test_publish_new_batch_needed(creds): batch_done_callback=None, commit_when_full=True, commit_retry=gapic_v1.method.DEFAULT, - commit_timeout=gapic_v1.method.DEFAULT, + commit_timeout=mock.ANY, ) + commit_timeout_arg = call_args[1]["commit_timeout"] + assert isinstance(commit_timeout_arg, ConstantTimeout) + assert math.isclose(commit_timeout_arg._timeout, 60) is True + message_pb = gapic_types.PubsubMessage(data=b"foo", attributes={"bar": "baz"}) wrapper = PublishMessageWrapper(message=message_pb) batch1.publish.assert_called_once_with(wrapper) From 5671083b8d1a1cde9971b7e3e90c624ed635180a Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 24 Jan 2025 13:03:42 -0800 Subject: [PATCH 1133/1197] chore(main): release 2.27.3 (#1350) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index fd23cf2f658b..f760fdd0732e 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.27.2" + ".": "2.27.3" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bc11c524df2a..69b1f0c272f9 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: 
https://pypi.org/project/google-cloud-pubsub/#history +## [2.27.3](https://github.com/googleapis/python-pubsub/compare/v2.27.2...v2.27.3) (2025-01-24) + + +### Bug Fixes + +* Stop using api_core default timeouts in publish since they are broken ([#1326](https://github.com/googleapis/python-pubsub/issues/1326)) ([ba2c2ee](https://github.com/googleapis/python-pubsub/commit/ba2c2eef7da89a3c14c14d9b6191cd8738c30341)) + ## [2.27.2](https://github.com/googleapis/python-pubsub/compare/v2.27.1...v2.27.2) (2025-01-06) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index d84bd2055964..03a0c337236e 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.2" # {x-release-please-version} +__version__ = "2.27.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index d84bd2055964..03a0c337236e 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.27.2" # {x-release-please-version} +__version__ = "2.27.3" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d96191968bd0..f0604091931d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.27.2" + "version": "2.27.3" }, "snippets": [ { From cae93626ac5c8e6de2f80f657c8c77abad426cb6 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 30 Jan 2025 01:38:36 +0500 Subject: [PATCH 1134/1197] fix: set creds only if transport not provided (#1348) Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- .../google/cloud/pubsub_v1/publisher/client.py | 6 +++++- .../google/cloud/pubsub_v1/subscriber/client.py | 6 +++++- packages/google-cloud-pubsub/owlbot.py | 2 ++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py index 481a8472d720..0740e318558d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -128,11 +128,15 @@ def __init__( # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. 
if os.environ.get("PUBSUB_EMULATOR_HOST"): kwargs["client_options"] = { "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST") } - kwargs["credentials"] = AnonymousCredentials() + # Configure credentials directly to transport, if provided. + if "transport" not in kwargs: + kwargs["credentials"] = AnonymousCredentials() # For a transient failure, retry publishing the message infinitely. self.publisher_options = types.PublisherOptions(*publisher_options) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py index 175095077f92..41277e5e15a5 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -81,11 +81,15 @@ def __init__( # Sanity check: Is our goal to use the emulator? # If so, create a grpc insecure channel with the emulator host # as the target. + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. if os.environ.get("PUBSUB_EMULATOR_HOST"): kwargs["client_options"] = { "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST") } - kwargs["credentials"] = AnonymousCredentials() + # Configure credentials directly to transport, if provided. + if "transport" not in kwargs: + kwargs["credentials"] = AnonymousCredentials() # Instantiate the underlying GAPIC client. super().__init__(**kwargs) diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 3c91607d3fbe..a8044bd6f744 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -99,6 +99,8 @@ if count < len(clients_to_patch): raise Exception(err_msg) + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. 
count = s.replace( clients_to_patch, r"# initialize with the provided callable or the passed in class", From 745242aca068d35d13e745c79ceefb39d4d72497 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 30 Jan 2025 02:53:27 +0500 Subject: [PATCH 1135/1197] fix: get channel target for a gRPC request (#1339) --- .../publisher/test_publisher_client.py | 24 ++++++++++++++++--- .../subscriber/test_subscriber_client.py | 24 ++++++++++++++++--- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index a311edf237e9..1e1cc61b31e8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -57,6 +57,14 @@ typed_flaky = cast(Callable[[C], C], flaky(max_runs=5, min_passes=1)) +# NOTE: This interceptor is required to create an intercept channel. +class _PublisherClientGrpcInterceptor( + grpc.UnaryUnaryClientInterceptor, +): + def intercept_unary_unary(self, continuation, client_call_details, request): + pass + + def _assert_retries_equal(retry, retry2): # Retry instances cannot be directly compared, because their predicates are # different instances of the same function. We thus manually compare their other @@ -416,17 +424,27 @@ def init(self, *args, **kwargs): assert client.transport._ssl_channel_credentials == mock_ssl_creds -def test_init_emulator(monkeypatch): +def test_init_emulator(monkeypatch, creds): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. 
- client = publisher.Client() + + # TODO(https://github.com/grpc/grpc/issues/38519): Workaround to create an intercept + # channel (for forwards compatibility) with a channel created by the publisher client + # where target is set to the emulator host. + channel = publisher.Client().transport.grpc_channel + interceptor = _PublisherClientGrpcInterceptor() + intercept_channel = grpc.intercept_channel(channel, interceptor) + transport = publisher.Client.get_transport_class("grpc")( + credentials=creds, channel=intercept_channel + ) + client = publisher.Client(transport=transport) # Establish that a gRPC request would attempt to hit the emulator host. # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client._transport.publish._channel + channel = client._transport.publish._thunk("")._channel # Behavior to include dns prefix changed in gRPCv1.63 grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 7c0ebfd83818..4b381245de5c 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -36,6 +36,14 @@ from google.pubsub_v1.types import PubsubMessage +# NOTE: This interceptor is required to create an intercept channel. 
+class _SubscriberClientGrpcInterceptor( + grpc.UnaryUnaryClientInterceptor, +): + def intercept_unary_unary(self, continuation, client_call_details, request): + pass + + def test_init_default_client_info(creds): client = subscriber.Client(credentials=creds) @@ -119,17 +127,27 @@ def init(self, *args, **kwargs): assert client.transport._ssl_channel_credentials == mock_ssl_creds -def test_init_emulator(monkeypatch): +def test_init_emulator(monkeypatch, creds): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. - client = subscriber.Client() + + # TODO(https://github.com/grpc/grpc/issues/38519): Workaround to create an intercept + # channel (for forwards compatibility) with a channel created by the publisher client + # where target is set to the emulator host. + channel = subscriber.Client().transport.grpc_channel + interceptor = _SubscriberClientGrpcInterceptor() + intercept_channel = grpc.intercept_channel(channel, interceptor) + transport = subscriber.Client.get_transport_class("grpc")( + credentials=creds, channel=intercept_channel + ) + client = subscriber.Client(transport=transport) # Establish that a gRPC request would attempt to hit the emulator host. # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. 
- channel = client._transport.pull._channel + channel = client._transport.pull._thunk("")._channel # Behavior to include dns prefix changed in gRPCv1.63 grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): From fcf6758bd64d5d260ac89b776c6abc3fd0aa27da Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 13:30:24 -0500 Subject: [PATCH 1136/1197] chore(python): fix docs publish build (#1327) Co-authored-by: Owl Bot --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/requirements.in | 1 + .../.kokoro/docker/docs/requirements.txt | 243 +++++++++++++++++- .../.kokoro/publish-docs.sh | 4 - 4 files changed, 237 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 10cf433a8b00..4c0027ff1c61 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:8ff1efe878e18bd82a0fb7b70bb86f77e7ab6901fed394440b6135db0ba8d84a -# created: 2025-01-09T12:01:16.422459506Z + digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a +# created: 2025-01-16T15:24:11.364245182Z diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in index 816817c672a1..586bd07037ae 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in @@ -1 +1,2 @@ nox +gcp-docuploader diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt index f99a5c4aac7f..a9360a25b707 100644 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt +++ b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt @@ -2,16 +2,124 @@ # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --allow-unsafe --generate-hashes synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in +# pip-compile --allow-unsafe --generate-hashes requirements.in # -argcomplete==3.5.2 \ - --hash=sha256:036d020d79048a5d525bc63880d7a4b8d1668566b8a76daf1144c0bbe0f63472 \ - --hash=sha256:23146ed7ac4403b70bd6026402468942ceba34a6732255b9edf5b7354f68a6bb +argcomplete==3.5.3 \ + --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ + --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 # via nox +cachetools==5.5.0 \ + --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ + --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a + # via google-auth +certifi==2024.12.14 \ + 
--hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ + --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db + # via requests +charset-normalizer==3.4.1 \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + 
--hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + 
--hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + 
--hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 + # via requests +click==8.1.8 \ + 
--hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a + # via gcp-docuploader colorlog==6.9.0 \ --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via nox + # via + # gcp-docuploader + # nox distlib==0.3.9 \ --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 @@ -20,10 +128,78 @@ filelock==3.16.1 \ --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 # via virtualenv +gcp-docuploader==0.6.5 \ + --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ + --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea + # via -r requirements.in +google-api-core==2.24.0 \ + --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ + --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf + # via + # google-cloud-core + # google-cloud-storage +google-auth==2.37.0 \ + --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ + --hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 + # via + # google-api-core + # google-cloud-core + # google-cloud-storage +google-cloud-core==2.4.1 \ + --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ + --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 + # via google-cloud-storage +google-cloud-storage==2.19.0 \ + --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ + --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 + # via gcp-docuploader +google-crc32c==1.6.0 \ + 
--hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ + --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ + --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ + --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ + --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ + --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ + --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ + --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ + --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ + --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ + --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ + --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ + --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ + --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ + --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ + --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ + --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ + --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ + --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ + --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ + --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ + --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ + --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ + --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ + 
--hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ + --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ + --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 + # via + # google-cloud-storage + # google-resumable-media +google-resumable-media==2.7.2 \ + --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ + --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 + # via google-cloud-storage +googleapis-common-protos==1.66.0 \ + --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ + --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed + # via google-api-core +idna==3.10 \ + --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ + --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 + # via requests nox==2024.10.9 \ --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r synthtool/gcp/templates/python_library/.kokoro/docker/docs/requirements.in + # via -r requirements.in packaging==24.2 \ --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f @@ -32,6 +208,51 @@ platformdirs==4.3.6 \ --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb # via virtualenv +proto-plus==1.25.0 \ + --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ + --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 + # via google-api-core +protobuf==5.29.3 \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + 
--hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 + # via + # gcp-docuploader + # google-api-core + # googleapis-common-protos + # proto-plus +pyasn1==0.6.1 \ + --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ + --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 + # via + # pyasn1-modules + # rsa +pyasn1-modules==0.4.1 \ + --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ + --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c + # via google-auth +requests==2.32.3 \ + --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ + --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 + # via + # google-api-core + # google-cloud-storage +rsa==4.9 \ + --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ + --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 + # via google-auth +six==1.17.0 \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 + # via gcp-docuploader tomli==2.2.1 \ 
--hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ @@ -66,7 +287,11 @@ tomli==2.2.1 \ --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 # via nox -virtualenv==20.28.0 \ - --hash=sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0 \ - --hash=sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa +urllib3==2.3.0 \ + --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ + --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d + # via requests +virtualenv==20.28.1 \ + --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ + --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 # via nox diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh index 233205d580e9..4ed4aaf1346f 100755 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh @@ -20,10 +20,6 @@ export PYTHONUNBUFFERED=1 export PATH="${HOME}/.local/bin:${PATH}" -# Install nox -python3.10 -m pip install --require-hashes -r .kokoro/requirements.txt -python3.10 -m nox --version - # build docs nox -s docs From 4298672ad7e45f83b93078ff89c313c5f77be9d4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 14:49:49 -0800 Subject: [PATCH 1137/1197] feat: add support for message transforms to Topic and Subscription (#1274) Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: Anthonios Partheniou --- .../google/pubsub/__init__.py | 6 + .../google/pubsub_v1/__init__.py | 6 + .../services/publisher/async_client.py | 20 +- .../pubsub_v1/services/publisher/client.py 
| 40 +- .../services/publisher/transports/README.rst | 9 + .../services/publisher/transports/base.py | 15 + .../publisher/transports/grpc_asyncio.py | 46 +- .../services/publisher/transports/rest.py | 1042 +-- .../publisher/transports/rest_base.py | 680 ++ .../services/schema_service/async_client.py | 20 +- .../services/schema_service/client.py | 40 +- .../schema_service/transports/README.rst | 9 + .../schema_service/transports/base.py | 15 + .../schema_service/transports/grpc_asyncio.py | 48 +- .../schema_service/transports/rest.py | 1140 +-- .../schema_service/transports/rest_base.py | 748 ++ .../services/subscriber/async_client.py | 20 +- .../pubsub_v1/services/subscriber/client.py | 40 +- .../services/subscriber/transports/README.rst | 9 + .../services/subscriber/transports/base.py | 15 + .../subscriber/transports/grpc_asyncio.py | 60 +- .../services/subscriber/transports/rest.py | 1588 +++-- .../subscriber/transports/rest_base.py | 1026 +++ .../google/pubsub_v1/types/__init__.py | 6 + .../google/pubsub_v1/types/pubsub.py | 698 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../scripts/fixup_pubsub_v1_keywords.py | 4 +- .../unit/gapic/pubsub_v1/test_publisher.py | 3938 ++++++----- .../gapic/pubsub_v1/test_schema_service.py | 4586 ++++++------ .../unit/gapic/pubsub_v1/test_subscriber.py | 6300 +++++++++-------- 30 files changed, 13099 insertions(+), 9077 deletions(-) create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/README.rst create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/README.rst create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py create mode 100644 packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/README.rst create mode 100644 
packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index 84f0b8294dbb..c3f03499626c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -42,6 +42,8 @@ from google.pubsub_v1.types.pubsub import GetSubscriptionRequest from google.pubsub_v1.types.pubsub import GetTopicRequest from google.pubsub_v1.types.pubsub import IngestionDataSourceSettings +from google.pubsub_v1.types.pubsub import IngestionFailureEvent +from google.pubsub_v1.types.pubsub import JavaScriptUDF from google.pubsub_v1.types.pubsub import ListSnapshotsRequest from google.pubsub_v1.types.pubsub import ListSnapshotsResponse from google.pubsub_v1.types.pubsub import ListSubscriptionsRequest @@ -53,6 +55,7 @@ from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsRequest from google.pubsub_v1.types.pubsub import ListTopicSubscriptionsResponse from google.pubsub_v1.types.pubsub import MessageStoragePolicy +from google.pubsub_v1.types.pubsub import MessageTransform from google.pubsub_v1.types.pubsub import ModifyAckDeadlineRequest from google.pubsub_v1.types.pubsub import ModifyPushConfigRequest from google.pubsub_v1.types.pubsub import PlatformLogsSettings @@ -115,6 +118,8 @@ "GetSubscriptionRequest", "GetTopicRequest", "IngestionDataSourceSettings", + "IngestionFailureEvent", + "JavaScriptUDF", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", @@ -126,6 +131,7 @@ "ListTopicSubscriptionsRequest", "ListTopicSubscriptionsResponse", "MessageStoragePolicy", + "MessageTransform", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", "PlatformLogsSettings", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index fd7ecb6d4c89..751f77206f1d 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -40,6 +40,8 @@ from .types.pubsub import GetSubscriptionRequest from .types.pubsub import GetTopicRequest from .types.pubsub import IngestionDataSourceSettings +from .types.pubsub import IngestionFailureEvent +from .types.pubsub import JavaScriptUDF from .types.pubsub import ListSnapshotsRequest from .types.pubsub import ListSnapshotsResponse from .types.pubsub import ListSubscriptionsRequest @@ -51,6 +53,7 @@ from .types.pubsub import ListTopicSubscriptionsRequest from .types.pubsub import ListTopicSubscriptionsResponse from .types.pubsub import MessageStoragePolicy +from .types.pubsub import MessageTransform from .types.pubsub import ModifyAckDeadlineRequest from .types.pubsub import ModifyPushConfigRequest from .types.pubsub import PlatformLogsSettings @@ -116,6 +119,8 @@ "GetSubscriptionRequest", "GetTopicRequest", "IngestionDataSourceSettings", + "IngestionFailureEvent", + "JavaScriptUDF", "ListSchemaRevisionsRequest", "ListSchemaRevisionsResponse", "ListSchemasRequest", @@ -131,6 +136,7 @@ "ListTopicsRequest", "ListTopicsResponse", "MessageStoragePolicy", + "MessageTransform", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", "PlatformLogsSettings", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index eda259ad5265..4fd755d91270 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1366,11 +1366,7 @@ async def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1491,11 +1487,7 @@ async def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1554,11 +1546,9 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 5a4b5a6ffa25..698aed49e4c4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -511,36 +511,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. 
- - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = PublisherClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -550,13 +520,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. """ - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or PublisherClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/README.rst b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/README.rst new file mode 100644 index 000000000000..489748f4d996 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`PublisherTransport` is the ABC for all transports. +- public child `PublisherGrpcTransport` for sync gRPC transport (defined in `grpc.py`). 
+- public child `PublisherGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BasePublisherRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `PublisherRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 800ba82ce723..45b06302db41 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -275,6 +275,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 3d98d6b5164c..6a293137a41d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -233,6 +234,9 @@ def __init__( ) # Wrap messages. This must be done after self._grpc_channel exists + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) self._prep_wrapped_messages(client_info) @property @@ -585,7 +589,7 @@ def test_iam_permissions( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.create_topic: gapic_v1.method_async.wrap_method( + self.create_topic: self._wrap_method( self.create_topic, default_retry=retries.AsyncRetry( initial=0.1, @@ -599,7 +603,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.update_topic: gapic_v1.method_async.wrap_method( + self.update_topic: self._wrap_method( self.update_topic, default_retry=retries.AsyncRetry( initial=0.1, @@ -613,7 +617,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.publish: gapic_v1.method_async.wrap_method( + self.publish: self._wrap_method( self.publish, default_retry=retries.AsyncRetry( initial=0.1, @@ -633,7 +637,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_topic: gapic_v1.method_async.wrap_method( + self.get_topic: self._wrap_method( self.get_topic, default_retry=retries.AsyncRetry( initial=0.1, @@ -649,7 +653,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_topics: gapic_v1.method_async.wrap_method( + self.list_topics: self._wrap_method( self.list_topics, default_retry=retries.AsyncRetry( initial=0.1, @@ -665,7 +669,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_topic_subscriptions: 
gapic_v1.method_async.wrap_method( + self.list_topic_subscriptions: self._wrap_method( self.list_topic_subscriptions, default_retry=retries.AsyncRetry( initial=0.1, @@ -681,7 +685,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_topic_snapshots: gapic_v1.method_async.wrap_method( + self.list_topic_snapshots: self._wrap_method( self.list_topic_snapshots, default_retry=retries.AsyncRetry( initial=0.1, @@ -697,7 +701,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_topic: gapic_v1.method_async.wrap_method( + self.delete_topic: self._wrap_method( self.delete_topic, default_retry=retries.AsyncRetry( initial=0.1, @@ -711,7 +715,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.detach_subscription: gapic_v1.method_async.wrap_method( + self.detach_subscription: self._wrap_method( self.detach_subscription, default_retry=retries.AsyncRetry( initial=0.1, @@ -725,10 +729,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + __all__ = ("PublisherGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index 4d70c3e64402..3685dd55f985 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -16,43 +16,42 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub -from .base import PublisherTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +from .rest_base import _BasePublisherRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore 
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -394,8 +393,8 @@ class PublisherRestStub: _interceptor: PublisherRestInterceptor -class PublisherRestTransport(PublisherTransport): - """REST backend transport for Publisher. +class PublisherRestTransport(_BasePublisherRestTransport): + """REST backend synchronous transport for Publisher. The service that an application uses to manipulate topics, and to send messages to a topic. @@ -405,7 +404,6 @@ class PublisherRestTransport(PublisherTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -459,21 +457,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -484,19 +473,32 @@ def __init__( self._interceptor = interceptor or PublisherRestInterceptor() self._prep_wrapped_messages(client_info) - class _CreateTopic(PublisherRestStub): + class _CreateTopic(_BasePublisherRestTransport._BaseCreateTopic, PublisherRestStub): def __hash__(self): - return hash("CreateTopic") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.CreateTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -522,45 +524,36 @@ def __call__( A topic resource. """ - http_options: List[Dict[str, str]] = [ - { - "method": "put", - "uri": "/v1/{name=projects/*/topics/*}", - "body": "*", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseCreateTopic._get_http_options() + ) request, metadata = self._interceptor.pre_create_topic(request, metadata) - pb_request = pubsub.Topic.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BasePublisherRestTransport._BaseCreateTopic._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BasePublisherRestTransport._BaseCreateTopic._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BasePublisherRestTransport._BaseCreateTopic._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - 
headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = PublisherRestTransport._CreateTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -576,19 +569,31 @@ def __call__( resp = self._interceptor.post_create_topic(resp) return resp - class _DeleteTopic(PublisherRestStub): + class _DeleteTopic(_BasePublisherRestTransport._BaseDeleteTopic, PublisherRestStub): def __hash__(self): - return hash("DeleteTopic") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.DeleteTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -610,38 +615,31 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{topic=projects/*/topics/*}", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseDeleteTopic._get_http_options() + ) request, metadata = self._interceptor.pre_delete_topic(request, metadata) - pb_request = pubsub.DeleteTopicRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BasePublisherRestTransport._BaseDeleteTopic._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BasePublisherRestTransport._BaseDeleteTopic._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = PublisherRestTransport._DeleteTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -649,19 +647,33 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DetachSubscription(PublisherRestStub): + class _DetachSubscription( + _BasePublisherRestTransport._BaseDetachSubscription, PublisherRestStub + ): def __hash__(self): - return hash("DetachSubscription") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.DetachSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -690,40 +702,29 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{subscription=projects/*/subscriptions/*}:detach", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseDetachSubscription._get_http_options() + ) request, metadata = self._interceptor.pre_detach_subscription( request, metadata ) - pb_request = pubsub.DetachSubscriptionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BasePublisherRestTransport._BaseDetachSubscription._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BasePublisherRestTransport._BaseDetachSubscription._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, 
- headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = PublisherRestTransport._DetachSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -739,19 +740,31 @@ def __call__( resp = self._interceptor.post_detach_subscription(resp) return resp - class _GetTopic(PublisherRestStub): + class _GetTopic(_BasePublisherRestTransport._BaseGetTopic, PublisherRestStub): def __hash__(self): - return hash("GetTopic") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.GetTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -777,38 +790,29 @@ def __call__( A topic resource. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{topic=projects/*/topics/*}", - }, - ] + http_options = _BasePublisherRestTransport._BaseGetTopic._get_http_options() request, metadata = self._interceptor.pre_get_topic(request, metadata) - pb_request = pubsub.GetTopicRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BasePublisherRestTransport._BaseGetTopic._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BasePublisherRestTransport._BaseGetTopic._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = PublisherRestTransport._GetTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -824,19 +828,31 @@ def __call__( resp = self._interceptor.post_get_topic(resp) return resp - class _ListTopics(PublisherRestStub): + class _ListTopics(_BasePublisherRestTransport._BaseListTopics, PublisherRestStub): def __hash__(self): - return hash("ListTopics") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if 
k not in message_dict - } + return hash("PublisherRestTransport.ListTopics") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -862,38 +878,31 @@ def __call__( Response for the ``ListTopics`` method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{project=projects/*}/topics", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseListTopics._get_http_options() + ) request, metadata = self._interceptor.pre_list_topics(request, metadata) - pb_request = pubsub.ListTopicsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BasePublisherRestTransport._BaseListTopics._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BasePublisherRestTransport._BaseListTopics._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = 
PublisherRestTransport._ListTopics._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -909,19 +918,33 @@ def __call__( resp = self._interceptor.post_list_topics(resp) return resp - class _ListTopicSnapshots(PublisherRestStub): + class _ListTopicSnapshots( + _BasePublisherRestTransport._BaseListTopicSnapshots, PublisherRestStub + ): def __hash__(self): - return hash("ListTopicSnapshots") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.ListTopicSnapshots") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -947,40 +970,29 @@ def __call__( Response for the ``ListTopicSnapshots`` method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{topic=projects/*/topics/*}/snapshots", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseListTopicSnapshots._get_http_options() + ) request, metadata = self._interceptor.pre_list_topic_snapshots( request, metadata ) - pb_request = pubsub.ListTopicSnapshotsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BasePublisherRestTransport._BaseListTopicSnapshots._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BasePublisherRestTransport._BaseListTopicSnapshots._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = PublisherRestTransport._ListTopicSnapshots._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -996,19 +1008,33 @@ def __call__( resp = self._interceptor.post_list_topic_snapshots(resp) return resp - class _ListTopicSubscriptions(PublisherRestStub): + class _ListTopicSubscriptions( + _BasePublisherRestTransport._BaseListTopicSubscriptions, PublisherRestStub + ): def __hash__(self): - return hash("ListTopicSubscriptions") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - 
@classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.ListTopicSubscriptions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1034,40 +1060,29 @@ def __call__( Response for the ``ListTopicSubscriptions`` method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{topic=projects/*/topics/*}/subscriptions", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseListTopicSubscriptions._get_http_options() + ) request, metadata = self._interceptor.pre_list_topic_subscriptions( request, metadata ) - pb_request = pubsub.ListTopicSubscriptionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BasePublisherRestTransport._BaseListTopicSubscriptions._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BasePublisherRestTransport._BaseListTopicSubscriptions._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = 
"application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = PublisherRestTransport._ListTopicSubscriptions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1083,19 +1098,32 @@ def __call__( resp = self._interceptor.post_list_topic_subscriptions(resp) return resp - class _Publish(PublisherRestStub): + class _Publish(_BasePublisherRestTransport._BasePublish, PublisherRestStub): def __hash__(self): - return hash("Publish") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.Publish") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1121,45 +1149,34 @@ def __call__( Response for the ``Publish`` method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{topic=projects/*/topics/*}:publish", - "body": "*", - }, - ] + http_options = _BasePublisherRestTransport._BasePublish._get_http_options() request, metadata = self._interceptor.pre_publish(request, metadata) - pb_request = pubsub.PublishRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BasePublisherRestTransport._BasePublish._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BasePublisherRestTransport._BasePublish._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BasePublisherRestTransport._BasePublish._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = PublisherRestTransport._Publish._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1175,19 +1192,32 @@ def __call__( resp = self._interceptor.post_publish(resp) return resp - class _UpdateTopic(PublisherRestStub): + class _UpdateTopic(_BasePublisherRestTransport._BaseUpdateTopic, 
PublisherRestStub): def __hash__(self): - return hash("UpdateTopic") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("PublisherRestTransport.UpdateTopic") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1213,45 +1243,36 @@ def __call__( A topic resource. """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{topic.name=projects/*/topics/*}", - "body": "*", - }, - ] + http_options = ( + _BasePublisherRestTransport._BaseUpdateTopic._get_http_options() + ) request, metadata = self._interceptor.pre_update_topic(request, metadata) - pb_request = pubsub.UpdateTopicRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BasePublisherRestTransport._BaseUpdateTopic._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BasePublisherRestTransport._BaseUpdateTopic._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + 
_BasePublisherRestTransport._BaseUpdateTopic._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = PublisherRestTransport._UpdateTopic._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1339,7 +1360,34 @@ def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: def get_iam_policy(self): return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _GetIamPolicy(PublisherRestStub): + class _GetIamPolicy( + _BasePublisherRestTransport._BaseGetIamPolicy, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -1363,44 +1411,31 @@ def __call__( policy_pb2.Policy: Response from GetIamPolicy method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", - }, - ] - + http_options = ( + _BasePublisherRestTransport._BaseGetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BasePublisherRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BasePublisherRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = PublisherRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1408,8 +1443,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = 
self._interceptor.post_get_iam_policy(resp) return resp @@ -1417,7 +1453,35 @@ def __call__( def set_iam_policy(self): return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _SetIamPolicy(PublisherRestStub): + class _SetIamPolicy( + _BasePublisherRestTransport._BaseSetIamPolicy, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -1441,50 +1505,36 @@ def __call__( policy_pb2.Policy: Response from SetIamPolicy method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", - "body": "*", - }, - ] - + http_options = ( + _BasePublisherRestTransport._BaseSetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = ( + _BasePublisherRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = _BasePublisherRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BasePublisherRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + response = PublisherRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1492,8 +1542,9 @@ def __call__( if response.status_code >= 400: raise 
core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) return resp @@ -1501,7 +1552,35 @@ def __call__( def test_iam_permissions(self): return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - class _TestIamPermissions(PublisherRestStub): + class _TestIamPermissions( + _BasePublisherRestTransport._BaseTestIamPermissions, PublisherRestStub + ): + def __hash__(self): + return hash("PublisherRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -1525,52 +1604,34 @@ def __call__( iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", - "body": "*", - }, - ] - + http_options = ( + _BasePublisherRestTransport._BaseTestIamPermissions._get_http_options() + ) request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = _BasePublisherRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = _BasePublisherRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BasePublisherRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + response = PublisherRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1578,8 +1639,9 @@ def __call__( if 
response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) return resp diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py new file mode 100644 index 000000000000..1fa78cdd9be4 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py @@ -0,0 +1,680 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import PublisherTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + + +class _BasePublisherRestTransport(PublisherTransport): + """Base REST backend transport for Publisher. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateTopic: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/topics/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.Topic.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseCreateTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteTopic: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{topic=projects/*/topics/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DeleteTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseDeleteTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDetachSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:detach", + 
}, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DetachSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseDetachSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetTopic: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.GetTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseGetTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTopics: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/topics", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListTopicsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseListTopics._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTopicSnapshots: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}/snapshots", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListTopicSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseListTopicSnapshots._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListTopicSubscriptions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{topic=projects/*/topics/*}/subscriptions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListTopicSubscriptionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseListTopicSubscriptions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePublish: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": 
"/v1/{topic=projects/*/topics/*}:publish", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.PublishRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BasePublish._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateTopic: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{topic.name=projects/*/topics/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.UpdateTopicRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + 
query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BasePublisherRestTransport._BaseUpdateTopic._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + return 
http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BasePublisherRestTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 9c8eecaecff0..2d160b06284d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1476,11 +1476,7 @@ async def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1600,11 +1596,7 @@ async def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1662,11 +1654,9 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index dff44944dda6..c56f8a4f35d2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -460,36 +460,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SchemaServiceClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -499,13 +469,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SchemaServiceClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/README.rst b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/README.rst new file mode 100644 index 000000000000..a0a06949e679 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SchemaServiceTransport` is the ABC for all transports. +- public child `SchemaServiceGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SchemaServiceGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSchemaServiceRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `SchemaServiceRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index 5c7f35aa8ac6..e42f9896f91f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -276,6 +276,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index 71c362436eb2..e642ed1b9bff 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -233,6 +234,9 @@ def __init__( ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) self._prep_wrapped_messages(client_info) @property @@ -596,7 +600,7 @@ def test_iam_permissions( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.create_schema: gapic_v1.method_async.wrap_method( + self.create_schema: self._wrap_method( self.create_schema, default_retry=retries.AsyncRetry( initial=0.1, @@ -610,7 +614,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_schema: gapic_v1.method_async.wrap_method( + self.get_schema: self._wrap_method( self.get_schema, default_retry=retries.AsyncRetry( initial=0.1, @@ -624,7 +628,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_schemas: gapic_v1.method_async.wrap_method( + self.list_schemas: self._wrap_method( self.list_schemas, default_retry=retries.AsyncRetry( initial=0.1, @@ -638,7 +642,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_schema_revisions: gapic_v1.method_async.wrap_method( + self.list_schema_revisions: self._wrap_method( self.list_schema_revisions, default_retry=retries.AsyncRetry( initial=0.1, @@ -652,7 +656,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.commit_schema: gapic_v1.method_async.wrap_method( + self.commit_schema: self._wrap_method( self.commit_schema, default_retry=retries.AsyncRetry( initial=0.1, @@ -666,7 +670,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.rollback_schema: gapic_v1.method_async.wrap_method( + self.rollback_schema: self._wrap_method( self.rollback_schema, 
default_retry=retries.AsyncRetry( initial=0.1, @@ -680,7 +684,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_schema_revision: gapic_v1.method_async.wrap_method( + self.delete_schema_revision: self._wrap_method( self.delete_schema_revision, default_retry=retries.AsyncRetry( initial=0.1, @@ -694,7 +698,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_schema: gapic_v1.method_async.wrap_method( + self.delete_schema: self._wrap_method( self.delete_schema, default_retry=retries.AsyncRetry( initial=0.1, @@ -708,7 +712,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.validate_schema: gapic_v1.method_async.wrap_method( + self.validate_schema: self._wrap_method( self.validate_schema, default_retry=retries.AsyncRetry( initial=0.1, @@ -722,7 +726,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.validate_message: gapic_v1.method_async.wrap_method( + self.validate_message: self._wrap_method( self.validate_message, default_retry=retries.AsyncRetry( initial=0.1, @@ -736,10 +740,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + 
return "grpc_asyncio" + __all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 953e58052e15..7cf86c8081bd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -16,30 +16,22 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -47,16 +39,20 @@ from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema -from .base import ( - SchemaServiceTransport, - DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, -) + +from .rest_base import _BaseSchemaServiceRestTransport +from .base import DEFAULT_CLIENT_INFO 
as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -431,8 +427,8 @@ class SchemaServiceRestStub: _interceptor: SchemaServiceRestInterceptor -class SchemaServiceRestTransport(SchemaServiceTransport): - """REST backend transport for SchemaService. +class SchemaServiceRestTransport(_BaseSchemaServiceRestTransport): + """REST backend synchronous transport for SchemaService. Service for doing schema-related operations. @@ -441,7 +437,6 @@ class SchemaServiceRestTransport(SchemaServiceTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -495,21 +490,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
# TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -520,19 +506,34 @@ def __init__( self._interceptor = interceptor or SchemaServiceRestInterceptor() self._prep_wrapped_messages(client_info) - class _CommitSchema(SchemaServiceRestStub): + class _CommitSchema( + _BaseSchemaServiceRestTransport._BaseCommitSchema, SchemaServiceRestStub + ): def __hash__(self): - return hash("CommitSchema") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.CommitSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -558,45 +559,32 @@ def __call__( A schema resource. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/schemas/*}:commit", - "body": "*", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseCommitSchema._get_http_options() + ) request, metadata = self._interceptor.pre_commit_schema(request, metadata) - pb_request = gp_schema.CommitSchemaRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SchemaServiceRestTransport._CommitSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -612,19 +600,34 @@ def __call__( resp = self._interceptor.post_commit_schema(resp) return resp - class _CreateSchema(SchemaServiceRestStub): + class 
_CreateSchema( + _BaseSchemaServiceRestTransport._BaseCreateSchema, SchemaServiceRestStub + ): def __hash__(self): - return hash("CreateSchema") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.CreateSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -650,45 +653,32 @@ def __call__( A schema resource. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/schemas", - "body": "schema", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseCreateSchema._get_http_options() + ) request, metadata = self._interceptor.pre_create_schema(request, metadata) - pb_request = gp_schema.CreateSchemaRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SchemaServiceRestTransport._CreateSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -704,19 +694,33 @@ def __call__( resp = self._interceptor.post_create_schema(resp) return resp - class _DeleteSchema(SchemaServiceRestStub): + class 
_DeleteSchema( + _BaseSchemaServiceRestTransport._BaseDeleteSchema, SchemaServiceRestStub + ): def __hash__(self): - return hash("DeleteSchema") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.DeleteSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -738,38 +742,27 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/schemas/*}", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_http_options() + ) request, metadata = self._interceptor.pre_delete_schema(request, metadata) - pb_request = schema.DeleteSchemaRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SchemaServiceRestTransport._DeleteSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -777,19 +770,33 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteSchemaRevision(SchemaServiceRestStub): + class _DeleteSchemaRevision( + _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision, SchemaServiceRestStub + ): def __hash__(self): - return hash("DeleteSchemaRevision") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.DeleteSchemaRevision") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -815,40 +822,29 @@ def __call__( A schema resource. """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{name=projects/*/schemas/*}:deleteRevision", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_http_options() + ) request, metadata = self._interceptor.pre_delete_schema_revision( request, metadata ) - pb_request = schema.DeleteSchemaRevisionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, 
method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SchemaServiceRestTransport._DeleteSchemaRevision._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -864,19 +860,33 @@ def __call__( resp = self._interceptor.post_delete_schema_revision(resp) return resp - class _GetSchema(SchemaServiceRestStub): + class _GetSchema( + _BaseSchemaServiceRestTransport._BaseGetSchema, SchemaServiceRestStub + ): def __hash__(self): - return hash("GetSchema") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.GetSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -902,38 +912,31 @@ def __call__( A schema resource. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/schemas/*}", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_http_options() + ) request, metadata = self._interceptor.pre_get_schema(request, metadata) - pb_request = schema.GetSchemaRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SchemaServiceRestTransport._GetSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -949,19 +952,33 @@ def __call__( resp = self._interceptor.post_get_schema(resp) return resp - class _ListSchemaRevisions(SchemaServiceRestStub): + class _ListSchemaRevisions( + _BaseSchemaServiceRestTransport._BaseListSchemaRevisions, SchemaServiceRestStub + ): def __hash__(self): - return hash("ListSchemaRevisions") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.ListSchemaRevisions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -987,40 +1004,29 @@ def __call__( Response for the ``ListSchemaRevisions`` method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{name=projects/*/schemas/*}:listRevisions", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_http_options() + ) request, metadata = self._interceptor.pre_list_schema_revisions( request, metadata ) - pb_request = schema.ListSchemaRevisionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - 
"{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SchemaServiceRestTransport._ListSchemaRevisions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1036,19 +1042,33 @@ def __call__( resp = self._interceptor.post_list_schema_revisions(resp) return resp - class _ListSchemas(SchemaServiceRestStub): + class _ListSchemas( + _BaseSchemaServiceRestTransport._BaseListSchemas, SchemaServiceRestStub + ): def __hash__(self): - return hash("ListSchemas") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.ListSchemas") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1074,38 +1094,29 @@ def __call__( Response for the ``ListSchemas`` method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{parent=projects/*}/schemas", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseListSchemas._get_http_options() + ) request, metadata = self._interceptor.pre_list_schemas(request, metadata) - pb_request = schema.ListSchemasRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSchemaServiceRestTransport._BaseListSchemas._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSchemaServiceRestTransport._BaseListSchemas._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SchemaServiceRestTransport._ListSchemas._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1121,19 +1132,34 @@ def __call__( resp = self._interceptor.post_list_schemas(resp) return resp - class _RollbackSchema(SchemaServiceRestStub): + class _RollbackSchema( + _BaseSchemaServiceRestTransport._BaseRollbackSchema, SchemaServiceRestStub + ): def __hash__(self): - return hash("RollbackSchema") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return 
{ - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.RollbackSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1159,45 +1185,32 @@ def __call__( A schema resource. """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{name=projects/*/schemas/*}:rollback", - "body": "*", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_http_options() + ) request, metadata = self._interceptor.pre_rollback_schema(request, metadata) - pb_request = schema.RollbackSchemaRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] 
= "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SchemaServiceRestTransport._RollbackSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1213,19 +1226,34 @@ def __call__( resp = self._interceptor.post_rollback_schema(resp) return resp - class _ValidateMessage(SchemaServiceRestStub): + class _ValidateMessage( + _BaseSchemaServiceRestTransport._BaseValidateMessage, SchemaServiceRestStub + ): def __hash__(self): - return hash("ValidateMessage") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.ValidateMessage") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1253,47 +1281,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/schemas:validateMessage", - "body": "*", - }, - ] + http_options = ( + 
_BaseSchemaServiceRestTransport._BaseValidateMessage._get_http_options() + ) request, metadata = self._interceptor.pre_validate_message( request, metadata ) - pb_request = schema.ValidateMessageRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSchemaServiceRestTransport._BaseValidateMessage._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSchemaServiceRestTransport._BaseValidateMessage._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSchemaServiceRestTransport._BaseValidateMessage._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SchemaServiceRestTransport._ValidateMessage._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1309,19 +1324,34 @@ def __call__( resp = self._interceptor.post_validate_message(resp) return resp - class _ValidateSchema(SchemaServiceRestStub): + class _ValidateSchema( + _BaseSchemaServiceRestTransport._BaseValidateSchema, SchemaServiceRestStub + ): def __hash__(self): - return 
hash("ValidateSchema") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SchemaServiceRestTransport.ValidateSchema") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1349,45 +1379,32 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{parent=projects/*}/schemas:validate", - "body": "*", - }, - ] + http_options = ( + _BaseSchemaServiceRestTransport._BaseValidateSchema._get_http_options() + ) request, metadata = self._interceptor.pre_validate_schema(request, metadata) - pb_request = gp_schema.ValidateSchemaRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSchemaServiceRestTransport._BaseValidateSchema._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSchemaServiceRestTransport._BaseValidateSchema._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = 
_BaseSchemaServiceRestTransport._BaseValidateSchema._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SchemaServiceRestTransport._ValidateSchema._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1485,7 +1502,34 @@ def validate_schema( def get_iam_policy(self): return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _GetIamPolicy(SchemaServiceRestStub): + class _GetIamPolicy( + _BaseSchemaServiceRestTransport._BaseGetIamPolicy, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + def __call__( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -1509,44 +1553,27 @@ def __call__( policy_pb2.Policy: Response from GetIamPolicy method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", - }, - ] - + http_options = ( + _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = SchemaServiceRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1554,8 +1581,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = 
self._interceptor.post_get_iam_policy(resp) return resp @@ -1563,7 +1591,35 @@ def __call__( def set_iam_policy(self): return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _SetIamPolicy(SchemaServiceRestStub): + class _SetIamPolicy( + _BaseSchemaServiceRestTransport._BaseSetIamPolicy, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -1587,50 +1643,32 @@ def __call__( policy_pb2.Policy: Response from SetIamPolicy method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", - "body": "*", - }, - ] - + http_options = ( + _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + response = SchemaServiceRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1638,8 +1676,9 @@ def __call__( if response.status_code >= 400: raise 
core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) return resp @@ -1647,7 +1686,35 @@ def __call__( def test_iam_permissions(self): return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - class _TestIamPermissions(SchemaServiceRestStub): + class _TestIamPermissions( + _BaseSchemaServiceRestTransport._BaseTestIamPermissions, SchemaServiceRestStub + ): + def __hash__(self): + return hash("SchemaServiceRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -1671,52 +1738,34 @@ def __call__( iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", - "body": "*", - }, - ] - + http_options = ( + _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_http_options() + ) request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + response = SchemaServiceRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1724,8 +1773,9 @@ 
def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) return resp diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py new file mode 100644 index 000000000000..a97e454d4b37 --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py @@ -0,0 +1,748 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import schema +from google.pubsub_v1.types import schema as gp_schema + + +class _BaseSchemaServiceRestTransport(SchemaServiceTransport): + """Base REST backend transport for SchemaService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. 
If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCommitSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/schemas/*}:commit", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gp_schema.CommitSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params 
= json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseCommitSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas", + "body": "schema", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gp_schema.CreateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseCreateSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + 
return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/schemas/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.DeleteSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSchemaRevision: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/schemas/*}:deleteRevision", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.DeleteSchemaRevisionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/schemas/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.GetSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseGetSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSchemaRevisions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/schemas/*}:listRevisions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, 
request): + pb_request = schema.ListSchemaRevisionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSchemas: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*}/schemas", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.ListSchemasRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseListSchemas._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseRollbackSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/schemas/*}:rollback", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.RollbackSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseValidateMessage: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas:validateMessage", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = schema.ValidateMessageRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + 
return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseValidateMessage._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseValidateSchema: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*}/schemas:validate", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gp_schema.ValidateSchemaRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSchemaServiceRestTransport._BaseValidateSchema._get_unset_required_fields( + query_params + ) + ) + + 
query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + 
def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseSchemaServiceRestTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index dcd4f0bbbce0..7f40480efae9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -2309,11 +2309,7 @@ async def 
set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2433,11 +2429,7 @@ async def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[self._client._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2495,11 +2487,9 @@ async def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self.transport._wrapped_methods[ + self._client._transport.test_iam_permissions + ] # Certain fields should be provided within the metadata header; # add these here. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 9bad804c620f..d601b0f0d365 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -515,36 +515,6 @@ def _get_universe_domain( raise ValueError("Universe Domain cannot be an empty string.") return universe_domain - @staticmethod - def _compare_universes( - client_universe: str, credentials: ga_credentials.Credentials - ) -> bool: - """Returns True iff the universe domains used by the client and credentials match. - - Args: - client_universe (str): The universe domain configured via the client options. - credentials (ga_credentials.Credentials): The credentials being used in the client. - - Returns: - bool: True iff client_universe matches the universe in credentials. - - Raises: - ValueError: when client_universe does not match the universe in credentials. - """ - - default_universe = SubscriberClient._DEFAULT_UNIVERSE - credentials_universe = getattr(credentials, "universe_domain", default_universe) - - if client_universe != credentials_universe: - raise ValueError( - "The configured universe domain " - f"({client_universe}) does not match the universe domain " - f"found in the credentials ({credentials_universe}). " - "If you haven't configured the universe domain explicitly, " - f"`{default_universe}` is the default." - ) - return True - def _validate_universe_domain(self): """Validates client's and credentials' universe domains are consistent. @@ -554,13 +524,9 @@ def _validate_universe_domain(self): Raises: ValueError: If the configured universe domain is not valid. 
""" - self._is_universe_domain_valid = ( - self._is_universe_domain_valid - or SubscriberClient._compare_universes( - self.universe_domain, self.transport._credentials - ) - ) - return self._is_universe_domain_valid + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True @property def api_endpoint(self): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/README.rst b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/README.rst new file mode 100644 index 000000000000..2df98ffe64be --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/README.rst @@ -0,0 +1,9 @@ + +transport inheritance structure +_______________________________ + +`SubscriberTransport` is the ABC for all transports. +- public child `SubscriberGrpcTransport` for sync gRPC transport (defined in `grpc.py`). +- public child `SubscriberGrpcAsyncIOTransport` for async gRPC transport (defined in `grpc_asyncio.py`). +- private child `_BaseSubscriberRestTransport` for base REST transport with inner classes `_BaseMETHOD` (defined in `rest_base.py`). +- public child `SubscriberRestTransport` for sync REST transport with inner classes `METHOD` derived from the parent's corresponding `_BaseMETHOD` classes (defined in `rest.py`). 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index c5fa183b1103..51b50a55f7da 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -378,6 +378,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), } def close(self): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 9dab4a21bd4d..de960685819d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import inspect import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -235,6 +236,9 @@ def __init__( ) # Wrap messages. 
This must be done after self._grpc_channel exists + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) self._prep_wrapped_messages(client_info) @property @@ -842,7 +846,7 @@ def test_iam_permissions( def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { - self.create_subscription: gapic_v1.method_async.wrap_method( + self.create_subscription: self._wrap_method( self.create_subscription, default_retry=retries.AsyncRetry( initial=0.1, @@ -858,7 +862,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_subscription: gapic_v1.method_async.wrap_method( + self.get_subscription: self._wrap_method( self.get_subscription, default_retry=retries.AsyncRetry( initial=0.1, @@ -874,7 +878,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.update_subscription: gapic_v1.method_async.wrap_method( + self.update_subscription: self._wrap_method( self.update_subscription, default_retry=retries.AsyncRetry( initial=0.1, @@ -888,7 +892,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_subscriptions: gapic_v1.method_async.wrap_method( + self.list_subscriptions: self._wrap_method( self.list_subscriptions, default_retry=retries.AsyncRetry( initial=0.1, @@ -904,7 +908,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_subscription: gapic_v1.method_async.wrap_method( + self.delete_subscription: self._wrap_method( self.delete_subscription, default_retry=retries.AsyncRetry( initial=0.1, @@ -918,7 +922,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.modify_ack_deadline: gapic_v1.method_async.wrap_method( + self.modify_ack_deadline: 
self._wrap_method( self.modify_ack_deadline, default_retry=retries.AsyncRetry( initial=0.1, @@ -932,7 +936,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.acknowledge: gapic_v1.method_async.wrap_method( + self.acknowledge: self._wrap_method( self.acknowledge, default_retry=retries.AsyncRetry( initial=0.1, @@ -946,7 +950,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.pull: gapic_v1.method_async.wrap_method( + self.pull: self._wrap_method( self.pull, default_retry=retries.AsyncRetry( initial=0.1, @@ -963,7 +967,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.streaming_pull: gapic_v1.method_async.wrap_method( + self.streaming_pull: self._wrap_method( self.streaming_pull, default_retry=retries.AsyncRetry( initial=0.1, @@ -981,7 +985,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=900.0, client_info=client_info, ), - self.modify_push_config: gapic_v1.method_async.wrap_method( + self.modify_push_config: self._wrap_method( self.modify_push_config, default_retry=retries.AsyncRetry( initial=0.1, @@ -995,7 +999,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.get_snapshot: gapic_v1.method_async.wrap_method( + self.get_snapshot: self._wrap_method( self.get_snapshot, default_retry=retries.AsyncRetry( initial=0.1, @@ -1011,7 +1015,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.list_snapshots: gapic_v1.method_async.wrap_method( + self.list_snapshots: self._wrap_method( self.list_snapshots, default_retry=retries.AsyncRetry( initial=0.1, @@ -1027,7 +1031,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.create_snapshot: gapic_v1.method_async.wrap_method( + self.create_snapshot: self._wrap_method( 
self.create_snapshot, default_retry=retries.AsyncRetry( initial=0.1, @@ -1041,7 +1045,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.update_snapshot: gapic_v1.method_async.wrap_method( + self.update_snapshot: self._wrap_method( self.update_snapshot, default_retry=retries.AsyncRetry( initial=0.1, @@ -1055,7 +1059,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.delete_snapshot: gapic_v1.method_async.wrap_method( + self.delete_snapshot: self._wrap_method( self.delete_snapshot, default_retry=retries.AsyncRetry( initial=0.1, @@ -1069,7 +1073,7 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), - self.seek: gapic_v1.method_async.wrap_method( + self.seek: self._wrap_method( self.seek, default_retry=retries.AsyncRetry( initial=0.1, @@ -1085,10 +1089,34 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.get_iam_policy: self._wrap_method( + self.get_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.set_iam_policy: self._wrap_method( + self.set_iam_policy, + default_timeout=None, + client_info=client_info, + ), + self.test_iam_permissions: self._wrap_method( + self.test_iam_permissions, + default_timeout=None, + client_info=client_info, + ), } + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + def close(self): return self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc_asyncio" + __all__ = ("SubscriberGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index 0b5f2ccc9609..376fd4ab30bc 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -16,43 +16,42 @@ from google.auth.transport.requests import AuthorizedSession # type: ignore import json # type: ignore -import grpc # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries from google.api_core import rest_helpers from google.api_core import rest_streaming -from google.api_core import path_template from google.api_core import gapic_v1 from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore + from requests import __version__ as requests_version import dataclasses -import re from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union import warnings -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object, None] # type: ignore - from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub -from .base import SubscriberTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +from .rest_base import _BaseSubscriberRestTransport +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - 
rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -512,8 +511,8 @@ class SubscriberRestStub: _interceptor: SubscriberRestInterceptor -class SubscriberRestTransport(SubscriberTransport): - """REST backend transport for Subscriber. +class SubscriberRestTransport(_BaseSubscriberRestTransport): + """REST backend synchronous transport for Subscriber. The service that an application uses to manipulate subscriptions and to consume messages from a subscription via the ``Pull`` method or @@ -525,7 +524,6 @@ class SubscriberRestTransport(SubscriberTransport): and call it. It sends JSON representations of protocol buffers over HTTP/1.1 - """ def __init__( @@ -579,21 +577,12 @@ def __init__( # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, client_info=client_info, always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, api_audience=api_audience, ) self._session = AuthorizedSession( @@ -604,19 +593,34 @@ def __init__( self._interceptor = interceptor or SubscriberRestInterceptor() self._prep_wrapped_messages(client_info) - class _Acknowledge(SubscriberRestStub): + class _Acknowledge( + _BaseSubscriberRestTransport._BaseAcknowledge, SubscriberRestStub + ): def __hash__(self): - return hash("Acknowledge") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return 
hash("SubscriberRestTransport.Acknowledge") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -638,45 +642,36 @@ def __call__( sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{subscription=projects/*/subscriptions/*}:acknowledge", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseAcknowledge._get_http_options() + ) request, metadata = self._interceptor.pre_acknowledge(request, metadata) - pb_request = pubsub.AcknowledgeRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseSubscriberRestTransport._BaseAcknowledge._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSubscriberRestTransport._BaseAcknowledge._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseAcknowledge._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] 
= "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._Acknowledge._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -684,19 +679,34 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _CreateSnapshot(SubscriberRestStub): + class _CreateSnapshot( + _BaseSubscriberRestTransport._BaseCreateSnapshot, SubscriberRestStub + ): def __hash__(self): - return hash("CreateSnapshot") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.CreateSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -728,45 +738,36 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "put", - "uri": "/v1/{name=projects/*/snapshots/*}", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_http_options() + ) request, metadata = self._interceptor.pre_create_snapshot(request, metadata) - pb_request = 
pubsub.CreateSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSubscriberRestTransport._BaseCreateSnapshot._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = ( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_request_body_json( + transcoded_request + ) ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._CreateSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -782,19 +783,34 @@ def __call__( resp = self._interceptor.post_create_snapshot(resp) return resp - class _CreateSubscription(SubscriberRestStub): + class _CreateSubscription( + _BaseSubscriberRestTransport._BaseCreateSubscription, SubscriberRestStub + ): def __hash__(self): - return hash("CreateSubscription") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.CreateSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -827,47 +843,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "put", - "uri": "/v1/{name=projects/*/subscriptions/*}", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseCreateSubscription._get_http_options() + ) request, metadata = self._interceptor.pre_create_subscription( request, metadata ) - pb_request = pubsub.Subscription.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSubscriberRestTransport._BaseCreateSubscription._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSubscriberRestTransport._BaseCreateSubscription._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseCreateSubscription._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request 
- headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._CreateSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -883,19 +886,33 @@ def __call__( resp = self._interceptor.post_create_subscription(resp) return resp - class _DeleteSnapshot(SubscriberRestStub): + class _DeleteSnapshot( + _BaseSubscriberRestTransport._BaseDeleteSnapshot, SubscriberRestStub + ): def __hash__(self): - return hash("DeleteSnapshot") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.DeleteSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -917,38 +934,29 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{snapshot=projects/*/snapshots/*}", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_http_options() + ) request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) - pb_request = pubsub.DeleteSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SubscriberRestTransport._DeleteSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -956,19 +964,33 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _DeleteSubscription(SubscriberRestStub): + class _DeleteSubscription( + _BaseSubscriberRestTransport._BaseDeleteSubscription, SubscriberRestStub + ): def __hash__(self): - return hash("DeleteSubscription") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def 
_get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.DeleteSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -991,40 +1013,29 @@ def __call__( sent along with the request as metadata. """ - http_options: List[Dict[str, str]] = [ - { - "method": "delete", - "uri": "/v1/{subscription=projects/*/subscriptions/*}", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseDeleteSubscription._get_http_options() + ) request, metadata = self._interceptor.pre_delete_subscription( request, metadata ) - pb_request = pubsub.DeleteSubscriptionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSubscriberRestTransport._BaseDeleteSubscription._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseDeleteSubscription._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, 
method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SubscriberRestTransport._DeleteSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1032,19 +1043,33 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _GetSnapshot(SubscriberRestStub): + class _GetSnapshot( + _BaseSubscriberRestTransport._BaseGetSnapshot, SubscriberRestStub + ): def __hash__(self): - return hash("GetSnapshot") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.GetSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1076,38 +1101,31 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{snapshot=projects/*/snapshots/*}", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_http_options() + ) request, metadata = self._interceptor.pre_get_snapshot(request, metadata) - pb_request = pubsub.GetSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri 
= transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SubscriberRestTransport._GetSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1123,19 +1141,33 @@ def __call__( resp = self._interceptor.post_get_snapshot(resp) return resp - class _GetSubscription(SubscriberRestStub): + class _GetSubscription( + _BaseSubscriberRestTransport._BaseGetSubscription, SubscriberRestStub + ): def __hash__(self): - return hash("GetSubscription") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.GetSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1166,40 +1198,29 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{subscription=projects/*/subscriptions/*}", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseGetSubscription._get_http_options() + ) request, metadata = self._interceptor.pre_get_subscription( request, metadata ) - pb_request = pubsub.GetSubscriptionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSubscriberRestTransport._BaseGetSubscription._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseGetSubscription._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SubscriberRestTransport._GetSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1215,19 +1236,33 @@ def __call__( resp = self._interceptor.post_get_subscription(resp) return resp - class 
_ListSnapshots(SubscriberRestStub): + class _ListSnapshots( + _BaseSubscriberRestTransport._BaseListSnapshots, SubscriberRestStub + ): def __hash__(self): - return hash("ListSnapshots") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.ListSnapshots") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1253,38 +1288,31 @@ def __call__( Response for the ``ListSnapshots`` method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{project=projects/*}/snapshots", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseListSnapshots._get_http_options() + ) request, metadata = self._interceptor.pre_list_snapshots(request, metadata) - pb_request = pubsub.ListSnapshotsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseSubscriberRestTransport._BaseListSnapshots._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseListSnapshots._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SubscriberRestTransport._ListSnapshots._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1300,19 +1328,33 @@ def __call__( resp = self._interceptor.post_list_snapshots(resp) return resp - class _ListSubscriptions(SubscriberRestStub): + class _ListSubscriptions( + _BaseSubscriberRestTransport._BaseListSubscriptions, SubscriberRestStub + ): def __hash__(self): - return hash("ListSubscriptions") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, 
message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.ListSubscriptions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response def __call__( self, @@ -1338,40 +1380,29 @@ def __call__( Response for the ``ListSubscriptions`` method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{project=projects/*}/subscriptions", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseListSubscriptions._get_http_options() + ) request, metadata = self._interceptor.pre_list_subscriptions( request, metadata ) - pb_request = pubsub.ListSubscriptionsRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = _BaseSubscriberRestTransport._BaseListSubscriptions._get_transcoded_request( + http_options, request + ) # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseListSubscriptions._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - 
"{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), + response = SubscriberRestTransport._ListSubscriptions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1387,19 +1418,34 @@ def __call__( resp = self._interceptor.post_list_subscriptions(resp) return resp - class _ModifyAckDeadline(SubscriberRestStub): + class _ModifyAckDeadline( + _BaseSubscriberRestTransport._BaseModifyAckDeadline, SubscriberRestStub + ): def __hash__(self): - return hash("ModifyAckDeadline") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.ModifyAckDeadline") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1422,47 +1468,34 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_http_options() + ) request, metadata = self._interceptor.pre_modify_ack_deadline( request, metadata ) - pb_request = pubsub.ModifyAckDeadlineRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._ModifyAckDeadline._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1470,19 +1503,34 @@ def __call__( if response.status_code >= 400: raise 
core_exceptions.from_http_response(response) - class _ModifyPushConfig(SubscriberRestStub): + class _ModifyPushConfig( + _BaseSubscriberRestTransport._BaseModifyPushConfig, SubscriberRestStub + ): def __hash__(self): - return hash("ModifyPushConfig") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.ModifyPushConfig") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1505,47 +1553,34 @@ def __call__( sent along with the request as metadata. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseModifyPushConfig._get_http_options() + ) request, metadata = self._interceptor.pre_modify_push_config( request, metadata ) - pb_request = pubsub.ModifyPushConfigRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSubscriberRestTransport._BaseModifyPushConfig._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSubscriberRestTransport._BaseModifyPushConfig._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseModifyPushConfig._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._ModifyPushConfig._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1553,19 +1588,32 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - 
class _Pull(SubscriberRestStub): + class _Pull(_BaseSubscriberRestTransport._BasePull, SubscriberRestStub): def __hash__(self): - return hash("Pull") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.Pull") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1591,45 +1639,34 @@ def __call__( Response for the ``Pull`` method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{subscription=projects/*/subscriptions/*}:pull", - "body": "*", - }, - ] + http_options = _BaseSubscriberRestTransport._BasePull._get_http_options() request, metadata = self._interceptor.pre_pull(request, metadata) - pb_request = pubsub.PullRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseSubscriberRestTransport._BasePull._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSubscriberRestTransport._BasePull._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BasePull._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._Pull._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1645,19 +1682,32 @@ def __call__( resp = self._interceptor.post_pull(resp) return resp - class _Seek(SubscriberRestStub): + class _Seek(_BaseSubscriberRestTransport._BaseSeek, SubscriberRestStub): def __hash__(self): - 
return hash("Seek") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.Seek") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1685,45 +1735,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{subscription=projects/*/subscriptions/*}:seek", - "body": "*", - }, - ] + http_options = _BaseSubscriberRestTransport._BaseSeek._get_http_options() request, metadata = self._interceptor.pre_seek(request, metadata) - pb_request = pubsub.SeekRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = ( + _BaseSubscriberRestTransport._BaseSeek._get_transcoded_request( + http_options, request + ) + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = _BaseSubscriberRestTransport._BaseSeek._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseSeek._get_query_params_json( + transcoded_request ) ) - 
query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._Seek._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1739,9 +1778,11 @@ def __call__( resp = self._interceptor.post_seek(resp) return resp - class _StreamingPull(SubscriberRestStub): + class _StreamingPull( + _BaseSubscriberRestTransport._BaseStreamingPull, SubscriberRestStub + ): def __hash__(self): - return hash("StreamingPull") + return hash("SubscriberRestTransport.StreamingPull") def __call__( self, @@ -1755,19 +1796,34 @@ def __call__( "Method StreamingPull is not available over REST transport" ) - class _UpdateSnapshot(SubscriberRestStub): + class _UpdateSnapshot( + _BaseSubscriberRestTransport._BaseUpdateSnapshot, SubscriberRestStub + ): def __hash__(self): - return hash("UpdateSnapshot") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.UpdateSnapshot") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1800,45 +1856,36 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{snapshot.name=projects/*/snapshots/*}", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_http_options() + ) request, metadata = self._interceptor.pre_update_snapshot(request, metadata) - pb_request = pubsub.UpdateSnapshotRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = ( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_request_body_json( + transcoded_request + ) ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, + query_params = ( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_query_params_json( + transcoded_request ) ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._UpdateSnapshot._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception @@ -1854,19 +1901,34 @@ def __call__( resp = self._interceptor.post_update_snapshot(resp) return resp - class _UpdateSubscription(SubscriberRestStub): + class _UpdateSubscription( + _BaseSubscriberRestTransport._BaseUpdateSubscription, SubscriberRestStub + ): def __hash__(self): - return hash("UpdateSubscription") - - __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} - - @classmethod - def _get_unset_required_fields(cls, message_dict): - return { - k: v - for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() - if k not in message_dict - } + return hash("SubscriberRestTransport.UpdateSubscription") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response def __call__( self, @@ -1897,47 +1959,34 @@ def __call__( """ - http_options: List[Dict[str, str]] = [ - { - "method": "patch", - "uri": "/v1/{subscription.name=projects/*/subscriptions/*}", - "body": "*", - }, - ] + http_options = ( + _BaseSubscriberRestTransport._BaseUpdateSubscription._get_http_options() + ) request, metadata = self._interceptor.pre_update_subscription( request, metadata ) - pb_request = pubsub.UpdateSubscriptionRequest.pb(request) - transcoded_request = path_template.transcode(http_options, pb_request) - - # Jsonify the request body + transcoded_request = _BaseSubscriberRestTransport._BaseUpdateSubscription._get_transcoded_request( + http_options, request + ) - body = json_format.MessageToJson( - transcoded_request["body"], use_integers_for_enums=True + body = 
_BaseSubscriberRestTransport._BaseUpdateSubscription._get_request_body_json( + transcoded_request ) - uri = transcoded_request["uri"] - method = transcoded_request["method"] # Jsonify the query params - query_params = json.loads( - json_format.MessageToJson( - transcoded_request["query_params"], - use_integers_for_enums=True, - ) + query_params = _BaseSubscriberRestTransport._BaseUpdateSubscription._get_query_params_json( + transcoded_request ) - query_params.update(self._get_unset_required_fields(query_params)) - - query_params["$alt"] = "json;enum-encoding=int" # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, + response = SubscriberRestTransport._UpdateSubscription._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2077,7 +2126,34 @@ def update_subscription( def get_iam_policy(self): return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _GetIamPolicy(SubscriberRestStub): + class _GetIamPolicy( + _BaseSubscriberRestTransport._BaseGetIamPolicy, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) 
+ return response + def __call__( self, request: iam_policy_pb2.GetIamPolicyRequest, @@ -2101,44 +2177,31 @@ def __call__( policy_pb2.Policy: Response from GetIamPolicy method. """ - http_options: List[Dict[str, str]] = [ - { - "method": "get", - "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", - }, - { - "method": "get", - "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", - }, - ] - + http_options = ( + _BaseSubscriberRestTransport._BaseGetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) - - uri = transcoded_request["uri"] - method = transcoded_request["method"] + transcoded_request = ( + _BaseSubscriberRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseSubscriberRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), + response = SubscriberRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2146,8 +2209,9 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = 
response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) return resp @@ -2155,7 +2219,35 @@ def __call__( def set_iam_policy(self): return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore - class _SetIamPolicy(SubscriberRestStub): + class _SetIamPolicy( + _BaseSubscriberRestTransport._BaseSetIamPolicy, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: iam_policy_pb2.SetIamPolicyRequest, @@ -2179,50 +2271,38 @@ def __call__( policy_pb2.Policy: Response from SetIamPolicy method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", - "body": "*", - }, - ] - + http_options = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_http_options() + ) request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = ( + _BaseSubscriberRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + response = SubscriberRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2230,8 +2310,9 @@ def __call__( if response.status_code >= 400: raise 
core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = policy_pb2.Policy() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) return resp @@ -2239,7 +2320,35 @@ def __call__( def test_iam_permissions(self): return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore - class _TestIamPermissions(SubscriberRestStub): + class _TestIamPermissions( + _BaseSubscriberRestTransport._BaseTestIamPermissions, SubscriberRestStub + ): + def __hash__(self): + return hash("SubscriberRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + def __call__( self, request: iam_policy_pb2.TestIamPermissionsRequest, @@ -2263,52 +2372,34 @@ def __call__( iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
""" - http_options: List[Dict[str, str]] = [ - { - "method": "post", - "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", - "body": "*", - }, - { - "method": "post", - "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", - "body": "*", - }, - ] - + http_options = ( + _BaseSubscriberRestTransport._BaseTestIamPermissions._get_http_options() + ) request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) - request_kwargs = json_format.MessageToDict(request) - transcoded_request = path_template.transcode(http_options, **request_kwargs) + transcoded_request = _BaseSubscriberRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) - body = json.dumps(transcoded_request["body"]) - uri = transcoded_request["uri"] - method = transcoded_request["method"] + body = _BaseSubscriberRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) # Jsonify the query params - query_params = json.loads(json.dumps(transcoded_request["query_params"])) + query_params = _BaseSubscriberRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) # Send the request - headers = dict(metadata) - headers["Content-Type"] = "application/json" - - response = getattr(self._session, method)( - "{host}{uri}".format(host=self._host, uri=uri), - timeout=timeout, - headers=headers, - params=rest_helpers.flatten_query_params(query_params), - data=body, + response = SubscriberRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2316,8 +2407,9 @@ def __call__( if 
response.status_code >= 400: raise core_exceptions.from_http_response(response) + content = response.content.decode("utf-8") resp = iam_policy_pb2.TestIamPermissionsResponse() - resp = json_format.Parse(response.content.decode("utf-8"), resp) + resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) return resp diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py new file mode 100644 index 000000000000..6626a04c1b7f --- /dev/null +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py @@ -0,0 +1,1026 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +from google.api_core import path_template +from google.api_core import gapic_v1 + +from google.protobuf import json_format +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from .base import SubscriberTransport, DEFAULT_CLIENT_INFO + +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.pubsub_v1.types import pubsub + + +class _BaseSubscriberRestTransport(SubscriberTransport): + """Base REST backend transport for Subscriber. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "pubsub.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'pubsub.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseAcknowledge: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:acknowledge", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.AcknowledgeRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseAcknowledge._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/snapshots/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.CreateSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseCreateSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + 
for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "put", + "uri": "/v1/{name=projects/*/subscriptions/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.Subscription.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseCreateSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{snapshot=projects/*/snapshots/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DeleteSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + 
query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{subscription=projects/*/subscriptions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.DeleteSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseDeleteSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ 
+ { + "method": "get", + "uri": "/v1/{snapshot=projects/*/snapshots/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.GetSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseGetSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{subscription=projects/*/subscriptions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.GetSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseGetSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSnapshots: + def __hash__(self): # pragma: NO 
COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/snapshots", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseListSnapshots._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListSubscriptions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{project=projects/*}/subscriptions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ListSubscriptionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params 
= json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseListSubscriptions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseModifyAckDeadline: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyAckDeadline", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ModifyAckDeadlineRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseModifyPushConfig: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:modifyPushConfig", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.ModifyPushConfigRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseModifyPushConfig._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BasePull: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:pull", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.PullRequest.pb(request) + transcoded_request = path_template.transcode(http_options, 
pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BasePull._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSeek: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{subscription=projects/*/subscriptions/*}:seek", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.SeekRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseSeek._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = 
"json;enum-encoding=int" + return query_params + + class _BaseStreamingPull: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseUpdateSnapshot: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{snapshot.name=projects/*/snapshots/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.UpdateSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateSubscription: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + 
def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{subscription.name=projects/*/subscriptions/*}", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = pubsub.UpdateSubscriptionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseSubscriberRestTransport._BaseUpdateSubscription._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{resource=projects/*/topics/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/subscriptions/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/snapshots/*}:getIamPolicy", + }, + { + "method": "get", + "uri": "/v1/{resource=projects/*/schemas/*}:getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = 
json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:setIamPolicy", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:setIamPolicy", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{resource=projects/*/subscriptions/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/topics/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/snapshots/*}:testIamPermissions", + "body": "*", + }, + { + "method": "post", + "uri": "/v1/{resource=projects/*/schemas/*}:testIamPermissions", + "body": "*", + }, + ] + return http_options + + @staticmethod 
+ def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseSubscriberRestTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 6feefc154d3b..5f2a260b00f2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -31,6 +31,8 @@ GetSubscriptionRequest, GetTopicRequest, IngestionDataSourceSettings, + IngestionFailureEvent, + JavaScriptUDF, ListSnapshotsRequest, ListSnapshotsResponse, ListSubscriptionsRequest, @@ -42,6 +44,7 @@ ListTopicSubscriptionsRequest, ListTopicSubscriptionsResponse, MessageStoragePolicy, + MessageTransform, ModifyAckDeadlineRequest, ModifyPushConfigRequest, PlatformLogsSettings, @@ -110,6 +113,8 @@ "GetSubscriptionRequest", "GetTopicRequest", "IngestionDataSourceSettings", + "IngestionFailureEvent", + "JavaScriptUDF", "ListSnapshotsRequest", "ListSnapshotsResponse", "ListSubscriptionsRequest", @@ -121,6 +126,7 @@ "ListTopicSubscriptionsRequest", "ListTopicSubscriptionsResponse", "MessageStoragePolicy", + "MessageTransform", "ModifyAckDeadlineRequest", "ModifyPushConfigRequest", "PlatformLogsSettings", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index d40d7c24ea85..5f15e445b82c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -32,6 +32,9 @@ "SchemaSettings", "IngestionDataSourceSettings", "PlatformLogsSettings", + "IngestionFailureEvent", + "JavaScriptUDF", + "MessageTransform", "Topic", "PubsubMessage", "GetTopicRequest", @@ -176,6 +179,18 @@ class IngestionDataSourceSettings(proto.Message): cloud_storage (google.pubsub_v1.types.IngestionDataSourceSettings.CloudStorage): Optional. Cloud Storage. + This field is a member of `oneof`_ ``source``. + azure_event_hubs (google.pubsub_v1.types.IngestionDataSourceSettings.AzureEventHubs): + Optional. Azure Event Hubs. + + This field is a member of `oneof`_ ``source``. + aws_msk (google.pubsub_v1.types.IngestionDataSourceSettings.AwsMsk): + Optional. Amazon MSK. + + This field is a member of `oneof`_ ``source``. + confluent_cloud (google.pubsub_v1.types.IngestionDataSourceSettings.ConfluentCloud): + Optional. Confluent Cloud. + This field is a member of `oneof`_ ``source``. platform_logs_settings (google.pubsub_v1.types.PlatformLogsSettings): Optional. Platform Logs settings. If unset, @@ -432,6 +447,268 @@ class PubSubAvroFormat(proto.Message): number=9, ) + class AzureEventHubs(proto.Message): + r"""Ingestion settings for Azure Event Hubs. + + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.AzureEventHubs.State): + Output only. An output-only field that + indicates the state of the Event Hubs ingestion + source. + resource_group (str): + Optional. Name of the resource group within + the azure subscription. + namespace (str): + Optional. The name of the Event Hubs + namespace. + event_hub (str): + Optional. The name of the Event Hub. + client_id (str): + Optional. The client id of the Azure + application that is being used to authenticate + Pub/Sub. + tenant_id (str): + Optional. The tenant id of the Azure + application that is being used to authenticate + Pub/Sub. + subscription_id (str): + Optional. The Azure subscription id. 
+ gcp_service_account (str): + Optional. The GCP service account to be used + for Federated Identity authentication. + """ + + class State(proto.Enum): + r"""Possible states for managed ingestion from Event Hubs. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + EVENT_HUBS_PERMISSION_DENIED (2): + Permission denied encountered while consuming data from + Event Hubs. This can happen when ``client_id``, or + ``tenant_id`` are invalid. Or the right permissions haven't + been granted. + PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while + publishing to the topic. + NAMESPACE_NOT_FOUND (4): + The provided Event Hubs namespace couldn't be + found. + EVENT_HUB_NOT_FOUND (5): + The provided Event Hub couldn't be found. + SUBSCRIPTION_NOT_FOUND (6): + The provided Event Hubs subscription couldn't + be found. + RESOURCE_GROUP_NOT_FOUND (7): + The provided Event Hubs resource group + couldn't be found. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + EVENT_HUBS_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + NAMESPACE_NOT_FOUND = 4 + EVENT_HUB_NOT_FOUND = 5 + SUBSCRIPTION_NOT_FOUND = 6 + RESOURCE_GROUP_NOT_FOUND = 7 + + state: "IngestionDataSourceSettings.AzureEventHubs.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.AzureEventHubs.State", + ) + resource_group: str = proto.Field( + proto.STRING, + number=2, + ) + namespace: str = proto.Field( + proto.STRING, + number=3, + ) + event_hub: str = proto.Field( + proto.STRING, + number=4, + ) + client_id: str = proto.Field( + proto.STRING, + number=5, + ) + tenant_id: str = proto.Field( + proto.STRING, + number=6, + ) + subscription_id: str = proto.Field( + proto.STRING, + number=7, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=8, + ) + + class AwsMsk(proto.Message): + r"""Ingestion settings for Amazon MSK. 
+ + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.AwsMsk.State): + Output only. An output-only field that + indicates the state of the Amazon MSK ingestion + source. + cluster_arn (str): + Required. The Amazon Resource Name (ARN) that + uniquely identifies the cluster. + topic (str): + Required. The name of the topic in the Amazon + MSK cluster that Pub/Sub will import from. + aws_role_arn (str): + Required. AWS role ARN to be used for + Federated Identity authentication with Amazon + MSK. Check the Pub/Sub docs for how to set up + this role and the required permissions that need + to be attached to it. + gcp_service_account (str): + Required. The GCP service account to be used for Federated + Identity authentication with Amazon MSK (via a + ``AssumeRoleWithWebIdentity`` call for the provided role). + The ``aws_role_arn`` must be set up with + ``accounts.google.com:sub`` equals to this service account + number. + """ + + class State(proto.Enum): + r"""Possible states for managed ingestion from Amazon MSK. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + MSK_PERMISSION_DENIED (2): + Permission denied encountered while consuming + data from Amazon MSK. + PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while + publishing to the topic. + CLUSTER_NOT_FOUND (4): + The provided MSK cluster wasn't found. + TOPIC_NOT_FOUND (5): + The provided topic wasn't found. 
+ """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + MSK_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + CLUSTER_NOT_FOUND = 4 + TOPIC_NOT_FOUND = 5 + + state: "IngestionDataSourceSettings.AwsMsk.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.AwsMsk.State", + ) + cluster_arn: str = proto.Field( + proto.STRING, + number=2, + ) + topic: str = proto.Field( + proto.STRING, + number=3, + ) + aws_role_arn: str = proto.Field( + proto.STRING, + number=4, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=5, + ) + + class ConfluentCloud(proto.Message): + r"""Ingestion settings for Confluent Cloud. + + Attributes: + state (google.pubsub_v1.types.IngestionDataSourceSettings.ConfluentCloud.State): + Output only. An output-only field that + indicates the state of the Confluent Cloud + ingestion source. + bootstrap_server (str): + Required. The address of the bootstrap + server. The format is url:port. + cluster_id (str): + Required. The id of the cluster. + topic (str): + Required. The name of the topic in the + Confluent Cloud cluster that Pub/Sub will import + from. + identity_pool_id (str): + Required. The id of the identity pool to be + used for Federated Identity authentication with + Confluent Cloud. See + https://docs.confluent.io/cloud/current/security/authenticate/workload-identities/identity-providers/oauth/identity-pools.html#add-oauth-identity-pools. + gcp_service_account (str): + Required. The GCP service account to be used for Federated + Identity authentication with ``identity_pool_id``. + """ + + class State(proto.Enum): + r"""Possible states for managed ingestion from Confluent Cloud. + + Values: + STATE_UNSPECIFIED (0): + Default value. This value is unused. + ACTIVE (1): + Ingestion is active. + CONFLUENT_CLOUD_PERMISSION_DENIED (2): + Permission denied encountered while consuming + data from Confluent Cloud. 
+ PUBLISH_PERMISSION_DENIED (3): + Permission denied encountered while + publishing to the topic. + UNREACHABLE_BOOTSTRAP_SERVER (4): + The provided bootstrap server address is + unreachable. + CLUSTER_NOT_FOUND (5): + The provided cluster wasn't found. + TOPIC_NOT_FOUND (6): + The provided topic wasn't found. + """ + STATE_UNSPECIFIED = 0 + ACTIVE = 1 + CONFLUENT_CLOUD_PERMISSION_DENIED = 2 + PUBLISH_PERMISSION_DENIED = 3 + UNREACHABLE_BOOTSTRAP_SERVER = 4 + CLUSTER_NOT_FOUND = 5 + TOPIC_NOT_FOUND = 6 + + state: "IngestionDataSourceSettings.ConfluentCloud.State" = proto.Field( + proto.ENUM, + number=1, + enum="IngestionDataSourceSettings.ConfluentCloud.State", + ) + bootstrap_server: str = proto.Field( + proto.STRING, + number=2, + ) + cluster_id: str = proto.Field( + proto.STRING, + number=3, + ) + topic: str = proto.Field( + proto.STRING, + number=4, + ) + identity_pool_id: str = proto.Field( + proto.STRING, + number=5, + ) + gcp_service_account: str = proto.Field( + proto.STRING, + number=6, + ) + aws_kinesis: AwsKinesis = proto.Field( proto.MESSAGE, number=1, @@ -444,6 +721,24 @@ class PubSubAvroFormat(proto.Message): oneof="source", message=CloudStorage, ) + azure_event_hubs: AzureEventHubs = proto.Field( + proto.MESSAGE, + number=3, + oneof="source", + message=AzureEventHubs, + ) + aws_msk: AwsMsk = proto.Field( + proto.MESSAGE, + number=5, + oneof="source", + message=AwsMsk, + ) + confluent_cloud: ConfluentCloud = proto.Field( + proto.MESSAGE, + number=6, + oneof="source", + message=ConfluentCloud, + ) platform_logs_settings: "PlatformLogsSettings" = proto.Field( proto.MESSAGE, number=4, @@ -495,6 +790,386 @@ class Severity(proto.Enum): ) +class IngestionFailureEvent(proto.Message): + r"""Payload of the Platform Log entry sent when a failure is + encountered while ingesting. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + topic (str): + Required. Name of the import topic. Format is: + projects/{project_name}/topics/{topic_name}. + error_message (str): + Required. Error details explaining why + ingestion to Pub/Sub has failed. + cloud_storage_failure (google.pubsub_v1.types.IngestionFailureEvent.CloudStorageFailure): + Optional. Failure when ingesting from Cloud + Storage. + + This field is a member of `oneof`_ ``failure``. + aws_msk_failure (google.pubsub_v1.types.IngestionFailureEvent.AwsMskFailureReason): + Optional. Failure when ingesting from Amazon + MSK. + + This field is a member of `oneof`_ ``failure``. + azure_event_hubs_failure (google.pubsub_v1.types.IngestionFailureEvent.AzureEventHubsFailureReason): + Optional. Failure when ingesting from Azure + Event Hubs. + + This field is a member of `oneof`_ ``failure``. + confluent_cloud_failure (google.pubsub_v1.types.IngestionFailureEvent.ConfluentCloudFailureReason): + Optional. Failure when ingesting from + Confluent Cloud. + + This field is a member of `oneof`_ ``failure``. + """ + + class ApiViolationReason(proto.Message): + r"""Specifies the reason why some data may have been left out of the + desired Pub/Sub message due to the API message limits + (https://cloud.google.com/pubsub/quotas#resource_limits). For + example, when the number of attributes is larger than 100, the + number of attributes is truncated to 100 to respect the limit on the + attribute count. Other attribute limits are treated similarly. When + the size of the desired message would've been larger than 10MB, the + message won't be published at all, and ingestion of the subsequent + messages will proceed as normal. + + """ + + class AvroFailureReason(proto.Message): + r"""Set when an Avro file is unsupported or its format is not + valid. 
When this occurs, one or more Avro objects won't be + ingested. + + """ + + class CloudStorageFailure(proto.Message): + r"""Failure when ingesting from a Cloud Storage source. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + bucket (str): + Optional. Name of the Cloud Storage bucket + used for ingestion. + object_name (str): + Optional. Name of the Cloud Storage object + which contained the section that couldn't be + ingested. + object_generation (int): + Optional. Generation of the Cloud Storage + object which contained the section that couldn't + be ingested. + avro_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.AvroFailureReason): + Optional. Failure encountered when parsing an + Avro file. + + This field is a member of `oneof`_ ``reason``. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. + """ + + bucket: str = proto.Field( + proto.STRING, + number=1, + ) + object_name: str = proto.Field( + proto.STRING, + number=2, + ) + object_generation: int = proto.Field( + proto.INT64, + number=3, + ) + avro_failure_reason: "IngestionFailureEvent.AvroFailureReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.AvroFailureReason", + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + + class AwsMskFailureReason(proto.Message): + r"""Failure when ingesting from an Amazon MSK source. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cluster_arn (str): + Optional. The ARN of the cluster of the topic + being ingested from. + kafka_topic (str): + Optional. The name of the Kafka topic being + ingested from. + partition_id (int): + Optional. The partition ID of the message + that failed to be ingested. + offset (int): + Optional. The offset within the partition of + the message that failed to be ingested. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. + """ + + cluster_arn: str = proto.Field( + proto.STRING, + number=1, + ) + kafka_topic: str = proto.Field( + proto.STRING, + number=2, + ) + partition_id: int = proto.Field( + proto.INT64, + number=3, + ) + offset: int = proto.Field( + proto.INT64, + number=4, + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + + class AzureEventHubsFailureReason(proto.Message): + r"""Failure when ingesting from an Azure Event Hubs source. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + namespace (str): + Optional. The namespace containing the event + hub being ingested from. + event_hub (str): + Optional. The name of the event hub being + ingested from. + partition_id (int): + Optional. The partition ID of the message + that failed to be ingested. + offset (int): + Optional. The offset within the partition of + the message that failed to be ingested. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. 
+ + This field is a member of `oneof`_ ``reason``. + """ + + namespace: str = proto.Field( + proto.STRING, + number=1, + ) + event_hub: str = proto.Field( + proto.STRING, + number=2, + ) + partition_id: int = proto.Field( + proto.INT64, + number=3, + ) + offset: int = proto.Field( + proto.INT64, + number=4, + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + + class ConfluentCloudFailureReason(proto.Message): + r"""Failure when ingesting from a Confluent Cloud source. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cluster_id (str): + Optional. The cluster ID containing the topic + being ingested from. + kafka_topic (str): + Optional. The name of the Kafka topic being + ingested from. + partition_id (int): + Optional. The partition ID of the message + that failed to be ingested. + offset (int): + Optional. The offset within the partition of + the message that failed to be ingested. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The Pub/Sub API limits prevented + the desired message from being published. + + This field is a member of `oneof`_ ``reason``. 
+ """ + + cluster_id: str = proto.Field( + proto.STRING, + number=1, + ) + kafka_topic: str = proto.Field( + proto.STRING, + number=2, + ) + partition_id: int = proto.Field( + proto.INT64, + number=3, + ) + offset: int = proto.Field( + proto.INT64, + number=4, + ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + error_message: str = proto.Field( + proto.STRING, + number=2, + ) + cloud_storage_failure: CloudStorageFailure = proto.Field( + proto.MESSAGE, + number=3, + oneof="failure", + message=CloudStorageFailure, + ) + aws_msk_failure: AwsMskFailureReason = proto.Field( + proto.MESSAGE, + number=4, + oneof="failure", + message=AwsMskFailureReason, + ) + azure_event_hubs_failure: AzureEventHubsFailureReason = proto.Field( + proto.MESSAGE, + number=5, + oneof="failure", + message=AzureEventHubsFailureReason, + ) + confluent_cloud_failure: ConfluentCloudFailureReason = proto.Field( + proto.MESSAGE, + number=6, + oneof="failure", + message=ConfluentCloudFailureReason, + ) + + +class JavaScriptUDF(proto.Message): + r"""User-defined JavaScript function that can transform or filter + a Pub/Sub message. + + Attributes: + function_name (str): + Required. Name of the JavasScript function + that should applied to Pub/Sub messages. + code (str): + Required. JavaScript code that contains a function + ``function_name`` with the below signature: + + + :: + + // /** + // * Transforms a Pub/Sub message. + // + // * @return {(Object)>|null)} - To + // * filter a message, return `null`. To transform a message return a map + // * with the following keys: + // * - (required) 'data' : {string} + // * - (optional) 'attributes' : {Object} + // * Returning empty `attributes` will remove all attributes from the + // * message. + // * + // * @param {(Object)>} Pub/Sub + // * message. 
Keys: + // * - (required) 'data' : {string} + // * - (required) 'attributes' : {Object} + // * + // * @param {Object} metadata - Pub/Sub message metadata. + // * Keys: + // * - (required) 'message_id' : {string} + // * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format + // * - (optional) 'ordering_key': {string} + // */ + // + // function (message, metadata) { + // } + + """ + + function_name: str = proto.Field( + proto.STRING, + number=1, + ) + code: str = proto.Field( + proto.STRING, + number=2, + ) + + +class MessageTransform(proto.Message): + r"""All supported message transforms types. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + javascript_udf (google.pubsub_v1.types.JavaScriptUDF): + Optional. JavaScript User Defined Function. If multiple + JavaScriptUDF's are specified on a resource, each must have + a unique ``function_name``. + + This field is a member of `oneof`_ ``transform``. + enabled (bool): + Optional. If set to true, the transform is enabled. If + false, the transform is disabled and will not be applied to + messages. Defaults to ``true``. + """ + + javascript_udf: "JavaScriptUDF" = proto.Field( + proto.MESSAGE, + number=2, + oneof="transform", + message="JavaScriptUDF", + ) + enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class Topic(proto.Message): r"""A topic resource. @@ -547,6 +1222,10 @@ class Topic(proto.Message): ingestion_data_source_settings (google.pubsub_v1.types.IngestionDataSourceSettings): Optional. Settings for ingestion from a data source into this topic. + message_transforms (MutableSequence[google.pubsub_v1.types.MessageTransform]): + Optional. Transforms to be applied to + messages published to the topic. Transforms are + applied in the order specified. 
""" class State(proto.Enum): @@ -610,6 +1289,11 @@ class State(proto.Enum): number=10, message="IngestionDataSourceSettings", ) + message_transforms: MutableSequence["MessageTransform"] = proto.RepeatedField( + proto.MESSAGE, + number=13, + message="MessageTransform", + ) class PubsubMessage(proto.Message): @@ -1138,6 +1822,11 @@ class Subscription(proto.Message): Output only. Information about the associated Analytics Hub subscription. Only set if the subscritpion is created by Analytics Hub. + message_transforms (MutableSequence[google.pubsub_v1.types.MessageTransform]): + Optional. Transforms to be applied to + messages before they are delivered to + subscribers. Transforms are applied in the order + specified. """ class State(proto.Enum): @@ -1160,8 +1849,8 @@ class State(proto.Enum): RESOURCE_ERROR = 2 class AnalyticsHubSubscriptionInfo(proto.Message): - r"""Information about an associated Analytics Hub subscription - (https://cloud.google.com/bigquery/docs/analytics-hub-manage-subscriptions). + r"""Information about an associated `Analytics Hub + subscription `__. 
Attributes: listing (str): @@ -1271,6 +1960,11 @@ class AnalyticsHubSubscriptionInfo(proto.Message): number=23, message=AnalyticsHubSubscriptionInfo, ) + message_transforms: MutableSequence["MessageTransform"] = proto.RepeatedField( + proto.MESSAGE, + number=25, + message="MessageTransform", + ) class RetryPolicy(proto.Message): diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index f0604091931d..d66015ac4b2f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.27.3" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 543f7e051da4..afcf7a8b9d08 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -43,8 +43,8 @@ class pubsubCallTransformer(cst.CSTTransformer): 'commit_schema': ('name', 'schema', ), 'create_schema': ('parent', 'schema', 'schema_id', ), 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 'analytics_hub_subscription_info', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 
'message_retention_duration', 'state', 'ingestion_data_source_settings', ), + 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 'analytics_hub_subscription_info', 'message_transforms', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', 'state', 'ingestion_data_source_settings', 'message_transforms', ), 'delete_schema': ('name', ), 'delete_schema_revision': ('name', 'revision_id', ), 'delete_snapshot': ('snapshot', ), diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 5af39dbf8421..423df44330d8 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -62,10 +69,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + 
chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -280,86 +301,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (PublisherClient, transports.PublisherGrpcTransport, "grpc"), - (PublisherClient, transports.PublisherRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. 
- channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. - client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. 
- api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1129,25 +1070,6 @@ def test_create_topic(request_type, transport: str = "grpc"): assert response.state == pubsub.Topic.State.ACTIVE -def test_create_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_topic), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() - - def test_create_topic_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1213,32 +1135,6 @@ def test_create_topic_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_topic_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_topic), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Topic( - name="name_value", - kms_key_name="kms_key_name_value", - satisfies_pzs=True, - state=pubsub.Topic.State.ACTIVE, - ) - ) - response = await client.create_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Topic() - - @pytest.mark.asyncio async def test_create_topic_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1247,7 +1143,7 @@ async def test_create_topic_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1286,7 +1182,7 @@ async def test_create_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.Topic ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1358,7 +1254,7 @@ def test_create_topic_field_headers(): @pytest.mark.asyncio async def test_create_topic_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value 
that is part of the HTTP/1.1 URI should be sent as @@ -1426,7 +1322,7 @@ def test_create_topic_flattened_error(): @pytest.mark.asyncio async def test_create_topic_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1453,7 +1349,7 @@ async def test_create_topic_flattened_async(): @pytest.mark.asyncio async def test_create_topic_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1507,25 +1403,6 @@ def test_update_topic(request_type, transport: str = "grpc"): assert response.state == pubsub.Topic.State.ACTIVE -def test_update_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_topic), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() - - def test_update_topic_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1585,32 +1462,6 @@ def test_update_topic_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_topic_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_topic), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Topic( - name="name_value", - kms_key_name="kms_key_name_value", - satisfies_pzs=True, - state=pubsub.Topic.State.ACTIVE, - ) - ) - response = await client.update_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateTopicRequest() - - @pytest.mark.asyncio async def test_update_topic_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1619,7 +1470,7 @@ async def test_update_topic_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1658,7 +1509,7 @@ async def test_update_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateTopicRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1730,7 +1581,7 @@ def test_update_topic_field_headers(): @pytest.mark.asyncio async def test_update_topic_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1803,7 +1654,7 @@ def test_update_topic_flattened_error(): @pytest.mark.asyncio async def test_update_topic_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1834,7 +1685,7 @@ async def test_update_topic_flattened_async(): @pytest.mark.asyncio async def test_update_topic_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1883,25 +1734,6 @@ def test_publish(request_type, transport: str = "grpc"): assert response.message_ids == ["message_ids_value"] -def test_publish_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.publish), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.publish() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() - - def test_publish_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -1965,36 +1797,13 @@ def test_publish_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_publish_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.publish), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.PublishResponse( - message_ids=["message_ids_value"], - ) - ) - response = await client.publish() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PublishRequest() - - @pytest.mark.asyncio async def test_publish_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2033,7 +1842,7 @@ async def test_publish_async( transport: str = "grpc_asyncio", request_type=pubsub.PublishRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2099,7 +1908,7 @@ def test_publish_field_headers(): @pytest.mark.asyncio async def test_publish_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2174,7 +1983,7 @@ def 
test_publish_flattened_error(): @pytest.mark.asyncio async def test_publish_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2207,7 +2016,7 @@ async def test_publish_flattened_async(): @pytest.mark.asyncio async def test_publish_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2262,25 +2071,6 @@ def test_get_topic(request_type, transport: str = "grpc"): assert response.state == pubsub.Topic.State.ACTIVE -def test_get_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_topic), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() - - def test_get_topic_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2344,39 +2134,13 @@ def test_get_topic_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_topic_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_topic), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Topic( - name="name_value", - kms_key_name="kms_key_name_value", - satisfies_pzs=True, - state=pubsub.Topic.State.ACTIVE, - ) - ) - response = await client.get_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetTopicRequest() - - @pytest.mark.asyncio async def test_get_topic_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2415,7 +2179,7 @@ async def test_get_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.GetTopicRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2487,7 +2251,7 @@ def test_get_topic_field_headers(): @pytest.mark.asyncio async def test_get_topic_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2555,7 +2319,7 @@ def test_get_topic_flattened_error(): @pytest.mark.asyncio async def test_get_topic_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the 
actual call within the gRPC stub, and fake the request. @@ -2582,7 +2346,7 @@ async def test_get_topic_flattened_async(): @pytest.mark.asyncio async def test_get_topic_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2630,25 +2394,6 @@ def test_list_topics(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_topics_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_topics), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_topics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() - - def test_list_topics_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2714,29 +2459,6 @@ def test_list_topics_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_topics_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_topics), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListTopicsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_topics() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicsRequest() - - @pytest.mark.asyncio async def test_list_topics_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2745,7 +2467,7 @@ async def test_list_topics_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2784,7 +2506,7 @@ async def test_list_topics_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicsRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2850,7 +2572,7 @@ def test_list_topics_field_headers(): @pytest.mark.asyncio async def test_list_topics_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2920,7 +2642,7 @@ def test_list_topics_flattened_error(): @pytest.mark.asyncio async def test_list_topics_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2949,7 +2671,7 @@ async def test_list_topics_flattened_async(): @pytest.mark.asyncio async def test_list_topics_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3059,7 +2781,7 @@ def test_list_topics_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topics_async_pager(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3109,7 +2831,7 @@ async def test_list_topics_async_pager(): @pytest.mark.asyncio async def test_list_topics_async_pages(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3195,27 +2917,6 @@ def test_list_topic_subscriptions(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_topic_subscriptions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_topic_subscriptions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() - - def test_list_topic_subscriptions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3288,32 +2989,6 @@ def test_list_topic_subscriptions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_topic_subscriptions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_subscriptions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListTopicSubscriptionsResponse( - subscriptions=["subscriptions_value"], - next_page_token="next_page_token_value", - ) - ) - response = await client.list_topic_subscriptions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSubscriptionsRequest() - - @pytest.mark.asyncio async def test_list_topic_subscriptions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3322,7 +2997,7 @@ async def test_list_topic_subscriptions_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3361,7 +3036,7 @@ async def test_list_topic_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSubscriptionsRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3433,7 +3108,7 @@ def test_list_topic_subscriptions_field_headers(): @pytest.mark.asyncio async def test_list_topic_subscriptions_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3507,7 +3182,7 @@ def test_list_topic_subscriptions_flattened_error(): @pytest.mark.asyncio async def test_list_topic_subscriptions_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3538,7 +3213,7 @@ async def test_list_topic_subscriptions_flattened_async(): @pytest.mark.asyncio async def test_list_topic_subscriptions_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3654,7 +3329,7 @@ def test_list_topic_subscriptions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pager(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3706,7 +3381,7 @@ async def test_list_topic_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_topic_subscriptions_async_pages(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3794,27 +3469,6 @@ def test_list_topic_snapshots(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_topic_snapshots_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_topic_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() - - def test_list_topic_snapshots_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3886,32 +3540,6 @@ def test_list_topic_snapshots_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_topic_snapshots_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_topic_snapshots), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListTopicSnapshotsResponse( - snapshots=["snapshots_value"], - next_page_token="next_page_token_value", - ) - ) - response = await client.list_topic_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListTopicSnapshotsRequest() - - @pytest.mark.asyncio async def test_list_topic_snapshots_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3920,7 +3548,7 @@ async def test_list_topic_snapshots_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3959,7 +3587,7 @@ async def test_list_topic_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListTopicSnapshotsRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4031,7 +3659,7 @@ def test_list_topic_snapshots_field_headers(): @pytest.mark.asyncio async def test_list_topic_snapshots_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4105,7 +3733,7 @@ def test_list_topic_snapshots_flattened_error(): @pytest.mark.asyncio async def test_list_topic_snapshots_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4136,7 +3764,7 @@ async def test_list_topic_snapshots_flattened_async(): @pytest.mark.asyncio async def test_list_topic_snapshots_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4250,7 +3878,7 @@ def test_list_topic_snapshots_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pager(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4302,7 +3930,7 @@ async def test_list_topic_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_topic_snapshots_async_pages(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4383,25 +4011,6 @@ def test_delete_topic(request_type, transport: str = "grpc"): assert response is None -def test_delete_topic_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.delete_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() - - def test_delete_topic_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4465,25 +4074,6 @@ def test_delete_topic_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_topic_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_topic() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteTopicRequest() - - @pytest.mark.asyncio async def test_delete_topic_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4492,7 +4082,7 @@ async def test_delete_topic_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4531,7 +4121,7 @@ async def test_delete_topic_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteTopicRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4592,7 +4182,7 @@ def 
test_delete_topic_field_headers(): @pytest.mark.asyncio async def test_delete_topic_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4660,7 +4250,7 @@ def test_delete_topic_flattened_error(): @pytest.mark.asyncio async def test_delete_topic_flattened_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -4687,7 +4277,7 @@ async def test_delete_topic_flattened_async(): @pytest.mark.asyncio async def test_delete_topic_flattened_error_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4734,27 +4324,6 @@ def test_detach_subscription(request_type, transport: str = "grpc"): assert isinstance(response, pubsub.DetachSubscriptionResponse) -def test_detach_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.detach_subscription), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.detach_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() - - def test_detach_subscription_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4824,29 +4393,6 @@ def test_detach_subscription_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_detach_subscription_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.detach_subscription), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.DetachSubscriptionResponse() - ) - response = await client.detach_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DetachSubscriptionRequest() - - @pytest.mark.asyncio async def test_detach_subscription_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4855,7 +4401,7 @@ async def test_detach_subscription_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4894,7 +4440,7 @@ async def test_detach_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DetachSubscriptionRequest ): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4961,7 +4507,7 @@ def test_detach_subscription_field_headers(): @pytest.mark.asyncio async def test_detach_subscription_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4992,52 +4538,6 @@ async def test_detach_subscription_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - pubsub.Topic, - dict, - ], -) -def test_create_topic_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.Topic( - name="name_value", - kms_key_name="kms_key_name_value", - satisfies_pzs=True, - state=pubsub.Topic.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_topic(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True - assert response.state == pubsub.Topic.State.ACTIVE - - def test_create_topic_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5156,81 +4656,6 @@ def test_create_topic_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_topic_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_create_topic" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_create_topic" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.Topic.pb(pubsub.Topic()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Topic.to_json(pubsub.Topic()) - - request = pubsub.Topic() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Topic() - - client.create_topic( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_topic_rest_bad_request( - transport: str = "rest", request_type=pubsub.Topic -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_topic(request) - - def test_create_topic_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5286,58 +4711,6 @@ def test_create_topic_rest_flattened_error(transport: str = "rest"): ) -def test_create_topic_rest_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.UpdateTopicRequest, - dict, - ], -) -def test_update_topic_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.Topic( - name="name_value", - kms_key_name="kms_key_name_value", - satisfies_pzs=True, - state=pubsub.Topic.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_topic(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True - assert response.state == pubsub.Topic.State.ACTIVE - - def test_update_topic_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5459,81 +4832,6 @@ def test_update_topic_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_topic_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_update_topic" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_update_topic" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.UpdateTopicRequest.pb(pubsub.UpdateTopicRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Topic.to_json(pubsub.Topic()) - - request = pubsub.UpdateTopicRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Topic() - - client.update_topic( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_topic_rest_bad_request( - 
transport: str = "rest", request_type=pubsub.UpdateTopicRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_topic(request) - - def test_update_topic_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5591,52 +4889,6 @@ def test_update_topic_rest_flattened_error(transport: str = "rest"): ) -def test_update_topic_rest_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.PublishRequest, - dict, - ], -) -def test_publish_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.PublishResponse( - message_ids=["message_ids_value"], - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.PublishResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.publish(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PublishResponse) - assert response.message_ids == ["message_ids_value"] - - def test_publish_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5763,83 +5015,6 @@ def test_publish_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_publish_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_publish" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_publish" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.PublishRequest.pb(pubsub.PublishRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = 
pubsub.PublishResponse.to_json( - pubsub.PublishResponse() - ) - - request = pubsub.PublishRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.PublishResponse() - - client.publish( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_publish_rest_bad_request( - transport: str = "rest", request_type=pubsub.PublishRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.publish(request) - - def test_publish_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5898,70 +5073,18 @@ def test_publish_rest_flattened_error(transport: str = "rest"): ) -def test_publish_rest_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) +def test_get_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.GetTopicRequest, - dict, - ], -) -def test_get_topic_rest(request_type): - 
client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.Topic( - name="name_value", - kms_key_name="kms_key_name_value", - satisfies_pzs=True, - state=pubsub.Topic.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Topic.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_topic(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Topic) - assert response.name == "name_value" - assert response.kms_key_name == "kms_key_name_value" - assert response.satisfies_pzs is True - assert response.state == pubsub.Topic.State.ACTIVE - - -def test_get_topic_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.get_topic in client._transport._wrapped_methods @@ -6067,81 +5190,6 @@ def test_get_topic_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("topic",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_topic_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_get_topic" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_get_topic" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.GetTopicRequest.pb(pubsub.GetTopicRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Topic.to_json(pubsub.Topic()) - - request = pubsub.GetTopicRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Topic() - - client.get_topic( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_topic_rest_bad_request( - transport: str = "rest", request_type=pubsub.GetTopicRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_topic(request) - - def test_get_topic_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6197,52 +5245,6 @@ def test_get_topic_rest_flattened_error(transport: str = "rest"): ) -def test_get_topic_rest_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ListTopicsRequest, - dict, - ], -) -def test_list_topics_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http 
request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.ListTopicsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.ListTopicsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_topics(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListTopicsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_topics_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6375,83 +5377,6 @@ def test_list_topics_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_topics_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_list_topics" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_list_topics" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.ListTopicsRequest.pb(pubsub.ListTopicsRequest()) - transcode.return_value = { - "method": "post", - "uri": 
"my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.ListTopicsResponse.to_json( - pubsub.ListTopicsResponse() - ) - - request = pubsub.ListTopicsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.ListTopicsResponse() - - client.list_topics( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_topics_rest_bad_request( - transport: str = "rest", request_type=pubsub.ListTopicsRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_topics(request) - - def test_list_topics_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6568,56 +5493,14 @@ def test_list_topics_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ListTopicSubscriptionsRequest, - dict, - ], -) -def test_list_topic_subscriptions_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.ListTopicSubscriptionsResponse( - subscriptions=["subscriptions_value"], - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_topic_subscriptions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicSubscriptionsPager) - assert response.subscriptions == ["subscriptions_value"] - assert response.next_page_token == "next_page_token_value" - - -def test_list_topic_subscriptions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_list_topic_subscriptions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -6749,85 +5632,6 @@ def test_list_topic_subscriptions_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_topic_subscriptions_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_list_topic_subscriptions" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_list_topic_subscriptions" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.ListTopicSubscriptionsRequest.pb( - pubsub.ListTopicSubscriptionsRequest() - ) - 
transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.ListTopicSubscriptionsResponse.to_json( - pubsub.ListTopicSubscriptionsResponse() - ) - - request = pubsub.ListTopicSubscriptionsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.ListTopicSubscriptionsResponse() - - client.list_topic_subscriptions( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_topic_subscriptions_rest_bad_request( - transport: str = "rest", request_type=pubsub.ListTopicSubscriptionsRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_topic_subscriptions(request) - - def test_list_topic_subscriptions_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6947,48 +5751,6 @@ def test_list_topic_subscriptions_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ListTopicSnapshotsRequest, - dict, - ], -) -def test_list_topic_snapshots_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.ListTopicSnapshotsResponse( - snapshots=["snapshots_value"], - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_topic_snapshots(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTopicSnapshotsPager) - assert response.snapshots == ["snapshots_value"] - assert response.next_page_token == "next_page_token_value" - - def test_list_topic_snapshots_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7127,85 +5889,6 @@ def test_list_topic_snapshots_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_topic_snapshots_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_list_topic_snapshots" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_list_topic_snapshots" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.ListTopicSnapshotsRequest.pb( - pubsub.ListTopicSnapshotsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.ListTopicSnapshotsResponse.to_json( - pubsub.ListTopicSnapshotsResponse() - ) - - request = pubsub.ListTopicSnapshotsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.ListTopicSnapshotsResponse() - - client.list_topic_snapshots( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - 
pre.assert_called_once() - post.assert_called_once() - - -def test_list_topic_snapshots_rest_bad_request( - transport: str = "rest", request_type=pubsub.ListTopicSnapshotsRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_topic_snapshots(request) - - def test_list_topic_snapshots_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7323,53 +6006,18 @@ def test_list_topic_snapshots_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - pubsub.DeleteTopicRequest, - dict, - ], -) -def test_delete_topic_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_delete_topic_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_topic(request) - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_topic_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() # Ensure method has been cached assert client._transport.delete_topic in client._transport._wrapped_methods @@ -7472,75 +6120,6 @@ def test_delete_topic_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("topic",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_topic_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "pre_delete_topic" - ) as pre: - pre.assert_not_called() - pb_message = 
pubsub.DeleteTopicRequest.pb(pubsub.DeleteTopicRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = pubsub.DeleteTopicRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_topic( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_topic_rest_bad_request( - transport: str = "rest", request_type=pubsub.DeleteTopicRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"topic": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_topic(request) - - def test_delete_topic_rest_flattened(): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7594,49 +6173,6 @@ def test_delete_topic_rest_flattened_error(transport: str = "rest"): ) -def test_delete_topic_rest_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.DetachSubscriptionRequest, - dict, - ], -) -def test_detach_subscription_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.DetachSubscriptionResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.DetachSubscriptionResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.detach_subscription(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.DetachSubscriptionResponse) - - def test_detach_subscription_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7760,91 +6296,6 @@ def test_detach_subscription_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("subscription",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_detach_subscription_rest_interceptors(null_interceptor): - transport = transports.PublisherRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), - ) - client = PublisherClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.PublisherRestInterceptor, "post_detach_subscription" - ) as post, mock.patch.object( - transports.PublisherRestInterceptor, "pre_detach_subscription" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.DetachSubscriptionRequest.pb( - pubsub.DetachSubscriptionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.DetachSubscriptionResponse.to_json( - pubsub.DetachSubscriptionResponse() - ) - - request = pubsub.DetachSubscriptionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.DetachSubscriptionResponse() - - client.detach_subscription( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def 
test_detach_subscription_rest_bad_request( - transport: str = "rest", request_type=pubsub.DetachSubscriptionRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.detach_subscription(request) - - -def test_detach_subscription_rest_error(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.PublisherGrpcTransport( @@ -7921,34 +6372,1918 @@ def test_transport_get_channel(): assert channel -@pytest.mark.parametrize( - "transport_class", - [ - transports.PublisherGrpcTransport, - transports.PublisherGrpcAsyncIOTransport, - transports.PublisherRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +@pytest.mark.parametrize( + "transport_class", + [ + transports.PublisherGrpcTransport, + transports.PublisherGrpcAsyncIOTransport, + transports.PublisherRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = PublisherClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.create_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Topic() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.update_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_publish_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + call.return_value = pubsub.PublishResponse() + client.publish(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PublishRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + call.return_value = pubsub.Topic() + client.get_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topics_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + call.return_value = pubsub.ListTopicsResponse() + client.list_topics(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_subscriptions_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSubscriptionsResponse() + client.list_topic_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_snapshots_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + call.return_value = pubsub.ListTopicSnapshotsResponse() + client.list_topic_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_topic_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + call.return_value = None + client.delete_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_detach_subscription_empty_call_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + call.return_value = pubsub.DetachSubscriptionResponse() + client.detach_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DetachSubscriptionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = PublisherAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + await client.create_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Topic() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + await client.update_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_publish_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + ) + await client.publish(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PublishRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + ) + await client.get_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_topics_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_topics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_topic_subscriptions_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_topic_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_topic_snapshots_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + ) + await client.list_topic_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_topic_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_detach_subscription_empty_call_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.DetachSubscriptionResponse() + ) + await client.detach_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DetachSubscriptionRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = PublisherClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_topic_rest_bad_request(request_type=pubsub.Topic): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Topic, + dict, + ], +) +def test_create_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_topic(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_create_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_create_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.Topic.pb(pubsub.Topic()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Topic.to_json(pubsub.Topic()) + req.return_value.content = return_value + + request = pubsub.Topic() + metadata = [ + ("key", "val"), + 
("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + + client.create_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_topic_rest_bad_request(request_type=pubsub.UpdateTopicRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateTopicRequest, + dict, + ], +) +def test_update_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": {"name": "projects/sample1/topics/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_topic(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_update_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_update_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.UpdateTopicRequest.pb(pubsub.UpdateTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Topic.to_json(pubsub.Topic()) + req.return_value.content = return_value + + request = 
pubsub.UpdateTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + + client.update_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_publish_rest_bad_request(request_type=pubsub.PublishRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.publish(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PublishRequest, + dict, + ], +) +def test_publish_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.PublishResponse( + message_ids=["message_ids_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.PublishResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.publish(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.PublishResponse) + assert response.message_ids == ["message_ids_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_publish_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_publish" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.PublishRequest.pb(pubsub.PublishRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.PublishResponse.to_json(pubsub.PublishResponse()) + req.return_value.content = return_value + + request = pubsub.PublishRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.PublishResponse() + + client.publish( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_topic_rest_bad_request(request_type=pubsub.GetTopicRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetTopicRequest, + dict, + ], +) +def test_get_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Topic( + name="name_value", + kms_key_name="kms_key_name_value", + satisfies_pzs=True, + state=pubsub.Topic.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Topic.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_topic(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Topic) + assert response.name == "name_value" + assert response.kms_key_name == "kms_key_name_value" + assert response.satisfies_pzs is True + assert response.state == pubsub.Topic.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_get_topic" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_get_topic" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.GetTopicRequest.pb(pubsub.GetTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Topic.to_json(pubsub.Topic()) + req.return_value.content = return_value + + request = pubsub.GetTopicRequest() + metadata = [ + 
("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Topic() + + client.get_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topics_rest_bad_request(request_type=pubsub.ListTopicsRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_topics(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicsRequest, + dict, + ], +) +def test_list_topics_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListTopicsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topics(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topics_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topics" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topics" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListTopicsRequest.pb(pubsub.ListTopicsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.ListTopicsResponse.to_json(pubsub.ListTopicsResponse()) + req.return_value.content = return_value + + request = pubsub.ListTopicsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
pubsub.ListTopicsResponse() + + client.list_topics( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topic_subscriptions_rest_bad_request( + request_type=pubsub.ListTopicSubscriptionsRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_topic_subscriptions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSubscriptionsRequest, + dict, + ], +) +def test_list_topic_subscriptions_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicSubscriptionsResponse( + subscriptions=["subscriptions_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListTopicSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topic_subscriptions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSubscriptionsPager) + assert response.subscriptions == ["subscriptions_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topic_subscriptions_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_subscriptions" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topic_subscriptions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListTopicSubscriptionsRequest.pb( + pubsub.ListTopicSubscriptionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.ListTopicSubscriptionsResponse.to_json( + 
pubsub.ListTopicSubscriptionsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListTopicSubscriptionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicSubscriptionsResponse() + + client.list_topic_subscriptions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_topic_snapshots_rest_bad_request( + request_type=pubsub.ListTopicSnapshotsRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_topic_snapshots(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListTopicSnapshotsRequest, + dict, + ], +) +def test_list_topic_snapshots_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListTopicSnapshotsResponse( + snapshots=["snapshots_value"], + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListTopicSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_topic_snapshots(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListTopicSnapshotsPager) + assert response.snapshots == ["snapshots_value"] + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_topic_snapshots_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_snapshots" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_list_topic_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListTopicSnapshotsRequest.pb( + pubsub.ListTopicSnapshotsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.ListTopicSnapshotsResponse.to_json( + pubsub.ListTopicSnapshotsResponse() + ) + req.return_value.content = 
return_value + + request = pubsub.ListTopicSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListTopicSnapshotsResponse() + + client.list_topic_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_topic_rest_bad_request(request_type=pubsub.DeleteTopicRequest): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_topic(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteTopicRequest, + dict, + ], +) +def test_delete_topic_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"topic": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_topic(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_topic_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "pre_delete_topic" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteTopicRequest.pb(pubsub.DeleteTopicRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = pubsub.DeleteTopicRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_topic( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_detach_subscription_rest_bad_request( + request_type=pubsub.DetachSubscriptionRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.detach_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DetachSubscriptionRequest, + dict, + ], +) +def test_detach_subscription_rest_call_success(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.DetachSubscriptionResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.DetachSubscriptionResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.detach_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.DetachSubscriptionResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_detach_subscription_rest_interceptors(null_interceptor): + transport = transports.PublisherRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.PublisherRestInterceptor(), + ) + client = PublisherClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.PublisherRestInterceptor, "post_detach_subscription" + ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "pre_detach_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.DetachSubscriptionRequest.pb( + pubsub.DetachSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.DetachSubscriptionResponse.to_json( + pubsub.DetachSubscriptionResponse() + ) + req.return_value.content = return_value + + request = pubsub.DetachSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.DetachSubscriptionResponse() + + client.detach_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_initialize_client_w_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_topic), "__call__") as call: + client.create_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Topic() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_topic), "__call__") as call: + client.update_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_publish_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.publish), "__call__") as call: + client.publish(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PublishRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_topic), "__call__") as call: + client.get_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetTopicRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topics_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_topics), "__call__") as call: + client.list_topics(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_topic_subscriptions_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_subscriptions), "__call__" + ) as call: + client.list_topic_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_topic_snapshots_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_topic_snapshots), "__call__" + ) as call: + client.list_topic_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListTopicSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_topic_empty_call_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_topic), "__call__") as call: + client.delete_topic(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteTopicRequest() + + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = PublisherClient.get_transport_class(transport_name)( +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_detach_subscription_empty_call_rest(): + client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.detach_subscription), "__call__" + ) as call: + client.detach_subscription(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DetachSubscriptionRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -8613,194 +8948,6 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/topics/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.GetIamPolicyRequest, - dict, - ], -) -def test_get_iam_policy_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/topics/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.SetIamPolicyRequest, - dict, - ], -) -def test_set_iam_policy_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest -): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/subscriptions/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, - ], -) -def test_test_iam_permissions_rest(request_type): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - def test_set_iam_policy(transport: str = "grpc"): client = PublisherClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8836,7 +8983,7 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -8901,7 +9048,7 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -8949,7 +9096,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -9002,7 +9149,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9068,7 +9215,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9116,7 +9263,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -9168,7 +9315,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9235,7 +9382,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9289,7 +9436,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = PublisherAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -9309,22 +9456,41 @@ async def test_test_iam_permissions_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = PublisherClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = PublisherAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = PublisherClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 7be5183788b7..3dbefc470fac 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -24,7 +24,7 @@ import grpc from grpc.experimental import aio -from 
collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -37,6 +37,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -60,10 +67,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -299,86 +320,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SchemaServiceClient, transports.SchemaServiceGrpcTransport, "grpc"), - (SchemaServiceClient, transports.SchemaServiceRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1168,25 +1109,6 @@ def test_create_schema(request_type, transport: str = "grpc"): assert response.revision_id == "revision_id_value" -def test_create_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_schema), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() - - def test_create_schema_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1252,32 +1174,6 @@ def test_create_schema_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_schema_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_schema), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gp_schema.Schema( - name="name_value", - type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - ) - response = await client.create_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CreateSchemaRequest() - - @pytest.mark.asyncio async def test_create_schema_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1286,7 +1182,7 @@ async def test_create_schema_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1325,7 +1221,7 @@ async def test_create_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CreateSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1397,7 +1293,7 @@ def test_create_schema_field_headers(): @pytest.mark.asyncio async def test_create_schema_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1475,7 +1371,7 @@ def test_create_schema_flattened_error(): @pytest.mark.asyncio async def test_create_schema_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1510,7 +1406,7 @@ async def test_create_schema_flattened_async(): @pytest.mark.asyncio async def test_create_schema_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1566,25 +1462,6 @@ def test_get_schema(request_type, transport: str = "grpc"): assert response.revision_id == "revision_id_value" -def test_get_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_schema), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() - - def test_get_schema_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1648,39 +1525,13 @@ def test_get_schema_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_schema_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_schema), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.Schema( - name="name_value", - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - ) - response = await client.get_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.GetSchemaRequest() - - @pytest.mark.asyncio async def test_get_schema_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1719,7 +1570,7 @@ async def test_get_schema_async( transport: str = "grpc_asyncio", request_type=schema.GetSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1791,7 +1642,7 @@ def test_get_schema_field_headers(): @pytest.mark.asyncio async def test_get_schema_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1859,7 +1710,7 @@ def test_get_schema_flattened_error(): @pytest.mark.asyncio async def test_get_schema_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1886,7 +1737,7 @@ async def test_get_schema_flattened_async(): @pytest.mark.asyncio async def test_get_schema_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1934,25 +1785,6 @@ def test_list_schemas(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_schemas_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_schemas() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() - - def test_list_schemas_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2018,29 +1850,6 @@ def test_list_schemas_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_schemas_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.ListSchemasResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_schemas() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemasRequest() - - @pytest.mark.asyncio async def test_list_schemas_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2049,7 +1858,7 @@ async def test_list_schemas_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2088,7 +1897,7 @@ async def test_list_schemas_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemasRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2154,7 +1963,7 @@ def test_list_schemas_field_headers(): @pytest.mark.asyncio async def test_list_schemas_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2224,7 +2033,7 @@ def test_list_schemas_flattened_error(): @pytest.mark.asyncio async def test_list_schemas_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2253,7 +2062,7 @@ async def test_list_schemas_flattened_async(): @pytest.mark.asyncio async def test_list_schemas_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2363,7 +2172,7 @@ def test_list_schemas_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_schemas_async_pager(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2413,7 +2222,7 @@ async def test_list_schemas_async_pager(): @pytest.mark.asyncio async def test_list_schemas_async_pages(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2497,27 +2306,6 @@ def test_list_schema_revisions(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_schema_revisions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schema_revisions), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.list_schema_revisions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemaRevisionsRequest() - - def test_list_schema_revisions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2590,31 +2378,6 @@ def test_list_schema_revisions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_schema_revisions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_schema_revisions), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.ListSchemaRevisionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_schema_revisions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ListSchemaRevisionsRequest() - - @pytest.mark.asyncio async def test_list_schema_revisions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2623,7 +2386,7 @@ async def test_list_schema_revisions_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2662,7 +2425,7 @@ async def test_list_schema_revisions_async( transport: str = "grpc_asyncio", request_type=schema.ListSchemaRevisionsRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2732,7 +2495,7 @@ def test_list_schema_revisions_field_headers(): @pytest.mark.asyncio async def test_list_schema_revisions_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2806,7 +2569,7 @@ def test_list_schema_revisions_flattened_error(): @pytest.mark.asyncio async def test_list_schema_revisions_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2837,7 +2600,7 @@ async def test_list_schema_revisions_flattened_async(): @pytest.mark.asyncio async def test_list_schema_revisions_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2951,7 +2714,7 @@ def test_list_schema_revisions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_schema_revisions_async_pager(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3003,7 +2766,7 @@ async def test_list_schema_revisions_async_pager(): @pytest.mark.asyncio async def test_list_schema_revisions_async_pages(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3093,25 +2856,6 @@ def test_commit_schema(request_type, transport: str = "grpc"): assert response.revision_id == "revision_id_value" -def test_commit_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.commit_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CommitSchemaRequest() - - def test_commit_schema_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3175,32 +2919,6 @@ def test_commit_schema_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_commit_schema_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gp_schema.Schema( - name="name_value", - type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - ) - response = await client.commit_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.CommitSchemaRequest() - - @pytest.mark.asyncio async def test_commit_schema_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3209,7 +2927,7 @@ async def test_commit_schema_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3248,7 +2966,7 @@ async def test_commit_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.CommitSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3320,7 +3038,7 @@ def test_commit_schema_field_headers(): @pytest.mark.asyncio async def test_commit_schema_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3393,7 +3111,7 @@ def test_commit_schema_flattened_error(): @pytest.mark.asyncio async def test_commit_schema_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3424,7 +3142,7 @@ async def test_commit_schema_flattened_async(): @pytest.mark.asyncio async def test_commit_schema_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3479,25 +3197,6 @@ def test_rollback_schema(request_type, transport: str = "grpc"): assert response.revision_id == "revision_id_value" -def test_rollback_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.rollback_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.RollbackSchemaRequest() - - def test_rollback_schema_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3563,32 +3262,6 @@ def test_rollback_schema_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_rollback_schema_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.Schema( - name="name_value", - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - ) - response = await client.rollback_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.RollbackSchemaRequest() - - @pytest.mark.asyncio async def test_rollback_schema_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3597,7 +3270,7 @@ async def test_rollback_schema_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3636,7 +3309,7 @@ async def test_rollback_schema_async( transport: str = "grpc_asyncio", request_type=schema.RollbackSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3708,7 +3381,7 @@ def test_rollback_schema_field_headers(): @pytest.mark.asyncio async def test_rollback_schema_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3781,7 +3454,7 @@ def test_rollback_schema_flattened_error(): @pytest.mark.asyncio async def test_rollback_schema_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3812,7 +3485,7 @@ async def test_rollback_schema_flattened_async(): @pytest.mark.asyncio async def test_rollback_schema_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3869,27 +3542,6 @@ def test_delete_schema_revision(request_type, transport: str = "grpc"): assert response.revision_id == "revision_id_value" -def test_delete_schema_revision_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_schema_revision), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_schema_revision() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRevisionRequest() - - def test_delete_schema_revision_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3962,34 +3614,6 @@ def test_delete_schema_revision_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_schema_revision_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_schema_revision), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.Schema( - name="name_value", - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - ) - response = await client.delete_schema_revision() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRevisionRequest() - - @pytest.mark.asyncio async def test_delete_schema_revision_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3998,7 +3622,7 @@ async def test_delete_schema_revision_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4037,7 +3661,7 @@ async def test_delete_schema_revision_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRevisionRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4113,7 +3737,7 @@ def test_delete_schema_revision_field_headers(): @pytest.mark.asyncio async def test_delete_schema_revision_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4190,7 +3814,7 @@ def test_delete_schema_revision_flattened_error(): @pytest.mark.asyncio async def test_delete_schema_revision_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the 
actual call within the gRPC stub, and fake the request. @@ -4223,7 +3847,7 @@ async def test_delete_schema_revision_flattened_async(): @pytest.mark.asyncio async def test_delete_schema_revision_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4269,25 +3893,6 @@ def test_delete_schema(request_type, transport: str = "grpc"): assert response is None -def test_delete_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() - - def test_delete_schema_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4351,25 +3956,6 @@ def test_delete_schema_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_schema_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.DeleteSchemaRequest() - - @pytest.mark.asyncio async def test_delete_schema_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4378,7 +3964,7 @@ async def test_delete_schema_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4417,7 +4003,7 @@ async def test_delete_schema_async( transport: str = "grpc_asyncio", request_type=schema.DeleteSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4478,7 +4064,7 @@ def test_delete_schema_field_headers(): @pytest.mark.asyncio async def test_delete_schema_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4546,7 +4132,7 @@ def test_delete_schema_flattened_error(): @pytest.mark.asyncio async def test_delete_schema_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4573,7 +4159,7 @@ async def test_delete_schema_flattened_async(): @pytest.mark.asyncio async def test_delete_schema_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4618,25 +4204,6 @@ def test_validate_schema(request_type, transport: str = "grpc"): assert isinstance(response, gp_schema.ValidateSchemaResponse) -def test_validate_schema_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.validate_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() - - def test_validate_schema_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4700,27 +4267,6 @@ def test_validate_schema_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_validate_schema_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - gp_schema.ValidateSchemaResponse() - ) - response = await client.validate_schema() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == gp_schema.ValidateSchemaRequest() - - @pytest.mark.asyncio async def test_validate_schema_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4729,7 +4275,7 @@ async def test_validate_schema_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4768,7 +4314,7 @@ async def test_validate_schema_async( transport: str = "grpc_asyncio", request_type=gp_schema.ValidateSchemaRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4831,7 +4377,7 @@ def test_validate_schema_field_headers(): @pytest.mark.asyncio async def test_validate_schema_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4906,7 +4452,7 @@ def test_validate_schema_flattened_error(): @pytest.mark.asyncio async def test_validate_schema_flattened_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4939,7 +4485,7 @@ async def test_validate_schema_flattened_async(): @pytest.mark.asyncio async def test_validate_schema_flattened_error_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4985,25 +4531,6 @@ def test_validate_message(request_type, transport: str = "grpc"): assert isinstance(response, schema.ValidateMessageResponse) -def test_validate_message_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.validate_message), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.validate_message() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() - - def test_validate_message_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5071,27 +4598,6 @@ def test_validate_message_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_validate_message_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.validate_message), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - schema.ValidateMessageResponse() - ) - response = await client.validate_message() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == schema.ValidateMessageRequest() - - @pytest.mark.asyncio async def test_validate_message_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -5100,7 +4606,7 @@ async def test_validate_message_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5139,7 +4645,7 @@ async def test_validate_message_async( transport: str = "grpc_asyncio", request_type=schema.ValidateMessageRequest ): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5202,7 +4708,7 @@ def test_validate_message_field_headers(): @pytest.mark.asyncio async def test_validate_message_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5231,160 +4737,40 @@ async def test_validate_message_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - gp_schema.CreateSchemaRequest, - dict, - ], -) -def test_create_schema_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_create_schema_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped 
rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request_init["schema"] = { - "name": "name_value", - "type_": 1, - "definition": "definition_value", - "revision_id": "revision_id_value", - "revision_create_time": {"seconds": 751, "nanos": 543}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Determine if the message type is proto-plus or protobuf - test_field = gp_schema.CreateSchemaRequest.meta.fields["schema"] + # Ensure method has been cached + assert client._transport.create_schema in client._transport._wrapped_methods - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_schema] = mock_rpc - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + request = {} + client.create_schema(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + client.create_schema(request) - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["schema"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = 
subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["schema"][field])): - del request_init["schema"][field][i][subfield] - else: - del request_init["schema"][field][subfield] - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gp_schema.Schema( - name="name_value", - type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_schema(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gp_schema.Schema) - assert response.name == "name_value" - assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" - assert response.revision_id == "revision_id_value" - - -def test_create_schema_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.create_schema in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.create_schema] = mock_rpc - - request = {} - client.create_schema(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.create_schema(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaRequest): @@ -5479,83 +4865,6 @@ def test_create_schema_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_schema_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), - ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_create_schema" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_create_schema" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = gp_schema.CreateSchemaRequest.pb(gp_schema.CreateSchemaRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gp_schema.Schema.to_json(gp_schema.Schema()) - - request = gp_schema.CreateSchemaRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gp_schema.Schema() - - client.create_schema( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - 
post.assert_called_once() - - -def test_create_schema_rest_bad_request( - transport: str = "rest", request_type=gp_schema.CreateSchemaRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_schema(request) - - def test_create_schema_rest_flattened(): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -5615,58 +4924,6 @@ def test_create_schema_rest_flattened_error(transport: str = "rest"): ) -def test_create_schema_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - schema.GetSchemaRequest, - dict, - ], -) -def test_get_schema_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schema.Schema( - name="name_value", - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_schema(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schema.Schema) - assert response.name == "name_value" - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" - assert response.revision_id == "revision_id_value" - - def test_get_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5786,96 +5043,19 @@ def test_get_schema_rest_unset_required_fields(): assert set(unset_fields) == (set(("view",)) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_schema_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( +def test_get_schema_rest_flattened(): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), + transport="rest", ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_get_schema" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_get_schema" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = schema.GetSchemaRequest.pb(schema.GetSchemaRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schema.Schema.to_json(schema.Schema()) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema() - request = schema.GetSchemaRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schema.Schema() - - client.get_schema( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_schema_rest_bad_request( - transport: str = "rest", request_type=schema.GetSchemaRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_schema(request) - - -def test_get_schema_rest_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schema.Schema() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/schemas/sample2"} + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -5918,52 +5098,6 @@ def test_get_schema_rest_flattened_error(transport: str = "rest"): ) -def test_get_schema_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - schema.ListSchemasRequest, - dict, - ], -) -def test_list_schemas_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schema.ListSchemasResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schema.ListSchemasResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_schemas(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSchemasPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_schemas_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6098,85 +5232,6 @@ def test_list_schemas_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_schemas_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), - ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_list_schemas" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_list_schemas" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = schema.ListSchemasRequest.pb(schema.ListSchemasRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - 
req.return_value.request = PreparedRequest() - req.return_value._content = schema.ListSchemasResponse.to_json( - schema.ListSchemasResponse() - ) - - request = schema.ListSchemasRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schema.ListSchemasResponse() - - client.list_schemas( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_schemas_rest_bad_request( - transport: str = "rest", request_type=schema.ListSchemasRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_schemas(request) - - def test_list_schemas_rest_flattened(): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6293,46 +5348,6 @@ def test_list_schemas_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - schema.ListSchemaRevisionsRequest, - dict, - ], -) -def test_list_schema_revisions_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a 
response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schema.ListSchemaRevisionsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schema.ListSchemaRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_schema_revisions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSchemaRevisionsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_schema_revisions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6474,106 +5489,25 @@ def test_list_schema_revisions_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_schema_revisions_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( +def test_list_schema_revisions_rest_flattened(): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), + transport="rest", ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_list_schema_revisions" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_list_schema_revisions" - ) as pre: - 
pre.assert_not_called() - post.assert_not_called() - pb_message = schema.ListSchemaRevisionsRequest.pb( - schema.ListSchemaRevisionsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schema.ListSchemaRevisionsResponse.to_json( - schema.ListSchemaRevisionsResponse() - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemaRevisionsResponse() - request = schema.ListSchemaRevisionsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schema.ListSchemaRevisionsResponse() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} - client.list_schema_revisions( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_schema_revisions_rest_bad_request( - transport: str = "rest", request_type=schema.ListSchemaRevisionsRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_schema_revisions(request) - - -def test_list_schema_revisions_rest_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schema.ListSchemaRevisionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/schemas/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() @@ -6674,52 +5608,6 @@ def test_list_schema_revisions_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - gp_schema.CommitSchemaRequest, - dict, - ], -) -def test_commit_schema_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = gp_schema.Schema( - name="name_value", - type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gp_schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.commit_schema(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, gp_schema.Schema) - assert response.name == "name_value" - assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" - assert response.revision_id == "revision_id_value" - - def test_commit_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -6846,83 +5734,6 @@ def test_commit_schema_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_commit_schema_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), - ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_commit_schema" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_commit_schema" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = 
gp_schema.CommitSchemaRequest.pb(gp_schema.CommitSchemaRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gp_schema.Schema.to_json(gp_schema.Schema()) - - request = gp_schema.CommitSchemaRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gp_schema.Schema() - - client.commit_schema( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_commit_schema_rest_bad_request( - transport: str = "rest", request_type=gp_schema.CommitSchemaRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.commit_schema(request) - - def test_commit_schema_rest_flattened(): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -6980,58 +5791,6 @@ def test_commit_schema_rest_flattened_error(transport: str = "rest"): ) -def test_commit_schema_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - schema.RollbackSchemaRequest, - dict, - ], -) -def test_rollback_schema_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schema.Schema( - name="name_value", - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.rollback_schema(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schema.Schema) - assert response.name == "name_value" - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" - assert response.revision_id == "revision_id_value" - - def test_rollback_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7164,96 +5923,19 @@ def test_rollback_schema_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_rollback_schema_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( +def test_rollback_schema_rest_flattened(): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), + transport="rest", ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_rollback_schema" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_rollback_schema" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = schema.RollbackSchemaRequest.pb(schema.RollbackSchemaRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schema.Schema.to_json(schema.Schema()) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = schema.Schema() - request = schema.RollbackSchemaRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schema.Schema() - - client.rollback_schema( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_rollback_schema_rest_bad_request( - transport: str = "rest", request_type=schema.RollbackSchemaRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.rollback_schema(request) - - -def test_rollback_schema_rest_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schema.Schema() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/schemas/sample2"} + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -7299,58 +5981,6 @@ def test_rollback_schema_rest_flattened_error(transport: str = "rest"): ) -def test_rollback_schema_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - schema.DeleteSchemaRevisionRequest, - dict, - ], -) -def test_delete_schema_revision_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = schema.Schema( - name="name_value", - type_=schema.Schema.Type.PROTOCOL_BUFFER, - definition="definition_value", - revision_id="revision_id_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schema.Schema.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_schema_revision(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, schema.Schema) - assert response.name == "name_value" - assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER - assert response.definition == "definition_value" - assert response.revision_id == "revision_id_value" - - def test_delete_schema_revision_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7477,85 +6107,6 @@ def test_delete_schema_revision_rest_unset_required_fields(): assert set(unset_fields) == (set(("revisionId",)) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_schema_revision_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), - ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_delete_schema_revision" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_delete_schema_revision" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = schema.DeleteSchemaRevisionRequest.pb( - schema.DeleteSchemaRevisionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schema.Schema.to_json(schema.Schema()) - - request = schema.DeleteSchemaRevisionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schema.Schema() - - 
client.delete_schema_revision( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_delete_schema_revision_rest_bad_request( - transport: str = "rest", request_type=schema.DeleteSchemaRevisionRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_schema_revision(request) - - def test_delete_schema_revision_rest_flattened(): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7614,47 +6165,6 @@ def test_delete_schema_revision_rest_flattened_error(transport: str = "rest"): ) -def test_delete_schema_revision_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - schema.DeleteSchemaRequest, - dict, - ], -) -def test_delete_schema_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_schema(request) - - # Establish that the response is the type that we expect. - assert response is None - - def test_delete_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7769,90 +6279,19 @@ def test_delete_schema_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("name",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_schema_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( +def test_delete_schema_rest_flattened(): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), + transport="rest", ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_delete_schema" - ) as pre: - pre.assert_not_called() - pb_message = schema.DeleteSchemaRequest.pb(schema.DeleteSchemaRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - request = schema.DeleteSchemaRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_schema( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_schema_rest_bad_request( - transport: str = "rest", request_type=schema.DeleteSchemaRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/schemas/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_schema(request) - - -def test_delete_schema_rest_flattened(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/schemas/sample2"} + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/schemas/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -7893,49 +6332,6 @@ def test_delete_schema_rest_flattened_error(transport: str = "rest"): ) -def test_delete_schema_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - gp_schema.ValidateSchemaRequest, - dict, - ], -) -def test_validate_schema_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = gp_schema.ValidateSchemaResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = gp_schema.ValidateSchemaResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.validate_schema(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, gp_schema.ValidateSchemaResponse) - - def test_validate_schema_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8064,87 +6460,6 @@ def test_validate_schema_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_validate_schema_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), - ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_validate_schema" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_validate_schema" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = gp_schema.ValidateSchemaRequest.pb( - gp_schema.ValidateSchemaRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = gp_schema.ValidateSchemaResponse.to_json( - gp_schema.ValidateSchemaResponse() - ) - - request = gp_schema.ValidateSchemaRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = gp_schema.ValidateSchemaResponse() - - client.validate_schema( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_validate_schema_rest_bad_request( - transport: str = "rest", 
request_type=gp_schema.ValidateSchemaRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.validate_schema(request) - - def test_validate_schema_rest_flattened(): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8203,49 +6518,6 @@ def test_validate_schema_rest_flattened_error(transport: str = "rest"): ) -def test_validate_schema_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - schema.ValidateMessageRequest, - dict, - ], -) -def test_validate_message_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = schema.ValidateMessageResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = schema.ValidateMessageResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.validate_message(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, schema.ValidateMessageResponse) - - def test_validate_message_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8368,110 +6640,25 @@ def test_validate_message_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("parent",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_validate_message_rest_interceptors(null_interceptor): - transport = transports.SchemaServiceRestTransport( +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.SchemaServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SchemaServiceRestInterceptor(), ) - client = SchemaServiceClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SchemaServiceRestInterceptor, "post_validate_message" - ) as post, mock.patch.object( - transports.SchemaServiceRestInterceptor, "pre_validate_message" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = schema.ValidateMessageRequest.pb(schema.ValidateMessageRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = schema.ValidateMessageResponse.to_json( - schema.ValidateMessageResponse() + with pytest.raises(ValueError): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - request = schema.ValidateMessageRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = schema.ValidateMessageResponse() - - client.validate_message( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_validate_message_rest_bad_request( - transport: str = "rest", request_type=schema.ValidateMessageRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within 
the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.validate_message(request) - - -def test_validate_message_rest_error(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.SchemaServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SchemaServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + # It is an error to provide a credentials file and a transport instance. + transport = transports.SchemaServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SchemaServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, ) # It is an error to provide an api_key and a transport instance. @@ -8529,34 +6716,2192 @@ def test_transport_get_channel(): assert channel -@pytest.mark.parametrize( - "transport_class", - [ - transports.SchemaServiceGrpcTransport, - transports.SchemaServiceGrpcAsyncIOTransport, - transports.SchemaServiceRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +@pytest.mark.parametrize( + "transport_class", + [ + transports.SchemaServiceGrpcTransport, + transports.SchemaServiceGrpcAsyncIOTransport, + transports.SchemaServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SchemaServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.create_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CreateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + call.return_value = schema.Schema() + client.get_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.GetSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_schemas_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + call.return_value = schema.ListSchemasResponse() + client.list_schemas(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemasRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_schema_revisions_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + call.return_value = schema.ListSchemaRevisionsResponse() + client.list_schema_revisions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemaRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + call.return_value = gp_schema.Schema() + client.commit_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CommitSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + call.return_value = schema.Schema() + client.rollback_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.RollbackSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_revision_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + call.return_value = schema.Schema() + client.delete_schema_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + call.return_value = None + client.delete_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_validate_schema_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + call.return_value = gp_schema.ValidateSchemaResponse() + client.validate_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.ValidateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_validate_message_empty_call_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + call.return_value = schema.ValidateMessageResponse() + client.validate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ValidateMessageRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = SchemaServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.create_schema(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CreateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.get_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.GetSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_schemas_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_schemas(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemasRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_schema_revisions_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_schema_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemaRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_commit_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.commit_schema(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CommitSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_rollback_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.rollback_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.RollbackSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_schema_revision_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + ) + await client.delete_schema_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_validate_schema_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gp_schema.ValidateSchemaResponse() + ) + await client.validate_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.ValidateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_validate_message_empty_call_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + schema.ValidateMessageResponse() + ) + await client.validate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ValidateMessageRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SchemaServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_schema_rest_bad_request(request_type=gp_schema.CreateSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CreateSchemaRequest, + dict, + ], +) +def test_create_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request_init["schema"] = { + "name": "name_value", + "type_": 1, + "definition": "definition_value", + "revision_id": "revision_id_value", + "revision_create_time": {"seconds": 751, "nanos": 543}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gp_schema.CreateSchemaRequest.meta.fields["schema"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["schema"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["schema"][field])): + del request_init["schema"][field][i][subfield] + else: + del 
request_init["schema"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_schema(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_create_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = 
gp_schema.CreateSchemaRequest.pb(gp_schema.CreateSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = gp_schema.Schema.to_json(gp_schema.Schema()) + req.return_value.content = return_value + + request = gp_schema.CreateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.Schema() + + client.create_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_schema_rest_bad_request(request_type=schema.GetSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.GetSchemaRequest, + dict, + ], +) +def test_get_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_get_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.GetSchemaRequest.pb(schema.GetSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = schema.Schema.to_json(schema.Schema()) + req.return_value.content = return_value + + request = schema.GetSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.get_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_schemas_rest_bad_request(request_type=schema.ListSchemasRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": 
"projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_schemas(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemasRequest, + dict, + ], +) +def test_list_schemas_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemasResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ListSchemasResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_schemas(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSchemasPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schemas_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schemas" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.ListSchemasRequest.pb(schema.ListSchemasRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = schema.ListSchemasResponse.to_json(schema.ListSchemasResponse()) + req.return_value.content = return_value + + request = schema.ListSchemasRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ListSchemasResponse() + + client.list_schemas( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_schema_revisions_rest_bad_request( + request_type=schema.ListSchemaRevisionsRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = 
request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_schema_revisions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ListSchemaRevisionsRequest, + dict, + ], +) +def test_list_schema_revisions_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ListSchemaRevisionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ListSchemaRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_schema_revisions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSchemaRevisionsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_schema_revisions_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schema_revisions" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_list_schema_revisions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.ListSchemaRevisionsRequest.pb( + schema.ListSchemaRevisionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = schema.ListSchemaRevisionsResponse.to_json( + schema.ListSchemaRevisionsResponse() + ) + req.return_value.content = return_value + + request = schema.ListSchemaRevisionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ListSchemaRevisionsResponse() + + client.list_schema_revisions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_commit_schema_rest_bad_request(request_type=gp_schema.CommitSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init 
= {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.commit_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.CommitSchemaRequest, + dict, + ], +) +def test_commit_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.Schema( + name="name_value", + type_=gp_schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.commit_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gp_schema.Schema) + assert response.name == "name_value" + assert response.type_ == gp_schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_commit_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_commit_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_commit_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gp_schema.CommitSchemaRequest.pb(gp_schema.CommitSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = gp_schema.Schema.to_json(gp_schema.Schema()) + req.return_value.content = return_value + + request = gp_schema.CommitSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.Schema() + + client.commit_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_rollback_schema_rest_bad_request(request_type=schema.RollbackSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy 
transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.rollback_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.RollbackSchemaRequest, + dict, + ], +) +def test_rollback_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.rollback_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_rollback_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_rollback_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_rollback_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.RollbackSchemaRequest.pb(schema.RollbackSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = schema.Schema.to_json(schema.Schema()) + req.return_value.content = return_value + + request = schema.RollbackSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.rollback_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_schema_revision_rest_bad_request( + request_type=schema.DeleteSchemaRevisionRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will 
satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_schema_revision(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRevisionRequest, + dict, + ], +) +def test_delete_schema_revision_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.Schema( + name="name_value", + type_=schema.Schema.Type.PROTOCOL_BUFFER, + definition="definition_value", + revision_id="revision_id_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.Schema.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_schema_revision(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.Schema) + assert response.name == "name_value" + assert response.type_ == schema.Schema.Type.PROTOCOL_BUFFER + assert response.definition == "definition_value" + assert response.revision_id == "revision_id_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_revision_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_delete_schema_revision" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema_revision" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.DeleteSchemaRevisionRequest.pb( + schema.DeleteSchemaRevisionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = schema.Schema.to_json(schema.Schema()) + req.return_value.content = return_value + + request = schema.DeleteSchemaRevisionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.Schema() + + client.delete_schema_revision( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_schema_rest_bad_request(request_type=schema.DeleteSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # 
send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.DeleteSchemaRequest, + dict, + ], +) +def test_delete_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/schemas/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_schema(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_delete_schema" + ) as pre: + pre.assert_not_called() + pb_message = schema.DeleteSchemaRequest.pb(schema.DeleteSchemaRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = schema.DeleteSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_validate_schema_rest_bad_request(request_type=gp_schema.ValidateSchemaRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.validate_schema(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gp_schema.ValidateSchemaRequest, + dict, + ], +) +def test_validate_schema_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gp_schema.ValidateSchemaResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gp_schema.ValidateSchemaResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.validate_schema(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gp_schema.ValidateSchemaResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_schema_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_schema" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_validate_schema" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = gp_schema.ValidateSchemaRequest.pb( + gp_schema.ValidateSchemaRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = gp_schema.ValidateSchemaResponse.to_json( + gp_schema.ValidateSchemaResponse() + ) + req.return_value.content = return_value + + request = gp_schema.ValidateSchemaRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gp_schema.ValidateSchemaResponse() + + client.validate_schema( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_validate_message_rest_bad_request(request_type=schema.ValidateMessageRequest): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http 
request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.validate_message(request) + + +@pytest.mark.parametrize( + "request_type", + [ + schema.ValidateMessageRequest, + dict, + ], +) +def test_validate_message_rest_call_success(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = schema.ValidateMessageResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = schema.ValidateMessageResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.validate_message(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, schema.ValidateMessageResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_validate_message_rest_interceptors(null_interceptor): + transport = transports.SchemaServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SchemaServiceRestInterceptor(), + ) + client = SchemaServiceClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_message" + ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "pre_validate_message" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = schema.ValidateMessageRequest.pb(schema.ValidateMessageRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = schema.ValidateMessageResponse.to_json( + schema.ValidateMessageResponse() + ) + req.return_value.content = return_value + + request = schema.ValidateMessageRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = schema.ValidateMessageResponse() + + client.validate_message( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call 
within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_initialize_client_w_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_schema), "__call__") as call: + client.create_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CreateSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_schema), "__call__") as call: + client.get_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.GetSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_schemas_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_schemas), "__call__") as call: + client.list_schemas(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemasRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_schema_revisions_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_schema_revisions), "__call__" + ) as call: + client.list_schema_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ListSchemaRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_commit_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.commit_schema), "__call__") as call: + client.commit_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.CommitSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_rollback_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.rollback_schema), "__call__") as call: + client.rollback_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.RollbackSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_delete_schema_revision_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_schema_revision), "__call__" + ) as call: + client.delete_schema_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_schema), "__call__") as call: + client.delete_schema(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.DeleteSchemaRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_validate_schema_empty_call_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_schema), "__call__") as call: + client.validate_schema(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gp_schema.ValidateSchemaRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = SchemaServiceClient.get_transport_class(transport_name)( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_validate_message_empty_call_rest(): + client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.validate_message), "__call__") as call: + client.validate_message(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = schema.ValidateMessageRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -9188,194 +9533,6 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/topics/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.GetIamPolicyRequest, - dict, - ], -) -def test_get_iam_policy_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/topics/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.SetIamPolicyRequest, - dict, - ], -) -def test_set_iam_policy_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest -): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/subscriptions/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, - ], -) -def test_test_iam_permissions_rest(request_type): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - def test_set_iam_policy(transport: str = "grpc"): client = SchemaServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9411,7 +9568,7 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9476,7 +9633,7 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9524,7 +9681,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -9577,7 +9734,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9643,7 +9800,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9691,7 +9848,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -9743,7 +9900,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -9810,7 +9967,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -9864,7 +10021,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = SchemaServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -9884,22 +10041,41 @@ async def test_test_iam_permissions_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = SchemaServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SchemaServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SchemaServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 4bef862ec680..4478942f7912 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -25,7 +25,7 @@ import grpc from grpc.experimental import aio -from 
collections.abc import Iterable +from collections.abc import Iterable, AsyncIterable from google.protobuf import json_format import json import math @@ -38,6 +38,13 @@ from requests.sessions import Session from google.protobuf import json_format +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + from google.api_core import client_options from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 @@ -62,10 +69,24 @@ import google.auth +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + # If default endpoint is localhost, then default mtls endpoint will be the same. # This method modifies the default endpoint so the client can produce a different # mtls endpoint for endpoint testing purposes. @@ -282,86 +303,6 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
-@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (SubscriberClient, transports.SubscriberGrpcTransport, "grpc"), - (SubscriberClient, transports.SubscriberRestTransport, "rest"), - ], -) -def test__validate_universe_domain(client_class, transport_class, transport_name): - client = client_class( - transport=transport_class(credentials=ga_credentials.AnonymousCredentials()) - ) - assert client._validate_universe_domain() == True - - # Test the case when universe is already validated. - assert client._validate_universe_domain() == True - - if transport_name == "grpc": - # Test the case where credentials are provided by the - # `local_channel_credentials`. The default universes in both match. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - client = client_class(transport=transport_class(channel=channel)) - assert client._validate_universe_domain() == True - - # Test the case where credentials do not exist: e.g. a transport is provided - # with no credentials. Validation should still succeed because there is no - # mismatch with non-existent credentials. - channel = grpc.secure_channel( - "http://localhost/", grpc.local_channel_credentials() - ) - transport = transport_class(channel=channel) - transport._credentials = None - client = client_class(transport=transport) - assert client._validate_universe_domain() == True - - # TODO: This is needed to cater for older versions of google-auth - # Make this test unconditional once the minimum supported version of - # google-auth becomes 2.23.0 or higher. - google_auth_major, google_auth_minor = [ - int(part) for part in google.auth.__version__.split(".")[0:2] - ] - if google_auth_major > 2 or (google_auth_major == 2 and google_auth_minor >= 23): - credentials = ga_credentials.AnonymousCredentials() - credentials._universe_domain = "foo.com" - # Test the case when there is a universe mismatch from the credentials. 
- client = client_class(transport=transport_class(credentials=credentials)) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (googleapis.com) does not match the universe domain found in the credentials (foo.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test the case when there is a universe mismatch from the client. - # - # TODO: Make this test unconditional once the minimum supported version of - # google-api-core becomes 2.15.0 or higher. - api_core_major, api_core_minor = [ - int(part) for part in api_core_version.__version__.split(".")[0:2] - ] - if api_core_major > 2 or (api_core_major == 2 and api_core_minor >= 15): - client = client_class( - client_options={"universe_domain": "bar.com"}, - transport=transport_class( - credentials=ga_credentials.AnonymousCredentials(), - ), - ) - with pytest.raises(ValueError) as excinfo: - client._validate_universe_domain() - assert ( - str(excinfo.value) - == "The configured universe domain (bar.com) does not match the universe domain found in the credentials (googleapis.com). If you haven't configured the universe domain explicitly, `googleapis.com` is the default." - ) - - # Test that ValueError is raised if universe_domain is provided via client options and credentials is None - with pytest.raises(ValueError): - client._compare_universes("foo.bar", None) - - @pytest.mark.parametrize( "client_class,transport_name", [ @@ -1145,27 +1086,6 @@ def test_create_subscription(request_type, transport: str = "grpc"): assert response.state == pubsub.Subscription.State.ACTIVE -def test_create_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.create_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() - - def test_create_subscription_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1239,39 +1159,6 @@ def test_create_subscription_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_subscription_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_subscription), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Subscription( - name="name_value", - topic="topic_value", - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter="filter_value", - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - ) - response = await client.create_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.Subscription() - - @pytest.mark.asyncio async def test_create_subscription_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1280,7 +1167,7 @@ async def test_create_subscription_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1319,7 +1206,7 @@ async def test_create_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.Subscription ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1405,7 +1292,7 @@ def test_create_subscription_field_headers(): @pytest.mark.asyncio async def test_create_subscription_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1492,7 +1379,7 @@ def test_create_subscription_flattened_error(): @pytest.mark.asyncio async def test_create_subscription_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1533,7 +1420,7 @@ async def test_create_subscription_flattened_async(): @pytest.mark.asyncio async def test_create_subscription_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -1600,25 +1487,6 @@ def test_get_subscription(request_type, transport: str = "grpc"): assert response.state == pubsub.Subscription.State.ACTIVE -def test_get_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() - - def test_get_subscription_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -1684,37 +1552,6 @@ def test_get_subscription_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_subscription_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Subscription( - name="name_value", - topic="topic_value", - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter="filter_value", - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - ) - response = await client.get_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSubscriptionRequest() - - @pytest.mark.asyncio async def test_get_subscription_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -1723,7 +1560,7 @@ async def test_get_subscription_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1762,7 +1599,7 @@ async def test_get_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -1844,7 +1681,7 @@ def test_get_subscription_field_headers(): @pytest.mark.asyncio async def test_get_subscription_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -1912,7 +1749,7 @@ def test_get_subscription_flattened_error(): @pytest.mark.asyncio async def test_get_subscription_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1939,7 +1776,7 @@ async def test_get_subscription_flattened_async(): @pytest.mark.asyncio async def test_get_subscription_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2005,27 +1842,6 @@ def test_update_subscription(request_type, transport: str = "grpc"): assert response.state == pubsub.Subscription.State.ACTIVE -def test_update_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() - - def test_update_subscription_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -2091,39 +1907,6 @@ def test_update_subscription_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_subscription_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_subscription), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Subscription( - name="name_value", - topic="topic_value", - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter="filter_value", - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - ) - response = await client.update_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSubscriptionRequest() - - @pytest.mark.asyncio async def test_update_subscription_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2132,7 +1915,7 @@ async def test_update_subscription_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2171,7 +1954,7 @@ async def test_update_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2257,7 +2040,7 @@ def test_update_subscription_field_headers(): @pytest.mark.asyncio async def test_update_subscription_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2334,7 
+2117,7 @@ def test_update_subscription_flattened_error(): @pytest.mark.asyncio async def test_update_subscription_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2367,7 +2150,7 @@ async def test_update_subscription_flattened_async(): @pytest.mark.asyncio async def test_update_subscription_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2418,27 +2201,6 @@ def test_list_subscriptions(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_subscriptions_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_subscriptions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() - - def test_list_subscriptions_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
@@ -2510,31 +2272,6 @@ def test_list_subscriptions_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_subscriptions_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_subscriptions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListSubscriptionsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_subscriptions() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSubscriptionsRequest() - - @pytest.mark.asyncio async def test_list_subscriptions_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -2543,7 +2280,7 @@ async def test_list_subscriptions_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2582,7 +2319,7 @@ async def test_list_subscriptions_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSubscriptionsRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -2652,7 +2389,7 @@ def test_list_subscriptions_field_headers(): @pytest.mark.asyncio async def test_list_subscriptions_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + 
credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -2726,7 +2463,7 @@ def test_list_subscriptions_flattened_error(): @pytest.mark.asyncio async def test_list_subscriptions_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2757,7 +2494,7 @@ async def test_list_subscriptions_flattened_async(): @pytest.mark.asyncio async def test_list_subscriptions_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -2871,7 +2608,7 @@ def test_list_subscriptions_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_subscriptions_async_pager(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2923,7 +2660,7 @@ async def test_list_subscriptions_async_pager(): @pytest.mark.asyncio async def test_list_subscriptions_async_pages(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3006,27 +2743,6 @@ def test_delete_subscription(request_type, transport: str = "grpc"): assert response is None -def test_delete_subscription_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_subscription), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() - - def test_delete_subscription_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3097,37 +2813,16 @@ def test_delete_subscription_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_delete_subscription_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_subscription), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_subscription() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSubscriptionRequest() - - -@pytest.mark.asyncio -async def test_delete_subscription_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) +async def test_delete_subscription_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 @@ -3164,7 +2859,7 @@ async def test_delete_subscription_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSubscriptionRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3229,7 +2924,7 @@ def test_delete_subscription_field_headers(): @pytest.mark.asyncio async def test_delete_subscription_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3301,7 +2996,7 @@ def test_delete_subscription_flattened_error(): @pytest.mark.asyncio async def test_delete_subscription_flattened_async(): client = SubscriberAsyncClient( - 
credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -3330,7 +3025,7 @@ async def test_delete_subscription_flattened_async(): @pytest.mark.asyncio async def test_delete_subscription_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3377,27 +3072,6 @@ def test_modify_ack_deadline(request_type, transport: str = "grpc"): assert response is None -def test_modify_ack_deadline_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.modify_ack_deadline() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() - - def test_modify_ack_deadline_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3467,27 +3141,6 @@ def test_modify_ack_deadline_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_modify_ack_deadline_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_ack_deadline), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.modify_ack_deadline() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyAckDeadlineRequest() - - @pytest.mark.asyncio async def test_modify_ack_deadline_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3496,7 +3149,7 @@ async def test_modify_ack_deadline_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3535,7 +3188,7 @@ async def test_modify_ack_deadline_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyAckDeadlineRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3600,7 +3253,7 @@ def test_modify_ack_deadline_field_headers(): @pytest.mark.asyncio async def test_modify_ack_deadline_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -3682,7 +3335,7 @@ def test_modify_ack_deadline_flattened_error(): @pytest.mark.asyncio async def test_modify_ack_deadline_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call 
within the gRPC stub, and fake the request. @@ -3719,7 +3372,7 @@ async def test_modify_ack_deadline_flattened_async(): @pytest.mark.asyncio async def test_modify_ack_deadline_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -3766,25 +3419,6 @@ def test_acknowledge(request_type, transport: str = "grpc"): assert response is None -def test_acknowledge_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.acknowledge() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() - - def test_acknowledge_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -3848,25 +3482,6 @@ def test_acknowledge_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_acknowledge_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.acknowledge() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.AcknowledgeRequest() - - @pytest.mark.asyncio async def test_acknowledge_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -3875,7 +3490,7 @@ async def test_acknowledge_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3914,7 +3529,7 @@ async def test_acknowledge_async( transport: str = "grpc_asyncio", request_type=pubsub.AcknowledgeRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -3975,7 +3590,7 @@ def test_acknowledge_field_headers(): @pytest.mark.asyncio async def test_acknowledge_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4048,7 +3663,7 @@ def test_acknowledge_flattened_error(): @pytest.mark.asyncio async def test_acknowledge_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4079,7 +3694,7 @@ async def test_acknowledge_flattened_async(): @pytest.mark.asyncio async def test_acknowledge_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4125,25 +3740,6 @@ def test_pull(request_type, transport: str = "grpc"): assert isinstance(response, pubsub.PullResponse) -def test_pull_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.pull), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.pull() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() - - def test_pull_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4207,32 +3803,13 @@ def test_pull_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_pull_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.pull), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) - response = await client.pull() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.PullRequest() - - @pytest.mark.asyncio async def test_pull_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4270,7 +3847,7 @@ async def test_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.PullRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4331,7 +3908,7 @@ def test_pull_field_headers(): @pytest.mark.asyncio async def test_pull_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4411,7 +3988,7 @@ def test_pull_flattened_error(): @pytest.mark.asyncio async def test_pull_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4448,7 +4025,7 @@ async def test_pull_flattened_async(): @pytest.mark.asyncio async def test_pull_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -4539,7 +4116,7 @@ async def test_streaming_pull_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4578,7 +4155,7 @@ async def test_streaming_pull_async( transport: str = "grpc_asyncio", request_type=pubsub.StreamingPullRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4646,27 +4223,6 @@ def test_modify_push_config(request_type, transport: str = "grpc"): assert response is None -def test_modify_push_config_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), "__call__" - ) as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.modify_push_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() - - def test_modify_push_config_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -4736,27 +4292,6 @@ def test_modify_push_config_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_modify_push_config_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.modify_push_config), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.modify_push_config() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ModifyPushConfigRequest() - - @pytest.mark.asyncio async def test_modify_push_config_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -4765,7 +4300,7 @@ async def test_modify_push_config_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4804,7 +4339,7 @@ async def test_modify_push_config_async( transport: str = "grpc_asyncio", request_type=pubsub.ModifyPushConfigRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -4869,7 +4404,7 @@ def test_modify_push_config_field_headers(): @pytest.mark.asyncio async def test_modify_push_config_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -4946,7 +4481,7 @@ def test_modify_push_config_flattened_error(): @pytest.mark.asyncio async def test_modify_push_config_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -4979,7 +4514,7 @@ async def test_modify_push_config_flattened_async(): @pytest.mark.asyncio async def test_modify_push_config_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5030,25 +4565,6 @@ def test_get_snapshot(request_type, transport: str = "grpc"): assert response.topic == "topic_value" -def test_get_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.get_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() - - def test_get_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5112,30 +4628,6 @@ def test_get_snapshot_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_get_snapshot_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Snapshot( - name="name_value", - topic="topic_value", - ) - ) - response = await client.get_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.GetSnapshotRequest() - - @pytest.mark.asyncio async def test_get_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -5144,7 +4636,7 @@ async def test_get_snapshot_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5183,7 +4675,7 @@ async def test_get_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.GetSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5251,7 +4743,7 @@ def test_get_snapshot_field_headers(): @pytest.mark.asyncio async def test_get_snapshot_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5319,7 +4811,7 @@ def test_get_snapshot_flattened_error(): @pytest.mark.asyncio async def test_get_snapshot_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5346,7 +4838,7 @@ async def test_get_snapshot_flattened_async(): @pytest.mark.asyncio async def test_get_snapshot_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5394,25 +4886,6 @@ def test_list_snapshots(request_type, transport: str = "grpc"): assert response.next_page_token == "next_page_token_value" -def test_list_snapshots_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.list_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() - - def test_list_snapshots_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -5478,29 +4951,6 @@ def test_list_snapshots_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_list_snapshots_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.ListSnapshotsResponse( - next_page_token="next_page_token_value", - ) - ) - response = await client.list_snapshots() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.ListSnapshotsRequest() - - @pytest.mark.asyncio async def test_list_snapshots_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -5509,7 +4959,7 @@ async def test_list_snapshots_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5548,7 +4998,7 @@ async def test_list_snapshots_async( transport: str = "grpc_asyncio", request_type=pubsub.ListSnapshotsRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -5614,7 +5064,7 @@ def test_list_snapshots_field_headers(): @pytest.mark.asyncio async def test_list_snapshots_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -5684,7 +5134,7 @@ def test_list_snapshots_flattened_error(): @pytest.mark.asyncio async def test_list_snapshots_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -5713,7 +5163,7 @@ async def test_list_snapshots_flattened_async(): @pytest.mark.asyncio async def test_list_snapshots_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -5823,7 +5273,7 @@ def test_list_snapshots_pages(transport_name: str = "grpc"): @pytest.mark.asyncio async def test_list_snapshots_async_pager(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5873,7 +5323,7 @@ async def test_list_snapshots_async_pager(): @pytest.mark.asyncio async def test_list_snapshots_async_pages(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. @@ -5957,39 +5407,20 @@ def test_create_snapshot(request_type, transport: str = "grpc"): assert response.topic == "topic_value" -def test_create_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. +def test_create_snapshot_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. 
- ) - client.create_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() - - -def test_create_snapshot_non_empty_request_with_auto_populated_field(): - # This test is a coverage failsafe to make sure that UUID4 fields are - # automatically populated, according to AIP-4235, with non-empty requests. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Populate all string fields in the request which are not UUID4 - # since we want to check that UUID4 are populated automatically - # if they meet the requirements of AIP 4235. - request = pubsub.CreateSnapshotRequest( - name="name_value", - subscription="subscription_value", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = pubsub.CreateSnapshotRequest( + name="name_value", + subscription="subscription_value", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -6041,30 +5472,6 @@ def test_create_snapshot_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_create_snapshot_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Snapshot( - name="name_value", - topic="topic_value", - ) - ) - response = await client.create_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.CreateSnapshotRequest() - - @pytest.mark.asyncio async def test_create_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6073,7 +5480,7 @@ async def test_create_snapshot_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6112,7 +5519,7 @@ async def test_create_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.CreateSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6180,7 +5587,7 @@ def test_create_snapshot_field_headers(): @pytest.mark.asyncio async def test_create_snapshot_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6253,7 +5660,7 @@ def test_create_snapshot_flattened_error(): @pytest.mark.asyncio async def test_create_snapshot_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6284,7 +5691,7 @@ async def test_create_snapshot_flattened_async(): @pytest.mark.asyncio async def test_create_snapshot_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6335,25 +5742,6 @@ def test_update_snapshot(request_type, transport: str = "grpc"): assert response.topic == "topic_value" -def test_update_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.update_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() - - def test_update_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6413,30 +5801,6 @@ def test_update_snapshot_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_update_snapshot_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - pubsub.Snapshot( - name="name_value", - topic="topic_value", - ) - ) - response = await client.update_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.UpdateSnapshotRequest() - - @pytest.mark.asyncio async def test_update_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6445,7 +5809,7 @@ async def test_update_snapshot_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6484,7 +5848,7 @@ async def test_update_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.UpdateSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6552,7 +5916,7 @@ def test_update_snapshot_field_headers(): @pytest.mark.asyncio async def test_update_snapshot_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6625,7 +5989,7 @@ def test_update_snapshot_flattened_error(): @pytest.mark.asyncio async def test_update_snapshot_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -6656,7 +6020,7 @@ async def test_update_snapshot_flattened_async(): @pytest.mark.asyncio async def test_update_snapshot_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -6702,25 +6066,6 @@ def test_delete_snapshot(request_type, transport: str = "grpc"): assert response is None -def test_delete_snapshot_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.delete_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() - - def test_delete_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -6784,25 +6129,6 @@ def test_delete_snapshot_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_delete_snapshot_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_snapshot() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.DeleteSnapshotRequest() - - @pytest.mark.asyncio async def test_delete_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", @@ -6811,7 +6137,7 @@ async def test_delete_snapshot_async_use_cached_wrapped_rpc( # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6850,7 +6176,7 @@ async def test_delete_snapshot_async( transport: str = "grpc_asyncio", request_type=pubsub.DeleteSnapshotRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -6911,7 +6237,7 @@ def test_delete_snapshot_field_headers(): @pytest.mark.asyncio async def test_delete_snapshot_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -6979,7 +6305,7 @@ def test_delete_snapshot_flattened_error(): @pytest.mark.asyncio async def test_delete_snapshot_flattened_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -7006,7 +6332,7 @@ async def test_delete_snapshot_flattened_async(): @pytest.mark.asyncio async def test_delete_snapshot_flattened_error_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened @@ -7051,25 +6377,6 @@ def test_seek(request_type, transport: str = "grpc"): assert isinstance(response, pubsub.SeekResponse) -def test_seek_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.seek), "__call__") as call: - call.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client.seek() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() - - def test_seek_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. @@ -7135,32 +6442,13 @@ def test_seek_use_cached_wrapped_rpc(): assert mock_rpc.call_count == 2 -@pytest.mark.asyncio -async def test_seek_empty_call_async(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.seek), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) - response = await client.seek() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == pubsub.SeekRequest() - - @pytest.mark.asyncio async def test_seek_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7198,7 +6486,7 @@ async def test_seek_async( transport: str = "grpc_asyncio", request_type=pubsub.SeekRequest ): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -7259,7 +6547,7 @@ def test_seek_field_headers(): @pytest.mark.asyncio async def test_seek_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -7286,62 +6574,6 @@ async def test_seek_field_headers_async(): ) in kw["metadata"] -@pytest.mark.parametrize( - "request_type", - [ - pubsub.Subscription, - dict, - ], -) -def test_create_subscription_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.Subscription( - name="name_value", - topic="topic_value", - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter="filter_value", - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_subscription(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - def test_create_subscription_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7476,96 +6708,19 @@ def test_create_subscription_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_subscription_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_create_subscription_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), + transport="rest", ) - client = SubscriberClient(transport=transport) - with 
mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_create_subscription" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_create_subscription" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.Subscription.pb(pubsub.Subscription()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Subscription.to_json(pubsub.Subscription()) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription() - request = pubsub.Subscription() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Subscription() - - client.create_subscription( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_subscription_rest_bad_request( - transport: str = "rest", request_type=pubsub.Subscription -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_subscription(request) - - -def test_create_subscription_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.Subscription() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/subscriptions/sample2"} + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/subscriptions/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -7614,68 +6769,6 @@ def test_create_subscription_rest_flattened_error(transport: str = "rest"): ) -def test_create_subscription_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.GetSubscriptionRequest, - dict, - ], -) -def test_get_subscription_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.Subscription( - name="name_value", - topic="topic_value", - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter="filter_value", - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_subscription(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - def test_get_subscription_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -7797,83 +6890,6 @@ def test_get_subscription_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("subscription",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_subscription_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), 
"request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_get_subscription" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_get_subscription" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.GetSubscriptionRequest.pb(pubsub.GetSubscriptionRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Subscription.to_json(pubsub.Subscription()) - - request = pubsub.GetSubscriptionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Subscription() - - client.get_subscription( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_subscription_rest_bad_request( - transport: str = "rest", request_type=pubsub.GetSubscriptionRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_subscription(request) - - def test_get_subscription_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -7930,68 +6946,6 @@ def test_get_subscription_rest_flattened_error(transport: str = "rest"): ) -def test_get_subscription_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.UpdateSubscriptionRequest, - dict, - ], -) -def test_update_subscription_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.Subscription( - name="name_value", - topic="topic_value", - ack_deadline_seconds=2066, - retain_acked_messages=True, - enable_message_ordering=True, - filter="filter_value", - detached=True, - enable_exactly_once_delivery=True, - state=pubsub.Subscription.State.ACTIVE, - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Subscription.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_subscription(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Subscription) - assert response.name == "name_value" - assert response.topic == "topic_value" - assert response.ack_deadline_seconds == 2066 - assert response.retain_acked_messages is True - assert response.enable_message_ordering is True - assert response.filter == "filter_value" - assert response.detached is True - assert response.enable_exactly_once_delivery is True - assert response.state == pubsub.Subscription.State.ACTIVE - - def test_update_subscription_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8119,89 +7073,10 @@ def test_update_subscription_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_subscription_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_update_subscription_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - 
type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_update_subscription" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_update_subscription" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.UpdateSubscriptionRequest.pb( - pubsub.UpdateSubscriptionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Subscription.to_json(pubsub.Subscription()) - - request = pubsub.UpdateSubscriptionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Subscription() - - client.update_subscription( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_subscription_rest_bad_request( - transport: str = "rest", request_type=pubsub.UpdateSubscriptionRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_subscription(request) - - -def test_update_subscription_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="rest", ) # Mock the http request call within the method and fake a response. @@ -8259,52 +7134,6 @@ def test_update_subscription_rest_flattened_error(transport: str = "rest"): ) -def test_update_subscription_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ListSubscriptionsRequest, - dict, - ], -) -def test_list_subscriptions_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.ListSubscriptionsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.ListSubscriptionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_subscriptions(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSubscriptionsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_subscriptions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8443,87 +7272,6 @@ def test_list_subscriptions_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_subscriptions_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_list_subscriptions" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_list_subscriptions" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.ListSubscriptionsRequest.pb( - pubsub.ListSubscriptionsRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - 
req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.ListSubscriptionsResponse.to_json( - pubsub.ListSubscriptionsResponse() - ) - - request = pubsub.ListSubscriptionsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.ListSubscriptionsResponse() - - client.list_subscriptions( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_subscriptions_rest_bad_request( - transport: str = "rest", request_type=pubsub.ListSubscriptionsRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_subscriptions(request) - - def test_list_subscriptions_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8640,41 +7388,6 @@ def test_list_subscriptions_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - pubsub.DeleteSubscriptionRequest, - dict, - ], -) -def test_delete_subscription_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_subscription(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - def test_delete_subscription_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -8795,92 +7508,19 @@ def test_delete_subscription_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("subscription",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_subscription_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_delete_subscription_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), + transport="rest", ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_delete_subscription" - ) as pre: - pre.assert_not_called() - pb_message = pubsub.DeleteSubscriptionRequest.pb( - pubsub.DeleteSubscriptionRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - request = pubsub.DeleteSubscriptionRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_subscription( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_subscription_rest_bad_request( - transport: str = "rest", request_type=pubsub.DeleteSubscriptionRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_subscription(request) - - -def test_delete_subscription_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} # get truthy value for each flattened field mock_args = dict( @@ -8922,47 +7562,6 @@ def test_delete_subscription_rest_flattened_error(transport: str = "rest"): ) -def test_delete_subscription_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ModifyAckDeadlineRequest, - dict, - ], -) -def test_modify_ack_deadline_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.modify_ack_deadline(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - def test_modify_ack_deadline_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9101,79 +7700,6 @@ def test_modify_ack_deadline_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_modify_ack_deadline_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_modify_ack_deadline" - ) as pre: - pre.assert_not_called() - pb_message = pubsub.ModifyAckDeadlineRequest.pb( - pubsub.ModifyAckDeadlineRequest() - ) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = pubsub.ModifyAckDeadlineRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.modify_ack_deadline( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_modify_ack_deadline_rest_bad_request( - transport: str = "rest", request_type=pubsub.ModifyAckDeadlineRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call 
within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.modify_ack_deadline(request) - - def test_modify_ack_deadline_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9233,47 +7759,6 @@ def test_modify_ack_deadline_rest_flattened_error(transport: str = "rest"): ) -def test_modify_ack_deadline_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.AcknowledgeRequest, - dict, - ], -) -def test_acknowledge_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.acknowledge(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - def test_acknowledge_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9401,106 +7886,35 @@ def test_acknowledge_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_acknowledge_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_acknowledge_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), + transport="rest", ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_acknowledge" - ) as pre: - pre.assert_not_called() - pb_message = pubsub.AcknowledgeRequest.pb(pubsub.AcknowledgeRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None - request = pubsub.AcknowledgeRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} - client.acknowledge( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + ack_ids=["ack_ids_value"], ) + mock_args.update(sample_request) - pre.assert_called_once() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - -def test_acknowledge_rest_bad_request( - transport: str = "rest", request_type=pubsub.AcknowledgeRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.acknowledge(request) - - -def test_acknowledge_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - subscription="subscription_value", - ack_ids=["ack_ids_value"], - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.acknowledge(**mock_args) + client.acknowledge(**mock_args) # Establish that the underlying call was made with the expected # request object values. @@ -9529,49 +7943,6 @@ def test_acknowledge_rest_flattened_error(transport: str = "rest"): ) -def test_acknowledge_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.PullRequest, - dict, - ], -) -def test_pull_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.PullResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.PullResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.pull(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.PullResponse) - - def test_pull_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -9702,83 +8073,6 @@ def test_pull_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_pull_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_pull" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_pull" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.PullRequest.pb(pubsub.PullRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.PullResponse.to_json(pubsub.PullResponse()) - - request = pubsub.PullRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] 
- pre.return_value = request, metadata - post.return_value = pubsub.PullResponse() - - client.pull( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_pull_rest_bad_request( - transport: str = "rest", request_type=pubsub.PullRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.pull(request) - - def test_pull_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9840,12 +8134,6 @@ def test_pull_rest_flattened_error(transport: str = "rest"): ) -def test_pull_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - def test_streaming_pull_rest_no_http_options(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -9857,41 +8145,6 @@ def test_streaming_pull_rest_no_http_options(): client.streaming_pull(requests) -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ModifyPushConfigRequest, - dict, - ], -) -def test_modify_push_config_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock 
the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.modify_push_config(request) - - # Establish that the response is the type that we expect. - assert response is None - - def test_modify_push_config_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10021,106 +8274,35 @@ def test_modify_push_config_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_modify_push_config_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_modify_push_config_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), + transport="rest", ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_modify_push_config" - ) as pre: - pre.assert_not_called() - pb_message = pubsub.ModifyPushConfigRequest.pb(pubsub.ModifyPushConfigRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None - request = pubsub.ModifyPushConfigRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata + # get arguments that satisfy an http rule for this method + sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} - client.modify_push_config( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], + # get truthy value for each flattened field + mock_args = dict( + subscription="subscription_value", + push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), ) + mock_args.update(sample_request) - pre.assert_called_once() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value - -def test_modify_push_config_rest_bad_request( - transport: str = "rest", request_type=pubsub.ModifyPushConfigRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.modify_push_config(request) - - -def test_modify_push_config_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # get arguments that satisfy an http rule for this method - sample_request = {"subscription": "projects/sample1/subscriptions/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - subscription="subscription_value", - push_config=pubsub.PushConfig(push_endpoint="push_endpoint_value"), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - client.modify_push_config(**mock_args) + client.modify_push_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
@@ -10149,54 +8331,6 @@ def test_modify_push_config_rest_flattened_error(transport: str = "rest"): ) -def test_modify_push_config_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.GetSnapshotRequest, - dict, - ], -) -def test_get_snapshot_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"snapshot": "projects/sample1/snapshots/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.Snapshot( - name="name_value", - topic="topic_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.get_snapshot(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" - - def test_get_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10314,83 +8448,6 @@ def test_get_snapshot_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("snapshot",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_snapshot_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_get_snapshot" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_get_snapshot" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.GetSnapshotRequest.pb(pubsub.GetSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Snapshot.to_json(pubsub.Snapshot()) - - request = pubsub.GetSnapshotRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Snapshot() - - client.get_snapshot( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_get_snapshot_rest_bad_request( - transport: str = "rest", 
request_type=pubsub.GetSnapshotRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"snapshot": "projects/sample1/snapshots/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_snapshot(request) - - def test_get_snapshot_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -10446,52 +8503,6 @@ def test_get_snapshot_rest_flattened_error(transport: str = "rest"): ) -def test_get_snapshot_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.ListSnapshotsRequest, - dict, - ], -) -def test_list_snapshots_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"project": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.ListSnapshotsResponse( - next_page_token="next_page_token_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.ListSnapshotsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.list_snapshots(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListSnapshotsPager) - assert response.next_page_token == "next_page_token_value" - - def test_list_snapshots_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10624,95 +8635,16 @@ def test_list_snapshots_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_snapshots_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_list_snapshots_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), + transport="rest", ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_list_snapshots" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_list_snapshots" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.ListSnapshotsRequest.pb(pubsub.ListSnapshotsRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - 
} - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.ListSnapshotsResponse.to_json( - pubsub.ListSnapshotsResponse() - ) - - request = pubsub.ListSnapshotsRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.ListSnapshotsResponse() - - client.list_snapshots( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_list_snapshots_rest_bad_request( - transport: str = "rest", request_type=pubsub.ListSnapshotsRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"project": "projects/sample1"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.list_snapshots(request) - - -def test_list_snapshots_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.ListSnapshotsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListSnapshotsResponse() # get arguments that satisfy an http rule for this method sample_request = {"project": "projects/sample1"} @@ -10819,48 +8751,6 @@ def test_list_snapshots_rest_pager(transport: str = "rest"): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize( - "request_type", - [ - pubsub.CreateSnapshotRequest, - dict, - ], -) -def test_create_snapshot_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/snapshots/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.Snapshot( - name="name_value", - topic="topic_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.create_snapshot(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" - - def test_create_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -10993,83 +8883,6 @@ def test_create_snapshot_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_snapshot_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_create_snapshot" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_create_snapshot" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.CreateSnapshotRequest.pb(pubsub.CreateSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Snapshot.to_json(pubsub.Snapshot()) - - request = pubsub.CreateSnapshotRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Snapshot() - - client.create_snapshot( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_create_snapshot_rest_bad_request( - transport: str = "rest", 
request_type=pubsub.CreateSnapshotRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/snapshots/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.create_snapshot(request) - - def test_create_snapshot_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11127,54 +8940,6 @@ def test_create_snapshot_rest_flattened_error(transport: str = "rest"): ) -def test_create_snapshot_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.UpdateSnapshotRequest, - dict, - ], -) -def test_update_snapshot_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.Snapshot( - name="name_value", - topic="topic_value", - ) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.Snapshot.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.update_snapshot(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, pubsub.Snapshot) - assert response.name == "name_value" - assert response.topic == "topic_value" - - def test_update_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11298,96 +9063,19 @@ def test_update_snapshot_rest_unset_required_fields(): ) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_snapshot_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( +def test_update_snapshot_rest_flattened(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), + transport="rest", ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_update_snapshot" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_update_snapshot" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.UpdateSnapshotRequest.pb(pubsub.UpdateSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - 
req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.Snapshot.to_json(pubsub.Snapshot()) - request = pubsub.UpdateSnapshotRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.Snapshot() + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot() - client.update_snapshot( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - post.assert_called_once() - - -def test_update_snapshot_rest_bad_request( - transport: str = "rest", request_type=pubsub.UpdateSnapshotRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.update_snapshot(request) - - -def test_update_snapshot_rest_flattened(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = pubsub.Snapshot() - - # get arguments that satisfy an http rule for this method - sample_request = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + # get arguments that satisfy an http rule for this method + sample_request = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} # get truthy value for each flattened field mock_args = dict( @@ -11433,47 +9121,6 @@ def test_update_snapshot_rest_flattened_error(transport: str = "rest"): ) -def test_update_snapshot_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.DeleteSnapshotRequest, - dict, - ], -) -def test_delete_snapshot_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"snapshot": "projects/sample1/snapshots/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = None - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = "" - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.delete_snapshot(request) - - # Establish that the response is the type that we expect. 
- assert response is None - - def test_delete_snapshot_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11590,77 +9237,6 @@ def test_delete_snapshot_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("snapshot",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_snapshot_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), - ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_delete_snapshot" - ) as pre: - pre.assert_not_called() - pb_message = pubsub.DeleteSnapshotRequest.pb(pubsub.DeleteSnapshotRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - - request = pubsub.DeleteSnapshotRequest() - metadata = [ - ("key", "val"), - ("cephalopod", "squid"), - ] - pre.return_value = request, metadata - - client.delete_snapshot( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], - ) - - pre.assert_called_once() - - -def test_delete_snapshot_rest_bad_request( - transport: str = "rest", request_type=pubsub.DeleteSnapshotRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"snapshot": "projects/sample1/snapshots/sample2"} - request = request_type(**request_init) - - # Mock the http 
request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.delete_snapshot(request) - - def test_delete_snapshot_rest_flattened(): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -11714,49 +9290,6 @@ def test_delete_snapshot_rest_flattened_error(transport: str = "rest"): ) -def test_delete_snapshot_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -@pytest.mark.parametrize( - "request_type", - [ - pubsub.SeekRequest, - dict, - ], -) -def test_seek_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = pubsub.SeekResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = pubsub.SeekResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - response = client.seek(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pubsub.SeekResponse) - - def test_seek_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -11875,115 +9408,32 @@ def test_seek_rest_unset_required_fields(): assert set(unset_fields) == (set(()) & set(("subscription",))) -@pytest.mark.parametrize("null_interceptor", [True, False]) -def test_seek_rest_interceptors(null_interceptor): - transport = transports.SubscriberRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - interceptor=None - if null_interceptor - else transports.SubscriberRestInterceptor(), +def test_streaming_pull_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_pull({}) + assert "Method StreamingPull is not available over REST transport" in str( + not_implemented_error.value ) - client = SubscriberClient(transport=transport) - with mock.patch.object( - type(client.transport._session), "request" - ) as req, mock.patch.object( - path_template, "transcode" - ) as transcode, mock.patch.object( - transports.SubscriberRestInterceptor, "post_seek" - ) as post, mock.patch.object( - transports.SubscriberRestInterceptor, "pre_seek" - ) as pre: - pre.assert_not_called() - post.assert_not_called() - pb_message = pubsub.SeekRequest.pb(pubsub.SeekRequest()) - transcode.return_value = { - "method": "post", - "uri": "my_uri", - "body": pb_message, - "query_params": pb_message, - } - - req.return_value = Response() - req.return_value.status_code = 200 - req.return_value.request = PreparedRequest() - req.return_value._content = pubsub.SeekResponse.to_json(pubsub.SeekResponse()) - request = pubsub.SeekRequest() - metadata = [ - ("key", "val"), - ("cephalopod", 
"squid"), - ] - pre.return_value = request, metadata - post.return_value = pubsub.SeekResponse() - client.seek( - request, - metadata=[ - ("key", "val"), - ("cephalopod", "squid"), - ], +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.SubscriberGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - pre.assert_called_once() - post.assert_called_once() - - -def test_seek_rest_bad_request( - transport: str = "rest", request_type=pubsub.SeekRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # send a request that will satisfy transcoding - request_init = {"subscription": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - - # Mock the http request call within the method and fake a BadRequest error. - with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.seek(request) - - -def test_seek_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - -def test_streaming_pull_rest_error(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # Since a `google.api.http` annotation is required for using a rest transport - # method, this should error. 
- with pytest.raises(NotImplementedError) as not_implemented_error: - client.streaming_pull({}) - assert "Method StreamingPull is not available over REST transport" in str( - not_implemented_error.value - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.SubscriberGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.SubscriberGrpcTransport( + # It is an error to provide a credentials file and a transport instance. + transport = transports.SubscriberGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), ) with pytest.raises(ValueError): @@ -12047,34 +9497,3039 @@ def test_transport_get_channel(): assert channel -@pytest.mark.parametrize( - "transport_class", - [ - transports.SubscriberGrpcTransport, - transports.SubscriberGrpcAsyncIOTransport, - transports.SubscriberRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() +@pytest.mark.parametrize( + "transport_class", + [ + transports.SubscriberGrpcTransport, + transports.SubscriberGrpcAsyncIOTransport, + transports.SubscriberRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = SubscriberClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + client.create_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Subscription() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + call.return_value = pubsub.Subscription() + client.get_subscription(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + call.return_value = pubsub.Subscription() + client.update_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_subscriptions_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + call.return_value = pubsub.ListSubscriptionsResponse() + client.list_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_subscription_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + call.return_value = None + client.delete_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_ack_deadline_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + call.return_value = None + client.modify_ack_deadline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyAckDeadlineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_acknowledge_empty_call_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + call.return_value = None + client.acknowledge(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.AcknowledgeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
# ---------------------------------------------------------------------------
# "Empty call" coverage tests: invoking a client method with request=None and
# no flattened fields must still reach the transport stub with a
# default-constructed request message.
# ---------------------------------------------------------------------------


def test_pull_empty_call_grpc():
    """gRPC: pull(request=None) sends a default PullRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Stub the transport-level RPC and invoke the method without a request.
    with mock.patch.object(type(client.transport.pull), "__call__") as rpc:
        rpc.return_value = pubsub.PullResponse()
        client.pull(request=None)

        # The stub must have received a default-constructed request message.
        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.PullRequest()


def test_modify_push_config_empty_call_grpc():
    """gRPC: modify_push_config(request=None) sends a default request."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(
        type(client.transport.modify_push_config), "__call__"
    ) as rpc:
        rpc.return_value = None
        client.modify_push_config(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.ModifyPushConfigRequest()


def test_get_snapshot_empty_call_grpc():
    """gRPC: get_snapshot(request=None) sends a default GetSnapshotRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.get_snapshot), "__call__") as rpc:
        rpc.return_value = pubsub.Snapshot()
        client.get_snapshot(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.GetSnapshotRequest()


def test_list_snapshots_empty_call_grpc():
    """gRPC: list_snapshots(request=None) sends a default ListSnapshotsRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.list_snapshots), "__call__") as rpc:
        rpc.return_value = pubsub.ListSnapshotsResponse()
        client.list_snapshots(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.ListSnapshotsRequest()


def test_create_snapshot_empty_call_grpc():
    """gRPC: create_snapshot(request=None) sends a default CreateSnapshotRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.create_snapshot), "__call__") as rpc:
        rpc.return_value = pubsub.Snapshot()
        client.create_snapshot(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.CreateSnapshotRequest()


def test_update_snapshot_empty_call_grpc():
    """gRPC: update_snapshot(request=None) sends a default UpdateSnapshotRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.update_snapshot), "__call__") as rpc:
        rpc.return_value = pubsub.Snapshot()
        client.update_snapshot(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.UpdateSnapshotRequest()


def test_delete_snapshot_empty_call_grpc():
    """gRPC: delete_snapshot(request=None) sends a default DeleteSnapshotRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as rpc:
        rpc.return_value = None
        client.delete_snapshot(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.DeleteSnapshotRequest()


def test_seek_empty_call_grpc():
    """gRPC: seek(request=None) sends a default SeekRequest."""
    client = SubscriberClient(
        transport="grpc",
        credentials=ga_credentials.AnonymousCredentials(),
    )

    with mock.patch.object(type(client.transport.seek), "__call__") as rpc:
        rpc.return_value = pubsub.SeekResponse()
        client.seek(request=None)

        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.SeekRequest()


def test_transport_kind_grpc_asyncio():
    """The asyncio transport class reports kind == "grpc_asyncio"."""
    transport_cls = SubscriberAsyncClient.get_transport_class("grpc_asyncio")
    transport = transport_cls(credentials=async_anonymous_credentials())
    assert transport.kind == "grpc_asyncio"


def test_initialize_client_w_grpc_asyncio():
    """An async client can be constructed over the grpc_asyncio transport."""
    client = SubscriberAsyncClient(
        transport="grpc_asyncio", credentials=async_anonymous_credentials()
    )
    assert client is not None


@pytest.mark.asyncio
async def test_create_subscription_empty_call_grpc_asyncio():
    """grpc_asyncio: create_subscription(request=None) sends a default Subscription."""
    client = SubscriberAsyncClient(
        transport="grpc_asyncio",
        credentials=async_anonymous_credentials(),
    )

    with mock.patch.object(
        type(client.transport.create_subscription), "__call__"
    ) as rpc:
        # Fake a fully-populated response so client-side parsing is exercised.
        rpc.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            pubsub.Subscription(
                name="name_value",
                topic="topic_value",
                ack_deadline_seconds=2066,
                retain_acked_messages=True,
                enable_message_ordering=True,
                filter="filter_value",
                detached=True,
                enable_exactly_once_delivery=True,
                state=pubsub.Subscription.State.ACTIVE,
            )
        )
        await client.create_subscription(request=None)

        # CreateSubscription's request message is Subscription itself.
        rpc.assert_called()
        assert rpc.mock_calls[0].args[0] == pubsub.Subscription()


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
+@pytest.mark.asyncio +async def test_get_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + await client.get_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + ) + await client.update_subscription(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_subscriptions_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_subscription_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_subscription(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_modify_ack_deadline_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_ack_deadline(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyAckDeadlineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_acknowledge_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.acknowledge(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.AcknowledgeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_pull_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.PullResponse()) + await client.pull(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PullRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_modify_push_config_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.modify_push_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyPushConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + await client.get_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_snapshots_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + await client.create_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.CreateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + ) + await client.update_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_snapshot_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_snapshot(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_seek_empty_call_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(pubsub.SeekResponse()) + await client.seek(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.SeekRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = SubscriberClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_subscription_rest_bad_request(request_type=pubsub.Subscription): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.Subscription, + dict, + ], +) +def test_create_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_create_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.Subscription.pb(pubsub.Subscription()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Subscription.to_json(pubsub.Subscription()) + req.return_value.content = return_value + + request = pubsub.Subscription() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + + client.create_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def 
test_get_subscription_rest_bad_request(request_type=pubsub.GetSubscriptionRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSubscriptionRequest, + dict, + ], +) +def test_get_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_subscription(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_get_subscription" + ) as pre: + pre.assert_not_called() + 
post.assert_not_called() + pb_message = pubsub.GetSubscriptionRequest.pb(pubsub.GetSubscriptionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Subscription.to_json(pubsub.Subscription()) + req.return_value.content = return_value + + request = pubsub.GetSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + + client.get_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_subscription_rest_bad_request( + request_type=pubsub.UpdateSubscriptionRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSubscriptionRequest, + dict, + ], +) +def test_update_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": {"name": "projects/sample1/subscriptions/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Subscription( + name="name_value", + topic="topic_value", + ack_deadline_seconds=2066, + retain_acked_messages=True, + enable_message_ordering=True, + filter="filter_value", + detached=True, + enable_exactly_once_delivery=True, + state=pubsub.Subscription.State.ACTIVE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Subscription.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_subscription(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Subscription) + assert response.name == "name_value" + assert response.topic == "topic_value" + assert response.ack_deadline_seconds == 2066 + assert response.retain_acked_messages is True + assert response.enable_message_ordering is True + assert response.filter == "filter_value" + assert response.detached is True + assert response.enable_exactly_once_delivery is True + assert response.state == pubsub.Subscription.State.ACTIVE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_subscription" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_update_subscription" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.UpdateSubscriptionRequest.pb( + pubsub.UpdateSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Subscription.to_json(pubsub.Subscription()) + req.return_value.content = return_value + + request = pubsub.UpdateSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Subscription() + + client.update_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + + +def test_list_subscriptions_rest_bad_request( + request_type=pubsub.ListSubscriptionsRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_subscriptions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSubscriptionsRequest, + dict, + ], +) +def test_list_subscriptions_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = pubsub.ListSubscriptionsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListSubscriptionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_subscriptions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListSubscriptionsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_subscriptions_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_subscriptions" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_list_subscriptions" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListSubscriptionsRequest.pb( + pubsub.ListSubscriptionsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.ListSubscriptionsResponse.to_json( + pubsub.ListSubscriptionsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListSubscriptionsRequest() + metadata = [ + ("key", "val"), + 
("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListSubscriptionsResponse() + + client.list_subscriptions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_subscription_rest_bad_request( + request_type=pubsub.DeleteSubscriptionRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_subscription(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSubscriptionRequest, + dict, + ], +) +def test_delete_subscription_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_subscription(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_subscription_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_delete_subscription" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteSubscriptionRequest.pb( + pubsub.DeleteSubscriptionRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = pubsub.DeleteSubscriptionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_subscription( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_modify_ack_deadline_rest_bad_request( + request_type=pubsub.ModifyAckDeadlineRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + 
+ # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.modify_ack_deadline(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyAckDeadlineRequest, + dict, + ], +) +def test_modify_ack_deadline_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.modify_ack_deadline(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_modify_ack_deadline_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_modify_ack_deadline" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.ModifyAckDeadlineRequest.pb( + pubsub.ModifyAckDeadlineRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = pubsub.ModifyAckDeadlineRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.modify_ack_deadline( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_acknowledge_rest_bad_request(request_type=pubsub.AcknowledgeRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.acknowledge(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.AcknowledgeRequest, + dict, + ], +) +def test_acknowledge_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.acknowledge(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_acknowledge_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_acknowledge" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.AcknowledgeRequest.pb(pubsub.AcknowledgeRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = pubsub.AcknowledgeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.acknowledge( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_pull_rest_bad_request(request_type=pubsub.PullRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.pull(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.PullRequest, + dict, + ], +) +def test_pull_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.PullResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.PullResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.pull(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.PullResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_pull_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_pull" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_pull" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.PullRequest.pb(pubsub.PullRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.PullResponse.to_json(pubsub.PullResponse()) + req.return_value.content = return_value + + request = pubsub.PullRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.PullResponse() + + client.pull( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_streaming_pull_rest_error(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.streaming_pull({}) + assert "Method StreamingPull is not available over REST transport" in str( + not_implemented_error.value + ) + + +def test_modify_push_config_rest_bad_request( + request_type=pubsub.ModifyPushConfigRequest, +): + client = SubscriberClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.modify_push_config(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ModifyPushConfigRequest, + dict, + ], +) +def test_modify_push_config_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.modify_push_config(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_modify_push_config_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_modify_push_config" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.ModifyPushConfigRequest.pb(pubsub.ModifyPushConfigRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = pubsub.ModifyPushConfigRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.modify_push_config( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_get_snapshot_rest_bad_request(request_type=pubsub.GetSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.get_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.GetSnapshotRequest, + dict, + ], +) +def test_get_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.get_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_get_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.GetSnapshotRequest.pb(pubsub.GetSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) + req.return_value.content = return_value + + request = pubsub.GetSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + + client.get_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_list_snapshots_rest_bad_request(request_type=pubsub.ListSnapshotsRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.list_snapshots(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.ListSnapshotsRequest, + dict, + ], +) +def test_list_snapshots_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "projects/sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.ListSnapshotsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.ListSnapshotsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.list_snapshots(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListSnapshotsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_snapshots_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_snapshots" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_list_snapshots" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.ListSnapshotsRequest.pb(pubsub.ListSnapshotsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.ListSnapshotsResponse.to_json( + pubsub.ListSnapshotsResponse() + ) + req.return_value.content = return_value + + request = pubsub.ListSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.ListSnapshotsResponse() + + client.list_snapshots( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_create_snapshot_rest_bad_request(request_type=pubsub.CreateSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + 
# Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.create_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.CreateSnapshotRequest, + dict, + ], +) +def test_create_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.create_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_create_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.CreateSnapshotRequest.pb(pubsub.CreateSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) + req.return_value.content = return_value + + request = pubsub.CreateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + + client.create_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_update_snapshot_rest_bad_request(request_type=pubsub.UpdateSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + request = request_type(**request_init) + + # Mock the 
http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.update_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.UpdateSnapshotRequest, + dict, + ], +) +def test_update_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": {"name": "projects/sample1/snapshots/sample2"}} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.Snapshot( + name="name_value", + topic="topic_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.update_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.Snapshot) + assert response.name == "name_value" + assert response.topic == "topic_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_snapshot" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_update_snapshot" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.UpdateSnapshotRequest.pb(pubsub.UpdateSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) + req.return_value.content = return_value + + request = pubsub.UpdateSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.Snapshot() + + client.update_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_delete_snapshot_rest_bad_request(request_type=pubsub.DeleteSnapshotRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.delete_snapshot(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.DeleteSnapshotRequest, + dict, + ], +) +def test_delete_snapshot_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"snapshot": "projects/sample1/snapshots/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.delete_snapshot(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_snapshot_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_delete_snapshot" + ) as pre: + pre.assert_not_called() + pb_message = pubsub.DeleteSnapshotRequest.pb(pubsub.DeleteSnapshotRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + + request = pubsub.DeleteSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_snapshot( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_seek_rest_bad_request(request_type=pubsub.SeekRequest): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + client.seek(request) + + +@pytest.mark.parametrize( + "request_type", + [ + pubsub.SeekRequest, + dict, + ], +) +def test_seek_rest_call_success(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"subscription": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = pubsub.SeekResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = pubsub.SeekResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + response = client.seek(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pubsub.SeekResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_seek_rest_interceptors(null_interceptor): + transport = transports.SubscriberRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.SubscriberRestInterceptor(), + ) + client = SubscriberClient(transport=transport) + + with mock.patch.object( + type(client.transport._session), "request" + ) as req, mock.patch.object( + path_template, "transcode" + ) as transcode, mock.patch.object( + transports.SubscriberRestInterceptor, "post_seek" + ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "pre_seek" + ) as pre: + pre.assert_not_called() + post.assert_not_called() + pb_message = pubsub.SeekRequest.pb(pubsub.SeekRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + return_value = pubsub.SeekResponse.to_json(pubsub.SeekResponse()) + req.return_value.content = return_value + + request = pubsub.SeekRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = pubsub.SeekResponse() + + client.seek( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.GetIamPolicyRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.GetIamPolicyRequest, + dict, + ], +) +def test_get_iam_policy_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_set_iam_policy_rest_bad_request( + request_type=iam_policy_pb2.SetIamPolicyRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/topics/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.SetIamPolicyRequest, + dict, + ], +) +def test_set_iam_policy_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/topics/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = policy_pb2.Policy() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, policy_pb2.Policy) + + +def test_test_iam_permissions_rest_bad_request( + request_type=iam_policy_pb2.TestIamPermissionsRequest, +): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"resource": "projects/sample1/subscriptions/sample2"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + iam_policy_pb2.TestIamPermissionsRequest, + dict, + ], +) +def test_test_iam_permissions_rest(request_type): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"resource": "projects/sample1/subscriptions/sample2"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = iam_policy_pb2.TestIamPermissionsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) + + +def test_initialize_client_w_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_subscription), "__call__" + ) as call: + client.create_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.Subscription() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_subscription), "__call__") as call: + client.get_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_subscription), "__call__" + ) as call: + client.update_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_subscriptions_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_subscriptions), "__call__" + ) as call: + client.list_subscriptions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSubscriptionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_subscription_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_subscription), "__call__" + ) as call: + client.delete_subscription(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSubscriptionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_ack_deadline_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_ack_deadline), "__call__" + ) as call: + client.modify_ack_deadline(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyAckDeadlineRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_acknowledge_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.acknowledge), "__call__") as call: + client.acknowledge(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.AcknowledgeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_pull_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.pull), "__call__") as call: + client.pull(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.PullRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_modify_push_config_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.modify_push_config), "__call__" + ) as call: + client.modify_push_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ModifyPushConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_snapshot), "__call__") as call: + client.get_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.GetSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_snapshots_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_snapshots), "__call__") as call: + client.list_snapshots(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.ListSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_snapshot), "__call__") as call: + client.create_snapshot(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.CreateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_snapshot), "__call__") as call: + client.update_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.UpdateSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_snapshot_empty_call_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_snapshot), "__call__") as call: + client.delete_snapshot(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.DeleteSnapshotRequest() + assert args[0] == request_msg -@pytest.mark.parametrize( - "transport_name", - [ - "grpc", - "rest", - ], -) -def test_transport_kind(transport_name): - transport = SubscriberClient.get_transport_class(transport_name)( + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_seek_empty_call_rest(): + client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert transport.kind == transport_name + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.seek), "__call__") as call: + client.seek(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = pubsub.SeekRequest() + + assert args[0] == request_msg def test_transport_grpc_default(): @@ -12767,194 +13222,6 @@ def test_client_with_default_client_info(): prep.assert_called_once_with(client_info) -@pytest.mark.asyncio -async def test_transport_close_async(): - client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_get_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.GetIamPolicyRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/topics/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.get_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.GetIamPolicyRequest, - dict, - ], -) -def test_get_iam_policy_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.get_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_set_iam_policy_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.SetIamPolicyRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/topics/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.set_iam_policy(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.SetIamPolicyRequest, - dict, - ], -) -def test_set_iam_policy_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/topics/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = policy_pb2.Policy() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, policy_pb2.Policy) - - -def test_test_iam_permissions_rest_bad_request( - transport: str = "rest", request_type=iam_policy_pb2.TestIamPermissionsRequest -): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - request = request_type() - request = json_format.ParseDict( - {"resource": "projects/sample1/subscriptions/sample2"}, request - ) - - # Mock the http request call within the method and fake a BadRequest error. 
- with mock.patch.object(Session, "request") as req, pytest.raises( - core_exceptions.BadRequest - ): - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 400 - response_value.request = Request() - req.return_value = response_value - client.test_iam_permissions(request) - - -@pytest.mark.parametrize( - "request_type", - [ - iam_policy_pb2.TestIamPermissionsRequest, - dict, - ], -) -def test_test_iam_permissions_rest(request_type): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request_init = {"resource": "projects/sample1/subscriptions/sample2"} - request = request_type(**request_init) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = iam_policy_pb2.TestIamPermissionsResponse() - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - - response = client.test_iam_permissions(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, iam_policy_pb2.TestIamPermissionsResponse) - - def test_set_iam_policy(transport: str = "grpc"): client = SubscriberClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12990,7 +13257,7 @@ def test_set_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -13055,7 +13322,7 @@ def test_set_iam_policy_field_headers(): @pytest.mark.asyncio async def test_set_iam_policy_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13103,7 +13370,7 @@ def test_set_iam_policy_from_dict(): @pytest.mark.asyncio async def test_set_iam_policy_from_dict_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: @@ -13156,7 +13423,7 @@ def test_get_iam_policy(transport: str = "grpc"): @pytest.mark.asyncio async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -13222,7 +13489,7 @@ def test_get_iam_policy_field_headers(): @pytest.mark.asyncio async def test_get_iam_policy_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13270,7 +13537,7 @@ def test_get_iam_policy_from_dict(): @pytest.mark.asyncio async def test_get_iam_policy_from_dict_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: @@ -13322,7 +13589,7 @@ def test_test_iam_permissions(transport: str = "grpc"): @pytest.mark.asyncio async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), transport=transport, ) @@ -13389,7 +13656,7 @@ def test_test_iam_permissions_field_headers(): @pytest.mark.asyncio async def test_test_iam_permissions_field_headers_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as @@ -13443,7 +13710,7 @@ def test_test_iam_permissions_from_dict(): @pytest.mark.asyncio async def test_test_iam_permissions_from_dict_async(): client = SubscriberAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -13463,22 +13730,41 @@ async def test_test_iam_permissions_from_dict_async(): call.assert_called() -def test_transport_close(): - transports = { - "rest": "_session", - "grpc": "_grpc_channel", - } +def test_transport_close_grpc(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() - for transport, close_name in transports.items(): - client = SubscriberClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = SubscriberAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = SubscriberClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() def test_client_ctx(): From 05f1b2f13c785879d1738b3daa8650dc883361d6 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Jan 2025 15:35:33 -0800 Subject: [PATCH 1138/1197] chore(main): release 2.28.0 (#1351) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 13 
+++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 17 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index f760fdd0732e..e0b4b7916280 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.27.3" + ".": "2.28.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 69b1f0c272f9..63f40ce6a6ff 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,19 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.28.0](https://github.com/googleapis/python-pubsub/compare/v2.27.3...v2.28.0) (2025-01-30) + + +### Features + +* Add support for message transforms to Topic and Subscription ([#1274](https://github.com/googleapis/python-pubsub/issues/1274)) ([e5e2f3f](https://github.com/googleapis/python-pubsub/commit/e5e2f3f732f451d14dfb4c37ae979e5c04045305)) + + +### Bug Fixes + +* Get channel target for a gRPC request ([#1339](https://github.com/googleapis/python-pubsub/issues/1339)) ([16ea766](https://github.com/googleapis/python-pubsub/commit/16ea76611d121700a3f3119d18919063d12c81c1)) +* Set creds only if transport not provided ([#1348](https://github.com/googleapis/python-pubsub/issues/1348)) ([59965a4](https://github.com/googleapis/python-pubsub/commit/59965a4804a434467a47815cdbdd5ce31bbb3662)) + ## [2.27.3](https://github.com/googleapis/python-pubsub/compare/v2.27.2...v2.27.3) (2025-01-24) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 03a0c337236e..8f0f03c065a2 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.3" # {x-release-please-version} +__version__ = "2.28.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 03a0c337236e..8f0f03c065a2 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.27.3" # {x-release-please-version} +__version__ = "2.28.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index d66015ac4b2f..039f0ca3db4d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.28.0" }, "snippets": [ { From 337f2d609e9a61a4b7c2a1c27e14786e29ad01d4 Mon Sep 17 00:00:00 2001 From: Lauren Huang Date: Fri, 31 Jan 2025 09:43:37 -0500 Subject: [PATCH 1139/1197] docs(samples): Increase example max_bytes setting for cloud storage subscriptions to encourage more performant subscribe (#1324) --- packages/google-cloud-pubsub/samples/snippets/subscriber.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py 
b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 180b091db2be..c09f5def14d9 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -557,7 +557,7 @@ def create_cloudstorage_subscription( # Min 1 minutes, max 10 minutes max_duration=max_duration, # Min 1 KB, max 10 GiB - max_bytes=2000, + max_bytes=10000000, ) # Wrap the subscriber in a 'with' block to automatically call close() to From 4bf819052bb16524f7ae2ac4356f228878f8aa0c Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 31 Jan 2025 16:32:53 +0100 Subject: [PATCH 1140/1197] chore(deps): update all dependencies (#1317) Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/pytest.ini | 2 ++ .../samples/snippets/requirements-test.txt | 6 +++--- .../samples/snippets/requirements.txt | 12 ++++++------ 3 files changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 4cedf2b4f1e3..6d55a7315bf3 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -18,3 +18,5 @@ filterwarnings = # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 ignore:.*pkg_resources.declare_namespace:DeprecationWarning ignore:.*pkg_resources is deprecated as an API:DeprecationWarning + # Remove once https://github.com/googleapis/gapic-generator-python/issues/2303 is fixed + ignore:The python-bigquery library will stop supporting Python 3.7:PendingDeprecationWarning \ No newline at end of file diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index c705889ef4e6..aa57a68a2ff0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,7 +1,7 @@ backoff==2.2.1 
pytest===7.4.4; python_version == '3.7' -pytest==8.3.2; python_version >= '3.8' +pytest==8.3.4; python_version >= '3.8' mock==5.1.0 flaky==3.8.1 -google-cloud-bigquery==3.25.0 -google-cloud-storage==2.18.2 +google-cloud-bigquery==3.27.0 +google-cloud-storage==2.19.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 3a16ebc94839..b348927be470 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,10 +1,10 @@ -google-cloud-pubsub==2.26.0 +google-cloud-pubsub==2.27.1 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' -protobuf==5.28.0; python_version >= '3.8' +protobuf==5.29.2; python_version >= '3.8' avro==1.12.0 -opentelemetry-api==1.22.0; python_version == '3.7' -opentelemetry-sdk==1.22.0; python_version == '3.7' -opentelemetry-api==1.27.0; python_version >= '3.8' -opentelemetry-sdk==1.27.0; python_version >= '3.8' +opentelemetry-api===1.22.0; python_version == '3.7' +opentelemetry-sdk===1.22.0; python_version == '3.7' +opentelemetry-api==1.29.0; python_version >= '3.8' +opentelemetry-sdk==1.29.0; python_version >= '3.8' opentelemetry-exporter-gcp-trace==1.7.0 From 51b1274b881d67980e41368cfa9b5c87fdd3f734 Mon Sep 17 00:00:00 2001 From: Mike Prieto Date: Mon, 10 Feb 2025 11:18:32 -0500 Subject: [PATCH 1141/1197] docs: Add samples and test for ingestion from Kafka sources (#1354) --- .../samples/snippets/noxfile.py | 2 +- .../samples/snippets/publisher.py | 209 ++++++++++++++++++ .../samples/snippets/publisher_test.py | 117 ++++++++++ .../samples/snippets/requirements.txt | 2 +- 4 files changed, 328 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index c9a3d1ecbf2a..075047f97055 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ 
b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -124,7 +124,7 @@ def get_pytest_env_vars() -> Dict[str, str]: "--builtin=gettext", "--max-complexity=20", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202,C901", "--max-line-length=88", ] diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 7cb7ca223d71..270451511608 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -185,6 +185,147 @@ def create_topic_with_cloud_storage_ingestion( # [END pubsub_create_topic_with_cloud_storage_ingestion] +def create_topic_with_aws_msk_ingestion( + project_id: str, + topic_id: str, + cluster_arn: str, + msk_topic: str, + aws_role_arn: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with AWS MSK Ingestion Settings.""" + # [START pubsub_create_topic_with_aws_msk_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # cluster_arn = "your-cluster-arn" + # msk_topic = "your-msk-topic" + # aws_role_arn = "your-aws-role-arn" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + aws_msk=IngestionDataSourceSettings.AwsMsk( + cluster_arn=cluster_arn, + topic=msk_topic, + aws_role_arn=aws_role_arn, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: 
{topic.name} with AWS MSK Ingestion Settings") + # [END pubsub_create_topic_with_aws_msk_ingestion] + + +def create_topic_with_azure_event_hubs_ingestion( + project_id: str, + topic_id: str, + resource_group: str, + namespace: str, + event_hub: str, + client_id: str, + tenant_id: str, + subscription_id: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with Azure Event Hubs Ingestion Settings.""" + # [START pubsub_create_topic_with_azure_event_hubs_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # resource_group = "your-resource-group" + # namespace = "your-namespace" + # event_hub = "your-event-hub" + # client_id = "your-client-id" + # tenant_id = "your-tenant-id" + # subscription_id = "your-subscription-id" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + azure_event_hubs=IngestionDataSourceSettings.AzureEventHubs( + resource_group=resource_group, + namespace=namespace, + event_hub=event_hub, + client_id=client_id, + tenant_id=tenant_id, + subscription_id=subscription_id, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with Azure Event Hubs Ingestion Settings") + # [END pubsub_create_topic_with_azure_event_hubs_ingestion] + + +def create_topic_with_confluent_cloud_ingestion( + project_id: str, + topic_id: str, + bootstrap_server: str, + cluster_id: str, + confluent_topic: str, + identity_pool_id: str, + gcp_service_account: str, +) -> None: + """Create a new Pub/Sub topic with Confluent Cloud Ingestion Settings.""" + # [START 
pubsub_create_topic_with_confluent_cloud_ingestion] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import Topic + from google.pubsub_v1.types import IngestionDataSourceSettings + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # bootstrap_server = "your-bootstrap-server" + # cluster_id = "your-cluster-id" + # confluent_topic = "your-confluent-topic" + # identity_pool_id = "your-identity-pool-id" + # gcp_service_account = "your-gcp-service-account" + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic( + name=topic_path, + ingestion_data_source_settings=IngestionDataSourceSettings( + confluent_cloud=IngestionDataSourceSettings.ConfluentCloud( + bootstrap_server=bootstrap_server, + cluster_id=cluster_id, + topic=confluent_topic, + identity_pool_id=identity_pool_id, + gcp_service_account=gcp_service_account, + ) + ), + ) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with Confluent Cloud Ingestion Settings") + # [END pubsub_create_topic_with_confluent_cloud_ingestion] + + def update_topic_type( project_id: str, topic_id: str, @@ -710,6 +851,43 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: "minimum_object_create_time" ) + create_topic_with_aws_msk_ingestion_parser = subparsers.add_parser( + "create_aws_msk_ingestion", help=create_topic_with_aws_msk_ingestion.__doc__ + ) + create_topic_with_aws_msk_ingestion_parser.add_argument("topic_id") + create_topic_with_aws_msk_ingestion_parser.add_argument("cluster_arn") + create_topic_with_aws_msk_ingestion_parser.add_argument("msk_topic") + create_topic_with_aws_msk_ingestion_parser.add_argument("aws_role_arn") + create_topic_with_aws_msk_ingestion_parser.add_argument("gcp_service_account") + + create_topic_with_azure_event_hubs_ingestion_parser = subparsers.add_parser( + "create_azure_event_hubs_ingestion", + 
help=create_topic_with_azure_event_hubs_ingestion.__doc__, + ) + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("topic_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("resource_group") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("namespace") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("event_hub") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("client_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("tenant_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument("subscription_id") + create_topic_with_azure_event_hubs_ingestion_parser.add_argument( + "gcp_service_account" + ) + + create_topic_with_confluent_cloud_ingestion_parser = subparsers.add_parser( + "create_confluent_cloud_ingestion", + help=create_topic_with_confluent_cloud_ingestion.__doc__, + ) + create_topic_with_confluent_cloud_ingestion_parser.add_argument("topic_id") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("bootstrap_server") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("cluster_id") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("confluent_topic") + create_topic_with_confluent_cloud_ingestion_parser.add_argument("identity_pool_id") + create_topic_with_confluent_cloud_ingestion_parser.add_argument( + "gcp_service_account" + ) + update_topic_type_parser = subparsers.add_parser( "update_kinesis_ingestion", help=update_topic_type.__doc__ ) @@ -798,6 +976,37 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: args.match_glob, args.minimum_object_create_time, ) + elif args.command == "create_aws_msk_ingestion": + create_topic_with_aws_msk_ingestion( + args.project_id, + args.topic_id, + args.cluster_arn, + args.msk_topic, + args.aws_role_arn, + args.gcp_service_account, + ) + elif args.command == "create_azure_event_hubs_ingestion": + 
create_topic_with_azure_event_hubs_ingestion( + args.project_id, + args.topic_id, + args.resource_group, + args.namespace, + args.event_hub, + args.client_id, + args.tenant_id, + args.subscription_id, + args.gcp_service_account, + ) + elif args.command == "create_confluent_cloud_ingestion": + create_topic_with_confluent_cloud_ingestion( + args.project_id, + args.topic_id, + args.bootstrap_server, + args.cluster_id, + args.confluent_topic, + args.identity_pool_id, + args.gcp_service_account, + ) elif args.command == "update_kinesis_ingestion": update_topic_type( args.project_id, diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 6f17305cb24e..dc7b94027ee0 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -196,6 +196,123 @@ def test_create_topic_with_cloud_storage_ingestion( publisher_client.delete_topic(request={"topic": topic_path}) +def test_create_topic_with_aws_msk_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual AWS resources for the test to pass. 
+ cluster_arn = ( + "arn:aws:kafka:us-east-1:111111111111:cluster/fake-cluster-name/11111111-1111-1" + ) + msk_topic = "fake-msk-topic-name" + aws_role_arn = "arn:aws:iam::111111111111:role/fake-role-name" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_aws_msk_ingestion( + PROJECT_ID, + TOPIC_ID, + cluster_arn, + msk_topic, + aws_role_arn, + gcp_service_account, + ) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with AWS MSK Ingestion Settings" in out + + # Clean up resource created for the test. + publisher_client.delete_topic(request={"topic": topic_path}) + + +def test_create_topic_with_azure_event_hubs_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual Azure resources for the test to pass. + resource_group = "fake-resource-group" + namespace = "fake-namespace" + event_hub = "fake-event-hub" + client_id = "fake-client-id" + tenant_id = "fake-tenant-id" + subcription_id = "fake-subscription-id" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_azure_event_hubs_ingestion( + PROJECT_ID, + TOPIC_ID, + resource_group, + namespace, + event_hub, + client_id, + tenant_id, + subcription_id, + gcp_service_account, + ) + + out, _ = capsys.readouterr() + assert ( + f"Created topic: {topic_path} with Azure Event Hubs Ingestion Settings" in out + ) + + # Clean up resource created for the test. 
+ publisher_client.delete_topic(request={"topic": topic_path}) + + +def test_create_topic_with_confluent_cloud_ingestion( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + # Outside of automated CI tests, these values must be of actual Confluent resources for the test to pass. + bootstrap_server = "fake-bootstrap-server-id.us-south1.gcp.confluent.cloud:9092" + cluster_id = "fake-cluster-id" + confluent_topic = "fake-confluent-topic-name" + identity_pool_id = "fake-identity-pool-id" + gcp_service_account = ( + "fake-service-account@fake-gcp-project.iam.gserviceaccount.com" + ) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_confluent_cloud_ingestion( + PROJECT_ID, + TOPIC_ID, + bootstrap_server, + cluster_id, + confluent_topic, + identity_pool_id, + gcp_service_account, + ) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with Confluent Cloud Ingestion Settings" in out + + # Clean up resource created for the test. 
+ publisher_client.delete_topic(request={"topic": topic_path}) + + def test_update_topic_type( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index b348927be470..b6ae767c6a73 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.27.1 +google-cloud-pubsub==2.28.0 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf==5.29.2; python_version >= '3.8' From b1b0fc643e987d90e34bdc9cf78472a24500fc63 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 3 Mar 2025 13:24:10 -0500 Subject: [PATCH 1142/1197] chore(python): conditionally load credentials in .kokoro/build.sh (#1364) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/.kokoro/build.sh | 20 +++++++++++++------ .../samples/snippets/noxfile.py | 2 +- .../samples/snippets/publisher.py | 2 +- 4 files changed, 18 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 4c0027ff1c61..3f7634f25f8e 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:04c35dc5f49f0f503a306397d6d043685f8d2bb822ab515818c4208d7fb2db3a -# created: 2025-01-16T15:24:11.364245182Z + digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf +# created: 2025-02-21T19:32:52.01306189Z diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh index 90e690e7a8bf..d41b45aa1dd0 100755 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ b/packages/google-cloud-pubsub/.kokoro/build.sh @@ -15,11 +15,13 @@ set -eo pipefail +CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") + if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT="github/python-pubsub" + PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") fi -cd "${PROJECT_ROOT}" +pushd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -28,10 +30,16 @@ export PYTHONUNBUFFERED=1 env | grep KOKORO # Setup service account credentials. -export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] +then + export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json +fi # Setup project id. -export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] +then + export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") +fi # If this is a continuous build, send the test log to the FlakyBot. # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. @@ -46,7 +54,7 @@ fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. 
if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} + python3 -m nox -s ${NOX_SESSION:-} else - python3 -m nox + python3 -m nox fi diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index 075047f97055..c9a3d1ecbf2a 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -124,7 +124,7 @@ def get_pytest_env_vars() -> Dict[str, str]: "--builtin=gettext", "--max-complexity=20", "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", - "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202,C901", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", "--max-line-length=88", ] diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index 270451511608..e279324b81a4 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -809,7 +809,7 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: # [END pubsub_detach_subscription] -if __name__ == "__main__": +if __name__ == "__main__": # noqa: C901 parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, From 45ca0075d5eb2ee0942a3ac600875b64c9067abb Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 6 Mar 2025 03:47:53 +0500 Subject: [PATCH 1143/1197] chore(revert): Revert "fix: get channel target for a gRPC request" (#1371) --- .../publisher/test_publisher_client.py | 32 ++++++++----------- .../subscriber/test_subscriber_client.py | 32 ++++++++----------- 2 files changed, 26 insertions(+), 38 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 1e1cc61b31e8..d1b7d4a81cc6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -57,12 +57,16 @@ typed_flaky = cast(Callable[[C], C], flaky(max_runs=5, min_passes=1)) -# NOTE: This interceptor is required to create an intercept channel. -class _PublisherClientGrpcInterceptor( - grpc.UnaryUnaryClientInterceptor, -): - def intercept_unary_unary(self, continuation, client_call_details, request): - pass +# Attempt to use `_thunk` to obtain the underlying grpc channel from +# the intercept channel. Default to obtaining the grpc channel directly +# for backwards compatibility. +# TODO(https://github.com/grpc/grpc/issues/38519): Workaround to obtain a channel +# until a public API is available. +def get_publish_channel(client): + try: + return client._transport.publish._thunk("")._channel + except AttributeError: + return client._transport.publish._channel def _assert_retries_equal(retry, retry2): @@ -424,27 +428,17 @@ def init(self, *args, **kwargs): assert client.transport._ssl_channel_credentials == mock_ssl_creds -def test_init_emulator(monkeypatch, creds): +def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/foo/bar:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. - - # TODO(https://github.com/grpc/grpc/issues/38519): Workaround to create an intercept - # channel (for forwards compatibility) with a channel created by the publisher client - # where target is set to the emulator host. 
- channel = publisher.Client().transport.grpc_channel - interceptor = _PublisherClientGrpcInterceptor() - intercept_channel = grpc.intercept_channel(channel, interceptor) - transport = publisher.Client.get_transport_class("grpc")( - credentials=creds, channel=intercept_channel - ) - client = publisher.Client(transport=transport) + client = publisher.Client() # Establish that a gRPC request would attempt to hit the emulator host. # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. - channel = client._transport.publish._thunk("")._channel + channel = get_publish_channel(client) # Behavior to include dns prefix changed in gRPCv1.63 grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 4b381245de5c..3d3ff0111213 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -36,12 +36,16 @@ from google.pubsub_v1.types import PubsubMessage -# NOTE: This interceptor is required to create an intercept channel. -class _SubscriberClientGrpcInterceptor( - grpc.UnaryUnaryClientInterceptor, -): - def intercept_unary_unary(self, continuation, client_call_details, request): - pass +# Attempt to use `_thunk` to obtain the underlying grpc channel from +# the intercept channel. Default to obtaining the grpc channel directly +# for backwards compatibility. +# TODO(https://github.com/grpc/grpc/issues/38519): Workaround to obtain a channel +# until a public API is available. 
+def get_pull_channel(client): + try: + return client._transport.pull._thunk("")._channel + except AttributeError: + return client._transport.pull._channel def test_init_default_client_info(creds): @@ -127,27 +131,17 @@ def init(self, *args, **kwargs): assert client.transport._ssl_channel_credentials == mock_ssl_creds -def test_init_emulator(monkeypatch, creds): +def test_init_emulator(monkeypatch): monkeypatch.setenv("PUBSUB_EMULATOR_HOST", "/baz/bacon:123") # NOTE: When the emulator host is set, a custom channel will be used, so # no credentials (mock ot otherwise) can be passed in. - - # TODO(https://github.com/grpc/grpc/issues/38519): Workaround to create an intercept - # channel (for forwards compatibility) with a channel created by the publisher client - # where target is set to the emulator host. - channel = subscriber.Client().transport.grpc_channel - interceptor = _SubscriberClientGrpcInterceptor() - intercept_channel = grpc.intercept_channel(channel, interceptor) - transport = subscriber.Client.get_transport_class("grpc")( - credentials=creds, channel=intercept_channel - ) - client = subscriber.Client(transport=transport) + client = subscriber.Client() # Establish that a gRPC request would attempt to hit the emulator host. # # Sadly, there seems to be no good way to do this without poking at # the private API of gRPC. 
- channel = client._transport.pull._thunk("")._channel + channel = get_pull_channel(client) # Behavior to include dns prefix changed in gRPCv1.63 grpc_major, grpc_minor = [int(part) for part in grpc.__version__.split(".")[0:2]] if grpc_major > 1 or (grpc_major == 1 and grpc_minor >= 63): From 05d03f509500ad1b31b8d5ad72758dff3a2a4c32 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 10 Mar 2025 11:29:14 -0400 Subject: [PATCH 1144/1197] chore: remove unused files (#1368) --- .../.github/.OwlBot.lock.yaml | 4 +- .../.kokoro/docker/docs/Dockerfile | 89 --- .../.kokoro/docker/docs/requirements.in | 2 - .../.kokoro/docker/docs/requirements.txt | 297 ---------- .../.kokoro/docs/common.cfg | 66 --- .../.kokoro/docs/docs-presubmit.cfg | 28 - .../google-cloud-pubsub/.kokoro/docs/docs.cfg | 1 - .../.kokoro/publish-docs.sh | 58 -- .../google-cloud-pubsub/.kokoro/release.sh | 29 - .../.kokoro/release/common.cfg | 43 -- .../.kokoro/release/release.cfg | 1 - .../.kokoro/requirements.in | 11 - .../.kokoro/requirements.txt | 509 ------------------ 13 files changed, 2 insertions(+), 1136 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile delete mode 100644 packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in delete mode 100644 packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt delete mode 100644 packages/google-cloud-pubsub/.kokoro/docs/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/docs/docs.cfg delete mode 100755 packages/google-cloud-pubsub/.kokoro/publish-docs.sh delete mode 100755 packages/google-cloud-pubsub/.kokoro/release.sh delete mode 100644 packages/google-cloud-pubsub/.kokoro/release/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/release/release.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/requirements.in delete mode 100644 
packages/google-cloud-pubsub/.kokoro/requirements.txt diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 3f7634f25f8e..c631e1f7d7e9 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f016446d6e520e5fb552c45b110cba3f217bffdd3d06bdddd076e9e6d13266cf -# created: 2025-02-21T19:32:52.01306189Z + digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 +# created: 2025-03-05 diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile b/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile deleted file mode 100644 index e5410e296bd8..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/Dockerfile +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ubuntu:24.04 - -ENV DEBIAN_FRONTEND noninteractive - -# Ensure local Python is preferred over distribution Python. -ENV PATH /usr/local/bin:$PATH - -# Install dependencies. 
-RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - apt-transport-https \ - build-essential \ - ca-certificates \ - curl \ - dirmngr \ - git \ - gpg-agent \ - graphviz \ - libbz2-dev \ - libdb5.3-dev \ - libexpat1-dev \ - libffi-dev \ - liblzma-dev \ - libreadline-dev \ - libsnappy-dev \ - libssl-dev \ - libsqlite3-dev \ - portaudio19-dev \ - redis-server \ - software-properties-common \ - ssh \ - sudo \ - tcl \ - tcl-dev \ - tk \ - tk-dev \ - uuid-dev \ - wget \ - zlib1g-dev \ - && add-apt-repository universe \ - && apt-get update \ - && apt-get -y install jq \ - && apt-get clean autoclean \ - && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* \ - && rm -f /var/cache/apt/archives/*.deb - - -###################### Install python 3.10.14 for docs/docfx session - -# Download python 3.10.14 -RUN wget https://www.python.org/ftp/python/3.10.14/Python-3.10.14.tgz - -# Extract files -RUN tar -xvf Python-3.10.14.tgz - -# Install python 3.10.14 -RUN ./Python-3.10.14/configure --enable-optimizations -RUN make altinstall - -ENV PATH /usr/local/bin/python3.10:$PATH - -###################### Install pip -RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.10 /tmp/get-pip.py \ - && rm /tmp/get-pip.py - -# Test pip -RUN python3.10 -m pip - -# Install build requirements -COPY requirements.txt /requirements.txt -RUN python3.10 -m pip install --require-hashes -r requirements.txt - -CMD ["python3.10"] diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in deleted file mode 100644 index 586bd07037ae..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.in +++ /dev/null @@ -1,2 +0,0 @@ -nox -gcp-docuploader diff --git a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt b/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt deleted file mode 100644 index a9360a25b707..000000000000 
--- a/packages/google-cloud-pubsub/.kokoro/docker/docs/requirements.txt +++ /dev/null @@ -1,297 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.10 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.5.3 \ - --hash=sha256:2ab2c4a215c59fd6caaff41a869480a23e8f6a5f910b266c1808037f4e375b61 \ - --hash=sha256:c12bf50eded8aebb298c7b7da7a5ff3ee24dffd9f5281867dfe1424b58c55392 - # via nox -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.12.14 \ - --hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \ - --hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db - # via requests -charset-normalizer==3.4.1 \ - --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ - --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ - --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ - --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ - --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ - --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ - --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ - --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ - --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ - --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ - --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ - --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ - --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ - 
--hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ - --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ - --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ - --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ - --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ - --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ - --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ - --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ - --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ - --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ - --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ - --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ - --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ - --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ - --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ - --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ - --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ - --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ - --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ - --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ - --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ - --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ - --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ - --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ - 
--hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ - --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ - --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ - --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ - --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ - --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ - --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ - --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ - --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ - --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ - --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ - --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ - --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ - --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ - --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ - --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ - --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ - --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ - --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ - --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ - --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ - --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ - --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ - --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ - 
--hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ - --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ - --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ - --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ - --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ - --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ - --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ - --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ - --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ - --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ - --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ - --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ - --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ - --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ - --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ - --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ - --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ - --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ - --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ - --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ - --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ - --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ - --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ - --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ - 
--hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ - --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ - --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ - --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ - --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ - --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ - --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 - # via requests -click==8.1.8 \ - --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ - --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a - # via gcp-docuploader -colorlog==6.9.0 \ - --hash=sha256:5906e71acd67cb07a71e779c47c4bcb45fb8c2993eebe9e5adcd6a6f1b283eff \ - --hash=sha256:bfba54a1b93b94f54e1f4fe48395725a3d92fd2a4af702f6bd70946bdc0c6ac2 - # via - # gcp-docuploader - # nox -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -google-api-core==2.24.0 \ - --hash=sha256:10d82ac0fca69c82a25b3efdeefccf6f28e02ebb97925a8cce8edbfe379929d9 \ - --hash=sha256:e255640547a597a4da010876d333208ddac417d60add22b6851a0c66a831fcaf - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.37.0 \ - --hash=sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00 \ - 
--hash=sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0 - # via - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.19.0 \ - --hash=sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba \ - --hash=sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - 
--hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.66.0 \ - --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \ - --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.2 \ - --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ - 
--hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f - # via nox -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.25.0 \ - --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \ - --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91 - # via google-api-core -protobuf==5.29.3 \ - --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ - --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ - --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ - --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ - --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ - --hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ - --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ - --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ - --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ - --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ - --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 - # via - # gcp-docuploader - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -requests==2.32.3 \ - 
--hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # google-api-core - # google-cloud-storage -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -six==1.17.0 \ - --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ - --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 - # via gcp-docuploader -tomli==2.2.1 \ - --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ - --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ - --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ - --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ - --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ - --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ - --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ - --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ - --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ - --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ - --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ - --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ - --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ - --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ - --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ - --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ - 
--hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ - --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ - --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ - --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ - --hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ - --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ - --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ - --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ - --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ - --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ - --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ - --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ - --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ - --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ - --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ - --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 - # via nox -urllib3==2.3.0 \ - --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \ - --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d - # via requests -virtualenv==20.28.1 \ - --hash=sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb \ - --hash=sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329 - # via nox diff --git a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg b/packages/google-cloud-pubsub/.kokoro/docs/common.cfg deleted file mode 100644 index a9392e09af1f..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/docs/common.cfg +++ /dev/null @@ -1,66 +0,0 
@@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline_v2.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/publish-docs.sh" -} - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - # Push google cloud library docs to the Cloud RAD bucket `docs-staging-v2` - value: "docs-staging-v2" -} - -# It will upload the docker image after successful builds. -env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "true" -} - -# It will always build the docker image. -env_vars: { - key: "TRAMPOLINE_DOCKERFILE" - value: ".kokoro/docker/docs/Dockerfile" -} - -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} diff --git a/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg deleted file mode 100644 index 2c532d9db771..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/docs/docs-presubmit.cfg +++ /dev/null @@ -1,28 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "STAGING_BUCKET" - value: "gcloud-python-test" -} - -env_vars: { - key: "V2_STAGING_BUCKET" - value: "gcloud-python-test" -} - -# We only upload the image in the main `docs` build. 
-env_vars: { - key: "TRAMPOLINE_IMAGE_UPLOAD" - value: "false" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/build.sh" -} - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "docs docfx" -} diff --git a/packages/google-cloud-pubsub/.kokoro/docs/docs.cfg b/packages/google-cloud-pubsub/.kokoro/docs/docs.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/docs/docs.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh b/packages/google-cloud-pubsub/.kokoro/publish-docs.sh deleted file mode 100755 index 4ed4aaf1346f..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/publish-docs.sh +++ /dev/null @@ -1,58 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -export PATH="${HOME}/.local/bin:${PATH}" - -# build docs -nox -s docs - -# create metadata -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" - - -# docfx yaml files -nox -s docfx - -# create metadata. -python3.10 -m docuploader create-metadata \ - --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ - --version=$(python3.10 setup.py --version) \ - --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ - --distribution-name=$(python3.10 setup.py --name) \ - --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ - --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ - --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) - -cat docs.metadata - -# upload docs -python3.10 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/packages/google-cloud-pubsub/.kokoro/release.sh b/packages/google-cloud-pubsub/.kokoro/release.sh deleted file mode 100755 index 006893576340..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/release.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may 
not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install --require-hashes -r github/python-pubsub/.kokoro/requirements.txt -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-3") -cd github/python-pubsub -python3 setup.py sdist bdist_wheel -twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/packages/google-cloud-pubsub/.kokoro/release/common.cfg b/packages/google-cloud-pubsub/.kokoro/release/common.cfg deleted file mode 100644 index a6b92c6371c4..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/release/common.cfg +++ /dev/null @@ -1,43 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/release.sh" -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google-cloud-pypi-token-keystore-3" - } - } -} - -# Store the packages we uploaded to PyPI. That way, we have a record of exactly -# what we published, which we can use to generate SBOMs and attestations. -action { - define_artifacts { - regex: "github/python-pubsub/**/*.tar.gz" - strip_prefix: "github/python-pubsub" - } -} diff --git a/packages/google-cloud-pubsub/.kokoro/release/release.cfg b/packages/google-cloud-pubsub/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.in b/packages/google-cloud-pubsub/.kokoro/requirements.in deleted file mode 100644 index fff4d9ce0d0a..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/requirements.in +++ /dev/null @@ -1,11 +0,0 @@ -gcp-docuploader -gcp-releasetool>=2 # required for compatibility with cryptography>=42.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 -cryptography>=42.0.5 diff --git a/packages/google-cloud-pubsub/.kokoro/requirements.txt b/packages/google-cloud-pubsub/.kokoro/requirements.txt deleted file mode 100644 index 006d8ef931bf..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/requirements.txt +++ /dev/null @@ -1,509 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# 
-argcomplete==3.5.1 \ - --hash=sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363 \ - --hash=sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4 - # via nox -attrs==24.2.0 \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 - # via gcp-releasetool -backports-tarfile==1.2.0 \ - --hash=sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34 \ - --hash=sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991 - # via jaraco-context -cachetools==5.5.0 \ - --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a - # via google-auth -certifi==2024.8.30 \ - --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ - --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 - # via requests -cffi==1.17.1 \ - --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ - --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ - --hash=sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1 \ - --hash=sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15 \ - --hash=sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36 \ - --hash=sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824 \ - --hash=sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8 \ - --hash=sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36 \ - --hash=sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17 \ - --hash=sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf \ - --hash=sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc \ - 
--hash=sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3 \ - --hash=sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed \ - --hash=sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702 \ - --hash=sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1 \ - --hash=sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8 \ - --hash=sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903 \ - --hash=sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6 \ - --hash=sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d \ - --hash=sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b \ - --hash=sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e \ - --hash=sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be \ - --hash=sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c \ - --hash=sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683 \ - --hash=sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9 \ - --hash=sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c \ - --hash=sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8 \ - --hash=sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1 \ - --hash=sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4 \ - --hash=sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655 \ - --hash=sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67 \ - --hash=sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595 \ - --hash=sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0 \ - --hash=sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65 \ - --hash=sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41 \ - 
--hash=sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6 \ - --hash=sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401 \ - --hash=sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6 \ - --hash=sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3 \ - --hash=sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16 \ - --hash=sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93 \ - --hash=sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e \ - --hash=sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4 \ - --hash=sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964 \ - --hash=sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c \ - --hash=sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576 \ - --hash=sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0 \ - --hash=sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3 \ - --hash=sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662 \ - --hash=sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3 \ - --hash=sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff \ - --hash=sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5 \ - --hash=sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd \ - --hash=sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f \ - --hash=sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5 \ - --hash=sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14 \ - --hash=sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d \ - --hash=sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9 \ - --hash=sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7 \ - 
--hash=sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382 \ - --hash=sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a \ - --hash=sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e \ - --hash=sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a \ - --hash=sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4 \ - --hash=sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99 \ - --hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \ - --hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.8.2 \ - --hash=sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44 \ - --hash=sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33 - # via - # gcp-docuploader - # nox -cryptography==43.0.1 \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - 
--hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - --hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 - # via - # -r requirements.in - # gcp-releasetool - # secretstorage -distlib==0.3.9 \ - --hash=sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87 \ - --hash=sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403 - # via virtualenv -docutils==0.21.2 \ - 
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \ - --hash=sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2 - # via readme-renderer -filelock==3.16.1 \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==2.1.1 \ - --hash=sha256:25639269f4eae510094f9dbed9894977e1966933211eb155a451deebc3fc0b30 \ - --hash=sha256:845f4ded3d9bfe8cc7fdaad789e83f4ea014affa77785259a7ddac4b243e099e - # via -r requirements.in -google-api-core==2.21.0 \ - --hash=sha256:4a152fd11a9f774ea606388d423b68aa7e6d6a0ffe4c8266f74979613ec09f81 \ - --hash=sha256:6869eacb2a37720380ba5898312af79a4d30b8bca1548fb4093e0697dc4bdf5d - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.35.0 \ - --hash=sha256:25df55f327ef021de8be50bad0dfd4a916ad0de96da86cd05661c9297723ad3f \ - --hash=sha256:f4c64ed4e01e8e8b646ef34c018f8bf3338df0c8e37d8b3bba40e7f574a3278a - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage -google-cloud-core==2.4.1 \ - --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \ - --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61 - # via google-cloud-storage -google-cloud-storage==2.18.2 \ - --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \ - --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99 - # via gcp-docuploader -google-crc32c==1.6.0 \ - --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \ - --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \ - 
--hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \ - --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \ - --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \ - --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \ - --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \ - --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \ - --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \ - --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \ - --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \ - --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \ - --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \ - --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \ - --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \ - --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \ - --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \ - --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \ - --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \ - --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \ - --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \ - --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \ - --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \ - --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \ - --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \ - --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \ - 
--hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.7.2 \ - --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \ - --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0 - # via google-cloud-storage -googleapis-common-protos==1.65.0 \ - --hash=sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63 \ - --hash=sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0 - # via google-api-core -idna==3.10 \ - --hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \ - --hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3 - # via requests -importlib-metadata==8.5.0 \ - --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \ - --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.4.0 \ - --hash=sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd \ - --hash=sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790 - # via keyring -jaraco-context==6.0.1 \ - --hash=sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3 \ - --hash=sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4 - # via keyring -jaraco-functools==4.1.0 \ - --hash=sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d \ - --hash=sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.4 \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - 
--hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d - # via gcp-releasetool -keyring==25.4.1 \ - --hash=sha256:5426f817cf7f6f007ba5ec722b1bcad95a75b27d780343772ad76b17cb47b0bf \ - --hash=sha256:b07ebc55f3e8ed86ac81dd31ef14e81ace9dd9c3d4b5d77a6e9a2016d0d71a1b - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==3.0.1 \ - --hash=sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396 \ - --hash=sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38 \ - --hash=sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a \ - --hash=sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8 \ - --hash=sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b \ - --hash=sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad \ - --hash=sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a \ - --hash=sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a \ - --hash=sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da \ - --hash=sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6 \ - --hash=sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8 \ - --hash=sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344 \ - --hash=sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a \ - --hash=sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8 \ - --hash=sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5 \ - --hash=sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7 \ - --hash=sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170 \ - 
--hash=sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132 \ - --hash=sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9 \ - --hash=sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd \ - --hash=sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9 \ - --hash=sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346 \ - --hash=sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc \ - --hash=sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589 \ - --hash=sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5 \ - --hash=sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915 \ - --hash=sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295 \ - --hash=sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453 \ - --hash=sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea \ - --hash=sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b \ - --hash=sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d \ - --hash=sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b \ - --hash=sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4 \ - --hash=sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b \ - --hash=sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7 \ - --hash=sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf \ - --hash=sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f \ - --hash=sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91 \ - --hash=sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd \ - --hash=sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50 \ - --hash=sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b \ - 
--hash=sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583 \ - --hash=sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a \ - --hash=sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984 \ - --hash=sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c \ - --hash=sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c \ - --hash=sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25 \ - --hash=sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa \ - --hash=sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4 \ - --hash=sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3 \ - --hash=sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97 \ - --hash=sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1 \ - --hash=sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd \ - --hash=sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772 \ - --hash=sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a \ - --hash=sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729 \ - --hash=sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca \ - --hash=sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6 \ - --hash=sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635 \ - --hash=sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b \ - --hash=sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.5.0 \ - --hash=sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef \ - 
--hash=sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6 - # via - # jaraco-classes - # jaraco-functools -nh3==0.2.18 \ - --hash=sha256:0411beb0589eacb6734f28d5497ca2ed379eafab8ad8c84b31bb5c34072b7164 \ - --hash=sha256:14c5a72e9fe82aea5fe3072116ad4661af5cf8e8ff8fc5ad3450f123e4925e86 \ - --hash=sha256:19aaba96e0f795bd0a6c56291495ff59364f4300d4a39b29a0abc9cb3774a84b \ - --hash=sha256:34c03fa78e328c691f982b7c03d4423bdfd7da69cd707fe572f544cf74ac23ad \ - --hash=sha256:36c95d4b70530b320b365659bb5034341316e6a9b30f0b25fa9c9eff4c27a204 \ - --hash=sha256:3a157ab149e591bb638a55c8c6bcb8cdb559c8b12c13a8affaba6cedfe51713a \ - --hash=sha256:42c64511469005058cd17cc1537578eac40ae9f7200bedcfd1fc1a05f4f8c200 \ - --hash=sha256:5f36b271dae35c465ef5e9090e1fdaba4a60a56f0bb0ba03e0932a66f28b9189 \ - --hash=sha256:6955369e4d9f48f41e3f238a9e60f9410645db7e07435e62c6a9ea6135a4907f \ - --hash=sha256:7b7c2a3c9eb1a827d42539aa64091640bd275b81e097cd1d8d82ef91ffa2e811 \ - --hash=sha256:8ce0f819d2f1933953fca255db2471ad58184a60508f03e6285e5114b6254844 \ - --hash=sha256:94a166927e53972a9698af9542ace4e38b9de50c34352b962f4d9a7d4c927af4 \ - --hash=sha256:a7f1b5b2c15866f2db413a3649a8fe4fd7b428ae58be2c0f6bca5eefd53ca2be \ - --hash=sha256:c8b3a1cebcba9b3669ed1a84cc65bf005728d2f0bc1ed2a6594a992e817f3a50 \ - --hash=sha256:de3ceed6e661954871d6cd78b410213bdcb136f79aafe22aa7182e028b8c7307 \ - --hash=sha256:f0eca9ca8628dbb4e916ae2491d72957fdd35f7a5d326b7032a345f111ac07fe - # via readme-renderer -nox==2024.10.9 \ - --hash=sha256:1d36f309a0a2a853e9bccb76bbef6bb118ba92fa92674d15604ca99adeb29eab \ - --hash=sha256:7aa9dc8d1c27e9f45ab046ffd1c3b2c4f7c91755304769df231308849ebded95 - # via -r requirements.in -packaging==24.1 \ - --hash=sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002 \ - --hash=sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124 - # via - # gcp-releasetool - # nox -pkginfo==1.10.0 \ - 
--hash=sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297 \ - --hash=sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097 - # via twine -platformdirs==4.3.6 \ - --hash=sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907 \ - --hash=sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb - # via virtualenv -proto-plus==1.24.0 \ - --hash=sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445 \ - --hash=sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12 - # via google-api-core -protobuf==5.28.2 \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos - # proto-plus -pyasn1==0.6.1 \ - --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \ - --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034 - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.4.1 \ - --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \ - 
--hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c - # via google-auth -pycparser==2.22 \ - --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ - --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc - # via cffi -pygments==2.18.0 \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a - # via - # readme-renderer - # rich -pyjwt==2.9.0 \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c - # via gcp-releasetool -pyperclip==1.9.0 \ - --hash=sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310 - # via gcp-releasetool -python-dateutil==2.9.0.post0 \ - --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ - --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 - # via gcp-releasetool -readme-renderer==44.0 \ - --hash=sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151 \ - --hash=sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1 - # via twine -requests==2.32.3 \ - --hash=sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760 \ - --hash=sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.9.2 \ - 
--hash=sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c \ - --hash=sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1 - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -tomli==2.0.2 \ - --hash=sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38 \ - --hash=sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed - # via nox -twine==5.1.1 \ - --hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \ - --hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db - # via -r requirements.in -typing-extensions==4.12.2 \ - --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ - --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 - # via - # -r requirements.in - # rich -urllib3==2.2.3 \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 - # via - # requests - # twine -virtualenv==20.26.6 \ - --hash=sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48 \ - --hash=sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2 - # via nox -wheel==0.44.0 \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - 
--hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 - # via -r requirements.in -zipp==3.20.2 \ - --hash=sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350 \ - --hash=sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29 - # via importlib-metadata - -# The following packages are considered to be unsafe in a requirements file: -setuptools==75.1.0 \ - --hash=sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2 \ - --hash=sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538 - # via -r requirements.in From b4018305a6794f277ef5c21593f76c5e99d9ea39 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Tue, 11 Mar 2025 03:33:16 +0500 Subject: [PATCH 1145/1197] fix: allow logs to propagate upstream for caplog testing (#1374) Co-authored-by: Owl Bot --- .../tests/unit/pubsub_v1/conftest.py | 17 +++++++ .../pubsub_v1/subscriber/test_heartbeater.py | 11 +++-- .../unit/pubsub_v1/subscriber/test_leaser.py | 10 ++-- .../subscriber/test_messages_on_hold.py | 6 +-- .../subscriber/test_streaming_pull_manager.py | 49 +++++++++++++------ 5 files changed, 67 insertions(+), 26 deletions(-) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py index b44e2fd84f82..ab73ab26c58b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/conftest.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import logging import pytest from opentelemetry.sdk.trace import TracerProvider @@ -43,3 +44,19 @@ def span_exporter(): provider = trace.get_tracer_provider() provider.add_span_processor(processor) yield exporter + + +@pytest.fixture() +def modify_google_logger_propagation(): + """ + Allow propagation of logs to the root logger for tests + that depend on the caplog fixture. 
Restore the default + propagation setting after the test finishes. + """ + logger = logging.getLogger("google") + original_propagate = logger.propagate + logger.propagate = True + try: + yield + finally: + logger.propagate = original_propagate diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 503fde2c9ade..857152ac34cf 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -28,7 +28,9 @@ import pytest -def test_heartbeat_inactive_manager_active_rpc(caplog): +def test_heartbeat_inactive_manager_active_rpc( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager = mock.create_autospec( @@ -46,7 +48,10 @@ def test_heartbeat_inactive_manager_active_rpc(caplog): assert "exiting" in caplog.text -def test_heartbeat_inactive_manager_inactive_rpc(caplog): +def test_heartbeat_inactive_manager_inactive_rpc( + caplog, + modify_google_logger_propagation, +): caplog.set_level(logging.DEBUG) manager = mock.create_autospec( @@ -64,7 +69,7 @@ def test_heartbeat_inactive_manager_inactive_rpc(caplog): assert "exiting" in caplog.text -def test_heartbeat_stopped(caplog): +def test_heartbeat_stopped(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager = mock.create_autospec( streaming_pull_manager.StreamingPullManager, instance=True diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index b5b5cac20bb0..606dcc2c9e4b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -53,7 +53,7 @@ def test_add_and_remove(): assert leaser_.bytes == 25 
-def test_add_already_managed(caplog): +def test_add_already_managed(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) leaser_ = leaser.Leaser(mock.sentinel.manager) @@ -64,7 +64,7 @@ def test_add_already_managed(caplog): assert "already lease managed" in caplog.text -def test_remove_not_managed(caplog): +def test_remove_not_managed(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) leaser_ = leaser.Leaser(mock.sentinel.manager) @@ -74,7 +74,7 @@ def test_remove_not_managed(caplog): assert "not managed" in caplog.text -def test_remove_negative_bytes(caplog): +def test_remove_negative_bytes(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) leaser_ = leaser.Leaser(mock.sentinel.manager) @@ -98,7 +98,7 @@ def create_manager(flow_control=types.FlowControl()): return manager -def test_maintain_leases_inactive_manager(caplog): +def test_maintain_leases_inactive_manager(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager = create_manager() manager.is_active = False @@ -117,7 +117,7 @@ def test_maintain_leases_inactive_manager(caplog): assert "exiting" in caplog.text -def test_maintain_leases_stopped(caplog): +def test_maintain_leases_stopped(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager = create_manager() diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py index 64963de48f2e..0f060e4ead32 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_messages_on_hold.py @@ -140,7 +140,7 @@ def test_ordered_messages_one_key(): assert moh.size == 0 -def test_ordered_messages_drop_duplicate_keys(caplog): +def test_ordered_messages_drop_duplicate_keys(caplog, modify_google_logger_propagation): moh 
= messages_on_hold.MessagesOnHold() msg1 = make_message(ack_id="ack1", ordering_key="key1") @@ -377,7 +377,7 @@ def test_ordered_and_unordered_messages_interleaved(): assert moh.size == 0 -def test_cleanup_nonexistent_key(caplog): +def test_cleanup_nonexistent_key(caplog, modify_google_logger_propagation): moh = messages_on_hold.MessagesOnHold() moh._clean_up_ordering_key("non-existent-key") assert ( @@ -386,7 +386,7 @@ def test_cleanup_nonexistent_key(caplog): ) -def test_cleanup_key_with_messages(caplog): +def test_cleanup_key_with_messages(caplog, modify_google_logger_propagation): moh = messages_on_hold.MessagesOnHold() # Put message with "key1". diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 4d2d1b98e42b..f4ceedaf0db6 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -603,7 +603,9 @@ def test__maybe_release_messages_below_overload(): assert call_args[1].ack_id in ("ack_foo", "ack_bar") -def test__maybe_release_messages_negative_on_hold_bytes_warning(caplog): +def test__maybe_release_messages_negative_on_hold_bytes_warning( + caplog, modify_google_logger_propagation +): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) @@ -924,7 +926,7 @@ def test_send_unary_modack_exactly_once_disabled_with_futures(): assert future3.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS -def test_send_unary_ack_api_call_error(caplog): +def test_send_unary_ack_api_call_error(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager = make_manager() @@ -945,7 +947,7 @@ def test_send_unary_ack_api_call_error(caplog): assert "The front fell off" in caplog.text -def 
test_send_unary_modack_api_call_error(caplog): +def test_send_unary_modack_api_call_error(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager = make_manager() @@ -978,7 +980,9 @@ def test_send_unary_modack_api_call_error(caplog): assert "The front fell off" in caplog.text -def test_send_unary_ack_retry_error_exactly_once_disabled_no_futures(caplog): +def test_send_unary_ack_retry_error_exactly_once_disabled_no_futures( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() @@ -1014,7 +1018,9 @@ def test_send_unary_ack_retry_error_exactly_once_disabled_no_futures(caplog): assert "signaled streaming pull manager shutdown" in caplog.text -def test_send_unary_ack_retry_error_exactly_once_disabled_with_futures(caplog): +def test_send_unary_ack_retry_error_exactly_once_disabled_with_futures( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() @@ -1054,7 +1060,9 @@ def test_send_unary_ack_retry_error_exactly_once_disabled_with_futures(caplog): assert future2.result() == subscriber_exceptions.AcknowledgeStatus.SUCCESS -def test_send_unary_ack_retry_error_exactly_once_enabled_no_futures(caplog): +def test_send_unary_ack_retry_error_exactly_once_enabled_no_futures( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() @@ -1090,7 +1098,9 @@ def test_send_unary_ack_retry_error_exactly_once_enabled_no_futures(caplog): assert "signaled streaming pull manager shutdown" in caplog.text -def test_send_unary_ack_retry_error_exactly_once_enabled_with_futures(caplog): +def test_send_unary_ack_retry_error_exactly_once_enabled_with_futures( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() @@ -1136,7 +1146,9 @@ def 
test_send_unary_ack_retry_error_exactly_once_enabled_with_futures(caplog): ) -def test_send_unary_modack_retry_error_exactly_once_disabled_no_future(caplog): +def test_send_unary_modack_retry_error_exactly_once_disabled_no_future( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager, _, _, _, _, _ = make_running_manager() @@ -1162,7 +1174,7 @@ def test_send_unary_modack_retry_error_exactly_once_disabled_no_future(caplog): def test_send_unary_modack_retry_error_exactly_once_disabled_with_futures( - caplog, + caplog, modify_google_logger_propagation ): caplog.set_level(logging.DEBUG) @@ -1191,7 +1203,7 @@ def test_send_unary_modack_retry_error_exactly_once_disabled_with_futures( def test_send_unary_modack_retry_error_exactly_once_enabled_no_futures( - caplog, + caplog, modify_google_logger_propagation ): caplog.set_level(logging.DEBUG) @@ -1218,7 +1230,7 @@ def test_send_unary_modack_retry_error_exactly_once_enabled_no_futures( def test_send_unary_modack_retry_error_exactly_once_enabled_with_futures( - caplog, + caplog, modify_google_logger_propagation ): caplog.set_level(logging.DEBUG) @@ -1271,7 +1283,9 @@ def test_heartbeat_inactive(): assert not result -def test_heartbeat_stream_ack_deadline_seconds(caplog): +def test_heartbeat_stream_ack_deadline_seconds( + caplog, modify_google_logger_propagation +): caplog.set_level(logging.DEBUG) manager = make_manager() manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) @@ -1922,7 +1936,7 @@ def test__on_response_with_leaser_overload(): assert msg.message_id in ("2", "3") -def test__on_response_none_data(caplog): +def test__on_response_none_data(caplog, modify_google_logger_propagation): caplog.set_level(logging.DEBUG) manager, _, dispatcher, leaser, _, scheduler = make_running_manager() @@ -2087,7 +2101,10 @@ def test__on_response_disable_exactly_once(): assert manager._stream_ack_deadline == 60 -def test__on_response_exactly_once_immediate_modacks_fail(caplog): +def 
test__on_response_exactly_once_immediate_modacks_fail( + caplog, + modify_google_logger_propagation, +): manager, _, dispatcher, leaser, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback @@ -2159,7 +2176,9 @@ def complete_futures_with_error(*args, **kwargs): assert manager.load == 0.001 -def test__on_response_exactly_once_immediate_modacks_fail_non_invalid(caplog): +def test__on_response_exactly_once_immediate_modacks_fail_non_invalid( + caplog, modify_google_logger_propagation +): manager, _, dispatcher, leaser, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback From e94f626fbdea52f24580690a72db23c449b840ea Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Mar 2025 12:00:31 -0400 Subject: [PATCH 1146/1197] feat: deprecate `enabled` field for message transforms and add `disabled` field (#1355) Co-authored-by: Owl Bot Co-authored-by: ohmayr Co-authored-by: Anthonios Partheniou --- .../services/publisher/async_client.py | 191 ++- .../pubsub_v1/services/publisher/client.py | 286 ++-- .../pubsub_v1/services/publisher/pagers.py | 48 +- .../services/publisher/transports/grpc.py | 118 +- .../publisher/transports/grpc_asyncio.py | 267 ++-- .../services/publisher/transports/rest.py | 1009 +++++++++++- .../publisher/transports/rest_base.py | 2 - .../services/schema_service/async_client.py | 204 ++- .../services/schema_service/client.py | 299 ++-- .../services/schema_service/pagers.py | 32 +- .../schema_service/transports/grpc.py | 120 +- .../schema_service/transports/grpc_asyncio.py | 269 ++-- .../schema_service/transports/rest.py | 1096 +++++++++++-- .../schema_service/transports/rest_base.py | 2 - .../services/subscriber/async_client.py | 277 +++- .../pubsub_v1/services/subscriber/client.py | 372 +++-- .../pubsub_v1/services/subscriber/pagers.py | 32 +- .../services/subscriber/transports/grpc.py | 132 +- .../subscriber/transports/grpc_asyncio.py | 
281 ++-- .../services/subscriber/transports/rest.py | 1361 +++++++++++++++-- .../subscriber/transports/rest_base.py | 2 - .../google/pubsub_v1/types/pubsub.py | 64 +- .../snippet_metadata_google.pubsub.v1.json | 142 +- .../scripts/fixup_pubsub_v1_keywords.py | 3 - .../unit/gapic/pubsub_v1/test_publisher.py | 145 ++ .../gapic/pubsub_v1/test_schema_service.py | 153 ++ .../unit/gapic/pubsub_v1/test_subscriber.py | 181 +++ 27 files changed, 5835 insertions(+), 1253 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 4fd755d91270..106ce5f93ebd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -54,6 +55,15 @@ from .transports.grpc_asyncio import PublisherGrpcAsyncIOTransport from .client import PublisherClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class PublisherAsyncClient: """The service that an application uses to manipulate topics, @@ -259,6 +269,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.PublisherAsyncClient`.", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": 
f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Publisher", + "credentialsType": None, + }, + ) + async def create_topic( self, request: Optional[Union[pubsub.Topic, dict]] = None, @@ -266,7 +298,7 @@ async def create_topic( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource name rules] @@ -318,8 +350,10 @@ async def sample_create_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Topic: @@ -328,7 +362,10 @@ async def sample_create_topic(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -379,7 +416,7 @@ async def update_topic( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Updates an existing topic by updating the fields specified in the update mask. Note that certain @@ -438,8 +475,10 @@ async def sample_update_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Topic: @@ -448,7 +487,10 @@ async def sample_update_topic(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic, update_mask]) + flattened_params = [topic, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -503,7 +545,7 @@ async def publish( messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. @@ -554,8 +596,10 @@ async def sample_publish(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.PublishResponse: @@ -564,7 +608,10 @@ async def sample_publish(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic, messages]) + flattened_params = [topic, messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -614,7 +661,7 @@ async def get_topic( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. @@ -658,8 +705,10 @@ async def sample_get_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Topic: @@ -668,7 +717,10 @@ async def sample_get_topic(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -718,7 +770,7 @@ async def list_topics( project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTopicsAsyncPager: r"""Lists matching topics. @@ -763,8 +815,10 @@ async def sample_list_topics(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager: @@ -777,7 +831,10 @@ async def sample_list_topics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project]) + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -838,7 +895,7 @@ async def list_topic_subscriptions( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTopicSubscriptionsAsyncPager: r"""Lists the names of the attached subscriptions on this topic. @@ -885,8 +942,10 @@ async def sample_list_topic_subscriptions(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager: @@ -899,7 +958,10 @@ async def sample_list_topic_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -960,7 +1022,7 @@ async def list_topic_snapshots( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTopicSnapshotsAsyncPager: r"""Lists the names of the snapshots on this topic. Snapshots are used in @@ -1011,8 +1073,10 @@ async def sample_list_topic_snapshots(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager: @@ -1025,7 +1089,10 @@ async def sample_list_topic_snapshots(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1086,7 +1153,7 @@ async def delete_topic( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic does not exist. After a topic is deleted, a new topic @@ -1132,13 +1199,18 @@ async def sample_delete_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1184,7 +1256,7 @@ async def detach_subscription( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. All messages retained in the subscription are dropped. Subsequent ``Pull`` and @@ -1226,8 +1298,10 @@ async def sample_detach_subscription(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.DetachSubscriptionResponse: @@ -1275,22 +1349,24 @@ async def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. Replaces any existing policy. Args: - request (:class:`~.policy_pb2.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (TimeoutType): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -1310,6 +1386,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { @@ -1394,23 +1471,25 @@ async def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. + Returns an empty policy if the function exists and does not have a + policy set. Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
@@ -1515,26 +1594,28 @@ async def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control + r"""Tests the specified IAM permissions against the IAM access control policy for a function. - If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: - ~iam_policy_pb2.PolicyTestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 698aed49e4c4..2db3e1cda553 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import functools import os import re @@ -50,6 +53,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -524,6 +536,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -628,6 +667,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -699,6 +742,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.PublisherClient`.", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Publisher", + "credentialsType": None, + }, + ) + def create_topic( self, request: Optional[Union[pubsub.Topic, dict]] = None, @@ -706,7 +772,7 @@ def create_topic( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - 
metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Creates the given topic with the given name. See the [resource name rules] @@ -758,8 +824,10 @@ def sample_create_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Topic: @@ -768,7 +836,10 @@ def sample_create_topic(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -816,7 +887,7 @@ def update_topic( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Updates an existing topic by updating the fields specified in the update mask. Note that certain @@ -875,8 +946,10 @@ def sample_update_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Topic: @@ -885,7 +958,10 @@ def sample_update_topic(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic, update_mask]) + flattened_params = [topic, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -937,7 +1013,7 @@ def publish( messages: Optional[MutableSequence[pubsub.PubsubMessage]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.PublishResponse: r"""Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic does not exist. @@ -988,8 +1064,10 @@ def sample_publish(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.PublishResponse: @@ -998,7 +1076,10 @@ def sample_publish(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic, messages]) + flattened_params = [topic, messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1047,7 +1128,7 @@ def get_topic( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Gets the configuration of a topic. @@ -1091,8 +1172,10 @@ def sample_get_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Topic: @@ -1101,7 +1184,10 @@ def sample_get_topic(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1148,7 +1234,7 @@ def list_topics( project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTopicsPager: r"""Lists matching topics. @@ -1193,8 +1279,10 @@ def sample_list_topics(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.publisher.pagers.ListTopicsPager: @@ -1207,7 +1295,10 @@ def sample_list_topics(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project]) + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1265,7 +1356,7 @@ def list_topic_subscriptions( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTopicSubscriptionsPager: r"""Lists the names of the attached subscriptions on this topic. @@ -1312,8 +1403,10 @@ def sample_list_topic_subscriptions(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager: @@ -1326,7 +1419,10 @@ def sample_list_topic_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1384,7 +1480,7 @@ def list_topic_snapshots( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListTopicSnapshotsPager: r"""Lists the names of the snapshots on this topic. Snapshots are used in @@ -1435,8 +1531,10 @@ def sample_list_topic_snapshots(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager: @@ -1449,7 +1547,10 @@ def sample_list_topic_snapshots(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1507,7 +1608,7 @@ def delete_topic( topic: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic does not exist. After a topic is deleted, a new topic @@ -1553,13 +1654,18 @@ def sample_delete_topic(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([topic]) + flattened_params = [topic] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1602,7 +1708,7 @@ def detach_subscription( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Detaches a subscription from this topic. 
All messages retained in the subscription are dropped. Subsequent ``Pull`` and @@ -1644,8 +1750,10 @@ def sample_detach_subscription(): should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.DetachSubscriptionResponse: @@ -1704,7 +1812,7 @@ def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1718,8 +1826,10 @@ def set_iam_policy( should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -1796,11 +1906,7 @@ def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1811,16 +1917,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1828,7 +1938,7 @@ def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1843,8 +1953,10 @@ def get_iam_policy( any, should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -1921,11 +2033,7 @@ def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1936,16 +2044,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -1953,7 +2065,7 @@ def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: TimeoutType = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -1969,8 +2081,10 @@ def test_iam_permissions( if any, should be retried. timeout (TimeoutType): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. 
@@ -1984,11 +2098,7 @@ def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -1999,16 +2109,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index de3490c39300..da6de8dfaee3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -66,7 +66,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -80,8 +80,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListTopicsRequest(request) @@ -140,7 +142,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -154,8 +156,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListTopicsRequest(request) @@ -218,7 +222,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -232,8 +236,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListTopicSubscriptionsRequest(request) @@ -292,7 +298,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -306,8 +312,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListTopicSubscriptionsRequest(request) @@ -370,7 +378,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -384,8 +392,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListTopicSnapshotsRequest(request) @@ -444,7 +454,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -458,8 +468,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListTopicSnapshotsRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index b6e07b21ebaa..3b92a0c31b38 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,8 +24,11 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -30,6 +36,81 @@ from google.pubsub_v1.types import pubsub from .base import PublisherTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": 
client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class PublisherGrpcTransport(PublisherTransport): """gRPC backend transport for Publisher. @@ -186,7 +267,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -260,7 +346,7 @@ def create_topic(self) -> Callable[[pubsub.Topic], pubsub.Topic]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_topic" not in self._stubs: - self._stubs["create_topic"] = self.grpc_channel.unary_unary( + self._stubs["create_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/CreateTopic", request_serializer=pubsub.Topic.serialize, response_deserializer=pubsub.Topic.deserialize, @@ -286,7 +372,7 @@ def update_topic(self) -> Callable[[pubsub.UpdateTopicRequest], pubsub.Topic]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_topic" not in self._stubs: - self._stubs["update_topic"] = self.grpc_channel.unary_unary( + self._stubs["update_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/UpdateTopic", request_serializer=pubsub.UpdateTopicRequest.serialize, response_deserializer=pubsub.Topic.deserialize, @@ -311,7 +397,7 @@ def publish(self) -> Callable[[pubsub.PublishRequest], pubsub.PublishResponse]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "publish" not in self._stubs: - self._stubs["publish"] = self.grpc_channel.unary_unary( + self._stubs["publish"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/Publish", request_serializer=pubsub.PublishRequest.serialize, response_deserializer=pubsub.PublishResponse.deserialize, @@ -335,7 +421,7 @@ def get_topic(self) -> Callable[[pubsub.GetTopicRequest], pubsub.Topic]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_topic" not in self._stubs: - self._stubs["get_topic"] = self.grpc_channel.unary_unary( + self._stubs["get_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/GetTopic", request_serializer=pubsub.GetTopicRequest.serialize, response_deserializer=pubsub.Topic.deserialize, @@ -361,7 +447,7 @@ def list_topics( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_topics" not in self._stubs: - self._stubs["list_topics"] = self.grpc_channel.unary_unary( + self._stubs["list_topics"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/ListTopics", request_serializer=pubsub.ListTopicsRequest.serialize, response_deserializer=pubsub.ListTopicsResponse.deserialize, @@ -390,7 +476,7 @@ def list_topic_subscriptions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_topic_subscriptions" not in self._stubs: - self._stubs["list_topic_subscriptions"] = self.grpc_channel.unary_unary( + self._stubs["list_topic_subscriptions"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/ListTopicSubscriptions", request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, @@ -423,7 +509,7 @@ def list_topic_snapshots( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_topic_snapshots" not in self._stubs: - self._stubs["list_topic_snapshots"] = self.grpc_channel.unary_unary( + self._stubs["list_topic_snapshots"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/ListTopicSnapshots", request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, @@ -452,7 +538,7 @@ def delete_topic(self) -> Callable[[pubsub.DeleteTopicRequest], empty_pb2.Empty] # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_topic" not in self._stubs: - self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + self._stubs["delete_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/DeleteTopic", request_serializer=pubsub.DeleteTopicRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -484,13 +570,16 @@ def detach_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "detach_subscription" not in self._stubs: - self._stubs["detach_subscription"] = self.grpc_channel.unary_unary( + self._stubs["detach_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/DetachSubscription", request_serializer=pubsub.DetachSubscriptionRequest.serialize, response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, ) return self._stubs["detach_subscription"] + def close(self): + self._logged_channel.close() + @property def set_iam_policy( self, @@ -509,7 +598,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -535,7 +624,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -564,16 +653,13 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] - def close(self): - self.grpc_channel.close() - @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 6a293137a41d..2b6e8c60326d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -34,6 +40,82 @@ from .base import PublisherTransport, DEFAULT_CLIENT_INFO from .grpc import PublisherGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": 
"google.pubsub.v1.Publisher", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class PublisherGrpcAsyncIOTransport(PublisherTransport): """gRPC AsyncIO backend transport for Publisher. @@ -233,10 +315,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -268,7 +353,7 @@ def create_topic(self) -> Callable[[pubsub.Topic], Awaitable[pubsub.Topic]]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_topic" not in self._stubs: - self._stubs["create_topic"] = self.grpc_channel.unary_unary( + self._stubs["create_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/CreateTopic", request_serializer=pubsub.Topic.serialize, response_deserializer=pubsub.Topic.deserialize, @@ -296,7 +381,7 @@ def update_topic( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_topic" not in self._stubs: - self._stubs["update_topic"] = self.grpc_channel.unary_unary( + self._stubs["update_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/UpdateTopic", request_serializer=pubsub.UpdateTopicRequest.serialize, response_deserializer=pubsub.Topic.deserialize, @@ -323,7 +408,7 @@ def publish( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "publish" not in self._stubs: - self._stubs["publish"] = self.grpc_channel.unary_unary( + self._stubs["publish"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/Publish", request_serializer=pubsub.PublishRequest.serialize, response_deserializer=pubsub.PublishResponse.deserialize, @@ -347,7 +432,7 @@ def get_topic(self) -> Callable[[pubsub.GetTopicRequest], Awaitable[pubsub.Topic # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_topic" not in self._stubs: - self._stubs["get_topic"] = self.grpc_channel.unary_unary( + self._stubs["get_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/GetTopic", request_serializer=pubsub.GetTopicRequest.serialize, response_deserializer=pubsub.Topic.deserialize, @@ -373,7 +458,7 @@ def list_topics( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_topics" not in self._stubs: - self._stubs["list_topics"] = self.grpc_channel.unary_unary( + self._stubs["list_topics"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/ListTopics", request_serializer=pubsub.ListTopicsRequest.serialize, response_deserializer=pubsub.ListTopicsResponse.deserialize, @@ -403,7 +488,7 @@ def list_topic_subscriptions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_topic_subscriptions" not in self._stubs: - self._stubs["list_topic_subscriptions"] = self.grpc_channel.unary_unary( + self._stubs["list_topic_subscriptions"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/ListTopicSubscriptions", request_serializer=pubsub.ListTopicSubscriptionsRequest.serialize, response_deserializer=pubsub.ListTopicSubscriptionsResponse.deserialize, @@ -436,7 +521,7 @@ def list_topic_snapshots( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_topic_snapshots" not in self._stubs: - self._stubs["list_topic_snapshots"] = self.grpc_channel.unary_unary( + self._stubs["list_topic_snapshots"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/ListTopicSnapshots", request_serializer=pubsub.ListTopicSnapshotsRequest.serialize, response_deserializer=pubsub.ListTopicSnapshotsResponse.deserialize, @@ -467,7 +552,7 @@ def delete_topic( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_topic" not in self._stubs: - self._stubs["delete_topic"] = self.grpc_channel.unary_unary( + self._stubs["delete_topic"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/DeleteTopic", request_serializer=pubsub.DeleteTopicRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -499,93 +584,13 @@ def detach_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "detach_subscription" not in self._stubs: - self._stubs["detach_subscription"] = self.grpc_channel.unary_unary( + self._stubs["detach_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Publisher/DetachSubscription", request_serializer=pubsub.DetachSubscriptionRequest.serialize, response_deserializer=pubsub.DetachSubscriptionResponse.deserialize, ) return self._stubs["detach_subscription"] - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -752,11 +757,91 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: return "grpc_asyncio" + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + __all__ = ("PublisherGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index 3685dd55f985..0089968cc58f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -33,8 +34,6 @@ import warnings -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub @@ -47,6 +46,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -145,8 +152,8 @@ def post_update_topic(self, response): """ def pre_create_topic( - self, request: pubsub.Topic, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, str]]]: + self, request: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_topic Override in a subclass to manipulate the request or metadata @@ -157,15 +164,38 @@ def pre_create_topic( def post_create_topic(self, response: pubsub.Topic) -> pubsub.Topic: """Post-rpc interceptor for create_topic - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. 
This `post_create_topic` interceptor runs + before the `post_create_topic_with_metadata` interceptor. """ return response + def post_create_topic_with_metadata( + self, response: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_create_topic_with_metadata` + interceptor in new development instead of the `post_create_topic` interceptor. + When both interceptors are used, this `post_create_topic_with_metadata` interceptor runs after the + `post_create_topic` interceptor. The (possibly modified) response returned by + `post_create_topic` will be passed to + `post_create_topic_with_metadata`. + """ + return response, metadata + def pre_delete_topic( - self, request: pubsub.DeleteTopicRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.DeleteTopicRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.DeleteTopicRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.DeleteTopicRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_topic Override in a subclass to manipulate the request or metadata @@ -176,8 +206,10 @@ def pre_delete_topic( def pre_detach_subscription( self, request: pubsub.DetachSubscriptionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.DetachSubscriptionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.DetachSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for detach_subscription Override in a subclass to manipulate the request or metadata @@ -190,15 +222,42 @@ def post_detach_subscription( ) -> pubsub.DetachSubscriptionResponse: 
"""Post-rpc interceptor for detach_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_detach_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_detach_subscription` interceptor runs + before the `post_detach_subscription_with_metadata` interceptor. """ return response + def post_detach_subscription_with_metadata( + self, + response: pubsub.DetachSubscriptionResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.DetachSubscriptionResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for detach_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_detach_subscription_with_metadata` + interceptor in new development instead of the `post_detach_subscription` interceptor. + When both interceptors are used, this `post_detach_subscription_with_metadata` interceptor runs after the + `post_detach_subscription` interceptor. The (possibly modified) response returned by + `post_detach_subscription` will be passed to + `post_detach_subscription_with_metadata`. 
+ """ + return response, metadata + def pre_get_topic( - self, request: pubsub.GetTopicRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.GetTopicRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.GetTopicRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.GetTopicRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_topic Override in a subclass to manipulate the request or metadata @@ -209,15 +268,38 @@ def pre_get_topic( def post_get_topic(self, response: pubsub.Topic) -> pubsub.Topic: """Post-rpc interceptor for get_topic - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_get_topic` interceptor runs + before the `post_get_topic_with_metadata` interceptor. """ return response + def post_get_topic_with_metadata( + self, response: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_get_topic_with_metadata` + interceptor in new development instead of the `post_get_topic` interceptor. + When both interceptors are used, this `post_get_topic_with_metadata` interceptor runs after the + `post_get_topic` interceptor. The (possibly modified) response returned by + `post_get_topic` will be passed to + `post_get_topic_with_metadata`. 
+ """ + return response, metadata + def pre_list_topics( - self, request: pubsub.ListTopicsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.ListTopicsRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.ListTopicsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListTopicsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_topics Override in a subclass to manipulate the request or metadata @@ -230,17 +312,42 @@ def post_list_topics( ) -> pubsub.ListTopicsResponse: """Post-rpc interceptor for list_topics - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_topics_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_list_topics` interceptor runs + before the `post_list_topics_with_metadata` interceptor. """ return response + def post_list_topics_with_metadata( + self, + response: pubsub.ListTopicsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListTopicsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_topics + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_list_topics_with_metadata` + interceptor in new development instead of the `post_list_topics` interceptor. + When both interceptors are used, this `post_list_topics_with_metadata` interceptor runs after the + `post_list_topics` interceptor. The (possibly modified) response returned by + `post_list_topics` will be passed to + `post_list_topics_with_metadata`. 
+ """ + return response, metadata + def pre_list_topic_snapshots( self, request: pubsub.ListTopicSnapshotsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.ListTopicSnapshotsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSnapshotsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_topic_snapshots Override in a subclass to manipulate the request or metadata @@ -253,17 +360,44 @@ def post_list_topic_snapshots( ) -> pubsub.ListTopicSnapshotsResponse: """Post-rpc interceptor for list_topic_snapshots - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_topic_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_list_topic_snapshots` interceptor runs + before the `post_list_topic_snapshots_with_metadata` interceptor. """ return response + def post_list_topic_snapshots_with_metadata( + self, + response: pubsub.ListTopicSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSnapshotsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_topic_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_list_topic_snapshots_with_metadata` + interceptor in new development instead of the `post_list_topic_snapshots` interceptor. + When both interceptors are used, this `post_list_topic_snapshots_with_metadata` interceptor runs after the + `post_list_topic_snapshots` interceptor. 
The (possibly modified) response returned by + `post_list_topic_snapshots` will be passed to + `post_list_topic_snapshots_with_metadata`. + """ + return response, metadata + def pre_list_topic_subscriptions( self, request: pubsub.ListTopicSubscriptionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.ListTopicSubscriptionsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSubscriptionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_topic_subscriptions Override in a subclass to manipulate the request or metadata @@ -276,15 +410,42 @@ def post_list_topic_subscriptions( ) -> pubsub.ListTopicSubscriptionsResponse: """Post-rpc interceptor for list_topic_subscriptions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_topic_subscriptions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_list_topic_subscriptions` interceptor runs + before the `post_list_topic_subscriptions_with_metadata` interceptor. """ return response + def post_list_topic_subscriptions_with_metadata( + self, + response: pubsub.ListTopicSubscriptionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListTopicSubscriptionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_topic_subscriptions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_list_topic_subscriptions_with_metadata` + interceptor in new development instead of the `post_list_topic_subscriptions` interceptor. 
+ When both interceptors are used, this `post_list_topic_subscriptions_with_metadata` interceptor runs after the + `post_list_topic_subscriptions` interceptor. The (possibly modified) response returned by + `post_list_topic_subscriptions` will be passed to + `post_list_topic_subscriptions_with_metadata`. + """ + return response, metadata + def pre_publish( - self, request: pubsub.PublishRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.PublishRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.PublishRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PublishRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for publish Override in a subclass to manipulate the request or metadata @@ -295,15 +456,40 @@ def pre_publish( def post_publish(self, response: pubsub.PublishResponse) -> pubsub.PublishResponse: """Post-rpc interceptor for publish - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_publish_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_publish` interceptor runs + before the `post_publish_with_metadata` interceptor. """ return response + def post_publish_with_metadata( + self, + response: pubsub.PublishResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PublishResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for publish + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_publish_with_metadata` + interceptor in new development instead of the `post_publish` interceptor. 
+ When both interceptors are used, this `post_publish_with_metadata` interceptor runs after the + `post_publish` interceptor. The (possibly modified) response returned by + `post_publish` will be passed to + `post_publish_with_metadata`. + """ + return response, metadata + def pre_update_topic( - self, request: pubsub.UpdateTopicRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.UpdateTopicRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.UpdateTopicRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.UpdateTopicRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_topic Override in a subclass to manipulate the request or metadata @@ -314,17 +500,40 @@ def pre_update_topic( def post_update_topic(self, response: pubsub.Topic) -> pubsub.Topic: """Post-rpc interceptor for update_topic - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_topic_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Publisher server but before - it is returned to user code. + it is returned to user code. This `post_update_topic` interceptor runs + before the `post_update_topic_with_metadata` interceptor. """ return response + def post_update_topic_with_metadata( + self, response: pubsub.Topic, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[pubsub.Topic, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_topic + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Publisher server but before it is returned to user code. + + We recommend only using this `post_update_topic_with_metadata` + interceptor in new development instead of the `post_update_topic` interceptor. + When both interceptors are used, this `post_update_topic_with_metadata` interceptor runs after the + `post_update_topic` interceptor. 
The (possibly modified) response returned by + `post_update_topic` will be passed to + `post_update_topic_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -344,8 +553,10 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -365,8 +576,11 @@ def post_set_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -506,7 +720,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Call the create topic method 
over HTTP. @@ -516,8 +730,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.pubsub.Topic: @@ -527,6 +743,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseCreateTopic._get_http_options() ) + request, metadata = self._interceptor.pre_create_topic(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseCreateTopic._get_transcoded_request( @@ -545,6 +762,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.CreateTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "CreateTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._CreateTopic._get_response( self._host, @@ -566,7 +810,33 @@ def __call__( pb_resp = pubsub.Topic.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, 
_ = self._interceptor.post_create_topic_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Topic.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.create_topic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "CreateTopic", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteTopic(_BasePublisherRestTransport._BaseDeleteTopic, PublisherRestStub): @@ -601,7 +871,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete topic method over HTTP. @@ -611,13 +881,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BasePublisherRestTransport._BaseDeleteTopic._get_http_options() ) + request, metadata = self._interceptor.pre_delete_topic(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseDeleteTopic._get_transcoded_request( @@ -632,6 +905,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.DeleteTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "DeleteTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._DeleteTopic._get_response( self._host, @@ -681,7 +981,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.DetachSubscriptionResponse: r"""Call the detach subscription method over HTTP. @@ -692,8 +992,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.DetachSubscriptionResponse: @@ -705,6 +1007,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseDetachSubscription._get_http_options() ) + request, metadata = self._interceptor.pre_detach_subscription( request, metadata ) @@ -717,6 +1020,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.DetachSubscription", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "DetachSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._DetachSubscription._get_response( self._host, @@ -737,7 +1067,35 @@ def __call__( pb_resp = pubsub.DetachSubscriptionResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_detach_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_detach_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.DetachSubscriptionResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.detach_subscription", + extra={ + 
"serviceName": "google.pubsub.v1.Publisher", + "rpcName": "DetachSubscription", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetTopic(_BasePublisherRestTransport._BaseGetTopic, PublisherRestStub): @@ -772,7 +1130,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Call the get topic method over HTTP. @@ -782,8 +1140,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.Topic: @@ -791,6 +1151,7 @@ def __call__( """ http_options = _BasePublisherRestTransport._BaseGetTopic._get_http_options() + request, metadata = self._interceptor.pre_get_topic(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseGetTopic._get_transcoded_request( @@ -805,6 +1166,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.GetTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._GetTopic._get_response( self._host, @@ -825,7 +1213,33 @@ def __call__( pb_resp = pubsub.Topic.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_topic_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Topic.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.get_topic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetTopic", + "metadata": 
http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTopics(_BasePublisherRestTransport._BaseListTopics, PublisherRestStub): @@ -860,7 +1274,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.ListTopicsResponse: r"""Call the list topics method over HTTP. @@ -870,8 +1284,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.ListTopicsResponse: @@ -881,6 +1297,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseListTopics._get_http_options() ) + request, metadata = self._interceptor.pre_list_topics(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseListTopics._get_transcoded_request( @@ -895,6 +1312,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.ListTopics", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopics", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._ListTopics._get_response( self._host, @@ -915,7 +1359,33 @@ def __call__( pb_resp = pubsub.ListTopicsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topics(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topics_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListTopicsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.list_topics", + extra={ + "serviceName": 
"google.pubsub.v1.Publisher", + "rpcName": "ListTopics", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTopicSnapshots( @@ -952,7 +1422,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.ListTopicSnapshotsResponse: r"""Call the list topic snapshots method over HTTP. @@ -962,8 +1432,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.ListTopicSnapshotsResponse: @@ -973,6 +1445,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseListTopicSnapshots._get_http_options() ) + request, metadata = self._interceptor.pre_list_topic_snapshots( request, metadata ) @@ -985,6 +1458,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.ListTopicSnapshots", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSnapshots", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._ListTopicSnapshots._get_response( self._host, @@ -1005,7 +1505,35 @@ def __call__( pb_resp = pubsub.ListTopicSnapshotsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topic_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topic_snapshots_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListTopicSnapshotsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.list_topic_snapshots", + extra={ + 
"serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSnapshots", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListTopicSubscriptions( @@ -1042,7 +1570,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.ListTopicSubscriptionsResponse: r"""Call the list topic subscriptions method over HTTP. @@ -1052,8 +1580,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.ListTopicSubscriptionsResponse: @@ -1063,6 +1593,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseListTopicSubscriptions._get_http_options() ) + request, metadata = self._interceptor.pre_list_topic_subscriptions( request, metadata ) @@ -1075,6 +1606,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.ListTopicSubscriptions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSubscriptions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._ListTopicSubscriptions._get_response( self._host, @@ -1095,7 +1653,35 @@ def __call__( pb_resp = pubsub.ListTopicSubscriptionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_topic_subscriptions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_topic_subscriptions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListTopicSubscriptionsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.pubsub_v1.PublisherClient.list_topic_subscriptions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "ListTopicSubscriptions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _Publish(_BasePublisherRestTransport._BasePublish, PublisherRestStub): @@ -1131,7 +1717,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.PublishResponse: r"""Call the publish method over HTTP. @@ -1141,8 +1727,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.PublishResponse: @@ -1150,6 +1738,7 @@ def __call__( """ http_options = _BasePublisherRestTransport._BasePublish._get_http_options() + request, metadata = self._interceptor.pre_publish(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BasePublish._get_transcoded_request( @@ -1168,6 +1757,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.Publish", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "Publish", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._Publish._get_response( self._host, @@ -1189,7 +1805,33 @@ def __call__( pb_resp = pubsub.PublishResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_publish(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_publish_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.PublishResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.publish", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "Publish", + 
"metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateTopic(_BasePublisherRestTransport._BaseUpdateTopic, PublisherRestStub): @@ -1225,7 +1867,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Topic: r"""Call the update topic method over HTTP. @@ -1235,8 +1877,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.Topic: @@ -1246,6 +1890,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseUpdateTopic._get_http_options() ) + request, metadata = self._interceptor.pre_update_topic(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseUpdateTopic._get_transcoded_request( @@ -1264,6 +1909,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.UpdateTopic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "UpdateTopic", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._UpdateTopic._get_response( self._host, @@ -1285,7 +1957,33 @@ def __call__( pb_resp = pubsub.Topic.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_topic(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_topic_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Topic.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherClient.update_topic", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": 
"UpdateTopic", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1394,7 +2092,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. @@ -1404,8 +2102,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from GetIamPolicy method. 
@@ -1414,6 +2114,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseGetIamPolicy._get_http_options() ) + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseGetIamPolicy._get_transcoded_request( @@ -1428,6 +2129,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._GetIamPolicy._get_response( self._host, @@ -1447,6 +2175,27 @@ def __call__( resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1488,7 +2237,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Call the set iam policy method over HTTP. @@ -1498,8 +2247,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from SetIamPolicy method. @@ -1508,6 +2259,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseSetIamPolicy._get_http_options() ) + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) transcoded_request = ( _BasePublisherRestTransport._BaseSetIamPolicy._get_transcoded_request( @@ -1526,6 +2278,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._SetIamPolicy._get_response( self._host, @@ -1546,6 +2325,27 @@ def 
__call__( resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1587,7 +2387,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -1597,8 +2397,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
@@ -1607,6 +2409,7 @@ def __call__( http_options = ( _BasePublisherRestTransport._BaseTestIamPermissions._get_http_options() ) + request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) @@ -1623,6 +2426,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.PublisherClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = PublisherRestTransport._TestIamPermissions._get_response( self._host, @@ -1643,6 +2473,27 @@ def __call__( resp = iam_policy_pb2.TestIamPermissionsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.PublisherAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Publisher", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py index 1fa78cdd9be4..dad3a91b23d9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py @@ -26,8 +26,6 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 2d160b06284d..88eeb1fba2d1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -53,6 +54,15 @@ from .transports.grpc_asyncio import SchemaServiceGrpcAsyncIOTransport from .client import SchemaServiceClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class SchemaServiceAsyncClient: """Service for doing schema-related operations.""" @@ -258,6 +268,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SchemaServiceAsyncClient`.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.SchemaService", + "credentialsType": None, + }, + ) + async def create_schema( self, request: Optional[Union[gp_schema.CreateSchemaRequest, dict]] = None, @@ -267,7 +299,7 @@ async def create_schema( schema_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.Schema: r"""Creates a schema. @@ -335,8 +367,10 @@ async def sample_create_schema(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -345,7 +379,10 @@ async def sample_create_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, schema, schema_id]) + flattened_params = [parent, schema, schema_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -399,7 +436,7 @@ async def get_schema( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Gets a schema. @@ -442,8 +479,10 @@ async def sample_get_schema(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.pubsub_v1.types.Schema: @@ -452,7 +491,10 @@ async def sample_get_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -502,7 +544,7 @@ async def list_schemas( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSchemasAsyncPager: r"""Lists schemas in a project. @@ -546,8 +588,10 @@ async def sample_list_schemas(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager: @@ -560,7 +604,10 @@ async def sample_list_schemas(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -621,7 +668,7 @@ async def list_schema_revisions( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSchemaRevisionsAsyncPager: r"""Lists all schema revisions for the named schema. @@ -665,8 +712,10 @@ async def sample_list_schema_revisions(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsAsyncPager: @@ -679,7 +728,10 @@ async def sample_list_schema_revisions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -741,7 +793,7 @@ async def commit_schema( schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.Schema: r"""Commits a new schema revision to an existing schema. @@ -795,8 +847,10 @@ async def sample_commit_schema(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -805,7 +859,10 @@ async def sample_commit_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, schema]) + flattened_params = [name, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -858,7 +915,7 @@ async def rollback_schema( revision_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Creates a new schema revision that is a copy of the provided revision_id. @@ -913,8 +970,10 @@ async def sample_rollback_schema(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -923,7 +982,10 @@ async def sample_rollback_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, revision_id]) + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -976,7 +1038,7 @@ async def delete_schema_revision( revision_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Deletes a specific schema revision. @@ -1029,8 +1091,10 @@ async def sample_delete_schema_revision(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -1039,7 +1103,10 @@ async def sample_delete_schema_revision(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, revision_id]) + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1091,7 +1158,7 @@ async def delete_schema( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a schema. @@ -1131,13 +1198,18 @@ async def sample_delete_schema(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1185,7 +1257,7 @@ async def validate_schema( schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. @@ -1239,8 +1311,10 @@ async def sample_validate_schema(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.ValidateSchemaResponse: @@ -1251,7 +1325,10 @@ async def sample_validate_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, schema]) + flattened_params = [parent, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1302,7 +1379,7 @@ async def validate_message( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. @@ -1339,8 +1416,10 @@ async def sample_validate_message(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.ValidateMessageResponse: @@ -1386,21 +1465,23 @@ async def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. Replaces any existing policy. Args: - request (:class:`~.policy_pb2.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -1420,6 +1501,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { @@ -1504,22 +1586,24 @@ async def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. + Returns an empty policy if the function exists and does not have a + policy set. Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
@@ -1624,25 +1708,27 @@ async def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control + r"""Tests the specified IAM permissions against the IAM access control policy for a function. - If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: - ~iam_policy_pb2.PolicyTestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index c56f8a4f35d2..07894782cda6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import functools import os import re @@ -49,6 +52,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore @@ -473,6 +485,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -577,6 +616,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -651,6 +694,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SchemaServiceClient`.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.SchemaService", + "credentialsType": None, + }, + ) + def create_schema( self, request: Optional[Union[gp_schema.CreateSchemaRequest, dict]] = None, @@ -660,7 +726,7 @@ def create_schema( schema_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.Schema: r"""Creates a schema. @@ -728,8 +794,10 @@ def sample_create_schema(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -738,7 +806,10 @@ def sample_create_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, schema, schema_id]) + flattened_params = [parent, schema, schema_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -789,7 +860,7 @@ def get_schema( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Gets a schema. @@ -832,8 +903,10 @@ def sample_get_schema(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -842,7 +915,10 @@ def sample_get_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -889,7 +965,7 @@ def list_schemas( parent: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSchemasPager: r"""Lists schemas in a project. @@ -933,8 +1009,10 @@ def sample_list_schemas(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.schema_service.pagers.ListSchemasPager: @@ -947,7 +1025,10 @@ def sample_list_schemas(): # Create or coerce a protobuf request object. 
# - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1005,7 +1086,7 @@ def list_schema_revisions( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSchemaRevisionsPager: r"""Lists all schema revisions for the named schema. @@ -1049,8 +1130,10 @@ def sample_list_schema_revisions(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsPager: @@ -1063,7 +1146,10 @@ def sample_list_schema_revisions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1122,7 +1208,7 @@ def commit_schema( schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.Schema: r"""Commits a new schema revision to an existing schema. @@ -1176,8 +1262,10 @@ def sample_commit_schema(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -1186,7 +1274,10 @@ def sample_commit_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, schema]) + flattened_params = [name, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1236,7 +1327,7 @@ def rollback_schema( revision_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Creates a new schema revision that is a copy of the provided revision_id. @@ -1291,8 +1382,10 @@ def sample_rollback_schema(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -1301,7 +1394,10 @@ def sample_rollback_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, revision_id]) + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1351,7 +1447,7 @@ def delete_schema_revision( revision_id: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Deletes a specific schema revision. @@ -1404,8 +1500,10 @@ def sample_delete_schema_revision(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Schema: @@ -1414,7 +1512,10 @@ def sample_delete_schema_revision(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, revision_id]) + flattened_params = [name, revision_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1463,7 +1564,7 @@ def delete_schema( name: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes a schema. @@ -1503,13 +1604,18 @@ def sample_delete_schema(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1554,7 +1660,7 @@ def validate_schema( schema: Optional[gp_schema.Schema] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Validates a schema. @@ -1608,8 +1714,10 @@ def sample_validate_schema(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.ValidateSchemaResponse: @@ -1620,7 +1728,10 @@ def sample_validate_schema(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent, schema]) + flattened_params = [parent, schema] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1668,7 +1779,7 @@ def validate_message( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.ValidateMessageResponse: r"""Validates a message against a schema. @@ -1705,8 +1816,10 @@ def sample_validate_message(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.ValidateMessageResponse: @@ -1763,7 +1876,7 @@ def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -1776,8 +1889,10 @@ def set_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -1854,11 +1969,7 @@ def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1869,16 +1980,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -1886,7 +2001,7 @@ def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -1900,8 +2015,10 @@ def get_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -1978,11 +2095,7 @@ def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -1993,16 +2106,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2010,7 +2127,7 @@ def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -2025,8 +2142,10 @@ def test_iam_permissions( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -2040,11 +2159,7 @@ def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -2055,16 +2170,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index fa42a6b8cc2c..8f7080b268ec 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -66,7 +66,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -80,8 +80,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = schema.ListSchemasRequest(request) @@ -140,7 +142,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -154,8 +156,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = schema.ListSchemasRequest(request) @@ -218,7 +222,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -232,8 +236,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = schema.ListSchemaRevisionsRequest(request) @@ -292,7 +298,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -306,8 +312,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = schema.ListSchemaRevisionsRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 421879d193d4..daabd41b93de 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,8 +24,11 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -31,6 +37,81 @@ from google.pubsub_v1.types import schema as gp_schema from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, 
continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class 
SchemaServiceGrpcTransport(SchemaServiceTransport): """gRPC backend transport for SchemaService. @@ -186,7 +267,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -260,7 +346,7 @@ def create_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_schema" not in self._stubs: - self._stubs["create_schema"] = self.grpc_channel.unary_unary( + self._stubs["create_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/CreateSchema", request_serializer=gp_schema.CreateSchemaRequest.serialize, response_deserializer=gp_schema.Schema.deserialize, @@ -284,7 +370,7 @@ def get_schema(self) -> Callable[[schema.GetSchemaRequest], schema.Schema]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_schema" not in self._stubs: - self._stubs["get_schema"] = self.grpc_channel.unary_unary( + self._stubs["get_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/GetSchema", request_serializer=schema.GetSchemaRequest.serialize, response_deserializer=schema.Schema.deserialize, @@ -310,7 +396,7 @@ def list_schemas( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "list_schemas" not in self._stubs: - self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + self._stubs["list_schemas"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ListSchemas", request_serializer=schema.ListSchemasRequest.serialize, response_deserializer=schema.ListSchemasResponse.deserialize, @@ -338,7 +424,7 @@ def list_schema_revisions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_schema_revisions" not in self._stubs: - self._stubs["list_schema_revisions"] = self.grpc_channel.unary_unary( + self._stubs["list_schema_revisions"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ListSchemaRevisions", request_serializer=schema.ListSchemaRevisionsRequest.serialize, response_deserializer=schema.ListSchemaRevisionsResponse.deserialize, @@ -364,7 +450,7 @@ def commit_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "commit_schema" not in self._stubs: - self._stubs["commit_schema"] = self.grpc_channel.unary_unary( + self._stubs["commit_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/CommitSchema", request_serializer=gp_schema.CommitSchemaRequest.serialize, response_deserializer=gp_schema.Schema.deserialize, @@ -391,7 +477,7 @@ def rollback_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "rollback_schema" not in self._stubs: - self._stubs["rollback_schema"] = self.grpc_channel.unary_unary( + self._stubs["rollback_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/RollbackSchema", request_serializer=schema.RollbackSchemaRequest.serialize, response_deserializer=schema.Schema.deserialize, @@ -417,7 +503,7 @@ def delete_schema_revision( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_schema_revision" not in self._stubs: - self._stubs["delete_schema_revision"] = self.grpc_channel.unary_unary( + self._stubs["delete_schema_revision"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/DeleteSchemaRevision", request_serializer=schema.DeleteSchemaRevisionRequest.serialize, response_deserializer=schema.Schema.deserialize, @@ -441,7 +527,7 @@ def delete_schema(self) -> Callable[[schema.DeleteSchemaRequest], empty_pb2.Empt # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_schema" not in self._stubs: - self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + self._stubs["delete_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/DeleteSchema", request_serializer=schema.DeleteSchemaRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -467,7 +553,7 @@ def validate_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "validate_schema" not in self._stubs: - self._stubs["validate_schema"] = self.grpc_channel.unary_unary( + self._stubs["validate_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ValidateSchema", request_serializer=gp_schema.ValidateSchemaRequest.serialize, response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, @@ -493,13 +579,16 @@ def validate_message( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "validate_message" not in self._stubs: - self._stubs["validate_message"] = self.grpc_channel.unary_unary( + self._stubs["validate_message"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ValidateMessage", request_serializer=schema.ValidateMessageRequest.serialize, response_deserializer=schema.ValidateMessageResponse.deserialize, ) return self._stubs["validate_message"] + def close(self): + self._logged_channel.close() + @property def set_iam_policy( self, @@ -518,7 +607,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -544,7 +633,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -573,16 +662,13 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] - def close(self): - self.grpc_channel.close() - @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index e642ed1b9bff..c321b88c2301 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -35,6 +41,82 @@ from .base import SchemaServiceTransport, DEFAULT_CLIENT_INFO from .grpc import SchemaServiceGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": 
"google.pubsub.v1.SchemaService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class SchemaServiceGrpcAsyncIOTransport(SchemaServiceTransport): """gRPC AsyncIO backend transport for SchemaService. @@ -233,10 +315,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -268,7 +353,7 @@ def create_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_schema" not in self._stubs: - self._stubs["create_schema"] = self.grpc_channel.unary_unary( + self._stubs["create_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/CreateSchema", request_serializer=gp_schema.CreateSchemaRequest.serialize, response_deserializer=gp_schema.Schema.deserialize, @@ -294,7 +379,7 @@ def get_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_schema" not in self._stubs: - self._stubs["get_schema"] = self.grpc_channel.unary_unary( + self._stubs["get_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/GetSchema", request_serializer=schema.GetSchemaRequest.serialize, response_deserializer=schema.Schema.deserialize, @@ -320,7 +405,7 @@ def list_schemas( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_schemas" not in self._stubs: - self._stubs["list_schemas"] = self.grpc_channel.unary_unary( + self._stubs["list_schemas"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ListSchemas", request_serializer=schema.ListSchemasRequest.serialize, response_deserializer=schema.ListSchemasResponse.deserialize, @@ -349,7 +434,7 @@ def list_schema_revisions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_schema_revisions" not in self._stubs: - self._stubs["list_schema_revisions"] = self.grpc_channel.unary_unary( + self._stubs["list_schema_revisions"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ListSchemaRevisions", request_serializer=schema.ListSchemaRevisionsRequest.serialize, response_deserializer=schema.ListSchemaRevisionsResponse.deserialize, @@ -375,7 +460,7 @@ def commit_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "commit_schema" not in self._stubs: - self._stubs["commit_schema"] = self.grpc_channel.unary_unary( + self._stubs["commit_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/CommitSchema", request_serializer=gp_schema.CommitSchemaRequest.serialize, response_deserializer=gp_schema.Schema.deserialize, @@ -402,7 +487,7 @@ def rollback_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "rollback_schema" not in self._stubs: - self._stubs["rollback_schema"] = self.grpc_channel.unary_unary( + self._stubs["rollback_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/RollbackSchema", request_serializer=schema.RollbackSchemaRequest.serialize, response_deserializer=schema.Schema.deserialize, @@ -428,7 +513,7 @@ def delete_schema_revision( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_schema_revision" not in self._stubs: - self._stubs["delete_schema_revision"] = self.grpc_channel.unary_unary( + self._stubs["delete_schema_revision"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/DeleteSchemaRevision", request_serializer=schema.DeleteSchemaRevisionRequest.serialize, response_deserializer=schema.Schema.deserialize, @@ -454,7 +539,7 @@ def delete_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_schema" not in self._stubs: - self._stubs["delete_schema"] = self.grpc_channel.unary_unary( + self._stubs["delete_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/DeleteSchema", request_serializer=schema.DeleteSchemaRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -482,7 +567,7 @@ def validate_schema( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "validate_schema" not in self._stubs: - self._stubs["validate_schema"] = self.grpc_channel.unary_unary( + self._stubs["validate_schema"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ValidateSchema", request_serializer=gp_schema.ValidateSchemaRequest.serialize, response_deserializer=gp_schema.ValidateSchemaResponse.deserialize, @@ -510,93 +595,13 @@ def validate_message( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "validate_message" not in self._stubs: - self._stubs["validate_message"] = self.grpc_channel.unary_unary( + self._stubs["validate_message"] = self._logged_channel.unary_unary( "/google.pubsub.v1.SchemaService/ValidateMessage", request_serializer=schema.ValidateMessageRequest.serialize, response_deserializer=schema.ValidateMessageResponse.deserialize, ) return self._stubs["validate_message"] - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. - Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -763,11 +768,91 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: return "grpc_asyncio" + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. + Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + __all__ = ("SchemaServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 7cf86c8081bd..1bed03dc00b9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -33,8 +34,6 @@ import warnings -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema @@ -48,6 +47,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -156,8 +163,8 @@ def post_validate_schema(self, response): def pre_commit_schema( self, request: gp_schema.CommitSchemaRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gp_schema.CommitSchemaRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.CommitSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for commit_schema Override in a subclass to manipulate the request or metadata @@ -168,17 +175,40 @@ def pre_commit_schema( def post_commit_schema(self, response: gp_schema.Schema) -> gp_schema.Schema: """Post-rpc interceptor for commit_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_commit_schema_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_commit_schema` interceptor runs + before the `post_commit_schema_with_metadata` interceptor. """ return response + def post_commit_schema_with_metadata( + self, + response: gp_schema.Schema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for commit_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_commit_schema_with_metadata` + interceptor in new development instead of the `post_commit_schema` interceptor. + When both interceptors are used, this `post_commit_schema_with_metadata` interceptor runs after the + `post_commit_schema` interceptor. The (possibly modified) response returned by + `post_commit_schema` will be passed to + `post_commit_schema_with_metadata`. + """ + return response, metadata + def pre_create_schema( self, request: gp_schema.CreateSchemaRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gp_schema.CreateSchemaRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.CreateSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_schema Override in a subclass to manipulate the request or metadata @@ -189,15 +219,40 @@ def pre_create_schema( def post_create_schema(self, response: gp_schema.Schema) -> gp_schema.Schema: """Post-rpc interceptor for create_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_schema_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_create_schema` interceptor runs + before the `post_create_schema_with_metadata` interceptor. """ return response + def post_create_schema_with_metadata( + self, + response: gp_schema.Schema, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gp_schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_create_schema_with_metadata` + interceptor in new development instead of the `post_create_schema` interceptor. + When both interceptors are used, this `post_create_schema_with_metadata` interceptor runs after the + `post_create_schema` interceptor. The (possibly modified) response returned by + `post_create_schema` will be passed to + `post_create_schema_with_metadata`. 
+ """ + return response, metadata + def pre_delete_schema( - self, request: schema.DeleteSchemaRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[schema.DeleteSchemaRequest, Sequence[Tuple[str, str]]]: + self, + request: schema.DeleteSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.DeleteSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_schema Override in a subclass to manipulate the request or metadata @@ -208,8 +263,10 @@ def pre_delete_schema( def pre_delete_schema_revision( self, request: schema.DeleteSchemaRevisionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[schema.DeleteSchemaRevisionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema.DeleteSchemaRevisionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_schema_revision Override in a subclass to manipulate the request or metadata @@ -220,15 +277,38 @@ def pre_delete_schema_revision( def post_delete_schema_revision(self, response: schema.Schema) -> schema.Schema: """Post-rpc interceptor for delete_schema_revision - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_delete_schema_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_delete_schema_revision` interceptor runs + before the `post_delete_schema_revision_with_metadata` interceptor. 
""" return response + def post_delete_schema_revision_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_schema_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_delete_schema_revision_with_metadata` + interceptor in new development instead of the `post_delete_schema_revision` interceptor. + When both interceptors are used, this `post_delete_schema_revision_with_metadata` interceptor runs after the + `post_delete_schema_revision` interceptor. The (possibly modified) response returned by + `post_delete_schema_revision` will be passed to + `post_delete_schema_revision_with_metadata`. + """ + return response, metadata + def pre_get_schema( - self, request: schema.GetSchemaRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[schema.GetSchemaRequest, Sequence[Tuple[str, str]]]: + self, + request: schema.GetSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.GetSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_schema Override in a subclass to manipulate the request or metadata @@ -239,17 +319,40 @@ def pre_get_schema( def post_get_schema(self, response: schema.Schema) -> schema.Schema: """Post-rpc interceptor for get_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_get_schema` interceptor runs + before the `post_get_schema_with_metadata` interceptor. 
""" return response + def post_get_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_get_schema_with_metadata` + interceptor in new development instead of the `post_get_schema` interceptor. + When both interceptors are used, this `post_get_schema_with_metadata` interceptor runs after the + `post_get_schema` interceptor. The (possibly modified) response returned by + `post_get_schema` will be passed to + `post_get_schema_with_metadata`. + """ + return response, metadata + def pre_list_schema_revisions( self, request: schema.ListSchemaRevisionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[schema.ListSchemaRevisionsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema.ListSchemaRevisionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_schema_revisions Override in a subclass to manipulate the request or metadata @@ -262,15 +365,42 @@ def post_list_schema_revisions( ) -> schema.ListSchemaRevisionsResponse: """Post-rpc interceptor for list_schema_revisions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_schema_revisions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_list_schema_revisions` interceptor runs + before the `post_list_schema_revisions_with_metadata` interceptor. 
""" return response + def post_list_schema_revisions_with_metadata( + self, + response: schema.ListSchemaRevisionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + schema.ListSchemaRevisionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_schema_revisions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_list_schema_revisions_with_metadata` + interceptor in new development instead of the `post_list_schema_revisions` interceptor. + When both interceptors are used, this `post_list_schema_revisions_with_metadata` interceptor runs after the + `post_list_schema_revisions` interceptor. The (possibly modified) response returned by + `post_list_schema_revisions` will be passed to + `post_list_schema_revisions_with_metadata`. + """ + return response, metadata + def pre_list_schemas( - self, request: schema.ListSchemasRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[schema.ListSchemasRequest, Sequence[Tuple[str, str]]]: + self, + request: schema.ListSchemasRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ListSchemasRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_schemas Override in a subclass to manipulate the request or metadata @@ -283,15 +413,40 @@ def post_list_schemas( ) -> schema.ListSchemasResponse: """Post-rpc interceptor for list_schemas - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_schemas_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_list_schemas` interceptor runs + before the `post_list_schemas_with_metadata` interceptor. 
""" return response + def post_list_schemas_with_metadata( + self, + response: schema.ListSchemasResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ListSchemasResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_schemas + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_list_schemas_with_metadata` + interceptor in new development instead of the `post_list_schemas` interceptor. + When both interceptors are used, this `post_list_schemas_with_metadata` interceptor runs after the + `post_list_schemas` interceptor. The (possibly modified) response returned by + `post_list_schemas` will be passed to + `post_list_schemas_with_metadata`. + """ + return response, metadata + def pre_rollback_schema( - self, request: schema.RollbackSchemaRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[schema.RollbackSchemaRequest, Sequence[Tuple[str, str]]]: + self, + request: schema.RollbackSchemaRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.RollbackSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for rollback_schema Override in a subclass to manipulate the request or metadata @@ -302,17 +457,38 @@ def pre_rollback_schema( def post_rollback_schema(self, response: schema.Schema) -> schema.Schema: """Post-rpc interceptor for rollback_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_rollback_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_rollback_schema` interceptor runs + before the `post_rollback_schema_with_metadata` interceptor. 
""" return response + def post_rollback_schema_with_metadata( + self, response: schema.Schema, metadata: Sequence[Tuple[str, Union[str, bytes]]] + ) -> Tuple[schema.Schema, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for rollback_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_rollback_schema_with_metadata` + interceptor in new development instead of the `post_rollback_schema` interceptor. + When both interceptors are used, this `post_rollback_schema_with_metadata` interceptor runs after the + `post_rollback_schema` interceptor. The (possibly modified) response returned by + `post_rollback_schema` will be passed to + `post_rollback_schema_with_metadata`. + """ + return response, metadata + def pre_validate_message( self, request: schema.ValidateMessageRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[schema.ValidateMessageRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ValidateMessageRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for validate_message Override in a subclass to manipulate the request or metadata @@ -325,17 +501,42 @@ def post_validate_message( ) -> schema.ValidateMessageResponse: """Post-rpc interceptor for validate_message - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_message_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_validate_message` interceptor runs + before the `post_validate_message_with_metadata` interceptor. 
""" return response + def post_validate_message_with_metadata( + self, + response: schema.ValidateMessageResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[schema.ValidateMessageResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for validate_message + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_validate_message_with_metadata` + interceptor in new development instead of the `post_validate_message` interceptor. + When both interceptors are used, this `post_validate_message_with_metadata` interceptor runs after the + `post_validate_message` interceptor. The (possibly modified) response returned by + `post_validate_message` will be passed to + `post_validate_message_with_metadata`. + """ + return response, metadata + def pre_validate_schema( self, request: gp_schema.ValidateSchemaRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[gp_schema.ValidateSchemaRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gp_schema.ValidateSchemaRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for validate_schema Override in a subclass to manipulate the request or metadata @@ -348,17 +549,44 @@ def post_validate_schema( ) -> gp_schema.ValidateSchemaResponse: """Post-rpc interceptor for validate_schema - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_validate_schema_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the SchemaService server but before - it is returned to user code. + it is returned to user code. This `post_validate_schema` interceptor runs + before the `post_validate_schema_with_metadata` interceptor. 
""" return response + def post_validate_schema_with_metadata( + self, + response: gp_schema.ValidateSchemaResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gp_schema.ValidateSchemaResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for validate_schema + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the SchemaService server but before it is returned to user code. + + We recommend only using this `post_validate_schema_with_metadata` + interceptor in new development instead of the `post_validate_schema` interceptor. + When both interceptors are used, this `post_validate_schema_with_metadata` interceptor runs after the + `post_validate_schema` interceptor. The (possibly modified) response returned by + `post_validate_schema` will be passed to + `post_validate_schema_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -378,8 +606,10 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -399,8 +629,11 @@ def post_set_iam_policy(self, response: 
policy_pb2.Policy) -> policy_pb2.Policy: def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -541,7 +774,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.Schema: r"""Call the commit schema method over HTTP. @@ -551,8 +784,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.gp_schema.Schema: @@ -562,6 +797,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseCommitSchema._get_http_options() ) + request, metadata = self._interceptor.pre_commit_schema(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseCommitSchema._get_transcoded_request( http_options, request @@ -576,6 +812,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.CommitSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CommitSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._CommitSchema._get_response( self._host, @@ -597,7 +860,33 @@ def __call__( pb_resp = gp_schema.Schema.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_commit_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_commit_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gp_schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.pubsub_v1.SchemaServiceClient.commit_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CommitSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateSchema( @@ -635,7 +924,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.Schema: r"""Call the create schema method over HTTP. @@ -645,8 +934,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.gp_schema.Schema: @@ -656,6 +947,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseCreateSchema._get_http_options() ) + request, metadata = self._interceptor.pre_create_schema(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseCreateSchema._get_transcoded_request( http_options, request @@ -670,6 +962,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.CreateSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CreateSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._CreateSchema._get_response( self._host, @@ -691,7 +1010,33 @@ def __call__( pb_resp = gp_schema.Schema.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gp_schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.pubsub_v1.SchemaServiceClient.create_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "CreateSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteSchema( @@ -728,7 +1073,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete schema method over HTTP. @@ -738,13 +1083,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_http_options() ) + request, metadata = self._interceptor.pre_delete_schema(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseDeleteSchema._get_transcoded_request( http_options, request @@ -755,6 +1103,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.DeleteSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "DeleteSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._DeleteSchema._get_response( self._host, @@ -804,7 +1179,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Call the delete schema revision method over HTTP. @@ -814,8 +1189,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.schema.Schema: @@ -825,6 +1202,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseDeleteSchemaRevision._get_http_options() ) + request, metadata = self._interceptor.pre_delete_schema_revision( request, metadata ) @@ -837,6 +1215,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.DeleteSchemaRevision", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "DeleteSchemaRevision", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._DeleteSchemaRevision._get_response( self._host, @@ -857,7 +1262,33 @@ def __call__( pb_resp = schema.Schema.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_delete_schema_revision(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_schema_revision_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + 
_LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.delete_schema_revision", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "DeleteSchemaRevision", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetSchema( @@ -894,7 +1325,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Call the get schema method over HTTP. @@ -904,8 +1335,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.schema.Schema: @@ -915,6 +1348,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseGetSchema._get_http_options() ) + request, metadata = self._interceptor.pre_get_schema(request, metadata) transcoded_request = ( _BaseSchemaServiceRestTransport._BaseGetSchema._get_transcoded_request( @@ -929,6 +1363,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.GetSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._GetSchema._get_response( self._host, @@ -949,7 +1410,33 @@ def __call__( pb_resp = schema.Schema.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.get_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + 
"rpcName": "GetSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListSchemaRevisions( @@ -986,7 +1473,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.ListSchemaRevisionsResponse: r"""Call the list schema revisions method over HTTP. @@ -996,8 +1483,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.schema.ListSchemaRevisionsResponse: @@ -1007,6 +1496,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseListSchemaRevisions._get_http_options() ) + request, metadata = self._interceptor.pre_list_schema_revisions( request, metadata ) @@ -1019,6 +1509,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ListSchemaRevisions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemaRevisions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._ListSchemaRevisions._get_response( self._host, @@ -1039,7 +1556,35 @@ def __call__( pb_resp = schema.ListSchemaRevisionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_schema_revisions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schema_revisions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.ListSchemaRevisionsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.pubsub_v1.SchemaServiceClient.list_schema_revisions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemaRevisions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListSchemas( @@ -1076,7 +1621,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.ListSchemasResponse: r"""Call the list schemas method over HTTP. @@ -1086,8 +1631,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.schema.ListSchemasResponse: @@ -1097,6 +1644,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseListSchemas._get_http_options() ) + request, metadata = self._interceptor.pre_list_schemas(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseListSchemas._get_transcoded_request( http_options, request @@ -1109,6 +1657,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ListSchemas", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemas", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._ListSchemas._get_response( self._host, @@ -1129,7 +1704,33 @@ def __call__( pb_resp = schema.ListSchemasResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_schemas(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_schemas_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.ListSchemasResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.pubsub_v1.SchemaServiceClient.list_schemas", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ListSchemas", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _RollbackSchema( @@ -1167,7 +1768,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.Schema: r"""Call the rollback schema method over HTTP. @@ -1177,8 +1778,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.schema.Schema: @@ -1188,6 +1791,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_http_options() ) + request, metadata = self._interceptor.pre_rollback_schema(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseRollbackSchema._get_transcoded_request( http_options, request @@ -1202,6 +1806,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.RollbackSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "RollbackSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._RollbackSchema._get_response( self._host, @@ -1223,7 +1854,33 @@ def __call__( pb_resp = schema.Schema.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_rollback_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_rollback_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.Schema.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for 
google.pubsub_v1.SchemaServiceClient.rollback_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "RollbackSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ValidateMessage( @@ -1261,7 +1918,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> schema.ValidateMessageResponse: r"""Call the validate message method over HTTP. @@ -1271,8 +1928,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.schema.ValidateMessageResponse: @@ -1284,6 +1943,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseValidateMessage._get_http_options() ) + request, metadata = self._interceptor.pre_validate_message( request, metadata ) @@ -1300,6 +1960,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ValidateMessage", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateMessage", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._ValidateMessage._get_response( self._host, @@ -1321,7 +2008,33 @@ def __call__( pb_resp = schema.ValidateMessageResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_message(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_message_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = schema.ValidateMessageResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceClient.validate_message", + extra={ + "serviceName": 
"google.pubsub.v1.SchemaService", + "rpcName": "ValidateMessage", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ValidateSchema( @@ -1359,7 +2072,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> gp_schema.ValidateSchemaResponse: r"""Call the validate schema method over HTTP. @@ -1369,8 +2082,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.gp_schema.ValidateSchemaResponse: @@ -1382,6 +2097,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseValidateSchema._get_http_options() ) + request, metadata = self._interceptor.pre_validate_schema(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseValidateSchema._get_transcoded_request( http_options, request @@ -1396,6 +2112,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.ValidateSchema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateSchema", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._ValidateSchema._get_response( self._host, @@ -1417,7 +2160,35 @@ def __call__( pb_resp = gp_schema.ValidateSchemaResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_validate_schema(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_validate_schema_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gp_schema.ValidateSchemaResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( 
+ "Received response for google.pubsub_v1.SchemaServiceClient.validate_schema", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "ValidateSchema", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -1536,7 +2307,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. @@ -1546,8 +2317,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from GetIamPolicy method. 
@@ -1556,6 +2329,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_http_options() ) + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseGetIamPolicy._get_transcoded_request( http_options, request @@ -1566,6 +2340,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._GetIamPolicy._get_response( self._host, @@ -1585,6 +2386,27 @@ def __call__( resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1626,7 +2448,7 @@ def 
__call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Call the set iam policy method over HTTP. @@ -1636,8 +2458,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from SetIamPolicy method. @@ -1646,6 +2470,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_http_options() ) + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) transcoded_request = _BaseSchemaServiceRestTransport._BaseSetIamPolicy._get_transcoded_request( http_options, request @@ -1660,6 +2485,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # 
Send the request response = SchemaServiceRestTransport._SetIamPolicy._get_response( self._host, @@ -1680,6 +2532,27 @@ def __call__( resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -1721,7 +2594,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -1731,8 +2604,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
@@ -1741,6 +2616,7 @@ def __call__( http_options = ( _BaseSchemaServiceRestTransport._BaseTestIamPermissions._get_http_options() ) + request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) @@ -1757,6 +2633,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SchemaServiceClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SchemaServiceRestTransport._TestIamPermissions._get_response( self._host, @@ -1777,6 +2680,27 @@ def __call__( resp = iam_policy_pb2.TestIamPermissionsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SchemaServiceAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.SchemaService", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py index a97e454d4b37..94312eba7f26 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py @@ -26,8 +26,6 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import schema from google.pubsub_v1.types import schema as gp_schema diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 7f40480efae9..9fc50fd311b9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import logging as std_logging from collections import OrderedDict import re from typing import ( @@ -58,6 +59,15 @@ from .transports.grpc_asyncio import SubscriberGrpcAsyncIOTransport from .client import SubscriberClient +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + class SubscriberAsyncClient: """The service that an application uses to manipulate subscriptions and @@ -265,6 +275,28 @@ def __init__( client_info=client_info, ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SubscriberAsyncClient`.", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Subscriber", + "credentialsType": None, + }, + ) + async def create_subscription( self, request: Optional[Union[pubsub.Subscription, dict]] = None, @@ -275,7 +307,7 @@ async def create_subscription( ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. 
See the [resource name rules] @@ -388,8 +420,10 @@ async def sample_create_subscription(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Subscription: @@ -402,7 +436,10 @@ async def sample_create_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + flattened_params = [name, topic, push_config, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -458,7 +495,7 @@ async def get_subscription( subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. @@ -502,8 +539,10 @@ async def sample_get_subscription(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Subscription: @@ -516,7 +555,10 @@ async def sample_get_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription]) + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -569,7 +611,7 @@ async def update_subscription( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Updates an existing subscription by updating the fields specified in the update mask. Note that certain @@ -628,8 +670,10 @@ async def sample_update_subscription(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.pubsub_v1.types.Subscription: @@ -642,7 +686,10 @@ async def sample_update_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, update_mask]) + flattened_params = [subscription, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -696,7 +743,7 @@ async def list_subscriptions( project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSubscriptionsAsyncPager: r"""Lists matching subscriptions. @@ -740,8 +787,10 @@ async def sample_list_subscriptions(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager: @@ -754,7 +803,10 @@ async def sample_list_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project]) + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -815,7 +867,7 @@ async def delete_subscription( subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an existing subscription. All messages retained in the subscription are immediately dropped. Calls to ``Pull`` after @@ -861,13 +913,18 @@ async def sample_delete_subscription(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription]) + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -918,7 +975,7 @@ async def modify_ack_deadline( ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message @@ -987,13 +1044,18 @@ async def sample_modify_ack_deadline(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + flattened_params = [subscription, ack_ids, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1047,7 +1109,7 @@ async def acknowledge( ack_ids: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Acknowledges the messages associated with the ``ack_ids`` in the ``AcknowledgeRequest``. The Pub/Sub system can remove the @@ -1104,13 +1166,18 @@ async def sample_acknowledge(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, ack_ids]) + flattened_params = [subscription, ack_ids] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1163,7 +1230,7 @@ async def pull( max_messages: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.PullResponse: r"""Pulls messages from the server. @@ -1232,8 +1299,10 @@ async def sample_pull(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.PullResponse: @@ -1242,7 +1311,10 @@ async def sample_pull(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, return_immediately, max_messages]) + flattened_params = [subscription, return_immediately, max_messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1301,7 +1373,7 @@ def streaming_pull( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: r"""Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack @@ -1359,8 +1431,10 @@ def request_generator(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: AsyncIterable[google.pubsub_v1.types.StreamingPullResponse]: @@ -1397,7 +1471,7 @@ async def modify_push_config( push_config: Optional[pubsub.PushConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Modifies the ``PushConfig`` for a specified subscription. 
@@ -1456,13 +1530,18 @@ async def sample_modify_push_config(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, push_config]) + flattened_params = [subscription, push_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1513,7 +1592,7 @@ async def get_snapshot( snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. Snapshots are used in @@ -1561,8 +1640,10 @@ async def sample_get_snapshot(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Snapshot: @@ -1577,7 +1658,10 @@ async def sample_get_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([snapshot]) + flattened_params = [snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1627,7 +1711,7 @@ async def list_snapshots( project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSnapshotsAsyncPager: r"""Lists the existing snapshots. Snapshots are used in `Seek `__ @@ -1675,8 +1759,10 @@ async def sample_list_snapshots(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager: @@ -1689,7 +1775,10 @@ async def sample_list_snapshots(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project]) + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1751,7 +1840,7 @@ async def create_snapshot( subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Creates a snapshot from the requested subscription. Snapshots are used in @@ -1834,8 +1923,10 @@ async def sample_create_snapshot(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Snapshot: @@ -1850,7 +1941,10 @@ async def sample_create_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, subscription]) + flattened_params = [name, subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1903,7 +1997,7 @@ async def update_snapshot( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot by updating the fields specified in the update mask. Snapshots are used in @@ -1959,8 +2053,10 @@ async def sample_update_snapshot(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Snapshot: @@ -1975,7 +2071,10 @@ async def sample_update_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([snapshot, update_mask]) + flattened_params = [snapshot, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2029,7 +2128,7 @@ async def delete_snapshot( snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Removes an existing snapshot. Snapshots are used in [Seek] (https://cloud.google.com/pubsub/docs/replay-overview) @@ -2078,13 +2177,18 @@ async def sample_delete_snapshot(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([snapshot]) + flattened_params = [snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2130,7 +2234,7 @@ async def seek( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request. Snapshots are @@ -2174,8 +2278,10 @@ async def sample_seek(): retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.SeekResponse: @@ -2219,21 +2325,23 @@ async def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. Replaces any existing policy. Args: - request (:class:`~.policy_pb2.SetIamPolicyRequest`): + request (:class:`~.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2253,6 +2361,7 @@ async def set_iam_policy( **JSON Example** :: + { "bindings": [ { @@ -2337,22 +2446,24 @@ async def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. + Returns an empty policy if the function exists and does not have a + policy set. Args: request (:class:`~.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if + any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. 
@@ -2457,25 +2568,27 @@ async def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: - r"""Tests the specified permissions against the IAM access control + r"""Tests the specified IAM permissions against the IAM access control policy for a function. - If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. + If the function does not exist, this will return an empty set + of permissions, not a NOT_FOUND error. Args: request (:class:`~.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: - ~iam_policy_pb2.PolicyTestIamPermissionsResponse: + ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. """ # Create or coerce a protobuf request object. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index d601b0f0d365..29c7be980b51 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -14,6 +14,9 @@ # limitations under the License. # from collections import OrderedDict +from http import HTTPStatus +import json +import logging as std_logging import functools import os import re @@ -52,6 +55,15 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore @@ -528,6 +540,33 @@ def _validate_universe_domain(self): # NOTE (b/349488459): universe validation is disabled until further notice. return True + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. 
+ """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + @property def api_endpoint(self): """Return the API endpoint used by the client instance. @@ -632,6 +671,10 @@ def __init__( # Initialize the universe domain validation. self._is_universe_domain_valid = False + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + api_key_value = getattr(self._client_options, "api_key", None) if api_key_value and credentials: raise ValueError( @@ -703,6 +746,29 @@ def __init__( api_audience=self._client_options.api_audience, ) + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.pubsub_v1.SubscriberClient`.", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.pubsub.v1.Subscriber", + "credentialsType": None, + }, + ) + def create_subscription( self, request: Optional[Union[pubsub.Subscription, dict]] = None, @@ -713,7 +779,7 @@ def create_subscription( ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: 
Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Creates a subscription to a given topic. See the [resource name rules] @@ -826,8 +892,10 @@ def sample_create_subscription(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Subscription: @@ -840,7 +908,10 @@ def sample_create_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, topic, push_config, ack_deadline_seconds]) + flattened_params = [name, topic, push_config, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -893,7 +964,7 @@ def get_subscription( subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Gets the configuration details of a subscription. @@ -937,8 +1008,10 @@ def sample_get_subscription(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Subscription: @@ -951,7 +1024,10 @@ def sample_get_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription]) + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1001,7 +1077,7 @@ def update_subscription( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Updates an existing subscription by updating the fields specified in the update mask. Note that certain @@ -1060,8 +1136,10 @@ def sample_update_subscription(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Subscription: @@ -1074,7 +1152,10 @@ def sample_update_subscription(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, update_mask]) + flattened_params = [subscription, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1125,7 +1206,7 @@ def list_subscriptions( project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSubscriptionsPager: r"""Lists matching subscriptions. @@ -1169,8 +1250,10 @@ def sample_list_subscriptions(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager: @@ -1183,7 +1266,10 @@ def sample_list_subscriptions(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project]) + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1241,7 +1327,7 @@ def delete_subscription( subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Deletes an existing subscription. All messages retained in the subscription are immediately dropped. Calls to ``Pull`` after @@ -1287,13 +1373,18 @@ def sample_delete_subscription(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription]) + flattened_params = [subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1341,7 +1432,7 @@ def modify_ack_deadline( ack_deadline_seconds: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Modifies the ack deadline for a specific message. This method is useful to indicate that more time is needed to process a message @@ -1410,13 +1501,18 @@ def sample_modify_ack_deadline(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, ack_ids, ack_deadline_seconds]) + flattened_params = [subscription, ack_ids, ack_deadline_seconds] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1467,7 +1563,7 @@ def acknowledge( ack_ids: Optional[MutableSequence[str]] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Acknowledges the messages associated with the ``ack_ids`` in the ``AcknowledgeRequest``. The Pub/Sub system can remove the @@ -1524,13 +1620,18 @@ def sample_acknowledge(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, ack_ids]) + flattened_params = [subscription, ack_ids] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1580,7 +1681,7 @@ def pull( max_messages: Optional[int] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.PullResponse: r"""Pulls messages from the server. @@ -1649,8 +1750,10 @@ def sample_pull(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.PullResponse: @@ -1659,7 +1762,10 @@ def sample_pull(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([subscription, return_immediately, max_messages]) + flattened_params = [subscription, return_immediately, max_messages] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1717,7 +1823,7 @@ def streaming_pull( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[pubsub.StreamingPullResponse]: r"""Establishes a stream with the server, which sends messages down to the client. The client streams acknowledgements and ack @@ -1775,8 +1881,10 @@ def request_generator(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: Iterable[google.pubsub_v1.types.StreamingPullResponse]: @@ -1816,7 +1924,7 @@ def modify_push_config( push_config: Optional[pubsub.PushConfig] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Modifies the ``PushConfig`` for a specified subscription. @@ -1875,13 +1983,18 @@ def sample_modify_push_config(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. 
timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([subscription, push_config]) + flattened_params = [subscription, push_config] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -1929,7 +2042,7 @@ def get_snapshot( snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Gets the configuration details of a snapshot. Snapshots are used in @@ -1977,8 +2090,10 @@ def sample_get_snapshot(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: google.pubsub_v1.types.Snapshot: @@ -1993,7 +2108,10 @@ def sample_get_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - has_flattened_params = any([snapshot]) + flattened_params = [snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2040,7 +2158,7 @@ def list_snapshots( project: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pagers.ListSnapshotsPager: r"""Lists the existing snapshots. Snapshots are used in `Seek `__ @@ -2088,8 +2206,10 @@ def sample_list_snapshots(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager: @@ -2102,7 +2222,10 @@ def sample_list_snapshots(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([project]) + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2161,7 +2284,7 @@ def create_snapshot( subscription: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Creates a snapshot from the requested subscription. Snapshots are used in @@ -2244,8 +2367,10 @@ def sample_create_snapshot(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Snapshot: @@ -2260,7 +2385,10 @@ def sample_create_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, subscription]) + flattened_params = [name, subscription] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2310,7 +2438,7 @@ def update_snapshot( update_mask: Optional[field_mask_pb2.FieldMask] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Updates an existing snapshot by updating the fields specified in the update mask. Snapshots are used in @@ -2366,8 +2494,10 @@ def sample_update_snapshot(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.Snapshot: @@ -2382,7 +2512,10 @@ def sample_update_snapshot(): # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([snapshot, update_mask]) + flattened_params = [snapshot, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2433,7 +2566,7 @@ def delete_snapshot( snapshot: Optional[str] = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> None: r"""Removes an existing snapshot. Snapshots are used in [Seek] (https://cloud.google.com/pubsub/docs/replay-overview) @@ -2482,13 +2615,18 @@ def sample_delete_snapshot(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ # Create or coerce a protobuf request object. # - Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([snapshot]) + flattened_params = [snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " @@ -2531,7 +2669,7 @@ def seek( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.SeekResponse: r"""Seeks an existing subscription to a point in time or to a given snapshot, whichever is provided in the request. Snapshots are @@ -2575,8 +2713,10 @@ def sample_seek(): retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: google.pubsub_v1.types.SeekResponse: @@ -2633,7 +2773,7 @@ def set_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Sets the IAM access control policy on the specified function. @@ -2646,8 +2786,10 @@ def set_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2724,11 +2866,7 @@ def set_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.set_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2739,16 +2877,20 @@ def set_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def get_iam_policy( self, @@ -2756,7 +2898,7 @@ def get_iam_policy( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Gets the IAM access control policy for a function. @@ -2770,8 +2912,10 @@ def get_iam_policy( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.policy_pb2.Policy: Defines an Identity and Access Management (IAM) policy. @@ -2848,11 +2992,7 @@ def get_iam_policy( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_iam_policy, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] # Certain fields should be provided within the metadata header; # add these here. @@ -2863,16 +3003,20 @@ def get_iam_policy( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e def test_iam_permissions( self, @@ -2880,7 +3024,7 @@ def test_iam_permissions( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Tests the specified IAM permissions against the IAM access control policy for a function. @@ -2895,8 +3039,10 @@ def test_iam_permissions( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.iam_policy_pb2.TestIamPermissionsResponse: Response message for ``TestIamPermissions`` method. @@ -2910,11 +3056,7 @@ def test_iam_permissions( # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.test_iam_permissions, - default_timeout=None, - client_info=DEFAULT_CLIENT_INFO, - ) + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] # Certain fields should be provided within the metadata header; # add these here. @@ -2925,16 +3067,20 @@ def test_iam_permissions( # Validate the universe domain. self._validate_universe_domain() - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) + try: + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) - # Done; return the response. - return response + # Done; return the response. 
+ return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index c09c42027427..26adcdd63432 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -66,7 +66,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -80,8 +80,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListSubscriptionsRequest(request) @@ -140,7 +142,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -154,8 +156,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListSubscriptionsRequest(request) @@ -218,7 +222,7 @@ def __init__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiate the pager. @@ -232,8 +236,10 @@ def __init__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListSnapshotsRequest(request) @@ -292,7 +298,7 @@ def __init__( *, retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, Union[str, bytes]]] = () ): """Instantiates the pager. @@ -306,8 +312,10 @@ def __init__( retry (google.api_core.retry.AsyncRetry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ self._method = method self._request = pubsub.ListSnapshotsRequest(request) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index e706190ced95..ff59214ca991 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -13,6 +13,9 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import json +import logging as std_logging +import pickle import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union @@ -21,8 +24,11 @@ import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -30,6 +36,81 @@ from google.pubsub_v1.types import pubsub from .base import SubscriberTransport, DEFAULT_CLIENT_INFO +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, 
request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": client_call_details.method, + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class SubscriberGrpcTransport(SubscriberTransport): 
"""gRPC backend transport for Subscriber. @@ -188,7 +269,12 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @classmethod @@ -274,7 +360,7 @@ def create_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_subscription" not in self._stubs: - self._stubs["create_subscription"] = self.grpc_channel.unary_unary( + self._stubs["create_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/CreateSubscription", request_serializer=pubsub.Subscription.serialize, response_deserializer=pubsub.Subscription.deserialize, @@ -300,7 +386,7 @@ def get_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_subscription" not in self._stubs: - self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + self._stubs["get_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/GetSubscription", request_serializer=pubsub.GetSubscriptionRequest.serialize, response_deserializer=pubsub.Subscription.deserialize, @@ -329,7 +415,7 @@ def update_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_subscription" not in self._stubs: - self._stubs["update_subscription"] = self.grpc_channel.unary_unary( + self._stubs["update_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/UpdateSubscription", request_serializer=pubsub.UpdateSubscriptionRequest.serialize, response_deserializer=pubsub.Subscription.deserialize, @@ -355,7 +441,7 @@ def list_subscriptions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_subscriptions" not in self._stubs: - self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + self._stubs["list_subscriptions"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ListSubscriptions", request_serializer=pubsub.ListSubscriptionsRequest.serialize, response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, @@ -386,7 +472,7 @@ def delete_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_subscription" not in self._stubs: - self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + self._stubs["delete_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSubscription", request_serializer=pubsub.DeleteSubscriptionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -417,7 +503,7 @@ def modify_ack_deadline( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "modify_ack_deadline" not in self._stubs: - self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( + self._stubs["modify_ack_deadline"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyAckDeadline", request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -448,7 +534,7 @@ def acknowledge(self) -> Callable[[pubsub.AcknowledgeRequest], empty_pb2.Empty]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "acknowledge" not in self._stubs: - self._stubs["acknowledge"] = self.grpc_channel.unary_unary( + self._stubs["acknowledge"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/Acknowledge", request_serializer=pubsub.AcknowledgeRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -472,7 +558,7 @@ def pull(self) -> Callable[[pubsub.PullRequest], pubsub.PullResponse]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "pull" not in self._stubs: - self._stubs["pull"] = self.grpc_channel.unary_unary( + self._stubs["pull"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/Pull", request_serializer=pubsub.PullRequest.serialize, response_deserializer=pubsub.PullResponse.deserialize, @@ -505,7 +591,7 @@ def streaming_pull( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "streaming_pull" not in self._stubs: - self._stubs["streaming_pull"] = self.grpc_channel.stream_stream( + self._stubs["streaming_pull"] = self._logged_channel.stream_stream( "/google.pubsub.v1.Subscriber/StreamingPull", request_serializer=pubsub.StreamingPullRequest.serialize, response_deserializer=pubsub.StreamingPullResponse.deserialize, @@ -537,7 +623,7 @@ def modify_push_config( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "modify_push_config" not in self._stubs: - self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( + self._stubs["modify_push_config"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyPushConfig", request_serializer=pubsub.ModifyPushConfigRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -566,7 +652,7 @@ def get_snapshot(self) -> Callable[[pubsub.GetSnapshotRequest], pubsub.Snapshot] # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_snapshot" not in self._stubs: - self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["get_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/GetSnapshot", request_serializer=pubsub.GetSnapshotRequest.serialize, response_deserializer=pubsub.Snapshot.deserialize, @@ -596,7 +682,7 @@ def list_snapshots( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_snapshots" not in self._stubs: - self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + self._stubs["list_snapshots"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ListSnapshots", request_serializer=pubsub.ListSnapshotsRequest.serialize, response_deserializer=pubsub.ListSnapshotsResponse.deserialize, @@ -640,7 +726,7 @@ def create_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "create_snapshot" not in self._stubs: - self._stubs["create_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["create_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/CreateSnapshot", request_serializer=pubsub.CreateSnapshotRequest.serialize, response_deserializer=pubsub.Snapshot.deserialize, @@ -671,7 +757,7 @@ def update_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "update_snapshot" not in self._stubs: - self._stubs["update_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["update_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/UpdateSnapshot", request_serializer=pubsub.UpdateSnapshotRequest.serialize, response_deserializer=pubsub.Snapshot.deserialize, @@ -706,7 +792,7 @@ def delete_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_snapshot" not in self._stubs: - self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["delete_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSnapshot", request_serializer=pubsub.DeleteSnapshotRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -738,13 +824,16 @@ def seek(self) -> Callable[[pubsub.SeekRequest], pubsub.SeekResponse]: # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "seek" not in self._stubs: - self._stubs["seek"] = self.grpc_channel.unary_unary( + self._stubs["seek"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/Seek", request_serializer=pubsub.SeekRequest.serialize, response_deserializer=pubsub.SeekResponse.deserialize, ) return self._stubs["seek"] + def close(self): + self._logged_channel.close() + @property def set_iam_policy( self, @@ -763,7 +852,7 @@ def set_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/SetIamPolicy", request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -789,7 +878,7 @@ def get_iam_policy( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/GetIamPolicy", request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, response_deserializer=policy_pb2.Policy.FromString, @@ -818,16 +907,13 @@ def test_iam_permissions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( "/google.iam.v1.IAMPolicy/TestIamPermissions", request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, ) return self._stubs["test_iam_permissions"] - def close(self): - self.grpc_channel.close() - @property def kind(self) -> str: return "grpc" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index de960685819d..08eaf9665272 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -14,6 +14,9 @@ # limitations under the License. 
# import inspect +import json +import pickle +import logging as std_logging import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union @@ -23,8 +26,11 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore +from google.protobuf.json_format import MessageToJson +import google.protobuf.message import grpc # type: ignore +import proto # type: ignore from grpc.experimental import aio # type: ignore from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -34,6 +40,82 @@ from .base import SubscriberTransport, DEFAULT_CLIENT_INFO from .grpc import SubscriberGrpcTransport +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": 
"google.pubsub.v1.Subscriber", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + class SubscriberGrpcAsyncIOTransport(SubscriberTransport): """gRPC AsyncIO backend transport for Subscriber. @@ -235,10 +317,13 @@ def __init__( ], ) - # Wrap messages. This must be done after self._grpc_channel exists + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel self._wrap_with_kind = ( "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters ) + # Wrap messages. This must be done after self._logged_channel exists self._prep_wrapped_messages(client_info) @property @@ -282,7 +367,7 @@ def create_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_subscription" not in self._stubs: - self._stubs["create_subscription"] = self.grpc_channel.unary_unary( + self._stubs["create_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/CreateSubscription", request_serializer=pubsub.Subscription.serialize, response_deserializer=pubsub.Subscription.deserialize, @@ -308,7 +393,7 @@ def get_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_subscription" not in self._stubs: - self._stubs["get_subscription"] = self.grpc_channel.unary_unary( + self._stubs["get_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/GetSubscription", request_serializer=pubsub.GetSubscriptionRequest.serialize, response_deserializer=pubsub.Subscription.deserialize, @@ -337,7 +422,7 @@ def update_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_subscription" not in self._stubs: - self._stubs["update_subscription"] = self.grpc_channel.unary_unary( + self._stubs["update_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/UpdateSubscription", request_serializer=pubsub.UpdateSubscriptionRequest.serialize, response_deserializer=pubsub.Subscription.deserialize, @@ -365,7 +450,7 @@ def list_subscriptions( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_subscriptions" not in self._stubs: - self._stubs["list_subscriptions"] = self.grpc_channel.unary_unary( + self._stubs["list_subscriptions"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ListSubscriptions", request_serializer=pubsub.ListSubscriptionsRequest.serialize, response_deserializer=pubsub.ListSubscriptionsResponse.deserialize, @@ -396,7 +481,7 @@ def delete_subscription( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "delete_subscription" not in self._stubs: - self._stubs["delete_subscription"] = self.grpc_channel.unary_unary( + self._stubs["delete_subscription"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSubscription", request_serializer=pubsub.DeleteSubscriptionRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -427,7 +512,7 @@ def modify_ack_deadline( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "modify_ack_deadline" not in self._stubs: - self._stubs["modify_ack_deadline"] = self.grpc_channel.unary_unary( + self._stubs["modify_ack_deadline"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyAckDeadline", request_serializer=pubsub.ModifyAckDeadlineRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -460,7 +545,7 @@ def acknowledge( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "acknowledge" not in self._stubs: - self._stubs["acknowledge"] = self.grpc_channel.unary_unary( + self._stubs["acknowledge"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/Acknowledge", request_serializer=pubsub.AcknowledgeRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -484,7 +569,7 @@ def pull(self) -> Callable[[pubsub.PullRequest], Awaitable[pubsub.PullResponse]] # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "pull" not in self._stubs: - self._stubs["pull"] = self.grpc_channel.unary_unary( + self._stubs["pull"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/Pull", request_serializer=pubsub.PullRequest.serialize, response_deserializer=pubsub.PullResponse.deserialize, @@ -519,7 +604,7 @@ def streaming_pull( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "streaming_pull" not in self._stubs: - self._stubs["streaming_pull"] = self.grpc_channel.stream_stream( + self._stubs["streaming_pull"] = self._logged_channel.stream_stream( "/google.pubsub.v1.Subscriber/StreamingPull", request_serializer=pubsub.StreamingPullRequest.serialize, response_deserializer=pubsub.StreamingPullResponse.deserialize, @@ -551,7 +636,7 @@ def modify_push_config( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "modify_push_config" not in self._stubs: - self._stubs["modify_push_config"] = self.grpc_channel.unary_unary( + self._stubs["modify_push_config"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ModifyPushConfig", request_serializer=pubsub.ModifyPushConfigRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -582,7 +667,7 @@ def get_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_snapshot" not in self._stubs: - self._stubs["get_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["get_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/GetSnapshot", request_serializer=pubsub.GetSnapshotRequest.serialize, response_deserializer=pubsub.Snapshot.deserialize, @@ -614,7 +699,7 @@ def list_snapshots( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "list_snapshots" not in self._stubs: - self._stubs["list_snapshots"] = self.grpc_channel.unary_unary( + self._stubs["list_snapshots"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/ListSnapshots", request_serializer=pubsub.ListSnapshotsRequest.serialize, response_deserializer=pubsub.ListSnapshotsResponse.deserialize, @@ -658,7 +743,7 @@ def create_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. 
if "create_snapshot" not in self._stubs: - self._stubs["create_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["create_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/CreateSnapshot", request_serializer=pubsub.CreateSnapshotRequest.serialize, response_deserializer=pubsub.Snapshot.deserialize, @@ -689,7 +774,7 @@ def update_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "update_snapshot" not in self._stubs: - self._stubs["update_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["update_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/UpdateSnapshot", request_serializer=pubsub.UpdateSnapshotRequest.serialize, response_deserializer=pubsub.Snapshot.deserialize, @@ -724,7 +809,7 @@ def delete_snapshot( # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "delete_snapshot" not in self._stubs: - self._stubs["delete_snapshot"] = self.grpc_channel.unary_unary( + self._stubs["delete_snapshot"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/DeleteSnapshot", request_serializer=pubsub.DeleteSnapshotRequest.serialize, response_deserializer=empty_pb2.Empty.FromString, @@ -756,93 +841,13 @@ def seek(self) -> Callable[[pubsub.SeekRequest], Awaitable[pubsub.SeekResponse]] # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "seek" not in self._stubs: - self._stubs["seek"] = self.grpc_channel.unary_unary( + self._stubs["seek"] = self._logged_channel.unary_unary( "/google.pubsub.v1.Subscriber/Seek", request_serializer=pubsub.SeekRequest.serialize, response_deserializer=pubsub.SeekResponse.deserialize, ) return self._stubs["seek"] - @property - def set_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the set iam policy method over gRPC. 
- Sets the IAM access control policy on the specified - function. Replaces any existing policy. - Returns: - Callable[[~.SetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "set_iam_policy" not in self._stubs: - self._stubs["set_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/SetIamPolicy", - request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["set_iam_policy"] - - @property - def get_iam_policy( - self, - ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], Awaitable[policy_pb2.Policy]]: - r"""Return a callable for the get iam policy method over gRPC. - Gets the IAM access control policy for a function. - Returns an empty policy if the function exists and does - not have a policy set. - Returns: - Callable[[~.GetIamPolicyRequest], - Awaitable[~.Policy]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_iam_policy" not in self._stubs: - self._stubs["get_iam_policy"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/GetIamPolicy", - request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, - response_deserializer=policy_pb2.Policy.FromString, - ) - return self._stubs["get_iam_policy"] - - @property - def test_iam_permissions( - self, - ) -> Callable[ - [iam_policy_pb2.TestIamPermissionsRequest], - Awaitable[iam_policy_pb2.TestIamPermissionsResponse], - ]: - r"""Return a callable for the test iam permissions method over gRPC. - Tests the specified permissions against the IAM access control - policy for a function. If the function does not exist, this will - return an empty set of permissions, not a NOT_FOUND error. - Returns: - Callable[[~.TestIamPermissionsRequest], - Awaitable[~.TestIamPermissionsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "test_iam_permissions" not in self._stubs: - self._stubs["test_iam_permissions"] = self.grpc_channel.unary_unary( - "/google.iam.v1.IAMPolicy/TestIamPermissions", - request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, - response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, - ) - return self._stubs["test_iam_permissions"] - def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1112,11 +1117,91 @@ def _wrap_method(self, func, *args, **kwargs): return gapic_v1.method_async.wrap_method(func, *args, **kwargs) def close(self): - return self.grpc_channel.close() + return self._logged_channel.close() @property def kind(self) -> str: return "grpc_asyncio" + @property + def set_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.SetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the set iam policy method over gRPC. + Sets the IAM access control policy on the specified + function. Replaces any existing policy. + Returns: + Callable[[~.SetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "set_iam_policy" not in self._stubs: + self._stubs["set_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/SetIamPolicy", + request_serializer=iam_policy_pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["set_iam_policy"] + + @property + def get_iam_policy( + self, + ) -> Callable[[iam_policy_pb2.GetIamPolicyRequest], policy_pb2.Policy]: + r"""Return a callable for the get iam policy method over gRPC. + Gets the IAM access control policy for a function. 
+ Returns an empty policy if the function exists and does + not have a policy set. + Returns: + Callable[[~.GetIamPolicyRequest], + ~.Policy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_iam_policy" not in self._stubs: + self._stubs["get_iam_policy"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/GetIamPolicy", + request_serializer=iam_policy_pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=policy_pb2.Policy.FromString, + ) + return self._stubs["get_iam_policy"] + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [iam_policy_pb2.TestIamPermissionsRequest], + iam_policy_pb2.TestIamPermissionsResponse, + ]: + r"""Return a callable for the test iam permissions method over gRPC. + Tests the specified permissions against the IAM access control + policy for a function. If the function does not exist, this will + return an empty set of permissions, not a NOT_FOUND error. + Returns: + Callable[[~.TestIamPermissionsRequest], + ~.TestIamPermissionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "test_iam_permissions" not in self._stubs: + self._stubs["test_iam_permissions"] = self._logged_channel.unary_unary( + "/google.iam.v1.IAMPolicy/TestIamPermissions", + request_serializer=iam_policy_pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=iam_policy_pb2.TestIamPermissionsResponse.FromString, + ) + return self._stubs["test_iam_permissions"] + __all__ = ("SubscriberGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index 376fd4ab30bc..a44591cf9558 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -13,9 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # +import logging +import json # type: ignore from google.auth.transport.requests import AuthorizedSession # type: ignore -import json # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.api_core import exceptions as core_exceptions from google.api_core import retry as retries @@ -33,8 +34,6 @@ import warnings -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub @@ -47,6 +46,14 @@ except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object, None] # type: ignore +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, @@ -177,8 
+184,10 @@ def post_update_subscription(self, response): """ def pre_acknowledge( - self, request: pubsub.AcknowledgeRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.AcknowledgeRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.AcknowledgeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.AcknowledgeRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for acknowledge Override in a subclass to manipulate the request or metadata @@ -187,8 +196,10 @@ def pre_acknowledge( return request, metadata def pre_create_snapshot( - self, request: pubsub.CreateSnapshotRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.CreateSnapshotRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.CreateSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.CreateSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_snapshot Override in a subclass to manipulate the request or metadata @@ -199,15 +210,40 @@ def pre_create_snapshot( def post_create_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: """Post-rpc interceptor for create_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_create_snapshot` interceptor runs + before the `post_create_snapshot_with_metadata` interceptor. 
""" return response + def post_create_snapshot_with_metadata( + self, + response: pubsub.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_create_snapshot_with_metadata` + interceptor in new development instead of the `post_create_snapshot` interceptor. + When both interceptors are used, this `post_create_snapshot_with_metadata` interceptor runs after the + `post_create_snapshot` interceptor. The (possibly modified) response returned by + `post_create_snapshot` will be passed to + `post_create_snapshot_with_metadata`. + """ + return response, metadata + def pre_create_subscription( - self, request: pubsub.Subscription, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, str]]]: + self, + request: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for create_subscription Override in a subclass to manipulate the request or metadata @@ -220,15 +256,40 @@ def post_create_subscription( ) -> pubsub.Subscription: """Post-rpc interceptor for create_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_create_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_create_subscription` interceptor runs + before the `post_create_subscription_with_metadata` interceptor. 
""" return response + def post_create_subscription_with_metadata( + self, + response: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_create_subscription_with_metadata` + interceptor in new development instead of the `post_create_subscription` interceptor. + When both interceptors are used, this `post_create_subscription_with_metadata` interceptor runs after the + `post_create_subscription` interceptor. The (possibly modified) response returned by + `post_create_subscription` will be passed to + `post_create_subscription_with_metadata`. + """ + return response, metadata + def pre_delete_snapshot( - self, request: pubsub.DeleteSnapshotRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.DeleteSnapshotRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.DeleteSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.DeleteSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for delete_snapshot Override in a subclass to manipulate the request or metadata @@ -239,8 +300,10 @@ def pre_delete_snapshot( def pre_delete_subscription( self, request: pubsub.DeleteSubscriptionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.DeleteSubscriptionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.DeleteSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for delete_subscription Override in a subclass to manipulate the request or metadata @@ -249,8 +312,10 @@ def pre_delete_subscription( return request, metadata def pre_get_snapshot( - 
self, request: pubsub.GetSnapshotRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.GetSnapshotRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.GetSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.GetSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_snapshot Override in a subclass to manipulate the request or metadata @@ -261,17 +326,40 @@ def pre_get_snapshot( def post_get_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: """Post-rpc interceptor for get_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_get_snapshot` interceptor runs + before the `post_get_snapshot_with_metadata` interceptor. """ return response + def post_get_snapshot_with_metadata( + self, + response: pubsub.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_get_snapshot_with_metadata` + interceptor in new development instead of the `post_get_snapshot` interceptor. + When both interceptors are used, this `post_get_snapshot_with_metadata` interceptor runs after the + `post_get_snapshot` interceptor. The (possibly modified) response returned by + `post_get_snapshot` will be passed to + `post_get_snapshot_with_metadata`. 
+ """ + return response, metadata + def pre_get_subscription( self, request: pubsub.GetSubscriptionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.GetSubscriptionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.GetSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for get_subscription Override in a subclass to manipulate the request or metadata @@ -284,15 +372,40 @@ def post_get_subscription( ) -> pubsub.Subscription: """Post-rpc interceptor for get_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_get_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_get_subscription` interceptor runs + before the `post_get_subscription_with_metadata` interceptor. """ return response + def post_get_subscription_with_metadata( + self, + response: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_get_subscription_with_metadata` + interceptor in new development instead of the `post_get_subscription` interceptor. + When both interceptors are used, this `post_get_subscription_with_metadata` interceptor runs after the + `post_get_subscription` interceptor. The (possibly modified) response returned by + `post_get_subscription` will be passed to + `post_get_subscription_with_metadata`. 
+ """ + return response, metadata + def pre_list_snapshots( - self, request: pubsub.ListSnapshotsRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.ListSnapshotsRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.ListSnapshotsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListSnapshotsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for list_snapshots Override in a subclass to manipulate the request or metadata @@ -305,17 +418,42 @@ def post_list_snapshots( ) -> pubsub.ListSnapshotsResponse: """Post-rpc interceptor for list_snapshots - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_snapshots_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_list_snapshots` interceptor runs + before the `post_list_snapshots_with_metadata` interceptor. """ return response + def post_list_snapshots_with_metadata( + self, + response: pubsub.ListSnapshotsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ListSnapshotsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_snapshots + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_list_snapshots_with_metadata` + interceptor in new development instead of the `post_list_snapshots` interceptor. + When both interceptors are used, this `post_list_snapshots_with_metadata` interceptor runs after the + `post_list_snapshots` interceptor. The (possibly modified) response returned by + `post_list_snapshots` will be passed to + `post_list_snapshots_with_metadata`. 
+ """ + return response, metadata + def pre_list_subscriptions( self, request: pubsub.ListSubscriptionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.ListSubscriptionsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListSubscriptionsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for list_subscriptions Override in a subclass to manipulate the request or metadata @@ -328,17 +466,44 @@ def post_list_subscriptions( ) -> pubsub.ListSubscriptionsResponse: """Post-rpc interceptor for list_subscriptions - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_list_subscriptions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_list_subscriptions` interceptor runs + before the `post_list_subscriptions_with_metadata` interceptor. """ return response + def post_list_subscriptions_with_metadata( + self, + response: pubsub.ListSubscriptionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ListSubscriptionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_subscriptions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_list_subscriptions_with_metadata` + interceptor in new development instead of the `post_list_subscriptions` interceptor. + When both interceptors are used, this `post_list_subscriptions_with_metadata` interceptor runs after the + `post_list_subscriptions` interceptor. The (possibly modified) response returned by + `post_list_subscriptions` will be passed to + `post_list_subscriptions_with_metadata`. 
+ """ + return response, metadata + def pre_modify_ack_deadline( self, request: pubsub.ModifyAckDeadlineRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.ModifyAckDeadlineRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.ModifyAckDeadlineRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for modify_ack_deadline Override in a subclass to manipulate the request or metadata @@ -349,8 +514,8 @@ def pre_modify_ack_deadline( def pre_modify_push_config( self, request: pubsub.ModifyPushConfigRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.ModifyPushConfigRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.ModifyPushConfigRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for modify_push_config Override in a subclass to manipulate the request or metadata @@ -359,8 +524,10 @@ def pre_modify_push_config( return request, metadata def pre_pull( - self, request: pubsub.PullRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.PullRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.PullRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PullRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for pull Override in a subclass to manipulate the request or metadata @@ -371,15 +538,40 @@ def pre_pull( def post_pull(self, response: pubsub.PullResponse) -> pubsub.PullResponse: """Post-rpc interceptor for pull - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_pull_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_pull` interceptor runs + before the `post_pull_with_metadata` interceptor. 
""" return response + def post_pull_with_metadata( + self, + response: pubsub.PullResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.PullResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for pull + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_pull_with_metadata` + interceptor in new development instead of the `post_pull` interceptor. + When both interceptors are used, this `post_pull_with_metadata` interceptor runs after the + `post_pull` interceptor. The (possibly modified) response returned by + `post_pull` will be passed to + `post_pull_with_metadata`. + """ + return response, metadata + def pre_seek( - self, request: pubsub.SeekRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.SeekRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.SeekRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.SeekRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for seek Override in a subclass to manipulate the request or metadata @@ -390,15 +582,40 @@ def pre_seek( def post_seek(self, response: pubsub.SeekResponse) -> pubsub.SeekResponse: """Post-rpc interceptor for seek - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_seek_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_seek` interceptor runs + before the `post_seek_with_metadata` interceptor. 
""" return response + def post_seek_with_metadata( + self, + response: pubsub.SeekResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.SeekResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for seek + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_seek_with_metadata` + interceptor in new development instead of the `post_seek` interceptor. + When both interceptors are used, this `post_seek_with_metadata` interceptor runs after the + `post_seek` interceptor. The (possibly modified) response returned by + `post_seek` will be passed to + `post_seek_with_metadata`. + """ + return response, metadata + def pre_update_snapshot( - self, request: pubsub.UpdateSnapshotRequest, metadata: Sequence[Tuple[str, str]] - ) -> Tuple[pubsub.UpdateSnapshotRequest, Sequence[Tuple[str, str]]]: + self, + request: pubsub.UpdateSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.UpdateSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]]]: """Pre-rpc interceptor for update_snapshot Override in a subclass to manipulate the request or metadata @@ -409,17 +626,42 @@ def pre_update_snapshot( def post_update_snapshot(self, response: pubsub.Snapshot) -> pubsub.Snapshot: """Post-rpc interceptor for update_snapshot - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_snapshot_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_update_snapshot` interceptor runs + before the `post_update_snapshot_with_metadata` interceptor. 
""" return response + def post_update_snapshot_with_metadata( + self, + response: pubsub.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_snapshot + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_update_snapshot_with_metadata` + interceptor in new development instead of the `post_update_snapshot` interceptor. + When both interceptors are used, this `post_update_snapshot_with_metadata` interceptor runs after the + `post_update_snapshot` interceptor. The (possibly modified) response returned by + `post_update_snapshot` will be passed to + `post_update_snapshot_with_metadata`. + """ + return response, metadata + def pre_update_subscription( self, request: pubsub.UpdateSubscriptionRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[pubsub.UpdateSubscriptionRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + pubsub.UpdateSubscriptionRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for update_subscription Override in a subclass to manipulate the request or metadata @@ -432,17 +674,42 @@ def post_update_subscription( ) -> pubsub.Subscription: """Post-rpc interceptor for update_subscription - Override in a subclass to manipulate the response + DEPRECATED. Please use the `post_update_subscription_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response after it is returned by the Subscriber server but before - it is returned to user code. + it is returned to user code. This `post_update_subscription` interceptor runs + before the `post_update_subscription_with_metadata` interceptor. 
""" return response + def post_update_subscription_with_metadata( + self, + response: pubsub.Subscription, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[pubsub.Subscription, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_subscription + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Subscriber server but before it is returned to user code. + + We recommend only using this `post_update_subscription_with_metadata` + interceptor in new development instead of the `post_update_subscription` interceptor. + When both interceptors are used, this `post_update_subscription_with_metadata` interceptor runs after the + `post_update_subscription` interceptor. The (possibly modified) response returned by + `post_update_subscription` will be passed to + `post_update_subscription_with_metadata`. + """ + return response, metadata + def pre_get_iam_policy( self, request: iam_policy_pb2.GetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.GetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for get_iam_policy Override in a subclass to manipulate the request or metadata @@ -462,8 +729,10 @@ def post_get_iam_policy(self, response: policy_pb2.Policy) -> policy_pb2.Policy: def pre_set_iam_policy( self, request: iam_policy_pb2.SetIamPolicyRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.SetIamPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: """Pre-rpc interceptor for set_iam_policy Override in a subclass to manipulate the request or metadata @@ -483,8 +752,11 @@ def post_set_iam_policy(self, response: 
policy_pb2.Policy) -> policy_pb2.Policy: def pre_test_iam_permissions( self, request: iam_policy_pb2.TestIamPermissionsRequest, - metadata: Sequence[Tuple[str, str]], - ) -> Tuple[iam_policy_pb2.TestIamPermissionsRequest, Sequence[Tuple[str, str]]]: + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + iam_policy_pb2.TestIamPermissionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: """Pre-rpc interceptor for test_iam_permissions Override in a subclass to manipulate the request or metadata @@ -628,7 +900,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the acknowledge method over HTTP. @@ -638,13 +910,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseSubscriberRestTransport._BaseAcknowledge._get_http_options() ) + request, metadata = self._interceptor.pre_acknowledge(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BaseAcknowledge._get_transcoded_request( @@ -663,6 +938,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.Acknowledge", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Acknowledge", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._Acknowledge._get_response( self._host, @@ -714,7 +1016,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Call the create snapshot method over HTTP. @@ -724,8 +1026,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.Snapshot: @@ -741,6 +1045,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseCreateSnapshot._get_http_options() ) + request, metadata = self._interceptor.pre_create_snapshot(request, metadata) transcoded_request = _BaseSubscriberRestTransport._BaseCreateSnapshot._get_transcoded_request( http_options, request @@ -759,6 +1064,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.CreateSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "CreateSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._CreateSnapshot._get_response( self._host, @@ -780,7 +1112,33 @@ def __call__( pb_resp = pubsub.Snapshot.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_snapshot_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.create_snapshot", + extra={ + 
"serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "CreateSnapshot", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _CreateSubscription( @@ -818,7 +1176,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Call the create subscription method over HTTP. @@ -831,8 +1189,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.Subscription: @@ -846,6 +1206,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseCreateSubscription._get_http_options() ) + request, metadata = self._interceptor.pre_create_subscription( request, metadata ) @@ -862,6 +1223,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.CreateSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "CreateSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._CreateSubscription._get_response( self._host, @@ -883,7 +1271,33 @@ def __call__( pb_resp = pubsub.Subscription.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_create_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Subscription.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.create_subscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + 
"rpcName": "CreateSubscription", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _DeleteSnapshot( @@ -920,7 +1334,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete snapshot method over HTTP. @@ -930,13 +1344,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_http_options() ) + request, metadata = self._interceptor.pre_delete_snapshot(request, metadata) transcoded_request = _BaseSubscriberRestTransport._BaseDeleteSnapshot._get_transcoded_request( http_options, request @@ -949,6 +1366,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.DeleteSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "DeleteSnapshot", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._DeleteSnapshot._get_response( self._host, @@ -998,7 +1442,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the delete subscription method over HTTP. @@ -1009,13 +1453,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. """ http_options = ( _BaseSubscriberRestTransport._BaseDeleteSubscription._get_http_options() ) + request, metadata = self._interceptor.pre_delete_subscription( request, metadata ) @@ -1028,6 +1475,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.DeleteSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "DeleteSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = 
SubscriberRestTransport._DeleteSubscription._get_response( self._host, @@ -1077,7 +1551,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Call the get snapshot method over HTTP. @@ -1087,8 +1561,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.pubsub.Snapshot: @@ -1104,6 +1580,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseGetSnapshot._get_http_options() ) + request, metadata = self._interceptor.pre_get_snapshot(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BaseGetSnapshot._get_transcoded_request( @@ -1118,6 +1595,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.GetSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # 
Send the request response = SubscriberRestTransport._GetSnapshot._get_response( self._host, @@ -1138,7 +1642,33 @@ def __call__( pb_resp = pubsub.Snapshot.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_snapshot_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.get_snapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSnapshot", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _GetSubscription( @@ -1175,7 +1705,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Call the get subscription method over HTTP. @@ -1186,8 +1716,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.Subscription: @@ -1201,6 +1733,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseGetSubscription._get_http_options() ) + request, metadata = self._interceptor.pre_get_subscription( request, metadata ) @@ -1213,6 +1746,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.GetSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._GetSubscription._get_response( self._host, @@ -1233,7 +1793,33 @@ def __call__( pb_resp = pubsub.Subscription.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_get_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Subscription.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.get_subscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetSubscription", 
+ "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListSnapshots( @@ -1270,7 +1856,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.ListSnapshotsResponse: r"""Call the list snapshots method over HTTP. @@ -1280,8 +1866,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.pubsub.ListSnapshotsResponse: @@ -1291,6 +1879,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseListSnapshots._get_http_options() ) + request, metadata = self._interceptor.pre_list_snapshots(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BaseListSnapshots._get_transcoded_request( @@ -1305,6 +1894,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ListSnapshots", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSnapshots", + 
"httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._ListSnapshots._get_response( self._host, @@ -1325,7 +1941,33 @@ def __call__( pb_resp = pubsub.ListSnapshotsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_snapshots(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_snapshots_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListSnapshotsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.list_snapshots", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSnapshots", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ListSubscriptions( @@ -1362,7 +2004,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.ListSubscriptionsResponse: r"""Call the list subscriptions method over HTTP. @@ -1372,8 +2014,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.pubsub.ListSubscriptionsResponse: @@ -1383,6 +2027,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseListSubscriptions._get_http_options() ) + request, metadata = self._interceptor.pre_list_subscriptions( request, metadata ) @@ -1395,6 +2040,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ListSubscriptions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSubscriptions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._ListSubscriptions._get_response( self._host, @@ -1415,7 +2087,35 @@ def __call__( pb_resp = pubsub.ListSubscriptionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_list_subscriptions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_subscriptions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.ListSubscriptionsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": 
response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.list_subscriptions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ListSubscriptions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _ModifyAckDeadline( @@ -1453,7 +2153,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the modify ack deadline method over HTTP. @@ -1464,13 +2164,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseSubscriberRestTransport._BaseModifyAckDeadline._get_http_options() ) + request, metadata = self._interceptor.pre_modify_ack_deadline( request, metadata ) @@ -1487,6 +2190,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ModifyAckDeadline", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ModifyAckDeadline", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._ModifyAckDeadline._get_response( self._host, @@ -1538,7 +2268,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ): r"""Call the modify push config method over HTTP. @@ -1549,13 +2279,16 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
""" http_options = ( _BaseSubscriberRestTransport._BaseModifyPushConfig._get_http_options() ) + request, metadata = self._interceptor.pre_modify_push_config( request, metadata ) @@ -1572,6 +2305,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.ModifyPushConfig", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "ModifyPushConfig", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._ModifyPushConfig._get_response( self._host, @@ -1621,7 +2381,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.PullResponse: r"""Call the pull method over HTTP. @@ -1631,8 +2391,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.PullResponse: @@ -1640,6 +2402,7 @@ def __call__( """ http_options = _BaseSubscriberRestTransport._BasePull._get_http_options() + request, metadata = self._interceptor.pre_pull(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BasePull._get_transcoded_request( @@ -1658,6 +2421,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.Pull", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Pull", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._Pull._get_response( self._host, @@ -1679,7 +2469,31 @@ def __call__( pb_resp = pubsub.PullResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_pull(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_pull_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.PullResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.pull", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Pull", + "metadata": http_response["headers"], 
+ "httpResponse": http_response, + }, + ) return resp class _Seek(_BaseSubscriberRestTransport._BaseSeek, SubscriberRestStub): @@ -1715,7 +2529,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.SeekResponse: r"""Call the seek method over HTTP. @@ -1725,8 +2539,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.pubsub.SeekResponse: @@ -1736,6 +2552,7 @@ def __call__( """ http_options = _BaseSubscriberRestTransport._BaseSeek._get_http_options() + request, metadata = self._interceptor.pre_seek(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BaseSeek._get_transcoded_request( @@ -1754,6 +2571,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.Seek", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Seek", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._Seek._get_response( self._host, @@ -1775,7 +2619,31 @@ def __call__( pb_resp = pubsub.SeekResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_seek(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_seek_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.SeekResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.seek", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "Seek", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _StreamingPull( @@ -1790,7 +2658,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> rest_streaming.ResponseIterator: raise NotImplementedError( "Method StreamingPull is not available over REST transport" @@ -1831,7 +2699,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Snapshot: r"""Call the update snapshot method over HTTP. @@ -1842,8 +2710,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: ~.pubsub.Snapshot: @@ -1859,6 +2729,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_http_options() ) + request, metadata = self._interceptor.pre_update_snapshot(request, metadata) transcoded_request = _BaseSubscriberRestTransport._BaseUpdateSnapshot._get_transcoded_request( http_options, request @@ -1877,6 +2748,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.UpdateSnapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSnapshot", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._UpdateSnapshot._get_response( self._host, @@ -1898,7 +2796,33 @@ def __call__( pb_resp = pubsub.Snapshot.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_snapshot(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_snapshot_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = 
pubsub.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.update_snapshot", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSnapshot", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp class _UpdateSubscription( @@ -1936,7 +2860,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> pubsub.Subscription: r"""Call the update subscription method over HTTP. @@ -1947,8 +2871,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
Returns: ~.pubsub.Subscription: @@ -1962,6 +2888,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseUpdateSubscription._get_http_options() ) + request, metadata = self._interceptor.pre_update_subscription( request, metadata ) @@ -1978,6 +2905,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.UpdateSubscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "UpdateSubscription", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._UpdateSubscription._get_response( self._host, @@ -1999,7 +2953,33 @@ def __call__( pb_resp = pubsub.Subscription.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + resp = self._interceptor.post_update_subscription(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_subscription_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = pubsub.Subscription.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberClient.update_subscription", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + 
"rpcName": "UpdateSubscription", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) return resp @property @@ -2160,7 +3140,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Call the get iam policy method over HTTP. @@ -2170,8 +3150,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from GetIamPolicy method. 
@@ -2180,6 +3162,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseGetIamPolicy._get_http_options() ) + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BaseGetIamPolicy._get_transcoded_request( @@ -2194,6 +3177,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._GetIamPolicy._get_response( self._host, @@ -2213,6 +3223,27 @@ def __call__( resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberAsyncClient.GetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "GetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2254,7 +3285,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, 
timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> policy_pb2.Policy: r"""Call the set iam policy method over HTTP. @@ -2264,8 +3295,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: policy_pb2.Policy: Response from SetIamPolicy method. @@ -2274,6 +3307,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseSetIamPolicy._get_http_options() ) + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) transcoded_request = ( _BaseSubscriberRestTransport._BaseSetIamPolicy._get_transcoded_request( @@ -2294,6 +3328,33 @@ def __call__( ) ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._SetIamPolicy._get_response( self._host, @@ -2314,6 
+3375,27 @@ def __call__( resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberAsyncClient.SetIamPolicy", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "SetIamPolicy", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property @@ -2355,7 +3437,7 @@ def __call__( *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, - metadata: Sequence[Tuple[str, str]] = (), + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> iam_policy_pb2.TestIamPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -2365,8 +3447,10 @@ def __call__( retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. Returns: iam_policy_pb2.TestIamPermissionsResponse: Response from TestIamPermissions method. 
@@ -2375,6 +3459,7 @@ def __call__( http_options = ( _BaseSubscriberRestTransport._BaseTestIamPermissions._get_http_options() ) + request, metadata = self._interceptor.pre_test_iam_permissions( request, metadata ) @@ -2391,6 +3476,33 @@ def __call__( transcoded_request ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.pubsub_v1.SubscriberClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + # Send the request response = SubscriberRestTransport._TestIamPermissions._get_response( self._host, @@ -2411,6 +3523,27 @@ def __call__( resp = iam_policy_pb2.TestIamPermissionsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.pubsub_v1.SubscriberAsyncClient.TestIamPermissions", + extra={ + "serviceName": "google.pubsub.v1.Subscriber", + "rpcName": "TestIamPermissions", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) return resp @property diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py index 6626a04c1b7f..033af0b16bc5 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py @@ -26,8 +26,6 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore from google.protobuf import empty_pb2 # type: ignore from google.pubsub_v1.types import pubsub diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 5f15e445b82c..8917db59f4c4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1099,35 +1099,33 @@ class JavaScriptUDF(proto.Message): Required. JavaScript code that contains a function ``function_name`` with the below signature: - :: - // /** - // * Transforms a Pub/Sub message. - // - // * @return {(Object)>|null)} - To - // * filter a message, return `null`. To transform a message return a map - // * with the following keys: - // * - (required) 'data' : {string} - // * - (optional) 'attributes' : {Object} - // * Returning empty `attributes` will remove all attributes from the - // * message. - // * - // * @param {(Object)>} Pub/Sub - // * message. Keys: - // * - (required) 'data' : {string} - // * - (required) 'attributes' : {Object} - // * - // * @param {Object} metadata - Pub/Sub message metadata. 
- // * Keys: - // * - (required) 'message_id' : {string} - // * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format - // * - (optional) 'ordering_key': {string} - // */ - // - // function (message, metadata) { - // } - + /** + * Transforms a Pub/Sub message. + + * @return {(Object)>|null)} - To + * filter a message, return `null`. To transform a message return a map + * with the following keys: + * - (required) 'data' : {string} + * - (optional) 'attributes' : {Object} + * Returning empty `attributes` will remove all attributes from the + * message. + * + * @param {(Object)>} Pub/Sub + * message. Keys: + * - (required) 'data' : {string} + * - (required) 'attributes' : {Object} + * + * @param {Object} metadata - Pub/Sub message metadata. + * Keys: + * - (required) 'message_id' : {string} + * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format + * - (optional) 'ordering_key': {string} + */ + + function (message, metadata) { + } """ function_name: str = proto.Field( @@ -1153,9 +1151,11 @@ class MessageTransform(proto.Message): This field is a member of `oneof`_ ``transform``. enabled (bool): - Optional. If set to true, the transform is enabled. If - false, the transform is disabled and will not be applied to - messages. Defaults to ``true``. + Optional. This field is deprecated, use the ``disabled`` + field to disable transforms. + disabled (bool): + Optional. If true, the transform is disabled and will not be + applied to messages. Defaults to ``false``. 
""" javascript_udf: "JavaScriptUDF" = proto.Field( @@ -1168,6 +1168,10 @@ class MessageTransform(proto.Message): proto.BOOL, number=3, ) + disabled: bool = proto.Field( + proto.BOOL, + number=4, + ) class Topic(proto.Message): diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 039f0ca3db4d..dd0d6423a363 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.28.0" + "version": "0.1.0" }, "snippets": [ { @@ -47,7 +47,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Topic", @@ -127,7 +127,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Topic", @@ -208,7 +208,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_topic" @@ -285,7 +285,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_topic" @@ -359,7 +359,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", @@ -435,7 +435,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.DetachSubscriptionResponse", @@ -516,7 +516,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" 
+ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Topic", @@ -596,7 +596,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Topic", @@ -677,7 +677,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsAsyncPager", @@ -757,7 +757,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSnapshotsPager", @@ -838,7 +838,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsAsyncPager", @@ -918,7 +918,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicSubscriptionsPager", @@ -999,7 +999,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsAsyncPager", @@ -1079,7 +1079,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.publisher.pagers.ListTopicsPager", @@ -1164,7 +1164,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.PublishResponse", @@ -1248,7 +1248,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": 
"google.pubsub_v1.types.PublishResponse", @@ -1333,7 +1333,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Topic", @@ -1417,7 +1417,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Topic", @@ -1502,7 +1502,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -1586,7 +1586,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -1675,7 +1675,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -1763,7 +1763,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -1848,7 +1848,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -1932,7 +1932,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -2013,7 +2013,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_schema" @@ -2090,7 +2090,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_schema" @@ -2168,7 +2168,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -2248,7 +2248,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -2329,7 +2329,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsAsyncPager", @@ -2409,7 +2409,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemaRevisionsPager", @@ -2490,7 +2490,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasAsyncPager", @@ -2570,7 +2570,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.schema_service.pagers.ListSchemasPager", @@ -2655,7 +2655,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -2739,7 +2739,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Schema", @@ -2816,7 +2816,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.ValidateMessageResponse", @@ -2892,7 +2892,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.ValidateMessageResponse", @@ -2977,7 +2977,7 @@ }, { "name": 
"metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", @@ -3061,7 +3061,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.ValidateSchemaResponse", @@ -3146,7 +3146,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "acknowledge" @@ -3227,7 +3227,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "acknowledge" @@ -3309,7 +3309,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Snapshot", @@ -3393,7 +3393,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Snapshot", @@ -3486,7 +3486,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Subscription", @@ -3578,7 +3578,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Subscription", @@ -3659,7 +3659,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_snapshot" @@ -3736,7 +3736,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_snapshot" @@ -3814,7 +3814,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_subscription" @@ 
-3891,7 +3891,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "delete_subscription" @@ -3969,7 +3969,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Snapshot", @@ -4049,7 +4049,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Snapshot", @@ -4130,7 +4130,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Subscription", @@ -4210,7 +4210,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Subscription", @@ -4291,7 +4291,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsAsyncPager", @@ -4371,7 +4371,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSnapshotsPager", @@ -4452,7 +4452,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsAsyncPager", @@ -4532,7 +4532,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.services.subscriber.pagers.ListSubscriptionsPager", @@ -4621,7 +4621,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": 
"modify_ack_deadline" @@ -4706,7 +4706,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "modify_ack_deadline" @@ -4788,7 +4788,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "modify_push_config" @@ -4869,7 +4869,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "shortName": "modify_push_config" @@ -4955,7 +4955,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.PullResponse", @@ -5043,7 +5043,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.PullResponse", @@ -5120,7 +5120,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.SeekResponse", @@ -5196,7 +5196,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.SeekResponse", @@ -5273,7 +5273,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", @@ -5349,7 +5349,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "Iterable[google.pubsub_v1.types.StreamingPullResponse]", @@ -5434,7 +5434,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Snapshot", @@ -5518,7 +5518,7 @@ }, { "name": "metadata", - "type": 
"Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Snapshot", @@ -5603,7 +5603,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Subscription", @@ -5687,7 +5687,7 @@ }, { "name": "metadata", - "type": "Sequence[Tuple[str, str]" + "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], "resultType": "google.pubsub_v1.types.Subscription", diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index afcf7a8b9d08..2e9609f5f48a 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -74,9 +74,6 @@ class pubsubCallTransformer(cst.CSTTransformer): 'update_topic': ('topic', 'update_mask', ), 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), 'validate_schema': ('parent', 'schema', ), - 'get_iam_policy': ('resource', 'options', ), - 'set_iam_policy': ('resource', 'policy', ), - 'test_iam_permissions': ('resource', 'permissions', ), } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 423df44330d8..4856b1cad297 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -69,6 +69,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # 
pragma: NO COVER chunk = data[i : i + chunk_size] @@ -301,6 +309,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = PublisherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = PublisherClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4639,6 +4690,7 @@ def test_create_topic_rest_required_fields(request_type=pubsub.Topic): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_topic(request) @@ -4684,6 +4736,7 @@ def test_create_topic_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_topic(**mock_args) @@ -4807,6 +4860,7 @@ def test_update_topic_rest_required_fields(request_type=pubsub.UpdateTopicReques response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_topic(request) @@ -4861,6 +4915,7 @@ def test_update_topic_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_topic(**mock_args) @@ -4990,6 +5045,7 @@ def test_publish_rest_required_fields(request_type=pubsub.PublishRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.publish(request) @@ -5044,6 +5100,7 @@ def test_publish_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.publish(**mock_args) @@ -5173,6 +5230,7 @@ def test_get_topic_rest_required_fields(request_type=pubsub.GetTopicRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_topic(request) @@ -5218,6 +5276,7 @@ def test_get_topic_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} client.get_topic(**mock_args) @@ -5352,6 +5411,7 @@ def test_list_topics_rest_required_fields(request_type=pubsub.ListTopicsRequest) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_topics(request) @@ -5405,6 +5465,7 @@ def test_list_topics_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_topics(**mock_args) @@ -5607,6 +5668,7 @@ def test_list_topic_subscriptions_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_topic_subscriptions(request) @@ -5660,6 +5722,7 @@ def test_list_topic_subscriptions_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_topic_subscriptions(**mock_args) @@ -5864,6 +5927,7 @@ def test_list_topic_snapshots_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_topic_snapshots(request) @@ -5917,6 +5981,7 @@ def test_list_topic_snapshots_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_topic_snapshots(**mock_args) @@ -6103,6 +6168,7 
@@ def test_delete_topic_rest_required_fields(request_type=pubsub.DeleteTopicReques response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_topic(request) @@ -6146,6 +6212,7 @@ def test_delete_topic_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_topic(**mock_args) @@ -6279,6 +6346,7 @@ def test_detach_subscription_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.detach_subscription(request) @@ -6891,6 +6959,7 @@ def test_create_topic_rest_bad_request(request_type=pubsub.Topic): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_topic(request) @@ -6929,6 +6998,7 @@ def test_create_topic_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_topic(request) # Establish that the response is the type that we expect. 
@@ -6954,10 +7024,13 @@ def test_create_topic_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_create_topic" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_create_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_create_topic" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.Topic.pb(pubsub.Topic()) transcode.return_value = { "method": "post", @@ -6968,6 +7041,7 @@ def test_create_topic_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Topic.to_json(pubsub.Topic()) req.return_value.content = return_value @@ -6978,6 +7052,7 @@ def test_create_topic_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Topic() + post_with_metadata.return_value = pubsub.Topic(), metadata client.create_topic( request, @@ -6989,6 +7064,7 @@ def test_create_topic_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_topic_rest_bad_request(request_type=pubsub.UpdateTopicRequest): @@ -7010,6 +7086,7 @@ def test_update_topic_rest_bad_request(request_type=pubsub.UpdateTopicRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_topic(request) @@ -7048,6 +7125,7 @@ def test_update_topic_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": 
"value-2"} response = client.update_topic(request) # Establish that the response is the type that we expect. @@ -7073,10 +7151,13 @@ def test_update_topic_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_update_topic" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_update_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_update_topic" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.UpdateTopicRequest.pb(pubsub.UpdateTopicRequest()) transcode.return_value = { "method": "post", @@ -7087,6 +7168,7 @@ def test_update_topic_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Topic.to_json(pubsub.Topic()) req.return_value.content = return_value @@ -7097,6 +7179,7 @@ def test_update_topic_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Topic() + post_with_metadata.return_value = pubsub.Topic(), metadata client.update_topic( request, @@ -7108,6 +7191,7 @@ def test_update_topic_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_publish_rest_bad_request(request_type=pubsub.PublishRequest): @@ -7129,6 +7213,7 @@ def test_publish_rest_bad_request(request_type=pubsub.PublishRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.publish(request) @@ -7164,6 +7249,7 @@ def test_publish_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") 
req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.publish(request) # Establish that the response is the type that we expect. @@ -7186,10 +7272,13 @@ def test_publish_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_publish" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_publish_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_publish" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.PublishRequest.pb(pubsub.PublishRequest()) transcode.return_value = { "method": "post", @@ -7200,6 +7289,7 @@ def test_publish_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.PublishResponse.to_json(pubsub.PublishResponse()) req.return_value.content = return_value @@ -7210,6 +7300,7 @@ def test_publish_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.PublishResponse() + post_with_metadata.return_value = pubsub.PublishResponse(), metadata client.publish( request, @@ -7221,6 +7312,7 @@ def test_publish_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_topic_rest_bad_request(request_type=pubsub.GetTopicRequest): @@ -7242,6 +7334,7 @@ def test_get_topic_rest_bad_request(request_type=pubsub.GetTopicRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_topic(request) @@ -7280,6 +7373,7 @@ def test_get_topic_rest_call_success(request_type): json_return_value = 
json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_topic(request) # Establish that the response is the type that we expect. @@ -7305,10 +7399,13 @@ def test_get_topic_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_get_topic" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_get_topic_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_get_topic" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.GetTopicRequest.pb(pubsub.GetTopicRequest()) transcode.return_value = { "method": "post", @@ -7319,6 +7416,7 @@ def test_get_topic_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Topic.to_json(pubsub.Topic()) req.return_value.content = return_value @@ -7329,6 +7427,7 @@ def test_get_topic_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Topic() + post_with_metadata.return_value = pubsub.Topic(), metadata client.get_topic( request, @@ -7340,6 +7439,7 @@ def test_get_topic_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_topics_rest_bad_request(request_type=pubsub.ListTopicsRequest): @@ -7361,6 +7461,7 @@ def test_list_topics_rest_bad_request(request_type=pubsub.ListTopicsRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_topics(request) @@ 
-7396,6 +7497,7 @@ def test_list_topics_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_topics(request) # Establish that the response is the type that we expect. @@ -7418,10 +7520,13 @@ def test_list_topics_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_list_topics" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topics_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_list_topics" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.ListTopicsRequest.pb(pubsub.ListTopicsRequest()) transcode.return_value = { "method": "post", @@ -7432,6 +7537,7 @@ def test_list_topics_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.ListTopicsResponse.to_json(pubsub.ListTopicsResponse()) req.return_value.content = return_value @@ -7442,6 +7548,7 @@ def test_list_topics_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.ListTopicsResponse() + post_with_metadata.return_value = pubsub.ListTopicsResponse(), metadata client.list_topics( request, @@ -7453,6 +7560,7 @@ def test_list_topics_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_topic_subscriptions_rest_bad_request( @@ -7476,6 +7584,7 @@ def test_list_topic_subscriptions_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = 
response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_topic_subscriptions(request) @@ -7512,6 +7621,7 @@ def test_list_topic_subscriptions_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_topic_subscriptions(request) # Establish that the response is the type that we expect. @@ -7535,10 +7645,14 @@ def test_list_topic_subscriptions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_list_topic_subscriptions" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, + "post_list_topic_subscriptions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_list_topic_subscriptions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.ListTopicSubscriptionsRequest.pb( pubsub.ListTopicSubscriptionsRequest() ) @@ -7551,6 +7665,7 @@ def test_list_topic_subscriptions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.ListTopicSubscriptionsResponse.to_json( pubsub.ListTopicSubscriptionsResponse() ) @@ -7563,6 +7678,10 @@ def test_list_topic_subscriptions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.ListTopicSubscriptionsResponse() + post_with_metadata.return_value = ( + pubsub.ListTopicSubscriptionsResponse(), + metadata, + ) client.list_topic_subscriptions( request, @@ -7574,6 +7693,7 @@ def test_list_topic_subscriptions_rest_interceptors(null_interceptor): pre.assert_called_once() 
post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_topic_snapshots_rest_bad_request( @@ -7597,6 +7717,7 @@ def test_list_topic_snapshots_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_topic_snapshots(request) @@ -7633,6 +7754,7 @@ def test_list_topic_snapshots_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_topic_snapshots(request) # Establish that the response is the type that we expect. @@ -7656,10 +7778,13 @@ def test_list_topic_snapshots_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_list_topic_snapshots" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_list_topic_snapshots_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_list_topic_snapshots" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.ListTopicSnapshotsRequest.pb( pubsub.ListTopicSnapshotsRequest() ) @@ -7672,6 +7797,7 @@ def test_list_topic_snapshots_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.ListTopicSnapshotsResponse.to_json( pubsub.ListTopicSnapshotsResponse() ) @@ -7684,6 +7810,7 @@ def test_list_topic_snapshots_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.ListTopicSnapshotsResponse() + post_with_metadata.return_value = 
pubsub.ListTopicSnapshotsResponse(), metadata client.list_topic_snapshots( request, @@ -7695,6 +7822,7 @@ def test_list_topic_snapshots_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_topic_rest_bad_request(request_type=pubsub.DeleteTopicRequest): @@ -7716,6 +7844,7 @@ def test_delete_topic_rest_bad_request(request_type=pubsub.DeleteTopicRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_topic(request) @@ -7746,6 +7875,7 @@ def test_delete_topic_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_topic(request) # Establish that the response is the type that we expect. 
@@ -7778,6 +7908,7 @@ def test_delete_topic_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = pubsub.DeleteTopicRequest() metadata = [ @@ -7818,6 +7949,7 @@ def test_detach_subscription_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.detach_subscription(request) @@ -7851,6 +7983,7 @@ def test_detach_subscription_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.detach_subscription(request) # Establish that the response is the type that we expect. @@ -7872,10 +8005,13 @@ def test_detach_subscription_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.PublisherRestInterceptor, "post_detach_subscription" ) as post, mock.patch.object( + transports.PublisherRestInterceptor, "post_detach_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.PublisherRestInterceptor, "pre_detach_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.DetachSubscriptionRequest.pb( pubsub.DetachSubscriptionRequest() ) @@ -7888,6 +8024,7 @@ def test_detach_subscription_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.DetachSubscriptionResponse.to_json( pubsub.DetachSubscriptionResponse() ) @@ -7900,6 +8037,7 @@ def test_detach_subscription_rest_interceptors(null_interceptor): ] 
pre.return_value = request, metadata post.return_value = pubsub.DetachSubscriptionResponse() + post_with_metadata.return_value = pubsub.DetachSubscriptionResponse(), metadata client.detach_subscription( request, @@ -7911,6 +8049,7 @@ def test_detach_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -7936,6 +8075,7 @@ def test_get_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_iam_policy(request) @@ -7966,6 +8106,7 @@ def test_get_iam_policy_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_iam_policy(request) @@ -7996,6 +8137,7 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.set_iam_policy(request) @@ -8026,6 +8168,7 @@ def test_set_iam_policy_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.set_iam_policy(request) @@ -8056,6 +8199,7 @@ def test_test_iam_permissions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.test_iam_permissions(request) @@ -8086,6 +8230,7 @@ def test_test_iam_permissions_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.test_iam_permissions(request) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 3dbefc470fac..194cddacfe9f 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -67,6 +67,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -320,6 +328,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SchemaServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -4840,6 +4891,7 @@ def test_create_schema_rest_required_fields(request_type=gp_schema.CreateSchemaR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_schema(request) @@ -4895,6 +4947,7 @@ def test_create_schema_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} 
client.create_schema(**mock_args) @@ -5026,6 +5079,7 @@ def test_get_schema_rest_required_fields(request_type=schema.GetSchemaRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_schema(request) @@ -5071,6 +5125,7 @@ def test_get_schema_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_schema(**mock_args) @@ -5206,6 +5261,7 @@ def test_list_schemas_rest_required_fields(request_type=schema.ListSchemasReques response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_schemas(request) @@ -5260,6 +5316,7 @@ def test_list_schemas_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_schemas(**mock_args) @@ -5463,6 +5520,7 @@ def test_list_schema_revisions_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_schema_revisions(request) @@ -5517,6 +5575,7 @@ def test_list_schema_revisions_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_schema_revisions(**mock_args) @@ -5709,6 +5768,7 @@ def 
test_commit_schema_rest_required_fields(request_type=gp_schema.CommitSchemaR response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit_schema(request) @@ -5763,6 +5823,7 @@ def test_commit_schema_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit_schema(**mock_args) @@ -5898,6 +5959,7 @@ def test_rollback_schema_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback_schema(request) @@ -5952,6 +6014,7 @@ def test_rollback_schema_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback_schema(**mock_args) @@ -6090,6 +6153,7 @@ def test_delete_schema_revision_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_schema_revision(request) @@ -6136,6 +6200,7 @@ def test_delete_schema_revision_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_schema_revision(**mock_args) @@ -6262,6 +6327,7 @@ def test_delete_schema_rest_required_fields(request_type=schema.DeleteSchemaRequ response_value._content = 
json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_schema(request) @@ -6305,6 +6371,7 @@ def test_delete_schema_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_schema(**mock_args) @@ -6435,6 +6502,7 @@ def test_validate_schema_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.validate_schema(request) @@ -6489,6 +6557,7 @@ def test_validate_schema_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.validate_schema(**mock_args) @@ -6623,6 +6692,7 @@ def test_validate_message_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.validate_message(request) @@ -7281,6 +7351,7 @@ def test_create_schema_rest_bad_request(request_type=gp_schema.CreateSchemaReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_schema(request) @@ -7393,6 +7464,7 @@ def get_message_fields(field): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = 
client.create_schema(request) # Establish that the response is the type that we expect. @@ -7420,10 +7492,13 @@ def test_create_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_create_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_create_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_create_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gp_schema.CreateSchemaRequest.pb(gp_schema.CreateSchemaRequest()) transcode.return_value = { "method": "post", @@ -7434,6 +7509,7 @@ def test_create_schema_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gp_schema.Schema.to_json(gp_schema.Schema()) req.return_value.content = return_value @@ -7444,6 +7520,7 @@ def test_create_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gp_schema.Schema() + post_with_metadata.return_value = gp_schema.Schema(), metadata client.create_schema( request, @@ -7455,6 +7532,7 @@ def test_create_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_schema_rest_bad_request(request_type=schema.GetSchemaRequest): @@ -7476,6 +7554,7 @@ def test_get_schema_rest_bad_request(request_type=schema.GetSchemaRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_schema(request) @@ -7514,6 +7593,7 @@ def test_get_schema_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) 
response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_schema(request) # Establish that the response is the type that we expect. @@ -7541,10 +7621,13 @@ def test_get_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_get_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_get_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_get_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema.GetSchemaRequest.pb(schema.GetSchemaRequest()) transcode.return_value = { "method": "post", @@ -7555,6 +7638,7 @@ def test_get_schema_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = schema.Schema.to_json(schema.Schema()) req.return_value.content = return_value @@ -7565,6 +7649,7 @@ def test_get_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata client.get_schema( request, @@ -7576,6 +7661,7 @@ def test_get_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_schemas_rest_bad_request(request_type=schema.ListSchemasRequest): @@ -7597,6 +7683,7 @@ def test_list_schemas_rest_bad_request(request_type=schema.ListSchemasRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_schemas(request) @@ -7632,6 
+7719,7 @@ def test_list_schemas_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_schemas(request) # Establish that the response is the type that we expect. @@ -7656,10 +7744,13 @@ def test_list_schemas_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_list_schemas" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_list_schemas_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_list_schemas" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema.ListSchemasRequest.pb(schema.ListSchemasRequest()) transcode.return_value = { "method": "post", @@ -7670,6 +7761,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = schema.ListSchemasResponse.to_json(schema.ListSchemasResponse()) req.return_value.content = return_value @@ -7680,6 +7772,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.ListSchemasResponse() + post_with_metadata.return_value = schema.ListSchemasResponse(), metadata client.list_schemas( request, @@ -7691,6 +7784,7 @@ def test_list_schemas_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_schema_revisions_rest_bad_request( @@ -7714,6 +7808,7 @@ def test_list_schema_revisions_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value 
= response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_schema_revisions(request) @@ -7749,6 +7844,7 @@ def test_list_schema_revisions_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_schema_revisions(request) # Establish that the response is the type that we expect. @@ -7773,10 +7869,14 @@ def test_list_schema_revisions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_list_schema_revisions" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, + "post_list_schema_revisions_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_list_schema_revisions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema.ListSchemaRevisionsRequest.pb( schema.ListSchemaRevisionsRequest() ) @@ -7789,6 +7889,7 @@ def test_list_schema_revisions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = schema.ListSchemaRevisionsResponse.to_json( schema.ListSchemaRevisionsResponse() ) @@ -7801,6 +7902,7 @@ def test_list_schema_revisions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.ListSchemaRevisionsResponse() + post_with_metadata.return_value = schema.ListSchemaRevisionsResponse(), metadata client.list_schema_revisions( request, @@ -7812,6 +7914,7 @@ def test_list_schema_revisions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_commit_schema_rest_bad_request(request_type=gp_schema.CommitSchemaRequest): @@ -7833,6 +7936,7 @@ def test_commit_schema_rest_bad_request(request_type=gp_schema.CommitSchemaReque response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.commit_schema(request) @@ -7871,6 +7975,7 @@ def test_commit_schema_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.commit_schema(request) # Establish that the response is the type that we expect. @@ -7898,10 +8003,13 @@ def test_commit_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_commit_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_commit_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_commit_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gp_schema.CommitSchemaRequest.pb(gp_schema.CommitSchemaRequest()) transcode.return_value = { "method": "post", @@ -7912,6 +8020,7 @@ def test_commit_schema_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gp_schema.Schema.to_json(gp_schema.Schema()) req.return_value.content = return_value @@ -7922,6 +8031,7 @@ def test_commit_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gp_schema.Schema() + post_with_metadata.return_value = gp_schema.Schema(), metadata client.commit_schema( request, 
@@ -7933,6 +8043,7 @@ def test_commit_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_rollback_schema_rest_bad_request(request_type=schema.RollbackSchemaRequest): @@ -7954,6 +8065,7 @@ def test_rollback_schema_rest_bad_request(request_type=schema.RollbackSchemaRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.rollback_schema(request) @@ -7992,6 +8104,7 @@ def test_rollback_schema_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.rollback_schema(request) # Establish that the response is the type that we expect. @@ -8019,10 +8132,13 @@ def test_rollback_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_rollback_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_rollback_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_rollback_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema.RollbackSchemaRequest.pb(schema.RollbackSchemaRequest()) transcode.return_value = { "method": "post", @@ -8033,6 +8149,7 @@ def test_rollback_schema_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = schema.Schema.to_json(schema.Schema()) req.return_value.content = return_value @@ -8043,6 +8160,7 @@ def 
test_rollback_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata client.rollback_schema( request, @@ -8054,6 +8172,7 @@ def test_rollback_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_schema_revision_rest_bad_request( @@ -8077,6 +8196,7 @@ def test_delete_schema_revision_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_schema_revision(request) @@ -8115,6 +8235,7 @@ def test_delete_schema_revision_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_schema_revision(request) # Establish that the response is the type that we expect. 
@@ -8142,10 +8263,14 @@ def test_delete_schema_revision_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_delete_schema_revision" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, + "post_delete_schema_revision_with_metadata", + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_delete_schema_revision" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema.DeleteSchemaRevisionRequest.pb( schema.DeleteSchemaRevisionRequest() ) @@ -8158,6 +8283,7 @@ def test_delete_schema_revision_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = schema.Schema.to_json(schema.Schema()) req.return_value.content = return_value @@ -8168,6 +8294,7 @@ def test_delete_schema_revision_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.Schema() + post_with_metadata.return_value = schema.Schema(), metadata client.delete_schema_revision( request, @@ -8179,6 +8306,7 @@ def test_delete_schema_revision_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_delete_schema_rest_bad_request(request_type=schema.DeleteSchemaRequest): @@ -8200,6 +8328,7 @@ def test_delete_schema_rest_bad_request(request_type=schema.DeleteSchemaRequest) response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_schema(request) @@ -8230,6 +8359,7 @@ def test_delete_schema_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_schema(request) # Establish that the response is the type that we expect. @@ -8264,6 +8394,7 @@ def test_delete_schema_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = schema.DeleteSchemaRequest() metadata = [ @@ -8302,6 +8433,7 @@ def test_validate_schema_rest_bad_request(request_type=gp_schema.ValidateSchemaR response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.validate_schema(request) @@ -8335,6 +8467,7 @@ def test_validate_schema_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.validate_schema(request) # Establish that the response is the type that we expect. 
@@ -8358,10 +8491,13 @@ def test_validate_schema_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_validate_schema" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_schema_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_validate_schema" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = gp_schema.ValidateSchemaRequest.pb( gp_schema.ValidateSchemaRequest() ) @@ -8374,6 +8510,7 @@ def test_validate_schema_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = gp_schema.ValidateSchemaResponse.to_json( gp_schema.ValidateSchemaResponse() ) @@ -8386,6 +8523,7 @@ def test_validate_schema_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = gp_schema.ValidateSchemaResponse() + post_with_metadata.return_value = gp_schema.ValidateSchemaResponse(), metadata client.validate_schema( request, @@ -8397,6 +8535,7 @@ def test_validate_schema_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_validate_message_rest_bad_request(request_type=schema.ValidateMessageRequest): @@ -8418,6 +8557,7 @@ def test_validate_message_rest_bad_request(request_type=schema.ValidateMessageRe response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.validate_message(request) @@ -8451,6 +8591,7 @@ def test_validate_message_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value 
= response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.validate_message(request) # Establish that the response is the type that we expect. @@ -8474,10 +8615,13 @@ def test_validate_message_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SchemaServiceRestInterceptor, "post_validate_message" ) as post, mock.patch.object( + transports.SchemaServiceRestInterceptor, "post_validate_message_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SchemaServiceRestInterceptor, "pre_validate_message" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = schema.ValidateMessageRequest.pb(schema.ValidateMessageRequest()) transcode.return_value = { "method": "post", @@ -8488,6 +8632,7 @@ def test_validate_message_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = schema.ValidateMessageResponse.to_json( schema.ValidateMessageResponse() ) @@ -8500,6 +8645,7 @@ def test_validate_message_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = schema.ValidateMessageResponse() + post_with_metadata.return_value = schema.ValidateMessageResponse(), metadata client.validate_message( request, @@ -8511,6 +8657,7 @@ def test_validate_message_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -8536,6 +8683,7 @@ def test_get_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_iam_policy(request) @@ -8566,6 +8714,7 @@ def test_get_iam_policy_rest(request_type): 
response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_iam_policy(request) @@ -8596,6 +8745,7 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.set_iam_policy(request) @@ -8626,6 +8776,7 @@ def test_set_iam_policy_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.set_iam_policy(request) @@ -8656,6 +8807,7 @@ def test_test_iam_permissions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.test_iam_permissions(request) @@ -8686,6 +8838,7 @@ def test_test_iam_permissions_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.test_iam_permissions(request) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 4478942f7912..966e30957a23 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -69,6 +69,14 @@ import google.auth +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + async def mock_async_gen(data, chunk_size=1): 
for i in range(0, len(data)): # pragma: NO COVER chunk = data[i : i + chunk_size] @@ -303,6 +311,49 @@ def test__get_universe_domain(): assert str(excinfo.value) == "Universe Domain cannot be an empty string." +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = SubscriberClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = SubscriberClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + @pytest.mark.parametrize( "client_class,transport_name", [ @@ -6683,6 +6734,7 @@ def test_create_subscription_rest_required_fields(request_type=pubsub.Subscripti response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_subscription(request) @@ -6739,6 +6791,7 @@ def test_create_subscription_rest_flattened(): json_return_value = 
json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_subscription(**mock_args) @@ -6873,6 +6926,7 @@ def test_get_subscription_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_subscription(request) @@ -6918,6 +6972,7 @@ def test_get_subscription_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_subscription(**mock_args) @@ -7048,6 +7103,7 @@ def test_update_subscription_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_subscription(request) @@ -7104,6 +7160,7 @@ def test_update_subscription_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_subscription(**mock_args) @@ -7247,6 +7304,7 @@ def test_list_subscriptions_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_subscriptions(request) @@ -7300,6 +7358,7 @@ def test_list_subscriptions_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = 
response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_subscriptions(**mock_args) @@ -7491,6 +7550,7 @@ def test_delete_subscription_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_subscription(request) @@ -7534,6 +7594,7 @@ def test_delete_subscription_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_subscription(**mock_args) @@ -7674,6 +7735,7 @@ def test_modify_ack_deadline_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.modify_ack_deadline(request) @@ -7728,6 +7790,7 @@ def test_modify_ack_deadline_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.modify_ack_deadline(**mock_args) @@ -7861,6 +7924,7 @@ def test_acknowledge_rest_required_fields(request_type=pubsub.AcknowledgeRequest response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.acknowledge(request) @@ -7913,6 +7977,7 @@ def test_acknowledge_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.acknowledge(**mock_args) @@ -8048,6 +8113,7 @@ def 
test_pull_rest_required_fields(request_type=pubsub.PullRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.pull(request) @@ -8103,6 +8169,7 @@ def test_pull_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.pull(**mock_args) @@ -8249,6 +8316,7 @@ def test_modify_push_config_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.modify_push_config(request) @@ -8301,6 +8369,7 @@ def test_modify_push_config_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.modify_push_config(**mock_args) @@ -8431,6 +8500,7 @@ def test_get_snapshot_rest_required_fields(request_type=pubsub.GetSnapshotReques response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_snapshot(request) @@ -8476,6 +8546,7 @@ def test_get_snapshot_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_snapshot(**mock_args) @@ -8610,6 +8681,7 @@ def test_list_snapshots_rest_required_fields(request_type=pubsub.ListSnapshotsRe response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value 
+ req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_snapshots(request) @@ -8663,6 +8735,7 @@ def test_list_snapshots_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_snapshots(**mock_args) @@ -8858,6 +8931,7 @@ def test_create_snapshot_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_snapshot(request) @@ -8912,6 +8986,7 @@ def test_create_snapshot_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_snapshot(**mock_args) @@ -9038,6 +9113,7 @@ def test_update_snapshot_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_snapshot(request) @@ -9092,6 +9168,7 @@ def test_update_snapshot_rest_flattened(): json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_snapshot(**mock_args) @@ -9220,6 +9297,7 @@ def test_delete_snapshot_rest_required_fields( response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_snapshot(request) @@ -9263,6 +9341,7 @@ def 
test_delete_snapshot_rest_flattened(): json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_snapshot(**mock_args) @@ -9391,6 +9470,7 @@ def test_seek_rest_required_fields(request_type=pubsub.SeekRequest): response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.seek(request) @@ -10310,6 +10390,7 @@ def test_create_subscription_rest_bad_request(request_type=pubsub.Subscription): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_subscription(request) @@ -10353,6 +10434,7 @@ def test_create_subscription_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_subscription(request) # Establish that the response is the type that we expect. 
@@ -10385,10 +10467,13 @@ def test_create_subscription_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_create_subscription" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_create_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.Subscription.pb(pubsub.Subscription()) transcode.return_value = { "method": "post", @@ -10399,6 +10484,7 @@ def test_create_subscription_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Subscription.to_json(pubsub.Subscription()) req.return_value.content = return_value @@ -10409,6 +10495,7 @@ def test_create_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Subscription() + post_with_metadata.return_value = pubsub.Subscription(), metadata client.create_subscription( request, @@ -10420,6 +10507,7 @@ def test_create_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_subscription_rest_bad_request(request_type=pubsub.GetSubscriptionRequest): @@ -10441,6 +10529,7 @@ def test_get_subscription_rest_bad_request(request_type=pubsub.GetSubscriptionRe response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_subscription(request) @@ -10484,6 +10573,7 @@ def test_get_subscription_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = 
json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_subscription(request) # Establish that the response is the type that we expect. @@ -10516,10 +10606,13 @@ def test_get_subscription_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_get_subscription" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_get_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.GetSubscriptionRequest.pb(pubsub.GetSubscriptionRequest()) transcode.return_value = { "method": "post", @@ -10530,6 +10623,7 @@ def test_get_subscription_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Subscription.to_json(pubsub.Subscription()) req.return_value.content = return_value @@ -10540,6 +10634,7 @@ def test_get_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Subscription() + post_with_metadata.return_value = pubsub.Subscription(), metadata client.get_subscription( request, @@ -10551,6 +10646,7 @@ def test_get_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_subscription_rest_bad_request( @@ -10574,6 +10670,7 @@ def test_update_subscription_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_subscription(request) @@ 
-10617,6 +10714,7 @@ def test_update_subscription_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_subscription(request) # Establish that the response is the type that we expect. @@ -10649,10 +10747,13 @@ def test_update_subscription_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_update_subscription" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_subscription_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_update_subscription" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.UpdateSubscriptionRequest.pb( pubsub.UpdateSubscriptionRequest() ) @@ -10665,6 +10766,7 @@ def test_update_subscription_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Subscription.to_json(pubsub.Subscription()) req.return_value.content = return_value @@ -10675,6 +10777,7 @@ def test_update_subscription_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Subscription() + post_with_metadata.return_value = pubsub.Subscription(), metadata client.update_subscription( request, @@ -10686,6 +10789,7 @@ def test_update_subscription_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_subscriptions_rest_bad_request( @@ -10709,6 +10813,7 @@ def test_list_subscriptions_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() 
req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_subscriptions(request) @@ -10744,6 +10849,7 @@ def test_list_subscriptions_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_subscriptions(request) # Establish that the response is the type that we expect. @@ -10768,10 +10874,13 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_list_subscriptions" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_subscriptions_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_list_subscriptions" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.ListSubscriptionsRequest.pb( pubsub.ListSubscriptionsRequest() ) @@ -10784,6 +10893,7 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.ListSubscriptionsResponse.to_json( pubsub.ListSubscriptionsResponse() ) @@ -10796,6 +10906,7 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.ListSubscriptionsResponse() + post_with_metadata.return_value = pubsub.ListSubscriptionsResponse(), metadata client.list_subscriptions( request, @@ -10807,6 +10918,7 @@ def test_list_subscriptions_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_subscription_rest_bad_request( @@ -10830,6 +10942,7 @@ def test_delete_subscription_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_subscription(request) @@ -10860,6 +10973,7 @@ def test_delete_subscription_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_subscription(request) # Establish that the response is the type that we expect. @@ -10896,6 +11010,7 @@ def test_delete_subscription_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = pubsub.DeleteSubscriptionRequest() metadata = [ @@ -10936,6 +11051,7 @@ def test_modify_ack_deadline_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.modify_ack_deadline(request) @@ -10966,6 +11082,7 @@ def test_modify_ack_deadline_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.modify_ack_deadline(request) # Establish that the response is the type that we expect. 
@@ -11002,6 +11119,7 @@ def test_modify_ack_deadline_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = pubsub.ModifyAckDeadlineRequest() metadata = [ @@ -11040,6 +11158,7 @@ def test_acknowledge_rest_bad_request(request_type=pubsub.AcknowledgeRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.acknowledge(request) @@ -11070,6 +11189,7 @@ def test_acknowledge_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.acknowledge(request) # Establish that the response is the type that we expect. @@ -11104,6 +11224,7 @@ def test_acknowledge_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = pubsub.AcknowledgeRequest() metadata = [ @@ -11142,6 +11263,7 @@ def test_pull_rest_bad_request(request_type=pubsub.PullRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.pull(request) @@ -11175,6 +11297,7 @@ def test_pull_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.pull(request) # Establish that the response is the type that we expect. 
@@ -11198,10 +11321,13 @@ def test_pull_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_pull" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_pull_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_pull" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.PullRequest.pb(pubsub.PullRequest()) transcode.return_value = { "method": "post", @@ -11212,6 +11338,7 @@ def test_pull_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.PullResponse.to_json(pubsub.PullResponse()) req.return_value.content = return_value @@ -11222,6 +11349,7 @@ def test_pull_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.PullResponse() + post_with_metadata.return_value = pubsub.PullResponse(), metadata client.pull( request, @@ -11233,6 +11361,7 @@ def test_pull_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_streaming_pull_rest_error(): @@ -11268,6 +11397,7 @@ def test_modify_push_config_rest_bad_request( response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.modify_push_config(request) @@ -11298,6 +11428,7 @@ def test_modify_push_config_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.modify_push_config(request) # Establish that the response is the type that we expect. 
@@ -11332,6 +11463,7 @@ def test_modify_push_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = pubsub.ModifyPushConfigRequest() metadata = [ @@ -11370,6 +11502,7 @@ def test_get_snapshot_rest_bad_request(request_type=pubsub.GetSnapshotRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_snapshot(request) @@ -11406,6 +11539,7 @@ def test_get_snapshot_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_snapshot(request) # Establish that the response is the type that we expect. 
@@ -11431,10 +11565,13 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_get_snapshot" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_get_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_get_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.GetSnapshotRequest.pb(pubsub.GetSnapshotRequest()) transcode.return_value = { "method": "post", @@ -11445,6 +11582,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) req.return_value.content = return_value @@ -11455,6 +11593,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Snapshot() + post_with_metadata.return_value = pubsub.Snapshot(), metadata client.get_snapshot( request, @@ -11466,6 +11605,7 @@ def test_get_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_list_snapshots_rest_bad_request(request_type=pubsub.ListSnapshotsRequest): @@ -11487,6 +11627,7 @@ def test_list_snapshots_rest_bad_request(request_type=pubsub.ListSnapshotsReques response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.list_snapshots(request) @@ -11522,6 +11663,7 @@ def test_list_snapshots_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.list_snapshots(request) # Establish that the response is the type that we expect. @@ -11546,10 +11688,13 @@ def test_list_snapshots_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_list_snapshots" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_list_snapshots_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_list_snapshots" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.ListSnapshotsRequest.pb(pubsub.ListSnapshotsRequest()) transcode.return_value = { "method": "post", @@ -11560,6 +11705,7 @@ def test_list_snapshots_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.ListSnapshotsResponse.to_json( pubsub.ListSnapshotsResponse() ) @@ -11572,6 +11718,7 @@ def test_list_snapshots_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.ListSnapshotsResponse() + post_with_metadata.return_value = pubsub.ListSnapshotsResponse(), metadata client.list_snapshots( request, @@ -11583,6 +11730,7 @@ def test_list_snapshots_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_create_snapshot_rest_bad_request(request_type=pubsub.CreateSnapshotRequest): @@ -11604,6 +11752,7 @@ def test_create_snapshot_rest_bad_request(request_type=pubsub.CreateSnapshotRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.create_snapshot(request) @@ -11640,6 +11789,7 @@ def 
test_create_snapshot_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.create_snapshot(request) # Establish that the response is the type that we expect. @@ -11665,10 +11815,13 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_create_snapshot" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_create_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_create_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.CreateSnapshotRequest.pb(pubsub.CreateSnapshotRequest()) transcode.return_value = { "method": "post", @@ -11679,6 +11832,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) req.return_value.content = return_value @@ -11689,6 +11843,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Snapshot() + post_with_metadata.return_value = pubsub.Snapshot(), metadata client.create_snapshot( request, @@ -11700,6 +11855,7 @@ def test_create_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_update_snapshot_rest_bad_request(request_type=pubsub.UpdateSnapshotRequest): @@ -11721,6 +11877,7 @@ def test_update_snapshot_rest_bad_request(request_type=pubsub.UpdateSnapshotRequ response_value.status_code = 400 
response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.update_snapshot(request) @@ -11757,6 +11914,7 @@ def test_update_snapshot_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.update_snapshot(request) # Establish that the response is the type that we expect. @@ -11782,10 +11940,13 @@ def test_update_snapshot_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_update_snapshot" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_update_snapshot_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_update_snapshot" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.UpdateSnapshotRequest.pb(pubsub.UpdateSnapshotRequest()) transcode.return_value = { "method": "post", @@ -11796,6 +11957,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.Snapshot.to_json(pubsub.Snapshot()) req.return_value.content = return_value @@ -11806,6 +11968,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.Snapshot() + post_with_metadata.return_value = pubsub.Snapshot(), metadata client.update_snapshot( request, @@ -11817,6 +11980,7 @@ def test_update_snapshot_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def 
test_delete_snapshot_rest_bad_request(request_type=pubsub.DeleteSnapshotRequest): @@ -11838,6 +12002,7 @@ def test_delete_snapshot_rest_bad_request(request_type=pubsub.DeleteSnapshotRequ response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.delete_snapshot(request) @@ -11868,6 +12033,7 @@ def test_delete_snapshot_rest_call_success(request_type): json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.delete_snapshot(request) # Establish that the response is the type that we expect. @@ -11902,6 +12068,7 @@ def test_delete_snapshot_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} request = pubsub.DeleteSnapshotRequest() metadata = [ @@ -11940,6 +12107,7 @@ def test_seek_rest_bad_request(request_type=pubsub.SeekRequest): response_value.status_code = 400 response_value.request = mock.Mock() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.seek(request) @@ -11973,6 +12141,7 @@ def test_seek_rest_call_success(request_type): json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.seek(request) # Establish that the response is the type that we expect. 
@@ -11996,10 +12165,13 @@ def test_seek_rest_interceptors(null_interceptor): ) as transcode, mock.patch.object( transports.SubscriberRestInterceptor, "post_seek" ) as post, mock.patch.object( + transports.SubscriberRestInterceptor, "post_seek_with_metadata" + ) as post_with_metadata, mock.patch.object( transports.SubscriberRestInterceptor, "pre_seek" ) as pre: pre.assert_not_called() post.assert_not_called() + post_with_metadata.assert_not_called() pb_message = pubsub.SeekRequest.pb(pubsub.SeekRequest()) transcode.return_value = { "method": "post", @@ -12010,6 +12182,7 @@ def test_seek_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} return_value = pubsub.SeekResponse.to_json(pubsub.SeekResponse()) req.return_value.content = return_value @@ -12020,6 +12193,7 @@ def test_seek_rest_interceptors(null_interceptor): ] pre.return_value = request, metadata post.return_value = pubsub.SeekResponse() + post_with_metadata.return_value = pubsub.SeekResponse(), metadata client.seek( request, @@ -12031,6 +12205,7 @@ def test_seek_rest_interceptors(null_interceptor): pre.assert_called_once() post.assert_called_once() + post_with_metadata.assert_called_once() def test_get_iam_policy_rest_bad_request( @@ -12056,6 +12231,7 @@ def test_get_iam_policy_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.get_iam_policy(request) @@ -12086,6 +12262,7 @@ def test_get_iam_policy_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.get_iam_policy(request) @@ -12116,6 +12293,7 @@ def test_set_iam_policy_rest_bad_request( response_value.status_code = 400 
response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.set_iam_policy(request) @@ -12146,6 +12324,7 @@ def test_set_iam_policy_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.set_iam_policy(request) @@ -12176,6 +12355,7 @@ def test_test_iam_permissions_rest_bad_request( response_value.status_code = 400 response_value.request = Request() req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} client.test_iam_permissions(request) @@ -12206,6 +12386,7 @@ def test_test_iam_permissions_rest(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} response = client.test_iam_permissions(request) From 3e652208c30668b2d30231f00d3d3aefcba26c8e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 17 Mar 2025 13:11:05 -0400 Subject: [PATCH 1147/1197] fix: allow Protobuf 6.x (#1369) --- packages/google-cloud-pubsub/setup.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 1d68bf87bca3..899cefde6c52 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -36,15 +36,15 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "grpcio >= 1.51.3, < 2.0dev", # https://github.com/googleapis/python-pubsub/issues/609 + "grpcio >= 1.51.3, < 2.0.0", # https://github.com/googleapis/python-pubsub/issues/609 # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x - "google-auth >= 2.14.1, <3.0.0dev", - "google-api-core[grpc] >= 1.34.0, 
<3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "proto-plus >= 1.25.0, < 2.0.0dev; python_version >= '3.13'", - "protobuf>=3.20.2,<6.0.0dev,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "grpc-google-iam-v1 >= 0.12.4, < 1.0.0dev", + "google-auth >= 2.14.1, <3.0.0", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0", "grpcio-status >= 1.33.2", "opentelemetry-api <= 1.22.0; python_version<='3.7'", "opentelemetry-api >= 1.27.0; python_version>='3.8'", From 590b07e966e51470c3113bb58170cf957c6e54da Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:53:26 -0400 Subject: [PATCH 1148/1197] chore: Update gapic-generator-python to 1.23.6 (#1377) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/google/pubsub/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/__init__.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/services/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/async_client.py | 2 +- .../google/pubsub_v1/services/publisher/client.py | 2 +- .../google/pubsub_v1/services/publisher/pagers.py | 2 +- .../google/pubsub_v1/services/publisher/transports/__init__.py | 2 +- .../google/pubsub_v1/services/publisher/transports/base.py | 2 +- .../google/pubsub_v1/services/publisher/transports/grpc.py | 2 +- .../pubsub_v1/services/publisher/transports/grpc_asyncio.py | 2 +- 
.../google/pubsub_v1/services/publisher/transports/rest.py | 2 +- .../google/pubsub_v1/services/publisher/transports/rest_base.py | 2 +- .../google/pubsub_v1/services/schema_service/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/async_client.py | 2 +- .../google/pubsub_v1/services/schema_service/client.py | 2 +- .../google/pubsub_v1/services/schema_service/pagers.py | 2 +- .../pubsub_v1/services/schema_service/transports/__init__.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/base.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/grpc.py | 2 +- .../services/schema_service/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/schema_service/transports/rest.py | 2 +- .../pubsub_v1/services/schema_service/transports/rest_base.py | 2 +- .../google/pubsub_v1/services/subscriber/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/async_client.py | 2 +- .../google/pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/services/subscriber/pagers.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/__init__.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/base.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/grpc.py | 2 +- .../pubsub_v1/services/subscriber/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/services/subscriber/transports/rest.py | 2 +- .../pubsub_v1/services/subscriber/transports/rest_base.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 2 +- packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_create_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_delete_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_detach_subscription_async.py | 2 +- 
.../pubsub_v1_generated_publisher_detach_subscription_sync.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_get_topic_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topic_snapshots_sync.py | 2 +- ...sub_v1_generated_publisher_list_topic_subscriptions_async.py | 2 +- ...bsub_v1_generated_publisher_list_topic_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_async.py | 2 +- .../pubsub_v1_generated_publisher_list_topics_sync.py | 2 +- .../pubsub_v1_generated_publisher_publish_async.py | 2 +- .../pubsub_v1_generated_publisher_publish_sync.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_async.py | 2 +- .../pubsub_v1_generated_publisher_update_topic_sync.py | 2 +- .../pubsub_v1_generated_schema_service_commit_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_commit_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_create_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_async.py | 2 +- ..._v1_generated_schema_service_delete_schema_revision_async.py | 2 +- ...b_v1_generated_schema_service_delete_schema_revision_sync.py | 2 +- .../pubsub_v1_generated_schema_service_delete_schema_sync.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_get_schema_sync.py | 2 +- ...b_v1_generated_schema_service_list_schema_revisions_async.py | 2 +- ...ub_v1_generated_schema_service_list_schema_revisions_sync.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_async.py | 2 +- .../pubsub_v1_generated_schema_service_list_schemas_sync.py | 2 +- .../pubsub_v1_generated_schema_service_rollback_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_rollback_schema_sync.py | 2 +- ...pubsub_v1_generated_schema_service_validate_message_async.py 
| 2 +- .../pubsub_v1_generated_schema_service_validate_message_sync.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_async.py | 2 +- .../pubsub_v1_generated_schema_service_validate_schema_sync.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_async.py | 2 +- .../pubsub_v1_generated_subscriber_acknowledge_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_create_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_delete_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_get_subscription_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_snapshots_sync.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_async.py | 2 +- .../pubsub_v1_generated_subscriber_list_subscriptions_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_async.py | 2 +- .../pubsub_v1_generated_subscriber_modify_push_config_sync.py | 2 +- .../pubsub_v1_generated_subscriber_pull_async.py | 2 +- .../pubsub_v1_generated_subscriber_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_seek_async.py | 2 +- .../pubsub_v1_generated_subscriber_seek_sync.py | 2 +- .../pubsub_v1_generated_subscriber_streaming_pull_async.py | 2 +- 
.../pubsub_v1_generated_subscriber_streaming_pull_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_snapshot_sync.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_async.py | 2 +- .../pubsub_v1_generated_subscriber_update_subscription_sync.py | 2 +- .../google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py | 2 +- packages/google-cloud-pubsub/tests/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/__init__.py | 2 +- packages/google-cloud-pubsub/tests/unit/gapic/__init__.py | 2 +- .../google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_publisher.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_schema_service.py | 2 +- .../tests/unit/gapic/pubsub_v1/test_subscriber.py | 2 +- 114 files changed, 114 insertions(+), 114 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index c3f03499626c..d88449a53f45 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 751f77206f1d..9e23f583cba6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py index 7e1e8e5ce602..6c1355801d0a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 106ce5f93ebd..87838c006783 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 2db3e1cda553..c94cec688ae1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py index da6de8dfaee3..162d9da79e41 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py index 393b9a55fc2a..75bfa7de099a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 45b06302db41..354a6c7141f4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 3b92a0c31b38..f27b968a8266 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 2b6e8c60326d..90683cbd0585 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index 0089968cc58f..a5c99da1bb81 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py index dad3a91b23d9..14308a300312 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py index 570d29e7c544..0908014e8c82 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 88eeb1fba2d1..4ebf3ac946d6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 07894782cda6..40fc381b812f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py index 8f7080b268ec..02beaee40be6 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py index 73976e7fb32d..78c2fa21d683 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index e42f9896f91f..c1187d1cdc59 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index daabd41b93de..f089c2724c6b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index c321b88c2301..f030d0563f9c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 1bed03dc00b9..f4eab1dd87ff 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py index 94312eba7f26..0ce5285bd93d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py index e6994bdf6520..0e651adb73a4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 9fc50fd311b9..4ad7306f2654 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 29c7be980b51..44f3b15986c1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py index 26adcdd63432..9f879cfc82c1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py index af60aba8eb03..73e9fd44f8ba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 51b50a55f7da..01fd7ddcb28b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index ff59214ca991..6a10424feb46 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 08eaf9665272..04576b5870cb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index a44591cf9558..62211228e588 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py index 033af0b16bc5..f4fb076565ff 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest_base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 5f2a260b00f2..85c6b901b1a9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 8917db59f4c4..74b65549bcbb 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py index 9353e4817cf9..e1f376ed9043 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/schema.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py index 5694b24ef2d5..e1bf1f2c11c9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py index 7de319c67e67..941fea1d4986 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_create_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py index 3d2b74803eb4..2fad1b0997be 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py index e760eddbd671..27b58c27a06c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_delete_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py index 86508954b39c..22fb9e7e6de4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py index af6b4583799a..058c10e73d26 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_detach_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py index a8bea68c2d6b..a8de7a307ced 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py index 051b980931a0..d2846a7504fe 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_get_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py index 4eed18b38ac6..e8a3e2e8dc04 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py index 1e2266757919..3a51a39b8b4b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py index 959f1e824c81..cbc81e48fe80 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py index c648b8d1a8d3..dee0821cd769 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topic_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py index dc4a36ef83da..0fc18583a106 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py index 8d71be39861a..2d2a987ee60c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_list_topics_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py index 5c6b63a2dd96..536b7f099c50 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py index 9af236d44ece..e89f90320bd1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_publish_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py index d9451b6241d7..a814eab541c8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py index 9b7326c2cf02..46c967e4e9b0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_publisher_update_topic_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py index b93ec0b5d1b1..e24d459c883d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py index c7caa1cbb577..d3be03abe919 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_commit_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py index 9cdd164f5bdb..7eaf44f443e1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py index ce8619977972..da7cf76c9e11 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_create_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py index 1bbebfff5dc6..6fffc7395a17 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py index a35c3af1b19a..fa37387cdad1 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py index 118fe734067f..4d1ac5e19d4b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_revision_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py index 77dfe887b2a6..64640ba166c4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_delete_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py index 240df5a2a39b..feb39e86e15d 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py index 2d45faebf8e5..cf387dbcfc6c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_get_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py index 7ea5120011ca..9c2f61ad4027 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py index 6aebac042f1f..08b49520c5b9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schema_revisions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py index 963618fa9808..7d88f3194e62 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py index d7b9e64641b2..776abc3d48c3 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_list_schemas_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py index b50a08b189a0..66628743ccd4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py index 4d83c8cacd77..2a5d2687d103 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_rollback_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py index 3dcd25490f40..127b90fec702 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py index 4f509d6b3023..08e3b9142755 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py index 5deb08a44f80..5cdc6072d1a5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py index cc78267927f4..af9792f1e73f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_schema_service_validate_schema_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py index 7fc2967d5bcf..37ea78fa1050 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py index 85b492df02f1..80cc79a64595 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_acknowledge_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py index 3f7c691da483..f1084952bab2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py index 7738b03da4bd..207b31599f67 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py index 3062180367f6..64a7f134d1b4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py index cbb05f62af69..7efb7a912ad8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_create_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py index 5bea031a7dd2..b92fab270b89 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py index 01342901b37f..dd7533eaf4b2 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py index 08a15f4ed063..12c85f95e4bf 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py index 1cde73a72c16..c9285d87e0c5 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_delete_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py index 40960acc7187..fd22fe0237f8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py index 3ee2e9c62b62..a027bcddff17 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py index 47c7399989ef..12eabdec4a2b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py index 22832bc89a3d..13b7ea6268f6 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_get_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py index bcc3be15dac4..0d369877336f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py index 3477e32d32e7..4568bef4826a 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_snapshots_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py index 7b00831a303c..b7811265a7b0 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py index 8e1a96487b5c..5bdc68dd574c 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_list_subscriptions_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py index 8e93722929b4..4492740cd23e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py index 1d32afec25b7..d198d4bab4dc 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_ack_deadline_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py index 1d88c959046b..155db77c62db 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py index 43ac23e4fc35..bca872f9d1f4 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_modify_push_config_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py index 3e6f26a0b2d8..d351f26cfb37 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py index 950f18cc17fa..e110075922df 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py index 9038bfd11f10..b5eab9a46993 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py index b57af8fb014f..8a0063f66278 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_seek_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py index 332c128954b4..b2ecd899b1f7 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py index 47926a80a3d7..2de00926956e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_streaming_pull_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py index 833885af5927..7aa873ec187e 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py index 1c8f8530da4b..7cb4af13e3b9 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_snapshot_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py index 5c00a74dcd5c..ed6a5512b591 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_async.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py index 5cc3de6ece70..a592001ec94f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py +++ b/packages/google-cloud-pubsub/samples/generated_samples/pubsub_v1_generated_subscriber_update_subscription_sync.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 2e9609f5f48a..3bb90d7806c9 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/__init__.py b/packages/google-cloud-pubsub/tests/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-pubsub/tests/__init__.py +++ b/packages/google-cloud-pubsub/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/__init__.py b/packages/google-cloud-pubsub/tests/unit/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-pubsub/tests/unit/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py index 8f6cf068242c..cbf94b283c70 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 4856b1cad297..223679ad1524 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 194cddacfe9f..7f83558611bc 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 966e30957a23..417249e97480 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
From 2ffad59e7f8baaedf67c2fd57b04ef413f81eec5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 20 Mar 2025 11:09:49 -0400 Subject: [PATCH 1149/1197] chore(main): release 2.29.0 (#1352) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 24 +++++++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 28 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index e0b4b7916280..15777e3f6a0e 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.28.0" + ".": "2.29.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 63f40ce6a6ff..9ffc0d132c18 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,30 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.29.0](https://github.com/googleapis/python-pubsub/compare/v2.28.0...v2.29.0) (2025-03-19) + + +### Features + +* Add REST Interceptors which support reading metadata ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* Add support for opt-in debug logging ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* Deprecate `enabled` field for message transforms and add `disabled` field ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) + + +### Bug Fixes + +* Allow logs to propagate 
upstream for caplog testing ([#1374](https://github.com/googleapis/python-pubsub/issues/1374)) ([fa39b0e](https://github.com/googleapis/python-pubsub/commit/fa39b0e87695da40036c1daec1b3108374672d61)) +* Allow Protobuf 6.x ([#1369](https://github.com/googleapis/python-pubsub/issues/1369)) ([c95b7a5](https://github.com/googleapis/python-pubsub/commit/c95b7a5bad7138a70e56c278970f5b54939a68f8)) +* Fix typing issue with gRPC metadata when key ends in -bin ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) + + +### Documentation + +* A comment for field `code` in message `.google.pubsub.v1.JavaScriptUDF` is changed ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* Add samples and test for ingestion from Kafka sources ([#1354](https://github.com/googleapis/python-pubsub/issues/1354)) ([820f986](https://github.com/googleapis/python-pubsub/commit/820f986104ca39fd0c92ba6816319e939be1ed63)) +* Deprecate `enabled` field for message transforms and add `disabled` field ([4363179](https://github.com/googleapis/python-pubsub/commit/43631790781ccfe071a7ecad41949399d3dbd063)) +* **samples:** Increase example max_bytes setting for cloud storage subscriptions to encourage more performant subscribe ([#1324](https://github.com/googleapis/python-pubsub/issues/1324)) ([cb760a7](https://github.com/googleapis/python-pubsub/commit/cb760a71cd4ad035d0c2c4c0f7b66bf52f18808c)) + ## [2.28.0](https://github.com/googleapis/python-pubsub/compare/v2.27.3...v2.28.0) (2025-01-30) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 8f0f03c065a2..07483fa04d24 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the 
License. # -__version__ = "2.28.0" # {x-release-please-version} +__version__ = "2.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 8f0f03c065a2..07483fa04d24 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.28.0" # {x-release-please-version} +__version__ = "2.29.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index dd0d6423a363..9f802d89d587 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.29.0" }, "snippets": [ { From f783c1e3944605414f63ec22415f55b4a248a05e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 7 Apr 2025 17:46:37 -0400 Subject: [PATCH 1150/1197] docs: update documentation for JavaScriptUDF to indicate that the `message_id` metadata field is optional instead of required (#1380) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py | 2 +- .../generated_samples/snippet_metadata_google.pubsub.v1.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 74b65549bcbb..6f27edfeeec3 100644 --- 
a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1119,7 +1119,7 @@ class JavaScriptUDF(proto.Message): * * @param {Object} metadata - Pub/Sub message metadata. * Keys: - * - (required) 'message_id' : {string} + * - (optional) 'message_id' : {string} * - (optional) 'publish_time': {string} YYYY-MM-DDTHH:MM:SSZ format * - (optional) 'ordering_key': {string} */ diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 9f802d89d587..dd0d6423a363 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.29.0" + "version": "0.1.0" }, "snippets": [ { From 72b548d82216b40effca1ef4d23873aa38ab4092 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 14 Apr 2025 08:29:17 -0700 Subject: [PATCH 1151/1197] chore(python): remove noxfile.py from templates (#1383) Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .../.github/.OwlBot.lock.yaml | 4 +- packages/google-cloud-pubsub/owlbot.py | 92 +------------------ 2 files changed, 3 insertions(+), 93 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index c631e1f7d7e9..508ba98efebf 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:5581906b957284864632cde4e9c51d1cc66b0094990b27e689132fe5cd036046 -# created: 2025-03-05 + digest: sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447 +# created: 2025-04-14T14:34:43.260858345Z diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index a8044bd6f744..3aa352620d2b 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -326,7 +326,7 @@ if count < 1: raise Exception(".coveragerc replacement failed.") - s.move([library], excludes=["**/gapic_version.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) + s.move([library], excludes=["**/gapic_version.py", "noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- @@ -345,96 +345,6 @@ ) s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml", "README.rst", "docs/index.rst"]) -# ---------------------------------------------------------------------------- -# Add mypy nox session. -# ---------------------------------------------------------------------------- -s.replace( - "noxfile.py", - r"LINT_PATHS = \[.*?\]", - '\g<0>\n\nMYPY_VERSION = "mypy==1.10.0"', -) -s.replace( - "noxfile.py", r'"blacken",', '\g<0>\n "mypy",', -) -s.replace( - "noxfile.py", - r"nox\.options\.error_on_missing_interpreters = True", - textwrap.dedent( - ''' \g<0> - - - @nox.session(python=DEFAULT_PYTHON_VERSION) - def mypy(session): - """Run type checks with mypy.""" - session.install("-e", ".[all]") - session.install(MYPY_VERSION) - - # Version 2.1.1 of google-api-core version is the first type-checked release. - # Version 2.2.0 of google-cloud-core version is the first type-checked release. 
- session.install( - "google-api-core[grpc]>=2.1.1", - "google-cloud-core>=2.2.0", - ) - - # Just install the type info directly, since "mypy --install-types" might - # require an additional pass. - # Exclude types-protobuf==4.24.0.20240106 - # See https://github.com/python/typeshed/issues/11254 - session.install("types-protobuf!=4.24.0.20240106", "types-setuptools") - - # TODO: Only check the hand-written layer, the generated code does not pass - # mypy checks yet. - # https://github.com/googleapis/gapic-generator-python/issues/1092 - session.run("mypy", "-p", "google.cloud")''' - ), -) - - -# ---------------------------------------------------------------------------- -# Add mypy_samples nox session. -# ---------------------------------------------------------------------------- -s.replace( - "noxfile.py", - r' "mypy",', - '\g<0>\n # https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936' - '\n # "mypy_samples", # TODO: uncomment when the check passes', -) -s.replace( - "noxfile.py", - r'session\.run\("mypy", "-p", "google.cloud"\)', - textwrap.dedent( - ''' \g<0> - - - @nox.session(python=DEFAULT_PYTHON_VERSION) - def mypy_samples(session): - """Run type checks with mypy.""" - - session.install("-e", ".[all]") - - session.install("pytest") - session.install(MYPY_VERSION) - - # Just install the type info directly, since "mypy --install-types" might - # require an additional pass. - session.install("types-mock", "types-protobuf", "types-setuptools") - - session.run( - "mypy", - "--config-file", - str(CURRENT_DIRECTORY / "samples" / "snippets" / "mypy.ini"), - "--no-incremental", # Required by warn-unused-configs from mypy.ini to work - "samples/", - )''' - ), -) - - -# Only consider the hand-written layer when assessing the test coverage. 
-s.replace( - "noxfile.py", "--cov=google", "--cov=google/cloud", -) - s.replace(".github/blunderbuss.yml", "googleapis/api-pubsub", "mukund-ananthu") python.py_samples(skip_readmes=True) From d03e58268b41584b7a195818adca96176c4e1958 Mon Sep 17 00:00:00 2001 From: Kamal Aboul-Hosn Date: Mon, 28 Apr 2025 17:01:17 -0400 Subject: [PATCH 1152/1197] docs(sample): Add samples for topic and subscription SMTs (#1386) Co-authored-by: mukund-ananthu <83691193+mukund-ananthu@users.noreply.github.com> --- .../samples/snippets/publisher.py | 43 ++++++++++++++++ .../samples/snippets/publisher_test.py | 20 ++++++++ .../samples/snippets/subscriber.py | 49 +++++++++++++++++++ .../samples/snippets/subscriber_test.py | 31 ++++++++++++ 4 files changed, 143 insertions(+) diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index e279324b81a4..d2b6dd2b87cb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -326,6 +326,39 @@ def create_topic_with_confluent_cloud_ingestion( # [END pubsub_create_topic_with_confluent_cloud_ingestion] +def create_topic_with_smt( + project_id: str, + topic_id: str, +) -> None: + """Create a new Pub/Sub topic with a UDF SMT.""" + # [START pubsub_create_topic_with_smt] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import JavaScriptUDF, MessageTransform, Topic + + # TODO(developer) + # project_id = "your-project-id" + # topic_id = "your-topic-id" + + code = """function redactSSN(message, metadata) { + const data = JSON.parse(message.data); + delete data['ssn']; + message.data = JSON.stringify(data); + return message; + }""" + udf = JavaScriptUDF(code=code, function_name="redactSSN") + transforms = [MessageTransform(javascript_udf=udf)] + + publisher = pubsub_v1.PublisherClient() + topic_path = publisher.topic_path(project_id, topic_id) + + request = Topic(name=topic_path, 
message_transforms=transforms) + + topic = publisher.create_topic(request=request) + + print(f"Created topic: {topic.name} with SMT") + # [END pubsub_create_topic_with_smt] + + def update_topic_type( project_id: str, topic_id: str, @@ -888,6 +921,11 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: "gcp_service_account" ) + create_parser = subparsers.add_parser( + "create_smt", help=create_topic_with_smt.__doc__ + ) + create_parser.add_argument("topic_id") + update_topic_type_parser = subparsers.add_parser( "update_kinesis_ingestion", help=update_topic_type.__doc__ ) @@ -1007,6 +1045,11 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: args.identity_pool_id, args.gcp_service_account, ) + elif args.command == "create_smt": + create_topic_with_smt( + args.project_id, + args.topic_id, + ) elif args.command == "update_kinesis_ingestion": update_topic_type( args.project_id, diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index dc7b94027ee0..1c691bd5c21f 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -313,6 +313,26 @@ def test_create_topic_with_confluent_cloud_ingestion( publisher_client.delete_topic(request={"topic": topic_path}) +def test_create_with_smt( + publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] +) -> None: + # The scope of `topic_path` is limited to this function. + topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + + try: + publisher_client.delete_topic(request={"topic": topic_path}) + except NotFound: + pass + + publisher.create_topic_with_smt(PROJECT_ID, TOPIC_ID) + + out, _ = capsys.readouterr() + assert f"Created topic: {topic_path} with SMT" in out + + # Clean up resource created for the test. 
+ publisher_client.delete_topic(request={"topic": topic_path}) + + def test_update_topic_type( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index c09f5def14d9..5549d056fbe7 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -578,6 +578,45 @@ def create_cloudstorage_subscription( # [END pubsub_create_cloud_storage_subscription] +def create_subscription_with_smt( + project_id: str, topic_id: str, subscription_id: str +) -> None: + """Create a subscription with a UDF SMT.""" + # [START pubsub_create_subscription_with_smt] + from google.cloud import pubsub_v1 + from google.pubsub_v1.types import JavaScriptUDF, MessageTransform + + # TODO(developer): Choose an existing topic. + # project_id = "your-project-id" + # topic_id = "your-topic-id" + # subscription_id = "your-subscription-id" + + publisher = pubsub_v1.PublisherClient() + subscriber = pubsub_v1.SubscriberClient() + topic_path = publisher.topic_path(project_id, topic_id) + subscription_path = subscriber.subscription_path(project_id, subscription_id) + + code = """function redactSSN(message, metadata) { + const data = JSON.parse(message.data); + delete data['ssn']; + message.data = JSON.stringify(data); + return message; + }""" + udf = JavaScriptUDF(code=code, function_name="redactSSN") + transforms = [MessageTransform(javascript_udf=udf)] + + with subscriber: + subscription = subscriber.create_subscription( + request={ + "name": subscription_path, + "topic": topic_path, + "message_transforms": transforms, + } + ) + print(f"Created subscription with SMT: {subscription}") + # [END pubsub_create_subscription_with_smt] + + def delete_subscription(project_id: str, subscription_id: str) -> None: """Deletes an existing Pub/Sub topic.""" # [START 
pubsub_delete_subscription] @@ -1310,6 +1349,12 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_cloudstorage_subscription_parser.add_argument("subscription_id") create_cloudstorage_subscription_parser.add_argument("bucket") + create_subscription_with_smt_parser = subparsers.add_parser( + "create-with-smt", help=create_subscription_with_smt.__doc__ + ) + create_subscription_with_smt_parser.add_argument("topic_id") + create_subscription_with_smt_parser.add_argument("subscription_id") + delete_parser = subparsers.add_parser("delete", help=delete_subscription.__doc__) delete_parser.add_argument("subscription_id") @@ -1471,6 +1516,10 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: create_cloudstorage_subscription( args.project_id, args.topic_id, args.subscription_id, args.bucket ) + elif args.command == "create-with-smt": + create_subscription_with_smt( + args.project_id, args.topic_id, args.subscription_id + ) elif args.command == "delete": delete_subscription(args.project_id, args.subscription_id) diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 86f7a94cef93..53a844e01423 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -579,6 +579,37 @@ def test_create_push_subscription( subscriber_client.delete_subscription(request={"subscription": subscription_path}) +def test_create_subscription_with_smt( + subscriber_client: pubsub_v1.SubscriberClient, + topic: str, + capsys: CaptureFixture[str], +) -> None: + subscription_for_create_name = ( + f"subscription-test-subscription-for-create-with-smt-{PY_VERSION}-{UUID}" + ) + + subscription_path = subscriber_client.subscription_path( + PROJECT_ID, subscription_for_create_name + ) + + try: + subscriber_client.delete_subscription( + request={"subscription": subscription_path} + ) 
+ except NotFound: + pass + + subscriber.create_subscription_with_smt( + PROJECT_ID, TOPIC, subscription_for_create_name + ) + + out, _ = capsys.readouterr() + assert f"{subscription_for_create_name}" in out + + # Clean up. + subscriber_client.delete_subscription(request={"subscription": subscription_path}) + + def test_update_push_subscription( subscriber_client: pubsub_v1.SubscriberClient, topic: str, From ac623ca292e448463af7efdc2fb2d2e15919f67c Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Wed, 21 May 2025 06:05:34 -0400 Subject: [PATCH 1153/1197] docs: update readme links (#1409) --- packages/google-cloud-pubsub/README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index dd3032e002cb..97010e9985e8 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -27,7 +27,7 @@ independently written applications. :target: https://pypi.org/project/google-cloud-pubsub/ .. _Google Cloud Pub / Sub: https://cloud.google.com/pubsub/ .. _Product Documentation: https://cloud.google.com/pubsub/docs -.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/summary_overview Quick Start ----------- @@ -116,7 +116,7 @@ messages to it To learn more, consult the `publishing documentation`_. -.. _publishing documentation: https://cloud.google.com/python/docs/reference/pubsub/latest +.. _publishing documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/google.cloud.pubsub_v1.publisher.client.Client Subscribing @@ -162,7 +162,7 @@ block the current thread until a given condition obtains: It is also possible to pull messages in a synchronous (blocking) fashion. To learn more about subscribing, consult the `subscriber documentation`_. -.. 
_subscriber documentation: https://cloud.google.com/python/docs/reference/pubsub/latest +.. _subscriber documentation: https://cloud.google.com/python/docs/reference/pubsub/latest/google.cloud.pubsub_v1.subscriber.client.Client Authentication From 717402eba738b54760609cf6fd22ba8503914272 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Wed, 21 May 2025 06:07:07 -0400 Subject: [PATCH 1154/1197] chore: Update gapic-generator-python to 1.25.0 (#1385) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.flake8 | 15 ++++++++------- packages/google-cloud-pubsub/MANIFEST.in | 13 ++++--------- .../services/publisher/async_client.py | 4 ++++ .../pubsub_v1/services/publisher/client.py | 3 +++ .../services/publisher/transports/base.py | 4 ++++ .../services/publisher/transports/grpc.py | 3 +-- .../services/publisher/transports/rest.py | 4 ++++ .../services/schema_service/async_client.py | 4 ++++ .../pubsub_v1/services/schema_service/client.py | 3 +++ .../services/schema_service/transports/base.py | 4 ++++ .../services/schema_service/transports/grpc.py | 3 +-- .../services/schema_service/transports/rest.py | 4 ++++ .../services/subscriber/async_client.py | 4 ++++ .../pubsub_v1/services/subscriber/client.py | 3 +++ .../services/subscriber/transports/base.py | 4 ++++ .../services/subscriber/transports/grpc.py | 3 +-- .../services/subscriber/transports/rest.py | 4 ++++ .../testing/constraints-3.13.txt | 17 +++++++++++------ 18 files changed, 71 insertions(+), 28 deletions(-) diff --git a/packages/google-cloud-pubsub/.flake8 b/packages/google-cloud-pubsub/.flake8 index 32986c79287a..90316de21489 100644 --- a/packages/google-cloud-pubsub/.flake8 +++ b/packages/google-cloud-pubsub/.flake8 @@ -1,28 +1,29 @@ # -*- coding: utf-8 -*- -# -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the 
License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! +# [flake8] +# TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): +# Resolve flake8 lint issues ignore = E203, E231, E266, E501, W503 exclude = - # Exclude generated code. - **/proto/** + # TODO(https://github.com/googleapis/gapic-generator-python/issues/2333): + # Ensure that generated code passes flake8 lint **/gapic/** **/services/** **/types/** + # Exclude Protobuf gencode *_pb2.py # Standard linting exemptions. diff --git a/packages/google-cloud-pubsub/MANIFEST.in b/packages/google-cloud-pubsub/MANIFEST.in index d6814cd60037..dae249ec8976 100644 --- a/packages/google-cloud-pubsub/MANIFEST.in +++ b/packages/google-cloud-pubsub/MANIFEST.in @@ -1,25 +1,20 @@ # -*- coding: utf-8 -*- -# -# Copyright 2024 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by synthtool. DO NOT EDIT! 
+# include README.rst LICENSE -recursive-include google *.json *.proto py.typed +recursive-include google *.py *.pyi *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ - -# Exclude scripts for samples readmegen -prune scripts/readme-gen diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 87838c006783..52f1f1e30f59 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1662,5 +1663,8 @@ async def __aexit__(self, exc_type, exc, tb): client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("PublisherAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index c94cec688ae1..c8f39273e290 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -47,6 +47,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -2129,5 +2130,7 @@ def test_iam_permissions( 
client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("PublisherClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 354a6c7141f4..0fb41c922213 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -35,6 +36,9 @@ client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class PublisherTransport(abc.ABC): """Abstract transport class for Publisher.""" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index f27b968a8266..9e8ed1737b1d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -73,12 +73,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.pubsub.v1.Publisher", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, 
"metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index a5c99da1bb81..92ee46e15399 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -61,6 +62,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class PublisherRestInterceptor: """Interceptor for Publisher. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py index 4ebf3ac946d6..b2d139fa080b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/async_client.py @@ -37,6 +37,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -1775,5 +1776,8 @@ async def __aexit__(self, exc_type, exc, tb): client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("SchemaServiceAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 40fc381b812f..493ffd2b675c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -46,6 +46,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -2190,5 +2191,7 @@ def test_iam_permissions( client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("SchemaServiceClient",) diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index c1187d1cdc59..c30773b74c59 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -36,6 +37,9 @@ client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class SchemaServiceTransport(abc.ABC): """Abstract transport class for SchemaService.""" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index f089c2724c6b..1a746ef43d6f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -74,12 +74,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.pubsub.v1.SchemaService", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index f4eab1dd87ff..6d8902c5dee0 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -62,6 +63,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class SchemaServiceRestInterceptor: """Interceptor for SchemaService. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 4ad7306f2654..5d3dd3da7dcf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -41,6 +41,7 @@ from google.api_core import retry_async as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: @@ -2635,5 +2636,8 @@ async def __aexit__(self, exc_type, exc, tb): client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + __all__ = ("SubscriberAsyncClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py 
b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 44f3b15986c1..eef3ab77de33 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -49,6 +49,7 @@ from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] @@ -3087,5 +3088,7 @@ def test_iam_permissions( client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ("SubscriberClient",) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 01fd7ddcb28b..cc62113f93fc 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -25,6 +25,7 @@ from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import google.protobuf from google.iam.v1 import iam_policy_pb2 # type: ignore from google.iam.v1 import policy_pb2 # type: ignore @@ -35,6 +36,9 @@ client_library_version=package_version.__version__ ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class SubscriberTransport(abc.ABC): """Abstract transport class for Subscriber.""" diff --git 
a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 6a10424feb46..ec400da0ca6c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -73,12 +73,11 @@ def intercept_unary_unary(self, continuation, client_call_details, request): f"Sending request for {client_call_details.method}", extra={ "serviceName": "google.pubsub.v1.Subscriber", - "rpcName": client_call_details.method, + "rpcName": str(client_call_details.method), "request": grpc_request, "metadata": grpc_request["metadata"], }, ) - response = continuation(client_call_details, request) if logging_enabled: # pragma: NO COVER response_metadata = response.trailing_metadata() diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index 62211228e588..bb0ee68c6e89 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -23,6 +23,7 @@ from google.api_core import rest_helpers from google.api_core import rest_streaming from google.api_core import gapic_v1 +import google.protobuf from google.protobuf import json_format from google.iam.v1 import iam_policy_pb2 # type: ignore @@ -61,6 +62,9 @@ rest_version=f"requests@{requests_version}", ) +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + class SubscriberRestInterceptor: """Interceptor for Subscriber. 
diff --git a/packages/google-cloud-pubsub/testing/constraints-3.13.txt b/packages/google-cloud-pubsub/testing/constraints-3.13.txt index ad3f0fa58e2d..2010e549cceb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.13.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.13.txt @@ -1,7 +1,12 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf -grpc-google-iam-v1 +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 From 5cb99012c305b1a9cac7b24c45821186040ff8f0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 21 May 2025 06:09:16 -0400 Subject: [PATCH 1155/1197] fix: remove setup.cfg configuration for creating universal wheels (#1376) Co-authored-by: ohmayr --- packages/google-cloud-pubsub/setup.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 packages/google-cloud-pubsub/setup.cfg diff --git a/packages/google-cloud-pubsub/setup.cfg b/packages/google-cloud-pubsub/setup.cfg deleted file mode 100644 index 052350089505..000000000000 --- a/packages/google-cloud-pubsub/setup.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2023 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[bdist_wheel] -universal = 1 From 8432bf4a0dd8fa8703b82c49f9c000d214aef95f Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Wed, 21 May 2025 16:53:20 -0400 Subject: [PATCH 1156/1197] chore: change assignees for issues and PRs to abbrowne126 (#1410) --- packages/google-cloud-pubsub/.github/blunderbuss.yml | 6 +++--- packages/google-cloud-pubsub/owlbot.py | 4 +--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/blunderbuss.yml b/packages/google-cloud-pubsub/.github/blunderbuss.yml index 3408b580a839..ac5c87339243 100644 --- a/packages/google-cloud-pubsub/.github/blunderbuss.yml +++ b/packages/google-cloud-pubsub/.github/blunderbuss.yml @@ -4,14 +4,14 @@ # Note: This file is autogenerated. To make changes to the assignee # team, please update `codeowner_team` in `.repo-metadata.json`. 
assign_issues: - - mukund-ananthu + - abbrowne126 assign_issues_by: - labels: - "samples" to: - googleapis/python-samples-reviewers - - mukund-ananthu + - abbrowne126 assign_prs: - - mukund-ananthu + - abbrowne126 diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 3aa352620d2b..d845e5758e0c 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -343,9 +343,7 @@ system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], ) -s.move(templated_files, excludes=[".coveragerc", ".github/release-please.yml", "README.rst", "docs/index.rst"]) - -s.replace(".github/blunderbuss.yml", "googleapis/api-pubsub", "mukund-ananthu") +s.move(templated_files, excludes=[".coveragerc", ".github/blunderbuss.yml", ".github/release-please.yml", "README.rst", "docs/index.rst"]) python.py_samples(skip_readmes=True) From 00cb37c0c7c6fad3abdd092d015645fd39105211 Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Fri, 23 May 2025 21:51:22 +0200 Subject: [PATCH 1157/1197] chore(deps): update all dependencies (#1412) Co-authored-by: Anthonios Partheniou --- .../samples/snippets/requirements-test.txt | 9 +++++---- .../samples/snippets/requirements.txt | 11 ++++++----- 2 files changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index aa57a68a2ff0..2bf14b760868 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,7 +1,8 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' -pytest==8.3.4; python_version >= '3.8' -mock==5.1.0 +pytest==8.3.5; python_version >= '3.8' +mock==5.2.0 flaky==3.8.1 -google-cloud-bigquery==3.27.0 -google-cloud-storage==2.19.0 +google-cloud-bigquery==3.30.0; python_version < '3.9' 
+google-cloud-bigquery==3.33.0; python_version >= '3.9' +google-cloud-storage==3.1.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index b6ae767c6a73..83202d4f8c95 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,10 +1,11 @@ -google-cloud-pubsub==2.28.0 +google-cloud-pubsub==2.29.0 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' -protobuf==5.29.2; python_version >= '3.8' +protobuf===5.29.4; python_version == '3.8' +protobuf==6.31.0; python_version >= '3.9' avro==1.12.0 opentelemetry-api===1.22.0; python_version == '3.7' opentelemetry-sdk===1.22.0; python_version == '3.7' -opentelemetry-api==1.29.0; python_version >= '3.8' -opentelemetry-sdk==1.29.0; python_version >= '3.8' -opentelemetry-exporter-gcp-trace==1.7.0 +opentelemetry-api==1.33.1; python_version >= '3.8' +opentelemetry-sdk==1.33.1; python_version >= '3.8' +opentelemetry-exporter-gcp-trace==1.9.0 From 9a4df7dc1c63f6b0a9cf413034dd24b90b21854f Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Wed, 28 May 2025 13:15:28 -0400 Subject: [PATCH 1158/1197] chore(main): release 2.29.1 (#1381) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 18 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 15777e3f6a0e..db87e5c90c11 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ 
b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.29.0" + ".": "2.29.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 9ffc0d132c18..1435f6752726 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.29.1](https://github.com/googleapis/python-pubsub/compare/v2.29.0...v2.29.1) (2025-05-23) + + +### Bug Fixes + +* Remove setup.cfg configuration for creating universal wheels ([#1376](https://github.com/googleapis/python-pubsub/issues/1376)) ([60639c4](https://github.com/googleapis/python-pubsub/commit/60639c4928105ae8a72c8e37b1f48f75cc2ffcc3)) + + +### Documentation + +* **sample:** Add samples for topic and subscription SMTs ([#1386](https://github.com/googleapis/python-pubsub/issues/1386)) ([4d072e0](https://github.com/googleapis/python-pubsub/commit/4d072e088b59f692dc3d59c3197a2993c125917e)) +* Update documentation for JavaScriptUDF to indicate that the `message_id` metadata field is optional instead of required ([#1380](https://github.com/googleapis/python-pubsub/issues/1380)) ([be90054](https://github.com/googleapis/python-pubsub/commit/be9005412fea06bea917c8b6861546b7e6c62a1e)) +* Update readme links ([#1409](https://github.com/googleapis/python-pubsub/issues/1409)) ([77ba05d](https://github.com/googleapis/python-pubsub/commit/77ba05d4ba5b84a25c1a07c5397bbc184fa6041d)) + ## [2.29.0](https://github.com/googleapis/python-pubsub/compare/v2.28.0...v2.29.0) (2025-03-19) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 07483fa04d24..f620421904f1 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for 
the specific language governing permissions and # limitations under the License. # -__version__ = "2.29.0" # {x-release-please-version} +__version__ = "2.29.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 07483fa04d24..f620421904f1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.29.0" # {x-release-please-version} +__version__ = "2.29.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index dd0d6423a363..dbb835bb320b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.29.1" }, "snippets": [ { From 8661b308256215541260f2f8f9f1398ae215ae35 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 6 Jun 2025 17:03:00 -0700 Subject: [PATCH 1159/1197] feat: Add SchemaViolationReason to IngestionFailureEvent (#1411) Co-authored-by: Owl Bot Co-authored-by: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> --- .../services/subscriber/async_client.py | 4 +- .../pubsub_v1/services/subscriber/client.py | 4 +- .../services/subscriber/transports/grpc.py | 2 +- .../subscriber/transports/grpc_asyncio.py | 2 +- .../google/pubsub_v1/types/pubsub.py | 169 +++++++++++++++--- 5 files changed, 154 insertions(+), 27 
deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 5d3dd3da7dcf..77f96d6dfeab 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -1377,7 +1377,7 @@ def streaming_pull( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Awaitable[AsyncIterable[pubsub.StreamingPullResponse]]: r"""Establishes a stream with the server, which sends messages down - to the client. The client streams acknowledgements and ack + to the client. The client streams acknowledgments and ack deadline modifications back to the server. The server will close the stream and return the status on any error. The server may close the stream with status ``UNAVAILABLE`` to reassign @@ -1427,7 +1427,7 @@ def request_generator(): requests (AsyncIterator[`google.pubsub_v1.types.StreamingPullRequest`]): The request object AsyncIterator. Request for the ``StreamingPull`` streaming RPC method. This request is used to establish the initial stream as - well as to stream acknowledgements and ack deadline + well as to stream acknowledgments and ack deadline modifications from the client to the server. retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, should be retried. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index eef3ab77de33..ff7edf538e09 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1827,7 +1827,7 @@ def streaming_pull( metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> Iterable[pubsub.StreamingPullResponse]: r"""Establishes a stream with the server, which sends messages down - to the client. The client streams acknowledgements and ack + to the client. The client streams acknowledgments and ack deadline modifications back to the server. The server will close the stream and return the status on any error. The server may close the stream with status ``UNAVAILABLE`` to reassign @@ -1877,7 +1877,7 @@ def request_generator(): requests (Iterator[google.pubsub_v1.types.StreamingPullRequest]): The request object iterator. Request for the ``StreamingPull`` streaming RPC method. This request is used to establish the initial stream as - well as to stream acknowledgements and ack deadline + well as to stream acknowledgments and ack deadline modifications from the client to the server. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index ec400da0ca6c..36c77648b347 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -571,7 +571,7 @@ def streaming_pull( r"""Return a callable for the streaming pull method over gRPC. Establishes a stream with the server, which sends messages down - to the client. 
The client streams acknowledgements and ack + to the client. The client streams acknowledgments and ack deadline modifications back to the server. The server will close the stream and return the status on any error. The server may close the stream with status ``UNAVAILABLE`` to reassign diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 04576b5870cb..78f8afd4ca2d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -585,7 +585,7 @@ def streaming_pull( r"""Return a callable for the streaming pull method over gRPC. Establishes a stream with the server, which sends messages down - to the client. The client streams acknowledgements and ack + to the client. The client streams acknowledgments and ack deadline modifications back to the server. The server will close the stream and return the status on any error. The server may close the stream with status ``UNAVAILABLE`` to reassign diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 6f27edfeeec3..dcc25bb313b3 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -827,6 +827,11 @@ class IngestionFailureEvent(proto.Message): Optional. Failure when ingesting from Confluent Cloud. + This field is a member of `oneof`_ ``failure``. + aws_kinesis_failure (google.pubsub_v1.types.IngestionFailureEvent.AwsKinesisFailureReason): + Optional. Failure when ingesting from AWS + Kinesis. + This field is a member of `oneof`_ ``failure``. 
""" @@ -850,6 +855,12 @@ class AvroFailureReason(proto.Message): """ + class SchemaViolationReason(proto.Message): + r"""Set when a Pub/Sub message fails to get published due to a + schema validation violation. + + """ + class CloudStorageFailure(proto.Message): r"""Failure when ingesting from a Cloud Storage source. @@ -881,6 +892,11 @@ class CloudStorageFailure(proto.Message): Optional. The Pub/Sub API limits prevented the desired message from being published. + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + This field is a member of `oneof`_ ``reason``. """ @@ -908,10 +924,23 @@ class CloudStorageFailure(proto.Message): oneof="reason", message="IngestionFailureEvent.ApiViolationReason", ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) class AwsMskFailureReason(proto.Message): r"""Failure when ingesting from an Amazon MSK source. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -931,6 +960,11 @@ class AwsMskFailureReason(proto.Message): Optional. The Pub/Sub API limits prevented the desired message from being published. + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + This field is a member of `oneof`_ ``reason``. 
""" @@ -956,10 +990,23 @@ class AwsMskFailureReason(proto.Message): oneof="reason", message="IngestionFailureEvent.ApiViolationReason", ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) class AzureEventHubsFailureReason(proto.Message): r"""Failure when ingesting from an Azure Event Hubs source. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -979,6 +1026,11 @@ class AzureEventHubsFailureReason(proto.Message): Optional. The Pub/Sub API limits prevented the desired message from being published. + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + This field is a member of `oneof`_ ``reason``. """ @@ -1004,10 +1056,23 @@ class AzureEventHubsFailureReason(proto.Message): oneof="reason", message="IngestionFailureEvent.ApiViolationReason", ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) class ConfluentCloudFailureReason(proto.Message): r"""Failure when ingesting from a Confluent Cloud source. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -1027,6 +1092,11 @@ class ConfluentCloudFailureReason(proto.Message): Optional. The Pub/Sub API limits prevented the desired message from being published. + This field is a member of `oneof`_ ``reason``. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + This field is a member of `oneof`_ ``reason``. """ @@ -1052,6 +1122,57 @@ class ConfluentCloudFailureReason(proto.Message): oneof="reason", message="IngestionFailureEvent.ApiViolationReason", ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) + + class AwsKinesisFailureReason(proto.Message): + r"""Failure when ingesting from an AWS Kinesis source. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + stream_arn (str): + Optional. The stream ARN of the Kinesis + stream being ingested from. + partition_key (str): + Optional. The partition key of the message + that failed to be ingested. + sequence_number (str): + Optional. The sequence number of the message + that failed to be ingested. + schema_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.SchemaViolationReason): + Optional. The Pub/Sub message failed schema + validation. + + This field is a member of `oneof`_ ``reason``. 
+ """ + + stream_arn: str = proto.Field( + proto.STRING, + number=1, + ) + partition_key: str = proto.Field( + proto.STRING, + number=2, + ) + sequence_number: str = proto.Field( + proto.STRING, + number=3, + ) + schema_violation_reason: "IngestionFailureEvent.SchemaViolationReason" = ( + proto.Field( + proto.MESSAGE, + number=4, + oneof="reason", + message="IngestionFailureEvent.SchemaViolationReason", + ) + ) topic: str = proto.Field( proto.STRING, @@ -1085,6 +1206,12 @@ class ConfluentCloudFailureReason(proto.Message): oneof="failure", message=ConfluentCloudFailureReason, ) + aws_kinesis_failure: AwsKinesisFailureReason = proto.Field( + proto.MESSAGE, + number=7, + oneof="failure", + message=AwsKinesisFailureReason, + ) class JavaScriptUDF(proto.Message): @@ -1784,8 +1911,8 @@ class Subscription(proto.Message): This generally implies that messages will be retried as soon as possible for healthy subscribers. RetryPolicy will be triggered on - NACKs or acknowledgement deadline exceeded - events for a given message. + NACKs or acknowledgment deadline exceeded events + for a given message. detached (bool): Optional. Indicates whether the subscription is detached from its topic. Detached subscriptions don't receive @@ -1799,7 +1926,7 @@ class Subscription(proto.Message): ``message_id`` on this subscription: - The message sent to a subscriber is guaranteed not to be - resent before the message's acknowledgement deadline + resent before the message's acknowledgment deadline expires. - An acknowledged message will not be resent to a subscriber. @@ -1977,7 +2104,7 @@ class RetryPolicy(proto.Message): Retry delay will be exponential based on provided minimum and maximum backoffs. https://en.wikipedia.org/wiki/Exponential_backoff. - RetryPolicy will be triggered on NACKs or acknowledgement deadline + RetryPolicy will be triggered on NACKs or acknowledgment deadline exceeded events for a given message. Retry Policy is implemented on a best effort basis. 
At times, the @@ -2037,7 +2164,7 @@ class DeadLetterPolicy(proto.Message): message. The value must be between 5 and 100. The number of delivery attempts is defined as 1 + (the sum - of number of NACKs and number of times the acknowledgement + of number of NACKs and number of times the acknowledgment deadline has been exceeded for the message). A NACK is any call to ModifyAckDeadline with a 0 deadline. @@ -2386,7 +2513,7 @@ class CloudStorageConfig(proto.Message): elapse before a new Cloud Storage file is created. Min 1 minute, max 10 minutes, default 5 minutes. May not exceed the subscription's - acknowledgement deadline. + acknowledgment deadline. max_bytes (int): Optional. The maximum bytes that can be written to a Cloud Storage file before a new file is created. Min 1 KB, max 10 @@ -2838,7 +2965,7 @@ class AcknowledgeRequest(proto.Message): class StreamingPullRequest(proto.Message): r"""Request for the ``StreamingPull`` streaming RPC method. This request is used to establish the initial stream as well as to stream - acknowledgements and ack deadline modifications from the client to + acknowledgments and ack deadline modifications from the client to the server. Attributes: @@ -2849,12 +2976,12 @@ class StreamingPullRequest(proto.Message): client to server. Format is ``projects/{project}/subscriptions/{sub}``. ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs for acknowledging + Optional. List of acknowledgment IDs for acknowledging previously received messages (received on this stream or a different stream). If an ack ID has expired, the corresponding message may be redelivered later. Acknowledging a message more than once will not result in an - error. If the acknowledgement ID is malformed, the stream + error. If the acknowledgment ID is malformed, the stream will be aborted with status ``INVALID_ARGUMENT``. modify_deadline_seconds (MutableSequence[int]): Optional. 
The list of new ack deadlines for the IDs listed @@ -2872,7 +2999,7 @@ class StreamingPullRequest(proto.Message): request. If the value is < 0 (an error), the stream will be aborted with status ``INVALID_ARGUMENT``. modify_deadline_ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs whose deadline will be + Optional. List of acknowledgment IDs whose deadline will be modified based on the corresponding element in ``modify_deadline_seconds``. This field can be used to indicate that more time is needed to process a message by @@ -2974,22 +3101,22 @@ class StreamingPullResponse(proto.Message): """ class AcknowledgeConfirmation(proto.Message): - r"""Acknowledgement IDs sent in one or more previous requests to + r"""Acknowledgment IDs sent in one or more previous requests to acknowledge a previously received message. Attributes: ack_ids (MutableSequence[str]): Optional. Successfully processed - acknowledgement IDs. + acknowledgment IDs. invalid_ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs that - were malformed or whose acknowledgement deadline + Optional. List of acknowledgment IDs that + were malformed or whose acknowledgment deadline has expired. unordered_ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs that + Optional. List of acknowledgment IDs that were out of order. temporary_failed_ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs that + Optional. List of acknowledgment IDs that failed processing with temporary issues. """ @@ -3011,19 +3138,19 @@ class AcknowledgeConfirmation(proto.Message): ) class ModifyAckDeadlineConfirmation(proto.Message): - r"""Acknowledgement IDs sent in one or more previous requests to + r"""Acknowledgment IDs sent in one or more previous requests to modify the deadline for a specific message. Attributes: ack_ids (MutableSequence[str]): Optional. Successfully processed - acknowledgement IDs. + acknowledgment IDs. 
invalid_ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs that - were malformed or whose acknowledgement deadline + Optional. List of acknowledgment IDs that + were malformed or whose acknowledgment deadline has expired. temporary_failed_ack_ids (MutableSequence[str]): - Optional. List of acknowledgement IDs that + Optional. List of acknowledgment IDs that failed processing with temporary issues. """ From 1c254ee7218ca292727f516c4295a847770c3ca7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 9 Jun 2025 09:53:07 -0400 Subject: [PATCH 1160/1197] chore(main): release 2.30.0 (#1418) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index db87e5c90c11..8028c14b5a29 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.29.1" + ".": "2.30.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 1435f6752726..7a92657580d4 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.30.0](https://github.com/googleapis/python-pubsub/compare/v2.29.1...v2.30.0) (2025-06-07) + + +### Features + +* Add SchemaViolationReason to IngestionFailureEvent 
([#1411](https://github.com/googleapis/python-pubsub/issues/1411)) ([c046ca2](https://github.com/googleapis/python-pubsub/commit/c046ca22e9bddff6b50f7670bf6b9b9470bf78e8)) + ## [2.29.1](https://github.com/googleapis/python-pubsub/compare/v2.29.0...v2.29.1) (2025-05-23) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index f620421904f1..5ebb3bec4b3c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.29.1" # {x-release-please-version} +__version__ = "2.30.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index f620421904f1..5ebb3bec4b3c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.29.1" # {x-release-please-version} +__version__ = "2.30.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index dbb835bb320b..28ac15bef288 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.29.1" + "version": "2.30.0" }, "snippets": [ { From 2dca30fb6bd6622c916b8c179ed478ecd31e631d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Jun 2025 11:53:16 -0400 Subject: [PATCH 1161/1197] build(deps): bump protobuf from 6.31.0 to 6.31.1 in /samples/snippets (#1424) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 83202d4f8c95..22d6f32d1beb 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -2,7 +2,7 @@ google-cloud-pubsub==2.29.0 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf===5.29.4; python_version == '3.8' -protobuf==6.31.0; python_version >= '3.9' +protobuf==6.31.1; python_version >= '3.9' avro==1.12.0 opentelemetry-api===1.22.0; python_version == '3.7' opentelemetry-sdk===1.22.0; python_version == '3.7' From a0f2c075c6e0d8c7baa6db663aaaaa29f425d20c Mon Sep 17 00:00:00 2001 From: Andrew Browne 
<81702808+abbrowne126@users.noreply.github.com> Date: Tue, 24 Jun 2025 16:42:55 -0400 Subject: [PATCH 1162/1197] Fix: Surface Fatal Stream Errors to Future; Adjust Retryable Error Codes (#1422) --- .../_protocol/streaming_pull_manager.py | 54 ++++++++++++++++--- .../subscriber/test_streaming_pull_manager.py | 53 +++++++++++++++--- 2 files changed, 92 insertions(+), 15 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 4c9e1c20e7bc..486a728b4819 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -16,6 +16,7 @@ import collections import functools +import inspect import itertools import logging import threading @@ -62,14 +63,22 @@ _REGULAR_SHUTDOWN_THREAD_NAME = "Thread-RegularStreamShutdown" _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" _RETRYABLE_STREAM_ERRORS = ( + exceptions.Aborted, exceptions.DeadlineExceeded, - exceptions.ServiceUnavailable, + exceptions.GatewayTimeout, exceptions.InternalServerError, + exceptions.ResourceExhausted, + exceptions.ServiceUnavailable, exceptions.Unknown, - exceptions.GatewayTimeout, - exceptions.Aborted, ) -_TERMINATING_STREAM_ERRORS = (exceptions.Cancelled,) +_TERMINATING_STREAM_ERRORS = ( + exceptions.Cancelled, + exceptions.InvalidArgument, + exceptions.NotFound, + exceptions.PermissionDenied, + exceptions.Unauthenticated, + exceptions.Unauthorized, +) _MAX_LOAD = 1.0 """The load threshold above which to pause the incoming message stream.""" @@ -98,6 +107,13 @@ code_pb2.UNAVAILABLE, } +# `on_fatal_exception` was added in `google-api-core v2.25.1``, which allows us to inform +# callers on unrecoverable errors. 
We can only pass this arg if it's available in the +# `BackgroundConsumer` spec. +_SHOULD_USE_ON_FATAL_ERROR_CALLBACK = "on_fatal_exception" in inspect.getfullargspec( + bidi.BackgroundConsumer +) + def _wrap_as_exception(maybe_exception: Any) -> BaseException: """Wrap an object as a Python exception, if needed. @@ -876,7 +892,18 @@ def open( assert self._scheduler is not None scheduler_queue = self._scheduler.queue self._dispatcher = dispatcher.Dispatcher(self, scheduler_queue) - self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) + + # `on_fatal_exception` is only available in more recent library versions. + # For backwards compatibility reasons, we only pass it when `google-api-core` supports it. + if _SHOULD_USE_ON_FATAL_ERROR_CALLBACK: + self._consumer = bidi.BackgroundConsumer( + self._rpc, + self._on_response, + on_fatal_exception=self._on_fatal_exception, + ) + else: + self._consumer = bidi.BackgroundConsumer(self._rpc, self._on_response) + self._leaser = leaser.Leaser(self) self._heartbeater = heartbeater.Heartbeater(self) @@ -1247,6 +1274,17 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: self.maybe_pause_consumer() + def _on_fatal_exception(self, exception: BaseException) -> None: + """ + Called whenever `self.consumer` receives a non-retryable exception. + We close the manager on such non-retryable cases. + """ + _LOGGER.exception( + "Streaming pull terminating after receiving non-recoverable error: %s", + exception, + ) + self.close(exception) + def _should_recover(self, exception: BaseException) -> bool: """Determine if an error on the RPC stream should be recovered. @@ -1283,8 +1321,10 @@ def _should_terminate(self, exception: BaseException) -> bool: in a list of terminating exceptions. 
""" exception = _wrap_as_exception(exception) - if isinstance(exception, _TERMINATING_STREAM_ERRORS): - _LOGGER.debug("Observed terminating stream error %s", exception) + is_api_error = isinstance(exception, exceptions.GoogleAPICallError) + # Terminate any non-API errors, or non-retryable errors (permission denied, unauthorized, etc.) + if not is_api_error or isinstance(exception, _TERMINATING_STREAM_ERRORS): + _LOGGER.error("Observed terminating stream error %s", exception) return True _LOGGER.debug("Observed non-terminating stream error %s", exception) return False diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index f4ceedaf0db6..86d2461e797d 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -1333,7 +1333,13 @@ def test_open(heartbeater, dispatcher, leaser, background_consumer, resumable_bi leaser.return_value.start.assert_called_once() assert manager.leaser == leaser.return_value - background_consumer.assert_called_once_with(manager._rpc, manager._on_response) + if streaming_pull_manager._SHOULD_USE_ON_FATAL_ERROR_CALLBACK: + background_consumer.assert_called_once_with( + manager._rpc, manager._on_response, manager._on_fatal_exception + ) + else: + background_consumer.assert_called_once_with(manager._rpc, manager._on_response) + background_consumer.return_value.start.assert_called_once() assert manager._consumer == background_consumer.return_value @@ -1432,6 +1438,31 @@ def test_close(): assert manager.is_active is False +def test_closes_on_fatal_consumer_error(): + ( + manager, + consumer, + dispatcher, + leaser, + heartbeater, + scheduler, + ) = make_running_manager() + + if streaming_pull_manager._SHOULD_USE_ON_FATAL_ERROR_CALLBACK: + error = 
ValueError("some fatal exception") + manager._on_fatal_exception(error) + + await_manager_shutdown(manager, timeout=3) + + consumer.stop.assert_called_once() + leaser.stop.assert_called_once() + dispatcher.stop.assert_called_once() + heartbeater.stop.assert_called_once() + scheduler.shutdown.assert_called_once() + + assert manager.is_active is False + + def test_close_inactive_consumer(): ( manager, @@ -2270,18 +2301,24 @@ def test__should_recover_false(): def test__should_terminate_true(): manager = make_manager() - details = "Cancelled. Go away, before I taunt you a second time." - exc = exceptions.Cancelled(details) - - assert manager._should_terminate(exc) is True + for exc in [ + exceptions.Cancelled(""), + exceptions.PermissionDenied(""), + TypeError(), + ValueError(), + ]: + assert manager._should_terminate(exc) def test__should_terminate_false(): manager = make_manager() - exc = TypeError("wahhhhhh") - - assert manager._should_terminate(exc) is False + for exc in [ + exceptions.ResourceExhausted(""), + exceptions.ServiceUnavailable(""), + exceptions.DeadlineExceeded(""), + ]: + assert not manager._should_terminate(exc) @mock.patch("threading.Thread", autospec=True) From 4dcd54852a0b878e44d798358ee6c875ef2ebc45 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 26 Jun 2025 16:44:28 -0400 Subject: [PATCH 1163/1197] feat: Add MessageTransformationFailureReason to IngestionFailureEvent (#1427) Co-authored-by: Owl Bot --- .../google/pubsub_v1/types/pubsub.py | 72 ++++++++++++++++++- .../snippet_metadata_google.pubsub.v1.json | 2 +- 2 files changed, 71 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index dcc25bb313b3..227da2208a85 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -861,6 
+861,12 @@ class SchemaViolationReason(proto.Message): """ + class MessageTransformationFailureReason(proto.Message): + r"""Set when a Pub/Sub message fails to get published due to a + message transformation error. + + """ + class CloudStorageFailure(proto.Message): r"""Failure when ingesting from a Cloud Storage source. @@ -897,6 +903,11 @@ class CloudStorageFailure(proto.Message): Optional. The Pub/Sub message failed schema validation. + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + This field is a member of `oneof`_ ``reason``. """ @@ -932,6 +943,12 @@ class CloudStorageFailure(proto.Message): message="IngestionFailureEvent.SchemaViolationReason", ) ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=8, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) class AwsMskFailureReason(proto.Message): r"""Failure when ingesting from an Amazon MSK source. @@ -965,6 +982,11 @@ class AwsMskFailureReason(proto.Message): Optional. The Pub/Sub message failed schema validation. + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + This field is a member of `oneof`_ ``reason``. 
""" @@ -998,6 +1020,12 @@ class AwsMskFailureReason(proto.Message): message="IngestionFailureEvent.SchemaViolationReason", ) ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) class AzureEventHubsFailureReason(proto.Message): r"""Failure when ingesting from an Azure Event Hubs source. @@ -1031,6 +1059,11 @@ class AzureEventHubsFailureReason(proto.Message): Optional. The Pub/Sub message failed schema validation. + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + This field is a member of `oneof`_ ``reason``. """ @@ -1064,6 +1097,12 @@ class AzureEventHubsFailureReason(proto.Message): message="IngestionFailureEvent.SchemaViolationReason", ) ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) class ConfluentCloudFailureReason(proto.Message): r"""Failure when ingesting from a Confluent Cloud source. @@ -1097,6 +1136,11 @@ class ConfluentCloudFailureReason(proto.Message): Optional. The Pub/Sub message failed schema validation. + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + This field is a member of `oneof`_ ``reason``. 
""" @@ -1130,10 +1174,21 @@ class ConfluentCloudFailureReason(proto.Message): message="IngestionFailureEvent.SchemaViolationReason", ) ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=7, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) class AwsKinesisFailureReason(proto.Message): r"""Failure when ingesting from an AWS Kinesis source. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -1150,6 +1205,11 @@ class AwsKinesisFailureReason(proto.Message): Optional. The Pub/Sub message failed schema validation. + This field is a member of `oneof`_ ``reason``. + message_transformation_failure_reason (google.pubsub_v1.types.IngestionFailureEvent.MessageTransformationFailureReason): + Optional. Failure encountered when applying a + message transformation to the Pub/Sub message. + This field is a member of `oneof`_ ``reason``. """ @@ -1173,6 +1233,12 @@ class AwsKinesisFailureReason(proto.Message): message="IngestionFailureEvent.SchemaViolationReason", ) ) + message_transformation_failure_reason: "IngestionFailureEvent.MessageTransformationFailureReason" = proto.Field( + proto.MESSAGE, + number=5, + oneof="reason", + message="IngestionFailureEvent.MessageTransformationFailureReason", + ) topic: str = proto.Field( proto.STRING, @@ -3091,10 +3157,12 @@ class StreamingPullResponse(proto.Message): will not be empty. acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): Optional. This field will only be set if - ``enable_exactly_once_delivery`` is set to ``true``. 
+ ``enable_exactly_once_delivery`` is set to ``true`` and is + not guaranteed to be populated. modify_ack_deadline_confirmation (google.pubsub_v1.types.StreamingPullResponse.ModifyAckDeadlineConfirmation): Optional. This field will only be set if - ``enable_exactly_once_delivery`` is set to ``true``. + ``enable_exactly_once_delivery`` is set to ``true`` and is + not guaranteed to be populated. subscription_properties (google.pubsub_v1.types.StreamingPullResponse.SubscriptionProperties): Optional. Properties associated with this subscription. diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 28ac15bef288..dd0d6423a363 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.30.0" + "version": "0.1.0" }, "snippets": [ { From 0e5b9034e477e7b20aec78d0ba8b3e06e2d11eb5 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 8 Jul 2025 09:54:25 -0400 Subject: [PATCH 1164/1197] chore(main): release 2.31.0 (#1426) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 12 ++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 16 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 8028c14b5a29..25b8f2e809ea 100644 --- 
a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.30.0" + ".": "2.31.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index 7a92657580d4..c9daf8a3e047 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,18 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.31.0](https://github.com/googleapis/python-pubsub/compare/v2.30.0...v2.31.0) (2025-06-26) + + +### Features + +* Add MessageTransformationFailureReason to IngestionFailureEvent ([#1427](https://github.com/googleapis/python-pubsub/issues/1427)) ([8ab13e1](https://github.com/googleapis/python-pubsub/commit/8ab13e1b71c151f0146548e7224dd38c9d719a88)) + + +### Bug Fixes + +* Surface Fatal Stream Errors to Future; Adjust Retryable Error Codes ([#1422](https://github.com/googleapis/python-pubsub/issues/1422)) ([e081beb](https://github.com/googleapis/python-pubsub/commit/e081beb29056035304d365ec9c50fa7ffbac6886)) + ## [2.30.0](https://github.com/googleapis/python-pubsub/compare/v2.29.1...v2.30.0) (2025-06-07) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 5ebb3bec4b3c..8ab09c42e9c1 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.30.0" # {x-release-please-version} +__version__ = "2.31.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 5ebb3bec4b3c..8ab09c42e9c1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.30.0" # {x-release-please-version} +__version__ = "2.31.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index dd0d6423a363..cb25ebf705a8 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.31.0" }, "snippets": [ { From 7b4f7939225f6ba30cc6cf203e0f76357252fd65 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Wed, 23 Jul 2025 15:38:23 -0400 Subject: [PATCH 1165/1197] Fix: Propagate Otel Context to Subscriber Callback if Provided (#1429) Co-authored-by: Owl Bot --- .../open_telemetry/subscribe_opentelemetry.py | 10 +++++++++- .../subscriber/_protocol/streaming_pull_manager.py | 6 ++++-- .../subscriber/test_streaming_pull_manager.py | 4 ++-- 3 files changed, 15 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py index 
88870be605a0..5a6abd21b445 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/open_telemetry/subscribe_opentelemetry.py @@ -160,7 +160,7 @@ def end_subscribe_scheduler_span(self) -> None: assert self._scheduler_span is not None self._scheduler_span.end() - def start_process_span(self) -> None: + def start_process_span(self) -> trace.Span: assert self._subscribe_span is not None tracer = trace.get_tracer(_OPEN_TELEMETRY_TRACER_NAME) publish_create_span_link: Optional[trace.Link] = None @@ -186,6 +186,7 @@ def start_process_span(self) -> None: end_on_exit=False, ) as process_span: self._process_span = process_span + return process_span def end_process_span(self) -> None: assert self._process_span is not None @@ -200,6 +201,13 @@ def add_process_span_event(self, event: str) -> None: }, ) + def __enter__(self) -> trace.Span: + return self.start_process_span() + + def __exit__(self, exc_type, exc_val, traceback): + if self._process_span: + self.end_process_span() + def start_modack_span( subscribe_span_links: List[trace.Link], diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 486a728b4819..8c8ebf479135 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -148,8 +148,10 @@ def _wrap_callback_errors( try: if message.opentelemetry_data: message.opentelemetry_data.end_subscribe_concurrency_control_span() - message.opentelemetry_data.start_process_span() - callback(message) + with message.opentelemetry_data: + callback(message) + else: + callback(message) except BaseException as exc: # Note: the likelihood of this failing is 
extremely low. This just adds # a message to a queue, so if this doesn't work the world is in an diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 86d2461e797d..f45959637b53 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -2956,10 +2956,10 @@ def test_opentelemetry_subscriber_concurrency_control_span_end(span_exporter): streaming_pull_manager._wrap_callback_errors(mock.Mock(), mock.Mock(), msg) spans = span_exporter.get_finished_spans() - assert len(spans) == 1 + assert len(spans) == 2 concurrency_control_span = spans[0] - concurrency_control_span.name == "subscriber concurrency control" + assert concurrency_control_span.name == "subscriber concurrency control" def test_opentelemetry_wrap_callback_error(span_exporter): From 98f5ad429a41c1693ffbfa00a4fb61502678cb4e Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Mon, 28 Jul 2025 16:27:00 -0400 Subject: [PATCH 1166/1197] fix: Change Log Severities for Terminated Streams (#1433) --- .../pubsub_v1/subscriber/_protocol/streaming_pull_manager.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 8c8ebf479135..de3ac3780344 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -1281,7 +1281,7 @@ def _on_fatal_exception(self, exception: BaseException) -> None: Called whenever 
`self.consumer` receives a non-retryable exception. We close the manager on such non-retryable cases. """ - _LOGGER.exception( + _LOGGER.info( "Streaming pull terminating after receiving non-recoverable error: %s", exception, ) @@ -1326,7 +1326,7 @@ def _should_terminate(self, exception: BaseException) -> bool: is_api_error = isinstance(exception, exceptions.GoogleAPICallError) # Terminate any non-API errors, or non-retryable errors (permission denied, unauthorized, etc.) if not is_api_error or isinstance(exception, _TERMINATING_STREAM_ERRORS): - _LOGGER.error("Observed terminating stream error %s", exception) + _LOGGER.debug("Observed terminating stream error %s", exception) return True _LOGGER.debug("Observed non-terminating stream error %s", exception) return False From 9d76c83cb243f703b97220d0ec046cc43766efb8 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Mon, 28 Jul 2025 14:09:26 -0700 Subject: [PATCH 1167/1197] chore(main): release 2.31.1 (#1431) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../google-cloud-pubsub/.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 8 ++++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 12 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 25b8f2e809ea..1c48d61d0ee1 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.31.0" + ".": "2.31.1" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index c9daf8a3e047..da524d1a06b3 100644 --- 
a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,14 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.31.1](https://github.com/googleapis/python-pubsub/compare/v2.31.0...v2.31.1) (2025-07-28) + + +### Bug Fixes + +* Change Log Severities for Terminated Streams ([#1433](https://github.com/googleapis/python-pubsub/issues/1433)) ([3a3aa79](https://github.com/googleapis/python-pubsub/commit/3a3aa79040d656a3391a153386ec662d002f9368)) +* Propagate Otel Context to Subscriber Callback if Provided ([#1429](https://github.com/googleapis/python-pubsub/issues/1429)) ([b0f6f49](https://github.com/googleapis/python-pubsub/commit/b0f6f49f65752e88523f9c4209366d2a18140416)) + ## [2.31.0](https://github.com/googleapis/python-pubsub/compare/v2.30.0...v2.31.0) (2025-06-26) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 8ab09c42e9c1..79eaa5593b8c 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "2.31.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 8ab09c42e9c1..79eaa5593b8c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.31.0" # {x-release-please-version} +__version__ = "2.31.1" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index cb25ebf705a8..1e6003150105 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.31.0" + "version": "2.31.1" }, "snippets": [ { From 36d09d411608e60b7666a095c7c1ec8363c0e82b Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Tue, 16 Sep 2025 10:25:24 -0400 Subject: [PATCH 1168/1197] chore(deps): Upgrade snippets and sample dependencies (#1421) Co-authored-by: Owl Bot --- packages/google-cloud-pubsub/.github/workflows/lint.yml | 2 +- packages/google-cloud-pubsub/noxfile.py | 6 +++--- .../samples/snippets/requirements-test.txt | 7 ++++--- .../google-cloud-pubsub/samples/snippets/requirements.txt | 8 +++++--- 4 files changed, 13 insertions(+), 10 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index 4866193af2a9..1051da0bdda4 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.10" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index dd182f105f6c..ba80fd80b490 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py 
@@ -34,7 +34,7 @@ MYPY_VERSION = "mypy==1.10.0" -DEFAULT_PYTHON_VERSION = "3.8" +DEFAULT_PYTHON_VERSION = "3.10" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.7", @@ -351,7 +351,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.10") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" @@ -386,7 +386,7 @@ def docs(session): ) -@nox.session(python="3.10") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docfx(session): """Build the docfx yaml files for this library.""" diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 2bf14b760868..5b13ec325a15 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,8 +1,9 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' -pytest==8.3.5; python_version >= '3.8' +pytest===8.3.5; python_version == '3.8' +pytest==8.4.0; python_version >= '3.9' mock==5.2.0 flaky==3.8.1 -google-cloud-bigquery==3.30.0; python_version < '3.9' -google-cloud-bigquery==3.33.0; python_version >= '3.9' +google-cloud-bigquery===3.30.0; python_version <= '3.8' +google-cloud-bigquery==3.34.0; python_version >= '3.9' google-cloud-storage==3.1.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 22d6f32d1beb..1d6f5d992956 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-pubsub==2.29.0 +google-cloud-pubsub==2.29.1 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf===5.29.4; python_version == '3.8' @@ -6,6 +6,8 @@ protobuf==6.31.1; python_version >= '3.9' avro==1.12.0 opentelemetry-api===1.22.0; python_version == '3.7' 
opentelemetry-sdk===1.22.0; python_version == '3.7' -opentelemetry-api==1.33.1; python_version >= '3.8' -opentelemetry-sdk==1.33.1; python_version >= '3.8' +opentelemetry-api===1.33.1; python_version == '3.8' +opentelemetry-sdk===1.33.1; python_version == '3.8' +opentelemetry-api==1.34.0; python_version >= '3.9' +opentelemetry-sdk==1.34.0; python_version >= '3.9' opentelemetry-exporter-gcp-trace==1.9.0 From 009fc451433791c73ead5c5ee34f8e0781f882ae Mon Sep 17 00:00:00 2001 From: Mend Renovate Date: Thu, 2 Oct 2025 22:08:14 +0100 Subject: [PATCH 1169/1197] chore(deps): update all dependencies (#1456) Co-authored-by: abbrowne126 <81702808+abbrowne126@users.noreply.github.com> --- .../.github/workflows/docs.yml | 8 ++++---- .../.github/workflows/lint.yml | 6 +++--- .../pubsub_v1/publisher/_sequencer/base.py | 4 ++-- .../pubsub_v1/services/publisher/client.py | 2 +- .../services/schema_service/client.py | 2 +- .../pubsub_v1/services/subscriber/client.py | 4 ++-- .../google/pubsub_v1/types/__init__.py | 5 +++-- packages/google-cloud-pubsub/mypy.ini | 4 +++- packages/google-cloud-pubsub/noxfile.py | 20 +++++++++++-------- packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/pytest.ini | 4 +++- .../samples/snippets/requirements-test.txt | 6 +++--- .../samples/snippets/requirements.txt | 8 ++++---- 13 files changed, 42 insertions(+), 33 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index 2833fe98fff0..0d0fdb861cbb 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.10" - name: Install nox @@ -24,9 +24,9 @@ jobs: runs-on: ubuntu-latest steps: - name: 
Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.10" - name: Install nox diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index 1051da0bdda4..46a3ff38f31d 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -8,11 +8,11 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v4 + uses: actions/checkout@v5 - name: Setup Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: - python-version: "3.10" + python-version: "3.13" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py index 58ec5a571bb8..daaacaa33de8 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/publisher/_sequencer/base.py @@ -53,8 +53,8 @@ def unpause(self) -> None: # pragma: NO COVER def publish( self, message: gapic_types.PubsubMessage, - retry: "OptionalRetry" = gapic_v1.method.DEFAULT, - timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, + retry: "OptionalRetry" = gapic_v1.method.DEFAULT, # type: ignore + timeout: gapic_types.TimeoutType = gapic_v1.method.DEFAULT, # type: ignore ) -> "futures.Future": # pragma: NO COVER """Publish message for this ordering key. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index c8f39273e290..28cfa680bb07 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -725,7 +725,7 @@ def __init__( emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(transport_init, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 493ffd2b675c..29730b85e54b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -677,7 +677,7 @@ def __init__( emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(transport_init, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index ff7edf538e09..f5945ba64455 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -729,7 +729,7 @@ def __init__( emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if 
emulator_host: - if issubclass(transport_init, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) @@ -1897,7 +1897,7 @@ def request_generator(): # Wrappers in api-core should not automatically pre-fetch the first # stream result, as this breaks the stream when re-opening it. # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self._transport.streaming_pull._prefetch_first_result_ = False + self._transport.streaming_pull._prefetch_first_result_ = False # type: ignore # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 85c6b901b1a9..c4f020ee201c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -87,12 +87,13 @@ Encoding, SchemaView, ) +import google.api_core.timeout TimeoutType = Union[ int, float, - "google.api_core.timeout.ConstantTimeout", - "google.api_core.timeout.ExponentialTimeout", + google.api_core.timeout.ConstantTimeout, + google.api_core.timeout.ExponentialTimeout, ] """The type of the timeout parameter of publisher client methods.""" diff --git a/packages/google-cloud-pubsub/mypy.ini b/packages/google-cloud-pubsub/mypy.ini index 574c5aed394b..00f693e269be 100644 --- a/packages/google-cloud-pubsub/mypy.ini +++ b/packages/google-cloud-pubsub/mypy.ini @@ -1,3 +1,5 @@ [mypy] -python_version = 3.7 +python_version = 3.8 namespace_packages = True +# Autogenerated folder - TODO remove this https://github.com/googleapis/python-pubsub/issues/536 +exclude = google/pubsub_v1 diff --git a/packages/google-cloud-pubsub/noxfile.py 
b/packages/google-cloud-pubsub/noxfile.py index ba80fd80b490..70e65a571105 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -34,7 +34,7 @@ MYPY_VERSION = "mypy==1.10.0" -DEFAULT_PYTHON_VERSION = "3.10" +DEFAULT_PYTHON_VERSION = "3.13" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.7", @@ -105,8 +105,7 @@ def mypy(session): # Version 2.1.1 of google-api-core version is the first type-checked release. # Version 2.2.0 of google-cloud-core version is the first type-checked release. session.install( - "google-api-core[grpc]>=2.1.1", - "google-cloud-core>=2.2.0", + "google-api-core[grpc]>=2.1.1", "google-cloud-core>=2.2.0", "types-requests" ) # Just install the type info directly, since "mypy --install-types" might @@ -118,7 +117,8 @@ def mypy(session): # TODO: Only check the hand-written layer, the generated code does not pass # mypy checks yet. # https://github.com/googleapis/gapic-generator-python/issues/1092 - session.run("mypy", "-p", "google.cloud") + # TODO: Re-enable mypy checks once we merge, since incremental checks are failing due to protobuf upgrade + # session.run("mypy", "-p", "google.cloud", "--exclude", "google/pubsub_v1/") @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -132,7 +132,9 @@ def mypy_samples(session): # Just install the type info directly, since "mypy --install-types" might # require an additional pass. 
- session.install("types-mock", "types-protobuf", "types-setuptools") + session.install( + "types-mock", "types-protobuf", "types-setuptools", "types-requests" + ) session.run( "mypy", @@ -192,7 +194,7 @@ def format(session): @nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") + session.install("setuptools", "docutils", "pygments") session.run("python", "setup.py", "check", "--restructuredtext", "--strict") @@ -351,7 +353,8 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python=DEFAULT_PYTHON_VERSION) +# py > 3.10 not supported yet +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" @@ -386,7 +389,8 @@ def docs(session): ) -@nox.session(python=DEFAULT_PYTHON_VERSION) +# py > 3.10 not supported yet +@nox.session(python="3.10") def docfx(session): """Build the docfx yaml files for this library.""" diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index d845e5758e0c..2a131d557bad 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -108,7 +108,7 @@ emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") if emulator_host: - if issubclass(transport_init, type(self)._transport_registry["grpc"]): + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore channel = grpc.insecure_channel(target=emulator_host) else: channel = grpc.aio.insecure_channel(target=emulator_host) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 6d55a7315bf3..09a522efefb5 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -19,4 +19,6 @@ filterwarnings = ignore:.*pkg_resources.declare_namespace:DeprecationWarning ignore:.*pkg_resources is deprecated as an API:DeprecationWarning # Remove once 
https://github.com/googleapis/gapic-generator-python/issues/2303 is fixed - ignore:The python-bigquery library will stop supporting Python 3.7:PendingDeprecationWarning \ No newline at end of file + ignore:The python-bigquery library will stop supporting Python 3.7:PendingDeprecationWarning + # Remove once we move off credential files https://github.com/googleapis/google-auth-library-python/pull/1812 + ignore:Your config file at [/home/kbuilder/.docker/config.json] contains these credential helper entries:DeprecationWarning diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 5b13ec325a15..7659e3676b23 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,9 +1,9 @@ backoff==2.2.1 pytest===7.4.4; python_version == '3.7' pytest===8.3.5; python_version == '3.8' -pytest==8.4.0; python_version >= '3.9' +pytest==8.4.2; python_version >= '3.9' mock==5.2.0 flaky==3.8.1 google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.34.0; python_version >= '3.9' -google-cloud-storage==3.1.0 +google-cloud-bigquery==3.38.0; python_version >= '3.9' +google-cloud-storage==3.4.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 1d6f5d992956..63a78cd67aaa 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,13 +1,13 @@ -google-cloud-pubsub==2.29.1 +google-cloud-pubsub==2.31.1 avro==1.12.0 protobuf===4.24.4; python_version == '3.7' protobuf===5.29.4; python_version == '3.8' -protobuf==6.31.1; python_version >= '3.9' +protobuf==6.32.1; python_version >= '3.9' avro==1.12.0 opentelemetry-api===1.22.0; python_version == '3.7' opentelemetry-sdk===1.22.0; python_version 
== '3.7' opentelemetry-api===1.33.1; python_version == '3.8' opentelemetry-sdk===1.33.1; python_version == '3.8' -opentelemetry-api==1.34.0; python_version >= '3.9' -opentelemetry-sdk==1.34.0; python_version >= '3.9' +opentelemetry-api==1.37.0; python_version >= '3.9' +opentelemetry-sdk==1.37.0; python_version >= '3.9' opentelemetry-exporter-gcp-trace==1.9.0 From 5365162a6e5ee7060fae2cc913a4b42af37e5227 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Mon, 6 Oct 2025 11:56:15 -0400 Subject: [PATCH 1170/1197] feat: debug logs (#1460) Co-authored-by: Owl Bot --- .../subscriber/_protocol/requests.py | 2 + .../_protocol/streaming_pull_manager.py | 119 ++++++++++++++---- .../cloud/pubsub_v1/subscriber/message.py | 30 +++++ packages/google-cloud-pubsub/pytest.ini | 2 + .../unit/pubsub_v1/subscriber/test_message.py | 24 +++- .../subscriber/test_streaming_pull_manager.py | 30 +++-- 6 files changed, 170 insertions(+), 37 deletions(-) diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py index 6fd35896b9cc..9a0ba5a5075d 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/requests.py @@ -32,6 +32,7 @@ class AckRequest(NamedTuple): ordering_key: Optional[str] future: Optional["futures.Future"] opentelemetry_data: Optional[SubscribeOpenTelemetry] = None + message_id: Optional[str] = None class DropRequest(NamedTuple): @@ -52,6 +53,7 @@ class ModAckRequest(NamedTuple): seconds: float future: Optional["futures.Future"] opentelemetry_data: Optional[SubscribeOpenTelemetry] = None + message_id: Optional[str] = None class NackRequest(NamedTuple): diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py 
b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index de3ac3780344..d509d80749e6 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -21,7 +21,16 @@ import logging import threading import typing -from typing import Any, Dict, Callable, Iterable, List, Optional, Set, Tuple +from typing import ( + Any, + Dict, + Callable, + Iterable, + List, + Optional, + Set, + Tuple, +) import uuid from opentelemetry import trace @@ -60,6 +69,12 @@ _LOGGER = logging.getLogger(__name__) +_SLOW_ACK_LOGGER = logging.getLogger("slow-ack") +_STREAMS_LOGGER = logging.getLogger("subscriber-streams") +_FLOW_CONTROL_LOGGER = logging.getLogger("subscriber-flow-control") +_CALLBACK_DELIVERY_LOGGER = logging.getLogger("callback-delivery") +_CALLBACK_EXCEPTION_LOGGER = logging.getLogger("callback-exceptions") +_EXPIRY_LOGGER = logging.getLogger("expiry") _REGULAR_SHUTDOWN_THREAD_NAME = "Thread-RegularStreamShutdown" _RPC_ERROR_THREAD_NAME = "Thread-OnRpcTerminated" _RETRYABLE_STREAM_ERRORS = ( @@ -145,6 +160,14 @@ def _wrap_callback_errors( callback: The user callback. message: The Pub/Sub message. """ + _CALLBACK_DELIVERY_LOGGER.debug( + "Message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=%s) received by subscriber callback", + message.message_id, + message.ack_id, + message.ordering_key, + message.exactly_once_enabled, + ) + try: if message.opentelemetry_data: message.opentelemetry_data.end_subscribe_concurrency_control_span() @@ -156,9 +179,15 @@ def _wrap_callback_errors( # Note: the likelihood of this failing is extremely low. This just adds # a message to a queue, so if this doesn't work the world is in an # unrecoverable state and this thread should just bail. 
- _LOGGER.exception( - "Top-level exception occurred in callback while processing a message" + + _CALLBACK_EXCEPTION_LOGGER.exception( + "Message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=%s)'s callback threw exception, nacking message.", + message.message_id, + message.ack_id, + message.ordering_key, + message.exactly_once_enabled, ) + message.nack() on_callback_error(exc) @@ -199,6 +228,9 @@ def _process_requests( error_status: Optional["status_pb2.Status"], ack_reqs_dict: Dict[str, requests.AckRequest], errors_dict: Optional[Dict[str, str]], + ack_histogram: Optional[histogram.Histogram] = None, + # TODO - Change this param to a Union of Literals when we drop p3.7 support + req_type: str = "ack", ): """Process requests when exactly-once delivery is enabled by referring to error_status and errors_dict. @@ -209,28 +241,40 @@ def _process_requests( """ requests_completed = [] requests_to_retry = [] - for ack_id in ack_reqs_dict: + for ack_id, ack_request in ack_reqs_dict.items(): + # Debug logging: slow acks + if ( + req_type == "ack" + and ack_histogram + and ack_request.time_to_ack > ack_histogram.percentile(percent=99) + ): + _SLOW_ACK_LOGGER.debug( + "Message (id=%s, ack_id=%s) ack duration of %s s is higher than the p99 ack duration", + ack_request.message_id, + ack_request.ack_id, + ) + # Handle special errors returned for ack/modack RPCs via the ErrorInfo # sidecar metadata when exactly-once delivery is enabled. 
if errors_dict and ack_id in errors_dict: exactly_once_error = errors_dict[ack_id] if exactly_once_error.startswith("TRANSIENT_"): - requests_to_retry.append(ack_reqs_dict[ack_id]) + requests_to_retry.append(ack_request) else: if exactly_once_error == "PERMANENT_FAILURE_INVALID_ACK_ID": exc = AcknowledgeError(AcknowledgeStatus.INVALID_ACK_ID, info=None) else: exc = AcknowledgeError(AcknowledgeStatus.OTHER, exactly_once_error) - future = ack_reqs_dict[ack_id].future + future = ack_request.future if future is not None: future.set_exception(exc) - requests_completed.append(ack_reqs_dict[ack_id]) + requests_completed.append(ack_request) # Temporary GRPC errors are retried elif ( error_status and error_status.code in _EXACTLY_ONCE_DELIVERY_TEMPORARY_RETRY_ERRORS ): - requests_to_retry.append(ack_reqs_dict[ack_id]) + requests_to_retry.append(ack_request) # Other GRPC errors are NOT retried elif error_status: if error_status.code == code_pb2.PERMISSION_DENIED: @@ -239,20 +283,20 @@ def _process_requests( exc = AcknowledgeError(AcknowledgeStatus.FAILED_PRECONDITION, info=None) else: exc = AcknowledgeError(AcknowledgeStatus.OTHER, str(error_status)) - future = ack_reqs_dict[ack_id].future + future = ack_request.future if future is not None: future.set_exception(exc) - requests_completed.append(ack_reqs_dict[ack_id]) + requests_completed.append(ack_request) # Since no error occurred, requests with futures are completed successfully. - elif ack_reqs_dict[ack_id].future: - future = ack_reqs_dict[ack_id].future + elif ack_request.future: + future = ack_request.future # success assert future is not None future.set_result(AcknowledgeStatus.SUCCESS) - requests_completed.append(ack_reqs_dict[ack_id]) + requests_completed.append(ack_request) # All other requests are considered completed. 
else: - requests_completed.append(ack_reqs_dict[ack_id]) + requests_completed.append(ack_request) return requests_completed, requests_to_retry @@ -560,8 +604,10 @@ def maybe_pause_consumer(self) -> None: with self._pause_resume_lock: if self.load >= _MAX_LOAD: if self._consumer is not None and not self._consumer.is_paused: - _LOGGER.debug( - "Message backlog over load at %.2f, pausing.", self.load + _FLOW_CONTROL_LOGGER.debug( + "Message backlog over load at %.2f (threshold %.2f), initiating client-side flow control", + self.load, + _RESUME_THRESHOLD, ) self._consumer.pause() @@ -588,10 +634,18 @@ def maybe_resume_consumer(self) -> None: self._maybe_release_messages() if self.load < _RESUME_THRESHOLD: - _LOGGER.debug("Current load is %.2f, resuming consumer.", self.load) + _FLOW_CONTROL_LOGGER.debug( + "Current load is %.2f (threshold %.2f), suspending client-side flow control.", + self.load, + _RESUME_THRESHOLD, + ) self._consumer.resume() else: - _LOGGER.debug("Did not resume, current load is %.2f.", self.load) + _FLOW_CONTROL_LOGGER.debug( + "Current load is %.2f (threshold %.2f), retaining client-side flow control.", + self.load, + _RESUME_THRESHOLD, + ) def _maybe_release_messages(self) -> None: """Release (some of) the held messages if the current load allows for it. 
@@ -702,7 +756,7 @@ def send_unary_ack( if self._exactly_once_delivery_enabled(): requests_completed, requests_to_retry = _process_requests( - error_status, ack_reqs_dict, ack_errors_dict + error_status, ack_reqs_dict, ack_errors_dict, self.ack_histogram, "ack" ) else: requests_completed = [] @@ -796,7 +850,11 @@ def send_unary_modack( if self._exactly_once_delivery_enabled(): requests_completed, requests_to_retry = _process_requests( - error_status, ack_reqs_dict, modack_errors_dict + error_status, + ack_reqs_dict, + modack_errors_dict, + self.ack_histogram, + "modack", ) else: requests_completed = [] @@ -1239,6 +1297,11 @@ def _on_response(self, response: gapic_types.StreamingPullResponse) -> None: receipt_modack=True, ) + if len(expired_ack_ids): + _EXPIRY_LOGGER.debug( + "ack ids %s were dropped as they have already expired.", expired_ack_ids + ) + with self._pause_resume_lock: if self._scheduler is None or self._leaser is None: _LOGGER.debug( @@ -1304,9 +1367,13 @@ def _should_recover(self, exception: BaseException) -> bool: # If this is in the list of idempotent exceptions, then we want to # recover. if isinstance(exception, _RETRYABLE_STREAM_ERRORS): - _LOGGER.debug("Observed recoverable stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed recoverable stream error %s, reopening stream", exception + ) return True - _LOGGER.debug("Observed non-recoverable stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed non-recoverable stream error %s, shutting down stream", exception + ) return False def _should_terminate(self, exception: BaseException) -> bool: @@ -1326,9 +1393,13 @@ def _should_terminate(self, exception: BaseException) -> bool: is_api_error = isinstance(exception, exceptions.GoogleAPICallError) # Terminate any non-API errors, or non-retryable errors (permission denied, unauthorized, etc.) 
if not is_api_error or isinstance(exception, _TERMINATING_STREAM_ERRORS): - _LOGGER.debug("Observed terminating stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed terminating stream error %s, shutting down stream", exception + ) return True - _LOGGER.debug("Observed non-terminating stream error %s", exception) + _STREAMS_LOGGER.debug( + "Observed non-terminating stream error %s, attempting to reopen", exception + ) return False def _on_rpc_done(self, future: Any) -> None: diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py index 61f60c4d9973..aa715ac6705f 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/message.py @@ -16,6 +16,7 @@ import datetime as dt import json +import logging import math import time import typing @@ -43,6 +44,8 @@ attributes: {} }}""" +_ACK_NACK_LOGGER = logging.getLogger("ack-nack") + _SUCCESS_FUTURE = futures.Future() _SUCCESS_FUTURE.set_result(AcknowledgeStatus.SUCCESS) @@ -274,6 +277,7 @@ def ack(self) -> None: time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( requests.AckRequest( + message_id=self.message_id, ack_id=self._ack_id, byte_size=self.size, time_to_ack=time_to_ack, @@ -282,6 +286,12 @@ def ack(self) -> None: opentelemetry_data=self.opentelemetry_data, ) ) + _ACK_NACK_LOGGER.debug( + "Called ack for message (id=%s, ack_id=%s, ordering_key=%s)", + self.message_id, + self.ack_id, + self.ordering_key, + ) def ack_with_response(self) -> "futures.Future": """Acknowledge the given message. @@ -322,6 +332,12 @@ def ack_with_response(self) -> "futures.Future": pubsub_v1.subscriber.exceptions.AcknowledgeError exception will be thrown. 
""" + _ACK_NACK_LOGGER.debug( + "Called ack for message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=True)", + self.message_id, + self.ack_id, + self.ordering_key, + ) if self.opentelemetry_data: self.opentelemetry_data.add_process_span_event("ack called") self.opentelemetry_data.end_process_span() @@ -335,6 +351,7 @@ def ack_with_response(self) -> "futures.Future": time_to_ack = math.ceil(time.time() - self._received_timestamp) self._request_queue.put( requests.AckRequest( + message_id=self.message_id, ack_id=self._ack_id, byte_size=self.size, time_to_ack=time_to_ack, @@ -382,6 +399,7 @@ def modify_ack_deadline(self, seconds: int) -> None: """ self._request_queue.put( requests.ModAckRequest( + message_id=self.message_id, ack_id=self._ack_id, seconds=seconds, future=None, @@ -445,6 +463,7 @@ def modify_ack_deadline_with_response(self, seconds: int) -> "futures.Future": self._request_queue.put( requests.ModAckRequest( + message_id=self.message_id, ack_id=self._ack_id, seconds=seconds, future=req_future, @@ -461,6 +480,13 @@ def nack(self) -> None: may take place immediately or after a delay, and may arrive at this subscriber or another. 
""" + _ACK_NACK_LOGGER.debug( + "Called nack for message (id=%s, ack_id=%s, ordering_key=%s, exactly_once=%s)", + self.message_id, + self.ack_id, + self.ordering_key, + self._exactly_once_delivery_enabled_func(), + ) if self.opentelemetry_data: self.opentelemetry_data.add_process_span_event("nack called") self.opentelemetry_data.end_process_span() @@ -530,3 +556,7 @@ def nack_with_response(self) -> "futures.Future": ) return future + + @property + def exactly_once_enabled(self): + return self._exactly_once_delivery_enabled_func() diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 09a522efefb5..fc17230eff6b 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -21,4 +21,6 @@ filterwarnings = # Remove once https://github.com/googleapis/gapic-generator-python/issues/2303 is fixed ignore:The python-bigquery library will stop supporting Python 3.7:PendingDeprecationWarning # Remove once we move off credential files https://github.com/googleapis/google-auth-library-python/pull/1812 + # Note that these are used in tests only ignore:Your config file at [/home/kbuilder/.docker/config.json] contains these credential helper entries:DeprecationWarning + ignore:The `credentials_file` argument is deprecated because of a potential security risk:DeprecationWarning \ No newline at end of file diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 8d9d2566e0e5..03bdc1514347 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -289,6 +289,7 @@ def test_ack(): msg.ack() put.assert_called_once_with( requests.AckRequest( + message_id=msg.message_id, ack_id="bogus_ack_id", byte_size=30, time_to_ack=mock.ANY, @@ -305,6 +306,7 @@ def 
test_ack_with_response_exactly_once_delivery_disabled(): future = msg.ack_with_response() put.assert_called_once_with( requests.AckRequest( + message_id=msg.message_id, ack_id="bogus_ack_id", byte_size=30, time_to_ack=mock.ANY, @@ -325,6 +327,7 @@ def test_ack_with_response_exactly_once_delivery_enabled(): future = msg.ack_with_response() put.assert_called_once_with( requests.AckRequest( + message_id=msg.message_id, ack_id="bogus_ack_id", byte_size=30, time_to_ack=mock.ANY, @@ -350,7 +353,12 @@ def test_modify_ack_deadline(): with mock.patch.object(msg._request_queue, "put") as put: msg.modify_ack_deadline(60) put.assert_called_once_with( - requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=None) + requests.ModAckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + seconds=60, + future=None, + ) ) check_call_types(put, requests.ModAckRequest) @@ -360,7 +368,12 @@ def test_modify_ack_deadline_with_response_exactly_once_delivery_disabled(): with mock.patch.object(msg._request_queue, "put") as put: future = msg.modify_ack_deadline_with_response(60) put.assert_called_once_with( - requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=None) + requests.ModAckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + seconds=60, + future=None, + ) ) assert future.result() == AcknowledgeStatus.SUCCESS assert future == message._SUCCESS_FUTURE @@ -374,7 +387,12 @@ def test_modify_ack_deadline_with_response_exactly_once_delivery_enabled(): with mock.patch.object(msg._request_queue, "put") as put: future = msg.modify_ack_deadline_with_response(60) put.assert_called_once_with( - requests.ModAckRequest(ack_id="bogus_ack_id", seconds=60, future=future) + requests.ModAckRequest( + message_id=msg.message_id, + ack_id="bogus_ack_id", + seconds=60, + future=future, + ) ) check_call_types(put, requests.ModAckRequest) diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py 
b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index f45959637b53..b9561d747914 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -import functools import logging import sys import threading @@ -61,6 +60,15 @@ from google.rpc import error_details_pb2 +def create_mock_message(**kwargs): + _message_mock = mock.create_autospec(message.Message, instance=True) + msg = _message_mock.return_value + for k, v in kwargs.items(): + setattr(msg, k, v) + + return msg + + @pytest.mark.parametrize( "exception,expected_cls", [ @@ -80,7 +88,7 @@ def test__wrap_as_exception(exception, expected_cls): def test__wrap_callback_errors_no_error(): - msg = mock.create_autospec(message.Message, instance=True) + msg = create_mock_message() callback = mock.Mock() on_callback_error = mock.Mock() @@ -99,7 +107,7 @@ def test__wrap_callback_errors_no_error(): ], ) def test__wrap_callback_errors_error(callback_error): - msg = mock.create_autospec(message.Message, instance=True) + msg = create_mock_message() callback = mock.Mock(side_effect=callback_error) on_callback_error = mock.Mock() @@ -511,8 +519,8 @@ def test__maybe_release_messages_on_overload(): manager = make_manager( flow_control=types.FlowControl(max_messages=10, max_bytes=1000) ) + msg = create_mock_message(ack_id="ack", size=11) - msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=11) manager._messages_on_hold.put(msg) manager._on_hold_bytes = msg.size @@ -539,9 +547,8 @@ def test_opentelemetry__maybe_release_messages_subscribe_scheduler_span(span_exp # max load is hit. 
_leaser = manager._leaser = mock.create_autospec(leaser.Leaser) fake_leaser_add(_leaser, init_msg_count=8, assumed_msg_size=10) - msg = mock.create_autospec( - message.Message, instance=True, ack_id="ack_foo", size=10 - ) + msg = create_mock_message(ack_id="ack_foo", size=10) + msg.message_id = 3 opentelemetry_data = SubscribeOpenTelemetry(msg) msg.opentelemetry_data = opentelemetry_data @@ -611,7 +618,7 @@ def test__maybe_release_messages_negative_on_hold_bytes_warning( ) manager._callback = lambda msg: msg # pragma: NO COVER - msg = mock.create_autospec(message.Message, instance=True, ack_id="ack", size=17) + msg = create_mock_message(ack_id="ack", size=17) manager._messages_on_hold.put(msg) manager._on_hold_bytes = 5 # too low for some reason @@ -1633,8 +1640,11 @@ def test_close_nacks_internally_queued_messages(): def fake_nack(self): nacked_messages.append(self.data) - MockMsg = functools.partial(mock.create_autospec, message.Message, instance=True) - messages = [MockMsg(data=b"msg1"), MockMsg(data=b"msg2"), MockMsg(data=b"msg3")] + messages = [ + create_message(data=b"msg1"), + create_message(data=b"msg2"), + create_message(data=b"msg3"), + ] for msg in messages: msg.nack = stdlib_types.MethodType(fake_nack, msg) From dd8a39b1fdeae36f746e3f3a96ea53e2bcb32226 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 7 Oct 2025 11:21:24 -0400 Subject: [PATCH 1171/1197] chore: update owlbot post processor image (#1502) --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/owlbot.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 508ba98efebf..9a7846675f55 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447 -# created: 2025-04-14T14:34:43.260858345Z + digest: sha256:4a9e5d44b98e8672e2037ee22bc6b4f8e844a2d75fcb78ea8a4b38510112abc6 +# created: 2025-10-07 diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index 2a131d557bad..c58e5a67aa9b 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -343,7 +343,7 @@ system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], ) -s.move(templated_files, excludes=[".coveragerc", ".github/blunderbuss.yml", ".github/release-please.yml", "README.rst", "docs/index.rst"]) +s.move(templated_files, excludes=[".coveragerc", ".github/**", "README.rst", "docs/**", ".kokoro/**"]) python.py_samples(skip_readmes=True) From e5c3c4fdd51fbc77dc85616157e98e23ba21b51f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 16:41:31 -0400 Subject: [PATCH 1172/1197] feat: support the protocol version in StreamingPullRequest (#1455) Co-authored-by: Owl Bot --- .../pubsub_v1/services/subscriber/client.py | 2 +- .../google/pubsub_v1/types/__init__.py | 5 +- .../google/pubsub_v1/types/pubsub.py | 58 +++++++++++-------- packages/google-cloud-pubsub/mypy.ini | 4 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- .../scripts/fixup_pubsub_v1_keywords.py | 2 +- 6 files changed, 39 insertions(+), 34 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index f5945ba64455..d928d36787e9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -1897,7 +1897,7 @@ def request_generator(): 
# Wrappers in api-core should not automatically pre-fetch the first # stream result, as this breaks the stream when re-opening it. # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 - self._transport.streaming_pull._prefetch_first_result_ = False # type: ignore + self._transport.streaming_pull._prefetch_first_result_ = False # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index c4f020ee201c..85c6b901b1a9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -87,13 +87,12 @@ Encoding, SchemaView, ) -import google.api_core.timeout TimeoutType = Union[ int, float, - google.api_core.timeout.ConstantTimeout, - google.api_core.timeout.ExponentialTimeout, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", ] """The type of the timeout parameter of publisher client methods.""" diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 227da2208a85..9fc8f87ebdd9 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -241,13 +241,13 @@ class State(proto.Enum): Permission denied encountered while consuming data from Kinesis. This can happen if: - - The provided ``aws_role_arn`` does not exist or does not - have the appropriate permissions attached. - - The provided ``aws_role_arn`` is not set up properly for - Identity Federation using ``gcp_service_account``. - - The Pub/Sub SA is not granted the - ``iam.serviceAccounts.getOpenIdToken`` permission on - ``gcp_service_account``. 
+ - The provided ``aws_role_arn`` does not exist or does not + have the appropriate permissions attached. + - The provided ``aws_role_arn`` is not set up properly for + Identity Federation using ``gcp_service_account``. + - The Pub/Sub SA is not granted the + ``iam.serviceAccounts.getOpenIdToken`` permission on + ``gcp_service_account``. PUBLISH_PERMISSION_DENIED (3): Permission denied encountered while publishing to the topic. This can happen if the Pub/Sub SA has not been granted the @@ -347,9 +347,9 @@ class State(proto.Enum): granted the `appropriate permissions `__: - - storage.objects.list: to list the objects in a bucket. - - storage.objects.get: to read the objects in a bucket. - - storage.buckets.get: to verify the bucket exists. + - storage.objects.list: to list the objects in a bucket. + - storage.objects.get: to read the objects in a bucket. + - storage.buckets.get: to verify the bucket exists. PUBLISH_PERMISSION_DENIED (3): Permission denied encountered while publishing to the topic. This can happen if the Pub/Sub SA has not been granted the @@ -1991,11 +1991,11 @@ class Subscription(proto.Message): for the delivery of a message with a given value of ``message_id`` on this subscription: - - The message sent to a subscriber is guaranteed not to be - resent before the message's acknowledgment deadline - expires. - - An acknowledged message will not be resent to a - subscriber. + - The message sent to a subscriber is guaranteed not to be + resent before the message's acknowledgment deadline + expires. + - An acknowledged message will not be resent to a + subscriber. Note that subscribers may still receive multiple copies of a message when ``enable_exactly_once_delivery`` is true if the @@ -2309,10 +2309,10 @@ class PushConfig(proto.Message): The only supported values for the ``x-goog-version`` attribute are: - - ``v1beta1``: uses the push format defined in the v1beta1 - Pub/Sub API. 
- - ``v1`` or ``v1beta2``: uses the push format defined in - the v1 Pub/Sub API. + - ``v1beta1``: uses the push format defined in the v1beta1 + Pub/Sub API. + - ``v1`` or ``v1beta2``: uses the push format defined in the + v1 Pub/Sub API. For example: ``attributes { "x-goog-version": "v1" }`` oidc_token (google.pubsub_v1.types.PushConfig.OidcToken): @@ -2478,12 +2478,11 @@ class State(proto.Enum): Cannot write to the BigQuery table because of permission denied errors. This can happen if - - Pub/Sub SA has not been granted the `appropriate BigQuery - IAM - permissions `__ - - bigquery.googleapis.com API is not enabled for the - project - (`instructions `__) + - Pub/Sub SA has not been granted the `appropriate BigQuery + IAM + permissions `__ + - bigquery.googleapis.com API is not enabled for the project + (`instructions `__) NOT_FOUND (3): Cannot write to the BigQuery table because it does not exist. @@ -3111,6 +3110,11 @@ class StreamingPullRequest(proto.Message): only be set on the initial StreamingPullRequest. If it is set on a subsequent request, the stream will be aborted with status ``INVALID_ARGUMENT``. + protocol_version (int): + Optional. The protocol version used by the client. This + property can only be set on the initial + StreamingPullRequest. If it is set on a subsequent request, + the stream will be aborted with status ``INVALID_ARGUMENT``. 
""" subscription: str = proto.Field( @@ -3145,6 +3149,10 @@ class StreamingPullRequest(proto.Message): proto.INT64, number=8, ) + protocol_version: int = proto.Field( + proto.INT64, + number=10, + ) class StreamingPullResponse(proto.Message): diff --git a/packages/google-cloud-pubsub/mypy.ini b/packages/google-cloud-pubsub/mypy.ini index 00f693e269be..574c5aed394b 100644 --- a/packages/google-cloud-pubsub/mypy.ini +++ b/packages/google-cloud-pubsub/mypy.ini @@ -1,5 +1,3 @@ [mypy] -python_version = 3.8 +python_version = 3.7 namespace_packages = True -# Autogenerated folder - TODO remove this https://github.com/googleapis/python-pubsub/issues/536 -exclude = google/pubsub_v1 diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 1e6003150105..dd0d6423a363 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.31.1" + "version": "0.1.0" }, "snippets": [ { diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 3bb90d7806c9..9f5619e8cf9f 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -68,7 +68,7 @@ class pubsubCallTransformer(cst.CSTTransformer): 'pull': ('subscription', 'max_messages', 'return_immediately', ), 'rollback_schema': ('name', 'revision_id', ), 'seek': ('subscription', 'time', 'snapshot', ), - 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 
'max_outstanding_bytes', ), + 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', 'protocol_version', ), 'update_snapshot': ('snapshot', 'update_mask', ), 'update_subscription': ('subscription', 'update_mask', ), 'update_topic': ('topic', 'update_mask', ), From be75b64b708958658d86d2fba47d9cff23e55c6d Mon Sep 17 00:00:00 2001 From: Chalmer Lowe Date: Wed, 22 Oct 2025 16:18:27 -0400 Subject: [PATCH 1173/1197] feat: Adds Python 3.14 support (#1512) Co-authored-by: Owl Bot --- .../.github/sync-repo-settings.yaml | 2 + .../.github/workflows/unittest.yml | 2 +- .../.kokoro/presubmit/presubmit.cfg | 7 ++- .../.kokoro/samples/python3.14/common.cfg | 40 +++++++++++++++++ .../.kokoro/samples/python3.14/continuous.cfg | 6 +++ .../samples/python3.14/periodic-head.cfg | 11 +++++ .../.kokoro/samples/python3.14/periodic.cfg | 6 +++ .../.kokoro/samples/python3.14/presubmit.cfg | 6 +++ packages/google-cloud-pubsub/CONTRIBUTING.rst | 4 +- .../_protocol/streaming_pull_manager.py | 2 +- .../cloud/pubsub_v1/subscriber/scheduler.py | 23 +++++++++- packages/google-cloud-pubsub/noxfile.py | 23 +++++++--- packages/google-cloud-pubsub/owlbot.py | 2 +- packages/google-cloud-pubsub/setup.py | 4 +- .../testing/constraints-3.14.txt | 13 ++++++ .../publisher/test_publisher_client.py | 45 ++++++++++--------- 16 files changed, 162 insertions(+), 34 deletions(-) create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg create mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg create mode 100644 
packages/google-cloud-pubsub/testing/constraints-3.14.txt diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index 77c1a4fb5cf4..bfde18cc016c 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -27,4 +27,6 @@ branchProtectionRules: - 'unit (3.10)' - 'unit (3.11)' - 'unit (3.12)' + - 'unit (3.13)' + - 'unit (3.14)' - 'cover' diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 6a0429d96101..d59bbb1b82a6 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] + python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg index 8f43917d92fe..227ccdf47138 100644 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg +++ b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg @@ -1 +1,6 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "NOX_SESSION" + value: "system-3.12 blacken mypy format" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg new file mode 100644 index 000000000000..f6feff7057c7 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg @@ -0,0 +1,40 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here 
+action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.14" +} + +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-314" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. +build_file: "python-pubsub/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg new file mode 100644 index 000000000000..f9cfcd33e058 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git 
a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg new file mode 100644 index 000000000000..71cd1e597e38 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg new file mode 100644 index 000000000000..b196817872e9 --- /dev/null +++ b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index f153c3ae7ec8..417b1e9f85d8 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11, 3.12 and 3.13 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -228,6 +228,7 @@ We support: - `Python 3.11`_ - `Python 3.12`_ - `Python 3.13`_ +- `Python 3.14`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ @@ -236,6 +237,7 @@ We support: .. _Python 3.11: https://docs.python.org/3.11/ .. _Python 3.12: https://docs.python.org/3.12/ .. _Python 3.13: https://docs.python.org/3.13/ +.. _Python 3.14: https://docs.python.org/3.14/ Supported versions can be found in our ``noxfile.py`` `config`_. 
diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index d509d80749e6..5132456a2705 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -458,7 +458,7 @@ def dispatcher(self) -> Optional[dispatcher.Dispatcher]: return self._dispatcher @property - def leaser(self) -> Optional[leaser.Leaser]: + def leaser(self) -> Optional["leaser.Leaser"]: """The leaser helper.""" return self._leaser diff --git a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py index a3b3c88e1a16..cc3393bd75e7 100644 --- a/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py +++ b/packages/google-cloud-pubsub/google/cloud/pubsub_v1/subscriber/scheduler.py @@ -21,6 +21,7 @@ import abc import concurrent.futures import queue +import sys import typing from typing import Callable, List, Optional import warnings @@ -37,7 +38,7 @@ class Scheduler(metaclass=abc.ABCMeta): @property @abc.abstractmethod - def queue(self) -> queue.Queue: # pragma: NO COVER + def queue(self) -> "queue.Queue": # pragma: NO COVER """Queue: A concurrency-safe queue specific to the underlying concurrency implementation. @@ -162,7 +163,25 @@ def shutdown( work_item = self._executor._work_queue.get(block=False) if work_item is None: # Exceutor in shutdown mode. continue - dropped_messages.append(work_item.args[0]) # type: ignore[index] + + dropped_message = None + if sys.version_info < (3, 14): + # For Python < 3.14, work_item.args is a tuple of positional arguments. + # The message is expected to be the first argument. 
+ if hasattr(work_item, "args") and work_item.args: + dropped_message = work_item.args[0] # type: ignore[index] + else: + # For Python >= 3.14, work_item.task is (fn, args, kwargs). + # The message is expected to be the first item in the args tuple (task[1]). + if ( + hasattr(work_item, "task") + and len(work_item.task) == 3 + and work_item.task[1] + ): + dropped_message = work_item.task[1][0] + + if dropped_message is not None: + dropped_messages.append(dropped_message) except queue.Empty: pass diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 70e65a571105..7455daf83a7d 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -44,6 +44,7 @@ "3.11", "3.12", "3.13", + "3.14", ] UNIT_TEST_STANDARD_DEPENDENCIES = [ "mock", @@ -234,7 +235,12 @@ def install_unittest_dependencies(session, *constraints): def unit(session, protobuf_implementation): # Install all test dependencies, then install this package in-place. 
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): session.skip("cpp implementation is not supported in python 3.11+") constraints_path = str( @@ -325,15 +331,15 @@ def system(session): if system_test_exists: session.run( "py.test", - "--quiet", + "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_path, *session.posargs, ) - if system_test_folder_exists: + if os.path.exists(system_test_folder_path): session.run( "py.test", - "--quiet", + "--verbose", f"--junitxml=system_{session.python}_sponge_log.xml", system_test_folder_path, *session.posargs, @@ -436,7 +442,7 @@ def docfx(session): ) -@nox.session(python="3.13") +@nox.session(python="3.14") @nox.parametrize( "protobuf_implementation", ["python", "upb", "cpp"], @@ -444,7 +450,12 @@ def docfx(session): def prerelease_deps(session, protobuf_implementation): """Run all tests with prerelease versions of dependencies installed.""" - if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"): + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): session.skip("cpp implementation is not supported in python 3.11+") # Install all dependencies diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index c58e5a67aa9b..abaf534e2a15 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -338,7 +338,7 @@ samples=True, cov_level=99, versions=gcp.common.detect_versions(path="./google", default_first=True), - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], unit_test_dependencies=["flaky"], system_test_python_versions=["3.12"], system_test_external_dependencies=["psutil","flaky"], diff 
--git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 899cefde6c52..6dbea105a82e 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -36,7 +36,8 @@ release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "grpcio >= 1.51.3, < 2.0.0", # https://github.com/googleapis/python-pubsub/issues/609 + "grpcio >= 1.51.3, < 2.0.0; python_version < '3.14'", # https://github.com/googleapis/python-pubsub/issues/609 + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x "google-auth >= 2.14.1, <3.0.0", "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", @@ -88,6 +89,7 @@ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Internet", ], diff --git a/packages/google-cloud-pubsub/testing/constraints-3.14.txt b/packages/google-cloud-pubsub/testing/constraints-3.14.txt new file mode 100644 index 000000000000..1dba0484d801 --- /dev/null +++ b/packages/google-cloud-pubsub/testing/constraints-3.14.txt @@ -0,0 +1,13 @@ +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. +# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. 
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 +google-api-core>=2 +google-auth>=2 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 +grpcio >= 1.75.1 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index d1b7d4a81cc6..651c040baeb8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -308,33 +308,38 @@ def test_opentelemetry_flow_control_exception(creds, span_exporter): future2.result() spans = span_exporter.get_finished_spans() - # Span 1 = Publisher Flow Control Span of first publish - # Span 2 = Publisher Batching Span of first publish - # Span 3 = Publisher Flow Control Span of second publish(raises FlowControlLimitError) - # Span 4 = Publish Create Span of second publish(raises FlowControlLimitError) - assert len(spans) == 4 - failed_flow_control_span = spans[2] - finished_publish_create_span = spans[3] + # Find the spans related to the second, failing publish call + failed_create_span = None + failed_fc_span = None + for span in spans: + if span.name == "topicID create": + if span.status.status_code == trace.StatusCode.ERROR: + failed_create_span = span + elif span.name == "publisher flow control": + if span.status.status_code == trace.StatusCode.ERROR: + failed_fc_span = span + + assert failed_create_span is not None, "Failed 'topicID create' span not found" + assert failed_fc_span is not None, "Failed 'publisher flow control' span not found" # Verify failed flow control span values. 
- assert failed_flow_control_span.name == "publisher flow control" - assert failed_flow_control_span.kind == trace.SpanKind.INTERNAL + assert failed_fc_span.kind == trace.SpanKind.INTERNAL assert ( - failed_flow_control_span.parent.span_id - == finished_publish_create_span.get_span_context().span_id + failed_fc_span.parent.span_id == failed_create_span.get_span_context().span_id ) - assert failed_flow_control_span.status.status_code == trace.StatusCode.ERROR - - assert len(failed_flow_control_span.events) == 1 - assert failed_flow_control_span.events[0].name == "exception" + assert len(failed_fc_span.events) == 1 + assert failed_fc_span.events[0].name == "exception" # Verify finished publish create span values - assert finished_publish_create_span.name == "topicID create" - assert finished_publish_create_span.status.status_code == trace.StatusCode.ERROR - assert len(finished_publish_create_span.events) == 2 - assert finished_publish_create_span.events[0].name == "publish start" - assert finished_publish_create_span.events[1].name == "exception" + assert failed_create_span.status.status_code == trace.StatusCode.ERROR + assert len(failed_create_span.events) >= 1 # Should have at least 'publish start' + assert failed_create_span.events[0].name == "publish start" + # Check for exception event + has_exception_event = any( + event.name == "exception" for event in failed_create_span.events + ) + assert has_exception_event, "Exception event not found in failed create span" @pytest.mark.skipif( From abdfd72db546d7b86648109de359cb9e3b332bd1 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 27 Oct 2025 08:13:43 -0700 Subject: [PATCH 1174/1197] chore(librarian): onboard to librarian (#1515) --- packages/google-cloud-pubsub/.librarian/config.yaml | 6 ++++++ packages/google-cloud-pubsub/.librarian/state.yaml | 10 ++++++++++ 2 files changed, 16 insertions(+) create mode 100644 packages/google-cloud-pubsub/.librarian/config.yaml create mode 100644 
packages/google-cloud-pubsub/.librarian/state.yaml diff --git a/packages/google-cloud-pubsub/.librarian/config.yaml b/packages/google-cloud-pubsub/.librarian/config.yaml new file mode 100644 index 000000000000..111f94dd5cad --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/config.yaml @@ -0,0 +1,6 @@ +global_files_allowlist: + # Allow the container to read and write the root `CHANGELOG.md` + # file during the `release` step to update the latest client library + # versions which are hardcoded in the file. + - path: "CHANGELOG.md" + permissions: "read-write" diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml new file mode 100644 index 000000000000..deaa22de7f20 --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -0,0 +1,10 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest +libraries: + - id: google-cloud-pubsub + version: 2.31.1 + apis: [] + source_roots: + - . 
+ preserve_regex: [] + remove_regex: [] + tag_format: v{version} From 28b3bacda2fbab6546d7865b2574a99f3f94a5fe Mon Sep 17 00:00:00 2001 From: ohmayr Date: Mon, 27 Oct 2025 16:10:53 -0700 Subject: [PATCH 1175/1197] Revert "chore(librarian): onboard to librarian" (#1544) --- packages/google-cloud-pubsub/.librarian/config.yaml | 6 ------ packages/google-cloud-pubsub/.librarian/state.yaml | 10 ---------- 2 files changed, 16 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.librarian/config.yaml delete mode 100644 packages/google-cloud-pubsub/.librarian/state.yaml diff --git a/packages/google-cloud-pubsub/.librarian/config.yaml b/packages/google-cloud-pubsub/.librarian/config.yaml deleted file mode 100644 index 111f94dd5cad..000000000000 --- a/packages/google-cloud-pubsub/.librarian/config.yaml +++ /dev/null @@ -1,6 +0,0 @@ -global_files_allowlist: - # Allow the container to read and write the root `CHANGELOG.md` - # file during the `release` step to update the latest client library - # versions which are hardcoded in the file. - - path: "CHANGELOG.md" - permissions: "read-write" diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml deleted file mode 100644 index deaa22de7f20..000000000000 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ /dev/null @@ -1,10 +0,0 @@ -image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest -libraries: - - id: google-cloud-pubsub - version: 2.31.1 - apis: [] - source_roots: - - . 
- preserve_regex: [] - remove_regex: [] - tag_format: v{version} From 265dc975c25b43e973d66182bfbdc51f83cce368 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Tue, 28 Oct 2025 13:46:24 -0400 Subject: [PATCH 1176/1197] fix: ignore future warnings on python versions (#1546) --- packages/google-cloud-pubsub/pytest.ini | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index fc17230eff6b..165f566b6733 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -23,4 +23,7 @@ filterwarnings = # Remove once we move off credential files https://github.com/googleapis/google-auth-library-python/pull/1812 # Note that these are used in tests only ignore:Your config file at [/home/kbuilder/.docker/config.json] contains these credential helper entries:DeprecationWarning - ignore:The `credentials_file` argument is deprecated because of a potential security risk:DeprecationWarning \ No newline at end of file + ignore:The `credentials_file` argument is deprecated because of a potential security risk:DeprecationWarning + ignore:You are using a Python version.*which Google will stop supporting in new releases of google\.api_core.*:FutureWarning + ignore:You are using a non-supported Python version \(([\d\.]+)\)\. Google will not post any further updates to google\.api_core.*:FutureWarning + ignore:You are using a Python version \(([\d\.]+)\) past its end of life\. 
Google will update google\.api_core.*:FutureWarning From 2918819e8520d76716854308a7e65cb57d136125 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 11:13:09 -0700 Subject: [PATCH 1177/1197] chore(main): release 2.32.0 (#1499) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 14 ++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 18 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 1c48d61d0ee1..7bc1375d5fea 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.31.1" + ".": "2.32.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index da524d1a06b3..cbe6cea281e1 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,20 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.32.0](https://github.com/googleapis/python-pubsub/compare/v2.31.1...v2.32.0) (2025-10-28) + + +### Features + +* Adds Python 3.14 support ([#1512](https://github.com/googleapis/python-pubsub/issues/1512)) ([95a2690](https://github.com/googleapis/python-pubsub/commit/95a26907efecfa5d56b140b7f833640b7fbb21d7)) +* Debug logs ([#1460](https://github.com/googleapis/python-pubsub/issues/1460)) ([b5d4a45](https://github.com/googleapis/python-pubsub/commit/b5d4a458ca9319bebbe3142a1f05d4d4471c8d4d)) +* Support the protocol version in StreamingPullRequest 
([#1455](https://github.com/googleapis/python-pubsub/issues/1455)) ([e6294a1](https://github.com/googleapis/python-pubsub/commit/e6294a1883abf9809cb56d5cd4ad25cc501bc994)) + + +### Bug Fixes + +* Ignore future warnings on python versions ([#1546](https://github.com/googleapis/python-pubsub/issues/1546)) ([8e28dea](https://github.com/googleapis/python-pubsub/commit/8e28dea5b68fc940266d0b1a9f2a07a7b5f10b34)) + ## [2.31.1](https://github.com/googleapis/python-pubsub/compare/v2.31.0...v2.31.1) (2025-07-28) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 79eaa5593b8c..3c958586feba 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.31.1" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 79eaa5593b8c..3c958586feba 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.31.1" # {x-release-please-version} +__version__ = "2.32.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index dd0d6423a363..23d997304077 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "0.1.0" + "version": "2.32.0" }, "snippets": [ { From ca4c8219a74432d6b298898ece47e61dbe210ec8 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 28 Oct 2025 14:43:31 -0400 Subject: [PATCH 1178/1197] chore(python): Add Python 3.14 to python post processor image (#1504) Co-authored-by: Owl Bot Co-authored-by: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> --- packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml | 4 ++-- packages/google-cloud-pubsub/samples/snippets/noxfile.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml index 9a7846675f55..4a311db0294c 100644 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml @@ -13,5 +13,5 @@ # limitations under the License. 
docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4a9e5d44b98e8672e2037ee22bc6b4f8e844a2d75fcb78ea8a4b38510112abc6 -# created: 2025-10-07 + digest: sha256:543e209e7c1c1ffe720eb4db1a3f045a75099304fb19aa11a47dc717b8aae2a9 +# created: 2025-10-09T14:48:42.914384887Z diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index c9a3d1ecbf2a..c326375be9bf 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]: # DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] +ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] From f9c0d735aa44630563365d607bdeaa58f2d9a81c Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 29 Oct 2025 11:29:43 -0700 Subject: [PATCH 1179/1197] chore(librarian): onboard to librarian (#1547) --- packages/google-cloud-pubsub/.librarian/config.yaml | 6 ++++++ packages/google-cloud-pubsub/.librarian/state.yaml | 10 ++++++++++ 2 files changed, 16 insertions(+) create mode 100644 packages/google-cloud-pubsub/.librarian/config.yaml create mode 100644 packages/google-cloud-pubsub/.librarian/state.yaml diff --git a/packages/google-cloud-pubsub/.librarian/config.yaml b/packages/google-cloud-pubsub/.librarian/config.yaml new file mode 100644 index 000000000000..111f94dd5cad --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/config.yaml @@ -0,0 +1,6 @@ +global_files_allowlist: + # Allow the container to read and write the root `CHANGELOG.md` + # file during the `release` step to update the latest client library + # versions which are hardcoded in the file. 
+ - path: "CHANGELOG.md" + permissions: "read-write" diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml new file mode 100644 index 000000000000..deaa22de7f20 --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -0,0 +1,10 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest +libraries: + - id: google-cloud-pubsub + version: 2.31.1 + apis: [] + source_roots: + - . + preserve_regex: [] + remove_regex: [] + tag_format: v{version} From cfbfcd118551730250c814ad7d43074508458059 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 30 Oct 2025 12:40:45 -0700 Subject: [PATCH 1180/1197] feat: Annotate some resource fields with their corresponding API types (#1503) Co-authored-by: Owl Bot Co-authored-by: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> --- .../services/publisher/async_client.py | 4 + .../pubsub_v1/services/publisher/client.py | 41 ++++++++++ .../services/publisher/transports/base.py | 5 +- .../services/publisher/transports/grpc.py | 8 +- .../publisher/transports/grpc_asyncio.py | 8 +- .../services/publisher/transports/rest.py | 5 +- .../schema_service/transports/base.py | 5 +- .../schema_service/transports/grpc.py | 8 +- .../schema_service/transports/grpc_asyncio.py | 8 +- .../schema_service/transports/rest.py | 5 +- .../services/subscriber/async_client.py | 2 + .../pubsub_v1/services/subscriber/client.py | 24 ++++++ .../services/subscriber/transports/base.py | 5 +- .../services/subscriber/transports/grpc.py | 8 +- .../subscriber/transports/grpc_asyncio.py | 8 +- .../services/subscriber/transports/rest.py | 5 +- .../google/pubsub_v1/types/pubsub.py | 52 ++++++++++++- .../scripts/fixup_pubsub_v1_keywords.py | 6 +- .../testing/constraints-3.10.txt | 2 + .../testing/constraints-3.11.txt | 2 + .../testing/constraints-3.12.txt | 2 + 
.../testing/constraints-3.13.txt | 1 + .../testing/constraints-3.9.txt | 2 + .../unit/gapic/pubsub_v1/test_publisher.py | 60 ++++++++++++++- .../unit/gapic/pubsub_v1/test_subscriber.py | 77 +++++++++++++------ 25 files changed, 290 insertions(+), 63 deletions(-) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 52f1f1e30f59..3767a460bc84 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -80,8 +80,12 @@ class PublisherAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = PublisherClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = PublisherClient._DEFAULT_UNIVERSE + crypto_key_path = staticmethod(PublisherClient.crypto_key_path) + parse_crypto_key_path = staticmethod(PublisherClient.parse_crypto_key_path) schema_path = staticmethod(PublisherClient.schema_path) parse_schema_path = staticmethod(PublisherClient.parse_schema_path) + snapshot_path = staticmethod(PublisherClient.snapshot_path) + parse_snapshot_path = staticmethod(PublisherClient.parse_snapshot_path) subscription_path = staticmethod(PublisherClient.subscription_path) parse_subscription_path = staticmethod(PublisherClient.parse_subscription_path) topic_path = staticmethod(PublisherClient.topic_path) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 28cfa680bb07..6debee7eeb8c 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -215,6 +215,30 @@ def transport(self) -> PublisherTransport: """ return self._transport + @staticmethod + def crypto_key_path( + project: str, + location: str, + key_ring: str, + crypto_key: str, 
+ ) -> str: + """Returns a fully-qualified crypto_key string.""" + return "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + + @staticmethod + def parse_crypto_key_path(path: str) -> Dict[str, str]: + """Parses a crypto_key path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/keyRings/(?P.+?)/cryptoKeys/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def schema_path( project: str, @@ -232,6 +256,23 @@ def parse_schema_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/schemas/(?P.+?)$", path) return m.groupdict() if m else {} + @staticmethod + def snapshot_path( + project: str, + snapshot: str, + ) -> str: + """Returns a fully-qualified snapshot string.""" + return "projects/{project}/snapshots/{snapshot}".format( + project=project, + snapshot=snapshot, + ) + + @staticmethod + def parse_snapshot_path(path: str) -> Dict[str, str]: + """Parses a snapshot path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/snapshots/(?P.+?)$", path) + return m.groupdict() if m else {} + @staticmethod def subscription_path( project: str, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py index 0fb41c922213..b9d6a6279508 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/base.py @@ -73,9 +73,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py index 9e8ed1737b1d..e192152d818a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc.py @@ -155,9 +155,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -292,9 +293,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py index 90683cbd0585..14b9fdd067d7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/grpc_asyncio.py @@ -152,8 +152,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -204,9 +205,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. 
This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py index 92ee46e15399..aeb07184cef4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/transports/rest.py @@ -650,9 +650,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. 
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py index c30773b74c59..bfe254e0a234 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/base.py @@ -74,9 +74,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py index 1a746ef43d6f..5bcfd8b9babf 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc.py @@ -155,9 +155,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -292,9 +293,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py index f030d0563f9c..ac2980dedab7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/grpc_asyncio.py @@ -152,8 +152,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -204,9 +205,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py index 6d8902c5dee0..a0d42c2ddba1 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/transports/rest.py @@ -702,9 +702,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. 
+ This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 77f96d6dfeab..34843a0e0c3b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -86,6 +86,8 @@ class SubscriberAsyncClient: _DEFAULT_ENDPOINT_TEMPLATE = SubscriberClient._DEFAULT_ENDPOINT_TEMPLATE _DEFAULT_UNIVERSE = SubscriberClient._DEFAULT_UNIVERSE + listing_path = staticmethod(SubscriberClient.listing_path) + parse_listing_path = staticmethod(SubscriberClient.parse_listing_path) snapshot_path = staticmethod(SubscriberClient.snapshot_path) parse_snapshot_path = staticmethod(SubscriberClient.parse_snapshot_path) subscription_path = staticmethod(SubscriberClient.subscription_path) diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index d928d36787e9..98b11e8c331b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -219,6 +219,30 @@ def transport(self) -> SubscriberTransport: """ return self._transport + @staticmethod + def listing_path( + project: str, + location: str, + data_exchange: str, + listing: str, + ) -> str: + """Returns a fully-qualified listing string.""" + return "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format( + project=project, + location=location, + 
data_exchange=data_exchange, + listing=listing, + ) + + @staticmethod + def parse_listing_path(path: str) -> Dict[str, str]: + """Parses a listing path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/dataExchanges/(?P<data_exchange>.+?)/listings/(?P<listing>.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def snapshot_path( project: str, diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index cc62113f93fc..a25ff562f7ca 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -73,9 +73,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py index 36c77648b347..70516379157b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc.py @@ -157,9 +157,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if a ``channel`` instance is provided. channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): @@ -294,9 +295,10 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 78f8afd4ca2d..ad53fe76c321 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -154,8 +154,9 @@ def create_channel( credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -206,9 +207,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if a ``channel`` instance is provided. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. 
diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py index bb0ee68c6e89..50a247cefc28 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/rest.py @@ -828,9 +828,10 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 9fc8f87ebdd9..26c13fb18c44 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -1210,6 +1210,13 @@ class AwsKinesisFailureReason(proto.Message): Optional. Failure encountered when applying a message transformation to the Pub/Sub message. + This field is a member of `oneof`_ ``reason``. + api_violation_reason (google.pubsub_v1.types.IngestionFailureEvent.ApiViolationReason): + Optional. The message failed to be published + due to an API violation. This is only set when + the size of the data field of the Kinesis record + is zero. + This field is a member of `oneof`_ ``reason``. 
""" @@ -1239,6 +1246,12 @@ class AwsKinesisFailureReason(proto.Message): oneof="reason", message="IngestionFailureEvent.MessageTransformationFailureReason", ) + api_violation_reason: "IngestionFailureEvent.ApiViolationReason" = proto.Field( + proto.MESSAGE, + number=6, + oneof="reason", + message="IngestionFailureEvent.ApiViolationReason", + ) topic: str = proto.Field( proto.STRING, @@ -1423,6 +1436,13 @@ class Topic(proto.Message): Optional. Transforms to be applied to messages published to the topic. Transforms are applied in the order specified. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag + keys/values directly bound to this resource. For + example: + + "123/environment": "production", + "123/costCenter": "marketing". """ class State(proto.Enum): @@ -1491,6 +1511,11 @@ class State(proto.Enum): number=13, message="MessageTransform", ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=14, + ) class PubsubMessage(proto.Message): @@ -2024,6 +2049,13 @@ class Subscription(proto.Message): messages before they are delivered to subscribers. Transforms are applied in the order specified. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag + keys/values directly bound to this resource. For + example: + + "123/environment": "production", + "123/costCenter": "marketing". """ class State(proto.Enum): @@ -2162,6 +2194,11 @@ class AnalyticsHubSubscriptionInfo(proto.Message): number=25, message="MessageTransform", ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=26, + ) class RetryPolicy(proto.Message): @@ -3161,8 +3198,7 @@ class StreamingPullResponse(proto.Message): Attributes: received_messages (MutableSequence[google.pubsub_v1.types.ReceivedMessage]): - Optional. Received Pub/Sub messages. This - will not be empty. + Optional. Received Pub/Sub messages. 
acknowledge_confirmation (google.pubsub_v1.types.StreamingPullResponse.AcknowledgeConfirmation): Optional. This field will only be set if ``enable_exactly_once_delivery`` is set to ``true`` and is @@ -3312,6 +3348,13 @@ class CreateSnapshotRequest(proto.Message): labels (MutableMapping[str, str]): Optional. See `Creating and managing labels `__. + tags (MutableMapping[str, str]): + Optional. Input only. Immutable. Tag + keys/values directly bound to this resource. For + example: + + "123/environment": "production", + "123/costCenter": "marketing". """ name: str = proto.Field( @@ -3327,6 +3370,11 @@ class CreateSnapshotRequest(proto.Message): proto.STRING, number=3, ) + tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) class UpdateSnapshotRequest(proto.Message): diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py index 9f5619e8cf9f..e4c132570d14 100644 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py @@ -42,9 +42,9 @@ class pubsubCallTransformer(cst.CSTTransformer): 'acknowledge': ('subscription', 'ack_ids', ), 'commit_schema': ('name', 'schema', ), 'create_schema': ('parent', 'schema', 'schema_id', ), - 'create_snapshot': ('name', 'subscription', 'labels', ), - 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 'analytics_hub_subscription_info', 'message_transforms', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', 'state', 
'ingestion_data_source_settings', 'message_transforms', ), + 'create_snapshot': ('name', 'subscription', 'labels', 'tags', ), + 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 'analytics_hub_subscription_info', 'message_transforms', 'tags', ), + 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', 'state', 'ingestion_data_source_settings', 'message_transforms', 'tags', ), 'delete_schema': ('name', ), 'delete_schema_revision': ('name', 'revision_id', ), 'delete_snapshot': ('snapshot', ), diff --git a/packages/google-cloud-pubsub/testing/constraints-3.10.txt b/packages/google-cloud-pubsub/testing/constraints-3.10.txt index ad3f0fa58e2d..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.10.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.10.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.11.txt b/packages/google-cloud-pubsub/testing/constraints-3.11.txt index ad3f0fa58e2d..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.11.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.11.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.12.txt b/packages/google-cloud-pubsub/testing/constraints-3.12.txt index ad3f0fa58e2d..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.12.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.12.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.13.txt b/packages/google-cloud-pubsub/testing/constraints-3.13.txt index 2010e549cceb..2ae5a677e852 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.13.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.13.txt @@ -7,6 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 grpc-google-iam-v1>=0 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.9.txt b/packages/google-cloud-pubsub/testing/constraints-3.9.txt index ad3f0fa58e2d..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.9.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.9.txt @@ -2,6 +2,8 @@ # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
google-api-core +google-auth +grpcio proto-plus protobuf grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 223679ad1524..6748ac09ad03 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -8898,9 +8898,38 @@ def test_publisher_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_schema_path(): +def test_crypto_key_path(): project = "squid" - schema = "clam" + location = "clam" + key_ring = "whelk" + crypto_key = "octopus" + expected = "projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}".format( + project=project, + location=location, + key_ring=key_ring, + crypto_key=crypto_key, + ) + actual = PublisherClient.crypto_key_path(project, location, key_ring, crypto_key) + assert expected == actual + + +def test_parse_crypto_key_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "key_ring": "cuttlefish", + "crypto_key": "mussel", + } + path = PublisherClient.crypto_key_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_crypto_key_path(path) + assert expected == actual + + +def test_schema_path(): + project = "winkle" + schema = "nautilus" expected = "projects/{project}/schemas/{schema}".format( project=project, schema=schema, @@ -8911,8 +8940,8 @@ def test_schema_path(): def test_parse_schema_path(): expected = { - "project": "whelk", - "schema": "octopus", + "project": "scallop", + "schema": "abalone", } path = PublisherClient.schema_path(**expected) @@ -8921,6 +8950,29 @@ def test_parse_schema_path(): assert expected == actual +def test_snapshot_path(): + project = "squid" + snapshot = "clam" + expected = "projects/{project}/snapshots/{snapshot}".format( + project=project, + snapshot=snapshot, + ) + actual = PublisherClient.snapshot_path(project, snapshot) + assert expected == actual + + +def test_parse_snapshot_path(): + expected = { + "project": "whelk", + "snapshot": "octopus", + } + path = PublisherClient.snapshot_path(**expected) + + # Check that the path construction is reversible. 
+ actual = PublisherClient.parse_snapshot_path(path) + assert expected == actual + + def test_subscription_path(): project = "oyster" subscription = "nudibranch" diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 417249e97480..9186c815a67c 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -13208,9 +13208,38 @@ def test_subscriber_transport_channel_mtls_with_adc(transport_class): assert transport.grpc_channel == mock_grpc_channel -def test_snapshot_path(): +def test_listing_path(): project = "squid" - snapshot = "clam" + location = "clam" + data_exchange = "whelk" + listing = "octopus" + expected = "projects/{project}/locations/{location}/dataExchanges/{data_exchange}/listings/{listing}".format( + project=project, + location=location, + data_exchange=data_exchange, + listing=listing, + ) + actual = SubscriberClient.listing_path(project, location, data_exchange, listing) + assert expected == actual + + +def test_parse_listing_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "data_exchange": "cuttlefish", + "listing": "mussel", + } + path = SubscriberClient.listing_path(**expected) + + # Check that the path construction is reversible. 
+ actual = SubscriberClient.parse_listing_path(path) + assert expected == actual + + +def test_snapshot_path(): + project = "winkle" + snapshot = "nautilus" expected = "projects/{project}/snapshots/{snapshot}".format( project=project, snapshot=snapshot, @@ -13221,8 +13250,8 @@ def test_snapshot_path(): def test_parse_snapshot_path(): expected = { - "project": "whelk", - "snapshot": "octopus", + "project": "scallop", + "snapshot": "abalone", } path = SubscriberClient.snapshot_path(**expected) @@ -13232,8 +13261,8 @@ def test_parse_snapshot_path(): def test_subscription_path(): - project = "oyster" - subscription = "nudibranch" + project = "squid" + subscription = "clam" expected = "projects/{project}/subscriptions/{subscription}".format( project=project, subscription=subscription, @@ -13244,8 +13273,8 @@ def test_subscription_path(): def test_parse_subscription_path(): expected = { - "project": "cuttlefish", - "subscription": "mussel", + "project": "whelk", + "subscription": "octopus", } path = SubscriberClient.subscription_path(**expected) @@ -13255,8 +13284,8 @@ def test_parse_subscription_path(): def test_topic_path(): - project = "winkle" - topic = "nautilus" + project = "oyster" + topic = "nudibranch" expected = "projects/{project}/topics/{topic}".format( project=project, topic=topic, @@ -13267,8 +13296,8 @@ def test_topic_path(): def test_parse_topic_path(): expected = { - "project": "scallop", - "topic": "abalone", + "project": "cuttlefish", + "topic": "mussel", } path = SubscriberClient.topic_path(**expected) @@ -13278,7 +13307,7 @@ def test_parse_topic_path(): def test_common_billing_account_path(): - billing_account = "squid" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -13288,7 +13317,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "clam", + "billing_account": "nautilus", } path = 
SubscriberClient.common_billing_account_path(**expected) @@ -13298,7 +13327,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "whelk" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -13308,7 +13337,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "octopus", + "folder": "abalone", } path = SubscriberClient.common_folder_path(**expected) @@ -13318,7 +13347,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "oyster" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -13328,7 +13357,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "nudibranch", + "organization": "clam", } path = SubscriberClient.common_organization_path(**expected) @@ -13338,7 +13367,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "cuttlefish" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -13348,7 +13377,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "mussel", + "project": "octopus", } path = SubscriberClient.common_project_path(**expected) @@ -13358,8 +13387,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "winkle" - location = "nautilus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -13370,8 +13399,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "scallop", - "location": "abalone", + "project": "cuttlefish", + "location": "mussel", } path = SubscriberClient.common_location_path(**expected) From 3af9ae4b9e4efae5b6691be118bd27bb6b1168a7 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" 
<55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 30 Oct 2025 13:07:09 -0700 Subject: [PATCH 1181/1197] chore(main): release 2.33.0 (#1553) Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> --- .../.release-please-manifest.json | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 19 +++++++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 23 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json index 7bc1375d5fea..7d35f2a0f0bc 100644 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ b/packages/google-cloud-pubsub/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "2.32.0" + ".": "2.33.0" } \ No newline at end of file diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index cbe6cea281e1..ac71546a8567 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -5,6 +5,25 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.33.0](https://github.com/googleapis/python-pubsub/compare/v2.32.0...v2.33.0) (2025-10-30) + + +### Features + +* Add AwsKinesisFailureReason.ApiViolationReason ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) +* Add tags to Subscription, Topic, and CreateSnapshotRequest messages for use in CreateSubscription, CreateTopic, and CreateSnapshot requests respectively ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) +* Annotate some resource fields with their corresponding API types ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) + + +### Bug Fixes + +* Deprecate credentials_file 
argument ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) + + +### Documentation + +* A comment for field `received_messages` in message `.google.pubsub.v1.StreamingPullResponse` is changed ([ac68093](https://github.com/googleapis/python-pubsub/commit/ac6809350758306f28fa1ab46939bc438b5a5e19)) + ## [2.32.0](https://github.com/googleapis/python-pubsub/compare/v2.31.1...v2.32.0) (2025-10-28) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 3c958586feba..0c5de5c03afe 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "2.33.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 3c958586feba..0c5de5c03afe 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.32.0" # {x-release-please-version} +__version__ = "2.33.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index 23d997304077..bcad2272f09b 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.32.0" + "version": "2.33.0" }, "snippets": [ { From bbfbb7bb3cdfa282f91c6fc0845acaa0f2acfdd5 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Thu, 30 Oct 2025 13:27:25 -0700 Subject: [PATCH 1182/1197] chore: cleanup owlbot and release please files (#1548) This PR cleans up files for owl bot post processor and release please. --- .../.github/.OwlBot.lock.yaml | 17 ------------ .../google-cloud-pubsub/.github/.OwlBot.yaml | 26 ------------------- .../.github/release-please.yml | 12 --------- .../.github/release-trigger.yml | 2 -- .../google-cloud-pubsub/.librarian/state.yaml | 2 +- 5 files changed, 1 insertion(+), 58 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml delete mode 100644 packages/google-cloud-pubsub/.github/.OwlBot.yaml delete mode 100644 packages/google-cloud-pubsub/.github/release-please.yml delete mode 100644 packages/google-cloud-pubsub/.github/release-trigger.yml diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml deleted file mode 100644 index 4a311db0294c..000000000000 --- a/packages/google-cloud-pubsub/.github/.OwlBot.lock.yaml +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:543e209e7c1c1ffe720eb4db1a3f045a75099304fb19aa11a47dc717b8aae2a9 -# created: 2025-10-09T14:48:42.914384887Z diff --git a/packages/google-cloud-pubsub/.github/.OwlBot.yaml b/packages/google-cloud-pubsub/.github/.OwlBot.yaml deleted file mode 100644 index 0bfe82f743cb..000000000000 --- a/packages/google-cloud-pubsub/.github/.OwlBot.yaml +++ /dev/null @@ -1,26 +0,0 @@ -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -docker: - image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - -deep-remove-regex: - - /owl-bot-staging - -deep-copy-regex: - - source: /google/pubsub/(v.*)/.*-py/(.*) - dest: /owl-bot-staging/$1/$2 - -begin-after-commit-hash: 40278112d2922ec917140dcb5cc6d5ef2923aeb2 - diff --git a/packages/google-cloud-pubsub/.github/release-please.yml b/packages/google-cloud-pubsub/.github/release-please.yml deleted file mode 100644 index fe749ff6b15d..000000000000 --- a/packages/google-cloud-pubsub/.github/release-please.yml +++ /dev/null @@ -1,12 +0,0 @@ -releaseType: python -handleGHRelease: true -manifest: true -# NOTE: this section is generated by synthtool.languages.python -# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py -branches: -- branch: v1 - handleGHRelease: true - releaseType: python -- branch: v0 - handleGHRelease: true - releaseType: python diff --git a/packages/google-cloud-pubsub/.github/release-trigger.yml b/packages/google-cloud-pubsub/.github/release-trigger.yml deleted file mode 100644 index aa0d30a3ebb8..000000000000 --- a/packages/google-cloud-pubsub/.github/release-trigger.yml +++ /dev/null @@ -1,2 +0,0 @@ -enabled: true -multiScmName: python-pubsub diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index deaa22de7f20..4d7702c552d4 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -1,7 +1,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest libraries: - id: google-cloud-pubsub - version: 2.31.1 + version: 2.32.0 apis: [] source_roots: - . 
From 15a49e0bd61a132f6de228aa31cc62281a26c16e Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 30 Oct 2025 17:05:03 -0400 Subject: [PATCH 1183/1197] chore(librarian): remove release please configuration files (#1554) Towards https://github.com/googleapis/librarian/issues/2457 --- .../.release-please-manifest.json | 4 --- .../release-please-config.json | 25 ------------------- 2 files changed, 29 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.release-please-manifest.json delete mode 100644 packages/google-cloud-pubsub/release-please-config.json diff --git a/packages/google-cloud-pubsub/.release-please-manifest.json b/packages/google-cloud-pubsub/.release-please-manifest.json deleted file mode 100644 index 7d35f2a0f0bc..000000000000 --- a/packages/google-cloud-pubsub/.release-please-manifest.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - ".": "2.33.0" -} - \ No newline at end of file diff --git a/packages/google-cloud-pubsub/release-please-config.json b/packages/google-cloud-pubsub/release-please-config.json deleted file mode 100644 index 9093524152e4..000000000000 --- a/packages/google-cloud-pubsub/release-please-config.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", - "packages": { - ".": { - "release-type": "python", - "extra-files": [ - "google/pubsub/gapic_version.py", - "google/pubsub_v1/gapic_version.py", - { - "type": "json", - "path": "samples/generated_samples/snippet_metadata_google.pubsub.v1.json", - "jsonpath": "$.clientLibrary.version" - } - ] - } - }, - "release-type": "python", - "plugins": [ - { - "type": "sentence-case" - } - ], - "initial-version": "0.1.0" -} - \ No newline at end of file From 9366428f1d26862777bddacf4f55db03bff907f0 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Wed, 5 Nov 2025 05:12:40 -0500 Subject: [PATCH 1184/1197] chore(librarian): onboard to librarian, including generate (#1552) Towards 
https://github.com/googleapis/librarian/issues/2457 --- .../.github/workflows/lint.yml | 2 +- .../.github/workflows/unittest.yml | 2 +- .../generator-input/.repo-metadata.json | 18 + .../.librarian/generator-input/noxfile.py | 556 ++++++++++++++++++ .../.librarian/generator-input/owlbot.py | 352 +++++++++++ .../.librarian/generator-input/setup.py | 104 ++++ .../google-cloud-pubsub/.librarian/state.yaml | 12 +- .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- packages/google-cloud-pubsub/noxfile.py | 4 +- packages/google-cloud-pubsub/owlbot.py | 2 +- .../testing/constraints-3.14.txt | 2 +- 12 files changed, 1047 insertions(+), 11 deletions(-) create mode 100644 packages/google-cloud-pubsub/.librarian/generator-input/.repo-metadata.json create mode 100644 packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py create mode 100644 packages/google-cloud-pubsub/.librarian/generator-input/owlbot.py create mode 100644 packages/google-cloud-pubsub/.librarian/generator-input/setup.py diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index 46a3ff38f31d..6204983fd945 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -12,7 +12,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v6 with: - python-version: "3.13" + python-version: "3.14" - name: Install nox run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index d59bbb1b82a6..04e4113044fe 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -45,7 +45,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.14" - name: Install 
coverage run: | python -m pip install --upgrade setuptools pip wheel diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/.repo-metadata.json b/packages/google-cloud-pubsub/.librarian/generator-input/.repo-metadata.json new file mode 100644 index 000000000000..8d12e4cc0b62 --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/generator-input/.repo-metadata.json @@ -0,0 +1,18 @@ +{ + "name": "pubsub", + "name_pretty": "Google Cloud Pub/Sub", + "product_documentation": "https://cloud.google.com/pubsub/docs/", + "client_documentation": "https://cloud.google.com/python/docs/reference/pubsub/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559741", + "release_level": "stable", + "language": "python", + "repo": "googleapis/python-pubsub", + "distribution_name": "google-cloud-pubsub", + "api_id": "pubsub.googleapis.com", + "requires_billing": true, + "default_version": "v1", + "codeowner_team": "@googleapis/api-pubsub", + "api_shortname": "pubsub", + "library_type": "GAPIC_COMBO", + "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications." +} diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py b/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py new file mode 100644 index 000000000000..fd552166cba0 --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py @@ -0,0 +1,556 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! + +from __future__ import absolute_import + +import os +import pathlib +import re +import shutil +from typing import Dict, List +import warnings + +import nox + +FLAKE8_VERSION = "flake8==6.1.0" +BLACK_VERSION = "black[jupyter]==23.7.0" +ISORT_VERSION = "isort==5.11.0" +LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"] + +MYPY_VERSION = "mypy==1.10.0" + +DEFAULT_PYTHON_VERSION = "3.14" + +UNIT_TEST_PYTHON_VERSIONS: List[str] = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", + "3.12", + "3.13", + "3.14", +] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = [] +UNIT_TEST_DEPENDENCIES: List[str] = [ + "flaky", +] +UNIT_TEST_EXTRAS: List[str] = [] +UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"] +SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = [ + "psutil", + "flaky", +] +SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_DEPENDENCIES: List[str] = [] +SYSTEM_TEST_EXTRAS: List[str] = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {} + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "mypy", + # 
https://github.com/googleapis/python-pubsub/pull/552#issuecomment-1016256936 + # "mypy_samples", # TODO: uncomment when the check passes + "docs", + "docfx", + "format", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy(session): + """Run type checks with mypy.""" + session.install("-e", ".[all]") + session.install(MYPY_VERSION) + + # Version 2.1.1 of google-api-core version is the first type-checked release. + # Version 2.2.0 of google-cloud-core version is the first type-checked release. + session.install( + "google-api-core[grpc]>=2.1.1", "google-cloud-core>=2.2.0", "types-requests" + ) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. + # Exclude types-protobuf==4.24.0.20240106 + # See https://github.com/python/typeshed/issues/11254 + session.install("types-protobuf!=4.24.0.20240106", "types-setuptools") + + # TODO: Only check the hand-written layer, the generated code does not pass + # mypy checks yet. + # https://github.com/googleapis/gapic-generator-python/issues/1092 + # TODO: Re-enable mypy checks once we merge, since incremental checks are failing due to protobuf upgrade + # session.run("mypy", "-p", "google.cloud", "--exclude", "google/pubsub_v1/") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def mypy_samples(session): + """Run type checks with mypy.""" + + session.install("-e", ".[all]") + + session.install("pytest") + session.install(MYPY_VERSION) + + # Just install the type info directly, since "mypy --install-types" might + # require an additional pass. 
+ session.install( + "types-mock", "types-protobuf", "types-setuptools", "types-requests" + ) + + session.run( + "mypy", + "--config-file", + str(CURRENT_DIRECTORY / "samples" / "snippets" / "mypy.ini"), + "--no-incremental", # Required by warn-unused-configs from mypy.ini to work + "samples/", + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install(FLAKE8_VERSION, BLACK_VERSION) + session.run( + "black", + "--check", + *LINT_PATHS, + ) + session.run("flake8", "google", "tests") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("setuptools", "docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") + + +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def unit(session, protobuf_implementation): + # Install all test dependencies, then install this package in-place. + + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + install_unittest_dependencies(session, "-c", constraints_path) + + # TODO(https://github.com/googleapis/synthtool/issues/1976): + # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped. + # The 'cpp' implementation requires Protobuf<4. + if protobuf_implementation == "cpp": + session.install("protobuf<4") + + # Run py.test against the unit tests. + session.run( + "py.test", + "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", + "--cov-append", + "--cov-config=.coveragerc", + "--cov-report=", + "--cov-fail-under=0", + os.path.join("tests", "unit"), + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + +def install_systemtest_dependencies(session, *constraints): + # Use pre-release gRPC for system tests. 
+ # Exclude version 1.52.0rc1 which has a known issue. + # See https://github.com/grpc/grpc/issues/32163 + session.install("--pre", "grpcio!=1.52.0rc1") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) +def system(session): + """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") + + system_test_exists = os.path.exists(system_test_path) + system_test_folder_exists = os.path.exists(system_test_folder_path) + # Sanity check: only run tests if found. + if not system_test_exists and not system_test_folder_exists: + session.skip("System tests were not found") + + install_systemtest_dependencies(session, "-c", constraints_path) + + # Run py.test against the system tests. 
+ if system_test_exists: + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=99") + + session.run("coverage", "erase") + + +# py > 3.10 not supported yet +@nox.session(python="3.10") +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +# py > 3.10 not supported yet +@nox.session(python="3.10") +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python="3.14") +@nox.parametrize( + "protobuf_implementation", + ["python", "upb", "cpp"], +) +def prerelease_deps(session, protobuf_implementation): + """Run all tests with prerelease versions of dependencies installed.""" + + if protobuf_implementation == "cpp" and session.python in ( + "3.11", + "3.12", + "3.13", + "3.14", + ): + session.skip("cpp implementation is not supported in python 3.11+") + + # Install all dependencies + session.install("-e", ".[all, tests, tracing]") + unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES + session.install(*unit_deps_all) + system_deps_all = ( + SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES + ) + session.install(*system_deps_all) + + # Because we test minimum dependency versions on the minimum Python + # version, the first version we test with in the unit tests sessions has a + # constraints file containing all dependencies and extras. 
+ with open( + CURRENT_DIRECTORY + / "testing" + / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt", + encoding="utf-8", + ) as constraints_file: + constraints_text = constraints_file.read() + + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + + session.install(*constraints_deps) + + prerel_deps = [ + "protobuf", + # dependency of grpc + "six", + "grpc-google-iam-v1", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "google-api-core", + "google-auth", + "proto-plus", + "google-cloud-testutils", + # dependencies of google-cloud-testutils" + "click", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + # Print out prerelease package versions + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") + + session.run( + "py.test", + "tests/unit", + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + + system_test_path = os.path.join("tests", "system.py") + system_test_folder_path = os.path.join("tests", "system") + + # Only run system tests if found. 
+ if os.path.exists(system_test_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) + if os.path.exists(system_test_folder_path): + session.run( + "py.test", + "--verbose", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + env={ + "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation, + }, + ) diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/owlbot.py b/packages/google-cloud-pubsub/.librarian/generator-input/owlbot.py new file mode 100644 index 000000000000..5e6af79555f7 --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/generator-input/owlbot.py @@ -0,0 +1,352 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from pathlib import Path +import re +import shutil +import textwrap + +import synthtool as s +import synthtool.gcp as gcp +from synthtool.languages import python + +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +clean_up_generated_samples = True + +# Load the default version defined in .repo-metadata.json. 
+default_version = json.load(open(".repo-metadata.json", "rt")).get( + "default_version" +) + +for library in s.get_staging_dirs(default_version): + if clean_up_generated_samples: + shutil.rmtree("samples/generated_samples", ignore_errors=True) + clean_up_generated_samples = False + + # DEFAULT SCOPES and SERVICE_ADDRESS are being used. so let's force them in. + s.replace( + library / f"google/pubsub_{library.name}/services/*er/*client.py", + r"""DEFAULT_ENDPOINT = \"pubsub\.googleapis\.com\"""", + """ + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/pubsub', + ) + + SERVICE_ADDRESS = "pubsub.googleapis.com:443" + \"""The default address of the service.\""" + + \g<0>""", + ) + + # Modify GRPC options in transports. + count = s.replace( + [ + library / f"google/pubsub_{library.name}/services/*/transports/grpc*", + library / f"tests/unit/gapic/pubsub_{library.name}/*", + ], + "options=\[.*?\]", + """options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ("grpc.max_metadata_size", 4 * 1024 * 1024), + ("grpc.keepalive_time_ms", 30000), + ]""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 15: + raise Exception("Expected replacements for gRPC channel options not made.") + + # If the emulator is used, force an insecure gRPC channel to avoid SSL errors. + clients_to_patch = [ + library / f"google/pubsub_{library.name}/services/publisher/client.py", + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + library / f"google/pubsub_{library.name}/services/schema_service/client.py", + ] + err_msg = ( + "Expected replacements for gRPC channel to use with the emulator not made." 
+ ) + + count = s.replace(clients_to_patch, r"import os", "import functools\n\g<0>") + + if count < len(clients_to_patch): + raise Exception(err_msg) + + count = s.replace( + clients_to_patch, + f"from \.transports\.base", + "\nimport grpc\n\g<0>", + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + # TODO(https://github.com/googleapis/python-pubsub/issues/1349): Move the emulator + # code below to test files. + count = s.replace( + clients_to_patch, + r"# initialize with the provided callable or the passed in class", + """\g<0> + + emulator_host = os.environ.get("PUBSUB_EMULATOR_HOST") + if emulator_host: + if issubclass(transport_init, type(self)._transport_registry["grpc"]): # type: ignore + channel = grpc.insecure_channel(target=emulator_host) + else: + channel = grpc.aio.insecure_channel(target=emulator_host) + transport_init = functools.partial(transport_init, channel=channel) + + """, + ) + + if count < len(clients_to_patch): + raise Exception(err_msg) + + # Monkey patch the streaming_pull() GAPIC method to disable pre-fetching stream + # results. + s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + ( + r"# Wrap the RPC method.*\n" + r"\s+# and friendly error.*\n" + r"\s+rpc = self\._transport\._wrapped_methods\[self\._transport\.streaming_pull\]" + ), + """ + # Wrappers in api-core should not automatically pre-fetch the first + # stream result, as this breaks the stream when re-opening it. + # https://github.com/googleapis/python-pubsub/issues/93#issuecomment-630762257 + self._transport.streaming_pull._prefetch_first_result_ = False + + \g<0>""", + ) + + # Emit deprecation warning if return_immediately flag is set with synchronous pull. 
+ s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/*client.py", + r"from google.pubsub_v1 import gapic_version as package_version", + "import warnings\n\g<0>", + ) + + count = s.replace( + library / f"google/pubsub_{library.name}/services/subscriber/*client.py", + r""" + ([^\n\S]+(?:async\ )?def\ pull\(.*?->\ pubsub\.PullResponse:.*?) + ((?P[^\n\S]+)\#\ Wrap\ the\ RPC\ method) + """, + textwrap.dedent( + """ + \g<1> + \gif request.return_immediately: + \g warnings.warn( + \g "The return_immediately flag is deprecated and should be set to False.", + \g category=DeprecationWarning, + \g ) + + \g<2>""" + ), + flags=re.MULTILINE | re.DOTALL | re.VERBOSE, + ) + + if count != 2: + raise Exception("Too many or too few replacements in pull() methods.") + + # Silence deprecation warnings in pull() method flattened parameter tests. + s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + "import os", + "\g<0>\nimport warnings", + ) + + count = s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) + \s+(client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 1: + raise Exception("Catch warnings replacement failed.") + + count = s.replace( + library / f"tests/unit/gapic/pubsub_{library.name}/test_subscriber.py", + textwrap.dedent( + r""" + ([^\n\S]+# Call the method with a truthy value for each flattened field, + [^\n\S]+# using the keyword arguments to the method\.) 
+ \s+response = (await client\.pull\(.*?\))""" + ), + """\n\g<1> + with warnings.catch_warnings(): + warnings.simplefilter("ignore", category=DeprecationWarning) + \g<2>""", + flags=re.MULTILINE | re.DOTALL, + ) + + if count < 1: + raise Exception("Catch warnings replacement failed.") + + # Make sure that client library version is present in user agent header. + count = s.replace( + [ + library + / f"google/pubsub_{library.name}/services/publisher/async_client.py", + library / f"google/pubsub_{library.name}/services/publisher/client.py", + library + / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + library + / f"google/pubsub_{library.name}/services/schema_service/async_client.py", + library / f"google/pubsub_{library.name}/services/schema_service/client.py", + library + / f"google/pubsub_{library.name}/services/schema_service/transports/base.py", + library + / f"google/pubsub_{library.name}/services/subscriber/async_client.py", + library / f"google/pubsub_{library.name}/services/subscriber/client.py", + library + / f"google/pubsub_{library.name}/services/subscriber/transports/base.py", + ], + r"""gapic_version=package_version.__version__""", + "client_library_version=package_version.__version__", + ) + + if count < 1: + raise Exception("client_library_version replacement failed.") + + # Allow timeout to be an instance of google.api_core.timeout.* + count = s.replace( + library / f"google/pubsub_{library.name}/types/__init__.py", + r"from \.pubsub import \(", + "from typing import Union\n\n\g<0>", + ) + + if count < 1: + raise Exception("Catch timeout replacement 1 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/types/__init__.py", + r"__all__ = \(\n", + textwrap.dedent( + '''\ + TimeoutType = Union[ + int, + float, + "google.api_core.timeout.ConstantTimeout", + "google.api_core.timeout.ExponentialTimeout", + ] + """The type of the timeout parameter of publisher client methods.""" + + \g<0> "TimeoutType",''' + ), 
+ ) + + if count < 1: + raise Exception("Catch timeout replacement 2 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"from google.api_core import retry as retries.*\n", + "\g<0>from google.api_core import timeout as timeouts # type: ignore\n", + ) + + if count < 1: + raise Exception("Catch timeout replacement 3 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + f"from google\.pubsub_{library.name}\.types import pubsub", + f"\g<0>\nfrom google.pubsub_{library.name}.types import TimeoutType", + ) + + if count < 1: + raise Exception("Catch timeout replacement 4 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"(\s+)timeout: Union\[float, object\] = gapic_v1.method.DEFAULT.*\n", + f"\g<1>timeout: TimeoutType = gapic_{library.name}.method.DEFAULT,", + ) + + if count < 1: + raise Exception("Catch timeout replacement 5 failed.") + + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/*client.py", + r"([^\S\r\n]+)timeout \(float\): (.*)\n", + ("\g<1>timeout (TimeoutType):\n" "\g<1> \g<2>\n"), + ) + + if count < 1: + raise Exception("Catch timeout replacement 6 failed.") + + # Override the default max retry deadline for publisher methods. + count = s.replace( + library / f"google/pubsub_{library.name}/services/publisher/transports/base.py", + r"deadline=60\.0", + "deadline=600.0", + ) + if count < 9: + raise Exception( + "Default retry deadline not overriden for all publisher methods." + ) + + # The namespace package declaration in google/cloud/__init__.py should be excluded + # from coverage. 
+ count = s.replace( + library / ".coveragerc", + "google/pubsub/__init__.py", + """google/cloud/__init__.py + google/pubsub/__init__.py""", + ) + + if count < 1: + raise Exception(".coveragerc replacement failed.") + + s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) +s.remove_staging_dirs() + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + microgenerator=True, + samples=True, + cov_level=99, + versions=gcp.common.detect_versions(path="./google", default_first=True), + unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + unit_test_dependencies=["flaky"], + system_test_python_versions=["3.12"], + system_test_external_dependencies=["psutil","flaky"], +) +s.move(templated_files, excludes=[".coveragerc", ".github/**", "README.rst", "docs/**", ".kokoro/**"]) + +python.py_samples(skip_readmes=True) + +# run format session for all directories which have a noxfile +for noxfile in Path(".").glob("**/noxfile.py"): + s.shell.run(["nox", "-s", "blacken"], cwd=noxfile.parent, hide_output=False) diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/setup.py b/packages/google-cloud-pubsub/.librarian/generator-input/setup.py new file mode 100644 index 000000000000..6dbea105a82e --- /dev/null +++ b/packages/google-cloud-pubsub/.librarian/generator-input/setup.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = "google-cloud-pubsub" + + +description = "Google Cloud Pub/Sub API client library" + +version = {} +with open(os.path.join(package_root, "google/pubsub/gapic_version.py")) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "grpcio >= 1.51.3, < 2.0.0; python_version < '3.14'", # https://github.com/googleapis/python-pubsub/issues/609 + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + # google-api-core >= 1.34.0 is allowed in order to support google-api-core 1.x + "google-auth >= 2.14.1, <3.0.0", + "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0", + "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'", + "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'", + "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", + "grpc-google-iam-v1 >= 0.12.4, < 1.0.0", + "grpcio-status >= 1.33.2", + "opentelemetry-api <= 1.22.0; python_version<='3.7'", + "opentelemetry-api >= 1.27.0; python_version>='3.8'", + "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", + "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", +] +extras = {"libcst": "libcst >= 0.3.10"} +url = 
"https://github.com/googleapis/python-pubsub" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.find_namespace_packages() + if package.startswith("google") +] + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + install_requires=dependencies, + extras_require=extras, + python_requires=">=3.7", + scripts=["scripts/fixup_pubsub_v1_keywords.py"], + include_package_data=True, + zip_safe=False, +) diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index 4d7702c552d4..bfadaea65bbb 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -1,10 +1,16 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest libraries: - id: google-cloud-pubsub - version: 2.32.0 - apis: [] + version: 2.33.0 + last_generated_commit: 
fee5b32df810adbd07d6a20bd97d9239937ef6e4 + apis: + - path: google/pubsub/v1 + service_config: pubsub_v1.yaml source_roots: - . preserve_regex: [] - remove_regex: [] + remove_regex: + - google/pubsub + - google/pubsub_v1 + - tests/unit/gapic tag_format: v{version} diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 0c5de5c03afe..1eb64a9d6513 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 0c5de5c03afe..1eb64a9d6513 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2022 Google LLC +# Copyright 2025 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index 7455daf83a7d..fd552166cba0 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -30,11 +30,11 @@ FLAKE8_VERSION = "flake8==6.1.0" BLACK_VERSION = "black[jupyter]==23.7.0" ISORT_VERSION = "isort==5.11.0" -LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"] MYPY_VERSION = "mypy==1.10.0" -DEFAULT_PYTHON_VERSION = "3.13" +DEFAULT_PYTHON_VERSION = "3.14" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ "3.7", diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/owlbot.py index abaf534e2a15..5e6af79555f7 100644 --- a/packages/google-cloud-pubsub/owlbot.py +++ b/packages/google-cloud-pubsub/owlbot.py @@ -326,7 +326,7 @@ if count < 1: raise Exception(".coveragerc replacement failed.") - s.move([library], excludes=["**/gapic_version.py", "noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) + s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- diff --git a/packages/google-cloud-pubsub/testing/constraints-3.14.txt b/packages/google-cloud-pubsub/testing/constraints-3.14.txt index 1dba0484d801..2ae5a677e852 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.14.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.14.txt @@ -7,7 +7,7 @@ # Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 +grpcio>=1 proto-plus>=1 protobuf>=6 grpc-google-iam-v1>=0 -grpcio >= 1.75.1 From 558b64495826089a679330a635f7f048f07e6d9c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Thu, 6 Nov 2025 15:59:02 -0500 Subject: [PATCH 1185/1197] 
chore(librarian): specify all generated files in remove_regex (#1556) This PR resolves the following issue when running `librarian generate`. Librarian now prevents generated files from clobbering existing files. All generated files must match `remove_regex` otherwise we will see errotr like `file existed in destination` ``` time=2025-11-06T20:18:57.185Z level=INFO msg="=== Docker end =================================================================" time=2025-11-06T20:18:57.185Z level=INFO msg="cleaning directories" "source roots"=[.] time=2025-11-06T20:18:57.200Z level=INFO msg="copying library files" id=google-cloud-pubsub destination=/usr/local/google/home/partheniou/git/python-pubsub source=/tmp/librarian-2318048050/output/google-cloud-pubsub time=2025-11-06T20:18:57.201Z level=ERROR msg="failed to generate library" id=google-cloud-pubsub err="file existed in destination: /usr/local/google/home/partheniou/git/python-pubsub/.coveragerc" time=2025-11-06T20:18:57.201Z level=INFO msg="generation statistics" all=1 successes=0 skipped=0 failures=1 time=2025-11-06T20:18:57.201Z level=ERROR msg="librarian command failed" err="all 1 libraries failed to generate (skipped: 0)" ``` Command used ``` librarian generate --generate-unchanged ``` --- .../google-cloud-pubsub/.librarian/state.yaml | 29 ++++++++++++++++--- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index bfadaea65bbb..c7c8712746b9 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -2,7 +2,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-li libraries: - id: google-cloud-pubsub version: 2.33.0 - last_generated_commit: fee5b32df810adbd07d6a20bd97d9239937ef6e4 + last_generated_commit: 9fcfbea0aa5b50fa22e190faceb073d74504172b apis: - path: google/pubsub/v1 service_config: pubsub_v1.yaml @@ 
-10,7 +10,28 @@ libraries: - . preserve_regex: [] remove_regex: - - google/pubsub - - google/pubsub_v1 - - tests/unit/gapic + - ^google/pubsub + - ^google/pubsub_v1 + - ^tests/unit/gapic + - ^tests/__init__.py + - ^tests/unit/__init__.py + - ^.coveragerc + - ^.flake8 + - ^.pre-commit-config.yaml + - ^.repo-metadata.json + - ^.trampolinerc + - ^LICENSE + - ^MANIFEST.in + - ^SECURITY.md + - ^mypy.ini + - ^noxfile.py + - ^owlbot.py + - ^renovate.json + - ^samples/AUTHORING_GUIDE.md + - ^samples/CONTRIBUTING.md + - ^samples/generated_samples + - ^scripts/fixup_pubsub_v1_keywords.py + - ^setup.py + - ^testing/constraints-3.9 + - ^testing/constraints-3.1 tag_format: v{version} From bd96653e8a35c9cbdde1dc81bbca7f86d8794067 Mon Sep 17 00:00:00 2001 From: ohmayr Date: Wed, 12 Nov 2025 11:47:45 -0800 Subject: [PATCH 1186/1197] chore: update container sha for librarian (#1557) This PR updates the librarian container sha for v1.0.0 support. --------- Co-authored-by: Anthonios Partheniou --- packages/google-cloud-pubsub/.librarian/state.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index c7c8712746b9..98b1fe52dff2 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -1,4 +1,4 @@ -image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator:latest +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:ce48ed695c727f7e13efd1fd68f466a55a0d772c87b69158720cec39965bc8b2 libraries: - id: google-cloud-pubsub version: 2.33.0 From 8c2f20ff6c50753f57756ddcd7adfd8756b05d32 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 16 Dec 2025 16:28:10 -0500 Subject: [PATCH 1187/1197] feat(gapic): support mTLS certificates when available (#1566) chore: librarian update image pull request: 
20251216T203515Z --- .../{owlbot.py => librarian.py} | 0 .../.librarian/generator-input/setup.py | 1 - .../google-cloud-pubsub/.librarian/state.yaml | 2 +- .../google/pubsub_v1/__init__.py | 104 +++++++ .../pubsub_v1/services/publisher/client.py | 46 +++- .../services/schema_service/client.py | 46 +++- .../pubsub_v1/services/subscriber/client.py | 46 +++- .../{owlbot.py => librarian.py} | 0 packages/google-cloud-pubsub/noxfile.py | 4 + packages/google-cloud-pubsub/pytest.ini | 9 + .../scripts/fixup_pubsub_v1_keywords.py | 210 --------------- packages/google-cloud-pubsub/setup.py | 5 +- .../unit/gapic/pubsub_v1/test_publisher.py | 255 ++++++++++++++++-- .../gapic/pubsub_v1/test_schema_service.py | 255 ++++++++++++++++-- .../unit/gapic/pubsub_v1/test_subscriber.py | 255 ++++++++++++++++-- 15 files changed, 896 insertions(+), 342 deletions(-) rename packages/google-cloud-pubsub/.librarian/generator-input/{owlbot.py => librarian.py} (100%) rename packages/google-cloud-pubsub/{owlbot.py => librarian.py} (100%) delete mode 100644 packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/owlbot.py b/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py similarity index 100% rename from packages/google-cloud-pubsub/.librarian/generator-input/owlbot.py rename to packages/google-cloud-pubsub/.librarian/generator-input/librarian.py diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/setup.py b/packages/google-cloud-pubsub/.librarian/generator-input/setup.py index 6dbea105a82e..dd2809f82448 100644 --- a/packages/google-cloud-pubsub/.librarian/generator-input/setup.py +++ b/packages/google-cloud-pubsub/.librarian/generator-input/setup.py @@ -98,7 +98,6 @@ install_requires=dependencies, extras_require=extras, python_requires=">=3.7", - scripts=["scripts/fixup_pubsub_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git 
a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index 98b1fe52dff2..9a7ad85b3f37 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -1,4 +1,4 @@ -image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:ce48ed695c727f7e13efd1fd68f466a55a0d772c87b69158720cec39965bc8b2 +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 libraries: - id: google-cloud-pubsub version: 2.33.0 diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 9e23f583cba6..00d6b495eb51 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -15,8 +15,18 @@ # from google.pubsub_v1 import gapic_version as package_version +import google.api_core as api_core +import sys + __version__ = package_version.__version__ +if sys.version_info >= (3, 8): # pragma: NO COVER + from importlib import metadata +else: # pragma: NO COVER + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # this code path once we drop support for Python 3.7 + import importlib_metadata as metadata + from .services.publisher import PublisherClient from .services.publisher import PublisherAsyncClient @@ -94,6 +104,100 @@ from .types.schema import Encoding from .types.schema import SchemaView +if hasattr(api_core, "check_python_version") and hasattr( + api_core, "check_dependency_versions" +): # pragma: NO COVER + api_core.check_python_version("google.pubsub_v1") # type: ignore + api_core.check_dependency_versions("google.pubsub_v1") # type: ignore +else: # pragma: NO COVER + # An older version of api_core is installed which does not define the + # functions 
above. We do equivalent checks manually. + try: + import warnings + import sys + + _py_version_str = sys.version.split()[0] + _package_label = "google.pubsub_v1" + if sys.version_info < (3, 9): + warnings.warn( + "You are using a non-supported Python version " + + f"({_py_version_str}). Google will not post any further " + + f"updates to {_package_label} supporting this Python version. " + + "Please upgrade to the latest Python version, or at " + + f"least to Python 3.9, and then update {_package_label}.", + FutureWarning, + ) + if sys.version_info[:2] == (3, 9): + warnings.warn( + f"You are using a Python version ({_py_version_str}) " + + f"which Google will stop supporting in {_package_label} in " + + "January 2026. Please " + + "upgrade to the latest Python version, or at " + + "least to Python 3.10, before then, and " + + f"then update {_package_label}.", + FutureWarning, + ) + + def parse_version_to_tuple(version_string: str): + """Safely converts a semantic version string to a comparable tuple of integers. + Example: "4.25.8" -> (4, 25, 8) + Ignores non-numeric parts and handles common version formats. + Args: + version_string: Version string in the format "x.y.z" or "x.y.z" + Returns: + Tuple of integers for the parsed version string. + """ + parts = [] + for part in version_string.split("."): + try: + parts.append(int(part)) + except ValueError: + # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here. + # This is a simplification compared to 'packaging.parse_version', but sufficient + # for comparing strictly numeric semantic versions. 
+ break + return tuple(parts) + + def _get_version(dependency_name): + try: + version_string: str = metadata.version(dependency_name) + parsed_version = parse_version_to_tuple(version_string) + return (parsed_version, version_string) + except Exception: + # Catch exceptions from metadata.version() (e.g., PackageNotFoundError) + # or errors during parse_version_to_tuple + return (None, "--") + + _dependency_package = "google.protobuf" + _next_supported_version = "4.25.8" + _next_supported_version_tuple = (4, 25, 8) + _recommendation = " (we recommend 6.x)" + (_version_used, _version_used_string) = _get_version(_dependency_package) + if _version_used and _version_used < _next_supported_version_tuple: + warnings.warn( + f"Package {_package_label} depends on " + + f"{_dependency_package}, currently installed at version " + + f"{_version_used_string}. Future updates to " + + f"{_package_label} will require {_dependency_package} at " + + f"version {_next_supported_version} or higher{_recommendation}." + + " Please ensure " + + "that either (a) your Python environment doesn't pin the " + + f"version of {_dependency_package}, so that updates to " + + f"{_package_label} can require the higher version, or " + + "(b) you manually update your Python environment to use at " + + f"least version {_next_supported_version} of " + + f"{_dependency_package}.", + FutureWarning, + ) + except Exception: + warnings.warn( + "Could not determine the version of Python " + + "currently being used. 
To continue receiving " + + "updates for {_package_label}, ensure you are " + + "using a supported version of Python; see " + + "https://devguide.python.org/versions/" + ) + __all__ = ( "PublisherAsyncClient", "SchemaServiceAsyncClient", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 6debee7eeb8c..27ed4dce8fde 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -168,6 +168,34 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -427,12 +455,8 @@ def get_mtls_endpoint_and_cert_source( ) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = PublisherClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError( "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" @@ -440,7 +464,7 @@ def get_mtls_endpoint_and_cert_source( # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -472,20 +496,14 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() + use_client_cert = PublisherClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError( "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py index 29730b85e54b..300f23998397 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/schema_service/client.py @@ -153,6 +153,34 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -335,12 +363,8 @@ def get_mtls_endpoint_and_cert_source( ) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = SchemaServiceClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError( "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" @@ -348,7 +372,7 @@ def get_mtls_endpoint_and_cert_source( # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -380,20 +404,14 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() + use_client_cert = SchemaServiceClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError( "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 98b11e8c331b..2a946f72691d 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -172,6 +172,34 @@ def _get_default_mtls_endpoint(api_endpoint): _DEFAULT_ENDPOINT_TEMPLATE = "pubsub.{UNIVERSE_DOMAIN}" _DEFAULT_UNIVERSE = "googleapis.com" + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -414,12 +442,8 @@ def get_mtls_endpoint_and_cert_source( ) if client_options is None: client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_client_cert = SubscriberClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError( "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" @@ -427,7 +451,7 @@ def get_mtls_endpoint_and_cert_source( # Figure out the client cert source to use. client_cert_source = None - if use_client_cert == "true": + if use_client_cert: if client_options.client_cert_source: client_cert_source = client_options.client_cert_source elif mtls.has_default_client_cert_source(): @@ -459,20 +483,14 @@ def _read_environment_variables(): google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT is not any of ["auto", "never", "always"]. 
""" - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() + use_client_cert = SubscriberClient._use_client_cert_effective() use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) if use_mtls_endpoint not in ("auto", "never", "always"): raise MutualTLSChannelError( "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - return use_client_cert == "true", use_mtls_endpoint, universe_domain_env + return use_client_cert, use_mtls_endpoint, universe_domain_env @staticmethod def _get_client_cert_source(provided_cert_source, use_cert_flag): diff --git a/packages/google-cloud-pubsub/owlbot.py b/packages/google-cloud-pubsub/librarian.py similarity index 100% rename from packages/google-cloud-pubsub/owlbot.py rename to packages/google-cloud-pubsub/librarian.py diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index fd552166cba0..d1b3c15d1fc5 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -14,6 +14,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` + + # Generated by synthtool. DO NOT EDIT! 
from __future__ import absolute_import diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index 165f566b6733..a1f4f115e914 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -27,3 +27,12 @@ filterwarnings = ignore:You are using a Python version.*which Google will stop supporting in new releases of google\.api_core.*:FutureWarning ignore:You are using a non-supported Python version \(([\d\.]+)\)\. Google will not post any further updates to google\.api_core.*:FutureWarning ignore:You are using a Python version \(([\d\.]+)\) past its end of life\. Google will update google\.api_core.*:FutureWarning + # Remove after support for Python 3.7 is dropped + ignore:You are using a non-supported Python version \(3\.7:FutureWarning + # Remove after support for Python 3.8 is dropped + ignore:You are using a non-supported Python version \(3\.8:DeprecationWarning + ignore:You are using a non-supported Python version \(3\.8:FutureWarning + # Remove after support for Python 3.9 is dropped + ignore:You are using a Python version \(3\.9:FutureWarning + # Remove after support for Python 3.10 is dropped + ignore:.*You are using a Python version \(3\.10:FutureWarning diff --git a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py b/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py deleted file mode 100644 index e4c132570d14..000000000000 --- a/packages/google-cloud-pubsub/scripts/fixup_pubsub_v1_keywords.py +++ /dev/null @@ -1,210 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2025 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class pubsubCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'acknowledge': ('subscription', 'ack_ids', ), - 'commit_schema': ('name', 'schema', ), - 'create_schema': ('parent', 'schema', 'schema_id', ), - 'create_snapshot': ('name', 'subscription', 'labels', 'tags', ), - 'create_subscription': ('name', 'topic', 'push_config', 'bigquery_config', 'cloud_storage_config', 'ack_deadline_seconds', 'retain_acked_messages', 'message_retention_duration', 'labels', 'enable_message_ordering', 'expiration_policy', 'filter', 'dead_letter_policy', 'retry_policy', 'detached', 'enable_exactly_once_delivery', 'topic_message_retention_duration', 'state', 'analytics_hub_subscription_info', 'message_transforms', 'tags', ), - 'create_topic': ('name', 'labels', 'message_storage_policy', 'kms_key_name', 'schema_settings', 'satisfies_pzs', 'message_retention_duration', 'state', 'ingestion_data_source_settings', 'message_transforms', 'tags', ), - 'delete_schema': ('name', ), - 'delete_schema_revision': ('name', 'revision_id', ), - 
'delete_snapshot': ('snapshot', ), - 'delete_subscription': ('subscription', ), - 'delete_topic': ('topic', ), - 'detach_subscription': ('subscription', ), - 'get_schema': ('name', 'view', ), - 'get_snapshot': ('snapshot', ), - 'get_subscription': ('subscription', ), - 'get_topic': ('topic', ), - 'list_schema_revisions': ('name', 'view', 'page_size', 'page_token', ), - 'list_schemas': ('parent', 'view', 'page_size', 'page_token', ), - 'list_snapshots': ('project', 'page_size', 'page_token', ), - 'list_subscriptions': ('project', 'page_size', 'page_token', ), - 'list_topics': ('project', 'page_size', 'page_token', ), - 'list_topic_snapshots': ('topic', 'page_size', 'page_token', ), - 'list_topic_subscriptions': ('topic', 'page_size', 'page_token', ), - 'modify_ack_deadline': ('subscription', 'ack_ids', 'ack_deadline_seconds', ), - 'modify_push_config': ('subscription', 'push_config', ), - 'publish': ('topic', 'messages', ), - 'pull': ('subscription', 'max_messages', 'return_immediately', ), - 'rollback_schema': ('name', 'revision_id', ), - 'seek': ('subscription', 'time', 'snapshot', ), - 'streaming_pull': ('subscription', 'stream_ack_deadline_seconds', 'ack_ids', 'modify_deadline_seconds', 'modify_deadline_ack_ids', 'client_id', 'max_outstanding_messages', 'max_outstanding_bytes', 'protocol_version', ), - 'update_snapshot': ('snapshot', 'update_mask', ), - 'update_subscription': ('subscription', 'update_mask', ), - 'update_topic': ('topic', 'update_mask', ), - 'validate_message': ('parent', 'name', 'schema', 'message', 'encoding', ), - 'validate_schema': ('parent', 'schema', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. 
- # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=pubsubCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the pubsub client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 6dbea105a82e..74a11ebf6b6a 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -13,6 +13,10 @@ # See the License for the specific language governing 
permissions and # limitations under the License. # + +# DO NOT EDIT THIS FILE OUTSIDE OF `.librarian/generator-input` +# The source of truth for this file is `.librarian/generator-input` + import io import os @@ -98,7 +102,6 @@ install_requires=dependencies, extras_require=extras, python_requires=">=3.7", - scripts=["scripts/fixup_pubsub_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 6748ac09ad03..978021fcdab0 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -153,12 +153,19 @@ def test__read_environment_variables(): with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError) as excinfo: - PublisherClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + PublisherClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert PublisherClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert PublisherClient._read_environment_variables() == (False, "never", None) @@ -185,6 +192,105 @@ def test__read_environment_variables(): ) +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert PublisherClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. 
+ # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + PublisherClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The environment variable is ignored in this case; the method should return False + # because no client certificate configuration is present. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert PublisherClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CERTIFICATE_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert PublisherClient._use_client_cert_effective() is False + + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -548,17 +654,6 @@ def test_publisher_client_client_options(client_class, transport_class, transpor == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: @@ -770,6 +865,119 @@ def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -820,18 +1028,6 @@ def test_publisher_client_get_mtls_endpoint_and_cert_source(client_class): == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - @pytest.mark.parametrize("client_class", [PublisherClient, PublisherAsyncClient]) @mock.patch.object( @@ -8804,6 +9000,7 @@ def test_publisher_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize( "transport_class", [transports.PublisherGrpcTransport, transports.PublisherGrpcAsyncIOTransport], diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index 7f83558611bc..f71b66805959 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -160,12 +160,19 @@ def test__read_environment_variables(): with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError) as excinfo: - SchemaServiceClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + SchemaServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert SchemaServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert SchemaServiceClient._read_environment_variables() == ( @@ -204,6 +211,105 @@ def test__read_environment_variables(): ) +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert SchemaServiceClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + SchemaServiceClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert SchemaServiceClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert SchemaServiceClient._use_client_cert_effective() is False + + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -569,17 +675,6 @@ def test_schema_service_client_client_options( == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: @@ -795,6 +890,119 @@ def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -845,18 +1053,6 @@ def test_schema_service_client_get_mtls_endpoint_and_cert_source(client_class): == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - @pytest.mark.parametrize( "client_class", [SchemaServiceClient, SchemaServiceAsyncClient] @@ -9437,6 +9633,7 @@ def test_schema_service_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize( "transport_class", [ diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 9186c815a67c..816b04500667 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -155,12 +155,19 @@ def test__read_environment_variables(): with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError) as excinfo: - SubscriberClient._read_environment_variables() - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + SubscriberClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert SubscriberClient._read_environment_variables() == ( + False, + "auto", + None, + ) with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): assert 
SubscriberClient._read_environment_variables() == (False, "never", None) @@ -187,6 +194,105 @@ def test__read_environment_variables(): ) +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert SubscriberClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + SubscriberClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert SubscriberClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert SubscriberClient._use_client_cert_effective() is False + + def test__get_client_cert_source(): mock_provided_cert_source = mock.Mock() mock_default_cert_source = mock.Mock() @@ -552,17 +658,6 @@ def test_subscriber_client_client_options( == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client = client_class(transport=transport_name) - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: @@ -774,6 +869,119 @@ def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): assert api_endpoint == mock_api_endpoint assert cert_source is None + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. 
+ test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. + { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() @@ -824,18 +1032,6 @@ def test_subscriber_client_get_mtls_endpoint_and_cert_source(client_class): == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" ) - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError) as excinfo: - client_class.get_mtls_endpoint_and_cert_source() - - assert ( - str(excinfo.value) - == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - @pytest.mark.parametrize("client_class", [SubscriberClient, SubscriberAsyncClient]) @mock.patch.object( @@ -13114,6 +13310,7 @@ def test_subscriber_grpc_asyncio_transport_channel(): # Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are # removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") @pytest.mark.parametrize( "transport_class", [transports.SubscriberGrpcTransport, transports.SubscriberGrpcAsyncIOTransport], From e63a4aee957ab60ac2312f7eb3faa11c391089c0 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 16 Dec 2025 14:31:54 -0800 Subject: [PATCH 1188/1197] chore: librarian release pull request: 20251216T133533Z (#1567) PR created by the Librarian CLI to initialize a release. Merging this PR will auto trigger a release. Librarian Version: v0.7.0 Language Image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209
google-cloud-pubsub: 2.34.0 ## [2.34.0](https://github.com/googleapis/python-pubsub/compare/v2.33.0...v2.34.0) (2025-12-16) ### Features * support mTLS certificates when available (#1566) ([24761a2f](https://github.com/googleapis/python-pubsub/commit/24761a2f))
--- packages/google-cloud-pubsub/.librarian/state.yaml | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 7 +++++++ .../google-cloud-pubsub/google/pubsub/gapic_version.py | 2 +- .../google-cloud-pubsub/google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 11 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index 9a7ad85b3f37..286fb52efbdb 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -1,7 +1,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 libraries: - id: google-cloud-pubsub - version: 2.33.0 + version: 2.34.0 last_generated_commit: 9fcfbea0aa5b50fa22e190faceb073d74504172b apis: - path: google/pubsub/v1 diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index ac71546a8567..bcf798e60370 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.34.0](https://github.com/googleapis/python-pubsub/compare/v2.33.0...v2.34.0) (2025-12-16) + + +### Features + +* support mTLS certificates when available (#1566) ([24761a2fedeb17f5af98a72a62306ad59306a553](https://github.com/googleapis/python-pubsub/commit/24761a2fedeb17f5af98a72a62306ad59306a553)) + ## [2.33.0](https://github.com/googleapis/python-pubsub/compare/v2.32.0...v2.33.0) (2025-10-30) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index 1eb64a9d6513..b31b170e1e8f 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 
+13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.33.0" # {x-release-please-version} +__version__ = "2.34.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index 1eb64a9d6513..b31b170e1e8f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.33.0" # {x-release-please-version} +__version__ = "2.34.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index bcad2272f09b..f3af602abd59 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.33.0" + "version": "2.34.0" }, "snippets": [ { From 2a0a5c8fd5309df6fbdda4713d6d0902a4aa2b28 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Fri, 23 Jan 2026 14:30:46 -0500 Subject: [PATCH 1189/1197] Chore: Remove Py3.7+3.8 Support (#1573) Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Fixes #1563 --- .../.github/sync-repo-settings.yaml | 4 -- .../.github/workflows/docs.yml | 4 +- .../.github/workflows/unittest.yml | 2 +- .../.kokoro/samples/python3.7/common.cfg | 40 ------------------- .../.kokoro/samples/python3.7/continuous.cfg | 6 --- .../samples/python3.7/periodic-head.cfg | 11 ----- .../.kokoro/samples/python3.7/periodic.cfg | 6 --- .../.kokoro/samples/python3.7/presubmit.cfg | 6 --- .../.kokoro/samples/python3.8/common.cfg | 40 ------------------- .../.kokoro/samples/python3.8/continuous.cfg | 6 --- .../samples/python3.8/periodic-head.cfg | 11 ----- .../.kokoro/samples/python3.8/periodic.cfg | 6 --- .../.kokoro/samples/python3.8/presubmit.cfg | 6 --- .../.librarian/generator-input/librarian.py | 2 +- .../.librarian/generator-input/noxfile.py | 2 - .../.librarian/generator-input/setup.py | 10 ++--- packages/google-cloud-pubsub/CONTRIBUTING.rst | 10 ++--- packages/google-cloud-pubsub/README.rst | 12 +++--- packages/google-cloud-pubsub/librarian.py | 14 +++++-- packages/google-cloud-pubsub/mypy.ini | 2 +- packages/google-cloud-pubsub/noxfile.py | 2 - packages/google-cloud-pubsub/pytest.ini | 14 +------ .../samples/snippets/noxfile.py | 3 +- .../samples/snippets/requirements-test.txt | 7 +--- .../samples/snippets/requirements.txt | 12 ++---- .../templates/install_deps.tmpl.rst | 2 +- packages/google-cloud-pubsub/setup.py | 10 ++--- .../testing/constraints-3.10.txt | 12 +++--- .../testing/constraints-3.11.txt | 12 +++--- .../testing/constraints-3.12.txt | 12 +++--- .../testing/constraints-3.13.txt | 8 +--- .../testing/constraints-3.14.txt | 8 +--- .../testing/constraints-3.7.txt | 13 ------ .../testing/constraints-3.8.txt | 7 ---- .../testing/constraints-3.9.txt | 12 +++--- 
packages/google-cloud-pubsub/tests/system.py | 7 +--- .../unit/gapic/pubsub_v1/test_publisher.py | 8 +--- .../gapic/pubsub_v1/test_schema_service.py | 7 +--- .../unit/gapic/pubsub_v1/test_subscriber.py | 7 +--- .../pubsub_v1/publisher/batch/test_thread.py | 6 +-- .../sequencer/test_ordered_sequencer.py | 8 +--- .../sequencer/test_unordered_sequencer.py | 8 +--- .../publisher/test_publisher_client.py | 6 +-- .../pubsub_v1/subscriber/test_dispatcher.py | 6 +-- .../subscriber/test_futures_subscriber.py | 8 +--- .../pubsub_v1/subscriber/test_heartbeater.py | 7 +--- .../subscriber/test_helper_threads.py | 8 +--- .../unit/pubsub_v1/subscriber/test_leaser.py | 7 +--- .../unit/pubsub_v1/subscriber/test_message.py | 7 +--- .../pubsub_v1/subscriber/test_scheduler.py | 7 +--- .../subscriber/test_streaming_pull_manager.py | 6 +-- .../test_subscribe_opentelemetry.py | 7 +--- .../subscriber/test_subscriber_client.py | 6 +-- .../tests/unit/pubsub_v1/test_futures.py | 6 +-- 54 files changed, 89 insertions(+), 377 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/testing/constraints-3.7.txt delete mode 
100644 packages/google-cloud-pubsub/testing/constraints-3.8.txt diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml index bfde18cc016c..ecc31984da4b 100644 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml @@ -11,8 +11,6 @@ branchProtectionRules: - 'Kokoro - Against Pub/Sub Lite samples' - 'cla/google' - 'Samples - Lint' - - 'Samples - Python 3.7' - - 'Samples - Python 3.8' - 'Samples - Python 3.9' - 'Samples - Python 3.10' - 'Samples - Python 3.11' @@ -21,8 +19,6 @@ branchProtectionRules: - 'docs' - 'docfx' - 'lint' - - 'unit (3.7)' - - 'unit (3.8)' - 'unit (3.9)' - 'unit (3.10)' - 'unit (3.11)' diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml index 0d0fdb861cbb..c5ee988370fe 100644 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ b/packages/google-cloud-pubsub/.github/workflows/docs.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup Python uses: actions/setup-python@v6 with: @@ -24,7 +24,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup Python uses: actions/setup-python@v6 with: diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml index 04e4113044fe..9fb410b8171b 100644 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ b/packages/google-cloud-pubsub/.github/workflows/unittest.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-22.04 strategy: matrix: - python: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] + python: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] steps: - name: Checkout uses: actions/checkout@v4 diff --git 
a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg deleted file mode 100644 index 9156c5975a64..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.7" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py37" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.7/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: 
"True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg deleted file mode 100644 index 5922bef077e7..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.8" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py38" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.8/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: 
"True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py b/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py index 5e6af79555f7..612003377a2d 100644 --- a/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py +++ b/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py @@ -326,7 +326,7 @@ if count < 1: raise Exception(".coveragerc replacement failed.") - s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) + s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py"]) s.remove_staging_dirs() # ---------------------------------------------------------------------------- diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py b/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py index fd552166cba0..170360d6fc7a 100644 --- a/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py +++ b/packages/google-cloud-pubsub/.librarian/generator-input/noxfile.py @@ -37,8 +37,6 @@ DEFAULT_PYTHON_VERSION = "3.14" UNIT_TEST_PYTHON_VERSIONS: List[str] = [ - "3.7", - "3.8", "3.9", "3.10", "3.11", diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/setup.py b/packages/google-cloud-pubsub/.librarian/generator-input/setup.py index dd2809f82448..761bc90ceae5 100644 --- a/packages/google-cloud-pubsub/.librarian/generator-input/setup.py +++ b/packages/google-cloud-pubsub/.librarian/generator-input/setup.py @@ -47,10 +47,8 @@ "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0", "grpcio-status >= 1.33.2", - "opentelemetry-api <= 1.22.0; python_version<='3.7'", - "opentelemetry-api >= 1.27.0; python_version>='3.8'", - "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", - "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", + 
"opentelemetry-api >= 1.27.0", + "opentelemetry-sdk >= 1.27.0", ] extras = {"libcst": "libcst >= 0.3.10"} url = "https://github.com/googleapis/python-pubsub" @@ -82,8 +80,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -97,7 +93,7 @@ packages=packages, install_requires=dependencies, extras_require=extras, - python_requires=">=3.7", + python_requires=">=3.9", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-pubsub/CONTRIBUTING.rst b/packages/google-cloud-pubsub/CONTRIBUTING.rst index 417b1e9f85d8..4e926536bf26 100644 --- a/packages/google-cloud-pubsub/CONTRIBUTING.rst +++ b/packages/google-cloud-pubsub/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. + 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -195,11 +195,11 @@ configure them just like the System Tests. # Run all tests in a folder $ cd samples/snippets - $ nox -s py-3.8 + $ nox -s py-3.14 # Run a single sample test $ cd samples/snippets - $ nox -s py-3.8 -- -k + $ nox -s py-3.14 -- -k ******************************************** Note About ``README`` as it pertains to PyPI @@ -221,8 +221,6 @@ Supported Python Versions We support: -- `Python 3.7`_ -- `Python 3.8`_ - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ @@ -230,8 +228,6 @@ We support: - `Python 3.13`_ - `Python 3.14`_ -.. _Python 3.7: https://docs.python.org/3.7/ -.. _Python 3.8: https://docs.python.org/3.8/ .. 
_Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ diff --git a/packages/google-cloud-pubsub/README.rst b/packages/google-cloud-pubsub/README.rst index 97010e9985e8..86ea7e48eb00 100644 --- a/packages/google-cloud-pubsub/README.rst +++ b/packages/google-cloud-pubsub/README.rst @@ -1,7 +1,7 @@ Python Client for Google Cloud Pub / Sub ======================================== -|GA| |pypi| |versions| +|GA| |pypi| |versions| `Google Cloud Pub / Sub`_ is a fully-managed real-time messaging service that allows you to send and receive messages between independent applications. You @@ -60,11 +60,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.7 +Python >= 3.9 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python <= 3.6. +Python < 3.9 + +The last version of this library compatible with Python 3.7 and 3.8 is google-cloud-pubsub==2.34.0. The last version of this library compatible with Python 2.7 is google-cloud-pubsub==1.7.0. @@ -146,7 +148,7 @@ the topic, and subscribe to that, passing a callback function. with pubsub_v1.SubscriberClient() as subscriber: subscriber.create_subscription( - name=subscription_name, topic=topic_name) + name=subscription_name, topic=topic_name) future = subscriber.subscribe(subscription_name, callback) The future returned by the call to ``subscriber.subscribe`` can be used to @@ -190,7 +192,7 @@ For example, to use JSON Web Tokens, provide a `google.auth.jwt.Credentials`_ in # The same for the publisher, except that the "audience" claim needs to be adjusted publisher_audience = "https://pubsub.googleapis.com/google.pubsub.v1.Publisher" - credentials_pub = credentials.with_claims(audience=publisher_audience) + credentials_pub = credentials.with_claims(audience=publisher_audience) publisher = pubsub_v1.PublisherClient(credentials=credentials_pub) .. 
_Credentials: https://google-auth.readthedocs.io/en/latest/reference/google.auth.credentials.html#google.auth.credentials.Credentials diff --git a/packages/google-cloud-pubsub/librarian.py b/packages/google-cloud-pubsub/librarian.py index 5e6af79555f7..ecaa36d0e248 100644 --- a/packages/google-cloud-pubsub/librarian.py +++ b/packages/google-cloud-pubsub/librarian.py @@ -326,7 +326,15 @@ if count < 1: raise Exception(".coveragerc replacement failed.") - s.move([library], excludes=["noxfile.py", "README.rst", "docs/**/*", "setup.py", "testing/constraints-3.7.txt", "testing/constraints-3.8.txt"]) + s.move( + [library], + excludes=[ + "noxfile.py", + "README.rst", + "docs/**/*", + "setup.py", + ], + ) s.remove_staging_dirs() # ---------------------------------------------------------------------------- @@ -338,10 +346,10 @@ samples=True, cov_level=99, versions=gcp.common.detect_versions(path="./google", default_first=True), - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + unit_test_python_versions=["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], unit_test_dependencies=["flaky"], system_test_python_versions=["3.12"], - system_test_external_dependencies=["psutil","flaky"], + system_test_external_dependencies=["psutil", "flaky"], ) s.move(templated_files, excludes=[".coveragerc", ".github/**", "README.rst", "docs/**", ".kokoro/**"]) diff --git a/packages/google-cloud-pubsub/mypy.ini b/packages/google-cloud-pubsub/mypy.ini index 574c5aed394b..a3cb5c292172 100644 --- a/packages/google-cloud-pubsub/mypy.ini +++ b/packages/google-cloud-pubsub/mypy.ini @@ -1,3 +1,3 @@ [mypy] -python_version = 3.7 +python_version = 3.14 namespace_packages = True diff --git a/packages/google-cloud-pubsub/noxfile.py b/packages/google-cloud-pubsub/noxfile.py index d1b3c15d1fc5..d7fa4a7e8441 100644 --- a/packages/google-cloud-pubsub/noxfile.py +++ b/packages/google-cloud-pubsub/noxfile.py @@ -41,8 +41,6 @@ DEFAULT_PYTHON_VERSION = "3.14" 
UNIT_TEST_PYTHON_VERSIONS: List[str] = [ - "3.7", - "3.8", "3.9", "3.10", "3.11", diff --git a/packages/google-cloud-pubsub/pytest.ini b/packages/google-cloud-pubsub/pytest.ini index a1f4f115e914..41cad40d640b 100644 --- a/packages/google-cloud-pubsub/pytest.ini +++ b/packages/google-cloud-pubsub/pytest.ini @@ -6,8 +6,6 @@ filterwarnings = ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning # Remove once https://github.com/grpc/grpc/issues/35086 is fixed ignore:There is no current event loop:DeprecationWarning:grpc.aio._channel - # Remove after support for Python 3.7 is dropped - ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1938 is fixed ignore:The return_immediately flag is deprecated and should be set to False.:DeprecationWarning # Remove warning once https://github.com/googleapis/gapic-generator-python/issues/1939 is fixed @@ -26,13 +24,5 @@ filterwarnings = ignore:The `credentials_file` argument is deprecated because of a potential security risk:DeprecationWarning ignore:You are using a Python version.*which Google will stop supporting in new releases of google\.api_core.*:FutureWarning ignore:You are using a non-supported Python version \(([\d\.]+)\)\. Google will not post any further updates to google\.api_core.*:FutureWarning - ignore:You are using a Python version \(([\d\.]+)\) past its end of life\. 
Google will update google\.api_core.*:FutureWarning - # Remove after support for Python 3.7 is dropped - ignore:You are using a non-supported Python version \(3\.7:FutureWarning - # Remove after support for Python 3.8 is dropped - ignore:You are using a non-supported Python version \(3\.8:DeprecationWarning - ignore:You are using a non-supported Python version \(3\.8:FutureWarning - # Remove after support for Python 3.9 is dropped - ignore:You are using a Python version \(3\.9:FutureWarning - # Remove after support for Python 3.10 is dropped - ignore:.*You are using a Python version \(3\.10:FutureWarning + # These google library EOL warnings for Python versions don't matter for the purposes of a test. + ignore::FutureWarning:google.*: diff --git a/packages/google-cloud-pubsub/samples/snippets/noxfile.py b/packages/google-cloud-pubsub/samples/snippets/noxfile.py index c326375be9bf..50f2fce56834 100644 --- a/packages/google-cloud-pubsub/samples/snippets/noxfile.py +++ b/packages/google-cloud-pubsub/samples/snippets/noxfile.py @@ -87,9 +87,8 @@ def get_pytest_env_vars() -> Dict[str, str]: return ret -# DO NOT EDIT - automatically generated. # All versions used to test samples. -ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] +ALL_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] # Any default versions that should be ignored. 
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index 7659e3676b23..a4969d6fe7cc 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,9 +1,6 @@ backoff==2.2.1 -pytest===7.4.4; python_version == '3.7' -pytest===8.3.5; python_version == '3.8' -pytest==8.4.2; python_version >= '3.9' +pytest==8.4.2 mock==5.2.0 flaky==3.8.1 -google-cloud-bigquery===3.30.0; python_version <= '3.8' -google-cloud-bigquery==3.38.0; python_version >= '3.9' +google-cloud-bigquery==3.38.0 google-cloud-storage==3.4.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 63a78cd67aaa..997d5a201dec 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,13 +1,7 @@ google-cloud-pubsub==2.31.1 avro==1.12.0 -protobuf===4.24.4; python_version == '3.7' -protobuf===5.29.4; python_version == '3.8' -protobuf==6.32.1; python_version >= '3.9' +protobuf==6.32.1 avro==1.12.0 -opentelemetry-api===1.22.0; python_version == '3.7' -opentelemetry-sdk===1.22.0; python_version == '3.7' -opentelemetry-api===1.33.1; python_version == '3.8' -opentelemetry-sdk===1.33.1; python_version == '3.8' -opentelemetry-api==1.37.0; python_version >= '3.9' -opentelemetry-sdk==1.37.0; python_version >= '3.9' +opentelemetry-api==1.37.0 +opentelemetry-sdk==1.37.0 opentelemetry-exporter-gcp-trace==1.9.0 diff --git a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst index 6f069c6c87a5..f21db80c4d0f 100644 --- 
a/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst +++ b/packages/google-cloud-pubsub/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 3.7+. +#. Create a virtualenv. Samples are compatible with Python 3.9+. .. code-block:: bash diff --git a/packages/google-cloud-pubsub/setup.py b/packages/google-cloud-pubsub/setup.py index 74a11ebf6b6a..211a3306fc23 100644 --- a/packages/google-cloud-pubsub/setup.py +++ b/packages/google-cloud-pubsub/setup.py @@ -51,10 +51,8 @@ "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", "grpc-google-iam-v1 >= 0.12.4, < 1.0.0", "grpcio-status >= 1.33.2", - "opentelemetry-api <= 1.22.0; python_version<='3.7'", - "opentelemetry-api >= 1.27.0; python_version>='3.8'", - "opentelemetry-sdk <= 1.22.0; python_version<='3.7'", - "opentelemetry-sdk >= 1.27.0; python_version>='3.8'", + "opentelemetry-api >= 1.27.0", + "opentelemetry-sdk >= 1.27.0", ] extras = {"libcst": "libcst >= 0.3.10"} url = "https://github.com/googleapis/python-pubsub" @@ -86,8 +84,6 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", @@ -101,7 +97,7 @@ packages=packages, install_requires=dependencies, extras_require=extras, - python_requires=">=3.7", + python_requires=">=3.9", include_package_data=True, zip_safe=False, ) diff --git a/packages/google-cloud-pubsub/testing/constraints-3.10.txt b/packages/google-cloud-pubsub/testing/constraints-3.10.txt index ef1c92ffffeb..cc0b74a08a38 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.10.txt 
+++ b/packages/google-cloud-pubsub/testing/constraints-3.10.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.11.txt b/packages/google-cloud-pubsub/testing/constraints-3.11.txt index ef1c92ffffeb..cc0b74a08a38 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.11.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.11.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. -google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.12.txt b/packages/google-cloud-pubsub/testing/constraints-3.12.txt index ef1c92ffffeb..cc0b74a08a38 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.12.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.12.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.13.txt b/packages/google-cloud-pubsub/testing/constraints-3.13.txt index 2ae5a677e852..cc0b74a08a38 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.13.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.13.txt @@ -1,10 +1,6 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. # List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 grpcio>=1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.14.txt b/packages/google-cloud-pubsub/testing/constraints-3.14.txt index 2ae5a677e852..cc0b74a08a38 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.14.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.14.txt @@ -1,10 +1,6 @@ -# We use the constraints file for the latest Python version -# (currently this file) to check that the latest -# major versions of dependencies are supported in setup.py. +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. # List all library dependencies and extras in this file. -# Require the latest major version be installed for each dependency. 
-# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", -# Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 grpcio>=1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.7.txt b/packages/google-cloud-pubsub/testing/constraints-3.7.txt deleted file mode 100644 index 08db5de87ef9..000000000000 --- a/packages/google-cloud-pubsub/testing/constraints-3.7.txt +++ /dev/null @@ -1,13 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. -# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -google-auth==2.14.1 -proto-plus==1.22.0 -protobuf==3.20.2 -grpc-google-iam-v1==0.12.4 -grpcio==1.51.3 -grpcio-status==1.33.2 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.8.txt b/packages/google-cloud-pubsub/testing/constraints-3.8.txt deleted file mode 100644 index 30520e2d05cc..000000000000 --- a/packages/google-cloud-pubsub/testing/constraints-3.8.txt +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core==1.34.0 -proto-plus -protobuf -grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.9.txt b/packages/google-cloud-pubsub/testing/constraints-3.9.txt index ef1c92ffffeb..cc0b74a08a38 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.9.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.9.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
-google-api-core -google-auth -grpcio -proto-plus -protobuf -grpc-google-iam-v1 +google-api-core>=2 +google-auth>=2 +grpcio>=1 +proto-plus>=1 +protobuf>=6 +grpc-google-iam-v1>=0 diff --git a/packages/google-cloud-pubsub/tests/system.py b/packages/google-cloud-pubsub/tests/system.py index e1af7440275c..9db2a5e1280b 100644 --- a/packages/google-cloud-pubsub/tests/system.py +++ b/packages/google-cloud-pubsub/tests/system.py @@ -20,16 +20,11 @@ import operator as op import os import psutil -import sys import threading import time from typing import Any, Callable, cast, TypeVar -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock from flaky import flaky import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 978021fcdab0..33c3afef4af7 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -15,12 +15,8 @@ # import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock + +import mock import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py index f71b66805959..76dd7b1f8a40 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_schema_service.py @@ -15,12 +15,7 @@ # import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO 
COVER -except ImportError: # pragma: NO COVER - import mock +import mock import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 816b04500667..52fc8c1338f6 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -16,12 +16,7 @@ import os import warnings -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock +import mock import grpc from grpc.experimental import aio diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index ad8fa376bfd8..dc6d25fadfd8 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -17,11 +17,7 @@ import threading import time -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py index 4377d1447287..f7c166aabe44 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_ordered_sequencer.py @@ -13,14 +13,8 @@ # limitations under the License. 
import concurrent.futures as futures -import sys - -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest from google.auth import credentials diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py index 739bae3bddfc..054e66da0584 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/sequencer/test_unordered_sequencer.py @@ -11,14 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -import sys - -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest from google.auth import credentials diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 651c040baeb8..cc417d49222b 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -21,11 +21,7 @@ import grpc import math -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest import time diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py index 
5483c48c5eae..23e1a6c18713 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_dispatcher.py @@ -29,11 +29,7 @@ ) from google.pubsub_v1.types import PubsubMessage -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest from google.cloud.pubsub_v1.subscriber.exceptions import ( diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py index d10da6fb19c5..c4c539f96231 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_futures_subscriber.py @@ -13,14 +13,8 @@ # limitations under the License. from __future__ import absolute_import -import sys - -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest from google.cloud.pubsub_v1.subscriber import futures diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py index 857152ac34cf..cd9fd9762921 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -13,17 +13,12 @@ # limitations under the License. 
import logging -import sys import threading from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py index bfbaf3e56509..54659a5a3cbe 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_helper_threads.py @@ -12,13 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import sys - -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import queue diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 606dcc2c9e4b..3d2a96151f4f 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -13,7 +13,6 @@ # limitations under the License. 
import logging -import sys import threading from google.cloud.pubsub_v1 import types @@ -27,11 +26,7 @@ ) from google.cloud.pubsub_v1.subscriber import message -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py index 03bdc1514347..676536f01e54 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_message.py @@ -14,14 +14,9 @@ import datetime import queue -import sys import time -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock from google.api_core import datetime_helpers from google.cloud.pubsub_v1.subscriber import message diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py index 3ed1978c1ecb..22bd53729241 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_scheduler.py @@ -15,15 +15,10 @@ import concurrent.futures import queue import pytest -import sys import threading import time -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock from google.cloud.pubsub_v1.subscriber import scheduler diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 
b9561d747914..953b882f8d1a 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -32,11 +32,7 @@ from google.cloud.pubsub_v1.types import PubsubMessage -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest from google.api_core import bidi diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py index 2fb89aa7c62b..efa3e4f71ae4 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscribe_opentelemetry.py @@ -14,7 +14,6 @@ import datetime import time -import sys import queue import pytest @@ -32,11 +31,7 @@ from google.cloud.pubsub_v1.subscriber.message import Message from google.cloud.pubsub_v1.types import PubsubMessage -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock RECEIVED = datetime.datetime(2012, 4, 21, 15, 0, tzinfo=datetime.timezone.utc) RECEIVED_SECONDS = datetime_helpers.to_milliseconds(RECEIVED) // 1000 diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py index 3d3ff0111213..ecaf23cf9917 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/subscriber/test_subscriber_client.py @@ -16,11 +16,7 @@ import grpc -# special case python < 3.8 -if sys.version_info.major 
== 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest diff --git a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py index 5a4dad41a9a2..d6af2f359917 100644 --- a/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py +++ b/packages/google-cloud-pubsub/tests/unit/pubsub_v1/test_futures.py @@ -17,11 +17,7 @@ import threading import time -# special case python < 3.8 -if sys.version_info.major == 3 and sys.version_info.minor < 8: - import mock -else: - from unittest import mock +from unittest import mock import pytest From 23d81f81c7ef6160feb4ba95d3e41e5c3992a9e4 Mon Sep 17 00:00:00 2001 From: gurusai-voleti Date: Tue, 3 Feb 2026 03:33:30 +0530 Subject: [PATCH 1190/1197] chore: Migrate gsutil usage to gcloud storage (#1574) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Automated: Migrate {target_path} from gsutil to gcloud storage This CL is part of the ongoing effort to migrate from the legacy `gsutil` tool to the new and improved `gcloud storage` command-line interface. `gcloud storage` is the recommended and modern tool for interacting with Google Cloud Storage, offering better performance, unified authentication, and a more consistent command structure with other `gcloud` components. 🚀 ### Automation Details This change was **generated automatically** by an agent that targets users of `gsutil`. The transformations applied are based on the [gsutil to gcloud storage migration guide](http://go/gsutil-gcloud-storage-migration-guide). ### ⚠️ Action Required: Please Review and Test Carefully While we have based the automation on the migration guide, every use case is unique. 
**It is crucial that you thoroughly test these changes in environments appropriate to your use-case before merging.** Be aware of potential differences between `gsutil` and `gcloud storage` that could impact your workflows. For instance, the structure of command output may have changed, requiring updates to any scripts that parse it. Similarly, command behavior can differ subtly; the `gcloud storage rsync` command has a different file deletion logic than `gsutil rsync`, which could lead to unintended file deletions. Our migration guides can help guide you through a list of mappings and some notable differences between the two tools. Standard presubmit tests are run as part of this CL's workflow. **If you need to target an additional test workflow or require assistance with testing, please let us know.** Please verify that all your Cloud Storage operations continue to work as expected to avoid any potential disruptions in production. ### Support and Collaboration The `GCS CLI` team is here to help! If you encounter any issues, have a complex use case that this automated change doesn't cover, or face any other blockers, please don't hesitate to reach out. We are happy to work with you to test and adjust these changes as needed. **Contact:** `gcs-cli-hyd@google.com` We appreciate your partnership in this important migration effort! #gsutil-migration --- packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh index 35fa529231dc..d03f92dfc489 100755 --- a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh +++ b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh @@ -26,8 +26,8 @@ # To run this script, first download few files from gcs to /dev/shm. # (/dev/shm is passed into the container as KOKORO_GFILE_DIR). 
# -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# gcloud storage cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gcloud storage cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm # # Then run the script. # .kokoro/trampoline_v2.sh From 60ec69992e6a92e9734ec7ba8348000bed96c711 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 5 Feb 2026 11:00:57 -0800 Subject: [PATCH 1191/1197] build(deps): bump protobuf from 6.32.1 to 6.33.5 in /samples/snippets (#1577) Bumps [protobuf](https://github.com/protocolbuffers/protobuf) from 6.32.1 to 6.33.5.
Release notes

Sourced from protobuf's releases.

Protocol Buffers v34.0-rc1

Announcements

Bazel

Compiler

C++

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=protobuf&package-manager=pip&previous-version=6.32.1&new-version=6.33.5)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself) You can disable automated security fix PRs for this repo from the [Security Alerts page](https://github.com/googleapis/python-pubsub/network/alerts).
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- packages/google-cloud-pubsub/samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 997d5a201dec..5ee669828bfd 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,6 +1,6 @@ google-cloud-pubsub==2.31.1 avro==1.12.0 -protobuf==6.32.1 +protobuf==6.33.5 avro==1.12.0 opentelemetry-api==1.37.0 opentelemetry-sdk==1.37.0 From 7f2735dd9e3ebf26f74cd75312f883abb7d54f68 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Thu, 5 Feb 2026 15:41:31 -0500 Subject: [PATCH 1192/1197] chore: bump dependencies manually (#1579) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Fixes #1578 🦕 --- .../google-cloud-pubsub/.github/workflows/lint.yml | 2 +- .../samples/snippets/publisher.py | 5 +---- .../samples/snippets/publisher_test.py | 5 +++-- .../samples/snippets/requirements-test.txt | 7 ++++--- .../samples/snippets/requirements.txt | 12 ++++++------ .../samples/snippets/subscriber.py | 2 +- .../samples/snippets/subscriber_test.py | 2 +- 7 files changed, 17 insertions(+), 18 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml index 6204983fd945..a52933488f3d 100644 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ b/packages/google-cloud-pubsub/.github/workflows/lint.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v5 + uses: actions/checkout@v6 - name: Setup Python uses: actions/setup-python@v6 with: diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher.py b/packages/google-cloud-pubsub/samples/snippets/publisher.py index d2b6dd2b87cb..13a70bee6c48 100644 --- a/packages/google-cloud-pubsub/samples/snippets/publisher.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher.py @@ -1046,10 +1046,7 @@ def detach_subscription(project_id: str, subscription_id: str) -> None: args.gcp_service_account, ) elif args.command == "create_smt": - create_topic_with_smt( - args.project_id, - args.topic_id, - ) + create_topic_with_smt(args.project_id, f"{args.topic_id}-smt") elif args.command == "update_kinesis_ingestion": update_topic_type( args.project_id, diff --git a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py index 1c691bd5c21f..43c6b284826f 100644 
--- a/packages/google-cloud-pubsub/samples/snippets/publisher_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/publisher_test.py @@ -316,15 +316,16 @@ def test_create_topic_with_confluent_cloud_ingestion( def test_create_with_smt( publisher_client: pubsub_v1.PublisherClient, capsys: CaptureFixture[str] ) -> None: + smt_topic_name = f"{TOPIC_ID}-smt" # The scope of `topic_path` is limited to this function. - topic_path = publisher_client.topic_path(PROJECT_ID, TOPIC_ID) + topic_path = publisher_client.topic_path(PROJECT_ID, smt_topic_name) try: publisher_client.delete_topic(request={"topic": topic_path}) except NotFound: pass - publisher.create_topic_with_smt(PROJECT_ID, TOPIC_ID) + publisher.create_topic_with_smt(PROJECT_ID, smt_topic_name) out, _ = capsys.readouterr() assert f"Created topic: {topic_path} with SMT" in out diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt index a4969d6fe7cc..1aeb6d04c03c 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements-test.txt @@ -1,6 +1,7 @@ backoff==2.2.1 -pytest==8.4.2 +pytest==8.4.2; python_version <= '3.9' +pytest==9.0.2; python_version > '3.9' mock==5.2.0 flaky==3.8.1 -google-cloud-bigquery==3.38.0 -google-cloud-storage==3.4.0 +google-cloud-bigquery==3.40.0 +google-cloud-storage==3.9.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/requirements.txt b/packages/google-cloud-pubsub/samples/snippets/requirements.txt index 5ee669828bfd..25029a9f972d 100644 --- a/packages/google-cloud-pubsub/samples/snippets/requirements.txt +++ b/packages/google-cloud-pubsub/samples/snippets/requirements.txt @@ -1,7 +1,7 @@ -google-cloud-pubsub==2.31.1 -avro==1.12.0 +google-cloud-pubsub==2.34.0 +avro==1.12.1 protobuf==6.33.5 -avro==1.12.0 -opentelemetry-api==1.37.0 -opentelemetry-sdk==1.37.0 
-opentelemetry-exporter-gcp-trace==1.9.0 +avro==1.12.1 +opentelemetry-api==1.39.1 +opentelemetry-sdk==1.39.1 +opentelemetry-exporter-gcp-trace==1.11.0 diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber.py b/packages/google-cloud-pubsub/samples/snippets/subscriber.py index 5549d056fbe7..94d083ae1ab6 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber.py @@ -1518,7 +1518,7 @@ def callback(message: pubsub_v1.subscriber.message.Message) -> None: ) elif args.command == "create-with-smt": create_subscription_with_smt( - args.project_id, args.topic_id, args.subscription_id + args.project_id, f"{args.topic_id}-smt", f"{args.subscription_id}-smt" ) elif args.command == "delete": diff --git a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py index 53a844e01423..d1f1db94c846 100644 --- a/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py +++ b/packages/google-cloud-pubsub/samples/snippets/subscriber_test.py @@ -585,7 +585,7 @@ def test_create_subscription_with_smt( capsys: CaptureFixture[str], ) -> None: subscription_for_create_name = ( - f"subscription-test-subscription-for-create-with-smt-{PY_VERSION}-{UUID}" + f"subscription-test-subscription-for-create-with-smt-{PY_VERSION}-{UUID}-smt" ) subscription_path = subscriber_client.subscription_path( From 301ebdd788f32f91239954f84674489e4f9d1626 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Thu, 5 Feb 2026 16:52:58 -0500 Subject: [PATCH 1193/1197] chore: librarian generate pull request: 20260205T210552Z (#1580) PR created by the Librarian CLI to generate Cloud Client Libraries code from protos. 
BEGIN_COMMIT BEGIN_NESTED_COMMIT feat: Add AIInference MessageTransform type PiperOrigin-RevId: 853856321 Library-IDs: google-cloud-pubsub Source-link: [googleapis/googleapis@9fb55c41](https://github.com/googleapis/googleapis/commit/9fb55c41) END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: A comment for field `topic` in message `.google.pubsub.v1.Subscription` is updated PiperOrigin-RevId: 853856321 Library-IDs: google-cloud-pubsub Source-link: [googleapis/googleapis@9fb55c41](https://github.com/googleapis/googleapis/commit/9fb55c41) END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: A comment for field `analytics_hub_subscription_info` in message `.google.pubsub.v1.Subscription` is updated PiperOrigin-RevId: 853856321 Library-IDs: google-cloud-pubsub Source-link: [googleapis/googleapis@9fb55c41](https://github.com/googleapis/googleapis/commit/9fb55c41) END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: A comment for field `subscription` in message `.google.pubsub.v1.CreateSnapshotRequest` is updated PiperOrigin-RevId: 853856321 Library-IDs: google-cloud-pubsub Source-link: [googleapis/googleapis@9fb55c41](https://github.com/googleapis/googleapis/commit/9fb55c41) END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: add tags documentation links to Pub/Sub resource tags fields PiperOrigin-RevId: 845891076 Library-IDs: google-cloud-pubsub Source-link: [googleapis/googleapis@ff251e77](https://github.com/googleapis/googleapis/commit/ff251e77) END_NESTED_COMMIT BEGIN_NESTED_COMMIT docs: Add the IDENTIFIER field behavior annotation to fields of Cloud Pub/Sub methods that represent a specific identity and need to be sourced with additional care PiperOrigin-RevId: 840763233 Library-IDs: google-cloud-pubsub Source-link: [googleapis/googleapis@d89bb8a1](https://github.com/googleapis/googleapis/commit/d89bb8a1) END_NESTED_COMMIT END_COMMIT This pull request is generated with proto changes between 
[googleapis/googleapis@9fcfbea0](https://github.com/googleapis/googleapis/commit/9fcfbea0aa5b50fa22e190faceb073d74504172b) (exclusive) and [googleapis/googleapis@9fb55c41](https://github.com/googleapis/googleapis/commit/9fb55c416c90ff9e14f7101d68394eb9fca5918b) (inclusive). Librarian Version: v0.8.0 Language Image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 --- .../.librarian/generator-input/librarian.py | 4 +- .../google-cloud-pubsub/.librarian/state.yaml | 2 +- .../google/pubsub/__init__.py | 2 + .../google/pubsub_v1/__init__.py | 2 + .../services/publisher/async_client.py | 16 +-- .../pubsub_v1/services/publisher/client.py | 16 +-- .../services/subscriber/async_client.py | 4 +- .../pubsub_v1/services/subscriber/client.py | 4 +- .../google/pubsub_v1/types/__init__.py | 2 + .../google/pubsub_v1/types/pubsub.py | 126 ++++++++++++++++-- .../testing/constraints-3.10.txt | 12 +- .../testing/constraints-3.11.txt | 12 +- .../testing/constraints-3.12.txt | 12 +- .../testing/constraints-3.13.txt | 8 +- .../testing/constraints-3.14.txt | 8 +- .../testing/constraints-3.9.txt | 12 +- .../unit/gapic/pubsub_v1/test_publisher.py | 1 + .../unit/gapic/pubsub_v1/test_subscriber.py | 1 + 18 files changed, 183 insertions(+), 61 deletions(-) diff --git a/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py b/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py index 612003377a2d..5263c2285c5c 100644 --- a/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py +++ b/packages/google-cloud-pubsub/.librarian/generator-input/librarian.py @@ -338,10 +338,10 @@ samples=True, cov_level=99, versions=gcp.common.detect_versions(path="./google", default_first=True), - unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], + unit_test_python_versions=["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], 
unit_test_dependencies=["flaky"], system_test_python_versions=["3.12"], - system_test_external_dependencies=["psutil","flaky"], + system_test_external_dependencies=["psutil", "flaky"], ) s.move(templated_files, excludes=[".coveragerc", ".github/**", "README.rst", "docs/**", ".kokoro/**"]) diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index 286fb52efbdb..a8f6625a3a70 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -2,7 +2,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-li libraries: - id: google-cloud-pubsub version: 2.34.0 - last_generated_commit: 9fcfbea0aa5b50fa22e190faceb073d74504172b + last_generated_commit: 2d0b3a154fb4f56993487c5409850ad3431a2690 apis: - path: google/pubsub/v1 service_config: pubsub_v1.yaml diff --git a/packages/google-cloud-pubsub/google/pubsub/__init__.py b/packages/google-cloud-pubsub/google/pubsub/__init__.py index d88449a53f45..b61343e55b8a 100644 --- a/packages/google-cloud-pubsub/google/pubsub/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub/__init__.py @@ -28,6 +28,7 @@ from google.pubsub_v1.services.subscriber.async_client import SubscriberAsyncClient from google.pubsub_v1.types.pubsub import AcknowledgeRequest +from google.pubsub_v1.types.pubsub import AIInference from google.pubsub_v1.types.pubsub import BigQueryConfig from google.pubsub_v1.types.pubsub import CloudStorageConfig from google.pubsub_v1.types.pubsub import CreateSnapshotRequest @@ -104,6 +105,7 @@ "SubscriberClient", "SubscriberAsyncClient", "AcknowledgeRequest", + "AIInference", "BigQueryConfig", "CloudStorageConfig", "CreateSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py index 00d6b495eb51..5d7a6518ce2f 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py +++ 
b/packages/google-cloud-pubsub/google/pubsub_v1/__init__.py @@ -36,6 +36,7 @@ from .services.subscriber import SubscriberAsyncClient from .types.pubsub import AcknowledgeRequest +from .types.pubsub import AIInference from .types.pubsub import BigQueryConfig from .types.pubsub import CloudStorageConfig from .types.pubsub import CreateSnapshotRequest @@ -202,6 +203,7 @@ def _get_version(dependency_name): "PublisherAsyncClient", "SchemaServiceAsyncClient", "SubscriberAsyncClient", + "AIInference", "AcknowledgeRequest", "BigQueryConfig", "CloudStorageConfig", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py index 3767a460bc84..9f52347d4b89 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/async_client.py @@ -339,14 +339,14 @@ async def sample_create_topic(): request (Optional[Union[google.pubsub_v1.types.Topic, dict]]): The request object. A topic resource. name (:class:`str`): - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` - must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be - between 3 and 255 characters in length, and it must not - start with ``"goog"``. + Required. Identifier. The name of the topic. It must + have the format ``"projects/{project}/topics/{topic}"``. + ``{topic}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py index 27ed4dce8fde..7467b75405a4 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/publisher/client.py @@ -868,14 +868,14 @@ def sample_create_topic(): request (Union[google.pubsub_v1.types.Topic, dict]): The request object. A topic resource. name (str): - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` - must start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), - underscores (``_``), periods (``.``), tildes (``~``), - plus (``+``) or percent signs (``%``). It must be - between 3 and 255 characters in length, and it must not - start with ``"goog"``. + Required. Identifier. The name of the topic. It must + have the format ``"projects/{project}/topics/{topic}"``. + ``{topic}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes + (``-``), underscores (``_``), periods (``.``), tildes + (``~``), plus (``+``) or percent signs (``%``). It must + be between 3 and 255 characters in length, and it must + not start with ``"goog"``. 
This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py index 34843a0e0c3b..a7f1cc3f58d7 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/async_client.py @@ -360,8 +360,8 @@ async def sample_create_subscription(): then the subscriber will pull and ack messages using API methods. At most one of these fields may be set. name (:class:`str`): - Required. The name of the subscription. It must have the - format + Required. Identifier. The name of the subscription. It + must have the format ``"projects/{project}/subscriptions/{subscription}"``. ``{subscription}`` must start with a letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py index 2a946f72691d..23e7ff6d0527 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/client.py @@ -872,8 +872,8 @@ def sample_create_subscription(): then the subscriber will pull and ack messages using API methods. At most one of these fields may be set. name (str): - Required. The name of the subscription. It must have the - format + Required. Identifier. The name of the subscription. It + must have the format ``"projects/{project}/subscriptions/{subscription}"``. 
``{subscription}`` must start with a letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py index 85c6b901b1a9..593abc464c03 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/__init__.py @@ -17,6 +17,7 @@ from .pubsub import ( AcknowledgeRequest, + AIInference, BigQueryConfig, CloudStorageConfig, CreateSnapshotRequest, @@ -99,6 +100,7 @@ __all__ = ( "TimeoutType", "AcknowledgeRequest", + "AIInference", "BigQueryConfig", "CloudStorageConfig", "CreateSnapshotRequest", diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py index 26c13fb18c44..1a5663c29dcd 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/types/pubsub.py @@ -21,6 +21,7 @@ from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.types import schema as gp_schema @@ -34,6 +35,7 @@ "PlatformLogsSettings", "IngestionFailureEvent", "JavaScriptUDF", + "AIInference", "MessageTransform", "Topic", "PubsubMessage", @@ -1344,9 +1346,78 @@ class JavaScriptUDF(proto.Message): ) +class AIInference(proto.Message): + r"""Configuration for making inference requests against Vertex AI + models. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + endpoint (str): + Required. 
An endpoint to a Vertex AI model of the form + ``projects/{project}/locations/{location}/endpoints/{endpoint}`` + or + ``projects/{project}/locations/{location}/publishers/{publisher}/models/{model}``. + Vertex AI API requests will be sent to this endpoint. + unstructured_inference (google.pubsub_v1.types.AIInference.UnstructuredInference): + Optional. Requests and responses can be any + arbitrary JSON object. + + This field is a member of `oneof`_ ``inference_mode``. + service_account_email (str): + Optional. The service account to use to make prediction + requests against endpoints. The resource creator or updater + that specifies this field must have + ``iam.serviceAccounts.actAs`` permission on the service + account. If not specified, the Pub/Sub `service + agent <{$universe.dns_names.final_documentation_domain}/iam/docs/service-agents>`__, + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, + is used. + """ + + class UnstructuredInference(proto.Message): + r"""Configuration for making inferences using arbitrary JSON + payloads. + + Attributes: + parameters (google.protobuf.struct_pb2.Struct): + Optional. A parameters object to be included + in each inference request. The parameters object + is combined with the data field of the Pub/Sub + message to form the inference request. + """ + + parameters: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=1, + message=struct_pb2.Struct, + ) + + endpoint: str = proto.Field( + proto.STRING, + number=1, + ) + unstructured_inference: UnstructuredInference = proto.Field( + proto.MESSAGE, + number=2, + oneof="inference_mode", + message=UnstructuredInference, + ) + service_account_email: str = proto.Field( + proto.STRING, + number=3, + ) + + class MessageTransform(proto.Message): r"""All supported message transforms types. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -1355,6 +1426,13 @@ class MessageTransform(proto.Message): JavaScriptUDF's are specified on a resource, each must have a unique ``function_name``. + This field is a member of `oneof`_ ``transform``. + ai_inference (google.pubsub_v1.types.AIInference): + Optional. AI Inference. Specifies the Vertex + AI endpoint that inference requests built from + the Pub/Sub message data and provided parameters + will be sent to. + This field is a member of `oneof`_ ``transform``. enabled (bool): Optional. This field is deprecated, use the ``disabled`` @@ -1370,6 +1448,12 @@ class MessageTransform(proto.Message): oneof="transform", message="JavaScriptUDF", ) + ai_inference: "AIInference" = proto.Field( + proto.MESSAGE, + number=6, + oneof="transform", + message="AIInference", + ) enabled: bool = proto.Field( proto.BOOL, number=3, @@ -1385,10 +1469,10 @@ class Topic(proto.Message): Attributes: name (str): - Required. The name of the topic. It must have the format - ``"projects/{project}/topics/{topic}"``. ``{topic}`` must - start with a letter, and contain only letters - (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), + Required. Identifier. The name of the topic. It must have + the format ``"projects/{project}/topics/{topic}"``. + ``{topic}`` must start with a letter, and contain only + letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``), underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent signs (``%``). It must be between 3 and 255 characters in length, and it must not start with @@ -1442,7 +1526,11 @@ class Topic(proto.Message): example: "123/environment": "production", - "123/costCenter": "marketing". 
+ "123/costCenter": "marketing" + See + https://docs.cloud.google.com/pubsub/docs/tags + for more information on using tags with Pub/Sub + resources. """ class State(proto.Enum): @@ -1889,8 +1977,8 @@ class Subscription(proto.Message): Attributes: name (str): - Required. The name of the subscription. It must have the - format + Required. Identifier. The name of the subscription. It must + have the format ``"projects/{project}/subscriptions/{subscription}"``. ``{subscription}`` must start with a letter, and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes @@ -2043,7 +2131,7 @@ class Subscription(proto.Message): analytics_hub_subscription_info (google.pubsub_v1.types.Subscription.AnalyticsHubSubscriptionInfo): Output only. Information about the associated Analytics Hub subscription. Only set if the - subscritpion is created by Analytics Hub. + subscription is created by Analytics Hub. message_transforms (MutableSequence[google.pubsub_v1.types.MessageTransform]): Optional. Transforms to be applied to messages before they are delivered to @@ -2055,7 +2143,11 @@ class Subscription(proto.Message): example: "123/environment": "production", - "123/costCenter": "marketing". + "123/costCenter": "marketing" + See + https://docs.cloud.google.com/pubsub/docs/tags + for more information on using tags with Pub/Sub + resources. """ class State(proto.Enum): @@ -2530,6 +2622,10 @@ class State(proto.Enum): Cannot write to the destination because enforce_in_transit is set to true and the destination locations are not in the allowed regions. + VERTEX_AI_LOCATION_RESTRICTION (6): + Cannot write to the BigQuery table because the table is not + in the same location as where Vertex AI models used in + ``message_transform``\ s are deployed. 
""" STATE_UNSPECIFIED = 0 ACTIVE = 1 @@ -2537,6 +2633,7 @@ class State(proto.Enum): NOT_FOUND = 3 SCHEMA_MISMATCH = 4 IN_TRANSIT_LOCATION_RESTRICTION = 5 + VERTEX_AI_LOCATION_RESTRICTION = 6 table: str = proto.Field( proto.STRING, @@ -2663,6 +2760,10 @@ class State(proto.Enum): Cannot write to the Cloud Storage bucket due to an incompatibility between the topic schema and subscription settings. + VERTEX_AI_LOCATION_RESTRICTION (6): + Cannot write to the Cloud Storage bucket because the bucket + is not in the same location as where Vertex AI models used + in ``message_transform``\ s are deployed. """ STATE_UNSPECIFIED = 0 ACTIVE = 1 @@ -2670,6 +2771,7 @@ class State(proto.Enum): NOT_FOUND = 3 IN_TRANSIT_LOCATION_RESTRICTION = 4 SCHEMA_MISMATCH = 5 + VERTEX_AI_LOCATION_RESTRICTION = 6 class TextConfig(proto.Message): r"""Configuration for writing message data in text format. @@ -3354,7 +3456,11 @@ class CreateSnapshotRequest(proto.Message): example: "123/environment": "production", - "123/costCenter": "marketing". + "123/costCenter": "marketing" + See + https://docs.cloud.google.com/pubsub/docs/tags + for more information on using tags with Pub/Sub + resources. """ name: str = proto.Field( diff --git a/packages/google-cloud-pubsub/testing/constraints-3.10.txt b/packages/google-cloud-pubsub/testing/constraints-3.10.txt index cc0b74a08a38..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.10.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.10.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
-google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.11.txt b/packages/google-cloud-pubsub/testing/constraints-3.11.txt index cc0b74a08a38..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.11.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.11.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.12.txt b/packages/google-cloud-pubsub/testing/constraints-3.12.txt index cc0b74a08a38..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.12.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.12.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. -google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.13.txt b/packages/google-cloud-pubsub/testing/constraints-3.13.txt index cc0b74a08a38..2ae5a677e852 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.13.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.13.txt @@ -1,6 +1,10 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. 
# List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 grpcio>=1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.14.txt b/packages/google-cloud-pubsub/testing/constraints-3.14.txt index cc0b74a08a38..2ae5a677e852 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.14.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.14.txt @@ -1,6 +1,10 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. +# We use the constraints file for the latest Python version +# (currently this file) to check that the latest +# major versions of dependencies are supported in setup.py. # List all library dependencies and extras in this file. +# Require the latest major version be installed for each dependency. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0", +# Then this file should have google-cloud-foo>=1 google-api-core>=2 google-auth>=2 grpcio>=1 diff --git a/packages/google-cloud-pubsub/testing/constraints-3.9.txt b/packages/google-cloud-pubsub/testing/constraints-3.9.txt index cc0b74a08a38..ef1c92ffffeb 100644 --- a/packages/google-cloud-pubsub/testing/constraints-3.9.txt +++ b/packages/google-cloud-pubsub/testing/constraints-3.9.txt @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- # This constraints file is required for unit tests. # List all library dependencies and extras in this file. 
-google-api-core>=2 -google-auth>=2 -grpcio>=1 -proto-plus>=1 -protobuf>=6 -grpc-google-iam-v1>=0 +google-api-core +google-auth +grpcio +proto-plus +protobuf +grpc-google-iam-v1 diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py index 33c3afef4af7..0edfbd382466 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_publisher.py @@ -55,6 +55,7 @@ from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.publisher import PublisherAsyncClient from google.pubsub_v1.services.publisher import PublisherClient diff --git a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py index 52fc8c1338f6..1aa2e55c9db3 100644 --- a/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py +++ b/packages/google-cloud-pubsub/tests/unit/gapic/pubsub_v1/test_subscriber.py @@ -55,6 +55,7 @@ from google.oauth2 import service_account from google.protobuf import duration_pb2 # type: ignore from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import struct_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.pubsub_v1.services.subscriber import SubscriberAsyncClient from google.pubsub_v1.services.subscriber import SubscriberClient From 6291bc7e8b156feb3ce401257688e3ac8c4a5224 Mon Sep 17 00:00:00 2001 From: Andrew Browne <81702808+abbrowne126@users.noreply.github.com> Date: Thu, 5 Feb 2026 17:20:42 -0500 Subject: [PATCH 1194/1197] chore: librarian release pull request: 
20260205T215747Z (#1581) PR created by the Librarian CLI to initialize a release. Merging this PR will auto trigger a release. Librarian Version: v0.8.0 Language Image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209
google-cloud-pubsub: 2.35.0 ## [2.35.0](https://github.com/googleapis/python-pubsub/compare/v2.34.0...v2.35.0) (2026-02-05) ### Features * Add AIInference MessageTransform type (PiperOrigin-RevId: 853856321) ([07011139](https://github.com/googleapis/python-pubsub/commit/07011139)) ### Documentation * A comment for field `analytics_hub_subscription_info` in message `.google.pubsub.v1.Subscription` is updated (PiperOrigin-RevId: 853856321) ([07011139](https://github.com/googleapis/python-pubsub/commit/07011139)) * Add the IDENTIFIER field behavior annotation to fields of Cloud Pub/Sub methods that represent a specific identity and need to be sourced with additional care (PiperOrigin-RevId: 840763233) ([07011139](https://github.com/googleapis/python-pubsub/commit/07011139)) * A comment for field `topic` in message `.google.pubsub.v1.Subscription` is updated (PiperOrigin-RevId: 853856321) ([07011139](https://github.com/googleapis/python-pubsub/commit/07011139)) * A comment for field `subscription` in message `.google.pubsub.v1.CreateSnapshotRequest` is updated (PiperOrigin-RevId: 853856321) ([07011139](https://github.com/googleapis/python-pubsub/commit/07011139)) * add tags documentation links to Pub/Sub resource tags fields (PiperOrigin-RevId: 845891076) ([07011139](https://github.com/googleapis/python-pubsub/commit/07011139))
--- .../google-cloud-pubsub/.librarian/state.yaml | 2 +- packages/google-cloud-pubsub/CHANGELOG.md | 16 ++++++++++++++++ .../google/pubsub/gapic_version.py | 2 +- .../google/pubsub_v1/gapic_version.py | 2 +- .../snippet_metadata_google.pubsub.v1.json | 2 +- 5 files changed, 20 insertions(+), 4 deletions(-) diff --git a/packages/google-cloud-pubsub/.librarian/state.yaml b/packages/google-cloud-pubsub/.librarian/state.yaml index a8f6625a3a70..2d5b23ac6709 100644 --- a/packages/google-cloud-pubsub/.librarian/state.yaml +++ b/packages/google-cloud-pubsub/.librarian/state.yaml @@ -1,7 +1,7 @@ image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:b8058df4c45e9a6e07f6b4d65b458d0d059241dd34c814f151c8bf6b89211209 libraries: - id: google-cloud-pubsub - version: 2.34.0 + version: 2.35.0 last_generated_commit: 2d0b3a154fb4f56993487c5409850ad3431a2690 apis: - path: google/pubsub/v1 diff --git a/packages/google-cloud-pubsub/CHANGELOG.md b/packages/google-cloud-pubsub/CHANGELOG.md index bcf798e60370..bef0c4c9e407 100644 --- a/packages/google-cloud-pubsub/CHANGELOG.md +++ b/packages/google-cloud-pubsub/CHANGELOG.md @@ -4,6 +4,22 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## [2.35.0](https://github.com/googleapis/python-pubsub/compare/v2.34.0...v2.35.0) (2026-02-05) + + +### Documentation + +* A comment for field `topic` in message `.google.pubsub.v1.Subscription` is updated ([07011139f51e4ff195889faf849bd707655e6d46](https://github.com/googleapis/python-pubsub/commit/07011139f51e4ff195889faf849bd707655e6d46)) +* A comment for field `analytics_hub_subscription_info` in message `.google.pubsub.v1.Subscription` is updated ([07011139f51e4ff195889faf849bd707655e6d46](https://github.com/googleapis/python-pubsub/commit/07011139f51e4ff195889faf849bd707655e6d46)) +* A comment for field `subscription` in message `.google.pubsub.v1.CreateSnapshotRequest` is updated 
([07011139f51e4ff195889faf849bd707655e6d46](https://github.com/googleapis/python-pubsub/commit/07011139f51e4ff195889faf849bd707655e6d46)) +* add tags documentation links to Pub/Sub resource tags fields ([07011139f51e4ff195889faf849bd707655e6d46](https://github.com/googleapis/python-pubsub/commit/07011139f51e4ff195889faf849bd707655e6d46)) +* Add the IDENTIFIER field behavior annotation to fields of Cloud Pub/Sub methods that represent a specific identity and need to be sourced with additional care ([07011139f51e4ff195889faf849bd707655e6d46](https://github.com/googleapis/python-pubsub/commit/07011139f51e4ff195889faf849bd707655e6d46)) + + +### Features + +* Add AIInference MessageTransform type ([07011139f51e4ff195889faf849bd707655e6d46](https://github.com/googleapis/python-pubsub/commit/07011139f51e4ff195889faf849bd707655e6d46)) + ## [2.34.0](https://github.com/googleapis/python-pubsub/compare/v2.33.0...v2.34.0) (2025-12-16) diff --git a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py index b31b170e1e8f..6d72a226d08b 100644 --- a/packages/google-cloud-pubsub/google/pubsub/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. # -__version__ = "2.34.0" # {x-release-please-version} +__version__ = "2.35.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py index b31b170e1e8f..6d72a226d08b 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/gapic_version.py @@ -13,4 +13,4 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -__version__ = "2.34.0" # {x-release-please-version} +__version__ = "2.35.0" # {x-release-please-version} diff --git a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json index f3af602abd59..2a423fd8652f 100644 --- a/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json +++ b/packages/google-cloud-pubsub/samples/generated_samples/snippet_metadata_google.pubsub.v1.json @@ -8,7 +8,7 @@ ], "language": "PYTHON", "name": "google-cloud-pubsub", - "version": "2.34.0" + "version": "2.35.0" }, "snippets": [ { From 54667f453790f2d4cbeee6920602b57fe65025d1 Mon Sep 17 00:00:00 2001 From: Tomo Suzuki Date: Mon, 23 Feb 2026 11:15:07 -0500 Subject: [PATCH 1195/1197] chore: replace old teams with cloud-sdk-python-team and pubsub-team (#1582) This PR replaces the old yoshi-python team with cloud-sdk-python-team and api-pubsub with pubsub-team. b/478003109 --- packages/google-cloud-pubsub/.github/CODEOWNERS | 8 ++++---- packages/google-cloud-pubsub/.repo-metadata.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS index f1b33465e33d..a01d0971fa64 100644 --- a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ b/packages/google-cloud-pubsub/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# @googleapis/yoshi-python @googleapis/api-pubsub are the default owners for changes in this repo -* @googleapis/yoshi-python @googleapis/api-pubsub +# @googleapis/cloud-sdk-python-team @googleapis/pubsub-team are the default owners for changes in this repo +* @googleapis/cloud-sdk-python-team @googleapis/pubsub-team -# @googleapis/python-samples-reviewers @googleapis/api-pubsub are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/api-pubsub +# @googleapis/python-samples-reviewers @googleapis/pubsub-team are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/pubsub-team diff --git a/packages/google-cloud-pubsub/.repo-metadata.json b/packages/google-cloud-pubsub/.repo-metadata.json index 8d12e4cc0b62..f65318dbacdf 100644 --- a/packages/google-cloud-pubsub/.repo-metadata.json +++ b/packages/google-cloud-pubsub/.repo-metadata.json @@ -11,7 +11,7 @@ "api_id": "pubsub.googleapis.com", "requires_billing": true, "default_version": "v1", - "codeowner_team": "@googleapis/api-pubsub", + "codeowner_team": "@googleapis/cloud-sdk-python-team @googleapis/pubsub-team", "api_shortname": "pubsub", "library_type": "GAPIC_COMBO", "api_description": "is designed to provide reliable, many-to-many, asynchronous messaging between applications. Publisher applications can send messages to a topic and other applications can subscribe to that topic to receive the messages. By decoupling senders and receivers, Google Cloud Pub/Sub allows developers to communicate between independently written applications." 
From 241704d529f73cb7c04bbf4753e208155f28087c Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 Mar 2026 17:29:50 +0000 Subject: [PATCH 1196/1197] Trigger owlbot post-processor --- .../google-cloud-pubsub/google-cloud-pubsub.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 owl-bot-staging/google-cloud-pubsub/google-cloud-pubsub/google-cloud-pubsub.txt diff --git a/owl-bot-staging/google-cloud-pubsub/google-cloud-pubsub/google-cloud-pubsub.txt b/owl-bot-staging/google-cloud-pubsub/google-cloud-pubsub/google-cloud-pubsub.txt new file mode 100644 index 000000000000..e69de29bb2d1 From ad6590cee8ff8c3f9b79f664ee07c2251ad1ae36 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Mon, 2 Mar 2026 17:30:01 +0000 Subject: [PATCH 1197/1197] build: google-cloud-pubsub migration: adjust owlbot-related files --- .../google-cloud-pubsub/.github/CODEOWNERS | 12 - .../.github/CONTRIBUTING.md | 28 - .../.github/ISSUE_TEMPLATE/bug_report.md | 43 -- .../.github/ISSUE_TEMPLATE/feature_request.md | 18 - .../.github/ISSUE_TEMPLATE/support_request.md | 7 - .../.github/PULL_REQUEST_TEMPLATE.md | 7 - .../.github/auto-approve.yml | 3 - .../.github/auto-label.yaml | 20 - .../.github/blunderbuss.yml | 17 - .../google-cloud-pubsub/.github/flakybot.yaml | 1 - .../.github/header-checker-lint.yml | 15 - .../.github/snippet-bot.yml | 0 .../.github/sync-repo-settings.yaml | 28 - .../.github/workflows/docs.yml | 38 -- .../.github/workflows/lint.yml | 25 - .../.github/workflows/unittest.yml | 61 --- packages/google-cloud-pubsub/.kokoro/build.sh | 60 --- .../.kokoro/continuous/common.cfg | 27 - .../.kokoro/continuous/continuous.cfg | 1 - .../.kokoro/continuous/prerelease-deps.cfg | 7 - .../.kokoro/populate-secrets.sh | 43 -- .../presubmit-against-pubsublite-samples.sh | 106 ---- .../.kokoro/presubmit/common.cfg | 27 - .../.kokoro/presubmit/prerelease-deps.cfg | 7 - .../presubmit-against-pubsublite-samples.cfg | 11 - .../.kokoro/presubmit/presubmit.cfg | 6 - 
.../.kokoro/samples/lint/common.cfg | 34 -- .../.kokoro/samples/lint/continuous.cfg | 6 - .../.kokoro/samples/lint/periodic.cfg | 6 - .../.kokoro/samples/lint/presubmit.cfg | 6 - .../.kokoro/samples/python3.10/common.cfg | 40 -- .../.kokoro/samples/python3.10/continuous.cfg | 6 - .../samples/python3.10/periodic-head.cfg | 11 - .../.kokoro/samples/python3.10/periodic.cfg | 6 - .../.kokoro/samples/python3.10/presubmit.cfg | 6 - .../.kokoro/samples/python3.11/common.cfg | 40 -- .../.kokoro/samples/python3.11/continuous.cfg | 6 - .../samples/python3.11/periodic-head.cfg | 11 - .../.kokoro/samples/python3.11/periodic.cfg | 6 - .../.kokoro/samples/python3.11/presubmit.cfg | 6 - .../.kokoro/samples/python3.12/common.cfg | 40 -- .../.kokoro/samples/python3.12/continuous.cfg | 6 - .../samples/python3.12/periodic-head.cfg | 11 - .../.kokoro/samples/python3.12/periodic.cfg | 6 - .../.kokoro/samples/python3.12/presubmit.cfg | 6 - .../.kokoro/samples/python3.13/common.cfg | 40 -- .../.kokoro/samples/python3.13/continuous.cfg | 6 - .../samples/python3.13/periodic-head.cfg | 11 - .../.kokoro/samples/python3.13/periodic.cfg | 6 - .../.kokoro/samples/python3.13/presubmit.cfg | 6 - .../.kokoro/samples/python3.14/common.cfg | 40 -- .../.kokoro/samples/python3.14/continuous.cfg | 6 - .../samples/python3.14/periodic-head.cfg | 11 - .../.kokoro/samples/python3.14/periodic.cfg | 6 - .../.kokoro/samples/python3.14/presubmit.cfg | 6 - .../.kokoro/samples/python3.9/common.cfg | 40 -- .../.kokoro/samples/python3.9/continuous.cfg | 6 - .../samples/python3.9/periodic-head.cfg | 11 - .../.kokoro/samples/python3.9/periodic.cfg | 6 - .../.kokoro/samples/python3.9/presubmit.cfg | 6 - .../.kokoro/test-samples-against-head.sh | 26 - .../.kokoro/test-samples-impl.sh | 103 ---- .../.kokoro/test-samples.sh | 44 -- .../google-cloud-pubsub/.kokoro/trampoline.sh | 28 - .../.kokoro/trampoline_v2.sh | 487 ------------------ packages/google-cloud-pubsub/.trampolinerc | 61 --- 
.../google-cloud-pubsub/docs/changelog.md | 1 - .../single-library.git-migrate-history.sh | 6 +- 68 files changed, 3 insertions(+), 1837 deletions(-) delete mode 100644 packages/google-cloud-pubsub/.github/CODEOWNERS delete mode 100644 packages/google-cloud-pubsub/.github/CONTRIBUTING.md delete mode 100644 packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md delete mode 100644 packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md delete mode 100644 packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md delete mode 100644 packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md delete mode 100644 packages/google-cloud-pubsub/.github/auto-approve.yml delete mode 100644 packages/google-cloud-pubsub/.github/auto-label.yaml delete mode 100644 packages/google-cloud-pubsub/.github/blunderbuss.yml delete mode 100644 packages/google-cloud-pubsub/.github/flakybot.yaml delete mode 100644 packages/google-cloud-pubsub/.github/header-checker-lint.yml delete mode 100644 packages/google-cloud-pubsub/.github/snippet-bot.yml delete mode 100644 packages/google-cloud-pubsub/.github/sync-repo-settings.yaml delete mode 100644 packages/google-cloud-pubsub/.github/workflows/docs.yml delete mode 100644 packages/google-cloud-pubsub/.github/workflows/lint.yml delete mode 100644 packages/google-cloud-pubsub/.github/workflows/unittest.yml delete mode 100755 packages/google-cloud-pubsub/.kokoro/build.sh delete mode 100644 packages/google-cloud-pubsub/.kokoro/continuous/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg delete mode 100755 packages/google-cloud-pubsub/.kokoro/populate-secrets.sh delete mode 100755 packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh delete mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg delete mode 100644 
packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg delete mode 100644 
packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg delete mode 100644 packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg delete mode 100755 packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh delete mode 100755 packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh delete mode 100755 packages/google-cloud-pubsub/.kokoro/test-samples.sh delete mode 100755 packages/google-cloud-pubsub/.kokoro/trampoline.sh delete mode 100755 packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh delete mode 100644 packages/google-cloud-pubsub/.trampolinerc delete mode 120000 packages/google-cloud-pubsub/docs/changelog.md diff --git a/packages/google-cloud-pubsub/.github/CODEOWNERS b/packages/google-cloud-pubsub/.github/CODEOWNERS deleted file mode 100644 index a01d0971fa64..000000000000 --- 
a/packages/google-cloud-pubsub/.github/CODEOWNERS +++ /dev/null @@ -1,12 +0,0 @@ -# Code owners file. -# This file controls who is tagged for review for any given pull request. -# -# For syntax help see: -# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax -# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. - -# @googleapis/cloud-sdk-python-team @googleapis/pubsub-team are the default owners for changes in this repo -* @googleapis/cloud-sdk-python-team @googleapis/pubsub-team - -# @googleapis/python-samples-reviewers @googleapis/pubsub-team are the default owners for samples changes -/samples/ @googleapis/python-samples-reviewers @googleapis/pubsub-team diff --git a/packages/google-cloud-pubsub/.github/CONTRIBUTING.md b/packages/google-cloud-pubsub/.github/CONTRIBUTING.md deleted file mode 100644 index 939e5341e74d..000000000000 --- a/packages/google-cloud-pubsub/.github/CONTRIBUTING.md +++ /dev/null @@ -1,28 +0,0 @@ -# How to Contribute - -We'd love to accept your patches and contributions to this project. There are -just a few small guidelines you need to follow. - -## Contributor License Agreement - -Contributions to this project must be accompanied by a Contributor License -Agreement. You (or your employer) retain the copyright to your contribution; -this simply gives us permission to use and redistribute your contributions as -part of the project. Head over to to see -your current agreements on file or to sign a new one. - -You generally only need to submit a CLA once, so if you've already submitted one -(even if it was for a different project), you probably don't need to do it -again. - -## Code reviews - -All submissions, including submissions by project members, require review. We -use GitHub pull requests for this purpose. 
Consult -[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more -information on using pull requests. - -## Community Guidelines - -This project follows [Google's Open Source Community -Guidelines](https://opensource.google.com/conduct/). diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index 27057e60ffd0..000000000000 --- a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - -Please run down the following list and make sure you've tried the usual "quick fixes": - - - Search the issues already opened: https://github.com/googleapis/python-pubsub/issues - - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python - -If you are still having issues, please be sure to include as much information as possible: - -#### Environment details - - - OS type and version: - - Python version: `python --version` - - pip version: `pip --version` - - `google-cloud-pubsub` version: `pip show google-cloud-pubsub` - -#### Steps to reproduce - - 1. ? - 2. ? - -#### Code example - -```python -# example -``` - -#### Stack trace -``` -# example -``` - -Making sure to follow these steps will guarantee the quickest resolution possible. - -Thanks! 
diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 6365857f33c6..000000000000 --- a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,18 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this library - ---- - -Thanks for stopping by to let us know something could be better! - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. - - **Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - **Describe the solution you'd like** -A clear and concise description of what you want to happen. - **Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - **Additional context** -Add any other context or screenshots about the feature request here. diff --git a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md b/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md deleted file mode 100644 index 995869032125..000000000000 --- a/packages/google-cloud-pubsub/.github/ISSUE_TEMPLATE/support_request.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -name: Support request -about: If you have a support contract with Google, please create an issue in the Google Cloud Support console. - ---- - -**PLEASE READ**: If you have a support contract with Google, please create an issue in the [support console](https://cloud.google.com/support/) instead of filing on GitHub. This will ensure a timely response. 
diff --git a/packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md b/packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md deleted file mode 100644 index 89f4a4d1a5c9..000000000000 --- a/packages/google-cloud-pubsub/.github/PULL_REQUEST_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ -Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: -- [ ] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-pubsub/issues/new/choose) before writing your code! That way we can discuss the change, evaluate designs, and agree on the general idea -- [ ] Ensure the tests and linter pass -- [ ] Code coverage does not decrease (if any source code was changed) -- [ ] Appropriate docs were updated (if necessary) - -Fixes # 🦕 diff --git a/packages/google-cloud-pubsub/.github/auto-approve.yml b/packages/google-cloud-pubsub/.github/auto-approve.yml deleted file mode 100644 index 311ebbb853a9..000000000000 --- a/packages/google-cloud-pubsub/.github/auto-approve.yml +++ /dev/null @@ -1,3 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve -processes: - - "OwlBotTemplateChanges" diff --git a/packages/google-cloud-pubsub/.github/auto-label.yaml b/packages/google-cloud-pubsub/.github/auto-label.yaml deleted file mode 100644 index 21786a4eb085..000000000000 --- a/packages/google-cloud-pubsub/.github/auto-label.yaml +++ /dev/null @@ -1,20 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -requestsize: - enabled: true - -path: - pullrequest: true - paths: - samples: "samples" diff --git a/packages/google-cloud-pubsub/.github/blunderbuss.yml b/packages/google-cloud-pubsub/.github/blunderbuss.yml deleted file mode 100644 index ac5c87339243..000000000000 --- a/packages/google-cloud-pubsub/.github/blunderbuss.yml +++ /dev/null @@ -1,17 +0,0 @@ -# Blunderbuss config -# -# This file controls who is assigned for pull requests and issues. -# Note: This file is autogenerated. To make changes to the assignee -# team, please update `codeowner_team` in `.repo-metadata.json`. -assign_issues: - - abbrowne126 - -assign_issues_by: - - labels: - - "samples" - to: - - googleapis/python-samples-reviewers - - abbrowne126 - -assign_prs: - - abbrowne126 diff --git a/packages/google-cloud-pubsub/.github/flakybot.yaml b/packages/google-cloud-pubsub/.github/flakybot.yaml deleted file mode 100644 index cb83375f9893..000000000000 --- a/packages/google-cloud-pubsub/.github/flakybot.yaml +++ /dev/null @@ -1 +0,0 @@ -issuePriority: p2 diff --git a/packages/google-cloud-pubsub/.github/header-checker-lint.yml b/packages/google-cloud-pubsub/.github/header-checker-lint.yml deleted file mode 100644 index 6fe78aa7987a..000000000000 --- a/packages/google-cloud-pubsub/.github/header-checker-lint.yml +++ /dev/null @@ -1,15 +0,0 @@ -{"allowedCopyrightHolders": ["Google LLC"], - "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], - "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"], - "sourceFileExtensions": [ - "ts", - "js", - "java", - "sh", - "Dockerfile", - "yaml", - "py", - "html", - "txt" - ] -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.github/snippet-bot.yml b/packages/google-cloud-pubsub/.github/snippet-bot.yml deleted file mode 100644 index 
e69de29bb2d1..000000000000 diff --git a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml b/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml deleted file mode 100644 index ecc31984da4b..000000000000 --- a/packages/google-cloud-pubsub/.github/sync-repo-settings.yaml +++ /dev/null @@ -1,28 +0,0 @@ -# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings -# Rules for main branch protection -branchProtectionRules: -# Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `main` -- pattern: main - requiresCodeOwnerReviews: true - requiresStrictStatusChecks: true - requiredStatusCheckContexts: - - 'Kokoro' - - 'Kokoro - Against Pub/Sub Lite samples' - - 'cla/google' - - 'Samples - Lint' - - 'Samples - Python 3.9' - - 'Samples - Python 3.10' - - 'Samples - Python 3.11' - - 'Samples - Python 3.12' - - 'OwlBot Post Processor' - - 'docs' - - 'docfx' - - 'lint' - - 'unit (3.9)' - - 'unit (3.10)' - - 'unit (3.11)' - - 'unit (3.12)' - - 'unit (3.13)' - - 'unit (3.14)' - - 'cover' diff --git a/packages/google-cloud-pubsub/.github/workflows/docs.yml b/packages/google-cloud-pubsub/.github/workflows/docs.yml deleted file mode 100644 index c5ee988370fe..000000000000 --- a/packages/google-cloud-pubsub/.github/workflows/docs.yml +++ /dev/null @@ -1,38 +0,0 @@ -on: - pull_request: - branches: - - main -name: docs -jobs: - docs: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v6 - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run docs - run: | - nox -s docs - docfx: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v6 - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: "3.10" - - name: Install nox - run: | - python -m pip install --upgrade 
setuptools pip wheel - python -m pip install nox - - name: Run docfx - run: | - nox -s docfx diff --git a/packages/google-cloud-pubsub/.github/workflows/lint.yml b/packages/google-cloud-pubsub/.github/workflows/lint.yml deleted file mode 100644 index a52933488f3d..000000000000 --- a/packages/google-cloud-pubsub/.github/workflows/lint.yml +++ /dev/null @@ -1,25 +0,0 @@ -on: - pull_request: - branches: - - main -name: lint -jobs: - lint: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v6 - - name: Setup Python - uses: actions/setup-python@v6 - with: - python-version: "3.14" - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run lint - run: | - nox -s lint - - name: Run lint_setup_py - run: | - nox -s lint_setup_py diff --git a/packages/google-cloud-pubsub/.github/workflows/unittest.yml b/packages/google-cloud-pubsub/.github/workflows/unittest.yml deleted file mode 100644 index 9fb410b8171b..000000000000 --- a/packages/google-cloud-pubsub/.github/workflows/unittest.yml +++ /dev/null @@ -1,61 +0,0 @@ -on: - pull_request: - branches: - - main -name: unittest -jobs: - unit: - # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed. 
- # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix - # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories - runs-on: ubuntu-22.04 - strategy: - matrix: - python: ['3.9', '3.10', '3.11', '3.12', '3.13', '3.14'] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python }} - - name: Install nox - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install nox - - name: Run unit tests - env: - COVERAGE_FILE: .coverage-${{ matrix.python }} - run: | - nox -s unit-${{ matrix.python }} - - name: Upload coverage results - uses: actions/upload-artifact@v4 - with: - name: coverage-artifact-${{ matrix.python }} - path: .coverage-${{ matrix.python }} - include-hidden-files: true - - cover: - runs-on: ubuntu-latest - needs: - - unit - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.14" - - name: Install coverage - run: | - python -m pip install --upgrade setuptools pip wheel - python -m pip install coverage - - name: Download coverage results - uses: actions/download-artifact@v4 - with: - path: .coverage-results/ - - name: Report coverage results - run: | - find .coverage-results -type f -name '*.zip' -exec unzip {} \; - coverage combine .coverage-results/**/.coverage* - coverage report --show-missing --fail-under=99 diff --git a/packages/google-cloud-pubsub/.kokoro/build.sh b/packages/google-cloud-pubsub/.kokoro/build.sh deleted file mode 100755 index d41b45aa1dd0..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/build.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the 
License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}") - -if [[ -z "${PROJECT_ROOT:-}" ]]; then - PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..") -fi - -pushd "${PROJECT_ROOT}" - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Setup service account credentials. -if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]] -then - export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json -fi - -# Setup project id. -if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]] -then - export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") -fi - -# If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then - cleanup() { - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - } - trap cleanup EXIT HUP -fi - -# If NOX_SESSION is set, it only runs the specified session, -# otherwise run all the sessions. 
-if [[ -n "${NOX_SESSION:-}" ]]; then - python3 -m nox -s ${NOX_SESSION:-} -else - python3 -m nox -fi diff --git a/packages/google-cloud-pubsub/.kokoro/continuous/common.cfg b/packages/google-cloud-pubsub/.kokoro/continuous/common.cfg deleted file mode 100644 index a812af888d39..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/continuous/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/build.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg deleted file mode 100644 index 8f43917d92fe..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/continuous/continuous.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg b/packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/continuous/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. 
-env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh b/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh deleted file mode 100755 index c435402f473e..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/populate-secrets.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} -function msg { println "$*" >&2 ;} -function println { printf '%s\n' "$(now) $*" ;} - - -# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: -# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com -SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" -msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" -mkdir -p ${SECRET_LOCATION} -for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") -do - msg "Retrieving secret ${key}" - docker run --entrypoint=gcloud \ - --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ - gcr.io/google.com/cloudsdktool/cloud-sdk \ - secrets versions access latest \ - --project cloud-devrel-kokoro-resources \ - --secret ${key} > \ - "${SECRET_LOCATION}/${key}" - if [[ $? 
== 0 ]]; then - msg "Secret written to ${SECRET_LOCATION}/${key}" - else - msg "Error retrieving secret ${key}" - fi -done diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh b/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh deleted file mode 100755 index 639cbb8d3181..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh +++ /dev/null @@ -1,106 +0,0 @@ -#!/bin/bash -# Copyright 2021 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.9 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -cd github/python-pubsub - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) - -# Clone googleapis/python-pubsublite -git clone https://github.com/googleapis/python-pubsublite.git - -# Find all requirements.txt in the Pub/Sub Lite samples directory (may break on whitespace). -for file in python-pubsublite/samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use pytest to execute tests for py-3.8 - python3.8 -m venv py-3.8 - source py-3.8/bin/activate - # Install python-pubsublite samples tests requirements. 
- python -m pip install --upgrade pip - python -m pip install -r requirements.txt -q - python -m pip install -r requirements-test.txt -q - # Install python-pubsub from source. - python -m pip install -e "$ROOT" -q - python -m pytest quickstart_test.py - EXIT=$? - - deactivate py-3.8 - rm -rf py-3.8/ - - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg deleted file mode 100644 index a812af888d39..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Download resources for system tests (service account key, etc.) -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/google-cloud-python" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. 
-env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" -} -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/build.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg deleted file mode 100644 index 3595fb43f5c0..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/prerelease-deps.cfg +++ /dev/null @@ -1,7 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Only run this nox session. -env_vars: { - key: "NOX_SESSION" - value: "prerelease_deps" -} diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg deleted file mode 100644 index 0ad289456ff7..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit-against-pubsublite-samples.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/presubmit-against-pubsublite-samples.sh" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg deleted file mode 100644 index 227ccdf47138..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/presubmit/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "NOX_SESSION" - value: "system-3.12 blacken mypy format" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg deleted file mode 100644 index a0106ace3cb0..000000000000 --- 
a/packages/google-cloud-pubsub/.kokoro/samples/lint/common.cfg +++ /dev/null @@ -1,34 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "lint" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/lint/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg deleted file mode 100644 index 50fec9649732..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/lint/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg 
b/packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/lint/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg deleted file mode 100644 index 363d8b0f758e..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.10" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-310" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.10/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" 
- value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg deleted file mode 100644 index f337a0d54a67..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.11" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-311" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.11/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" 
- value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg deleted file mode 100644 index ae61007721b8..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.12" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-312" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.12/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" 
- value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg deleted file mode 100644 index 96783769ba40..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.13" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-313" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.13/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No 
newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg deleted file mode 100644 index f6feff7057c7..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.14" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-314" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg deleted file mode 100644 index b196817872e9..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg deleted file mode 100644 index b196817872e9..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.14/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} diff --git 
a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg deleted file mode 100644 index a69739cce9a8..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/common.cfg +++ /dev/null @@ -1,40 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Specify which tests to run -env_vars: { - key: "RUN_TESTS_SESSION" - value: "py-3.9" -} - -# Declare build specific Cloud project. -env_vars: { - key: "BUILD_SPECIFIC_GCLOUD_PROJECT" - value: "python-docs-samples-tests-py39" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples.sh" -} - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" -} - -# Download secrets for samples -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. 
-build_file: "python-pubsub/.kokoro/trampoline_v2.sh" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/continuous.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg deleted file mode 100644 index f9cfcd33e058..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic-head.cfg +++ /dev/null @@ -1,11 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "True" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg deleted file mode 100644 index 71cd1e597e38..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/periodic.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: "False" -} diff --git a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg b/packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg deleted file mode 100644 index a1c8d9759c88..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/samples/python3.9/presubmit.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "INSTALL_LIBRARY_FROM_SOURCE" - value: 
"True" -} \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh deleted file mode 100755 index e9d8bd79a644..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-against-head.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A customized test runner for samples. -# -# For periodic builds, you can specify this file for testing against head. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh b/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh deleted file mode 100755 index 53e365bc4e79..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/test-samples-impl.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Exit early if samples don't exist -if ! find samples -name 'requirements.txt' | grep -q .; then - echo "No tests run. './samples/**/requirements.txt' not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -# `virtualenv==20.26.6` is added for Python 3.7 compatibility -python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6 - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. 
-gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.9 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot - fi - - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" diff --git a/packages/google-cloud-pubsub/.kokoro/test-samples.sh b/packages/google-cloud-pubsub/.kokoro/test-samples.sh deleted file mode 100755 index 7933d820149a..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/test-samples.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# The default test runner for samples. -# -# For periodic builds, we rewinds the repo to the latest release, and -# run test-samples-impl.sh. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero -set -eo pipefail -# Enables `**` to include files nested inside sub-folders -shopt -s globstar - -# Run periodic samples tests at latest release -if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - # preserving the test runner implementation. - cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - echo "Now we rewind the repo back to the latest release..." - LATEST_RELEASE=$(git describe --abbrev=0 --tags) - git checkout $LATEST_RELEASE - echo "The current head is: " - echo $(git rev-parse --verify HEAD) - echo "--- IMPORTANT IMPORTANT IMPORTANT ---" - # move back the test runner implementation if there's no file. - if [ ! 
-f .kokoro/test-samples-impl.sh ]; then - cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh - fi -fi - -exec .kokoro/test-samples-impl.sh diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline.sh b/packages/google-cloud-pubsub/.kokoro/trampoline.sh deleted file mode 100755 index 48f79699706e..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/trampoline.sh +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Always run the cleanup script, regardless of the success of bouncing into -# the container. -function cleanup() { - chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh - echo "cleanup"; -} -trap cleanup EXIT - -$(dirname $0)/populate-secrets.sh # Secret Manager secrets. -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh b/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh deleted file mode 100755 index d03f92dfc489..000000000000 --- a/packages/google-cloud-pubsub/.kokoro/trampoline_v2.sh +++ /dev/null @@ -1,487 +0,0 @@ -#!/usr/bin/env bash -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# trampoline_v2.sh -# -# This script does 3 things. -# -# 1. Prepare the Docker image for the test -# 2. Run the Docker with appropriate flags to run the test -# 3. Upload the newly built Docker image -# -# in a way that is somewhat compatible with trampoline_v1. -# -# To run this script, first download few files from gcs to /dev/shm. -# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). -# -# gcloud storage cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm -# gcloud storage cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm -# -# Then run the script. -# .kokoro/trampoline_v2.sh -# -# These environment variables are required: -# TRAMPOLINE_IMAGE: The docker image to use. -# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. -# -# You can optionally change these environment variables: -# TRAMPOLINE_IMAGE_UPLOAD: -# (true|false): Whether to upload the Docker image after the -# successful builds. -# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. -# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. -# Defaults to /workspace. -# Potentially there are some repo specific envvars in .trampolinerc in -# the project root. 
- - -set -euo pipefail - -TRAMPOLINE_VERSION="2.0.5" - -if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then - readonly IO_COLOR_RED="$(tput setaf 1)" - readonly IO_COLOR_GREEN="$(tput setaf 2)" - readonly IO_COLOR_YELLOW="$(tput setaf 3)" - readonly IO_COLOR_RESET="$(tput sgr0)" -else - readonly IO_COLOR_RED="" - readonly IO_COLOR_GREEN="" - readonly IO_COLOR_YELLOW="" - readonly IO_COLOR_RESET="" -fi - -function function_exists { - [ $(LC_ALL=C type -t $1)"" == "function" ] -} - -# Logs a message using the given color. The first argument must be one -# of the IO_COLOR_* variables defined above, such as -# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the -# given color. The log message will also have an RFC-3339 timestamp -# prepended (in UTC). You can disable the color output by setting -# TERM=vt100. -function log_impl() { - local color="$1" - shift - local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" - echo "================================================================" - echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" - echo "================================================================" -} - -# Logs the given message with normal coloring and a timestamp. -function log() { - log_impl "${IO_COLOR_RESET}" "$@" -} - -# Logs the given message in green with a timestamp. -function log_green() { - log_impl "${IO_COLOR_GREEN}" "$@" -} - -# Logs the given message in yellow with a timestamp. -function log_yellow() { - log_impl "${IO_COLOR_YELLOW}" "$@" -} - -# Logs the given message in red with a timestamp. -function log_red() { - log_impl "${IO_COLOR_RED}" "$@" -} - -readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) -readonly tmphome="${tmpdir}/h" -mkdir -p "${tmphome}" - -function cleanup() { - rm -rf "${tmpdir}" -} -trap cleanup EXIT - -RUNNING_IN_CI="${RUNNING_IN_CI:-false}" - -# The workspace in the container, defaults to /workspace. 
-TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" - -pass_down_envvars=( - # TRAMPOLINE_V2 variables. - # Tells scripts whether they are running as part of CI or not. - "RUNNING_IN_CI" - # Indicates which CI system we're in. - "TRAMPOLINE_CI" - # Indicates the version of the script. - "TRAMPOLINE_VERSION" -) - -log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" - -# Detect which CI systems we're in. If we're in any of the CI systems -# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be -# the name of the CI system. Both envvars will be passing down to the -# container for telling which CI system we're in. -if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then - # descriptive env var for indicating it's on CI. - RUNNING_IN_CI="true" - TRAMPOLINE_CI="kokoro" - if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then - if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then - log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." - exit 1 - fi - # This service account will be activated later. - TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" - else - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - gcloud auth list - fi - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet - fi - pass_down_envvars+=( - # KOKORO dynamic variables. 
- "KOKORO_BUILD_NUMBER" - "KOKORO_BUILD_ID" - "KOKORO_JOB_NAME" - "KOKORO_GIT_COMMIT" - "KOKORO_GITHUB_COMMIT" - "KOKORO_GITHUB_PULL_REQUEST_NUMBER" - "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For FlakyBot - "KOKORO_GITHUB_COMMIT_URL" - "KOKORO_GITHUB_PULL_REQUEST_URL" - ) -elif [[ "${TRAVIS:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="travis" - pass_down_envvars+=( - "TRAVIS_BRANCH" - "TRAVIS_BUILD_ID" - "TRAVIS_BUILD_NUMBER" - "TRAVIS_BUILD_WEB_URL" - "TRAVIS_COMMIT" - "TRAVIS_COMMIT_MESSAGE" - "TRAVIS_COMMIT_RANGE" - "TRAVIS_JOB_NAME" - "TRAVIS_JOB_NUMBER" - "TRAVIS_JOB_WEB_URL" - "TRAVIS_PULL_REQUEST" - "TRAVIS_PULL_REQUEST_BRANCH" - "TRAVIS_PULL_REQUEST_SHA" - "TRAVIS_PULL_REQUEST_SLUG" - "TRAVIS_REPO_SLUG" - "TRAVIS_SECURE_ENV_VARS" - "TRAVIS_TAG" - ) -elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="github-workflow" - pass_down_envvars+=( - "GITHUB_WORKFLOW" - "GITHUB_RUN_ID" - "GITHUB_RUN_NUMBER" - "GITHUB_ACTION" - "GITHUB_ACTIONS" - "GITHUB_ACTOR" - "GITHUB_REPOSITORY" - "GITHUB_EVENT_NAME" - "GITHUB_EVENT_PATH" - "GITHUB_SHA" - "GITHUB_REF" - "GITHUB_HEAD_REF" - "GITHUB_BASE_REF" - ) -elif [[ "${CIRCLECI:-}" == "true" ]]; then - RUNNING_IN_CI="true" - TRAMPOLINE_CI="circleci" - pass_down_envvars+=( - "CIRCLE_BRANCH" - "CIRCLE_BUILD_NUM" - "CIRCLE_BUILD_URL" - "CIRCLE_COMPARE_URL" - "CIRCLE_JOB" - "CIRCLE_NODE_INDEX" - "CIRCLE_NODE_TOTAL" - "CIRCLE_PREVIOUS_BUILD_NUM" - "CIRCLE_PROJECT_REPONAME" - "CIRCLE_PROJECT_USERNAME" - "CIRCLE_REPOSITORY_URL" - "CIRCLE_SHA1" - "CIRCLE_STAGE" - "CIRCLE_USERNAME" - "CIRCLE_WORKFLOW_ID" - "CIRCLE_WORKFLOW_JOB_ID" - "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" - "CIRCLE_WORKFLOW_WORKSPACE_ID" - ) -fi - -# Configure the service account for pulling the docker image. -function repo_root() { - local dir="$1" - while [[ ! -d "${dir}/.git" ]]; do - dir="$(dirname "$dir")" - done - echo "${dir}" -} - -# Detect the project root. 
In CI builds, we assume the script is in -# the git tree and traverse from there, otherwise, traverse from `pwd` -# to find `.git` directory. -if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - PROGRAM_PATH="$(realpath "$0")" - PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" - PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" -else - PROJECT_ROOT="$(repo_root $(pwd))" -fi - -log_yellow "Changing to the project root: ${PROJECT_ROOT}." -cd "${PROJECT_ROOT}" - -# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need -# to use this environment variable in `PROJECT_ROOT`. -if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then - - mkdir -p "${tmpdir}/gcloud" - gcloud_config_dir="${tmpdir}/gcloud" - - log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." - export CLOUDSDK_CONFIG="${gcloud_config_dir}" - - log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." - gcloud auth activate-service-account \ - --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" - log_yellow "Configuring Container Registry access" - gcloud auth configure-docker --quiet -fi - -required_envvars=( - # The basic trampoline configurations. - "TRAMPOLINE_IMAGE" - "TRAMPOLINE_BUILD_FILE" -) - -if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then - source "${PROJECT_ROOT}/.trampolinerc" -fi - -log_yellow "Checking environment variables." -for e in "${required_envvars[@]}" -do - if [[ -z "${!e:-}" ]]; then - log "Missing ${e} env var. Aborting." - exit 1 - fi -done - -# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 -# script: e.g. "github/repo-name/.kokoro/run_tests.sh" -TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" -log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" - -# ignore error on docker operations and test execution -set +e - -log_yellow "Preparing Docker image." -# We only download the docker image in CI builds. 
-if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - # Download the docker image specified by `TRAMPOLINE_IMAGE` - - # We may want to add --max-concurrent-downloads flag. - - log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." - if docker pull "${TRAMPOLINE_IMAGE}"; then - log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="true" - else - log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." - has_image="false" - fi -else - # For local run, check if we have the image. - if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then - has_image="true" - else - has_image="false" - fi -fi - - -# The default user for a Docker container has uid 0 (root). To avoid -# creating root-owned files in the build directory we tell docker to -# use the current user ID. -user_uid="$(id -u)" -user_gid="$(id -g)" -user_name="$(id -un)" - -# To allow docker in docker, we add the user to the docker group in -# the host os. -docker_gid=$(cut -d: -f3 < <(getent group docker)) - -update_cache="false" -if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then - # Build the Docker image from the source. - context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") - docker_build_flags=( - "-f" "${TRAMPOLINE_DOCKERFILE}" - "-t" "${TRAMPOLINE_IMAGE}" - "--build-arg" "UID=${user_uid}" - "--build-arg" "USERNAME=${user_name}" - ) - if [[ "${has_image}" == "true" ]]; then - docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") - fi - - log_yellow "Start building the docker image." - if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then - echo "docker build" "${docker_build_flags[@]}" "${context_dir}" - fi - - # ON CI systems, we want to suppress docker build logs, only - # output the logs when it fails. 
- if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then - if docker build "${docker_build_flags[@]}" "${context_dir}" \ - > "${tmpdir}/docker_build.log" 2>&1; then - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - cat "${tmpdir}/docker_build.log" - fi - - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - log_yellow "Dumping the build logs:" - cat "${tmpdir}/docker_build.log" - exit 1 - fi - else - if docker build "${docker_build_flags[@]}" "${context_dir}"; then - log_green "Finished building the docker image." - update_cache="true" - else - log_red "Failed to build the Docker image, aborting." - exit 1 - fi - fi -else - if [[ "${has_image}" != "true" ]]; then - log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting." - exit 1 - fi -fi - -# We use an array for the flags so they are easier to document. -docker_flags=( - # Remove the container after it exists. - "--rm" - - # Use the host network. - "--network=host" - - # Run in priviledged mode. We are not using docker for sandboxing or - # isolation, just for packaging our dev tools. - "--privileged" - - # Run the docker script with the user id. Because the docker image gets to - # write in ${PWD} you typically want this to be your user id. - # To allow docker in docker, we need to use docker gid on the host. - "--user" "${user_uid}:${docker_gid}" - - # Pass down the USER. - "--env" "USER=${user_name}" - - # Mount the project directory inside the Docker container. - "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}" - "--workdir" "${TRAMPOLINE_WORKSPACE}" - "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}" - - # Mount the temporary home directory. - "--volume" "${tmphome}:/h" - "--env" "HOME=/h" - - # Allow docker in docker. - "--volume" "/var/run/docker.sock:/var/run/docker.sock" - - # Mount the /tmp so that docker in docker can mount the files - # there correctly. 
- "--volume" "/tmp:/tmp" - # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR - # TODO(tmatsuo): This part is not portable. - "--env" "TRAMPOLINE_SECRET_DIR=/secrets" - "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile" - "--env" "KOKORO_GFILE_DIR=/secrets/gfile" - "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore" - "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore" -) - -# Add an option for nicer output if the build gets a tty. -if [[ -t 0 ]]; then - docker_flags+=("-it") -fi - -# Passing down env vars -for e in "${pass_down_envvars[@]}" -do - if [[ -n "${!e:-}" ]]; then - docker_flags+=("--env" "${e}=${!e}") - fi -done - -# If arguments are given, all arguments will become the commands run -# in the container, otherwise run TRAMPOLINE_BUILD_FILE. -if [[ $# -ge 1 ]]; then - log_yellow "Running the given commands '" "${@:1}" "' in the container." - readonly commands=("${@:1}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}" -else - log_yellow "Running the tests in a Docker container." - docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}") - if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then - echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" - fi - docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" -fi - - -test_retval=$? - -if [[ ${test_retval} -eq 0 ]]; then - log_green "Build finished with ${test_retval}" -else - log_red "Build finished with ${test_retval}" -fi - -# Only upload it when the test passes. -if [[ "${update_cache}" == "true" ]] && \ - [[ $test_retval == 0 ]] && \ - [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then - log_yellow "Uploading the Docker image." - if docker push "${TRAMPOLINE_IMAGE}"; then - log_green "Finished uploading the Docker image." - else - log_red "Failed uploading the Docker image." 
- fi - # Call trampoline_after_upload_hook if it's defined. - if function_exists trampoline_after_upload_hook; then - trampoline_after_upload_hook - fi - -fi - -exit "${test_retval}" diff --git a/packages/google-cloud-pubsub/.trampolinerc b/packages/google-cloud-pubsub/.trampolinerc deleted file mode 100644 index 0080152373d5..000000000000 --- a/packages/google-cloud-pubsub/.trampolinerc +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2024 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Add required env vars here. -required_envvars+=( -) - -# Add env vars which are passed down into the container here. -pass_down_envvars+=( - "NOX_SESSION" - ############### - # Docs builds - ############### - "STAGING_BUCKET" - "V2_STAGING_BUCKET" - ################## - # Samples builds - ################## - "INSTALL_LIBRARY_FROM_SOURCE" - "RUN_TESTS_SESSION" - "BUILD_SPECIFIC_GCLOUD_PROJECT" - # Target directories. - "RUN_TESTS_DIRS" - # The nox session to run. - "RUN_TESTS_SESSION" -) - -# Prevent unintentional override on the default image. -if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ - [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." - exit 1 -fi - -# Define the default value if it makes sense. 
-if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then - TRAMPOLINE_IMAGE_UPLOAD="" -fi - -if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then - TRAMPOLINE_IMAGE="" -fi - -if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then - TRAMPOLINE_DOCKERFILE="" -fi - -if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then - TRAMPOLINE_BUILD_FILE="" -fi diff --git a/packages/google-cloud-pubsub/docs/changelog.md b/packages/google-cloud-pubsub/docs/changelog.md deleted file mode 120000 index 04c99a55caae..000000000000 --- a/packages/google-cloud-pubsub/docs/changelog.md +++ /dev/null @@ -1 +0,0 @@ -../CHANGELOG.md \ No newline at end of file diff --git a/scripts/split_repo_migration/single-library.git-migrate-history.sh b/scripts/split_repo_migration/single-library.git-migrate-history.sh index 9f926e1d030e..650abac5ca26 100755 --- a/scripts/split_repo_migration/single-library.git-migrate-history.sh +++ b/scripts/split_repo_migration/single-library.git-migrate-history.sh @@ -70,7 +70,7 @@ echo "Created working directory: ${WORKDIR}" pushd "${WORKDIR}" # cd into workdir echo "Cloning source repository: ${SOURCE_REPO}" -git clone --recurse-submodules "git@github.com:${SOURCE_REPO}.git" source-repo +git clone --recurse-submodules --recurse-submodules "git@github.com:${SOURCE_REPO}.git" source-repo pushd source-repo @@ -124,7 +124,7 @@ git filter-branch \ --force \ --prune-empty \ --tree-filter \ - "git submodule update --init --recursive; find . -mindepth 2 -name .git -exec rm -rf {} +; shopt -s dotglob; mkdir -p ${WORKDIR}/migrated-source; mv * ${WORKDIR}/migrated-source; mkdir -p ${TARGET_PATH}; { mv ${WORKDIR}/migrated-source/* ${TARGET_PATH} || echo 'No files to move' ; }" + "git submodule update --init --recursive; find . -mindepth 2 -name .git -exec rm -rf {} +; git submodule update --init --recursive; find . 
-mindepth 2 -name .git -exec rm -rf {} +; shopt -s dotglob; mkdir -p ${WORKDIR}/migrated-source; mv * ${WORKDIR}/migrated-source; mkdir -p ${TARGET_PATH}; { mv ${WORKDIR}/migrated-source/* ${TARGET_PATH} || echo 'No files to move' ; }" # back to workdir popd @@ -142,7 +142,7 @@ echo "Success" popd # back to workdir # Do a diff between source code split repo and migrated code. -git clone --recurse-submodules "git@github.com:${SOURCE_REPO}.git" source-repo-validation # Not ideal to clone again. +git clone --recurse-submodules --recurse-submodules "git@github.com:${SOURCE_REPO}.git" source-repo-validation # Not ideal to clone again. find source-repo-validation -name .git -exec rm -rf {} + # That folder is not needed for validation. DIFF_FILE="${WORKDIR}/diff.txt"